abstract class StochasticGradientDescent[T] extends FirstOrderMinimizer[T, StochasticDiffFunction[T]] with SerializableLogging
Minimizes a function using stochastic gradient descent
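For orientation, the update this class performs can be written in the standard textbook form: each iteration moves along a descent direction d_t (by default the negative stochastic gradient, per chooseDescentDirection below) scaled by a decaying step size eta_t chosen by determineStepSize:

$$x_{t+1} = x_t + \eta_t \, d_t, \qquad d_t = -\nabla f_t(x_t)$$

This is the generic SGD recurrence, stated here as background rather than quoted from the library source.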
Linear Supertypes
FirstOrderMinimizer[T, StochasticDiffFunction[T]], SerializableLogging, Serializable, Serializable, Minimizer[T, StochasticDiffFunction[T]], AnyRef, Any
Type Members
-
abstract
type
History
Any history the derived minimization function needs to do its updates, typically an approximation to the second derivative/Hessian matrix.
- Definition Classes
- FirstOrderMinimizer
-
type
State = FirstOrderMinimizer.State[T, Info, History]
- Definition Classes
- FirstOrderMinimizer
Abstract Value Members
-
abstract
def
initialHistory(f: StochasticDiffFunction[T], init: T): History
- Attributes
- protected
- Definition Classes
- FirstOrderMinimizer
-
abstract
def
updateHistory(newX: T, newGrad: T, newVal: Double, f: StochasticDiffFunction[T], oldState: State): History
- Attributes
- protected
- Definition Classes
- FirstOrderMinimizer
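To make the two abstract members concrete, here is a minimal sketch of a stateless subclass that keeps no history between iterations (History = Unit). PlainSGD is a hypothetical name, not part of Breeze, and the constructor arguments (defaultStepSize, maxIter) mirror the two vals documented below but are an assumption about the primary constructor:

import breeze.linalg.DenseVector
import breeze.optimize.{StochasticDiffFunction, StochasticGradientDescent}

// Hypothetical minimal subclass: no history is carried between iterations,
// so both abstract members become trivial.
class PlainSGD(eta: Double, maxIter: Int)
    extends StochasticGradientDescent[DenseVector[Double]](eta, maxIter) {
  type History = Unit
  protected def initialHistory(
      f: StochasticDiffFunction[DenseVector[Double]],
      init: DenseVector[Double]): History = ()
  protected def updateHistory(
      newX: DenseVector[Double],
      newGrad: DenseVector[Double],
      newVal: Double,
      f: StochasticDiffFunction[DenseVector[Double]],
      oldState: State): History = ()
}

A NormedModule[DenseVector[Double], Double] instance is assumed to be implicitly available, as it is for Breeze's dense vectors.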
Concrete Value Members
-
final
def
!=(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
##(): Int
- Definition Classes
- AnyRef → Any
-
final
def
==(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
def
adjust(newX: T, newGrad: T, newVal: Double): (Double, T)
- Attributes
- protected
- Definition Classes
- FirstOrderMinimizer
-
def
adjustFunction(f: StochasticDiffFunction[T]): StochasticDiffFunction[T]
- Attributes
- protected
- Definition Classes
- FirstOrderMinimizer
-
final
def
asInstanceOf[T0]: T0
- Definition Classes
- Any
-
def
calculateObjective(f: StochasticDiffFunction[T], x: T, history: History): (Double, T)
- Attributes
- protected
- Definition Classes
- FirstOrderMinimizer
-
def
chooseDescentDirection(state: State, fn: StochasticDiffFunction[T]): T
- Attributes
- protected
- Definition Classes
- StochasticGradientDescent → FirstOrderMinimizer
-
def
clone(): AnyRef
- Attributes
- protected[java.lang]
- Definition Classes
- AnyRef
- Annotations
- @native() @HotSpotIntrinsicCandidate() @throws( ... )
-
val
convergenceCheck: ConvergenceCheck[T]
- Definition Classes
- FirstOrderMinimizer
- val defaultStepSize: Double
-
def
determineStepSize(state: State, f: StochasticDiffFunction[T], dir: T): Double
Choose a step size scale for this iteration.
Default is eta / math.pow(state.iter + 1, 2.0 / 3.0).
- Definition Classes
- StochasticGradientDescent → FirstOrderMinimizer
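For concreteness, with eta = 4 the first few scales of this decay schedule work out as below (plain arithmetic, not library code):

val eta = 4.0
val scales = (0 until 5).map(t => eta / math.pow(t + 1, 2.0 / 3.0))
// ≈ Vector(4.0, 2.52, 1.92, 1.59, 1.37)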
-
final
def
eq(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
def
equals(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
getClass(): Class[_]
- Definition Classes
- AnyRef → Any
- Annotations
- @native() @HotSpotIntrinsicCandidate()
-
def
hashCode(): Int
- Definition Classes
- AnyRef → Any
- Annotations
- @native() @HotSpotIntrinsicCandidate()
-
def
infiniteIterations(f: StochasticDiffFunction[T], state: State): Iterator[State]
- Definition Classes
- FirstOrderMinimizer
-
def
initialState(f: StochasticDiffFunction[T], init: T): State
- Attributes
- protected
- Definition Classes
- FirstOrderMinimizer
-
final
def
isInstanceOf[T0]: Boolean
- Definition Classes
- Any
-
def
iterations(f: StochasticDiffFunction[T], init: T): Iterator[State]
- Definition Classes
- FirstOrderMinimizer
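A sketch of driving the optimizer state by state, assuming the hypothetical PlainSGD subclass from above and some StochasticDiffFunction f already in scope; State is assumed to expose iter and value, as in FirstOrderMinimizer.State:

val sgd = new PlainSGD(eta = 4.0, maxIter = 100)
val states = sgd.iterations(f, DenseVector.zeros[Double](5))
// Inspect the objective value at each iteration until convergence.
states.foreach(s => println(s"iter=${s.iter} value=${s.value}"))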
-
def
logger: LazyLogger
- Attributes
- protected
- Definition Classes
- SerializableLogging
- val maxIter: Int
-
def
minimize(f: StochasticDiffFunction[T], init: T): T
- Definition Classes
- FirstOrderMinimizer → Minimizer
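Putting it together, an end-to-end sketch minimizing the deterministic quadratic ||x - 3||^2 wrapped as a StochasticDiffFunction, assuming calculate(x) returns a (value, gradient) pair as with Breeze's other DiffFunctions:

val f = new StochasticDiffFunction[DenseVector[Double]] {
  def calculate(x: DenseVector[Double]): (Double, DenseVector[Double]) = {
    val diff = x - 3.0          // gradient of ||x - 3||^2 is 2 * (x - 3)
    (diff.dot(diff), diff * 2.0)
  }
}
val xMin = new PlainSGD(4.0, 100).minimize(f, DenseVector.zeros[Double](5))
// xMin should approach the all-3s vector.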
-
def
minimizeAndReturnState(f: StochasticDiffFunction[T], init: T): State
- Definition Classes
- FirstOrderMinimizer
-
final
def
ne(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
final
def
notify(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native() @HotSpotIntrinsicCandidate()
-
final
def
notifyAll(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native() @HotSpotIntrinsicCandidate()
-
final
def
synchronized[T0](arg0: ⇒ T0): T0
- Definition Classes
- AnyRef
-
def
takeStep(state: State, dir: T, stepSize: Double): T
Projects the vector x onto whatever ball is needed, and can also incorporate regularization.
The default implementation just takes a plain gradient step.
- Attributes
- protected
- Definition Classes
- StochasticGradientDescent → FirstOrderMinimizer
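As an illustration of the projection hook, here is a hypothetical variant that clips each iterate back onto the L2 ball of radius r after the plain step; ProjectedSGD builds on the PlainSGD sketch above and is not part of Breeze:

import breeze.linalg.norm

class ProjectedSGD(eta: Double, maxIter: Int, r: Double)
    extends PlainSGD(eta, maxIter) {
  override protected def takeStep(
      state: State,
      dir: DenseVector[Double],
      stepSize: Double): DenseVector[Double] = {
    val stepped = state.x + dir * stepSize   // the default plain step
    val n = norm(stepped)
    if (n <= r) stepped else stepped * (r / n) // project onto ||x|| <= r
  }
}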
-
def
toString(): String
- Definition Classes
- AnyRef → Any
-
implicit
val
vspace: NormedModule[T, Double]
- Attributes
- protected
-
final
def
wait(arg0: Long, arg1: Int): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long): Unit
- Definition Classes
- AnyRef
- Annotations
- @native() @throws( ... )
-
final
def
wait(): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )