case class OptParams(batchSize: Int = 512, regularization: Double = 0.0, alpha: Double = 0.5, maxIterations: Int = 1000, useL1: Boolean = false, tolerance: Double = 1E-5, useStochastic: Boolean = false, randomSeed: Int = 0) extends Product with Serializable
OptParams is a Configuration-compatible case class that can be used to select optimization routines at runtime.
Configurations: 1) useStochastic=false,useL1=false: LBFGS with L2 regularization 2) useStochastic=false,useL1=true: OWLQN with L1 regularization 3) useStochastic=true,useL1=false: AdaptiveGradientDescent with L2 regularization 4) useStochastic=true,useL1=true: AdaptiveGradientDescent with L1 regularization
- batchSize
size of batches to use when useStochastic is true and a BatchDiffFunction is provided
- regularization
regularization constant to use.
- alpha
rate of change to use, only applies to SGD.
- useL1
if true, use L1 regularization. Otherwise, use L2.
- tolerance
convergence tolerance, looking at both average improvement and the norm of the gradient.
- useStochastic
if false, use LBFGS or OWLQN. If true, use some variant of Stochastic Gradient Descent.
- Alphabetic
- By Inheritance
- OptParams
- Serializable
- Serializable
- Product
- Equals
- AnyRef
- Any
- Hide All
- Show All
- Public
- All
Instance Constructors
-
new
OptParams(batchSize: Int = 512, regularization: Double = 0.0, alpha: Double = 0.5, maxIterations: Int = 1000, useL1: Boolean = false, tolerance: Double = 1E-5, useStochastic: Boolean = false, randomSeed: Int = 0)
- batchSize
size of batches to use when useStochastic is true and a BatchDiffFunction is provided
- regularization
regularization constant to use.
- alpha
rate of change to use, only applies to SGD.
- useL1
if true, use L1 regularization. Otherwise, use L2.
- tolerance
convergence tolerance, looking at both average improvement and the norm of the gradient.
- useStochastic
if false, use LBFGS or OWLQN. If true, use some variant of Stochastic Gradient Descent.
Value Members
-
final
def
!=(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
##(): Int
- Definition Classes
- AnyRef → Any
-
final
def
==(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
- val alpha: Double
-
final
def
asInstanceOf[T0]: T0
- Definition Classes
- Any
- val batchSize: Int
-
def
clone(): AnyRef
- Attributes
- protected[java.lang]
- Definition Classes
- AnyRef
- Annotations
- @native() @HotSpotIntrinsicCandidate() @throws( ... )
-
final
def
eq(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
final
def
getClass(): Class[_]
- Definition Classes
- AnyRef → Any
- Annotations
- @native() @HotSpotIntrinsicCandidate()
-
final
def
isInstanceOf[T0]: Boolean
- Definition Classes
- Any
- val maxIterations: Int
-
final
def
ne(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
final
def
notify(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native() @HotSpotIntrinsicCandidate()
-
final
def
notifyAll(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native() @HotSpotIntrinsicCandidate()
- val randomSeed: Int
- val regularization: Double
-
final
def
synchronized[T0](arg0: ⇒ T0): T0
- Definition Classes
- AnyRef
- val tolerance: Double
- val useL1: Boolean
- val useStochastic: Boolean
-
final
def
wait(arg0: Long, arg1: Int): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long): Unit
- Definition Classes
- AnyRef
- Annotations
- @native() @throws( ... )
-
final
def
wait(): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
Deprecated Value Members
-
def
finalize(): Unit
- Attributes
- protected[java.lang]
- Definition Classes
- AnyRef
- Annotations
- @Deprecated @deprecated @throws( classOf[java.lang.Throwable] )
- Deprecated
(Since version ) see corresponding Javadoc for more information.
-
def
iterations[T, K](f: DiffFunction[T], init: T)(implicit space: MutableEnumeratedCoordinateField[T, K, Double]): Iterator[State[T, Info, ApproximateInverseHessian[T]] forSome {val _1: LBFGS[T]}]
- Annotations
- @deprecated
- Deprecated
(Since version 0.10) Use breeze.optimize.iterations(f, init, params) instead.
-
def
iterations[T](f: StochasticDiffFunction[T], init: T)(implicit space: MutableFiniteCoordinateField[T, _, Double]): Iterator[State[T, Info, FirstOrderMinimizer._1.type.History] forSome {val _1: FirstOrderMinimizer[T, StochasticDiffFunction[T]]}]
- Annotations
- @deprecated
- Deprecated
(Since version 0.10) Use breeze.optimize.iterations(f, init, params) instead.
-
def
iterations[T](f: BatchDiffFunction[T], init: T)(implicit space: MutableFiniteCoordinateField[T, _, Double]): Iterator[State[T, Info, FirstOrderMinimizer._1.type.History] forSome {val _1: FirstOrderMinimizer[T, BatchDiffFunction[T]]}]
- Annotations
- @deprecated
- Deprecated
(Since version 0.10) Use breeze.optimize.iterations(f, init, params) instead.
-
def
minimize[T](f: DiffFunction[T], init: T)(implicit space: MutableEnumeratedCoordinateField[T, _, Double]): T
- Annotations
- @deprecated
- Deprecated
(Since version 0.10) Use breeze.optimize.minimize(f, init, params) instead.
-
def
minimize[T](f: BatchDiffFunction[T], init: T)(implicit space: MutableFiniteCoordinateField[T, _, Double]): T
- Annotations
- @deprecated
- Deprecated
(Since version 0.10) Use breeze.optimize.minimize(f, init, params) instead.