class ConjugateGradient[T, M] extends SerializableLogging
Solves, for x, argmin ( a ⋅ x + ½ x ⋅ (B x) + ½ normSquaredPenalty ⋅ (x ⋅ x) ) subject to the constraint ‖x‖ ≤ maxNormValue.
Based on the code from "Trust Region Newton Method for Large-Scale Logistic Regression".
Author: dlwh
Linear Supertypes
Ordering
- Alphabetic
- By Inheritance
Inherited
- ConjugateGradient
- SerializableLogging
- Serializable (scala.Serializable)
- Serializable (java.io.Serializable)
- AnyRef
- Any
- Hide All
- Show All
Visibility
- Public
- All
Instance Constructors
- new ConjugateGradient(maxNormValue: Double = Double.PositiveInfinity, maxIterations: Int = -1, normSquaredPenalty: Double = 0, tolerance: Double = 1E-5)(implicit space: MutableInnerProductVectorSpace[T, Double], mult: linalg.operators.OpMulMatrix.Impl2[M, T, T])
Type Members
- case class State extends Product with Serializable
Value Members
-
final
def
!=(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
##(): Int
- Definition Classes
- AnyRef → Any
-
final
def
==(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
asInstanceOf[T0]: T0
- Definition Classes
- Any
-
def
clone(): AnyRef
- Attributes
- protected[java.lang]
- Definition Classes
- AnyRef
- Annotations
- @native() @HotSpotIntrinsicCandidate() @throws( ... )
-
final
def
eq(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
def
equals(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
getClass(): Class[_]
- Definition Classes
- AnyRef → Any
- Annotations
- @native() @HotSpotIntrinsicCandidate()
-
def
hashCode(): Int
- Definition Classes
- AnyRef → Any
- Annotations
- @native() @HotSpotIntrinsicCandidate()
-
final
def
isInstanceOf[T0]: Boolean
- Definition Classes
- Any
- def iterations(a: T, B: M, initX: T): Iterator[State]
-
def
logger: LazyLogger
- Attributes
- protected
- Definition Classes
- SerializableLogging
- def minimize(a: T, B: M, initX: T): T
- def minimize(a: T, B: M): T
-
def
minimizeAndReturnResidual(a: T, B: M, initX: T): (T, T)
Returns the pair (x, r), where x is the minimizer and r is the residual error. The residual may not be near zero, because of the norm constraint.
-
final
def
ne(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
final
def
notify(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native() @HotSpotIntrinsicCandidate()
-
final
def
notifyAll(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native() @HotSpotIntrinsicCandidate()
-
final
def
synchronized[T0](arg0: ⇒ T0): T0
- Definition Classes
- AnyRef
-
def
toString(): String
- Definition Classes
- AnyRef → Any
-
final
def
wait(arg0: Long, arg1: Int): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long): Unit
- Definition Classes
- AnyRef
- Annotations
- @native() @throws( ... )
-
final
def
wait(): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )