c

ch.cern.sparkmeasure

StageMetrics

case class StageMetrics(sparkSession: SparkSession) extends Product with Serializable

Linear Supertypes
Serializable, Serializable, Product, Equals, AnyRef, Any
Ordering
  1. Alphabetic
  2. By Inheritance
Inherited
  1. StageMetrics
  2. Serializable
  3. Serializable
  4. Product
  5. Equals
  6. AnyRef
  7. Any
  1. Hide All
  2. Show All
Visibility
  1. Public
  2. All

Instance Constructors

  1. new StageMetrics(sparkSession: SparkSession)

Value Members

  1. final def !=(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  2. final def ##(): Int
    Definition Classes
    AnyRef → Any
  3. final def ==(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  4. def aggregateStageMetrics(nameTempView: String = "PerfStageMetrics"): DataFrame
  5. final def asInstanceOf[T0]: T0
    Definition Classes
    Any
  6. def begin(): Long
  7. var beginSnapshot: Long

    Variables used to store the start and end time of the period of interest for the metrics report

  8. def clone(): AnyRef
    Attributes
    protected[java.lang]
    Definition Classes
    AnyRef
    Annotations
    @native() @throws( ... )
  9. def createAccumulablesDF(nameTempView: String = "AccumulablesStageMetrics"): DataFrame
  10. def createStageMetricsDF(nameTempView: String = "PerfStageMetrics"): DataFrame

    Moves the data recorded by the custom listener into a DataFrame and registers it as a temporary view for easier processing

  11. def end(): Long
  12. var endSnapshot: Long
  13. final def eq(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  14. def finalize(): Unit
    Attributes
    protected[java.lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( classOf[java.lang.Throwable] )
  15. final def getClass(): Class[_]
    Definition Classes
    AnyRef → Any
    Annotations
    @native()
  16. final def isInstanceOf[T0]: Boolean
    Definition Classes
    Any
  17. val listenerStage: StageInfoRecorderListener

    This inserts the custom Spark Listener into the live Spark Context

  18. lazy val logger: Logger
  19. final def ne(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  20. final def notify(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native()
  21. final def notifyAll(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native()
  22. def printAccumulables(): Unit
  23. def printReport(): Unit
  24. def report(): String

    Custom aggregations and post-processing of metrics data

  25. def reportAccumulables(): String

    For internal metrics, sum all the values; for the accumulables, compute the max value for each accId and name

  26. def runAndMeasure[T](f: ⇒ T): T

    Shortcut to run and measure the metrics for a Spark execution, modeled after spark.time()

  27. def saveData(df: DataFrame, fileName: String, fileFormat: String = "json"): Unit

    Helper method to save data; we expect small amounts of data, so collapsing to 1 partition seems OK

  28. def sendReportPrometheus(serverIPnPort: String, metricsJob: String, labelName: String = sparkSession.sparkContext.appName, labelValue: String = ...): Unit

    Send the metrics to Prometheus.

    Send the metrics to Prometheus. serverIPnPort: String with the Prometheus Pushgateway address, in the format hostIP:Port; metricsJob: the job name; labelName: the metrics label name (default: sparkSession.sparkContext.appName); labelValue: the metrics label value (default: sparkSession.sparkContext.applicationId)

  29. val sparkSession: SparkSession
  30. final def synchronized[T0](arg0: ⇒ T0): T0
    Definition Classes
    AnyRef
  31. final def wait(): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  32. final def wait(arg0: Long, arg1: Int): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  33. final def wait(arg0: Long): Unit
    Definition Classes
    AnyRef
    Annotations
    @native() @throws( ... )

Inherited from Serializable

Inherited from Serializable

Inherited from Product

Inherited from Equals

Inherited from AnyRef

Inherited from Any

Ungrouped