c

ch.cern.sparkmeasure

TaskMetrics

case class TaskMetrics(sparkSession: SparkSession, gatherAccumulables: Boolean = false) extends Product with Serializable

Linear Supertypes
Serializable, Serializable, Product, Equals, AnyRef, Any
Ordering
  1. Alphabetic
  2. By Inheritance
Inherited
  1. TaskMetrics
  2. Serializable
  3. Serializable
  4. Product
  5. Equals
  6. AnyRef
  7. Any
  1. Hide All
  2. Show All
Visibility
  1. Public
  2. All

Instance Constructors

  1. new TaskMetrics(sparkSession: SparkSession, gatherAccumulables: Boolean = false)

Value Members

  1. final def !=(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  2. final def ##(): Int
    Definition Classes
    AnyRef → Any
  3. final def ==(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  4. def aggregateTaskMetrics(nameTempView: String = "PerfTaskMetrics"): DataFrame
  5. final def asInstanceOf[T0]: T0
    Definition Classes
    Any
  6. def begin(): Long
  7. var beginSnapshot: Long

    Variables used to store the start and end time of the period of interest for the metrics report

  8. def clone(): AnyRef
    Attributes
    protected[java.lang]
    Definition Classes
    AnyRef
    Annotations
    @native() @throws( ... )
  9. def createAccumulablesDF(nameTempView: String = "AccumulablesTaskMetrics"): DataFrame
  10. def createTaskMetricsDF(nameTempView: String = "PerfTaskMetrics"): DataFrame
  11. def end(): Long
  12. var endSnapshot: Long
  13. final def eq(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  14. def finalize(): Unit
    Attributes
    protected[java.lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( classOf[java.lang.Throwable] )
  15. val gatherAccumulables: Boolean
  16. final def getClass(): Class[_]
    Definition Classes
    AnyRef → Any
    Annotations
    @native()
  17. final def isInstanceOf[T0]: Boolean
    Definition Classes
    Any
  18. val listenerTask: TaskInfoRecorderListener

    This inserts the custom Spark Listener into the live Spark Context

  19. lazy val logger: Logger
  20. final def ne(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  21. final def notify(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native()
  22. final def notifyAll(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native()
  23. def printAccumulables(): Unit
  24. def printReport(): Unit
  25. def report(): String
  26. def runAndMeasure[T](f: ⇒ T): T

    Shortcut to run and measure the metrics for a Spark execution, modeled after spark.time()

  27. def saveData(df: DataFrame, fileName: String, fileFormat: String = "json"): Unit

    Helper method to save data. We expect moderate amounts of data, so collapsing to 1 partition is acceptable.

  28. def sendReportPrometheus(serverIPnPort: String, metricsJob: String, labelName: String = sparkSession.sparkContext.appName, labelValue: String = ...): Unit

    Send the metrics to Prometheus.

    Send the metrics to Prometheus. Parameters: serverIPnPort — Prometheus Pushgateway address as a String, in hostIP:Port format; metricsJob — job name; labelName — metrics label name (default: sparkSession.sparkContext.appName); labelValue — metrics label value (default: sparkSession.sparkContext.applicationId).

  29. val sparkSession: SparkSession
  30. final def synchronized[T0](arg0: ⇒ T0): T0
    Definition Classes
    AnyRef
  31. final def wait(): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  32. final def wait(arg0: Long, arg1: Int): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  33. final def wait(arg0: Long): Unit
    Definition Classes
    AnyRef
    Annotations
    @native() @throws( ... )

Inherited from Serializable

Inherited from Serializable

Inherited from Product

Inherited from Equals

Inherited from AnyRef

Inherited from Any

Ungrouped