object SparkUtils extends LazyLogging
- Alphabetic
- By Inheritance
- SparkUtils
- LazyLogging
- AnyRef
- Any
- Hide All
- Show All
- Public
- Protected
Type Members
- case class SimpleFeatureRowMapping(sft: SimpleFeatureType, mappings: Seq[(Int, Int, Boolean)], id: (Row) => String) extends Product with Serializable
Value Members
- final def !=(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
- final def ##: Int
- Definition Classes
- AnyRef → Any
- final def ==(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
- final def asInstanceOf[T0]: T0
- Definition Classes
- Any
- def clone(): AnyRef
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.CloneNotSupportedException]) @native()
- def createFeatureType(name: String, struct: StructType): SimpleFeatureType
- def createStructType(sft: SimpleFeatureType): StructType
- final def eq(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
- def equals(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef → Any
- val ff: FilterFactory2
- def finalize(): Unit
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.Throwable])
- final def getClass(): Class[_ <: AnyRef]
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
- def getExtractors(requiredColumns: Array[String], schema: StructType): Array[(SimpleFeature) => AnyRef]
- def hashCode(): Int
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
- final def isInstanceOf[T0]: Boolean
- Definition Classes
- Any
- def joinedSf2row(schema: StructType, sf1: SimpleFeature, sf2: SimpleFeature, extractors: Array[(SimpleFeature) => AnyRef]): Row
- lazy val logger: Logger
- Attributes
- protected
- Definition Classes
- LazyLogging
- Annotations
- @transient()
- final def ne(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
- final def notify(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
- final def notifyAll(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
- def rowsToFeatures(name: String, schema: StructType): SimpleFeatureRowMapping
Creates a function to convert a row to a simple feature, which will be based on the columns in the row schema.
Creates a function to convert a row to a simple feature, which will be based on the columns in the row schema.
If the row has a fid column, it will be used for the feature id. Otherwise, it will use a random id prefixed with the current time.
- name
simple feature type name to use
- schema
row schema
- def rowsToFeatures(sft: SimpleFeatureType, schema: StructType): SimpleFeatureRowMapping
Creates a function to convert a row to a simple feature.
Creates a function to convert a row to a simple feature. Columns will be mapped to attributes based on matching names.
If the row has a fid column, it will be used for the feature id. Otherwise, it will use a random id prefixed with the current time.
- sft
simple feature type
- schema
dataframe schema
- def sf2row(schema: StructType, sf: SimpleFeature, extractors: Array[(SimpleFeature) => AnyRef]): Row
- def sparkFilterToCQLFilter(filt: Filter): Option[Filter]
- final def synchronized[T0](arg0: => T0): T0
- Definition Classes
- AnyRef
- def toString(): String
- Definition Classes
- AnyRef → Any
- final def wait(): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.InterruptedException])
- final def wait(arg0: Long, arg1: Int): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.InterruptedException])
- final def wait(arg0: Long): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.InterruptedException]) @native()