Packages

  • package root
    Definition Classes
    root
  • package ai
    Definition Classes
    root
  • package eto
    Definition Classes
    ai
  • package rikai
    Definition Classes
    eto
  • package sql

    Rikai SQL-ML extension.

    Rikai offers DDL statements to manage ML models:

    CREATE MODEL model_name
    [ OPTIONS (key=value, key=value, ...) ]
    [ AS "model_registry_uri" ]
    
    -- List all registered models.
    SHOW MODELS

    -- Describe the details of a model.
    (DESC | DESCRIBE) MODEL model_name

    -- Drop a model.
    DROP MODEL model_name

    An ML_PREDICT function is implemented to run model inference (a Scala usage sketch follows the package list below):

    SELECT id, ML_PREDICT(model_name, col1, col2, col3) AS predicted FROM table
    Definition Classes
    rikai
  • FilterUtils
  • Indexer
  • RikaiDataSource
  • RikaiFileFormat
  • RikaiReader
  • RikaiWriter
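A minimal sketch of driving the SQL-ML extension from Scala. The extension class name used below (ai.eto.rikai.sql.spark.RikaiSparkSessionExtensions), the model name, and the mlflow registry URI are assumptions for illustration; check your build for the exact values.

  import org.apache.spark.sql.SparkSession

  // Install the Rikai SQL extension when building the session.
  // The extension class name here is an assumption, not a confirmed value.
  val spark = SparkSession
    .builder()
    .appName("rikai-sql-ml")
    .config(
      "spark.sql.extensions",
      "ai.eto.rikai.sql.spark.RikaiSparkSessionExtensions"
    )
    .getOrCreate()

  // Register a model from a (hypothetical) registry URI, inspect it,
  // then run inference with ML_PREDICT.
  spark.sql("CREATE MODEL my_detector AS \"mlflow:///my_detector\"")
  spark.sql("SHOW MODELS").show()
  spark.sql("DESCRIBE MODEL my_detector").show()
  spark.sql(
    "SELECT id, ML_PREDICT(my_detector, image) AS predicted FROM images"
  ).show()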

ai.eto.rikai

RikaiFileFormat

class RikaiFileFormat extends ParquetFileFormat

Linear Supertypes
ParquetFileFormat, Serializable, Serializable, Logging, DataSourceRegister, FileFormat, AnyRef, Any
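
RikaiFileFormat specializes Spark's ParquetFileFormat: it overrides prepareWrite and adds writeMetadataFile, so a Rikai dataset stays plain Parquet on disk plus a metadata file. A minimal usage sketch, assuming the format is registered under the short name "rikai" (see RikaiDataSource; the short name is an assumption here):

  import org.apache.spark.sql.SparkSession

  val spark = SparkSession.builder().getOrCreate()

  // Write through the Rikai format; the "rikai" short name is assumed
  // to be what RikaiDataSource registers via DataSourceRegister.
  val df = spark.range(10).toDF("id")
  df.write.format("rikai").mode("overwrite").save("/tmp/rikai_dataset")

  // The files remain Parquet-compatible and can be read back the same way.
  spark.read.format("rikai").load("/tmp/rikai_dataset").show()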

Instance Constructors

  1. new RikaiFileFormat()

Value Members

  1. final def !=(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  2. final def ##(): Int
    Definition Classes
    AnyRef → Any
  3. final def ==(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  4. final def asInstanceOf[T0]: T0
    Definition Classes
    Any
  5. def buildReader(sparkSession: SparkSession, dataSchema: StructType, partitionSchema: StructType, requiredSchema: StructType, filters: Seq[Filter], options: Map[String, String], hadoopConf: Configuration): (PartitionedFile) ⇒ Iterator[InternalRow]
    Attributes
    protected
    Definition Classes
    FileFormat
  6. def buildReaderWithPartitionValues(sparkSession: SparkSession, dataSchema: StructType, partitionSchema: StructType, requiredSchema: StructType, filters: Seq[Filter], options: Map[String, String], hadoopConf: Configuration): (PartitionedFile) ⇒ Iterator[InternalRow]
    Definition Classes
    ParquetFileFormat → FileFormat
  7. def clone(): AnyRef
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( ... ) @native()
  8. final def eq(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  9. def equals(other: Any): Boolean
    Definition Classes
    ParquetFileFormat → AnyRef → Any
  10. def finalize(): Unit
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( classOf[java.lang.Throwable] )
  11. final def getClass(): Class[_]
    Definition Classes
    AnyRef → Any
    Annotations
    @native()
  12. def hashCode(): Int
    Definition Classes
    ParquetFileFormat → AnyRef → Any
  13. def inferSchema(sparkSession: SparkSession, parameters: Map[String, String], files: Seq[FileStatus]): Option[StructType]
    Definition Classes
    ParquetFileFormat → FileFormat
  14. def initializeLogIfNecessary(isInterpreter: Boolean, silent: Boolean): Boolean
    Attributes
    protected
    Definition Classes
    Logging
  15. def initializeLogIfNecessary(isInterpreter: Boolean): Unit
    Attributes
    protected
    Definition Classes
    Logging
  16. final def isInstanceOf[T0]: Boolean
    Definition Classes
    Any
  17. def isSplitable(sparkSession: SparkSession, options: Map[String, String], path: Path): Boolean
    Definition Classes
    ParquetFileFormat → FileFormat
  18. def isTraceEnabled(): Boolean
    Attributes
    protected
    Definition Classes
    Logging
  19. def log: Logger
    Attributes
    protected
    Definition Classes
    Logging
  20. def logDebug(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  21. def logDebug(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  22. def logError(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  23. def logError(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  24. def logInfo(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  25. def logInfo(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  26. def logName: String
    Attributes
    protected
    Definition Classes
    Logging
  27. def logTrace(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  28. def logTrace(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  29. def logWarning(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  30. def logWarning(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  31. final def ne(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  32. final def notify(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native()
  33. final def notifyAll(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native()
  34. def prepareWrite(sparkSession: SparkSession, job: Job, options: Map[String, String], dataSchema: StructType): OutputWriterFactory
    Definition Classes
    RikaiFileFormat → ParquetFileFormat → FileFormat
  35. def shortName(): String
    Definition Classes
    ParquetFileFormat → DataSourceRegister
  36. def supportBatch(sparkSession: SparkSession, schema: StructType): Boolean
    Definition Classes
    ParquetFileFormat → FileFormat
  37. def supportDataType(dataType: DataType): Boolean
    Definition Classes
    ParquetFileFormat → FileFormat
  38. def supportFieldName(name: String): Boolean
    Definition Classes
    ParquetFileFormat → FileFormat
  39. final def synchronized[T0](arg0: ⇒ T0): T0
    Definition Classes
    AnyRef
  40. def toString(): String
    Definition Classes
    ParquetFileFormat → AnyRef → Any
  41. def vectorTypes(requiredSchema: StructType, partitionSchema: StructType, sqlConf: SQLConf): Option[Seq[String]]
    Definition Classes
    ParquetFileFormat → FileFormat
  42. final def wait(): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  43. final def wait(arg0: Long, arg1: Int): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  44. final def wait(arg0: Long): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... ) @native()
  45. def writeMetadataFile(metadataFile: Path, sparkSession: SparkSession, options: RikaiOptions): Unit
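
writeMetadataFile is the one member RikaiFileFormat adds on top of ParquetFileFormat (prepareWrite is overridden so the write path can emit Rikai metadata alongside the Parquet files). A hedged sketch of calling it directly; the RikaiOptions import path and constructor, and the metadata path, are assumptions, not confirmed API:

  import org.apache.hadoop.fs.Path
  import org.apache.spark.sql.SparkSession
  // Assumed import path for RikaiOptions; verify against the source tree.
  import ai.eto.rikai.{RikaiFileFormat, RikaiOptions}

  val spark = SparkSession.builder().getOrCreate()

  // Assumed constructor: RikaiOptions taking key-value pairs.
  val options = new RikaiOptions(Seq("path" -> "/tmp/rikai_dataset"))

  // Write the Rikai metadata file for an existing dataset directory.
  new RikaiFileFormat().writeMetadataFile(
    new Path("/tmp/rikai_dataset"),
    spark,
    options
  )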
