Package gorsat.spark
Class GorBatchTable
java.lang.Object
gorsat.spark.GorBatchTable
- All Implemented Interfaces:
org.apache.spark.sql.connector.catalog.SupportsRead, org.apache.spark.sql.connector.catalog.SupportsWrite, org.apache.spark.sql.connector.catalog.Table, org.apache.spark.sql.connector.read.ScanBuilder, org.apache.spark.sql.connector.read.SupportsPushDownFilters
public abstract class GorBatchTable
extends Object
implements org.apache.spark.sql.connector.catalog.Table, org.apache.spark.sql.connector.catalog.SupportsRead, org.apache.spark.sql.connector.catalog.SupportsWrite, org.apache.spark.sql.connector.read.SupportsPushDownFilters
-
Constructor Summary
Constructors:
GorBatchTable(String query, boolean tag, String path, String filter, String filterFile, String filterColumn, String splitFile, String seek, String redisUri, String streamKey, String jobId, String cacheFile, String securityContext, String useCpp, boolean hadoopInfer)
GorBatchTable(String query, boolean tag, String path, String filter, String filterFile, String filterColumn, String splitFile, String seek, org.apache.spark.sql.types.StructType schema, String redisUri, String streamKey, String jobId, String cacheFile, String securityContext, String useCpp, boolean hadoopInfer)
-
Method Summary
Modifier and Type / Method:
org.apache.spark.sql.connector.read.Scan build()
Set<org.apache.spark.sql.connector.catalog.TableCapability> capabilities()
String name()
org.apache.spark.sql.connector.read.ScanBuilder newScanBuilder(org.apache.spark.sql.util.CaseInsensitiveStringMap caseInsensitiveStringMap)
org.apache.spark.sql.connector.write.WriteBuilder newWriteBuilder(org.apache.spark.sql.connector.write.LogicalWriteInfo info)
org.apache.spark.sql.connector.expressions.Transform[] partitioning()
org.apache.spark.sql.sources.Filter[] pushedFilters()
org.apache.spark.sql.sources.Filter[] pushFilters(org.apache.spark.sql.sources.Filter[] filters)
org.apache.spark.sql.types.StructType schema()
void setAliasFile(String aliasFile)
void setCacheDir(String cacheDir)
void setConfigFile(String configFile)
void setProjectRoot(String projectRoot)
Methods inherited from class java.lang.Object
clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait
Methods inherited from interface org.apache.spark.sql.connector.catalog.Table
properties
-
Constructor Details
-
GorBatchTable
public GorBatchTable(String query, boolean tag, String path, String filter, String filterFile, String filterColumn, String splitFile, String seek, String redisUri, String streamKey, String jobId, String cacheFile, String securityContext, String useCpp, boolean hadoopInfer) throws IOException - Throws:
IOException
-
GorBatchTable
public GorBatchTable(String query, boolean tag, String path, String filter, String filterFile, String filterColumn, String splitFile, String seek, org.apache.spark.sql.types.StructType schema, String redisUri, String streamKey, String jobId, String cacheFile, String securityContext, String useCpp, boolean hadoopInfer) throws IOException - Throws:
IOException
-
-
Method Details
-
setProjectRoot
-
setCacheDir
-
setConfigFile
-
setAliasFile
-
build
public org.apache.spark.sql.connector.read.Scan build()
- Specified by:
build in interface org.apache.spark.sql.connector.read.ScanBuilder
-
pushFilters
public org.apache.spark.sql.sources.Filter[] pushFilters(org.apache.spark.sql.sources.Filter[] filters) - Specified by:
pushFilters in interface org.apache.spark.sql.connector.read.SupportsPushDownFilters
-
pushedFilters
public org.apache.spark.sql.sources.Filter[] pushedFilters()
- Specified by:
pushedFilters in interface org.apache.spark.sql.connector.read.SupportsPushDownFilters
-
schema
public org.apache.spark.sql.types.StructType schema()
- Specified by:
schema in interface org.apache.spark.sql.connector.catalog.Table
-
name
- Specified by:
name in interface org.apache.spark.sql.connector.catalog.Table
-
capabilities
- Specified by:
capabilities in interface org.apache.spark.sql.connector.catalog.Table
-
newWriteBuilder
public org.apache.spark.sql.connector.write.WriteBuilder newWriteBuilder(org.apache.spark.sql.connector.write.LogicalWriteInfo info) - Specified by:
newWriteBuilder in interface org.apache.spark.sql.connector.catalog.SupportsWrite
-
newScanBuilder
public org.apache.spark.sql.connector.read.ScanBuilder newScanBuilder(org.apache.spark.sql.util.CaseInsensitiveStringMap caseInsensitiveStringMap) - Specified by:
newScanBuilder in interface org.apache.spark.sql.connector.catalog.SupportsRead
-
partitioning
public org.apache.spark.sql.connector.expressions.Transform[] partitioning()
- Specified by:
partitioning in interface org.apache.spark.sql.connector.catalog.Table
-