Package gorsat.spark
Class GorBatchTable
java.lang.Object
gorsat.spark.GorBatchTable
- All Implemented Interfaces:
org.apache.spark.sql.connector.catalog.SupportsRead, org.apache.spark.sql.connector.catalog.SupportsWrite, org.apache.spark.sql.connector.catalog.Table, org.apache.spark.sql.connector.read.ScanBuilder, org.apache.spark.sql.connector.read.SupportsPushDownFilters
public abstract class GorBatchTable
extends java.lang.Object
implements org.apache.spark.sql.connector.catalog.Table, org.apache.spark.sql.connector.catalog.SupportsRead, org.apache.spark.sql.connector.catalog.SupportsWrite, org.apache.spark.sql.connector.read.SupportsPushDownFilters
-
Constructor Summary
Constructors — Constructor / Description:
GorBatchTable(java.lang.String query, boolean tag, java.lang.String path, java.lang.String filter, java.lang.String filterFile, java.lang.String filterColumn, java.lang.String splitFile, java.lang.String seek, java.lang.String redisUri, java.lang.String jobId, java.lang.String cacheFile, java.lang.String useCpp)
GorBatchTable(java.lang.String query, boolean tag, java.lang.String path, java.lang.String filter, java.lang.String filterFile, java.lang.String filterColumn, java.lang.String splitFile, java.lang.String seek, org.apache.spark.sql.types.StructType schema, java.lang.String redisUri, java.lang.String jobId, java.lang.String cacheFile, java.lang.String useCpp)
Method Summary
Modifier and Type — Method / Description:
org.apache.spark.sql.connector.read.Scan build()
java.util.Set&lt;org.apache.spark.sql.connector.catalog.TableCapability&gt; capabilities()
java.lang.String name()
org.apache.spark.sql.connector.read.ScanBuilder newScanBuilder(org.apache.spark.sql.util.CaseInsensitiveStringMap caseInsensitiveStringMap)
org.apache.spark.sql.connector.write.WriteBuilder newWriteBuilder(org.apache.spark.sql.connector.write.LogicalWriteInfo info)
org.apache.spark.sql.connector.expressions.Transform[] partitioning()
org.apache.spark.sql.sources.Filter[] pushedFilters()
org.apache.spark.sql.sources.Filter[] pushFilters(org.apache.spark.sql.sources.Filter[] filters)
org.apache.spark.sql.types.StructType schema()
void setCacheDir(java.lang.String cacheDir)
void setProjectRoot(java.lang.String projectRoot)
-
Constructor Details
-
GorBatchTable
public GorBatchTable(java.lang.String query, boolean tag, java.lang.String path, java.lang.String filter, java.lang.String filterFile, java.lang.String filterColumn, java.lang.String splitFile, java.lang.String seek, java.lang.String redisUri, java.lang.String jobId, java.lang.String cacheFile, java.lang.String useCpp) throws java.io.IOException, java.util.zip.DataFormatException
Throws:
java.io.IOException
java.util.zip.DataFormatException
-
GorBatchTable
public GorBatchTable(java.lang.String query, boolean tag, java.lang.String path, java.lang.String filter, java.lang.String filterFile, java.lang.String filterColumn, java.lang.String splitFile, java.lang.String seek, org.apache.spark.sql.types.StructType schema, java.lang.String redisUri, java.lang.String jobId, java.lang.String cacheFile, java.lang.String useCpp)
-
-
Method Details
-
setProjectRoot
public void setProjectRoot(java.lang.String projectRoot) -
setCacheDir
public void setCacheDir(java.lang.String cacheDir) -
build
public org.apache.spark.sql.connector.read.Scan build()
Specified by:
build in interface org.apache.spark.sql.connector.read.ScanBuilder
-
pushFilters
public org.apache.spark.sql.sources.Filter[] pushFilters(org.apache.spark.sql.sources.Filter[] filters)
Specified by:
pushFilters in interface org.apache.spark.sql.connector.read.SupportsPushDownFilters
-
pushedFilters
public org.apache.spark.sql.sources.Filter[] pushedFilters()
Specified by:
pushedFilters in interface org.apache.spark.sql.connector.read.SupportsPushDownFilters
-
schema
public org.apache.spark.sql.types.StructType schema()
Specified by:
schema in interface org.apache.spark.sql.connector.catalog.Table
-
name
public java.lang.String name()
Specified by:
name in interface org.apache.spark.sql.connector.catalog.Table
-
capabilities
public java.util.Set&lt;org.apache.spark.sql.connector.catalog.TableCapability&gt; capabilities()
Specified by:
capabilities in interface org.apache.spark.sql.connector.catalog.Table
-
newWriteBuilder
public org.apache.spark.sql.connector.write.WriteBuilder newWriteBuilder(org.apache.spark.sql.connector.write.LogicalWriteInfo info)
Specified by:
newWriteBuilder in interface org.apache.spark.sql.connector.catalog.SupportsWrite
-
newScanBuilder
public org.apache.spark.sql.connector.read.ScanBuilder newScanBuilder(org.apache.spark.sql.util.CaseInsensitiveStringMap caseInsensitiveStringMap)
Specified by:
newScanBuilder in interface org.apache.spark.sql.connector.catalog.SupportsRead
-
partitioning
public org.apache.spark.sql.connector.expressions.Transform[] partitioning()
Specified by:
partitioning in interface org.apache.spark.sql.connector.catalog.Table
-