@InterfaceAudience.Public
public class HFileOutputFormat2
extends org.apache.hadoop.mapreduce.lib.output.FileOutputFormat<org.apache.hadoop.hbase.io.ImmutableBytesWritable,org.apache.hadoop.hbase.Cell>
Writes HFiles. Calling write(null,null) will forcibly roll
all HFiles being written.
Using this class as part of a MapReduce job is best done
using configureIncrementalLoad(Job, TableDescriptor, RegionLocator).
| Modifier and Type | Field and Description |
|---|---|
static String |
DATABLOCK_ENCODING_OVERRIDE_CONF_KEY |
static String |
LOCALITY_SENSITIVE_CONF_KEY
Keep locality while generating HFiles for bulkload.
|
static String |
STORAGE_POLICY_PROPERTY |
static String |
STORAGE_POLICY_PROPERTY_CF_PREFIX |
protected static byte[] |
tableSeparator |
| Constructor and Description |
|---|
HFileOutputFormat2() |
| Modifier and Type | Method and Description |
|---|---|
protected static byte[] |
combineTableNameSuffix(byte[] tableName,
byte[] suffix) |
static void |
configureIncrementalLoad(org.apache.hadoop.mapreduce.Job job,
org.apache.hadoop.hbase.client.TableDescriptor tableDescriptor,
org.apache.hadoop.hbase.client.RegionLocator regionLocator)
Configure a MapReduce Job to perform an incremental load into the given
table.
|
static void |
configureIncrementalLoad(org.apache.hadoop.mapreduce.Job job,
org.apache.hadoop.hbase.client.Table table,
org.apache.hadoop.hbase.client.RegionLocator regionLocator)
Configure a MapReduce Job to perform an incremental load into the given
table.
|
static void |
configureIncrementalLoadMap(org.apache.hadoop.mapreduce.Job job,
org.apache.hadoop.hbase.client.TableDescriptor tableDescriptor) |
org.apache.hadoop.mapreduce.RecordWriter<org.apache.hadoop.hbase.io.ImmutableBytesWritable,org.apache.hadoop.hbase.Cell> |
getRecordWriter(org.apache.hadoop.mapreduce.TaskAttemptContext context) |
protected static byte[] |
getTableNameSuffixedWithFamily(byte[] tableName,
byte[] family) |
Methods inherited from class org.apache.hadoop.mapreduce.lib.output.FileOutputFormat:
checkOutputSpecs, getCompressOutput, getDefaultWorkFile, getOutputCommitter, getOutputCompressorClass, getOutputName, getOutputPath, getPathForWorkFile, getUniqueFile, getWorkOutputPath, setCompressOutput, setOutputCompressorClass, setOutputName, setOutputPath

Field Detail:
protected static final byte[] tableSeparator
public static final String DATABLOCK_ENCODING_OVERRIDE_CONF_KEY
public static final String LOCALITY_SENSITIVE_CONF_KEY
public static final String STORAGE_POLICY_PROPERTY
public static final String STORAGE_POLICY_PROPERTY_CF_PREFIX
protected static byte[] combineTableNameSuffix(byte[] tableName,
byte[] suffix)
public org.apache.hadoop.mapreduce.RecordWriter<org.apache.hadoop.hbase.io.ImmutableBytesWritable,org.apache.hadoop.hbase.Cell> getRecordWriter(org.apache.hadoop.mapreduce.TaskAttemptContext context)
throws IOException,
InterruptedException
Overrides: getRecordWriter in class org.apache.hadoop.mapreduce.lib.output.FileOutputFormat<org.apache.hadoop.hbase.io.ImmutableBytesWritable,org.apache.hadoop.hbase.Cell>
Throws: IOException, InterruptedException

protected static byte[] getTableNameSuffixedWithFamily(byte[] tableName,
byte[] family)
public static void configureIncrementalLoad(org.apache.hadoop.mapreduce.Job job,
org.apache.hadoop.hbase.client.Table table,
org.apache.hadoop.hbase.client.RegionLocator regionLocator)
throws IOException
Throws: IOException

public static void configureIncrementalLoad(org.apache.hadoop.mapreduce.Job job,
org.apache.hadoop.hbase.client.TableDescriptor tableDescriptor,
org.apache.hadoop.hbase.client.RegionLocator regionLocator)
throws IOException
Throws: IOException

public static void configureIncrementalLoadMap(org.apache.hadoop.mapreduce.Job job,
org.apache.hadoop.hbase.client.TableDescriptor tableDescriptor)
throws IOException
Throws: IOException

Copyright © 2007–2020 The Apache Software Foundation. All rights reserved.