public static class HBaseIO.Read
extends org.apache.beam.sdk.transforms.PTransform<org.apache.beam.sdk.values.PBegin,org.apache.beam.sdk.values.PCollection<org.apache.hadoop.hbase.client.Result>>
PTransform that reads from HBase. See the class-level Javadoc on HBaseIO for
more information.

See Also: HBaseIO, Serialized Form

| Modifier and Type | Method and Description |
|---|---|
boolean |
equals(@Nullable java.lang.Object o) |
org.apache.beam.sdk.values.PCollection<org.apache.hadoop.hbase.client.Result> |
expand(org.apache.beam.sdk.values.PBegin input) |
org.apache.hadoop.conf.Configuration |
getConfiguration() |
org.apache.beam.sdk.io.range.ByteKeyRange |
getKeyRange()
Returns the range of keys that will be read from the table.
|
org.apache.hadoop.hbase.client.Scan |
getScan() |
java.lang.String |
getTableId() |
int |
hashCode() |
void |
populateDisplayData(org.apache.beam.sdk.transforms.display.DisplayData.Builder builder) |
HBaseIO.Read |
withConfiguration(org.apache.hadoop.conf.Configuration configuration)
Reads from the HBase instance indicated by the given configuration.
|
HBaseIO.Read |
withFilter(org.apache.hadoop.hbase.filter.Filter filter)
Filters the rows read from HBase using the given row filter.
|
HBaseIO.Read |
withKeyRange(byte[] startRow,
byte[] stopRow)
Reads only rows in the specified range.
|
HBaseIO.Read |
withKeyRange(org.apache.beam.sdk.io.range.ByteKeyRange keyRange)
Reads only rows in the specified range.
|
HBaseIO.Read |
withScan(org.apache.hadoop.hbase.client.Scan scan)
Filters the rows read from HBase using the given scan.
|
HBaseIO.Read |
withTableId(java.lang.String tableId)
Reads from the specified table.
|
addAnnotation, compose, compose, getAdditionalInputs, getAnnotations, getDefaultOutputCoder, getDefaultOutputCoder, getDefaultOutputCoder, getKindString, getName, getResourceHints, setDisplayData, setResourceHints, toString, validate, validate

public HBaseIO.Read withConfiguration(org.apache.hadoop.conf.Configuration configuration)
public HBaseIO.Read withTableId(java.lang.String tableId)
public HBaseIO.Read withScan(org.apache.hadoop.hbase.client.Scan scan)
public HBaseIO.Read withFilter(org.apache.hadoop.hbase.filter.Filter filter)
public HBaseIO.Read withKeyRange(org.apache.beam.sdk.io.range.ByteKeyRange keyRange)
public HBaseIO.Read withKeyRange(byte[] startRow, byte[] stopRow)
public org.apache.beam.sdk.values.PCollection<org.apache.hadoop.hbase.client.Result> expand(org.apache.beam.sdk.values.PBegin input)
expand in class org.apache.beam.sdk.transforms.PTransform<org.apache.beam.sdk.values.PBegin,org.apache.beam.sdk.values.PCollection<org.apache.hadoop.hbase.client.Result>>

public void populateDisplayData(org.apache.beam.sdk.transforms.display.DisplayData.Builder builder)
populateDisplayData in interface org.apache.beam.sdk.transforms.display.HasDisplayData
populateDisplayData in class org.apache.beam.sdk.transforms.PTransform<org.apache.beam.sdk.values.PBegin,org.apache.beam.sdk.values.PCollection<org.apache.hadoop.hbase.client.Result>>

public org.apache.hadoop.conf.Configuration getConfiguration()
public java.lang.String getTableId()
public org.apache.hadoop.hbase.client.Scan getScan()
public org.apache.beam.sdk.io.range.ByteKeyRange getKeyRange()
public boolean equals(@Nullable java.lang.Object o)
equals in class java.lang.Object

public int hashCode()
hashCode in class java.lang.Object