public class HadoopRecordInputFormat<K,V> extends Object implements org.apache.flink.api.common.io.InputFormat<org.apache.flink.types.Record,org.apache.flink.api.java.hadoop.mapred.wrapper.HadoopInputSplit>
| Modifier and Type | Field and Description |
|---|---|
HadoopTypeConverter<K,V> |
converter |
org.apache.hadoop.mapred.InputFormat<K,V> |
hadoopInputFormat |
org.apache.hadoop.mapred.JobConf |
jobConf |
K |
key |
org.apache.hadoop.mapred.RecordReader<K,V> |
recordReader |
V |
value |
| Constructor and Description |
|---|
HadoopRecordInputFormat() |
HadoopRecordInputFormat(org.apache.hadoop.mapred.InputFormat<K,V> hadoopInputFormat,
org.apache.hadoop.mapred.JobConf job,
HadoopTypeConverter<K,V> conv) |
| Modifier and Type | Method and Description |
|---|---|
void |
close() |
void |
configure(org.apache.flink.configuration.Configuration parameters) |
org.apache.flink.api.java.hadoop.mapred.wrapper.HadoopInputSplit[] |
createInputSplits(int minNumSplits) |
org.apache.hadoop.mapred.InputFormat<K,V> |
getHadoopInputFormat() |
org.apache.flink.core.io.InputSplitAssigner |
getInputSplitAssigner(org.apache.flink.api.java.hadoop.mapred.wrapper.HadoopInputSplit[] inputSplits) |
org.apache.hadoop.mapred.JobConf |
getJobConf() |
org.apache.flink.api.common.io.statistics.BaseStatistics |
getStatistics(org.apache.flink.api.common.io.statistics.BaseStatistics cachedStatistics) |
org.apache.flink.types.Record |
nextRecord(org.apache.flink.types.Record record) |
void |
open(org.apache.flink.api.java.hadoop.mapred.wrapper.HadoopInputSplit split) |
boolean |
reachedEnd() |
void |
setHadoopInputFormat(org.apache.hadoop.mapred.InputFormat<K,V> hadoopInputFormat) |
void |
setJobConf(org.apache.hadoop.mapred.JobConf job) |
public HadoopTypeConverter<K,V> converter
public org.apache.hadoop.mapred.JobConf jobConf
public transient K key
public transient V value
public HadoopRecordInputFormat()
public HadoopRecordInputFormat(org.apache.hadoop.mapred.InputFormat<K,V> hadoopInputFormat, org.apache.hadoop.mapred.JobConf job, HadoopTypeConverter<K,V> conv)
public void configure(org.apache.flink.configuration.Configuration parameters)
Specified by: configure in interface org.apache.flink.api.common.io.InputFormat&lt;org.apache.flink.types.Record,org.apache.flink.api.java.hadoop.mapred.wrapper.HadoopInputSplit&gt;

public org.apache.flink.api.common.io.statistics.BaseStatistics getStatistics(org.apache.flink.api.common.io.statistics.BaseStatistics cachedStatistics)
throws IOException
Specified by: getStatistics in interface org.apache.flink.api.common.io.InputFormat&lt;org.apache.flink.types.Record,org.apache.flink.api.java.hadoop.mapred.wrapper.HadoopInputSplit&gt;
Throws: IOException

public org.apache.flink.api.java.hadoop.mapred.wrapper.HadoopInputSplit[] createInputSplits(int minNumSplits)
throws IOException
Specified by: createInputSplits in interface org.apache.flink.api.common.io.InputFormat&lt;org.apache.flink.types.Record,org.apache.flink.api.java.hadoop.mapred.wrapper.HadoopInputSplit&gt;
Specified by: createInputSplits in interface org.apache.flink.core.io.InputSplitSource&lt;org.apache.flink.api.java.hadoop.mapred.wrapper.HadoopInputSplit&gt;
Throws: IOException

public org.apache.flink.core.io.InputSplitAssigner getInputSplitAssigner(org.apache.flink.api.java.hadoop.mapred.wrapper.HadoopInputSplit[] inputSplits)
Specified by: getInputSplitAssigner in interface org.apache.flink.api.common.io.InputFormat&lt;org.apache.flink.types.Record,org.apache.flink.api.java.hadoop.mapred.wrapper.HadoopInputSplit&gt;
Specified by: getInputSplitAssigner in interface org.apache.flink.core.io.InputSplitSource&lt;org.apache.flink.api.java.hadoop.mapred.wrapper.HadoopInputSplit&gt;

public void open(org.apache.flink.api.java.hadoop.mapred.wrapper.HadoopInputSplit split)
throws IOException
Specified by: open in interface org.apache.flink.api.common.io.InputFormat&lt;org.apache.flink.types.Record,org.apache.flink.api.java.hadoop.mapred.wrapper.HadoopInputSplit&gt;
Throws: IOException

public boolean reachedEnd()
throws IOException
Specified by: reachedEnd in interface org.apache.flink.api.common.io.InputFormat&lt;org.apache.flink.types.Record,org.apache.flink.api.java.hadoop.mapred.wrapper.HadoopInputSplit&gt;
Throws: IOException

public org.apache.flink.types.Record nextRecord(org.apache.flink.types.Record record)
throws IOException
Specified by: nextRecord in interface org.apache.flink.api.common.io.InputFormat&lt;org.apache.flink.types.Record,org.apache.flink.api.java.hadoop.mapred.wrapper.HadoopInputSplit&gt;
Throws: IOException

public void close()
throws IOException
Specified by: close in interface org.apache.flink.api.common.io.InputFormat&lt;org.apache.flink.types.Record,org.apache.flink.api.java.hadoop.mapred.wrapper.HadoopInputSplit&gt;
Throws: IOException

public void setJobConf(org.apache.hadoop.mapred.JobConf job)
public void setHadoopInputFormat(org.apache.hadoop.mapred.InputFormat<K,V> hadoopInputFormat)
public org.apache.hadoop.mapred.JobConf getJobConf()
Copyright © 2014–2016 The Apache Software Foundation. All rights reserved.