public class HadoopInputFormat<K extends Writable,V extends Writable> extends Object implements org.apache.flink.api.common.io.InputFormat<Tuple2<K,V>,HadoopInputSplit>, ResultTypeQueryable<Tuple2<K,V>>
| Constructor and Description |
|---|
HadoopInputFormat() |
HadoopInputFormat(org.apache.hadoop.mapred.InputFormat<K,V> mapredInputFormat,
Class<K> key,
Class<V> value,
org.apache.hadoop.mapred.JobConf job) |
| Modifier and Type | Method and Description |
|---|---|
void |
close() |
void |
configure(org.apache.flink.configuration.Configuration parameters) |
HadoopInputSplit[] |
createInputSplits(int minNumSplits) |
org.apache.hadoop.mapred.InputFormat<K,V> |
getHadoopInputFormat() |
Class<? extends HadoopInputSplit> |
getInputSplitType() |
org.apache.hadoop.mapred.JobConf |
getJobConf() |
org.apache.flink.types.TypeInformation<Tuple2<K,V>> |
getProducedType() |
org.apache.flink.api.common.io.statistics.BaseStatistics |
getStatistics(org.apache.flink.api.common.io.statistics.BaseStatistics cachedStats) |
Tuple2<K,V> |
nextRecord(Tuple2<K,V> record) |
void |
open(HadoopInputSplit split) |
boolean |
reachedEnd() |
void |
setHadoopInputFormat(org.apache.hadoop.mapred.InputFormat<K,V> mapredInputFormat) |
void |
setJobConf(org.apache.hadoop.mapred.JobConf job) |
public void setJobConf(org.apache.hadoop.mapred.JobConf job)
public void setHadoopInputFormat(org.apache.hadoop.mapred.InputFormat<K,V> mapredInputFormat)
public org.apache.hadoop.mapred.JobConf getJobConf()
public void configure(org.apache.flink.configuration.Configuration parameters)
public org.apache.flink.api.common.io.statistics.BaseStatistics getStatistics(org.apache.flink.api.common.io.statistics.BaseStatistics cachedStats)
throws IOException
Specified by: getStatistics in interface org.apache.flink.api.common.io.InputFormat<Tuple2<K,V>,HadoopInputSplit>
Throws: IOException

public HadoopInputSplit[] createInputSplits(int minNumSplits) throws IOException
Specified by: createInputSplits in interface org.apache.flink.api.common.io.InputFormat<Tuple2<K,V>,HadoopInputSplit>
Throws: IOException

public Class<? extends HadoopInputSplit> getInputSplitType()
public void open(HadoopInputSplit split) throws IOException
Specified by: open in interface org.apache.flink.api.common.io.InputFormat<Tuple2<K,V>,HadoopInputSplit>
Throws: IOException

public boolean reachedEnd()
throws IOException
Specified by: reachedEnd in interface org.apache.flink.api.common.io.InputFormat<Tuple2<K,V>,HadoopInputSplit>
Throws: IOException

public Tuple2<K,V> nextRecord(Tuple2<K,V> record) throws IOException
Specified by: nextRecord in interface org.apache.flink.api.common.io.InputFormat<Tuple2<K,V>,HadoopInputSplit>
Throws: IOException

public void close()
throws IOException
Specified by: close in interface org.apache.flink.api.common.io.InputFormat<Tuple2<K,V>,HadoopInputSplit>
Throws: IOException

public org.apache.flink.types.TypeInformation<Tuple2<K,V>> getProducedType()
Specified by: getProducedType in interface ResultTypeQueryable<Tuple2<K,V>>

Copyright © 2014 The Apache Software Foundation. All rights reserved.