public abstract class DruidQueryRecordReader<R extends Comparable<R>> extends org.apache.hadoop.mapreduce.RecordReader<org.apache.hadoop.io.NullWritable,DruidWritable> implements org.apache.hadoop.mapred.RecordReader<org.apache.hadoop.io.NullWritable,DruidWritable>
The key for each record will be a NullWritable, while the value will be a DruidWritable containing the timestamp as well as all values resulting from the query.
| Modifier and Type | Class and Description |
|---|---|
protected class |
DruidQueryRecordReader.JsonParserIterator<R extends Comparable<R>>
This is a helper wrapper class used to create an iterator of druid rows out of InputStream.
|
| Modifier and Type | Field and Description |
|---|---|
protected org.apache.druid.query.Query |
query
Query that Druid executes.
|
| Constructor and Description |
|---|
DruidQueryRecordReader() |
| Modifier and Type | Method and Description |
|---|---|
void |
close() |
org.apache.hadoop.io.NullWritable |
createKey() |
DruidQueryRecordReader.JsonParserIterator<R> |
createQueryResultsIterator() |
DruidWritable |
createValue() |
abstract org.apache.hadoop.io.NullWritable |
getCurrentKey() |
abstract DruidWritable |
getCurrentValue() |
long |
getPos() |
abstract float |
getProgress() |
DruidQueryRecordReader.JsonParserIterator<R> |
getQueryResultsIterator() |
protected abstract com.fasterxml.jackson.databind.JavaType |
getResultTypeDef() |
void |
initialize(org.apache.hadoop.mapreduce.InputSplit split,
org.apache.hadoop.conf.Configuration conf) |
void |
initialize(org.apache.hadoop.mapreduce.InputSplit split,
com.fasterxml.jackson.databind.ObjectMapper mapper,
com.fasterxml.jackson.databind.ObjectMapper smileMapper,
org.apache.druid.java.util.http.client.HttpClient httpClient,
org.apache.hadoop.conf.Configuration conf) |
void |
initialize(org.apache.hadoop.mapreduce.InputSplit split,
org.apache.hadoop.mapreduce.TaskAttemptContext context) |
abstract boolean |
next(org.apache.hadoop.io.NullWritable key,
DruidWritable value) |
abstract boolean |
nextKeyValue() |
public DruidQueryRecordReader.JsonParserIterator<R> getQueryResultsIterator()
public DruidQueryRecordReader.JsonParserIterator<R> createQueryResultsIterator()
public void initialize(org.apache.hadoop.mapreduce.InputSplit split,
org.apache.hadoop.mapreduce.TaskAttemptContext context)
throws IOException
Specified by: initialize in class org.apache.hadoop.mapreduce.RecordReader<org.apache.hadoop.io.NullWritable,DruidWritable>
Throws: IOException

public void initialize(org.apache.hadoop.mapreduce.InputSplit split,
com.fasterxml.jackson.databind.ObjectMapper mapper,
com.fasterxml.jackson.databind.ObjectMapper smileMapper,
org.apache.druid.java.util.http.client.HttpClient httpClient,
org.apache.hadoop.conf.Configuration conf)
throws IOException
Throws: IOException

public void initialize(org.apache.hadoop.mapreduce.InputSplit split,
org.apache.hadoop.conf.Configuration conf)
throws IOException
Throws: IOException

protected abstract com.fasterxml.jackson.databind.JavaType getResultTypeDef()
public org.apache.hadoop.io.NullWritable createKey()
Specified by: createKey in interface org.apache.hadoop.mapred.RecordReader<org.apache.hadoop.io.NullWritable,DruidWritable>

public DruidWritable createValue()
Specified by: createValue in interface org.apache.hadoop.mapred.RecordReader<org.apache.hadoop.io.NullWritable,DruidWritable>

public abstract boolean next(org.apache.hadoop.io.NullWritable key,
DruidWritable value)
throws IOException
Specified by: next in interface org.apache.hadoop.mapred.RecordReader<org.apache.hadoop.io.NullWritable,DruidWritable>
Throws: IOException

public long getPos()
Specified by: getPos in interface org.apache.hadoop.mapred.RecordReader<org.apache.hadoop.io.NullWritable,DruidWritable>

public abstract boolean nextKeyValue()
throws IOException
Specified by: nextKeyValue in class org.apache.hadoop.mapreduce.RecordReader<org.apache.hadoop.io.NullWritable,DruidWritable>
Throws: IOException

public abstract org.apache.hadoop.io.NullWritable getCurrentKey()
throws IOException,
InterruptedException
Specified by: getCurrentKey in class org.apache.hadoop.mapreduce.RecordReader<org.apache.hadoop.io.NullWritable,DruidWritable>
Throws: IOException, InterruptedException

public abstract DruidWritable getCurrentValue() throws IOException, InterruptedException
Specified by: getCurrentValue in class org.apache.hadoop.mapreduce.RecordReader<org.apache.hadoop.io.NullWritable,DruidWritable>
Throws: IOException, InterruptedException

public abstract float getProgress()
throws IOException
Specified by: getProgress in interface org.apache.hadoop.mapred.RecordReader<org.apache.hadoop.io.NullWritable,DruidWritable>
Specified by: getProgress in class org.apache.hadoop.mapreduce.RecordReader<org.apache.hadoop.io.NullWritable,DruidWritable>
Throws: IOException

public void close()
Specified by: close in interface Closeable
Specified by: close in interface AutoCloseable
Specified by: close in interface org.apache.hadoop.mapred.RecordReader<org.apache.hadoop.io.NullWritable,DruidWritable>
Specified by: close in class org.apache.hadoop.mapreduce.RecordReader<org.apache.hadoop.io.NullWritable,DruidWritable>

Copyright © 2022 The Apache Software Foundation. All rights reserved.