public final class DruidStorageHandlerUtils extends Object
| Modifier and Type | Class and Description |
|---|---|
static interface |
DruidStorageHandlerUtils.DataPusher
Simple interface for retry operations.
|
| Modifier and Type | Field and Description |
|---|---|
static org.apache.druid.segment.IndexIO |
INDEX_IO
Used by druid to perform IO on indexes.
|
static org.apache.druid.segment.IndexMergerV9 |
INDEX_MERGER_V9
Used by druid to merge indexes.
|
static com.fasterxml.jackson.databind.ObjectMapper |
JSON_MAPPER
Mapper to use to serialize/deserialize Druid objects (JSON).
|
static com.fasterxml.jackson.databind.ObjectMapper |
SMILE_MAPPER
Mapper to use to serialize/deserialize Druid objects (SMILE).
|
| Modifier and Type | Method and Description |
|---|---|
static org.apache.druid.query.Query |
addDynamicFilters(org.apache.druid.query.Query query,
ExprNodeGenericFuncDesc filterExpr,
org.apache.hadoop.conf.Configuration conf,
boolean resolveDynamicValues) |
static String |
createScanAllQuery(String dataSourceName,
List<String> columns) |
static org.apache.druid.segment.loading.DataSegmentPusher |
createSegmentPusherForDirectory(String segmentDirectory,
org.apache.hadoop.conf.Configuration configuration) |
static org.apache.druid.java.util.http.client.Request |
createSmileRequest(String address,
org.apache.druid.query.Query query)
Method that creates a request for Druid query using SMILE format.
|
static String |
extractColName(ExprNodeDesc expr,
List<org.apache.druid.segment.VirtualColumn> virtualColumns) |
static List<org.apache.druid.timeline.DataSegment> |
getCreatedSegments(org.apache.hadoop.fs.Path taskDir,
org.apache.hadoop.conf.Configuration conf) |
static org.apache.druid.java.util.common.Pair<List<org.apache.druid.data.input.impl.DimensionSchema>,org.apache.druid.query.aggregation.AggregatorFactory[]> |
getDimensionsAndAggregates(List<String> columnNames,
List<org.apache.hadoop.hive.serde2.typeinfo.TypeInfo> columnTypes) |
static org.apache.druid.segment.indexing.granularity.GranularitySpec |
getGranularitySpec(org.apache.hadoop.conf.Configuration configuration,
Properties tableProperties) |
static org.apache.druid.segment.IndexSpec |
getIndexSpec(org.apache.hadoop.conf.Configuration jc) |
static List<String> |
getListProperty(org.apache.hadoop.hive.metastore.api.Table table,
String propertyName) |
static org.apache.druid.segment.VirtualColumns |
getVirtualColumns(org.apache.druid.query.Query query) |
static org.apache.hadoop.fs.Path |
makeSegmentDescriptorOutputPath(org.apache.druid.timeline.DataSegment pushedSegment,
org.apache.hadoop.fs.Path segmentsDescriptorDir) |
static InputStream |
submitRequest(org.apache.druid.java.util.http.client.HttpClient client,
org.apache.druid.java.util.http.client.Request request)
Method that submits a request to an Http address and retrieves the result.
|
static void |
writeSegmentDescriptor(org.apache.hadoop.fs.FileSystem outputFS,
org.apache.druid.timeline.DataSegment segment,
org.apache.hadoop.fs.Path descriptorPath)
Writes the serialized form of a segment descriptor to the filesystem; if a file already exists at the descriptor path, it will attempt to replace it.
|
public static final com.fasterxml.jackson.databind.ObjectMapper JSON_MAPPER
public static final com.fasterxml.jackson.databind.ObjectMapper SMILE_MAPPER
public static final org.apache.druid.segment.IndexIO INDEX_IO
public static final org.apache.druid.segment.IndexMergerV9 INDEX_MERGER_V9
public static org.apache.druid.java.util.http.client.Request createSmileRequest(String address, org.apache.druid.query.Query query)
address - address of the host target.
query - Druid query.

public static InputStream submitRequest(org.apache.druid.java.util.http.client.HttpClient client, org.apache.druid.java.util.http.client.Request request) throws IOException
client - HTTP client that will be used to submit the request.
request - HTTP request to be submitted.
IOException - in case of a request IO error.

public static List<org.apache.druid.timeline.DataSegment> getCreatedSegments(org.apache.hadoop.fs.Path taskDir, org.apache.hadoop.conf.Configuration conf) throws IOException
taskDir - path to the directory containing the segment descriptor info;
the descriptor path will be
../workingPath/task_id/DruidStorageHandler.SEGMENTS_DESCRIPTOR_DIR_NAME/*.json
conf - Hadoop configuration used to get the file system.
IOException - can occur in the case where we did not produce any data.

public static void writeSegmentDescriptor(org.apache.hadoop.fs.FileSystem outputFS,
org.apache.druid.timeline.DataSegment segment,
org.apache.hadoop.fs.Path descriptorPath)
throws IOException
outputFS - filesystem.
segment - DataSegment object.
descriptorPath - path.
IOException - in case any IO issues occur.

public static org.apache.hadoop.fs.Path makeSegmentDescriptorOutputPath(org.apache.druid.timeline.DataSegment pushedSegment,
org.apache.hadoop.fs.Path segmentsDescriptorDir)
pushedSegment - the pushed data segment object.
segmentsDescriptorDir - actual directory path for descriptors.

public static String createScanAllQuery(String dataSourceName, List<String> columns) throws com.fasterxml.jackson.core.JsonProcessingException
com.fasterxml.jackson.core.JsonProcessingException

@Nullable public static List<String> getListProperty(org.apache.hadoop.hive.metastore.api.Table table, String propertyName)
public static org.apache.druid.segment.loading.DataSegmentPusher createSegmentPusherForDirectory(String segmentDirectory, org.apache.hadoop.conf.Configuration configuration) throws IOException
IOException

public static org.apache.druid.segment.indexing.granularity.GranularitySpec getGranularitySpec(org.apache.hadoop.conf.Configuration configuration,
Properties tableProperties)
public static org.apache.druid.segment.IndexSpec getIndexSpec(org.apache.hadoop.conf.Configuration jc)
public static org.apache.druid.java.util.common.Pair<List<org.apache.druid.data.input.impl.DimensionSchema>,org.apache.druid.query.aggregation.AggregatorFactory[]> getDimensionsAndAggregates(List<String> columnNames, List<org.apache.hadoop.hive.serde2.typeinfo.TypeInfo> columnTypes)
public static org.apache.druid.query.Query addDynamicFilters(org.apache.druid.query.Query query,
ExprNodeGenericFuncDesc filterExpr,
org.apache.hadoop.conf.Configuration conf,
boolean resolveDynamicValues)
@Nullable public static String extractColName(ExprNodeDesc expr, List<org.apache.druid.segment.VirtualColumn> virtualColumns)
public static org.apache.druid.segment.VirtualColumns getVirtualColumns(org.apache.druid.query.Query query)
Copyright © 2022 The Apache Software Foundation. All rights reserved.