This article collects typical usage examples of the Java class io.druid.granularity.QueryGranularity. If you are unsure what QueryGranularity is for or how to use it, the curated examples below should help.
The QueryGranularity class belongs to the io.druid.granularity package. Eleven code examples are shown below, ordered by popularity by default.
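Before the examples, a quick orientation: the code below uses QueryGranularity in three ways — the ALL and NONE constants and the fromString factory. A minimal sketch (the "DAY" value is only an illustrative string, not taken from the examples):

    QueryGranularity all  = QueryGranularity.ALL;               // a single bucket spanning the whole queried interval
    QueryGranularity none = QueryGranularity.NONE;              // no time bucketing; every timestamp stays distinct
    QueryGranularity day  = QueryGranularity.fromString("DAY"); // parsed from a string, e.g. from configuration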
Example 1: getIndex
import io.druid.granularity.QueryGranularity; // import the required package/class
public static QueryableIndex getIndex(String indexKey) throws IOException {
    if (indices.has(indexKey)) {
        return indices.get(indexKey);
    }
    // Create druid segments from raw data
    Reader reader = new FileReader(new File("./src/test/resources/report.csv"));
    List<String> columns = Arrays.asList("colo", "pool", "report", "URL", "TS", "metric", "value", "count", "min", "max", "sum");
    List<String> metrics = Arrays.asList("value", "count", "min", "max", "sum");
    List<String> dimensions = new ArrayList<>(columns);
    dimensions.removeAll(metrics);
    Loader loader = Loader.csv(reader, columns, dimensions, "TS");
    DimensionsSpec dimensionsSpec = new DimensionsSpec(dimensions, null, null);
    AggregatorFactory[] metricsAgg = new AggregatorFactory[]{
            new LongSumAggregatorFactory("agg_count", "count"),
            new DoubleMaxAggregatorFactory("agg_max", "max"),
            new DoubleMinAggregatorFactory("agg_min", "min"),
            new DoubleSumAggregatorFactory("agg_sum", "sum")
    };
    IncrementalIndexSchema indexSchema = new IncrementalIndexSchema(0, QueryGranularity.ALL, dimensionsSpec, metricsAgg);
    indices.cache(indexKey, loader, indexSchema);
    return indices.get(indexKey);
}
Example 2: getGroupByQuery
import io.druid.granularity.QueryGranularity; // import the required package/class
public static Query getGroupByQuery() {
    List<DimFilter> filters = new ArrayList<>();
    filters.add(DimFilters.dimEquals("report", "URLTransaction"));
    filters.add(DimFilters.dimEquals("pool", "r1cart"));
    filters.add(DimFilters.dimEquals("metric", "Duration"));
    return GroupByQuery.builder()
            .setDataSource("test")
            .setQuerySegmentSpec(QuerySegmentSpecs.create(new Interval(0, new DateTime().getMillis())))
            .setGranularity(QueryGranularity.NONE)
            .addDimension("URL")
            .addAggregator(new LongSumAggregatorFactory("agg_count", "agg_count"))
            .addAggregator(new DoubleMaxAggregatorFactory("agg_max", "agg_max"))
            .addAggregator(new DoubleMinAggregatorFactory("agg_min", "agg_min"))
            .addAggregator(new DoubleSumAggregatorFactory("agg_sum", "agg_sum"))
            .setDimFilter(DimFilters.and(filters))
            .build();
}
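The builders in Examples 2 through 4 only construct the query object. To actually execute one against the cached index from Example 1, a minimal sketch reusing the QueryHelper and Sequences helpers that appear in Examples 6 and 10 (the "report" cache key is hypothetical):

    QueryableIndex index = getIndex("report");
    Sequence<Row> sequence = QueryHelper.run(getGroupByQuery(), index);
    List<Row> rows = Sequences.toList(sequence, Lists.<Row>newArrayList());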
Example 3: getTopNQuery
import io.druid.granularity.QueryGranularity; // import the required package/class
public static Query getTopNQuery() {
    List<DimFilter> filters = new ArrayList<>();
    filters.add(DimFilters.dimEquals("report", "URLTransaction"));
    filters.add(DimFilters.dimEquals("pool", "r1cart"));
    filters.add(DimFilters.dimEquals("metric", "Duration"));
    return new TopNQueryBuilder()
            .threshold(5)
            .metric("agg_count")
            .dataSource("test")
            .intervals(QuerySegmentSpecs.create(new Interval(0, new DateTime().getMillis())))
            .granularity(QueryGranularity.NONE)
            .dimension("colo")
            .aggregators(
                    Arrays.asList(
                            new LongSumAggregatorFactory("agg_count", "agg_count"),
                            new DoubleMaxAggregatorFactory("agg_max", "agg_max"),
                            new DoubleMinAggregatorFactory("agg_min", "agg_min"),
                            new DoubleSumAggregatorFactory("agg_sum", "agg_sum")))
            .filters(DimFilters.and(filters))
            .build();
}
Example 4: getTimeseriesQuery
import io.druid.granularity.QueryGranularity; // import the required package/class
public static Query getTimeseriesQuery() {
    List<DimFilter> filters = new ArrayList<>();
    filters.add(DimFilters.dimEquals("report", "URLTransaction"));
    filters.add(DimFilters.dimEquals("pool", "r1cart"));
    filters.add(DimFilters.dimEquals("metric", "Duration"));
    return Druids.newTimeseriesQueryBuilder()
            .dataSource("test")
            .intervals(QuerySegmentSpecs.create(new Interval(0, new DateTime().getMillis())))
            .granularity(QueryGranularity.ALL)
            .aggregators(Arrays.asList(
                    new LongSumAggregatorFactory("agg_count", "agg_count"),
                    new DoubleMaxAggregatorFactory("agg_max", "agg_max"),
                    new DoubleMinAggregatorFactory("agg_min", "agg_min"),
                    new DoubleSumAggregatorFactory("agg_sum", "agg_sum")))
            .filters(DimFilters.and(filters))
            .build();
}
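Note the granularity difference: this timeseries query uses QueryGranularity.ALL, which collapses the entire queried interval into a single result row, whereas the groupBy and topN examples use NONE, which performs no time bucketing at all.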
Example 5: createDruidSegments
import io.druid.granularity.QueryGranularity; // import the required package/class
public static QueryableIndex createDruidSegments() throws IOException {
    // Create druid segments from raw data
    Reader reader = new BufferedReader(new FileReader(new File("./src/test/resources/report.csv")));
    List<String> columns = Arrays.asList("colo", "pool", "report", "URL", "TS", "metric", "value", "count", "min", "max", "sum");
    List<String> exclusions = Arrays.asList("_Timestamp", "_Machine", "_ThreadId", "_Query");
    List<String> metrics = Arrays.asList("value", "count", "min", "max", "sum");
    // Filter the exclusion and metric columns out *before* wrapping the names in
    // DimensionSchema: calling removeAll on a List<DimensionSchema> with String
    // arguments would silently remove nothing.
    List<DimensionSchema> dimensions = new ArrayList<>();
    for (String dim : columns) {
        if (!exclusions.contains(dim) && !metrics.contains(dim)) {
            dimensions.add(new StringDimensionSchema(dim));
        }
    }
    Loader loader = new CSVLoader(reader, columns, columns, "TS");
    DimensionsSpec dimensionsSpec = new DimensionsSpec(dimensions, null, null);
    AggregatorFactory[] metricsAgg = new AggregatorFactory[] {
            new LongSumAggregatorFactory("agg_count", "count"),
            new LongMaxAggregatorFactory("agg_max", "max"),
            new LongMinAggregatorFactory("agg_min", "min"),
            new DoubleSumAggregatorFactory("agg_sum", "sum"),
    };
    IncrementalIndexSchema indexSchema = new IncrementalIndexSchema(0, QueryGranularity.fromString("ALL"), dimensionsSpec, metricsAgg);
    return IndexHelper.getQueryableIndex(loader, indexSchema);
}
Example 6: topNQuery
import io.druid.granularity.QueryGranularity; // import the required package/class
@Test
public void topNQuery() throws IOException {
    QueryableIndex index = createDruidSegments();
    List<DimFilter> filters = new ArrayList<DimFilter>();
    filters.add(DimFilters.dimEquals("report", "URLTransaction"));
    filters.add(DimFilters.dimEquals("pool", "r1cart"));
    filters.add(DimFilters.dimEquals("metric", "Duration"));
    TopNQuery query = new TopNQueryBuilder()
            .threshold(5)
            .metric("agg_count")
            .dataSource("test")
            .intervals(QuerySegmentSpecs.create(new Interval(0, new DateTime().getMillis())))
            .granularity(QueryGranularity.fromString("NONE"))
            .dimension("colo")
            .aggregators(
                    Arrays.<AggregatorFactory>asList(
                            new LongSumAggregatorFactory("agg_count", "agg_count"),
                            new LongMaxAggregatorFactory("agg_max", "agg_max"),
                            new LongMinAggregatorFactory("agg_min", "agg_min"),
                            new DoubleSumAggregatorFactory("agg_sum", "agg_sum")))
            .filters(DimFilters.and(filters))
            .build();
    @SuppressWarnings("unchecked")
    Sequence<Result> sequence = QueryHelper.run(query, index);
    ArrayList<Result> results = Sequences.toList(sequence, Lists.<Result>newArrayList());
    Assert.assertEquals(results.size(), 1);
}
Example 7: getQueryGranularity
import io.druid.granularity.QueryGranularity; // import the required package/class
private QueryGranularity getQueryGranularity() {
    if ("NONE".equals(queryGranularity)) {
        return QueryGranularities.NONE;
    } else if ("ALL".equals(queryGranularity)) {
        return QueryGranularities.ALL;
    } else {
        return QueryGranularity.fromString(queryGranularity);
    }
}
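This helper short-circuits the two common cases to the shared QueryGranularities constants and only falls back to the fromString parser for everything else, such as period-style granularity strings.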
Example 8: PigSegmentLoadSpec
import io.druid.granularity.QueryGranularity; // import the required package/class
@JsonCreator
public PigSegmentLoadSpec(
        @JsonProperty("dimensions") List<String> dimensions,
        @JsonProperty("metrics") List<Metric> metrics,
        @JsonProperty("granularity") QueryGranularity granularity,
        @JsonProperty("filter") DimFilter filter)
{
    this.dimensions = Preconditions.checkNotNull(dimensions, "null dimensions");
    this.metrics = Preconditions.checkNotNull(metrics, "null metrics");
    this.granularity = granularity == null ? QueryGranularities.NONE : granularity;
    this.filter = filter;
}
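A hedged construction sketch showing the null-granularity default; the dimension names are hypothetical and Metric is the spec's own metric descriptor type:

    PigSegmentLoadSpec spec = new PigSegmentLoadSpec(
            Arrays.asList("colo", "pool", "report"), // hypothetical dimension names
            Collections.<Metric>emptyList(),         // metrics must be non-null
            null,                                    // omitted granularity defaults to QueryGranularities.NONE
            null);                                   // no filter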
Example 9: getGranularity
import io.druid.granularity.QueryGranularity; // import the required package/class
public static QueryGranularity getGranularity(String type) {
    if ("ALL".equals(type)) {
        return QueryGranularity.ALL;
    } else if ("NONE".equals(type)) {
        return QueryGranularity.NONE;
    } else {
        return QueryGranularity.fromString(type);
    }
}
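Note that the comparisons are case-sensitive: only the exact strings "ALL" and "NONE" hit the constants, while any other value, including lower-case variants, is handed to QueryGranularity.fromString.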
Example 10: groupByQuery
import io.druid.granularity.QueryGranularity; // import the required package/class
@Test
public void groupByQuery() throws IOException {
    QueryableIndex index = createDruidSegments();
    List<DimFilter> filters = new ArrayList<DimFilter>();
    filters.add(DimFilters.dimEquals("report", "URLTransaction"));
    filters.add(DimFilters.dimEquals("pool", "r1cart"));
    filters.add(DimFilters.dimEquals("metric", "Duration"));
    GroupByQuery query = GroupByQuery.builder()
            .setDataSource("test")
            .setQuerySegmentSpec(QuerySegmentSpecs.create(new Interval(0, new DateTime().getMillis())))
            .setGranularity(QueryGranularity.fromString("NONE"))
            .addDimension("URL")
            .addAggregator(new LongSumAggregatorFactory("agg_count", "agg_count"))
            .addAggregator(new LongMaxAggregatorFactory("agg_max", "agg_max"))
            .addAggregator(new LongMinAggregatorFactory("agg_min", "agg_min"))
            .addAggregator(new DoubleSumAggregatorFactory("agg_sum", "agg_sum"))
            .setDimFilter(DimFilters.and(filters))
            .build();
    @SuppressWarnings("unchecked")
    Sequence<Row> sequence = QueryHelper.run(query, index);
    ArrayList<Row> results = Sequences.toList(sequence, Lists.<Row>newArrayList());
    Assert.assertEquals(results.size(), 2);
    if (results.get(0).getDimension("URL").get(0).equals("abc")) {
        Assert.assertEquals(results.get(0).getLongMetric("agg_sum"), 247);
        Assert.assertEquals(results.get(0).getLongMetric("agg_min"), 0);
        Assert.assertEquals(results.get(0).getLongMetric("agg_max"), 124);
        Assert.assertEquals(results.get(0).getLongMetric("agg_count"), 12);
        Assert.assertEquals(results.get(1).getLongMetric("agg_sum"), 123);
        Assert.assertEquals(results.get(1).getLongMetric("agg_min"), 0);
        Assert.assertEquals(results.get(1).getLongMetric("agg_max"), 123);
        Assert.assertEquals(results.get(1).getLongMetric("agg_count"), 3);
    } else {
        Assert.assertEquals(results.get(0).getLongMetric("agg_sum"), 123);
        Assert.assertEquals(results.get(0).getLongMetric("agg_min"), 0);
        Assert.assertEquals(results.get(0).getLongMetric("agg_max"), 123);
        Assert.assertEquals(results.get(0).getLongMetric("agg_count"), 3);
        Assert.assertEquals(results.get(1).getLongMetric("agg_sum"), 247);
        Assert.assertEquals(results.get(1).getLongMetric("agg_min"), 0);
        Assert.assertEquals(results.get(1).getLongMetric("agg_max"), 124);
        Assert.assertEquals(results.get(1).getLongMetric("agg_count"), 12);
    }
}
Example 11: configure
import io.druid.granularity.QueryGranularity; // import the required package/class
@Override
public void configure(Context context) {
    indexService = context.getString(INDEX_SERVICE);
    discoveryPath = context.getString(DISCOVERY_PATH);
    dimensions = Arrays.asList(context.getString(DIMENSIONS).split(","));
    firehosePattern = context.getString(FIREHOSE_PATTERN, DEFAULT_FIREHOSE);
    dataSource = context.getString(DATA_SOURCE, DEFAUL_DATASOURCE);
    aggregators = AggregatorsHelper.build(context.getString(AGGREGATORS));
    queryGranularity = QueryGranularity.fromString(context.getString(QUERY_GRANULARITY, DEFAULT_QUERY_GRANULARITY));
    segmentGranularity = Granularity.valueOf(context.getString(SEGMENT_GRANULARITY, DEFAULT_SEGMENT_GRANULARITY));
    windowPeriod = context.getString(WINDOW_PERIOD, DEFAULT_PERIOD);
    partitions = context.getInteger(PARTITIONS, DEFAULT_PARTITIONS);
    replicants = context.getInteger(REPLICANTS, DEFAULT_REPLICANTS);
    timestampField = context.getString(TIMESTAMP_FIELD, DEFAULT_TIMESTAMP_FIELD);
    timestampFormat = context.getString(TIMESTAMP_FORMAT, null);
    zookeeperLocation = context.getString(ZOOKEEPER_LOCATION, DEFAULT_ZOOKEEPER_LOCATION);
    baseSleepTime = context.getInteger(ZOOKEEPPER_BASE_SLEEP_TIME, DEFAULT_ZOOKEEPER_BASE_SLEEP);
    maxRetries = context.getInteger(ZOOKEEPER_MAX_RETRIES, DEFAULT_ZOOKEEPER_MAX_RETRIES);
    maxSleep = context.getInteger(ZOOKEEPER_MAX_SLEEP, DEFAULT_ZOOKEEPER_MAX_SLEEP);
    batchSize = context.getInteger(BATCH_SIZE, DEFAULT_BATCH_SIZE);
    druidService = sinkStrategy.getDruidService();
    if (druidService != null) {
        sinkCounter = new SinkCounter(this.getName());
        // timestampFormat defaults to null, so compare against the literal first to avoid an NPE
        if ("auto".equals(timestampFormat)) {
            dateTimeFormatter = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss'Z'");
        } else if ("millis".equals(timestampFormat)) {
            dateTimeFormatter = null;
        } else {
            dateTimeFormatter = DateTimeFormat.forPattern(timestampFormat);
        }
        // Filter defined fields
        Set<String> filter = new HashSet<String>();
        filter.add(timestampField);
        filter.addAll(dimensions);
        for (AggregatorFactory aggregatorFactory : aggregators) {
            filter.addAll(aggregatorFactory.requiredFields());
        }
        eventParser = new FlumeEventParser(timestampField, dateTimeFormatter, filter);
    }
}
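A note on the timestamp handling: because TIMESTAMP_FORMAT defaults to null, the format checks must be null-safe, hence the "auto".equals(timestampFormat) ordering above. "auto" maps to a fixed ISO-style pattern, "millis" disables the formatter (the sink presumably reads timestamps as epoch milliseconds), and any other value is treated as a Joda-Time pattern string.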