This page collects typical usage examples of the Golang method github.com/cockroachdb/cockroach/ts/tspb.Query.GetSourceAggregator. If you have been wondering what Query.GetSourceAggregator does, how to call it, or where to find working examples, the hand-picked code samples below should help. You can also browse further usage examples for the enclosing type, github.com/cockroachdb/cockroach/ts/tspb.Query.
Two code examples of the Query.GetSourceAggregator method are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the site recommend better Golang code examples.
Example 1: Query
// Query returns datapoints for the named time series during the supplied time
// span. Data is returned as a series of consecutive data points.
//
// Data is queried only at the Resolution supplied: if data for the named time
// series is not stored at the given resolution, an empty result will be
// returned.
//
// All data stored on the server is downsampled to some degree; the data points
// returned represent the average value within a sample period. Each datapoint's
// timestamp falls in the middle of the sample period it represents.
//
// If data for the named time series was collected from multiple sources, each
// returned datapoint will represent an aggregate (sum, average or maximum,
// according to the query's source aggregator) of the datapoints from all
// sources at the same time. The returned string slice contains a list of all
// sources for the metric which were aggregated to produce the result.
func (db *DB) Query(query tspb.Query, r Resolution, startNanos, endNanos int64) ([]tspb.TimeSeriesDatapoint, []string, error) {
	// Normalize startNanos to the nearest SampleDuration boundary.
	startNanos -= startNanos % r.SampleDuration()
	var rows []client.KeyValue
	if len(query.Sources) == 0 {
		// Based on the supplied timestamps and resolution, construct start and end
		// keys for a scan that will return every key with data relevant to the
		// query.
		startKey := MakeDataKey(query.Name, "" /* source */, r, startNanos)
		endKey := MakeDataKey(query.Name, "" /* source */, r, endNanos).PrefixEnd()
		b := &client.Batch{}
		b.Scan(startKey, endKey)
		if err := db.db.Run(b); err != nil {
			return nil, nil, err
		}
		rows = b.Results[0].Rows
	} else {
		b := &client.Batch{}
		// Iterate over all key timestamps which may contain data for the given
		// sources, based on the given start/end time and the resolution.
		kd := r.KeyDuration()
		startKeyNanos := startNanos - (startNanos % kd)
		endKeyNanos := endNanos - (endNanos % kd)
		for currentTimestamp := startKeyNanos; currentTimestamp <= endKeyNanos; currentTimestamp += kd {
			for _, source := range query.Sources {
				key := MakeDataKey(query.Name, source, r, currentTimestamp)
				b.Get(key)
			}
		}
		err := db.db.Run(b)
		if err != nil {
			return nil, nil, err
		}
		for _, result := range b.Results {
			row := result.Rows[0]
			if row.Value == nil {
				continue
			}
			rows = append(rows, row)
		}
	}
	// Convert the queried source data into a set of data spans, one for each
	// source.
	sourceSpans, err := makeDataSpans(rows, startNanos)
	if err != nil {
		return nil, nil, err
	}
	// Compute a downsample function which will be used to return values from
	// each source for each sample period.
	downsampler, err := getDownsampleFunction(query.GetDownsampler())
	if err != nil {
		return nil, nil, err
	}
	// If we are returning a derivative, iteration needs to start at offset -1
	// (in order to correctly compute the rate of change at offset 0).
	var startOffset int32
	isDerivative := query.GetDerivative() != tspb.TimeSeriesQueryDerivative_NONE
	if isDerivative {
		startOffset = -1
	}
	// Create an interpolatingIterator for each dataSpan, adding each iterator
	// into a unionIterator collection. This is also where we compute a list of
	// all sources with data present in the query.
	sources := make([]string, 0, len(sourceSpans))
	iters := make(unionIterator, 0, len(sourceSpans))
	for name, span := range sourceSpans {
		sources = append(sources, name)
		iters = append(iters, span.newIterator(startOffset, downsampler))
	}
	// Choose an aggregation function to use when taking values from the
	// unionIterator.
	var valueFn func() float64
	switch query.GetSourceAggregator() {
	case tspb.TimeSeriesQueryAggregator_SUM:
		valueFn = iters.sum
	case tspb.TimeSeriesQueryAggregator_AVG:
		valueFn = iters.avg
	case tspb.TimeSeriesQueryAggregator_MAX:
		//......... part of the code omitted here .........
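
Before the second example, here is a minimal usage sketch of the pattern illustrated above: building a tspb.Query whose SourceAggregator field is later read back via Query.GetSourceAggregator, then passing it to DB.Query. The field names mirror Example 2 below; the helper function itself, the metric name, and the source IDs are assumptions for illustration, not code from this page, and the sketch assumes the same package context as the examples.

// queryWithSumAggregator is a hypothetical helper (same package as the examples
// above) showing how a caller might set the aggregator that Example 1 switches on.
func queryWithSumAggregator(
	tsdb *DB, res Resolution, startNanos, endNanos int64,
) ([]tspb.TimeSeriesDatapoint, []string, error) {
	agg := tspb.TimeSeriesQueryAggregator_SUM
	down := tspb.TimeSeriesQueryAggregator_AVG
	q := tspb.Query{
		Name:             "cr.node.sys.cpu.percent", // hypothetical metric name
		Sources:          []string{"1", "2"},        // leave empty to scan every source
		Downsampler:      &down,
		SourceAggregator: &agg, // read back inside Query via query.GetSourceAggregator()
	}
	return tsdb.Query(q, res, startNanos, endNanos)
}

Leaving SourceAggregator nil is also possible; the generated getter then falls back to the field's default, which is presumably why the test helper in Example 2 takes pointer arguments for these options.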
Example 2: assertQuery
// assertQuery generates a query result from the local test model and compares
// it against the query results returned from the server.
func (tm *testModel) assertQuery(
	name string,
	sources []string,
	downsample, agg *tspb.TimeSeriesQueryAggregator,
	derivative *tspb.TimeSeriesQueryDerivative,
	r Resolution,
	start, end int64,
	expectedDatapointCount, expectedSourceCount int,
) {
	// Query the actual server.
	q := tspb.Query{
		Name:             name,
		Downsampler:      downsample,
		SourceAggregator: agg,
		Derivative:       derivative,
		Sources:          sources,
	}
	actualDatapoints, actualSources, err := tm.DB.Query(q, r, start, end)
	if err != nil {
		tm.t.Fatal(err)
	}
	if a, e := len(actualDatapoints), expectedDatapointCount; a != e {
		tm.t.Logf("actual datapoints: %v", actualDatapoints)
		tm.t.Fatal(errors.Errorf("query expected %d datapoints, got %d", e, a))
	}
	if a, e := len(actualSources), expectedSourceCount; a != e {
		tm.t.Fatal(errors.Errorf("query expected %d sources, got %d", e, a))
	}
	// Construct an expected result for comparison.
	var expectedDatapoints []tspb.TimeSeriesDatapoint
	expectedSources := make([]string, 0, 0)
	dataSpans := make(map[string]*dataSpan)
	// If no specific sources were provided, look for data from every source
	// encountered by the test model.
	var sourcesToCheck map[string]struct{}
	if len(sources) == 0 {
		sourcesToCheck = tm.seenSources
	} else {
		sourcesToCheck = make(map[string]struct{})
		for _, s := range sources {
			sourcesToCheck[s] = struct{}{}
		}
	}
	// Iterate over all possible sources which may have data for this query.
	for sourceName := range sourcesToCheck {
		// Iterate over all possible key times at which query data may be present.
		for time := start - (start % r.KeyDuration()); time < end; time += r.KeyDuration() {
			// Construct a key for this source/time and retrieve it from the model.
			key := MakeDataKey(name, sourceName, r, time)
			value, ok := tm.modelData[string(key)]
			if !ok {
				continue
			}
			// Add data from the key to the correct dataSpan.
			data, err := value.GetTimeseries()
			if err != nil {
				tm.t.Fatal(err)
			}
			ds, ok := dataSpans[sourceName]
			if !ok {
				ds = &dataSpan{
					startNanos:  start - (start % r.SampleDuration()),
					sampleNanos: r.SampleDuration(),
				}
				dataSpans[sourceName] = ds
				expectedSources = append(expectedSources, sourceName)
			}
			if err := ds.addData(data); err != nil {
				tm.t.Fatal(err)
			}
		}
	}
	// Iterate over data in all dataSpans and construct expected datapoints.
	var startOffset int32
	isDerivative := q.GetDerivative() != tspb.TimeSeriesQueryDerivative_NONE
	if isDerivative {
		startOffset = -1
	}
	downsampleFn, err := getDownsampleFunction(q.GetDownsampler())
	if err != nil {
		tm.t.Fatal(err)
	}
	var iters unionIterator
	for _, ds := range dataSpans {
		iters = append(iters, ds.newIterator(startOffset, downsampleFn))
	}
	iters.init()
	currentVal := func() tspb.TimeSeriesDatapoint {
		var value float64
		switch q.GetSourceAggregator() {
		case tspb.TimeSeriesQueryAggregator_SUM:
			value = iters.sum()
			//......... part of the code omitted here .........
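
Both snippets are truncated inside the switch on GetSourceAggregator, so only the SUM and AVG branches are fully visible. Purely as an illustration of the dispatch pattern (not a reconstruction of the omitted lines), the branching might be factored out as below; iters.sum, iters.avg, and errors.Errorf come from the examples above, while iters.max and the default error branch are assumptions.

// aggregatorFunc is a hypothetical helper that picks the per-sample aggregation
// closure for a unionIterator based on the query's source aggregator.
func aggregatorFunc(q tspb.Query, iters unionIterator) (func() float64, error) {
	switch q.GetSourceAggregator() {
	case tspb.TimeSeriesQueryAggregator_SUM:
		return iters.sum, nil // seen in Example 1
	case tspb.TimeSeriesQueryAggregator_AVG:
		return iters.avg, nil // seen in Example 1
	case tspb.TimeSeriesQueryAggregator_MAX:
		return iters.max, nil // assumed: a max() method analogous to sum/avg
	default:
		return nil, errors.Errorf("unknown source aggregator option: %s", q.GetSourceAggregator())
	}
}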