本文整理汇总了Golang中github.com/sburnett/transformer/store.Manager.Reader方法的典型用法代码示例。如果您正苦于以下问题:Golang Manager.Reader方法的具体用法?Golang Manager.Reader怎么用?Golang Manager.Reader使用的例子?那么恭喜您，这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类github.com/sburnett/transformer/store.Manager的用法示例。
在下文中一共展示了Manager.Reader方法的6个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Golang代码示例。
示例1: CsvPipeline
// CsvPipeline builds a single-stage pipeline that copies every record in
// the "stats" LevelDB store into a CSV file named "stats.csv".
//
// The CSV writer decodes each record into the pointer-backed columns below:
// three string key columns followed by three int64 value columns.
func CsvPipeline(levelDbManager, csvManager store.Manager) transformer.Pipeline {
	var (
		experiment, node, filename                 string
		receivedTimestamp, creationTimestamp, size int64
	)
	keyColumns := []string{"experiment", "node", "filename"}
	valueColumns := []string{"received_timestamp", "creation_timestamp", "size"}
	statsCsv := csvManager.Writer("stats.csv", keyColumns, valueColumns, &experiment, &node, &filename, &receivedTimestamp, &creationTimestamp, &size)
	return []transformer.PipelineStage{
		transformer.PipelineStage{
			Name:   "WriteStatsCsv",
			Reader: levelDbManager.Reader("stats"),
			Writer: statsCsv,
		},
	}
}
示例2: SummarizeHealthPipeline
func SummarizeHealthPipeline(levelDbManager, csvManager store.Manager) transformer.Pipeline {
memoryStore := levelDbManager.Reader("memory")
memoryUsageByDayStore := levelDbManager.ReadingWriter("memory-usage-by-day")
memoryUsageByDaySummarizedStore := levelDbManager.ReadingWriter("memory-usage-by-day-summarized")
filesystemStore := levelDbManager.Reader("filesystem")
filesystemUsageByDayStore := levelDbManager.ReadingWriter("filesystem-usage-by-day")
filesystemUsageByDaySummarizedStore := levelDbManager.ReadingWriter("filesystem-usage-by-day-summarized")
var timestamp, usage int64
var filesystem, node string
memoryUsageSummaryCsv := csvManager.Writer("memory-usage-summary.csv", []string{"timestamp", "node"}, []string{"usage"}, ×tamp, &node, &usage)
filesystemUsageSummaryCsv := csvManager.Writer("filesystem-usage-summary.csv", []string{"filesystem", "timestamp", "node"}, []string{"usage"}, &filesystem, ×tamp, &node, &usage)
return []transformer.PipelineStage{
transformer.PipelineStage{
Name: "OrderMemoryUsageByTimestamp",
Reader: memoryStore,
Transformer: transformer.MakeMapFunc(orderRecordsByDay),
Writer: memoryUsageByDayStore,
},
transformer.PipelineStage{
Name: "SummarizeMemoryUsage",
Reader: memoryUsageByDayStore,
Transformer: transformer.TransformFunc(summarizeMemoryUsage),
Writer: memoryUsageByDaySummarizedStore,
},
transformer.PipelineStage{
Name: "WriteMemoryUsageSummaryCsv",
Reader: memoryUsageByDaySummarizedStore,
Writer: memoryUsageSummaryCsv,
},
transformer.PipelineStage{
Name: "OrderFilesystemUsageByTimestamp",
Reader: filesystemStore,
Transformer: transformer.MakeMapFunc(orderFilesystemRecordsByDay),
Writer: filesystemUsageByDayStore,
},
transformer.PipelineStage{
Name: "SummarizeFilesystemUsage",
Reader: filesystemUsageByDayStore,
Transformer: transformer.TransformFunc(summarizeFilesystemUsage),
Writer: filesystemUsageByDaySummarizedStore,
},
transformer.PipelineStage{
Name: "WriteFilesystemUsageSummaryCsv",
Reader: filesystemUsageByDaySummarizedStore,
Writer: filesystemUsageSummaryCsv,
},
}
}
示例3: FilterSessionsPipeline
// FilterSessionsPipeline builds a single-stage pipeline that selects traces
// whose sessions fall inside [sessionStartTime, sessionEndTime] (given in
// seconds; converted to microseconds here) and writes them to the store
// named outputName. Traces are demuxed together with the key ranges from
// the "availability-done" store.
func FilterSessionsPipeline(sessionStartTime, sessionEndTime int64, levelDbManager store.Manager, outputName string) transformer.Pipeline {
	// Session bounds arrive in seconds; the trace keys use microseconds.
	filterParams := filterSessions{
		SessionStartTime: sessionStartTime * 1000000,
		SessionEndTime:   sessionEndTime * 1000000,
	}
	allTraces := levelDbManager.Reader("traces")
	availabilityRanges := levelDbManager.Reader("availability-done")
	output := levelDbManager.Writer(outputName)
	return []transformer.PipelineStage{
		transformer.PipelineStage{
			Name:        "FilterSessions",
			Reader:      store.NewDemuxingReader(availabilityRanges, allTraces),
			Transformer: filterParams,
			Writer:      output,
		},
	}
}
示例4: TimesCsvPipeline
// TimesCsvPipeline builds a single-stage pipeline that reads the "stats"
// store and, for each (experiment, node) pair, writes a CSV file named
// "<experiment>_<node>.csv" under csvRoot containing one
// "creationTimestamp,receivedTimestamp" line per record.
//
// Records are assumed to arrive grouped by (experiment, node): a new file
// is opened whenever either changes, replacing the previous one.
func TimesCsvPipeline(levelDbManager store.Manager, csvRoot string) transformer.Pipeline {
	writeTimesCsv := func(inputChan, outputChan chan *store.Record) {
		var (
			openHandle       *os.File
			activeExperiment string
			activeNode       string
		)
		for record := range inputChan {
			var key StatsKey
			lex.DecodeOrDie(record.Key, &key)
			var value StatsValue
			lex.DecodeOrDie(record.Value, &value)
			if key.Experiment != activeExperiment || key.Node != activeNode {
				// Switch output files: close the finished one and start a
				// fresh CSV for the new (experiment, node) pair.
				if openHandle != nil {
					openHandle.Close()
				}
				activeExperiment = key.Experiment
				activeNode = key.Node
				csvName := fmt.Sprintf("%s_%s.csv", activeExperiment, activeNode)
				handle, err := os.Create(filepath.Join(csvRoot, csvName))
				if err != nil {
					panic(err)
				}
				openHandle = handle
			}
			if _, err := fmt.Fprintf(openHandle, "%d,%d\n", value.CreationTimestamp, value.ReceivedTimestamp); err != nil {
				panic(err)
			}
		}
		if openHandle != nil {
			openHandle.Close()
		}
	}
	return []transformer.PipelineStage{
		transformer.PipelineStage{
			Name:        "WriteTimesCsv",
			Reader:      levelDbManager.Reader("stats"),
			Transformer: transformer.TransformFunc(writeTimesCsv),
		},
	}
}
示例5: runFilterSessionsPipeline
// runFilterSessionsPipeline runs FilterSessionsPipeline over the given
// session window, writing filtered traces to the "test" store, then prints
// each resulting trace key as "nodeId sessionId sequenceNumber".
func runFilterSessionsPipeline(startSecs, endSecs int64, levelDbManager store.Manager) {
	transformer.RunPipeline(FilterSessionsPipeline(startSecs, endSecs, levelDbManager, "test"))
	results := levelDbManager.Reader("test")
	results.BeginReading()
	for {
		record, err := results.ReadRecord()
		if err != nil {
			panic(err)
		}
		// A nil record signals the end of the store.
		if record == nil {
			break
		}
		var key TraceKey
		lex.DecodeOrDie(record.Key, &key)
		fmt.Printf("%s %d %d\n", key.NodeId, key.SessionId, key.SequenceNumber)
	}
	results.EndReading()
}
示例6: SummarizePipeline
func SummarizePipeline(levelDbManager store.Manager, csvManager store.Manager) transformer.Pipeline {
statsStore := levelDbManager.Reader("stats")
statsWithHourStore := levelDbManager.ReadingDeleter("stats-with-hour")
statsWithDayStore := levelDbManager.ReadingDeleter("stats-with-day")
statsWithReceivedTimestampStore := levelDbManager.ReadingDeleter("stats-with-received-timestamp")
interarrivalTimesStore := levelDbManager.ReadingDeleter("interarrival-times")
sizeSummaryStore := levelDbManager.ReadingWriter("size-summary")
sizeSummaryByHourStore := levelDbManager.ReadingWriter("size-summary-by-hour")
sizeSummaryByDayStore := levelDbManager.ReadingWriter("size-summary-by-day")
interarrivalTimesSummaryStore := levelDbManager.ReadingWriter("interarrival-times-summary")
sizePerDayStore := levelDbManager.ReadingWriter("sizes-by-day")
sizeSummaryWriter := makeSummaryCsvWriter(csvManager, "size-summary.csv")
sizeSummaryByHourWriter := makeSummaryByTimestampCsvWriter(csvManager, "size-summary-by-hour.csv")
sizeSummaryByDayWriter := makeSummaryByTimestampCsvWriter(csvManager, "size-summary-by-day.csv")
interarrivalTimesSummaryWriter := makeSummaryCsvWriter(csvManager, "interarrival-times-summary.csv")
sizesPerDayWriter := csvManager.Writer("sizes-per-day.csv", []string{"experiment", "node", "timestamp"}, []string{"count"}, new(string), new(string), new(int64), new(int64))
return []transformer.PipelineStage{
transformer.PipelineStage{
Name: "SummarizeSizes",
Reader: statsStore,
Transformer: transformer.TransformFunc(summarizeSizes),
Writer: sizeSummaryStore,
},
transformer.PipelineStage{
Name: "RekeyStatsByHour",
Reader: statsStore,
Transformer: transformer.MakeMapFunc(rekeyStatsByHour),
Writer: store.NewTruncatingWriter(statsWithHourStore),
},
transformer.PipelineStage{
Name: "SummarizeSizesByHour",
Reader: statsWithHourStore,
Transformer: transformer.TransformFunc(summarizeSizesByTimestamp),
Writer: sizeSummaryByHourStore,
},
transformer.PipelineStage{
Name: "RekeyStatsByDay",
Reader: statsStore,
Transformer: transformer.MakeMapFunc(rekeyStatsByDay),
Writer: store.NewTruncatingWriter(statsWithDayStore),
},
transformer.PipelineStage{
Name: "SummarizeSizesByDay",
Reader: statsWithDayStore,
Transformer: transformer.TransformFunc(summarizeSizesByTimestamp),
Writer: sizeSummaryByDayStore,
},
transformer.PipelineStage{
Name: "RekeyStatsByReceivedTimestamp",
Reader: statsStore,
Transformer: transformer.MakeMapFunc(rekeyStatsByReceviedTimestamp),
Writer: store.NewTruncatingWriter(statsWithReceivedTimestampStore),
},
transformer.PipelineStage{
Name: "ComputeInterarrivalTimes",
Reader: statsWithReceivedTimestampStore,
Transformer: transformer.TransformFunc(computeInterarrivalTimes),
Writer: store.NewTruncatingWriter(interarrivalTimesStore),
},
transformer.PipelineStage{
Name: "SummarizeInterarrival",
Reader: interarrivalTimesStore,
Transformer: transformer.TransformFunc(summarizeInterarrivalTimes),
Writer: interarrivalTimesSummaryStore,
},
transformer.PipelineStage{
Name: "SummarizeSizesPerDay",
Reader: statsStore,
Transformer: transformer.TransformFunc(summarizeSizesPerDay),
Writer: sizePerDayStore,
},
transformer.PipelineStage{
Name: "AggregateExperimentsPerDay",
Reader: sizePerDayStore,
Transformer: transformer.TransformFunc(aggregateSizesPerDay),
Writer: sizePerDayStore,
},
transformer.PipelineStage{
Name: "WriteSizesSummary",
Reader: sizeSummaryStore,
Writer: sizeSummaryWriter,
},
transformer.PipelineStage{
Name: "WriteSizesSummaryByHour",
Reader: sizeSummaryByHourStore,
Writer: sizeSummaryByHourWriter,
},
transformer.PipelineStage{
Name: "WriteSizesSummaryByDay",
Reader: sizeSummaryByDayStore,
Writer: sizeSummaryByDayWriter,
},
transformer.PipelineStage{
Name: "WriteInterarrivalTimesSummary",
Reader: interarrivalTimesSummaryStore,
Writer: interarrivalTimesSummaryWriter,
},
transformer.PipelineStage{
//.........这里部分代码省略.........