This article collects typical usage examples of the Python method output.Output.set_dictionary. If you are unsure what Output.set_dictionary does, how to call it, or where to find working examples, the curated code samples below may help. You can also explore the containing class output.Output in more detail.
One code example of Output.set_dictionary is shown below; examples are ordered by popularity by default.
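Before the full example, here is a minimal sketch of the call shape, inferred only from the example below: set_dictionary stores the Output object's per-neighbor mapping, which the example reads back through output.dictionary. The no-argument Output() constructor and the exact shape of the mapping are assumptions made for illustration, not documented API.

from output import Output

output = Output()                                # assumed no-argument constructor
result = {10: [[1], []], 20: [[1], []]}          # neighbor id -> standard contexts (shape taken from the doctests below)
output.set_dictionary(result)                    # store the mapping that will be sent to each neighbor
current = output.dictionary                      # the example below reads the mapping back this way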
Example 1: ContextAggregator
# Required import: from output import Output [as alias]
# Alternatively: from output.Output import set_dictionary [as alias]
    # ......... part of the code omitted here .........

        result = {}
        output_dictionary = self.output.dictionary

        # if neighbor is None:
        #     for o in output_dictionary:
        #         single_contexts = self.output.generate_single_contexts(o=o, single_contexts=self.get_database_singles(timestamp))
        #         aggregate_context = self.output.generate_aggregate_contexts(o=o, aggregate_contexts={self.new_aggregate} | self.get_database_aggregates(timestamp))
        #         result[o] = single_contexts | aggregate_context
        #         self.context_history.add_to_history(node_number=o, value=output_dictionary[o], timestamp=timestamp)
        # else:
        assert type(neighbor) in [int, long]
        assert neighbor in output_dictionary
        #assert self.new_aggregate is not None, "aggregate is None"

        # TODO
        # Duplication of code
        o = neighbor
        single_contexts = self.output.generate_single_contexts(o=o, single_contexts=self.get_database_singles(timestamp))
        #print self.get_database_aggregates(timestamp)
        if not self.new_aggregate:
            new_aggregate = set()
        else:
            new_aggregate = {self.new_aggregate}
        aggregate_context = self.output.generate_aggregate_contexts(o=o, aggregate_contexts=(new_aggregate | self.get_database_aggregates(timestamp)))

        result[o] = single_contexts | aggregate_context
        self.context_history.add_to_history(node_number=o, value=output_dictionary[o], timestamp=timestamp)

        return result
    def receive(self, from_node, contexts, timestamp=0):
        """receive_data

        1. Stores the information about who sent what
        2. Increases the hop count when the context is a single context

        >>> d = ContextAggregator()
        >>> # two contexts are received
        >>> r = d.receive(1, {Context(value=1.0, cohorts=[0,1,2]), Context(value=1.0, cohorts=[0,1,3])})
        >>> same(d.get_received_data(1), [[0,1,2],[0,1,3]])
        True
        >>>
        """
        contexts = Context.increase_hop_count(contexts)
        self.input[from_node] = contexts

        received_info = contexts_to_standard(contexts)
        self.context_history.add_to_history(node_number=from_node, value=received_info, timestamp=timestamp)
    def process_to_set_output(self, neighbors=None, timestamp=0, iteration=0):
        """Generalized code for propagating contexts.

        Input:
            * neighbors: a dictionary that maps id -> standard contexts
            * timestamp: current timestamp

        Output:
            * output dictionary: a dictionary that maps id -> standard contexts

        1. On the first propagation at a timestamp, it samples the data
        2. Otherwise, it invokes the run() method

        >>> a = ContextAggregator(1)  # id == 1
        >>> same(a.process_to_set_output(neighbors=[10,20,30]), {10: [[1], []], 20: [[1], []], 30: [[1], []]})
        True
        >>> same(contexts_to_standard(a.get_database_singles()), [[1],[]])
        True
        >>> same(a.get_database_aggregates(), [])
        True
        >>> a.get_input_dictionary() == {}
        True
        """
        if self.is_this_new_timestamp(timestamp):
            sampled_data, special_flag = ContextAggregator.sample(self.get_sample_data(), timestamp)
            # store the sampled data for comparison purposes
            self.data = sampled_data

            hopcount = 0
            if special_flag:
                hopcount = Context.SPECIAL_CONTEXT

            context = Context(value=sampled_data, cohorts=[self.id], hopcount=hopcount, timestamp=timestamp)
            self.set_database(singles={context}, aggregates=set(), timestamp=timestamp)

            # store the context in the history and process
            result = {}
            for h in neighbors:
                result[h] = [[self.id], []]
        else:
            # process the early return
            result = {}
            if self.available_input_contexts():  # do nothing when there are no input contexts
                if self.is_aggregation_mode():
                    result, combined_singles, combined_aggregates, new_aggregates = self.dataflow_aggregations_mode(neighbors, timestamp, iteration)
                else:
                    result, combined_singles, combined_aggregates, new_aggregates = self.dataflow_singles_mode(neighbors, timestamp, iteration)

        # this also resets the actual_sent_dictionary
        self.output.set_dictionary(result)

        # WARNING! Don't forget that the input is cleared after the process_to_set_output() call
        self.initalize_before_iteration()
        return result
Developer: prosseek | Project: Efficient-Decentralized-Context-Sharing-via-Aggregation-Simulation | Lines of code: 104 | Source file: context_aggregator.py
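To place set_dictionary in context: in the example above it is called at the end of process_to_set_output(), so after one call the node's Output holds exactly the per-neighbor mapping that was just computed. A short sketch reusing the doctest values above; reading the mapping back through a.output.dictionary is an assumption based on how the method body accesses it, not a documented accessor.

a = ContextAggregator(1)                                     # node with id == 1
result = a.process_to_set_output(neighbors=[10, 20, 30], timestamp=0)
# result == {10: [[1], []], 20: [[1], []], 30: [[1], []]}    # from the doctest above
# Internally, self.output.set_dictionary(result) has been called, so
# a.output.dictionary now reflects what will be sent to each neighbor
# (assuming Output keeps the mapping it was given).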