This article collects typical usage examples of the EventSubscriber.close method from the Python module pyon.event.event. If you are asking yourself what exactly Python EventSubscriber.close does and how to use it, the curated code samples below may help. You can also explore the containing class, pyon.event.event.EventSubscriber, for further usage examples.
The following shows 2 code examples of EventSubscriber.close, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code samples.
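Before the full examples, here is a minimal sketch of the typical EventSubscriber lifecycle, ending with close(). The event_type and callback keyword arguments and the start() method reflect common pyon usage but should be verified against your pyon version; the handler name on_event is purely illustrative.

    from pyon.event.event import EventSubscriber

    def on_event(event, headers):
        # React to the received event message.
        print("Got event: %s" % event)

    # Subscribe, listen, and tear the subscriber down when finished.
    subscriber = EventSubscriber(event_type='ResourceEvent', callback=on_event)
    subscriber.start()      # begin consuming events from the exchange
    try:
        pass                # ... application work happens here ...
    finally:
        subscriber.close()  # release the underlying listener and channel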
Example 1: IngestionWorker
# Required import: from pyon.event.event import EventSubscriber [as alias]
# Or alternatively: from pyon.event.event.EventSubscriber import close [as alias]
#......... (part of the code omitted here) .........
        ingestion_attributes = {'variables': [], 'number_of_records': -1, 'updated_metadata': False, 'updated_data': False}

        if dset_config is None:
            log.info('No dataset config for this stream!')
            return

        values_string = ''
        sha1 = ''
        encoding_type = ''

        for key, value in packet.identifiables.iteritems():
            if isinstance(value, DataStream):
                values_string = value.values
                value.values = ''
            elif isinstance(value, Encoding):
                sha1 = value.sha1
                encoding_type = value.encoding_type
            elif isinstance(value, Coverage):
                ingestion_attributes['variables'].append(key)
            elif isinstance(value, CountElement):
                ingestion_attributes['number_of_records'] = value.value

        if dset_config.archive_metadata is True:
            log.debug("Persisting data....")
            ingestion_attributes['updated_metadata'] = True
            self.persist_immutable(packet)

        if dset_config.archive_data is True:
            #@todo - grab the filepath to save the hdf string somewhere..
            ingestion_attributes['updated_data'] = True
            if values_string:
                calculated_sha1 = hashlib.sha1(values_string).hexdigest().upper()
                filename = FileSystem.get_hierarchical_url(FS.CACHE, calculated_sha1, ".%s" % encoding_type)

                if sha1 != calculated_sha1:
                    raise IngestionWorkerException('The stored sha1 differs from the one calculated from the received hdf_string')

                #log.warn('writing to filename: %s' % filename)
                with open(filename, mode='wb') as f:
                    f.write(values_string)  # the with block closes the file; no explicit f.close() needed
            else:
                log.warn("Nothing to write!")

        return ingestion_attributes
    def on_stop(self):
        TransformDataProcess.on_stop(self)

        # close event subscriber safely
        self.event_subscriber.close()
        self.gl.join(timeout=5)
        self.gl.kill()

        self.db.close()

    def on_quit(self):
        TransformDataProcess.on_quit(self)

        # close event subscriber safely
        self.event_subscriber.close()
        self.gl.join(timeout=5)
        self.gl.kill()

        self.db.close()
    def get_dataset_config(self, incoming_packet):
        """
        Gets the dset_config for the data stream
        """
        try:
            stream_id = incoming_packet.stream_resource_id
        except AttributeError:
            log.info('Packet does not have a data_stream_id: using default policy')
            return None

        dset_config = self.dataset_configs.get(stream_id, None)

        configuration = None
        if dset_config is None:
            log.info('No config found for stream id: %s' % stream_id)
        else:
            log.info('Got config: %s for stream id: %s' % (dset_config, stream_id))
            configuration = dset_config.configuration

        # return the extracted instruction
        return configuration
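Note that on_stop and on_quit in this example both close the same subscriber, so close() may run twice if the container invokes both hooks during shutdown. Whether a double close() is tolerated depends on the pyon version; a purely defensive, illustrative guard (the helper name _safe_close is hypothetical, not part of pyon) could look like this:

    def _safe_close(self):
        # Close the subscriber only once, then drop the reference so a
        # second shutdown hook becomes a no-op.
        subscriber = getattr(self, 'event_subscriber', None)
        if subscriber is not None:
            subscriber.close()
            self.event_subscriber = None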
Example 2: IngestionWorker
# Required import: from pyon.event.event import EventSubscriber [as alias]
# Or alternatively: from pyon.event.event.EventSubscriber import close [as alias]
#......... (part of the code omitted here) .........
        return self.db.create_doc(obj)

    def process_stream(self, packet, dset_config):
        """
        Accepts a stream. Also accepts an instruction (a dset_config). According to the received
        dset_config it processes the stream, e.g. stores it in hdf_storage or couch_storage.
        @param packet: The incoming data stream of type stream.
        @param dset_config: The dset_config telling this method what to do with the incoming data stream.
        """
        ingestion_attributes = {'variables': [], 'number_of_records': -1, 'updated_metadata': False, 'updated_data': False}

        if dset_config is None:
            log.info('No dataset config for this stream!')
            return

        # Get back to the serialized form - the process receives only the IonObject
        # after the interceptor stack has decoded it...
        simple_dict = ion_serializer.serialize(packet)  # packet is an ion_object
        byte_string = msgpack.packb(simple_dict, default=encode_ion)

        encoding_type = 'ion_msgpack'

        # The persisted sha1 is computed from the byte string msgpack creates
        calculated_sha1 = hashlib.sha1(byte_string).hexdigest().upper()

        dataset_granule = {
            'stream_id': dset_config.stream_id,
            'dataset_id': dset_config.dataset_id,
            'persisted_sha1': calculated_sha1,
            'encoding_type': encoding_type,
            'ts_create': get_ion_ts()
        }

        self.persist_immutable(dataset_granule)

        filename = FileSystem.get_hierarchical_url(FS.CACHE, calculated_sha1, ".%s" % encoding_type)
        with open(filename, mode='wb') as f:
            f.write(byte_string)  # the with block closes the file; no explicit f.close() needed

        return ingestion_attributes
    def on_stop(self):
        TransformDataProcess.on_stop(self)

        # close event subscriber safely
        self.event_subscriber.close()
        self.gl.join(timeout=5)
        self.gl.kill()

        self.db.close()

    def on_quit(self):
        TransformDataProcess.on_quit(self)

        # close event subscriber safely
        self.event_subscriber.close()
        self.gl.join(timeout=5)
        self.gl.kill()

        self.db.close()
    def get_dataset_config(self, incoming_packet):
        """
        Gets the dset_config for the data stream
        """
        try:
            # For now - use the data_producer_id field as a stream_id to get us moving
            #@todo fix the mismatch between producer_id and stream_id!
            stream_id = incoming_packet.data_producer_id
        except AttributeError:
            log.info('Packet does not have a data_stream_id: using default policy')
            return None

        dset_config = self.dataset_configs.get(stream_id, None)

        configuration = None
        if dset_config is None:
            log.info('No config found for stream id: %s' % stream_id)
        else:
            log.info('Got config: %s for stream id: %s' % (dset_config, stream_id))
            configuration = dset_config.configuration

        # return the extracted instruction
        return configuration
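Both examples name the persisted file after the upper-cased hex sha1 of the bytes they write, which makes the cache easy to audit afterwards. As a small usage sketch (the helper verify_persisted_file is hypothetical, not part of pyon), recomputing the digest of the file contents should reproduce the name the file was stored under:

    import hashlib

    def verify_persisted_file(filename, expected_sha1):
        # Re-read the persisted bytes and compare their sha1 with the
        # digest the file was stored under.
        with open(filename, 'rb') as f:
            data = f.read()
        return hashlib.sha1(data).hexdigest().upper() == expected_sha1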