This article collects typical usage examples of the Python method pyon.util.containers.DotDict.get_safe. If you are wondering exactly what DotDict.get_safe does, how to call it, and what it looks like in practice, the curated examples below should help. You can also explore further usage of its containing class, pyon.util.containers.DotDict.
Four code examples of the DotDict.get_safe method are shown below, ordered by popularity.
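Before the examples, here is a minimal sketch of the method itself (the keys and defaults are illustrative only): get_safe walks a dotted key path through a nested DotDict and returns a default instead of raising when any segment along the path is missing.

from pyon.util.containers import DotDict

CFG = DotDict()
CFG.server.amqp.host = "localhost"      # attribute access auto-creates the nested DotDicts

# Full path exists -> the stored value is returned.
assert CFG.get_safe("server.amqp.host") == "localhost"
# Any missing segment -> the supplied default (None if omitted), no KeyError/AttributeError.
assert CFG.get_safe("server.amqp.port", 5672) == 5672
assert CFG.get_safe("no.such.path") is None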
Example 1: test_dotdict_copy
# Required import: from pyon.util.containers import DotDict [as alias]
# Or: from pyon.util.containers.DotDict import get_safe [as alias]
def test_dotdict_copy(self):
    d = DotDict({"foo": "bar"})
    d2 = copy.copy(d)
    self.assertTrue(hasattr(d2, "foo"))
    self.assertEqual("bar", d2.foo)

    # output_streams = copy(self.CFG.get_safe('process.publish_streams'))
    v = "a12345"
    CFG = DotDict()
    CFG.process.publish_streams.salinity = v
    print "CFG =", CFG
    self.assertTrue(hasattr(CFG.process.publish_streams, "salinity"))
    self.assertEqual(v, CFG.process.publish_streams.salinity)
    self.assertEqual(v, CFG.get_safe("process.publish_streams").salinity)
    self.assertEqual(v, copy.copy(CFG.get_safe("process.publish_streams")).salinity)

    output_streams = copy.copy(CFG.get_safe("process.publish_streams"))
    print "output_streams =", output_streams
    self.assertTrue(hasattr(output_streams, "salinity"))
    print "output_streams.salinity =", output_streams.salinity
    self.assertEqual(v, output_streams.salinity)

    first_stream = output_streams.popitem()
    print "first_stream =", first_stream
    self.assertEqual(v, first_stream[1])

    d.lock()
    dl = copy.copy(d)
    self.assertTrue(hasattr(dl, "foo"))
    self.assertEqual("bar", dl.foo)
    with self.assertRaises(AttributeError):
        d.foo2 = "nope"
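The test above leans on get_safe handing back the nested DotDict stored under the dotted path, not a detached plain-dict copy; that is why attribute access and popitem still work after copy.copy. A minimal sketch of that assumption, outside the test class (the identity check is the assumption being illustrated):

import copy
from pyon.util.containers import DotDict

CFG = DotDict()
CFG.process.publish_streams.salinity = "a12345"

streams = CFG.get_safe("process.publish_streams")
# Assumption: get_safe returns the stored nested DotDict itself,
# so a shallow copy still behaves like a DotDict.
assert streams is CFG.process.publish_streams
assert copy.copy(streams).salinity == "a12345"
assert streams.popitem() == ("salinity", "a12345")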
Example 2: _create_process_config
# Required import: from pyon.util.containers import DotDict [as alias]
# Or: from pyon.util.containers.DotDict import get_safe [as alias]
def _create_process_config(self, config):
    """ Prepare the config for the new process. Clone system config and apply process overrides.
    Support including config by reference of a resource attribute or object from object store.
    """
    process_cfg = deepcopy(CFG)
    if config:
        # Use provided config. Must be dict or DotDict
        if not isinstance(config, DotDict):
            config = DotDict(config)

        if config.get_safe("process.config_ref"):
            # Use a reference
            config_ref = config.get_safe("process.config_ref")
            log.info("Enhancing new process spawn config from ref=%s" % config_ref)
            matches = re.match(r'^([A-Za-z]+):([A-Za-z0-9_\.]+)/(.*)$', config_ref)
            if matches:
                ref_type, ref_id, ref_ext = matches.groups()

                if ref_type == "resources":
                    if self.container.has_capability(self.container.CCAP.RESOURCE_REGISTRY):
                        try:
                            obj = self.container.resource_registry.read(ref_id)
                            if obj and hasattr(obj, ref_ext):
                                ref_config = getattr(obj, ref_ext)
                                if isinstance(ref_config, dict):
                                    dict_merge(process_cfg, ref_config, inplace=True)
                                else:
                                    raise BadRequest("config_ref %s exists but not dict" % config_ref)
                            else:
                                raise BadRequest("config_ref %s - attribute not found" % config_ref)
                        except NotFound as nf:
                            log.warn("config_ref %s - object not found" % config_ref)
                            raise
                    else:
                        log.error("Container missing RESOURCE_REGISTRY capability to resolve process config ref %s" % config_ref)

                elif ref_type == "objects":
                    if self.container.has_capability(self.container.CCAP.OBJECT_STORE):
                        try:
                            obj = self.container.object_store.read_doc(ref_id)
                            ref_config = obj
                            if ref_ext:
                                ref_config = get_safe(obj, ref_ext, None)
                                if ref_config is None:
                                    raise BadRequest("config_ref %s - attribute not found" % config_ref)

                            if isinstance(ref_config, dict):
                                dict_merge(process_cfg, ref_config, inplace=True)
                            else:
                                raise BadRequest("config_ref %s exists but not dict" % config_ref)
                        except NotFound as nf:
                            log.warn("config_ref %s - object not found" % config_ref)
                            raise
                    else:
                        log.error("Container missing OBJECT_STORE capability to resolve process config ref %s" % config_ref)
                else:
                    raise BadRequest("Unknown reference type in: %s" % config_ref)

        dict_merge(process_cfg, config, inplace=True)

    if self.container.spawn_args:
        # Override config with spawn args
        dict_merge(process_cfg, self.container.spawn_args, inplace=True)

    #log.debug("spawn_process() pid=%s process_cfg=%s", process_id, process_cfg)
    return process_cfg
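A hedged aside on the config_ref format handled above: the regex expects "<ref_type>:<ref_id>/<attr_or_path>", where ref_type is either "resources" (read a resource and take one attribute) or "objects" (read an object-store document and optionally drill into it via a dotted path resolved with get_safe). The ids and attribute names below are made up purely to illustrate the parse:

import re

# Hypothetical references; only the "<type>:<id>/<attr_or_path>" shape matters here.
for config_ref in ("resources:a1b2c3d4e5/driver_config",
                   "objects:process_defaults_doc/process.queue_name"):
    matches = re.match(r'^([A-Za-z]+):([A-Za-z0-9_\.]+)/(.*)$', config_ref)
    ref_type, ref_id, ref_ext = matches.groups()
    print((ref_type, ref_id, ref_ext))
# -> ('resources', 'a1b2c3d4e5', 'driver_config')
# -> ('objects', 'process_defaults_doc', 'process.queue_name')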
Example 3: create_data_process
# Required import: from pyon.util.containers import DotDict [as alias]
# Or: from pyon.util.containers.DotDict import get_safe [as alias]
def create_data_process(self, data_process_definition_id='', in_data_product_ids=None, out_data_product_ids=None, configuration=None):
    '''
    Creates a DataProcess resource and launches the process.
    A DataProcess is a process that receives one (or more) data products and produces one (or more) data products.

    @param data_process_definition_id : The Data Process Definition to use, if none is specified the standard TransformDataProcess is used
    @param in_data_product_ids : A list of input data product identifiers
    @param out_data_product_ids : A list of output data product identifiers
    @param configuration : The configuration dictionary for the process, and the routing table:

    The routing table is defined as such:
        { in_data_product_id: {out_data_product_id : actor }}

    Routes are specified in the configuration dictionary under the item "routes"
    actor is either None (for ParameterFunctions) or a valid TransformFunction identifier
    '''
    configuration = DotDict(configuration or {})
    in_data_product_ids = in_data_product_ids or []
    out_data_product_ids = out_data_product_ids or []

    routes = configuration.get_safe('process.routes', {})
    if not routes and (1 == len(in_data_product_ids) == len(out_data_product_ids)):
        routes = {in_data_product_ids[0]: {out_data_product_ids[0]: None}}
    # Routes are not supported for processes with discrete data process definitions
    elif not routes and not data_process_definition_id:
        raise BadRequest('No valid route defined for this data process.')

    self.validate_compatibility(data_process_definition_id, in_data_product_ids, out_data_product_ids, routes)
    routes = self._manage_routes(routes)

    configuration.process.input_products = in_data_product_ids
    configuration.process.output_products = out_data_product_ids
    configuration.process.routes = routes
    if 'lookup_docs' in configuration.process:
        configuration.process.lookup_docs.extend(self._get_lookup_docs(in_data_product_ids, out_data_product_ids))
    else:
        configuration.process.lookup_docs = self._get_lookup_docs(in_data_product_ids, out_data_product_ids)

    dproc = DataProcess()
    dproc.name = 'data_process_%s' % self.get_unique_id()
    dproc.configuration = configuration
    dproc_id, rev = self.clients.resource_registry.create(dproc)
    dproc._id = dproc_id
    dproc._rev = rev

    for data_product_id in in_data_product_ids:
        self.clients.resource_registry.create_association(subject=dproc_id, predicate=PRED.hasInputProduct, object=data_product_id)

    if data_process_definition_id:
        self.clients.resource_registry.create_association(data_process_definition_id, PRED.hasDataProcess, dproc_id)

    self._manage_producers(dproc_id, out_data_product_ids)
    self._manage_attachments()

    queue_name = self._create_subscription(dproc, in_data_product_ids)

    pid = self._launch_data_process(
        queue_name=queue_name,
        data_process_definition_id=data_process_definition_id,
        out_data_product_ids=out_data_product_ids,
        configuration=configuration)

    self.clients.resource_registry.create_association(subject=dproc_id, predicate=PRED.hasProcess, object=pid)

    return dproc_id
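A minimal sketch of the configuration this method reads via get_safe: the routing table lives under process.routes as {in_data_product_id: {out_data_product_id: actor}}, with actor set to None for ParameterFunctions or to a TransformFunction identifier otherwise. All ids and the client name data_process_management below are placeholders, not values from the source:

from pyon.util.containers import DotDict

configuration = DotDict()
configuration.process.routes = {
    'in_data_product_id_1': {
        'out_data_product_id_1': 'transform_function_id_1',  # TransformFunction actor
        'out_data_product_id_2': None,                       # None -> ParameterFunction route
    },
}
# Assumes data_process_management is a DataProcessManagementService client.
dproc_id = data_process_management.create_data_process(
    data_process_definition_id='data_process_definition_id_1',
    in_data_product_ids=['in_data_product_id_1'],
    out_data_product_ids=['out_data_product_id_1', 'out_data_product_id_2'],
    configuration=configuration)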
Example 4: spawn_process
# Required import: from pyon.util.containers import DotDict [as alias]
# Or: from pyon.util.containers.DotDict import get_safe [as alias]
def spawn_process(self, name=None, module=None, cls=None, config=None, process_id=None):
    """
    Spawn a process within the container. Processes can be of different type.
    """
    if process_id and not is_valid_identifier(process_id, ws_sub='_'):
        raise BadRequest("Given process_id %s is not a valid identifier" % process_id)

    # Generate a new process id if not provided
    # TODO: Ensure it is system-wide unique
    process_id = process_id or "%s.%s" % (self.container.id, self.proc_id_pool.get_id())
    log.debug("ProcManager.spawn_process(name=%s, module.cls=%s.%s, config=%s) as pid=%s", name, module, cls, config, process_id)

    process_cfg = deepcopy(CFG)
    if config:
        # Use provided config. Must be dict or DotDict
        if not isinstance(config, DotDict):
            config = DotDict(config)

        if config.get_safe("process.config_ref"):
            # Use a reference
            config_ref = config.get_safe("process.config_ref")
            log.info("Enhancing new process spawn config from ref=%s" % config_ref)
            matches = re.match(r'^([A-Za-z]+):([A-Za-z0-9]+)/(.+)$', config_ref)
            if matches:
                ref_type, ref_id, ref_ext = matches.groups()
                if ref_type == "resources":
                    if self.container.has_capability(self.container.CCAP.RESOURCE_REGISTRY):
                        try:
                            obj = self.container.resource_registry.read(ref_id)
                            if obj and hasattr(obj, ref_ext):
                                ref_config = getattr(obj, ref_ext)
                                if isinstance(ref_config, dict):
                                    dict_merge(process_cfg, ref_config, inplace=True)
                                else:
                                    raise BadRequest("config_ref %s exists but not dict" % config_ref)
                            else:
                                raise BadRequest("config_ref %s - attribute not found" % config_ref)
                        except NotFound as nf:
                            log.warn("config_ref %s - object not found" % config_ref)
                            raise
                    else:
                        log.error("Container missing RESOURCE_REGISTRY capability to resolve process config ref %s" % config_ref)
                else:
                    raise BadRequest("Unknown reference type in: %s" % config_ref)

        dict_merge(process_cfg, config, inplace=True)

    if self.container.spawn_args:
        # Override config with spawn args
        dict_merge(process_cfg, self.container.spawn_args, inplace=True)

    #log.debug("spawn_process() pid=%s process_cfg=%s", process_id, process_cfg)

    # PROCESS TYPE. Determines basic process context (messaging, service interface)
    # One of the constants defined at the top of this file
    service_cls = named_any("%s.%s" % (module, cls))
    process_type = get_safe(process_cfg, "process.type") or getattr(service_cls, "process_type", "service")

    process_start_mode = get_safe(config, "process.start_mode")

    process_instance = None

    # alert we have a spawning process, but we don't have the instance yet, so give the class instead (more accurate than name)
    self._call_proc_state_changed("%s.%s" % (module, cls), ProcessStateEnum.PENDING)

    try:
        # spawn service by type
        if process_type == SERVICE_PROCESS_TYPE:
            process_instance = self._spawn_service_process(process_id, name, module, cls, process_cfg)
        elif process_type == STREAM_PROCESS_TYPE:
            process_instance = self._spawn_stream_process(process_id, name, module, cls, process_cfg)
        elif process_type == AGENT_PROCESS_TYPE:
            process_instance = self._spawn_agent_process(process_id, name, module, cls, process_cfg)
        elif process_type == STANDALONE_PROCESS_TYPE:
            process_instance = self._spawn_standalone_process(process_id, name, module, cls, process_cfg)
        elif process_type == IMMEDIATE_PROCESS_TYPE:
            process_instance = self._spawn_immediate_process(process_id, name, module, cls, process_cfg)
        elif process_type == SIMPLE_PROCESS_TYPE:
            process_instance = self._spawn_simple_process(process_id, name, module, cls, process_cfg)
        else:
            raise BadRequest("Unknown process type: %s" % process_type)

        process_instance._proc_type = process_type
        self._register_process(process_instance, name)

        process_instance.errcause = "OK"
        log.info("ProcManager.spawn_process: %s.%s -> pid=%s OK", module, cls, process_id)

        if process_type == IMMEDIATE_PROCESS_TYPE:
            log.info('Terminating immediate process: %s', process_instance.id)
            self.terminate_process(process_instance.id)

            # terminate process also triggers TERMINATING/TERMINATED
            self._call_proc_state_changed(process_instance, ProcessStateEnum.EXITED)
#......... the remainder of this method is omitted here .........
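One follow-up note on the lookups above: get_safe appears both as a DotDict method (used on the caller-supplied config) and as a free function (used on the merged process_cfg, and on the object-store document in Example 2). A minimal sketch, assuming the free function is importable from pyon.util.containers as the usage suggests; the "standalone" type and "RESTART" default are illustrative values only:

from pyon.util.containers import DotDict, get_safe

process_cfg = DotDict({"process": {"type": "standalone"}})

# Bound-method form, as used on the provided config object ...
assert process_cfg.get_safe("process.type") == "standalone"
# ... and the free-function form, as used on the merged process_cfg.
assert get_safe(process_cfg, "process.type") == "standalone"
# Missing path -> the given default, mirroring the getattr(..., "service") fallback above.
assert get_safe(process_cfg, "process.start_mode", "RESTART") == "RESTART"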