This article collects typical usage examples of the Python method openquake.baselib.general.AccumDict.bb_dict. If you are wondering what AccumDict.bb_dict does in practice, or how to use it, the hand-picked code examples below may help. You can also read further about the containing class, openquake.baselib.general.AccumDict.
Three code examples of AccumDict.bb_dict are shown below, sorted by popularity by default.
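Before the examples, a minimal sketch of the behaviour they all rely on: AccumDict (from openquake.baselib.general) is a dict subclass that merges dictionaries by summing values per key, and because it is an ordinary class, extra state such as bb_dict, calc_times or eff_ruptures can be attached as plain attributes. The bounding-box placeholder string below is illustrative only, not the openquake BoundingBox object.

from openquake.baselib.general import AccumDict

acc = AccumDict()
acc += {'eff_ruptures': 10}
acc += {'eff_ruptures': 5}   # values sharing a key are summed
print(acc)                   # {'eff_ruptures': 15}

acc.bb_dict = {}             # attribute assignment, as in the examples below
acc.bb_dict[0, 1] = 'placeholder for BoundingBox(lt_model=0, site_id=1)'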
Example 1: zerodict
# Required module import: from openquake.baselib.general import AccumDict [as alias]
# Or: from openquake.baselib.general.AccumDict import bb_dict [as alias]
def zerodict(self):
    """
    Initial accumulator, a dictionary (trt_id, gsim) -> curves
    """
    zc = zero_curves(len(self.sitecol.complete), self.oqparam.imtls)
    zd = AccumDict((key, zc) for key in self.rlzs_assoc)
    zd.calc_times = []
    zd.eff_ruptures = AccumDict()  # trt_id -> eff_ruptures
    zd.bb_dict = {
        (smodel.ordinal, site.id): BoundingBox(smodel.ordinal, site.id)
        for site in self.sitecol
        for smodel in self.csm.source_models
    } if self.oqparam.poes_disagg else {}
    return zd
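The conditional dict comprehension above is easier to read in isolation. The following self-contained sketch rebuilds the same pattern with toy stand-ins (SourceModel, Site and BoundingBox here are namedtuples, not the openquake classes): the mapping is only built when disaggregation PoEs are requested, otherwise bb_dict stays empty.

import collections

# toy stand-ins for the openquake objects used in zerodict (assumption, not the real API)
SourceModel = collections.namedtuple('SourceModel', 'ordinal')
Site = collections.namedtuple('Site', 'id')
BoundingBox = collections.namedtuple('BoundingBox', 'lt_model_id site_id')

source_models = [SourceModel(0), SourceModel(1)]
sitecol = [Site(10), Site(11)]
poes_disagg = [0.1]          # hypothetical disaggregation PoEs; [] disables the mapping

bb_dict = {
    (smodel.ordinal, site.id): BoundingBox(smodel.ordinal, site.id)
    for site in sitecol
    for smodel in source_models
} if poes_disagg else {}

print(sorted(bb_dict))       # [(0, 10), (0, 11), (1, 10), (1, 11)]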
Example 2: zerodict
# Required module import: from openquake.baselib.general import AccumDict [as alias]
# Or: from openquake.baselib.general.AccumDict import bb_dict [as alias]
def zerodict(self):
    """
    Initial accumulator, a dict grp_id -> ProbabilityMap(L, G)
    """
    zd = AccumDict()
    num_levels = len(self.oqparam.imtls.array)
    for grp in self.csm.src_groups:
        num_gsims = len(self.rlzs_assoc.gsims_by_grp_id[grp.id])
        zd[grp.id] = ProbabilityMap(num_levels, num_gsims)
    zd.calc_times = []
    zd.eff_ruptures = AccumDict()  # grp_id -> eff_ruptures
    zd.bb_dict = BBdict()
    if self.oqparam.poes_disagg:
        for sid in self.sitecol.sids:
            for smodel in self.csm.source_models:
                zd.bb_dict[smodel.ordinal, sid] = BoundingBox(
                    smodel.ordinal, sid)
    return zd
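BBdict is not defined in the snippet above; a plausible reading, sketched below under that assumption, is an AccumDict specialised to map (source model ordinal, site id) to a bounding box. The BoundingBox namedtuple is again a toy stand-in, not the openquake class.

import collections
from openquake.baselib.general import AccumDict

BoundingBox = collections.namedtuple('BoundingBox', 'lt_model_id site_id')  # toy stand-in


class BBdict(AccumDict):
    """Sketch only: accumulator mapping (lt_model_id, site_id) -> BoundingBox."""


bb_dict = BBdict()
for sid in (10, 11):                 # stand-in for self.sitecol.sids
    for ordinal in (0, 1):           # stand-in for source model ordinals
        bb_dict[ordinal, sid] = BoundingBox(ordinal, sid)

print(sorted(bb_dict))               # [(0, 10), (0, 11), (1, 10), (1, 11)]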
Example 3: execute
# Required module import: from openquake.baselib.general import AccumDict [as alias]
# Or: from openquake.baselib.general.AccumDict import bb_dict [as alias]
def execute(self):
    """
    Run in parallel `core_task(sources, sitecol, monitor)`, by
    parallelizing on the sources according to their weight and
    tectonic region type.
    """
    monitor = self.monitor.new(self.core_task.__name__)
    monitor.oqparam = oq = self.oqparam
    ucerf_source = self.src_group.sources[0]
    max_dist = oq.maximum_distance[DEFAULT_TRT]
    acc = AccumDict({
        grp_id: ProbabilityMap(len(oq.imtls.array), len(gsims))
        for grp_id, gsims in self.rlzs_assoc.gsims_by_grp_id.items()})
    acc.calc_times = []
    acc.eff_ruptures = AccumDict()  # grp_id -> eff_ruptures
    acc.bb_dict = {}
    if len(self.csm) > 1:
        # when multiple branches, parallelise by branch
        branches = [br.value for br in self.smlt.branches.values()]
        rup_res = parallel.starmap(
            ucerf_classical_hazard_by_branch,
            self.gen_args(branches, ucerf_source, monitor)).submit_all()
    else:
        # single branch
        gsims = self.rlzs_assoc.gsims_by_grp_id[0]
        [(branch_id, branch)] = self.smlt.branches.items()
        branchname = branch.value
        ucerf_source.src_group_id = 0
        ucerf_source.weight = 1
        ucerf_source.nsites = len(self.sitecol)
        self.infos[0, ucerf_source.source_id] = source.SourceInfo(
            ucerf_source)
        logging.info('Getting the background point sources')
        with self.monitor('getting background sources', autoflush=True):
            ucerf_source.build_idx_set()
            background_sids = ucerf_source.get_background_sids(
                self.sitecol, max_dist)
            bckgnd_sources = ucerf_source.get_background_sources(
                background_sids)
        # parallelize on the background sources, small tasks
        args = (bckgnd_sources, self.sitecol, oq.imtls,
                gsims, self.oqparam.truncation_level,
                'SourceSitesFilter', max_dist, (), monitor)
        bg_res = parallel.apply(
            pmap_from_grp, args,
            concurrent_tasks=self.oqparam.concurrent_tasks).submit_all()
        # parallelize by rupture subsets
        tasks = self.oqparam.concurrent_tasks * 2  # they are big tasks
        rup_sets = ucerf_source.get_rupture_indices(branchname)
        rup_res = parallel.apply(
            ucerf_classical_hazard_by_rupture_set,
            (rup_sets, branchname, ucerf_source, self.src_group.id,
             self.sitecol, gsims, monitor),
            concurrent_tasks=tasks).submit_all()
        # compose probabilities from background sources
        for pmap in bg_res:
            acc[0] |= pmap
        self.save_data_transfer(bg_res)
    pmap_by_grp_id = functools.reduce(self.agg_dicts, rup_res, acc)
    with self.monitor('store source_info', autoflush=True):
        self.store_source_info(self.infos)
        self.save_data_transfer(rup_res)
    self.datastore['csm_info'] = self.csm.info
    self.rlzs_assoc = self.csm.info.get_rlzs_assoc(
        functools.partial(self.count_eff_ruptures, pmap_by_grp_id))
    self.datastore['csm_info'] = self.csm.info
    return pmap_by_grp_id
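The key pattern in execute is the final functools.reduce over the task results, with acc (an AccumDict carrying bb_dict, calc_times and eff_ruptures as attributes) as the initial accumulator. The sketch below reproduces that pattern with a toy agg_dicts and plain dict results; both are assumptions for illustration, not the calculator's actual method or task output, and rely only on AccumDict's per-key summing.

import functools
from openquake.baselib.general import AccumDict

def agg_dicts(acc, result):
    # toy aggregation: sum effective-rupture counters and collect timings
    acc.eff_ruptures += result.get('eff_ruptures', {})
    acc.calc_times.extend(result.get('calc_times', []))
    return acc

acc = AccumDict()
acc.calc_times = []
acc.eff_ruptures = AccumDict()      # grp_id -> eff_ruptures
acc.bb_dict = {}

task_results = [                    # hypothetical partial results
    {'eff_ruptures': {0: 3}, 'calc_times': [('src_a', 0.5)]},
    {'eff_ruptures': {0: 2}, 'calc_times': [('src_b', 0.8)]},
]
final = functools.reduce(agg_dicts, task_results, acc)
print(dict(final.eff_ruptures))     # {0: 5}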