本文整理匯總了Python中openquake.commonlib.oqvalidation.OqParam.from_方法的典型用法代碼示例。如果您正苦於以下問題:Python OqParam.from_方法的具體用法?Python OqParam.from_怎麽用?Python OqParam.from_使用的例子?那麽, 這裏精選的方法代碼示例或許可以為您提供幫助。您也可以進一步了解該方法所在類openquake.commonlib.oqvalidation.OqParam
的用法示例。
在下文中一共展示了OqParam.from_方法的15個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於係統推薦出更棒的Python代碼示例。
示例1: show
# 需要導入模塊: from openquake.commonlib.oqvalidation import OqParam [as 別名]
# 或者: from openquake.commonlib.oqvalidation.OqParam import from_ [as 別名]
def show(calc_id, key=None, rlzs=None):
    """
    Show the content of a datastore.

    :param calc_id: numeric calculation ID; if 0, show all calculations
    :param key: key of the datastore
    :param rlzs: flag; if given, print out the realizations in order
    """
    if not calc_id:  # list all calculations found in the data directory
        if not os.path.exists(datastore.DATADIR):
            return
        rows = []
        for calc_id in datastore.get_calc_ids(datastore.DATADIR):
            try:
                oq = OqParam.from_(datastore.DataStore(calc_id).attrs)
                cmode, descr = oq.calculation_mode, oq.description
            except Exception:  # invalid datastore directory: remove it
                # NOTE: was a bare `except:`, which would also swallow
                # KeyboardInterrupt/SystemExit; logging.warn is a
                # deprecated alias of logging.warning
                logging.warning('Removed invalid calculation %d', calc_id)
                shutil.rmtree(os.path.join(
                    datastore.DATADIR, 'calc_%s' % calc_id))
            else:
                rows.append((calc_id, cmode, descr))
        for row in sorted(rows, key=lambda row: row[0]):  # by calc_id
            print('#%d %s: %s' % row)
        return
    ds = datastore.DataStore(calc_id)
    if key:
        if key in datastore.view:
            print(datastore.view(key, ds))
            return
        obj = ds[key]
        if hasattr(obj, 'value'):  # an array
            print(write_csv(io.StringIO(), obj.value))
        else:
            print(obj)
        return
    # no key given: print all keys with their sizes
    oq = OqParam.from_(ds.attrs)
    print(oq.calculation_mode, 'calculation (%r) saved in %s contains:' %
          (oq.description, ds.hdf5path))
    for key in ds:
        print(key, humansize(ds.getsize(key)))

    # this part is experimental and not tested on purpose
    if rlzs and 'curves_by_trt_gsim' in ds:
        min_value = 0.01  # used in rmsep
        curves_by_rlz, mean_curves = combined_curves(ds)
        dists = []
        for rlz in sorted(curves_by_rlz):
            curves = curves_by_rlz[rlz]
            # distance of each realization from the mean curves
            dist = sum(rmsep(mean_curves[imt], curves[imt], min_value)
                       for imt in mean_curves.dtype.fields)
            dists.append((dist, rlz))
        for dist, rlz in sorted(dists):
            print('rlz=%s, rmsep=%s' % (rlz, dist))
示例2: pre_execute
# 需要導入模塊: from openquake.commonlib.oqvalidation import OqParam [as 別名]
# 或者: from openquake.commonlib.oqvalidation.OqParam import from_ [as 別名]
def pre_execute(self):
    """
    Check if there is a pre_calculator or a previous calculation ID.
    If yes, read the inputs by invoking the precalculator or by retrieving
    the previous calculation; if not, read the inputs directly.
    """
    if self.pre_calculator is not None:
        # the parameter hazard_calculation_id is only meaningful if
        # there is a precalculator
        precalc_id = self.oqparam.hazard_calculation_id
        if precalc_id is None:  # recompute everything
            precalc = calculators[self.pre_calculator](
                self.oqparam, self.monitor('precalculator'),
                self.datastore.calc_id)
            precalc.run(clean_up=False)
            if 'scenario' not in self.oqparam.calculation_mode:
                # keep the composite source model computed by the precalc
                self.csm = precalc.csm
        else:  # read previously computed data
            self.datastore.set_parent(datastore.DataStore(precalc_id))
            # update oqparam with the attributes saved in the datastore
            self.oqparam = OqParam.from_(self.datastore.attrs)
            self.read_exposure_sitecol()
    else:  # we are in a basic calculator
        self.read_exposure_sitecol()
        self.read_sources()
    # make sure everything read so far is persisted on disk
    self.datastore.hdf5.flush()
示例3: get_data_transfer
# 需要導入模塊: from openquake.commonlib.oqvalidation import OqParam [as 別名]
# 或者: from openquake.commonlib.oqvalidation.OqParam import from_ [as 別名]
def get_data_transfer(dstore):
    """
    Determine the amount of data transferred from the controller node
    to the workers and back in a classical calculation.

    :param dstore: a :class:`openquake.commonlib.datastore.DataStore` instance
    :returns: (block_info, to_send_forward, to_send_back)
    """
    oqparam = OqParam.from_(dstore.attrs)
    sitecol = dstore['sitecol']
    rlzs_assoc = dstore['rlzs_assoc']
    info = dstore['job_info']
    sources = dstore['composite_source_model'].get_sources()
    # count the GSIMs per tectonic region type (first element of each
    # rlzs_assoc key)
    num_gsims_by_trt = groupby(rlzs_assoc, operator.itemgetter(0),
                               lambda group: sum(1 for row in group))
    gsims_assoc = rlzs_assoc.gsims_by_trt_id
    to_send_forward = 0
    to_send_back = 0
    block_info = []
    # blocks are balanced by source weight and grouped by trt_model_id
    for block in split_in_blocks(sources, oqparam.concurrent_tasks or 1,
                                 operator.attrgetter('weight'),
                                 operator.attrgetter('trt_model_id')):
        num_gsims = num_gsims_by_trt.get(block[0].trt_model_id, 0)
        # data received back: one float per (site, level, imt, gsim)
        back = info['n_sites'] * info['n_levels'] * info['n_imts'] * num_gsims
        to_send_back += back * 8  # 8 bytes per float
        args = (block, sitecol, gsims_assoc, PerformanceMonitor(''))
        # data sent forward: size of the pickled task arguments
        to_send_forward += sum(len(p) for p in parallel.pickle_sequence(args))
        block_info.append((len(block), block.weight))
    return numpy.array(block_info, block_dt), to_send_forward, to_send_back
示例4: export_ses_xml
# 需要導入模塊: from openquake.commonlib.oqvalidation import OqParam [as 別名]
# 或者: from openquake.commonlib.oqvalidation.OqParam import from_ [as 別名]
def export_ses_xml(ekey, dstore):
    """
    Export the stochastic event set collections, one file per source
    model path and tectonic region type model.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: the list of the exported file names (empty for scenario
        calculators, which have no csm_info)
    """
    fmt = ekey[-1]
    oq = OqParam.from_(dstore.attrs)
    try:
        csm_info = dstore['rlzs_assoc'].csm_info
    except AttributeError:  # for scenario calculators don't export
        return []
    sescollection = dstore['sescollection']
    col_id = 0
    fnames = []
    for sm in csm_info.source_models:
        for trt_model in sm.trt_models:
            sesruptures = list(sescollection[col_id].values())
            col_id += 1
            # group the ruptures by stochastic event set index
            ses_coll = SESCollection(
                groupby(sesruptures, operator.attrgetter('ses_idx')),
                sm.path, oq.investigation_time)
            smpath = '_'.join(sm.path)
            fname = 'ses-%d-smltp_%s.%s' % (trt_model.id, smpath, fmt)
            dest = os.path.join(dstore.export_dir, fname)
            # dispatch to _export_ses_xml / _export_ses_<fmt>
            globals()['_export_ses_' + fmt](dest, ses_coll)
            # reuse `dest` instead of recomputing the same os.path.join
            fnames.append(dest)
    return fnames
示例5: export_gmf
# 需要導入模塊: from openquake.commonlib.oqvalidation import OqParam [as 別名]
# 或者: from openquake.commonlib.oqvalidation.OqParam import from_ [as 別名]
def export_gmf(ekey, dstore):
    """
    Export the ground motion fields, one file per realization.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: the list of the exported file names
    """
    sitecol = dstore['sitecol']
    rlzs_assoc = dstore['rlzs_assoc']
    rupture_by_tag = sum(dstore['sescollection'], AccumDict())
    all_tags = dstore['tags'].value
    oq = OqParam.from_(dstore.attrs)
    # scenario calculations have no investigation time
    investigation_time = (None if oq.calculation_mode == 'scenario'
                          else oq.investigation_time)
    samples = oq.number_of_logic_tree_samples
    fmt = ekey[-1]
    gmfs = dstore[ekey[0]]
    nbytes = gmfs.attrs['nbytes']
    logging.info('Internal size of the GMFs: %s', humansize(nbytes))
    if nbytes > GMF_MAX_SIZE:
        # logging.warn is a deprecated alias of logging.warning
        logging.warning(GMF_WARNING, dstore.hdf5path)
    fnames = []
    for rlz, gmf_by_idx in zip(
            rlzs_assoc.realizations, rlzs_assoc.combine_gmfs(gmfs)):
        tags = all_tags[list(gmf_by_idx)]
        # NOTE: the original rebound the name `gmfs` here, shadowing the
        # dataset read above; use a distinct name to avoid confusion
        # (safe: combine_gmfs was already evaluated before the loop)
        gmf_list = list(gmf_by_idx.values())
        if not gmf_list:
            continue
        ruptures = [rupture_by_tag[tag] for tag in tags]
        fname = build_name(dstore, rlz, 'gmf', fmt, samples)
        fnames.append(fname)
        # dispatch to export_gmf_xml / export_gmf_<fmt>
        globals()['export_gmf_%s' % fmt](
            ('gmf', fmt), fname, sitecol,
            ruptures, gmf_list, rlz, investigation_time)
    return fnames
示例6: export_hcurves_xml_json
# 需要導入模塊: from openquake.commonlib.oqvalidation import OqParam [as 別名]
# 或者: from openquake.commonlib.oqvalidation.OqParam import from_ [as 別名]
def export_hcurves_xml_json(ekey, dstore):
    """
    Export the hazard curves in XML or GeoJSON format, one file per
    realization kind and per IMT.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: the sorted list of the exported file names
    """
    export_type = ekey[1]
    ext_len = len(export_type) + 1  # extension length, dot included
    oq = OqParam.from_(dstore.attrs)
    sitemesh = dstore['sitemesh'].value
    rlzs_assoc = dstore['rlzs_assoc']
    # pick the writer class matching the requested export format
    if export_type == 'geojson':
        writercls = hazard_writers.HazardCurveGeoJSONWriter
    else:
        writercls = hazard_writers.HazardCurveXMLWriter
    realizations = iter(rlzs_assoc.realizations)
    fnames = []
    for kind, curves in dstore[ekey[0]].items():
        # one realization per curve group, consumed in order
        rlz = next(realizations)
        name = hazard_curve_name(
            dstore, ekey, kind, rlzs_assoc, oq.number_of_logic_tree_samples)
        base = name[:-ext_len]  # strip the extension from the base name
        for imt in oq.imtls:
            fname = '%s-%s.%s' % (base, imt, export_type)
            data = [HazardCurve(Location(point), poes[imt])
                    for point, poes in zip(sitemesh, curves)]
            writer = writercls(fname, investigation_time=oq.investigation_time,
                               imls=oq.imtls[imt],
                               smlt_path='_'.join(rlz.sm_lt_path),
                               gsimlt_path=rlz.gsim_rlz.uid)
            writer.serialize(data)
            fnames.append(fname)
    return sorted(fnames)
示例7: export_agg_curve
# 需要導入模塊: from openquake.commonlib.oqvalidation import OqParam [as 別名]
# 或者: from openquake.commonlib.oqvalidation.OqParam import from_ [as 別名]
def export_agg_curve(ekey, dstore):
    """
    Export the aggregate loss curves in XML, one file per realization,
    loss type and (when present) insurance flag.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: the sorted list of the exported file names
    """
    oq = OqParam.from_(dstore.attrs)
    cost_types = dstore['cost_types']
    rlzs = dstore['rlzs_assoc'].realizations
    agg_curve = dstore[ekey[0]]
    num_loss_types = len(cost_types)
    num_rlzs = len(rlzs)
    fnames = []
    for cost_type in cost_types:
        loss_type = cost_type['name']
        data = agg_curve[loss_type].value
        # with insured losses export both ground (ins=0) and insured (ins=1)
        for ins in range(oq.insured_losses + 1):
            for rlz in rlzs:
                # disambiguate the file name only when needed
                if num_loss_types == 1 and num_rlzs == 1:
                    suffix = ''
                else:
                    suffix = '-gsimltp_%s_%s' % (rlz.uid, loss_type)
                dest = dstore.export_path('agg_curve%s%s.%s' % (
                    suffix, '_ins' if ins else '', ekey[1]))
                rec = data[rlz.ordinal, ins]
                curve = AggCurve(rec['losses'], rec['poes'], rec['avg'], None)
                writer = risk_writers.AggregateLossCurveXMLWriter(
                    dest, oq.investigation_time, loss_type,
                    source_model_tree_path='_'.join(rlz.sm_lt_path),
                    gsim_tree_path='_'.join(rlz.gsim_lt_path),
                    unit=cost_type['unit'])
                writer.serialize(curve)
                fnames.append(dest)
    return sorted(fnames)
示例8: get_oqparam
# 需要導入模塊: from openquake.commonlib.oqvalidation import OqParam [as 別名]
# 或者: from openquake.commonlib.oqvalidation.OqParam import from_ [as 別名]
def get_oqparam(self):
    """
    Return an OqParam object as read from the database.
    """
    # the datastore directory is the parent of ds_calc_dir
    dstore = datastore.read(
        self.id, datadir=os.path.dirname(self.ds_calc_dir))
    return OqParam.from_(dstore.attrs)
示例9: view_params
# 需要導入模塊: from openquake.commonlib.oqvalidation import OqParam [as 別名]
# 或者: from openquake.commonlib.oqvalidation.OqParam import from_ [as 別名]
def view_params(token, dstore):
    """
    Display the most relevant calculation parameters as an RST table
    of (parameter name, value) pairs.
    """
    oq = OqParam.from_(dstore.attrs)
    names = ['calculation_mode', 'number_of_logic_tree_samples',
             'maximum_distance', 'investigation_time',
             'ses_per_logic_tree_path', 'truncation_level',
             'rupture_mesh_spacing', 'complex_fault_mesh_spacing',
             'width_of_mfd_bin', 'area_source_discretization',
             'random_seed', 'master_seed', 'concurrent_tasks']
    rows = []
    for name in names:
        rows.append((name, getattr(oq, name)))
    return rst_table(rows)
示例10: view_inputs
# 需要導入模塊: from openquake.commonlib.oqvalidation import OqParam [as 別名]
# 或者: from openquake.commonlib.oqvalidation.OqParam import from_ [as 別名]
def view_inputs(token, dstore):
    """
    Display the input files of the calculation as an RST table with
    columns (Name, File).
    """
    inputs = OqParam.from_(dstore.attrs).inputs.copy()
    # 'source' is a list of files and is missing in scenario calculations;
    # pop it so the remaining items are simple (name, file) pairs
    source_models = [('source', fname)
                     for fname in inputs.pop('source', [])]
    return rst_table(
        build_links(list(inputs.items()) + source_models),
        header=['Name', 'File'])
示例11: view_gmfs_total_size
# 需要導入模塊: from openquake.commonlib.oqvalidation import OqParam [as 別名]
# 或者: from openquake.commonlib.oqvalidation.OqParam import from_ [as 別名]
def view_gmfs_total_size(name, dstore):
    """
    :returns:
        the total size of the GMFs as human readable string; it assumes
        4 bytes for the rupture index, 4 bytes for the realization index
        and 8 bytes for each float (there are num_imts floats per gmf)
    """
    num_imts = len(OqParam.from_(dstore.attrs).imtls)
    # 8 bytes per gmf record field: num_imts floats plus the two 4-byte
    # indices (rupture + realization), i.e. num_imts + 1 extra 8 bytes
    nbytes = sum(8 * counts['gmf'] * (num_imts + 1)
                 for counts in dstore['counts_per_rlz'])
    return humansize(nbytes)
示例12: avglosses_data_transfer
# 需要導入模塊: from openquake.commonlib.oqvalidation import OqParam [as 別名]
# 或者: from openquake.commonlib.oqvalidation.OqParam import from_ [as 別名]
def avglosses_data_transfer(token, dstore):
    """
    Determine the amount of average losses transferred from the workers to the
    controller node in a risk calculation.
    """
    oq = OqParam.from_(dstore.attrs)
    num_assets = len(dstore['assetcol'])
    num_rlzs = len(dstore['rlzs_assoc'].realizations)
    num_loss_types = len(dstore['riskmodel'].loss_types)
    num_tasks = oq.concurrent_tasks
    # two 8-byte floats per (asset, rlz, loss type) per task: loss, ins_loss
    size_bytes = num_assets * num_rlzs * num_loss_types * 2 * 8 * num_tasks
    return ('%d asset(s) x %d realization(s) x %d loss type(s) x 2 losses x '
            '8 bytes x %d tasks = %s' % (
                num_assets, num_rlzs, num_loss_types, num_tasks,
                humansize(size_bytes)))
示例13: read_exposure_sitecol
# 需要導入模塊: from openquake.commonlib.oqvalidation import OqParam [as 別名]
# 或者: from openquake.commonlib.oqvalidation.OqParam import from_ [as 別名]
def read_exposure_sitecol(self):
    """
    Read the exposure (if any) and then the site collection, possibly
    extracted from the exposure.
    """
    logging.info('Reading the site collection')
    with self.monitor('reading site collection', autoflush=True):
        haz_sitecol = readinput.get_site_collection(self.oqparam)
    inputs = self.oqparam.inputs
    if 'exposure' in inputs:
        # the exposure defines the assets and the sites they sit on
        logging.info('Reading the exposure')
        with self.monitor('reading exposure', autoflush=True):
            self.exposure = readinput.get_exposure(self.oqparam)
            self.sitecol, self.assets_by_site = (
                readinput.get_sitecol_assets(self.oqparam, self.exposure))
            if len(self.exposure.cost_types):
                self.cost_types = self.exposure.cost_types
            # fixed-width bytestrings so they can be stored in HDF5
            self.taxonomies = numpy.array(
                sorted(self.exposure.taxonomies), '|S100')
        num_assets = self.count_assets()
        if self.datastore.parent:
            # prefer the hazard sites of the parent calculation
            haz_sitecol = self.datastore.parent['sitecol']
        if haz_sitecol is not None and haz_sitecol != self.sitecol:
            # associate the assets to the hazard sites; assets falling
            # outside are discarded
            with self.monitor('assoc_assets_sites'):
                self.sitecol, self.assets_by_site = \
                    self.assoc_assets_sites(haz_sitecol.complete)
            ok_assets = self.count_assets()
            num_sites = len(self.sitecol)
            logging.warn('Associated %d assets to %d sites, %d discarded',
                         ok_assets, num_sites, num_assets - ok_assets)
    elif (self.datastore.parent and 'exposure' in
          OqParam.from_(self.datastore.parent.attrs).inputs):
        # the parent calculation already imported the exposure
        logging.info('Re-using the already imported exposure')
    else:  # no exposure
        self.sitecol = haz_sitecol

    # save mesh and asset collection
    self.save_mesh()
    if hasattr(self, 'assets_by_site'):
        self.assetcol = riskinput.build_asset_collection(
            self.assets_by_site, self.oqparam.time_event)
        # validate the user-provided specific_assets against the exposure
        spec = set(self.oqparam.specific_assets)
        unknown = spec - set(self.assetcol['asset_ref'])
        if unknown:
            raise ValueError('The specific asset(s) %s are not in the '
                             'exposure' % ', '.join(unknown))
示例14: export_avg_losses_stats
# 需要導入模塊: from openquake.commonlib.oqvalidation import OqParam [as 別名]
# 或者: from openquake.commonlib.oqvalidation.OqParam import from_ [as 別名]
def export_avg_losses_stats(ekey, dstore):
    """
    Export the average loss statistics as CSV, one file per statistic.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: the list of the exported file names
    """
    oq = OqParam.from_(dstore.attrs)
    avg_losses = dstore[ekey[0]]
    assets = get_assets(dstore)
    # the first column is always the mean, then one per quantile
    stats = ['mean'] + ['quantile-%s' % q for q in oq.quantile_loss_curves]
    fnames = []
    for idx, stat in enumerate(stats):
        dest = dstore.export_path('avg_losses-%s.csv' % stat)
        data = compose_arrays(assets, avg_losses[:, idx])
        writers.write_csv(dest, data, fmt='%10.6E')
        fnames.append(dest)
    return fnames
示例15: _print_info
# 需要導入模塊: from openquake.commonlib.oqvalidation import OqParam [as 別名]
# 或者: from openquake.commonlib.oqvalidation.OqParam import from_ [as 別名]
def _print_info(dstore, filtersources=True, weightsources=True):
    """
    Print a summary of a calculation: the composite source model info,
    the realization association and, optionally, the job info fields.
    """
    assoc = dstore["rlzs_assoc"]
    oqparam = OqParam.from_(dstore.attrs)
    csm = dstore["composite_source_model"]
    sitecol = dstore["sitecol"]
    print(csm.get_info())
    print("See https://github.com/gem/oq-risklib/blob/master/doc/"
          "effective-realizations.rst for an explanation")
    print(assoc)
    if filtersources or weightsources:
        [info] = readinput.get_job_info(oqparam, csm, sitecol)
        info["n_sources"] = csm.get_num_sources()
        # 8 bytes per float in the (sites x levels x imts x rlzs) matrix
        curve_matrix_size = (info["n_sites"] * info["n_levels"] *
                             info["n_imts"] * len(assoc) * 8)
        for field in info.dtype.fields:
            # input_weight is only shown when weightsources is set
            if field == "input_weight" and not weightsources:
                continue
            print(field, info[field])
        print("curve_matrix_size", humansize(curve_matrix_size))
    if "num_ruptures" in dstore:
        print(datastore.view("rupture_collections", dstore))