

Python general.AccumDict Class Code Examples

This article collects typical usage examples of the Python class openquake.baselib.general.AccumDict. If you have been wondering what exactly AccumDict is for, how to use it, or what real code using it looks like, the curated class examples below should help.


Fifteen code examples of the AccumDict class are shown below, sorted by popularity by default. Upvote the ones you find useful; your feedback helps the site recommend better Python code examples.
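Before looking at the examples, here is a minimal sketch of the AccumDict behaviour these snippets rely on most: `+` and `+=` merge two dicts by combining the values stored under the same key with `+`, and, being an ordinary dict subclass, an AccumDict also accepts ad-hoc attributes (calc_times, eff_ruptures, ...) for carrying side information. The snippet below is illustrative only; it is inferred from the usage visible in the examples rather than taken from the library documentation.

from openquake.baselib.general import AccumDict

acc = AccumDict()
acc += {'a': 1, 'b': 2}
acc += {'a': 10}              # values under the same key are added: 'a' -> 11
acc += {'c': [1]}
acc += {'c': [2, 3]}          # list values concatenate: 'c' -> [1, 2, 3]
print(acc)                    # {'a': 11, 'b': 2, 'c': [1, 2, 3]}

# like any dict subclass, it can also carry extra attributes
acc.calc_times = []
acc.calc_times.append(('src_1', 0.25))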

Example 1: read

 def read(cls, dstore):
     """
     :param dstore: a DataStore instance
     :returns: a :class:`CompositeRiskModel` instance
     """
     oqparam = dstore['oqparam']
     crm = dstore.getitem('risk_model')
     riskdict = AccumDict(accum={})
     riskdict.limit_states = crm.attrs['limit_states']
     for quoted_id, rm in crm.items():
         riskid = unquote_plus(quoted_id)
         for lt_kind in rm:
             lt, kind = lt_kind.rsplit('-', 1)
             rf = dstore['risk_model/%s/%s' % (quoted_id, lt_kind)]
             if kind == 'consequence':
                 riskdict[riskid][lt, kind] = rf
             elif kind == 'fragility':  # rf is a FragilityFunctionList
                 try:
                     rf = rf.build(
                         riskdict.limit_states,
                         oqparam.continuous_fragility_discretization,
                         oqparam.steps_per_interval)
                 except ValueError as err:
                     raise ValueError('%s: %s' % (riskid, err))
                 riskdict[riskid][lt, kind] = rf
             else:  # rf is a vulnerability function
                 rf.init()
                 if lt.endswith('_retrofitted'):
                     # strip _retrofitted, since len('_retrofitted') = 12
                     riskdict[riskid][
                         lt[:-12], 'vulnerability_retrofitted'] = rf
                 else:
                     riskdict[riskid][lt, 'vulnerability'] = rf
     return CompositeRiskModel(oqparam, riskdict)
Developer: digitalsatori | Project: oq-engine | Lines: 34 | Source: riskinput.py
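Example 1 builds its nested mapping through `AccumDict(accum={})`: the `accum` argument works like the factory of a `collections.defaultdict`, so looking up a missing key creates a fresh copy of the given accumulator. A small illustrative sketch of the two variants used in Examples 1 and 2 follows; the keys below ('tax1', 'src_a', ...) are made-up placeholders.

from openquake.baselib.general import AccumDict

# accum={}: a missing key yields a new empty dict, so two-level
# assignments like riskdict[riskid][lt, kind] = rf just work
riskdict = AccumDict(accum={})
riskdict['tax1']['structural', 'vulnerability'] = 'rf placeholder'

# accum=[]: a missing key yields a new empty list, so sources can be
# appended or extended per tectonic region type as in Example 2
acc = AccumDict(accum=[])
acc['Active Shallow Crust'].extend(['src_a', 'src_b'])
acc['Stable Continental Crust'].append('src_c')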

Example 2: get_trt_sources

 def get_trt_sources(self, optimize_same_id=None):
     """
     :returns: a list of pairs [(trt, group of sources)]
     """
     atomic = []
     acc = AccumDict(accum=[])
     for sm in self.source_models:
         for grp in sm.src_groups:
             if grp and grp.atomic:
                 atomic.append((grp.trt, grp))
             elif grp:
                 acc[grp.trt].extend(grp)
     if optimize_same_id is None:
         optimize_same_id = self.optimize_same_id
     if optimize_same_id is False:
         return atomic + list(acc.items())
     # extract a single source from multiple sources with the same ID
     n = 0
     tot = 0
     dic = {}
     for trt in acc:
         dic[trt] = []
         for grp in groupby(acc[trt], lambda x: x.source_id).values():
             src = grp[0]
             n += 1
             tot += len(grp)
             # src.src_group_id can be a list if get_sources_by_trt was
             # called before
             if len(grp) > 1 and not isinstance(src.src_group_id, list):
                 src.src_group_id = [s.src_group_id for s in grp]
             dic[trt].append(src)
     if n < tot:
         logging.info('Reduced %d sources to %d sources with unique IDs',
                      tot, n)
     return atomic + list(dic.items())
Developer: digitalsatori | Project: oq-engine | Lines: 35 | Source: source.py

Example 3: get_mesh_hcurves

def get_mesh_hcurves(oqparam):
    """
    Read CSV data in the format `lon lat, v1-vN, w1-wN, ...`.

    :param oqparam:
        an :class:`openquake.commonlib.oqvalidation.OqParam` instance
    :returns:
        the mesh of points and the data as a dictionary
        imt -> array of curves for each site
    """
    imtls = oqparam.imtls
    lon_lats = set()
    data = AccumDict()  # imt -> list of arrays
    ncols = len(imtls) + 1  # lon_lat + curve_per_imt ...
    csvfile = oqparam.inputs["hazard_curves"]
    for line, row in enumerate(csv.reader(csvfile), 1):
        try:
            if len(row) != ncols:
                raise ValueError("Expected %d columns, found %d" % ncols, len(row))
            x, y = row[0].split()
            lon_lat = valid.longitude(x), valid.latitude(y)
            if lon_lat in lon_lats:
                raise DuplicatedPoint(lon_lat)
            lon_lats.add(lon_lat)
            for i, imt_ in enumerate(imtls, 1):
                values = valid.decreasing_probabilities(row[i])
                if len(values) != len(imtls[imt_]):
                    raise ValueError("Found %d values, expected %d" % (len(values), len(imtls([imt_]))))
                data += {imt_: [numpy.array(values)]}
        except (ValueError, DuplicatedPoint) as err:
            raise err.__class__("%s: file %s, line %d" % (err, csvfile, line))
    lons, lats = zip(*sorted(lon_lats))
    mesh = geo.Mesh(numpy.array(lons), numpy.array(lats))
    return mesh, {imt: numpy.array(lst) for imt, lst in data.items()}
Developer: gem | Project: oq-risklib | Lines: 34 | Source: readinput.py
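A detail worth noting in Example 3: each curve is wrapped in a one-element list before the `data += {imt_: [numpy.array(values)]}` update. Because `+=` combines values under the same key with `+`, the per-IMT lists grow by concatenation, one curve per site. A tiny sketch with made-up numbers:

from openquake.baselib.general import AccumDict

data = AccumDict()                   # imt -> list of curves
data += {'PGA': [[0.10, 0.05]]}      # curve for the first site
data += {'PGA': [[0.20, 0.08]]}      # curve for the second site
# data['PGA'] is now [[0.10, 0.05], [0.20, 0.08]], ready for numpy.array()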

Example 4: compute_ruptures

def compute_ruptures(branch_info, ucerf, sitecol, oqparam, monitor):
    """
    Returns the ruptures as a TRT set

    :param branch_info:
        Tuple of (ltbrid, branch_id, branch_weight)
    :param ucerf:
        Instance of the UCERFSESControl object
    :param sitecol:
        Site collection :class:`openquake.hazardlib.site.SiteCollection`
    :param oqparam:
        Instance of :class:`openquake.commonlib.oqvalidation.OqParam`
    :param monitor:
        Instance of :class:`openquake.baselib.performance.Monitor`
    :returns:
        Dictionary of rupture instances associated to a TRT ID
    """
    integration_distance = oqparam.maximum_distance[DEFAULT_TRT]
    res = AccumDict()
    res.calc_times = AccumDict()
    serial = 1
    filter_mon = monitor('update_background_site_filter', measuremem=False)
    event_mon = monitor('sampling ruptures', measuremem=False)
    for src_group_id, (ltbrid, branch_id, _) in enumerate(branch_info):
        t0 = time.time()
        with filter_mon:
            ucerf.update_background_site_filter(sitecol, integration_distance)

        # set the seed before calling generate_event_set
        numpy.random.seed(oqparam.random_seed + src_group_id)
        ses_ruptures = []
        for ses_idx in range(1, oqparam.ses_per_logic_tree_path + 1):
            with event_mon:
                rups, n_occs = ucerf.generate_event_set(
                    branch_id, sitecol, integration_distance)
            for i, rup in enumerate(rups):
                rup.seed = oqparam.random_seed  # to think
                rrup = rup.surface.get_min_distance(sitecol.mesh)
                r_sites = sitecol.filter(rrup <= integration_distance)
                if r_sites is None:
                    continue
                indices = r_sites.indices
                events = []
                for j in range(n_occs[i]):
                    # NB: the first 0 is a placeholder for the eid that will be
                    # set later, in EventBasedRuptureCalculator.post_execute;
                    # the second 0 is the sampling ID
                    events.append((0, ses_idx, j, 0))
                if len(events):
                    ses_ruptures.append(
                        event_based.EBRupture(
                            rup, indices,
                            numpy.array(events, event_based.event_dt),
                            ucerf.source_id, src_group_id, serial))
                    serial += 1
        dt = time.time() - t0
        res.calc_times[src_group_id] = (ltbrid, dt)
        res[src_group_id] = ses_ruptures
    res.trt = DEFAULT_TRT
    return res
Developer: cigdemyilmaz88 | Project: oq-engine | Lines: 59 | Source: ucerf_event_based.py

Example 5: zerodict

 def zerodict(self):
     """
     Initial accumulator, a dictionary (grp_id, gsim) -> curves
     """
     zd = AccumDict()
     zd.calc_times = []
     zd.eff_ruptures = AccumDict()
     return zd
Developer: tahirazeem | Project: oq-engine | Lines: 8 | Source: event_based.py
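`zerodict` returns the initial accumulator into which partial results from worker tasks are folded. The engine's actual aggregation code is not shown on this page; the hypothetical callback below only illustrates how the calc_times and eff_ruptures attributes of such an accumulator would typically be updated.

def agg(acc, result):
    # 'result' is assumed to be an AccumDict shaped like the zerodict above
    acc.calc_times.extend(result.calc_times)
    acc.eff_ruptures += result.eff_ruptures  # per-group rupture counts add up
    # combining the (grp_id, gsim) -> curves entries is calculator-specific
    return acc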

Example 6: build_starmap

    def build_starmap(self, ssm, sitecol, assetcol, riskmodel, imts,
                      trunc_level, correl_model, min_iml, monitor):
        """
        :param ssm: CompositeSourceModel containing a single source model
        :param sitecol: a SiteCollection instance
        :param assetcol: an AssetCollection instance
        :param riskmodel: a RiskModel instance
        :param imts: a list of Intensity Measure Types
        :param trunc_level: truncation level
        :param correl_model: correlation model
        :param min_iml: vector of minimum intensities, one per IMT
        :param monitor: a Monitor instance
        :returns: a pair (starmap, dictionary)
        """
        ruptures_by_grp = AccumDict()
        num_ruptures = 0
        num_events = 0
        allargs = []
        grp_trt = {}
        # collect the sources
        maxweight = ssm.get_maxweight(self.oqparam.concurrent_tasks)
        logging.info('Using a maxweight of %d', maxweight)
        for src_group in ssm.src_groups:
            grp_trt[src_group.id] = trt = src_group.trt
            gsims = ssm.gsim_lt.values[trt]
            for block in block_splitter(src_group, maxweight, getweight):
                allargs.append((block, self.sitecol, gsims, monitor))
        # collect the ruptures
        for dic in parallel.starmap(self.compute_ruptures, allargs):
            ruptures_by_grp += dic
            [rupts] = dic.values()
            num_ruptures += len(rupts)
            num_events += dic.num_events
        ruptures_by_grp.num_events = num_events
        save_ruptures(self, ruptures_by_grp)

        # determine the realizations
        rlzs_assoc = ssm.info.get_rlzs_assoc(
            count_ruptures=lambda grp: len(ruptures_by_grp.get(grp.id, [])))
        allargs = []
        # prepare the risk inputs
        ruptures_per_block = self.oqparam.ruptures_per_block
        for src_group in ssm.src_groups:
            for rupts in block_splitter(
                    ruptures_by_grp[src_group.id], ruptures_per_block):
                trt = grp_trt[rupts[0].grp_id]
                ri = riskinput.RiskInputFromRuptures(
                    trt, imts, sitecol, rupts, trunc_level,
                    correl_model, min_iml)
                allargs.append((ri, riskmodel, rlzs_assoc, assetcol, monitor))
        taskname = '%s#%d' % (losses_by_taxonomy.__name__, ssm.sm_id + 1)
        smap = starmap(losses_by_taxonomy, allargs, name=taskname)
        attrs = dict(num_ruptures={
            sg_id: len(rupts) for sg_id, rupts in ruptures_by_grp.items()},
                     num_events=num_events,
                     num_rlzs=len(rlzs_assoc.realizations),
                     sm_id=ssm.sm_id)
        return smap, attrs
Developer: rcgee | Project: oq-engine | Lines: 58 | Source: event_based_risk.py

Example 7: ucerf_classical_hazard_by_branch

def ucerf_classical_hazard_by_branch(branchnames, ucerf_source, src_group_id,
                                     src_filter, gsims, monitor):
    """
    :param branchnames:
        a list of branch names
    :param ucerf_source:
        a source-like object for the UCERF model
    :param src_group_id:
        an ordinal number for the source
    :param src_filter:
        a filter returning the sites affected by the source
    :param gsims:
        a list of GSIMs
    :param monitor:
        a monitor instance
    :returns:
        an AccumDict rlz -> curves
    """
    truncation_level = monitor.oqparam.truncation_level
    imtls = monitor.oqparam.imtls
    trt = ucerf_source.tectonic_region_type
    max_dist = monitor.oqparam.maximum_distance[trt]
    dic = AccumDict()
    dic.bbs = []
    dic.calc_times = []
    for branchname in branchnames:
        # Two step process here - the first generates the hazard curves from
        # the rupture sets
        monitor.eff_ruptures = 0
        # Apply the initial rupture to site filtering
        rupset_idx = ucerf_source.get_rupture_indices(branchname)
        rupset_idx, s_sites = \
            ucerf_source.filter_sites_by_distance_from_rupture_set(
                rupset_idx, src_filter.sitecol, max_dist)

        if len(s_sites):
            dic[src_group_id] = hazard_curves_per_rupture_subset(
                rupset_idx, ucerf_source, src_filter, imtls, gsims,
                truncation_level, bbs=dic.bbs, monitor=monitor)

        else:
            dic[src_group_id] = ProbabilityMap(len(imtls.array), len(gsims))
        dic.calc_times += monitor.calc_times  # added by pmap_from_grp
        dic.eff_ruptures = {src_group_id: monitor.eff_ruptures}  # idem
        logging.info('Branch %s', branchname)
        # Get the background point sources
        background_sids = ucerf_source.get_background_sids(
            src_filter.sitecol, max_dist)
        bckgnd_sources = ucerf_source.get_background_sources(background_sids)
        if bckgnd_sources:
            pmap = pmap_from_grp(
                bckgnd_sources, src_filter, imtls, gsims, truncation_level,
                bbs=dic.bbs, monitor=monitor)
            dic[src_group_id] |= pmap
            dic.eff_ruptures[src_group_id] += monitor.eff_ruptures
            dic.calc_times += monitor.calc_times
    return dic
Developer: not listed | Project: not listed | Lines: 57 | Source: not listed

Example 8: zerodict

 def zerodict(self):
     """
     Initial accumulator, a dictionary trt_model_id -> list of ruptures
     """
     smodels = self.rlzs_assoc.csm_info.source_models
     zd = AccumDict((tm.id, []) for smodel in smodels
                    for tm in smodel.trt_models)
     zd.calc_times = []
     return zd
Developer: amirj700 | Project: oq-risklib | Lines: 9 | Source: event_based.py

Example 9: zerodict

 def zerodict(self):
     """
     Initial accumulator, a dictionary (grp_id, gsim) -> curves
     """
     zd = AccumDict()
     zd.calc_times = []
     zd.eff_ruptures = AccumDict()
     self.eid = collections.Counter()  # sm_id -> event_id
     self.sm_by_grp = self.csm.info.get_sm_by_grp()
     return zd
Developer: rcgee | Project: oq-engine | Lines: 10 | Source: event_based.py

Example 10: count_eff_ruptures

def count_eff_ruptures(sources, sitecol, gsims, monitor):
    """
    Count the number of ruptures contained in the given sources and return
    a dictionary src_group_id -> num_ruptures. All sources belong to the
    same tectonic region type.
    """
    grp_id = sources[0].src_group_id
    acc = AccumDict({grp_id: {}})
    acc.eff_ruptures = {grp_id: sum(src.num_ruptures for src in sources)}
    return acc
Developer: rcgee | Project: oq-engine | Lines: 10 | Source: reportwriter.py

Example 11: count_eff_ruptures

def count_eff_ruptures(sources, sitecol, siteidx, rlzs_assoc, monitor):
    """
    Count the number of ruptures contained in the given sources and return
    a dictionary trt_model_id -> num_ruptures. All sources belong to the
    same tectonic region type.
    """
    acc = AccumDict()
    acc.eff_ruptures = {sources[0].trt_model_id:
                        sum(src.num_ruptures for src in sources)}
    return acc
Developer: nooperpudd | Project: oq-engine | Lines: 10 | Source: source.py

Example 12: combine

    def combine(self, results, agg=agg_prob):
        """
        :param results: a dictionary (trt_model_id, gsim_no) -> floats
        :param agg: an aggregation function
        :returns: a dictionary rlz -> aggregated floats

        Example: a case with tectonic region type T1 with GSIMS A, B, C
        and tectonic region type T2 with GSIMS D, E.

        >> assoc = RlzsAssoc(CompositionInfo([], []))
        >> assoc.rlzs_assoc = {
        ... ('T1', 'A'): ['r0', 'r1'],
        ... ('T1', 'B'): ['r2', 'r3'],
        ... ('T1', 'C'): ['r4', 'r5'],
        ... ('T2', 'D'): ['r0', 'r2', 'r4'],
        ... ('T2', 'E'): ['r1', 'r3', 'r5']}
        ...
        >> results = {
        ... ('T1', 'A'): 0.01,
        ... ('T1', 'B'): 0.02,
        ... ('T1', 'C'): 0.03,
        ... ('T2', 'D'): 0.04,
        ... ('T2', 'E'): 0.05,}
        ...
        >> combinations = assoc.combine(results, operator.add)
        >> for key, value in sorted(combinations.items()): print(key, value)
        r0 0.05
        r1 0.06
        r2 0.06
        r3 0.07
        r4 0.07
        r5 0.08

        You can check that all the possible sums are performed:

        r0: 0.01 + 0.04 (T1A + T2D)
        r1: 0.01 + 0.05 (T1A + T2E)
        r2: 0.02 + 0.04 (T1B + T2D)
        r3: 0.02 + 0.05 (T1B + T2E)
        r4: 0.03 + 0.04 (T1C + T2D)
        r5: 0.03 + 0.05 (T1C + T2E)

        In reality, the `combine_curves` method is used with hazard_curves and
        the aggregation function is the `agg_curves` function, a composition of
        probability, which however is close to the sum for small probabilities.
        """
        ad = AccumDict()
        for key, value in results.items():
            gsim = self.csm_info.gsimdict[key]
            for rlz in self.rlzs_assoc[key[0], gsim]:
                ad[rlz] = agg(ad.get(rlz, 0), value)
        return ad
Developer: amirj700 | Project: oq-risklib | Lines: 52 | Source: source.py

Example 13: acc0

 def acc0(self):
     """
     Initial accumulator, a dict grp_id -> ProbabilityMap(L, G)
     """
     csm_info = self.csm.info
     zd = AccumDict()
     num_levels = len(self.oqparam.imtls.array)
     for grp in self.csm.src_groups:
         num_gsims = len(csm_info.gsim_lt.get_gsims(grp.trt))
         zd[grp.id] = ProbabilityMap(num_levels, num_gsims)
     zd.eff_ruptures = AccumDict()  # grp_id -> eff_ruptures
     zd.nsites = AccumDict()  # src.id -> nsites
     return zd
Developer: digitalsatori | Project: oq-engine | Lines: 13 | Source: classical.py

Example 14: zerodict

 def zerodict(self):
     """
     Initial accumulator, a dictionary (trt_id, gsim) -> curves
     """
     zc = zero_curves(len(self.sitecol.complete), self.oqparam.imtls)
     zd = AccumDict((key, zc) for key in self.rlzs_assoc)
     zd.calc_times = []
     zd.eff_ruptures = AccumDict()  # trt_id -> eff_ruptures
     zd.bb_dict = {
         (smodel.ordinal, site.id): BoundingBox(smodel.ordinal, site.id)
         for site in self.sitecol
         for smodel in self.csm.source_models
     } if self.oqparam.poes_disagg else {}
     return zd
Developer: amirj700 | Project: oq-risklib | Lines: 14 | Source: classical.py

Example 15: compute_ruptures

def compute_ruptures(sources, sitecol, gsims, monitor):
    """
    :param sources: a sequence of UCERF sources
    :param sitecol: a SiteCollection instance
    :param gsims: a list of GSIMs
    :param monitor: a Monitor instance
    :returns: an AccumDict grp_id -> EBRuptures
    """
    [src] = sources  # there is a single source per UCERF branch
    integration_distance = monitor.maximum_distance[DEFAULT_TRT]
    res = AccumDict()
    res.calc_times = AccumDict()
    serial = 1
    event_mon = monitor('sampling ruptures', measuremem=False)
    res.num_events = 0
    res.trt = DEFAULT_TRT
    t0 = time.time()
    # set the seed before calling generate_event_set
    numpy.random.seed(monitor.seed + src.src_group_id)
    ebruptures = []
    eid = 0
    src.build_idx_set()
    background_sids = src.get_background_sids(sitecol, integration_distance)
    for ses_idx in range(1, monitor.ses_per_logic_tree_path + 1):
        with event_mon:
            rups, n_occs = src.generate_event_set(background_sids)
        for rup, n_occ in zip(rups, n_occs):
            rup.seed = monitor.seed  # to think
            rrup = rup.surface.get_min_distance(sitecol.mesh)
            r_sites = sitecol.filter(rrup <= integration_distance)
            if r_sites is None:
                continue
            indices = r_sites.indices
            events = []
            for occ in range(n_occ):
                events.append((eid, ses_idx, occ, 0))  # 0 is the sampling
                eid += 1
            if events:
                ebruptures.append(
                    event_based.EBRupture(
                        rup, indices,
                        numpy.array(events, event_based.event_dt),
                        src.source_id, src.src_group_id, serial))
                serial += 1
                res.num_events += len(events)
        res[src.src_group_id] = ebruptures
        res.calc_times[src.src_group_id] = (
            src.source_id, len(sitecol), time.time() - t0)
    return res
Developer: rcgee | Project: oq-engine | Lines: 49 | Source: ucerf_risk.py


Note: the openquake.baselib.general.AccumDict class examples in this article were compiled by 纯净天空 from open-source code and documentation hosted on platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by many developers; copyright of the source code remains with the original authors, and any distribution or use should follow the license of the corresponding project. Do not reproduce without permission.