

Python dask.delayed usage examples

This article collects typical usage examples of the dask.delayed function in Python. If you are wondering exactly how dask.delayed is used, how to call it, or what it looks like in real code, the curated snippets below should help. You can also explore further usage examples from the dask package itself.


The following presents 15 code examples of dask.delayed, ordered roughly by popularity.
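As a quick orientation, here is a minimal sketch of the core dask.delayed pattern that most of the snippets below build on (the add function and the values are purely illustrative, not taken from any of the projects):

import dask

# Wrapping a plain function with dask.delayed defers its execution.
@dask.delayed
def add(a, b):
    return a + b

# No work happens yet: `total` is a Delayed object describing a task graph.
total = add(1, 2) + add(3, 4)

# dask.compute() executes the graph with the default scheduler.
result, = dask.compute(total)
print(result)  # 10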

Example 1: dispatch

# Required import: import dask
# Or: from dask import delayed
def dispatch(function, delayed=False, client=None):
    """
    Decide how to wrap a function for Dask depending on the options given.

    Parameters
    ----------
    function : callable
        The function that will be called.
    delayed : bool
        If True, will wrap the function in :func:`dask.delayed`.
    client : None or dask.distributed Client
        If *delayed* is False and *client* is not None, will return
        ``client.submit`` partially applied (via :func:`functools.partial`)
        with the function as its first argument.

    Returns
    -------
    function : callable
        The function wrapped in Dask.

    """
    if delayed:
        return dask.delayed(function)
    if client is not None:
        return functools.partial(client.submit, function)
    return function 
Developer: fatiando, Project: verde, Lines: 27, Source: utils.py
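A hedged usage sketch of the dispatch helper above, assuming it and its imports (dask, functools) are in scope; the use of numpy.mean here is illustrative and not taken from verde:

import dask
import numpy as np

# delayed=True wraps the function, so calling it builds a task instead of
# running immediately.
lazy_mean = dispatch(np.mean, delayed=True)
task = lazy_mean(np.arange(10))
print(dask.compute(task)[0])  # 4.5

# With delayed=False and no client, the original function is returned and
# runs eagerly.
eager_mean = dispatch(np.mean)
print(eager_mean(np.arange(10)))  # 4.5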

Example 2: detect_outliers

# Required import: import dask
# Or: from dask import delayed
def detect_outliers(request):
    """
    Detect outliers end point
    """
    dataset_id = request.GET.get("dataset_id")

    if dataset_id is None:
        return JsonResponse({"status": "failure", "message": "Dataset id is not provided"})

    dataset = Dataset.objects.get(pk=int(dataset_id))
    file_path = dataset.path
    delete_features = json.loads(dataset.deleted_features)

    # Create a detection experiment and start outlier detection
    process = Process.objects.get(name='Detection')
    process_status = ProcessStatus.objects.get(name='Running')
    experiment = Experiment(dataset=dataset, process=process, process_status=process_status)
    experiment.save()
    results = delayed(detect_all)(os.path.join(settings.MEDIA_ROOT, file_path), experiment.id, settings.RESULTS_ROOT,
                                  delete_features)
    dask.compute(results)

    return JsonResponse(
        {'status': 'success', 'message': 'Detection started successfully', 'experiment_id': experiment.id}) 
Developer: MateLabs, Project: AutoOut, Lines: 26, Source: views.py

Example 3: _load

# Required import: import dask
# Or: from dask import delayed
def _load(self, files_in, files_out, urlpath, meta=True):
        """Download a set of files"""
        import dask
        out = []
        outnames = []
        for file_in, file_out in zip(files_in, files_out):
            cache_path = file_out.path
            outnames.append(cache_path)

            # If `_munge_path` did not find a match we want to avoid
            # writing to the urlpath.
            if cache_path == urlpath:
                continue

            if not os.path.isfile(cache_path):
                logger.debug("Caching file: {}".format(file_in.path))
                logger.debug("Original path: {}".format(urlpath))
                logger.debug("Cached at: {}".format(cache_path))
                if meta:
                    self._log_metadata(urlpath, file_in.path, cache_path)
                ddown = dask.delayed(_download)
                out.append(ddown(file_in, file_out, self.blocksize,
                                 self.output))
        dask.compute(*out)
        return outnames 
Developer: intake, Project: intake, Lines: 27, Source: cache.py

Example 4: delayed_dask_stack

# Required import: import dask
# Or: from dask import delayed
def delayed_dask_stack():
    """A 4D (20, 10, 10, 10) delayed dask array, simulates disk io."""
    # we will return a dict with a 'calls' variable that tracks call count
    output = {'calls': 0}

    # create a delayed version of function that simply generates np.arrays
    # but also counts when it has been called
    @dask.delayed
    def get_array():
        nonlocal output
        output['calls'] += 1
        return np.random.rand(10, 10, 10)

    # then make a mock "timelapse" of 3D stacks
    # see https://napari.org/tutorials/applications/dask.html for details
    _list = [get_array() for _ in range(20)]
    output['stack'] = da.stack(
        [da.from_delayed(i, shape=(10, 10, 10), dtype=float) for i in _list]
    )
    assert output['stack'].shape == (20, 10, 10, 10)
    return output 
Developer: napari, Project: napari, Lines: 23, Source: test_dask_layers.py
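To make the fixture's intent concrete, here is a sketch of how the delayed stack postpones the simulated disk io until a slice is computed. The exact call count relies on dask culling unused tasks, which is my reading of the behaviour rather than part of the napari test itself:

stack_info = delayed_dask_stack()
assert stack_info['calls'] == 0        # nothing has been generated yet

# Computing a single timepoint should only need the first delayed array,
# so the simulated io runs once (assuming unused tasks are culled).
frame = stack_info['stack'][0].compute()
assert frame.shape == (10, 10, 10)
assert stack_info['calls'] == 1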

Example 5: _update_dataset

# Required import: import dask
# Or: from dask import delayed
def _update_dataset(partitions, *args, **kwargs):
    # TODO: fix the parsing below to adapt for all supported formats (see: parse_input_to_metapartition)
    if any(partitions):
        table_name = next(iter(dict(partitions[0]["data"]).keys()))
        delayed_partitions = [
            dask.delayed(_unwrap_partition)(part) for part in partitions
        ]
        partitions = dd.from_delayed(delayed_partitions)
    else:
        table_name = "core"
        partitions = None
    ddf = update_dataset_from_ddf(partitions, *args, table=table_name, **kwargs)

    s = pickle.dumps(ddf, pickle.HIGHEST_PROTOCOL)
    ddf = pickle.loads(s)

    return ddf.compute() 
Developer: JDASoftwareGroup, Project: kartothek, Lines: 19, Source: test_update.py

Example 6: calculate_centroids_old

# Required import: import dask
# Or: from dask import delayed
def calculate_centroids_old(cnmds, window, grp_dim=['animal', 'session']):
    print("computing centroids")
    cnt_list = []
    for anm, cur_anm in cnmds.groupby('animal'):
        for ss, cur_ss in cur_anm.groupby('session'):
            # cnt = centroids(cur_ss['A_shifted'], window.sel(animal=anm))
            cnt = da.delayed(centroids)(
                cur_ss['A_shifted'], window.sel(animal=anm))
            cnt_list.append(cnt)
    with ProgressBar():
        cnt_list, = da.compute(cnt_list)
    cnts_ds = pd.concat(cnt_list, ignore_index=True)
    cnts_ds.height = cnts_ds.height.astype(float)
    cnts_ds.width = cnts_ds.width.astype(float)
    cnts_ds.unit_id = cnts_ds.unit_id.astype(int)
    cnts_ds.animal = cnts_ds.animal.astype(str)
    cnts_ds.session = cnts_ds.session.astype(str)
    cnts_ds.session_id = cnts_ds.session_id.astype(str)
    return cnts_ds 
Developer: DeniseCaiLab, Project: minian, Lines: 21, Source: cross_registration.py

Example 7: calculate_centroid_distance_old

# Required import: import dask
# Or: from dask import delayed
def calculate_centroid_distance_old(cents,
                                A,
                                window,
                                grp_dim=['animal'],
                                tile=(50, 50),
                                shift=True,
                                hamming=True,
                                corr=False):
    dist_list = []
    A = da.delayed(A)
    for cur_anm, cur_grp in cents.groupby('animal'):
        print("processing animal: {}".format(cur_anm))
        cur_A = A.sel(animal=cur_anm)
        cur_wnd = window.sel(animal=cur_anm)
        dist = centroids_distance(cur_grp, cur_A, cur_wnd, shift, hamming,
                                  corr, tile)
        dist['meta', 'animal'] = cur_anm
        dist_list.append(dist)
    dist = pd.concat(dist_list, ignore_index=True)
    return dist 
Developer: DeniseCaiLab, Project: minian, Lines: 22, Source: cross_registration.py

Example 8: centroids_distance_old

# Required import: import dask
# Or: from dask import delayed
def centroids_distance_old(cents,
                       A,
                       window,
                       shift,
                       hamming,
                       corr,
                       tile=(50, 50)):
    sessions = cents['session'].unique()
    dim_h = (np.min(cents['height']), np.max(cents['height']))
    dim_w = (np.min(cents['width']), np.max(cents['width']))
    dist_list = []
    for ssA, ssB in itt.combinations(sessions, 2):
        # dist = _calc_cent_dist(ssA, ssB, cents, cnmds, window, tile, dim_h, dim_w)
        dist = da.delayed(_calc_cent_dist)(ssA, ssB, cents, A, window,
                                           tile, dim_h, dim_w, shift, hamming,
                                           corr)
        dist_list.append(dist)
    with ProgressBar():
        dist_list, = da.compute(dist_list)
    dists = pd.concat(dist_list, ignore_index=True)
    return dists 
Developer: DeniseCaiLab, Project: minian, Lines: 23, Source: cross_registration.py

Example 9: _labeled_comprehension_delayed

# Required import: import dask
# Or: from dask import delayed
def _labeled_comprehension_delayed(func,
                                   out_dtype,
                                   default,
                                   a,
                                   positions=None):
    """
    Wrapped delayed labeled comprehension function

    Included in the module for pickling purposes. Also handles cases where
    the computation should not occur.
    """

    result = numpy.empty((1,), dtype=out_dtype)

    if a.size:
        if positions is None:
            result[0] = func(a)
        else:
            result[0] = func(a, positions)
    else:
        result[0] = default[0]

    return result 
Developer: dask, Project: dask-image, Lines: 25, Source: __init__.py

Example 10: _labeled_comprehension_func

# Required import: import dask
# Or: from dask import delayed
def _labeled_comprehension_func(func,
                                out_dtype,
                                default,
                                a,
                                positions=None):
    """
    Wrapped labeled comprehension function

    Ensures the result is a proper Dask Array and that the computation is delayed.
    """

    return dask.array.from_delayed(
        _labeled_comprehension_delayed(func, out_dtype, default, a, positions),
        (1,),
        out_dtype
    ) 
Developer: dask, Project: dask-image, Lines: 18, Source: __init__.py
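Examples 9 and 10 together illustrate a common recipe: run an arbitrary function lazily with dask.delayed, then expose its result as a Dask array via dask.array.from_delayed, which needs the shape and dtype up front. A stripped-down sketch of that recipe (not dask-image code; the summarize function is made up):

import dask
import dask.array as da
import numpy as np

def summarize(a):
    # Any plain function that returns an array of known shape and dtype.
    return np.array([a.min(), a.max(), a.mean()])

x = np.random.rand(1000)
lazy = dask.delayed(summarize)(x)

# from_delayed needs shape and dtype up front because nothing has run yet.
arr = da.from_delayed(lazy, shape=(3,), dtype=np.float64)
print(arr.compute())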

Example 11: __call__

# Required import: import dask
# Or: from dask import delayed
def __call__(self, datasets, **info):
        """Create the composite by scaling the DNB data using a histogram equalization method.

        :param datasets: 2-element tuple (Day/Night Band data, Solar Zenith Angle data)
        :param **info: Miscellaneous metadata for the newly produced composite
        """
        if len(datasets) != 2:
            raise ValueError("Expected 2 datasets, got %d" % (len(datasets), ))

        dnb_data = datasets[0]
        sza_data = datasets[1]
        delayed = dask.delayed(self._run_dnb_normalization)(dnb_data.data, sza_data.data)
        output_dataset = dnb_data.copy()
        output_data = da.from_delayed(delayed, dnb_data.shape, dnb_data.dtype)
        output_dataset.data = output_data.rechunk(dnb_data.data.chunks)

        info = dnb_data.attrs.copy()
        info.update(self.attrs)
        info["standard_name"] = "equalized_radiance"
        info["mode"] = "L"
        output_dataset.attrs = info
        return output_dataset 
Developer: pytroll, Project: satpy, Lines: 24, Source: viirs.py

Example 12: three_d_effect

# Required import: import dask
# Or: from dask import delayed
def three_d_effect(img, **kwargs):
    """Create 3D effect using convolution."""
    w = kwargs.get('weight', 1)
    LOG.debug("Applying 3D effect with weight %.2f", w)
    kernel = np.array([[-w, 0, w],
                       [-w, 1, w],
                       [-w, 0, w]])
    mode = kwargs.get('convolve_mode', 'same')

    def func(band_data, kernel=kernel, mode=mode, index=None):
        del index

        delay = dask.delayed(_three_d_effect_delayed)(band_data, kernel, mode)
        new_data = da.from_delayed(delay, shape=band_data.shape, dtype=band_data.dtype)
        return new_data

    return apply_enhancement(img.data, func, separate=True, pass_dask=True) 
Developer: pytroll, Project: satpy, Lines: 19, Source: __init__.py

Example 13: _call_ll2cr

# Required import: import dask
# Or: from dask import delayed
def _call_ll2cr(self, lons, lats, target_geo_def, swath_usage=0):
        """Wrap ll2cr() for handling dask delayed calls better."""
        new_src = SwathDefinition(lons, lats)

        swath_points_in_grid, cols, rows = ll2cr(new_src, target_geo_def)
        # FIXME: How do we check swath usage/coverage if we only do this
        #        per-block
        # # Determine if enough of the input swath was used
        # grid_name = getattr(self.target_geo_def, "name", "N/A")
        # fraction_in = swath_points_in_grid / float(lons.size)
        # swath_used = fraction_in > swath_usage
        # if not swath_used:
        #     LOG.info("Data does not fit in grid %s because it only %f%% of "
        #              "the swath is used" %
        #              (grid_name, fraction_in * 100))
        #     raise RuntimeError("Data does not fit in grid %s" % (grid_name,))
        # else:
        #     LOG.debug("Data fits in grid %s and uses %f%% of the swath",
        #               grid_name, fraction_in * 100)

        return np.stack([cols, rows], axis=0) 
Developer: pytroll, Project: satpy, Lines: 23, Source: resample.py

Example 14: _call_fornav

# Required import: import dask
# Or: from dask import delayed
def _call_fornav(self, cols, rows, target_geo_def, data,
                     grid_coverage=0, **kwargs):
        """Wrap fornav() to run as a dask delayed."""
        num_valid_points, res = fornav(cols, rows, target_geo_def,
                                       data, **kwargs)

        if isinstance(data, tuple):
            # convert 'res' from tuple of arrays to one array
            res = np.stack(res)
            num_valid_points = sum(num_valid_points)

        grid_covered_ratio = num_valid_points / float(res.size)
        grid_covered = grid_covered_ratio > grid_coverage
        if not grid_covered:
            msg = "EWA resampling only found %f%% of the grid covered " \
                  "(need %f%%)" % (grid_covered_ratio * 100,
                                   grid_coverage * 100)
            raise RuntimeError(msg)
        LOG.debug("EWA resampling found %f%% of the grid covered" %
                  (grid_covered_ratio * 100))

        return res 
Developer: pytroll, Project: satpy, Lines: 24, Source: resample.py

Example 15: benchmark

# Required import: import dask
# Or: from dask import delayed
def benchmark(datasets=None, datasets_path=None, distributed=True, timeout=None):
    if datasets is None:
        if datasets_path is None:
            datasets = get_available_demos().name
        else:
            datasets = os.listdir(datasets_path)

    if distributed:
        import dask

        global score_dataset
        score_dataset = dask.delayed(score_dataset)

    scores = list()
    for dataset in datasets:
        scores.append(score_dataset(dataset, datasets_path, timeout))

    if distributed:
        scores = dask.compute(*scores)

    return pd.DataFrame(scores) 
Developer: sdv-dev, Project: SDV, Lines: 23, Source: benchmark.py
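The re-binding of score_dataset to dask.delayed(score_dataset) in Example 15 is the interesting part: the same loop then builds a task graph instead of running eagerly, and dask.compute(*scores) resolves everything in one pass. A self-contained sketch of that toggle, with a made-up scoring function:

import dask

def score(x):
    return x * x

def run(values, distributed=True):
    fn = dask.delayed(score) if distributed else score
    results = [fn(v) for v in values]
    if distributed:
        # Each entry is a Delayed object; compute them all at once.
        results = dask.compute(*results)
    return list(results)

print(run(range(5)))         # [0, 1, 4, 9, 16]
print(run(range(5), False))  # [0, 1, 4, 9, 16]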


Note: The dask.delayed examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The snippets were selected from open-source projects contributed by many developers; copyright remains with the original authors, so consult each project's License before distributing or reusing the code. Please do not republish this compilation without permission.