

Python dask.__version__ code examples

This article collects typical usage examples of dask.__version__ (the version string exposed by the dask package) from open-source Python code. If you are wondering what dask.__version__ is, how it is used, or what it looks like in practice, the curated examples below may help. You can also explore further usage examples from the dask package.


Six code examples of dask.__version__ are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
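Before diving into the examples, here is a minimal sketch, not taken from any of the projects below, of the pattern they all share: read dask.__version__ and branch on it so the same code works across multiple Dask releases. The "2.15.0" threshold and the config key are illustrative only.

import dask
from packaging import version

if version.parse(dask.__version__) >= version.parse("2.15.0"):
    # newer Dask: disable task fusion through the config system
    dask.config.set({"optimization.fuse.active": False})
else:
    # older Dask may not expose this option; leave the defaults untouched
    print("Dask older than 2.15.0; keeping default task fusion")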

Example 1: _submit_calcs_on_client

# Required import: import dask [as alias]
# Or: from dask import __version__ [as alias]
import logging
from distutils.version import LooseVersion

import dask
import dask.bag as db


def _submit_calcs_on_client(calcs, client, func):
    """Submit calculations via dask.bag and a distributed client."""
    logging.info('Connected to client: {}'.format(client))
    # dask.set_options was replaced by dask.config.set in dask 0.18
    if LooseVersion(dask.__version__) < '0.18':
        dask_option_setter = dask.set_options
    else:
        dask_option_setter = dask.config.set
    with dask_option_setter(get=client.get):
        return db.from_sequence(calcs).map(func).compute()
Author: spencerahill, Project: aospy, Lines: 11, Source: automate.py
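For context, the dask.bag pipeline that this helper builds, shown standalone as a small sketch; with no distributed client configured it runs on Dask's default local scheduler, and square is a placeholder function.

import dask.bag as db

def square(x):
    """Placeholder per-calculation function."""
    return x ** 2

print(db.from_sequence([1, 2, 3]).map(square).compute())  # [1, 4, 9]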

Example 2: test_dask_array_creates_cache

# Required import: import dask [as alias]
# Or: from dask import __version__ [as alias]
import dask
import dask.array as da
import numpy as np
import pytest

# (Module context assumed: `layers` and `utils` are napari's layer classes and
#  utility module, presumably imported as `from napari import layers, utils`.)


def test_dask_array_creates_cache():
    """Test that adding a dask array creates a dask cache and turns off fusion."""
    # by default we have no dask_cache and task fusion is active
    original = dask.config.get("optimization.fuse.active", None)

    def mock_set_view_slice():
        assert dask.config.get("optimization.fuse.active") is False

    layer = layers.Image(da.ones((100, 100)))
    layer._set_view_slice = mock_set_view_slice
    layer.set_view_slice()
    # adding a dask array will turn on the cache, and turn off task fusion.
    assert isinstance(utils.dask_cache, dask.cache.Cache)
    assert dask.config.get("optimization.fuse.active", None) == original

    # if the dask version is too low to remove task fusion, emit a warning
    _dask_ver = dask.__version__
    dask.__version__ = '2.14.0'
    with pytest.warns(UserWarning) as record:
        _ = layers.Image(da.ones((100, 100)))

    assert 'upgrade Dask to v2.15.0 or later' in record[0].message.args[0]
    dask.__version__ = _dask_ver

    # make sure we can resize the cache
    assert utils.dask_cache.cache.total_bytes > 1000
    utils.resize_dask_cache(1000)
    assert utils.dask_cache.cache.total_bytes <= 1000

    # This should only affect dask arrays, and not numpy data
    def mock_set_view_slice2():
        assert dask.config.get("optimization.fuse.active", None) == original

    layer2 = layers.Image(np.ones((100, 100)))
    layer2._set_view_slice = mock_set_view_slice2
    layer2.set_view_slice()

    # clean up cache
    utils.dask_cache = None 
Author: napari, Project: napari, Lines: 42, Source: test_dask_layers.py
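The object this test asserts on is Dask's opt-in opportunistic cache, backed by the optional cachey package; a minimal sketch of creating and registering one, with an illustrative size:

import dask
from dask.cache import Cache  # requires the optional `cachey` dependency

cache = Cache(2e9)   # keep roughly 2 GB of intermediate results
cache.register()     # activate it for all schedulers in this process
print(isinstance(cache, dask.cache.Cache))  # True, matching the assertion above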

Example 3: task_execution_time

# Required import: import dask [as alias]
# Or: from dask import __version__ [as alias]
# (Also assumes `from packaging import version` at module level.)
def task_execution_time(self, builderSelf):
    """Calculate the execution time of the most recent task from the client's task stream."""
    task_info_latest = builderSelf.client.get_task_stream()[-1]
    time_tuple = task_info_latest['startstops'][0]

    # distributed changed startstops entries from tuples to dicts around version 2.10
    if version.parse(dask.__version__) < version.parse("2.10.0"):
        computing_time = time_tuple[2] - time_tuple[1]
    else:
        computing_time = time_tuple['stop'] - time_tuple['start']
    return computing_time
Author: QuantEcon, Project: sphinxcontrib-jupyter, Lines: 12, Source: execute_nb.py
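A hypothetical call site showing where the startstops records come from; on recent distributed versions each entry is a dict with 'action', 'start' and 'stop' keys, while older versions returned plain tuples, which is exactly what the version check above handles.

from distributed import Client

client = Client()
client.submit(sum, range(1000)).result()  # run something so the task stream is non-empty

last_task = client.get_task_stream()[-1]
startstop = last_task["startstops"][0]
duration = startstop["stop"] - startstop["start"]  # dict form on recent versions
print("task took {:.6f} s".format(duration))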

Example 4: resolve_name

# Required import: import dask [as alias]
# Or: from dask import __version__ [as alias]
# (Also assumes `import sphinx` at module level; `rpartition` is a small helper
#  defined elsewhere in conf.py, presumably str.rpartition without the separator.)
def resolve_name(self, modname, parents, path, base):
    if modname is None:
        if path:
            mod_cls = path.rstrip(".")
        else:
            # if documenting a class-level object without path,
            # there must be a current class, either from a parent
            # auto directive ...
            mod_cls = self.env.temp_data.get("autodoc:class")
            # ... or from a class directive
            if mod_cls is None:
                mod_cls = self.env.temp_data.get("py:class")
            # ... if still None, there's no way to know
            if mod_cls is None:
                return None, []
        # HACK: this is added in comparison to ClassLevelDocumenter;
        # mod_cls still consists of class.accessor, so an extra
        # rpartition is needed
        modname, accessor = rpartition(mod_cls, ".")
        modname, cls = rpartition(modname, ".")
        parents = [cls, accessor]
        # if the module name is still missing, get it like above
        if not modname:
            modname = self.env.temp_data.get("autodoc:module")
        if not modname:
            if sphinx.__version__ > "1.3":
                modname = self.env.ref_context.get("py:module")
            else:
                modname = self.env.temp_data.get("py:module")
        # ... else, it stays None, which means invalid
    return modname, parents + [base]
Author: benbovy, Project: xarray-simlab, Lines: 34, Source: conf.py
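To make the double rpartition concrete, here is a small illustration using plain str.rpartition (the conf.py helper presumably does the same but drops the separator); the accessor path is a hypothetical example.

mod_cls = "xarray.Dataset.xsimlab"              # module.class.accessor

modname, _, accessor = mod_cls.rpartition(".")  # 'xarray.Dataset', '.', 'xsimlab'
modname, _, cls = modname.rpartition(".")       # 'xarray', '.', 'Dataset'
print(modname, [cls, accessor])                 # xarray ['Dataset', 'xsimlab']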

Example 5: get_execution_engine

# Required import: import dask [as alias]
# Or: from dask import __version__ [as alias]
import os
import sys

from packaging import version


def get_execution_engine():
    # In the future, when there are multiple engines and different ways of
    # backing the DataFrame, there will have to be some changed logic here to
    # decide these things. In the meantime, we will use the currently supported
    # execution engine + backing (Pandas + Ray).
    if "MODIN_ENGINE" in os.environ:
        # .title allows variants like ray, RAY, Ray
        return os.environ["MODIN_ENGINE"].title()
    else:
        if "MODIN_DEBUG" in os.environ:
            return "Python"
        else:
            if sys.platform != "win32":
                try:
                    import ray

                except ImportError:
                    pass
                else:
                    if version.parse(ray.__version__) != version.parse("0.8.6"):
                        raise ImportError(
                            "Please `pip install modin[ray]` to install compatible Ray version."
                        )
                    return "Ray"
            try:
                import dask
                import distributed

            except ImportError:
                raise ImportError(
                    "Please `pip install {}modin[dask]` to install an engine".format(
                        "modin[ray]` or `" if sys.platform != "win32" else ""
                    )
                )
            else:
                if version.parse(dask.__version__) < version.parse(
                    "2.1.0"
                ) or version.parse(distributed.__version__) < version.parse("2.3.2"):
                    raise ImportError(
                        "Please `pip install modin[dask]` to install compatible Dask version."
                    )
                return "Dask" 
Author: modin-project, Project: modin, Lines: 44, Source: __init__.py
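The version gates above rely on packaging.version, which compares release segments numerically rather than lexicographically; a small sketch of why that matters:

from packaging import version

print(version.parse("2.10.0") > version.parse("2.9.3"))  # True: 10 > 9
print("2.10.0" > "2.9.3")                                # False: plain string comparison gets this wrong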

Example 6: make_cluster_model

# Required import: import dask [as alias]
# Or: from dask import __version__ [as alias]
# (Module context assumed: `Union` from typing, `Cluster` and `Adaptive` from
#  distributed, a `ClusterModel` dict type alias, and `utils.format_bytes`,
#  e.g. dask.utils.format_bytes.)
def make_cluster_model(
    cluster_id: str,
    cluster_name: str,
    cluster: Cluster,
    adaptive: Union[Adaptive, None],
) -> ClusterModel:
    """
    Make a cluster model. This is a JSON-serializable representation
    of the information about a cluster that can be sent over the wire.

    Parameters
    ----------
    cluster_id: string
        A unique string for the cluster.

    cluster_name: string
        A display name for the cluster.

    cluster: Cluster
        The cluster out of which to make the cluster model.

    adaptive: Adaptive
        The adaptive controller for the number of workers for the cluster, or
        none if the cluster is not scaled adaptively.
    """
    # This would be a great target for a dataclass
    # once python 3.7 is in wider use.
    try:
        info = cluster.scheduler_info
    except AttributeError:
        info = cluster.scheduler.identity()
    try:
        cores = sum(d["nthreads"] for d in info["workers"].values())
    except KeyError:  # dask.__version__ < 2.0
        cores = sum(d["ncores"] for d in info["workers"].values())
    assert isinstance(info, dict)
    model = dict(
        id=cluster_id,
        name=cluster_name,
        scheduler_address=cluster.scheduler_address,
        dashboard_link=cluster.dashboard_link or "",
        workers=len(info["workers"]),
        memory=utils.format_bytes(
            sum(d["memory_limit"] for d in info["workers"].values())
        ),
        cores=cores,
    )
    if adaptive:
        model["adapt"] = {"minimum": adaptive.minimum, "maximum": adaptive.maximum}

    return model


# Create a default cluster manager
# to keep track of clusters. 
Author: dask, Project: dask-labextension, Lines: 57, Source: manager.py
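A hypothetical usage of make_cluster_model with a local cluster; the id and display name are arbitrary strings chosen by the caller, and the exact worker keys available depend on the distributed version in use.

from distributed import LocalCluster

cluster = LocalCluster(n_workers=2, threads_per_worker=1)
model = make_cluster_model("cluster-0", "local test cluster", cluster, adaptive=None)
print(model["workers"], model["cores"])  # typically: 2 2
cluster.close()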


Note: The dask.__version__ examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are selected from open-source projects contributed by their respective authors, and copyright of the source code remains with them. Please follow each project's license when distributing or using the code; do not reproduce this article without permission.