本文整理匯總了Python中dask.array.Array方法的典型用法代碼示例。如果您正苦於以下問題:Python array.Array方法的具體用法?Python array.Array怎麽用?Python array.Array使用的例子?那麽, 這裏精選的方法代碼示例或許可以為您提供幫助。您也可以進一步了解該方法所在類dask.array
的用法示例。
在下文中一共展示了array.Array方法的15個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於係統推薦出更棒的Python代碼示例。
示例1: test_write_zarr
# 需要導入模塊: from dask import array [as 別名]
# 或者: from dask.array import Array [as 別名]
def test_write_zarr(self, adata, adata_dist):
    """Round-trip test: write the distributed AnnData's X to a zarr store,
    read it back with regular anndata, and check it matches the eagerly
    transformed original.

    Parameters
    ----------
    adata : in-memory AnnData fixture
    adata_dist : distributed (dask- or zarr-backed) AnnData fixture
    """
    import dask.array as da
    import zarr
    # transform the distributed copy first; the in-memory copy is
    # transformed below, just before the comparison
    log1p(adata_dist)
    temp_store = zarr.TempStore()
    chunks = adata_dist.X.chunks
    # dask chunks are a tuple of per-axis tuples; collapse to the flat
    # (rows, cols) chunk spec that write_zarr expects
    if isinstance(chunks[0], tuple):
        chunks = (chunks[0][0],) + chunks[1]
    # write metadata using regular anndata
    adata.write_zarr(temp_store, chunks)
    if isinstance(adata_dist.X, da.Array):
        # dask arrays carry their own chunking; overwrite the X written above
        adata_dist.X.to_zarr(temp_store.dir_path("X"), overwrite=True)
    else:
        # zarr-backed X: pass the chunk spec explicitly
        adata_dist.X.to_zarr(temp_store.dir_path("X"), chunks)
    # read back as zarr directly and check it is the same as adata.X
    adata_log1p = ad.read_zarr(temp_store)
    log1p(adata)
    npt.assert_allclose(adata_log1p.X, adata.X)
示例2: _dask_or_eager_func
# 需要導入模塊: from dask import array [as 別名]
# 或者: from dask.array import Array [as 別名]
def _dask_or_eager_func(name, eager_module=np, list_of_args=False, n_array_args=1):
    """Create a function that dispatches to dask for dask array inputs.

    Parameters
    ----------
    name : str
        Attribute name looked up on the chosen module at call time.
    eager_module : module
        Module used when no dask arrays are among the inspected arguments.
    list_of_args : bool
        If True, the wrapped function's first positional argument is itself
        the sequence of arguments to inspect.
    n_array_args : int
        How many leading arguments to check for being dask arrays.
    """
    if not has_dask:
        # dask unavailable: always delegate to the eager module
        def f(data, *args, **kwargs):
            return getattr(eager_module, name)(data, *args, **kwargs)
        return f

    def f(*args, **kwargs):
        candidates = args[0] if list_of_args else args
        uses_dask = any(isinstance(a, dsa.Array) for a in candidates[:n_array_args])
        module = dsa if uses_dask else eager_module
        return getattr(module, name)(*args, **kwargs)
    return f
示例3: StackColumns
# 需要導入模塊: from dask import array [as 別名]
# 或者: from dask.array import Array [as 別名]
def StackColumns(*cols):
    """
    Stack the input dask arrays vertically, column by column.

    Broadcasts the inputs against each other first, then stacks them with
    :func:`dask.array.vstack` and transposes so each input becomes a column.

    Parameters
    ----------
    *cols : :class:`dask.array.Array`
        the dask arrays to stack vertically together

    Returns
    -------
    :class:`dask.array.Array` :
        the dask array where columns correspond to the input arrays

    Raises
    ------
    TypeError
        If the input columns are not dask arrays
    """
    return da.vstack(da.broadcast_arrays(*cols)).T
示例4: _dask_or_eager_func
# 需要導入模塊: from dask import array [as 別名]
# 或者: from dask.array import Array [as 別名]
def _dask_or_eager_func(name, eager_module=np, list_of_args=False,
                        n_array_args=1):
    """Create a function that dispatches to dask for dask array inputs.

    Returns a wrapper that resolves ``name`` on ``dsa`` when any of the
    first ``n_array_args`` inspected arguments is a dask array, and on
    ``eager_module`` otherwise.  When dask is not installed, the wrapper
    always resolves on ``eager_module``.
    """
    if has_dask:
        def f(*args, **kwargs):
            inspected = args[0] if list_of_args else args
            module = eager_module
            for candidate in inspected[:n_array_args]:
                if isinstance(candidate, dsa.Array):
                    module = dsa
                    break
            return getattr(module, name)(*args, **kwargs)
    else:
        def f(data, *args, **kwargs):
            return getattr(eager_module, name)(data, *args, **kwargs)
    return f
示例5: __new__
# 需要導入模塊: from dask import array [as 別名]
# 或者: from dask.array import Array [as 別名]
def __new__(cls, dm, **kwargs):
    """Construct an instance of this dask-array subclass.

    Parameters
    ----------
    dm : DaskMeta, dask.array.Array, dict, or graph-like object
        Source of the dask graph fields (dask, name, chunks, dtype, shape).
        Objects named "Op", "GraphMeta", "TmsMeta" or "TemplateMeta" are
        accepted by duck typing on those five attributes.
    **kwargs :
        Optional ``__geo_transform__`` / ``__geo_interface__`` metadata,
        attached verbatim to the new instance.

    Raises
    ------
    ValueError
        If ``dm`` is none of the accepted forms.
    """
    if isinstance(dm, da.Array):
        dm = DaskMeta.from_darray(dm)
    elif isinstance(dm, dict):
        dm = DaskMeta(**dm)
    elif isinstance(dm, DaskMeta):
        pass
    elif dm.__class__.__name__ in ("Op", "GraphMeta", "TmsMeta", "TemplateMeta"):
        # duck-typed graph objects: pull the five graph fields by position
        # (order must match the DaskMeta namedtuple fields)
        itr = [dm.dask, dm.name, dm.chunks, dm.dtype, dm.shape]
        dm = DaskMeta._make(itr)
    else:
        raise ValueError("{} must be initialized with a DaskMeta, a dask array, or a dict with DaskMeta fields".format(cls.__name__))
    # build the underlying dask array with cls as the concrete subclass
    self = da.Array.__new__(cls, dm.dask, dm.name, dm.chunks, dtype=dm.dtype, shape=dm.shape)
    if "__geo_transform__" in kwargs:
        self.__geo_transform__ = kwargs["__geo_transform__"]
    if "__geo_interface__" in kwargs:
        self.__geo_interface__ = kwargs["__geo_interface__"]
    return self
示例6: _build_image_layer
# 需要導入模塊: from dask import array [as 別名]
# 或者: from dask.array import Array [as 別名]
def _build_image_layer(self, image, image_bounds, cmap='viridis'):
    """Build an ImageLayer for the map from a dask image or a plain ndarray.

    Parameters
    ----------
    image : dask.array.Array, ndarray, or None
        Image data; dask images are expected to expose ``.bounds`` and,
        for multi-band data, ``.rgb()``.
    image_bounds : tuple or None
        (minx, miny, maxx, maxy) bounds; required for plain ndarray images.
    cmap : str
        Matplotlib colormap name used when encoding the image.

    Returns
    -------
    ImageLayer, or the string ``'false'`` (a JavaScript literal consumed by
    the map template) when no image is given.
    """
    # Guard clause instead of wrapping everything in `if image is not None`;
    # also drops the stray trailing semicolon of the original return.
    if image is None:
        return 'false'
    if isinstance(image, da.Array):
        # single-band (2D, or 3D with one leading band) -> materialize as-is;
        # multi-band -> let the image render itself to RGB
        if len(image.shape) == 2 or \
                (image.shape[0] == 1 and len(image.shape) == 3):
            arr = image.compute()
        else:
            arr = image.rgb()
        coords = box(*image.bounds)
    else:
        assert image_bounds is not None, "Must pass image_bounds with ndarray images"
        arr = image
        coords = box(*image_bounds)
    b64 = self._encode_image(arr, cmap)
    return ImageLayer(b64, self._polygon_coords(coords))
示例7: _load_GeoTransform
# 需要導入模塊: from dask import array [as 別名]
# 或者: from dask.array import Array [as 別名]
def _load_GeoTransform(self):
    """Build latitude and longitude variables from the dataset's affine
    geotransform (``gdal.Open(...).GetGeoTransform()``).

    Returns
    -------
    tuple of (Variable, Variable)
        The latitude variable (dim ``'lat'``) and longitude variable
        (dim ``'lon'``); lazy dask arrays when dask is available,
        eager arrays otherwise.
    """
    # Closures capture `ds` and `b` (assigned below) so evaluation can be
    # deferred into a dask graph; lon uses origin b[0] + pixel width b[1],
    # lat uses origin b[3] + pixel height b[5].
    def load_lon():
        return arange(ds.RasterXSize)*b[1]+b[0]
    def load_lat():
        return arange(ds.RasterYSize)*b[5]+b[3]
    ds = self.ds
    b = self.ds.GetGeoTransform() # bbox, interval
    if with_dask:
        # hand-built single-chunk dask graphs: one task per coordinate axis
        lat = Array(
            {('lat', 0): (load_lat,)}, 'lat', (self.ds.RasterYSize,),
            shape=(self.ds.RasterYSize,), dtype=float)
        lon = Array(
            {('lon', 0): (load_lon,)}, 'lon', (self.ds.RasterXSize,),
            shape=(self.ds.RasterXSize,), dtype=float)
    else:
        # no dask: evaluate the closures eagerly
        lat = load_lat()
        lon = load_lon()
    return Variable(('lat',), lat), Variable(('lon',), lon)
示例8: compute_scaling
# 需要導入模塊: from dask import array [as 別名]
# 或者: from dask.array import Array [as 別名]
def compute_scaling(df, region1, region2=None, dmin=int(1e1), dmax=int(1e7), n_bins=50):
    """Compute a contact-frequency-vs-distance scaling curve.

    Parameters
    ----------
    df : dataframe with ``pos1``/``pos2`` columns (pandas or dask-backed)
    region1, region2 : (start, end) intervals; ``region2`` defaults to
        ``region1`` for intra-region scaling
    dmin, dmax : distance range covered by the log-spaced bins
    n_bins : number of log-spaced distance bins

    Returns
    -------
    (distbins, obs, areas) : bin edges, observed counts per bin, and the
        geometric contact areas per bin
    """
    import dask.array as da
    region2 = region1 if region2 is None else region2
    distbins = numutils.logbins(dmin, dmax, N=n_bins)
    areas = contact_areas(distbins, region1, region2)
    # keep only pairs whose ends fall inside the two regions
    in_regions = (
        (df["pos1"] >= region1[0])
        & (df["pos1"] < region1[1])
        & (df["pos2"] >= region2[0])
        & (df["pos2"] < region2[1])
    )
    df = df[in_regions]
    dists = (df["pos2"] - df["pos1"]).values
    selected = dists[(dists >= dmin) & (dists < dmax)]
    # dispatch to the dask histogram for dask-backed values
    hist = da.histogram if isinstance(dists, da.Array) else np.histogram
    obs, _ = hist(selected, bins=distbins)
    return distbins, obs, areas
示例9: test_nearest_swath_1d_mask_to_grid_1n
# 需要導入模塊: from dask import array [as 別名]
# 或者: from dask.array import Array [as 別名]
def test_nearest_swath_1d_mask_to_grid_1n(self):
    """Test 1D swath definition to 2D grid definition; 1 neighbor."""
    from pyresample.kd_tree import XArrayResamplerNN
    import xarray as xr
    import dask.array as da
    resampler = XArrayResamplerNN(self.tswath_1d, self.tgrid,
                                  radius_of_influence=100000,
                                  neighbours=1)
    data = self.tdata_1d
    # masked (null) input pixels are excluded from neighbour lookup
    ninfo = resampler.get_neighbour_info(mask=data.isnull())
    for val in ninfo[:3]:
        # vii, ia, voi — the neighbour-info components must stay lazy
        self.assertIsInstance(val, da.Array)
    res = resampler.get_sample_from_neighbour_info(data)
    self.assertIsInstance(res, xr.DataArray)
    self.assertIsInstance(res.data, da.Array)
    actual = res.values
    # the masked source pixel surfaces as NaN in the resampled grid
    expected = np.array([
        [1., 2., 2.],
        [1., 2., 2.],
        [1., np.nan, 2.],
        [1., 2., 2.],
    ])
    np.testing.assert_allclose(actual, expected)
示例10: test_nearest_swath_2d_mask_to_area_1n
# 需要導入模塊: from dask import array [as 別名]
# 或者: from dask.array import Array [as 別名]
def test_nearest_swath_2d_mask_to_area_1n(self):
    """Test 2D swath definition to 2D area definition; 1 neighbor."""
    from pyresample.kd_tree import XArrayResamplerNN
    import xarray as xr
    import dask.array as da
    swath_def = self.swath_def_2d
    data = self.data_2d
    resampler = XArrayResamplerNN(swath_def, self.area_def,
                                  radius_of_influence=50000,
                                  neighbours=1)
    # masked (null) input pixels are excluded from neighbour lookup
    ninfo = resampler.get_neighbour_info(mask=data.isnull())
    for val in ninfo[:3]:
        # vii, ia, voi — the neighbour-info components must stay lazy
        self.assertIsInstance(val, da.Array)
    res = resampler.get_sample_from_neighbour_info(data)
    self.assertIsInstance(res, xr.DataArray)
    self.assertIsInstance(res.data, da.Array)
    res = res.values
    # compare via nansum since the masked pixels resample to NaN
    cross_sum = np.nansum(res)
    expected = 15874591.0
    self.assertEqual(cross_sum, expected)
示例11: test_nearest_swath_1d_mask_to_grid_8n
# 需要導入模塊: from dask import array [as 別名]
# 或者: from dask.array import Array [as 別名]
def test_nearest_swath_1d_mask_to_grid_8n(self):
    """Test 1D swath definition to 2D grid definition; 8 neighbors."""
    from pyresample.kd_tree import XArrayResamplerNN
    import xarray as xr
    import dask.array as da
    resampler = XArrayResamplerNN(self.tswath_1d, self.tgrid,
                                  radius_of_influence=100000,
                                  neighbours=8)
    data = self.tdata_1d
    # masked (null) input pixels are excluded from neighbour lookup
    ninfo = resampler.get_neighbour_info(mask=data.isnull())
    for val in ninfo[:3]:
        # vii, ia, voi — the neighbour-info components must stay lazy
        self.assertIsInstance(val, da.Array)
    res = resampler.get_sample_from_neighbour_info(data)
    self.assertIsInstance(res, xr.DataArray)
    self.assertIsInstance(res.data, da.Array)
    # value comparison not implemented yet; only types are checked above
    # actual = res.values
    # expected = TODO
    # np.testing.assert_allclose(actual, expected)
示例12: _dask_array_vgrid
# 需要導入模塊: from dask import array [as 別名]
# 或者: from dask.array import Array [as 別名]
def _dask_array_vgrid(self, varname, klevels, k_chunksize):
    """Build a lazy dask array for a 1D vertical grid variable.

    Parameters
    ----------
    varname : str
        Variable name; used for graph naming and metadata lookup.
    klevels : sequence of int
        Vertical levels to load.
    k_chunksize : chunk size along k; NOTE(review): unused here — the
        whole variable is loaded as a single chunk (see below).
    """
    # return a dask array for a 1D vertical grid var
    # single chunk for 1D variables
    chunks = ((len(klevels),),)
    # manually build dask graph
    dsk = {}
    token = tokenize(varname, self.store)
    name = '-'.join([varname, token])
    # variables on the k_p1 (interface) dimension have one extra level
    nz = self.nz if _VAR_METADATA[varname]['dims'] != ['k_p1'] else self.nz+1
    # single task producing the whole 1D variable
    task = (_get_1d_chunk, self.store, varname,
            list(klevels), nz, self.dtype)
    key = name, 0
    dsk[key] = task
    return dsa.Array(dsk, name, chunks, self.dtype)
示例13: interpolate_xarray
# 需要導入模塊: from dask import array [as 別名]
# 或者: from dask.array import Array [as 別名]
def interpolate_xarray(xpoints, ypoints, values, shape, kind='cubic',
                       blocksize=CHUNK_SIZE):
    """Interpolate, generating a dask array.

    Builds a 2D interpolator from the sample points once (eagerly), then
    constructs a dask graph whose tasks each evaluate the interpolator on
    one (blocksize x blocksize) tile of the output grid.

    Parameters
    ----------
    xpoints, ypoints, values : sample coordinates and values for interp2d
    shape : (rows, cols) of the full output grid
    kind : interpolation kind passed to scipy's interp2d
    blocksize : tile size of the dask chunks

    Returns
    -------
    DataArray with dims ('y', 'x') wrapping the lazy dask array.
    """
    # tile origins along each axis
    vchunks = range(0, shape[0], blocksize)
    hchunks = range(0, shape[1], blocksize)
    # deterministic graph name so identical calls share a graph
    token = tokenize(blocksize, xpoints, ypoints, values, kind, shape)
    name = 'interpolate-' + token
    # NOTE(review): scipy.interpolate.interp2d is deprecated in newer scipy
    # releases — confirm the pinned scipy version still provides it
    from scipy.interpolate import interp2d
    interpolator = interp2d(xpoints, ypoints, values, kind=kind)
    # one lazy task per tile; slices are clamped to the grid edges
    dskx = {(name, i, j): (interpolate_slice,
                           slice(vcs, min(vcs + blocksize, shape[0])),
                           slice(hcs, min(hcs + blocksize, shape[1])),
                           interpolator)
            for i, vcs in enumerate(vchunks)
            for j, hcs in enumerate(hchunks)
            }
    res = da.Array(dskx, name, shape=list(shape),
                   chunks=(blocksize, blocksize),
                   dtype=values.dtype)
    return DataArray(res, dims=('y', 'x'))
示例14: materialize_as_ndarray
# 需要導入模塊: from dask import array [as 別名]
# 或者: from dask.array import Array [as 別名]
def materialize_as_ndarray(a):
    """Convert distributed arrays to ndarrays.

    Parameters
    ----------
    a : array-like, or a list/tuple of array-likes.

    Returns
    -------
    A single ndarray for scalar input; a tuple of materialized arrays for
    list/tuple input.  If any element of a list/tuple is a dask array, all
    elements are computed together in one pass so shared intermediates are
    evaluated only once.
    """
    # isinstance instead of `type(a) in (list, tuple)`: idiomatic, and also
    # accepts list/tuple subclasses (backward compatible for exact types)
    if isinstance(a, (list, tuple)):
        if da is not None and any(isinstance(arr, da.Array) for arr in a):
            return da.compute(*a, sync=True)
        return tuple(np.asarray(arr) for arr in a)
    return np.asarray(a)
示例15: test_compute_dataset_with_processed_variables
# 需要導入模塊: from dask import array [as 別名]
# 或者: from dask.array import Array [as 別名]
def test_compute_dataset_with_processed_variables(self):
    """Check evaluate_dataset with per-variable processing options:
    plain passthrough ('a'), disabled valid-pixel expression ('b'),
    a computed+loaded expression ('c'), and a loaded variable with a
    valid-pixel expression referencing 'c' ('d')."""
    dataset = self.get_test_dataset()
    computed_dataset = evaluate_dataset(dataset,
                                        processed_variables=[('a', None),
                                                             ('b', dict(valid_pixel_expression=None)),
                                                             ('c', dict(expression='a + b',
                                                                        load=True)),
                                                             ('d', dict(valid_pixel_expression='c > 0.4',
                                                                        load=True))])
    # evaluation must return a new dataset, not mutate the input
    self.assertIsNot(computed_dataset, dataset)
    self.assertIn('x', computed_dataset)
    self.assertIn('y', computed_dataset)
    self.assertIn('a', computed_dataset)
    self.assertIn('b', computed_dataset)
    self.assertIn('c', computed_dataset)
    self.assertIn('d', computed_dataset)
    self.assertIn('x', computed_dataset.coords)
    self.assertIn('y', computed_dataset.coords)
    self.assertIn('title', computed_dataset.attrs)
    self.assertEqual((2, 4), computed_dataset.a.shape)
    self.assertEqual((2, 4), computed_dataset.b.shape)
    self.assertEqual((2, 4), computed_dataset.c.shape)
    # non-loaded variables stay lazy (dask); loaded ones are materialized
    self.assertIsInstance(computed_dataset.a.data, da.Array)
    self.assertIsInstance(computed_dataset.b.data, da.Array)
    self.assertIsInstance(computed_dataset.c.data, np.ndarray)  # load=True --> load c as numpy array
    self.assertIsInstance(computed_dataset.d.data, np.ndarray)  # load=True --> load d as numpy array
    self.assertIn('expression', computed_dataset.c.attrs)
    self.assertEqual((2, 4), computed_dataset.d.shape)
    self.assertIn('expression', computed_dataset.d.attrs)
    np.testing.assert_array_almost_equal(computed_dataset.a.values,
                                         np.array([[0.1, 0.2, 0.4, 0.1], [0.5, 0.1, 0.2, 0.3]]))
    np.testing.assert_array_almost_equal(computed_dataset.b.values,
                                         np.array([[0.4, 0.3, 0.2, 0.4], [0.1, 0.2, 0.5, 0.1]]))
    # c = a + b elementwise
    np.testing.assert_array_almost_equal(computed_dataset.c.values,
                                         np.array([[0.5, 0.5, 0.6, 0.5], [0.6, 0.3, 0.7, 0.4]]))
    # d is masked (NaN) where the valid-pixel expression c > 0.4 fails
    np.testing.assert_array_almost_equal(computed_dataset.d.values,
                                         np.array([[0.04, 0.06, 0.08, 0.04], [0.05, nan, 0.1, nan]]))