本文整理汇总了Python中xarray.concat函数的典型用法代码示例。如果您正苦于以下问题:Python concat函数的具体用法?Python concat怎么用?Python concat使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了concat函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: test_concat
def test_concat(self):
    """Exercise Dataset concat round-trips: split/rejoin, groupby pieces,
    the `coords` argument modes, and `dim` taking precedence over a
    same-named dataset variable."""
    # TODO: simplify and split this test case
    # drop the third dimension to keep things relatively understandable
    data = create_test_data()
    for k in list(data):
        if 'dim3' in data[k].dims:
            del data[k]

    # splitting along dim1 and concatenating back must be lossless
    split_data = [data.isel(dim1=slice(3)),
                  data.isel(dim1=slice(3, None))]
    self.assertDatasetIdentical(data, concat(split_data, 'dim1'))

    def rectify_dim_order(dataset):
        # return a new dataset with all variable dimensions transposed into
        # the order in which they are found in `data`
        return Dataset(dict((k, v.transpose(*data[k].dims))
                            for k, v in iteritems(dataset.data_vars)),
                       dataset.coords, attrs=dataset.attrs)

    # groupby with squeeze=False yields size-1 slices; concat restores data
    for dim in ['dim1', 'dim2']:
        datasets = [g for _, g in data.groupby(dim, squeeze=False)]
        self.assertDatasetIdentical(data, concat(datasets, dim))

    # NOTE: `datasets` below still holds the dim2 groups from the loop above
    dim = 'dim2'
    self.assertDatasetIdentical(
        data, concat(datasets, data[dim]))
    self.assertDatasetIdentical(
        data, concat(datasets, data[dim], coords='minimal'))

    # squeeze=True drops the concat dim; pass coords explicitly to restore
    datasets = [g for _, g in data.groupby(dim, squeeze=True)]
    concat_over = [k for k, v in iteritems(data.coords)
                   if dim in v.dims and k != dim]
    actual = concat(datasets, data[dim], coords=concat_over)
    self.assertDatasetIdentical(data, rectify_dim_order(actual))

    actual = concat(datasets, data[dim], coords='different')
    self.assertDatasetIdentical(data, rectify_dim_order(actual))

    # make sure the coords argument behaves as expected
    data.coords['extra'] = ('dim4', np.arange(3))
    for dim in ['dim1', 'dim2']:
        datasets = [g for _, g in data.groupby(dim, squeeze=True)]
        # 'all': the extra coord is broadcast along the concat dim
        actual = concat(datasets, data[dim], coords='all')
        expected = np.array([data['extra'].values
                             for _ in range(data.dims[dim])])
        self.assertArrayEqual(actual['extra'].values, expected)

        # 'different'/'minimal': the (equal) extra coord stays un-stacked
        actual = concat(datasets, data[dim], coords='different')
        self.assertDataArrayEqual(data['extra'], actual['extra'])
        actual = concat(datasets, data[dim], coords='minimal')
        self.assertDataArrayEqual(data['extra'], actual['extra'])

    # verify that the dim argument takes precedence over
    # concatenating dataset variables of the same name
    dim = (2 * data['dim1']).rename('dim1')
    datasets = [g for _, g in data.groupby('dim1', squeeze=False)]
    expected = data.copy()
    expected['dim1'] = dim
    self.assertDatasetIdentical(expected, concat(datasets, dim))
示例2: test_concat_size0
def test_concat_size0(self):
    """Concatenating a zero-length slice with a full dataset is a no-op,
    regardless of order."""
    full = create_test_data()
    empty = full.isel(dim1=slice(0, 0))
    # empty piece first, then the reversed order
    for pieces in ([empty, full], [full, empty]):
        self.assertDatasetIdentical(full, concat(pieces, 'dim1'))
示例3: multi_concat
def multi_concat(results, dims):
    """Concatenate a nested list of xarray objects along several dimensions.

    `results` is nested one level per entry in `dims`; the outermost list
    corresponds to `dims[0]`.
    """
    if len(dims) == 1:
        # innermost level: a flat list of xarray objects
        return xr.concat(results, dim=dims[0])
    # collapse each sub-list along the remaining dims, then join the
    # collapsed pieces along the outermost dim
    collapsed = [multi_concat(sub, dims[1:]) for sub in results]
    return xr.concat(collapsed, dim=dims[0])
示例4: test_concat_coords
def test_concat_coords(self):
    """`coords='different'/'all'/explicit list` stack the scalar coord 'c';
    'minimal'/[] refuse because its values differ across the pieces."""
    data = Dataset({"foo": ("x", np.random.randn(10))})
    expected = data.assign_coords(c=("x", [0] * 5 + [1] * 5))
    first = data.isel(x=slice(5)).assign_coords(c=0)
    second = data.isel(x=slice(5, None)).assign_coords(c=1)
    halves = [first, second]
    for mode in ("different", "all", ["c"]):
        self.assertDatasetIdentical(expected,
                                    concat(halves, dim="x", coords=mode))
    for mode in ("minimal", []):
        with self.assertRaisesRegexp(ValueError, "not equal across"):
            concat(halves, dim="x", coords=mode)
示例5: test_concat_constant_index
def test_concat_constant_index(self):
    """GH425: scalar data vars are stacked along the new dim even when the
    index value is constant."""
    ds1 = Dataset({"foo": 1.5}, {"y": 1})
    ds2 = Dataset({"foo": 2.5}, {"y": 1})
    expected = Dataset({"foo": ("y", [1.5, 2.5]), "y": [1, 1]})
    for mode in ("different", "all", ["foo"]):
        actual = concat([ds1, ds2], "y", data_vars=mode)
        self.assertDatasetIdentical(expected, actual)
    # 'minimal' refuses to stack the unequal scalar variable
    with self.assertRaisesRegexp(ValueError, "not equal across datasets"):
        concat([ds1, ds2], "y", data_vars="minimal")
示例6: test_concat_encoding
def test_concat_encoding(self):
    """Regression test for GH1297: concat must carry over `.encoding` from
    the first input, for both DataArray and Dataset."""
    ds = Dataset({'foo': (['x', 'y'], np.random.random((2, 3))),
                  'bar': (['x', 'y'], np.random.random((2, 3)))},
                 {'x': [0, 1]})
    foo = ds['foo']
    foo.encoding = {"complevel": 5}
    ds.encoding = {"unlimited_dims": 'x'}
    doubled_da = concat([foo, foo], dim="x")
    doubled_ds = concat([ds, ds], dim="x")
    assert doubled_da.encoding == foo.encoding
    assert doubled_ds.encoding == ds.encoding
示例7: test_concat_do_not_promote
def test_concat_do_not_promote(self):
    """GH438: a scalar coord equal across inputs stays scalar after concat;
    under coords='minimal' a differing scalar coord is an error."""
    matching = [Dataset({"y": ("t", [1])}, {"x": 1}),
                Dataset({"y": ("t", [2])}, {"x": 1})]
    expected = Dataset({"y": ("t", [1, 2])}, {"x": 1, "t": [0, 0]})
    self.assertDatasetIdentical(expected, concat(matching, "t"))

    mismatched = [Dataset({"y": ("t", [1])}, {"x": 1}),
                  Dataset({"y": ("t", [2])}, {"x": 2})]
    with self.assertRaises(ValueError):
        concat(mismatched, "t", coords="minimal")
示例8: test_concat
def test_concat(self):
    """DataArray concat: stacking along a new dim, groupby round-trips,
    index objects as the `dim` argument, and the error modes."""
    ds = Dataset({"foo": (["x", "y"], np.random.random((10, 20))),
                  "bar": (["x", "y"], np.random.random((10, 20)))})
    foo = ds["foo"]
    bar = ds["bar"]

    # from dataset array: stack two arrays along a brand-new leading dim
    expected = DataArray(np.array([foo.values, bar.values]),
                         dims=["w", "x", "y"])
    self.assertDataArrayEqual(expected, concat([foo, bar], "w"))

    # from iteration: groupby pieces rejoin along the original coordinate
    grouped = [g for _, g in foo.groupby("x")]
    self.assertDataArrayIdentical(foo, concat(grouped, ds["x"]))
    # a pandas index also works as the 'dim' argument
    self.assertDataArrayIdentical(foo, concat(grouped, ds.indexes["x"]))

    # explicit index / plain list as the new concat dimension
    expected = foo[:2].rename({"x": "concat_dim"})
    actual = concat([foo[0], foo[1]],
                    pd.Index([0, 1])).reset_coords(drop=True)
    self.assertDataArrayIdentical(expected, actual)
    actual = concat([foo[0], foo[1]], [0, 1]).reset_coords(drop=True)
    self.assertDataArrayIdentical(expected, actual)

    with self.assertRaisesRegexp(ValueError, "not identical"):
        concat([foo, bar], dim="w", compat="identical")
    with self.assertRaisesRegexp(ValueError, "not a valid argument"):
        concat([foo, bar], dim="w", data_vars="minimal")
示例9: test_concat_coords
def test_concat_coords(self):
    """Same contract as the double-quoted variant above: modes that stack
    the differing scalar coord 'c' versus modes that reject it."""
    data = Dataset({'foo': ('x', np.random.randn(10))})
    expected = data.assign_coords(c=('x', [0] * 5 + [1] * 5))
    halves = [data.isel(x=slice(5)).assign_coords(c=0),
              data.isel(x=slice(5, None)).assign_coords(c=1)]
    # these modes concatenate 'c' along x
    for mode in ('different', 'all', ['c']):
        actual = concat(halves, dim='x', coords=mode)
        self.assertDatasetIdentical(expected, actual)
    # these modes require 'c' to be equal across datasets
    for mode in ('minimal', []):
        with self.assertRaisesRegexp(ValueError, 'not equal across'):
            concat(halves, dim='x', coords=mode)
示例10: test_auto_combine_2d
def test_auto_combine_2d(self):
    """auto_combine over a 2x3 nested list must equal manual concat along
    dim1 (within columns) then dim2 (across columns)."""
    ds = create_test_data
    # build expected result column by column, then join the columns
    columns = [concat([ds(i), ds(i + 3)], dim='dim1') for i in range(3)]
    expected = concat(columns, dim='dim2')
    datasets = [[ds(0), ds(1), ds(2)], [ds(3), ds(4), ds(5)]]
    result = auto_combine(datasets, concat_dim=['dim1', 'dim2'])
    assert_equal(result, expected)
示例11: test_concat_twice
def test_concat_twice(self, create_combined_ids, concat_dim):
    """_combine_nd over a 2x3 grid must match manual two-stage concat:
    dim1 first, then the parametrized concat_dim."""
    shape = (2, 3)
    combined_ids = create_combined_ids(shape)
    result = _combine_nd(combined_ids, concat_dims=['dim1', concat_dim])

    ds = create_test_data
    partway = [concat([ds(i), ds(i + 3)], dim='dim1') for i in range(3)]
    expected = concat(partway, dim=concat_dim)
    assert_equal(result, expected)
示例12: add_cyclic
def add_cyclic(varin, dim='nlon'):
    '''Add a cyclic point to CESM data. Preserve datatype: xarray.

    Appends the first slice along *dim* to the end of the array, so the
    data wraps around (e.g. for plotting across the dateline).

    Parameters
    ----------
    varin : xarray.DataArray or xarray.Dataset
        Input data containing dimension *dim*.
    dim : str, optional
        Name of the dimension to wrap (default 'nlon').

    Returns
    -------
    Same type as *varin*, one element longer along *dim*.
    '''
    # Generalized from a hard-coded nlon/nlat/dim_0/dim_1 if/elif chain
    # that silently returned None for any other dim; now any dimension
    # name works, and an unknown dim raises from xarray instead of
    # returning None.
    return xr.concat([varin, varin.isel(**{dim: 0})], dim=dim)
示例13: test_concat_do_not_promote
def test_concat_do_not_promote(self):
    """GH438 (explicit-'t' variant): an identical scalar coord 'x' stays
    scalar; with coords='minimal' a differing 'x' raises."""
    same_x = [Dataset({'y': ('t', [1])}, {'x': 1, 't': [0]}),
              Dataset({'y': ('t', [2])}, {'x': 1, 't': [0]})]
    expected = Dataset({'y': ('t', [1, 2])}, {'x': 1, 't': [0, 0]})
    self.assertDatasetIdentical(expected, concat(same_x, 't'))

    different_x = [Dataset({'y': ('t', [1])}, {'x': 1, 't': [0]}),
                   Dataset({'y': ('t', [2])}, {'x': 2, 't': [0]})]
    with self.assertRaises(ValueError):
        concat(different_x, 't', coords='minimal')
示例14: file_loop
def file_loop(passit):
    """Process one 'cores_*' netCDF file: detect dominant power-blob scales
    per time slice and write a 'coresPower_*' output file.

    Parameters
    ----------
    passit : str
        Path to the input netCDF file; the dataset tag is sliced out of the
        path and the output path is derived by string replacement.
    """
    ds = xr.open_dataset(passit)
    dataset = passit[6:9]  # dataset tag from the path -- assumes fixed prefix; TODO confirm
    ds['tir'].values = ds['tir']
    bloblist = []
    tirlist = []
    lat = ds.lat
    lon = ds.lon
    for ids, day in enumerate(ds['tir']):
        print('id', ids)
        date = day.time
        day.values = day / 100  # presumably stored as value*100; verify against writer
        if np.sum(day.values) == 0:
            continue  # all-zero slice: nothing to detect
        img, nogood, t_thresh_size, t_thresh_cut, pix_nb = powerBlob_utils.filter_img(day.values, 5)
        power = util.waveletT(img, dataset='METEOSAT5K_vera')
        power_out = powerBlob_utils.find_scales_dominant(power, nogood, dataset=dataset)
        if power_out is None:
            continue  # no dominant scales found for this slice
        new_savet = (day.values * 100).astype(np.int16)  # back to int storage units
        bloblist.append(xr.DataArray(power_out.astype(np.int16),
                                     coords={'time': date, 'lat': lat, 'lon': lon},
                                     dims=['lat', 'lon']))  # [np.newaxis, :])
        tirlist.append(xr.DataArray(new_savet,
                                    coords={'time': date, 'lat': lat, 'lon': lon},
                                    dims=['lat', 'lon']))

    ds_mfg = xr.Dataset()
    ds_mfg['blobs'] = xr.concat(bloblist, 'time')
    ds_mfg['tir'] = xr.concat(tirlist, 'time')
    # BUG FIX: .sel returns a new object; the original discarded the result,
    # so the spatial subset was never applied. Keep the returned subset.
    ds_mfg = ds_mfg.sel(lat=slice(5, 12), lon=slice(-13, 13))

    savefile = passit.replace('cores_', 'coresPower_')
    # remove a stale output file if present; ignore "does not exist"
    try:
        os.remove(savefile)
    except OSError:
        pass

    # compress all variables when writing
    comp = dict(zlib=True, complevel=5)
    enc = {var: comp for var in ds_mfg.data_vars}
    ds_mfg.to_netcdf(path=savefile, mode='w', encoding=enc, format='NETCDF4')
    print('Saved ' + savefile)
示例15: add_to_slice
def add_to_slice(da, dim, sl, value):
    """Return *da* with *value* added at the single index *sl* along *dim*.

    Parameters
    ----------
    da : xarray.DataArray
        Input array.
    dim : str
        Dimension name along which to modify one position.
    sl : int
        Index along *dim*; among negative values only -1 is allowed.
    value : scalar or broadcastable
        Amount added to the selected slice.

    Returns
    -------
    xarray.DataArray rebuilt from the unchanged pieces plus the modified
    slice, concatenated along *dim*.

    Raises
    ------
    RuntimeError
        If ``sl < -1``.
    """
    # FIX: validate before doing any indexing work (the original raised
    # only after computing all three pieces).
    if sl < -1:
        raise RuntimeError('slice can not be smaller value than -1')

    # split the array into before, middle and after (before or after is
    # empty when the slice is at the beginning or end)
    before = da[{dim: slice(0, sl)}]
    middle = da[{dim: sl}]
    if sl == -1:
        # slice(sl + 1, None) would be the whole array here, so there is
        # no 'after' piece for the last position
        return xr.concat([before, middle + value], dim=dim)
    after = da[{dim: slice(sl + 1, None)}]
    return xr.concat([before, middle + value, after], dim=dim)