本文整理汇总了Python中xarray.DataArray.from_dict方法的典型用法代码示例。如果您正苦于以下问题:Python DataArray.from_dict方法的具体用法?Python DataArray.from_dict怎么用?Python DataArray.from_dict使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类xarray.DataArray的用法示例。
在下文中一共展示了DataArray.from_dict方法的3个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: main
# Required module: from xarray import DataArray  [as alias]
# Or: from xarray.DataArray import from_dict  [as alias]
def main():
    """Demonstrate DataArray.from_dict with a time coordinate named like,
    and unlike, its dimension, then resample both to daily frequency.

    The second case (coordinate "time" on dimension "t") is expected to fail
    when resampling, which is the point of this example.
    """
    N = 48
    data = range(N)
    dates = [datetime(2001, 1, 1, 15) + timedelta(hours=i) for i in range(N)]

    # time variable is called the same as time dimension: works
    scheme1 = {
        "dims": "t",
        "coords": {"t": {"dims": "t", "data": dates}},
        "data": data
    }
    # time variable is called differently from the time dimension: does not work
    scheme2 = {
        "dims": "t",
        "coords": {"time": {"dims": "t", "data": dates}},
        "data": data
    }

    a1 = DataArray.from_dict(scheme1)
    print(a1)
    # NOTE: the positional resample(freq, dim) form was removed in xarray 0.12;
    # the keyword form resample(dim=freq) followed by a reduction replaces it.
    a1_daily = a1.resample(t="D").mean()
    print(a1_daily)

    assert isinstance(a1_daily, DataArray)

    print([t for t in a1_daily.coords["t"].values])

    print("=======================")
    a2 = DataArray.from_dict(scheme2)
    print(a2)
    # resampling along a non-dimension coordinate name — expected to raise
    print(a2.resample(time="D").mean())
示例2: read_data_for_period
# Required module: from xarray import DataArray  [as alias]
# Or: from xarray.DataArray import from_dict  [as alias]
# ......... part of the code is omitted here: the enclosing `def` header and any
# earlier branches are not visible in this excerpt, and the original indentation
# was lost in extraction — nesting below is therefore approximate .........
data_source_types.ALL_VARS_IN_A_FOLDER_IN_NETCDF_FILES_OPEN_EACH_FILE_SEPARATELY]:
# No explicit varname -> file mapping: open every netcdf file in the folder as one dataset.
if self.varname_to_file_path is None:
base_folder = Path(self.base_folder)
ds = xarray.open_mfdataset(str(base_folder / "*.nc*"), data_vars="minimal")
else:
## In the case of very different netcdf files in the folder
## i.e. data_source_types.ALL_VARS_IN_A_FOLDER_IN_NETCDF_FILES_OPEN_EACH_FILE_SEPARATELY
ds = xarray.open_dataset(self.varname_to_file_path[varname_internal])
print("reading {} from {}".format(varname_internal, self.varname_to_file_path[varname_internal]))
# select the variable by name and time
# print(period.start, period.end)
# print(ds[self.varname_mapping[varname_internal]])
# try both time and t
try:
var = ds[self.varname_mapping[varname_internal]].sel(time=slice(period.start, period.end)).squeeze()
except ValueError:
var = ds[self.varname_mapping[varname_internal]].sel(t=slice(period.start, period.end)).squeeze()
# Locate the time coordinate (name contains "time", or is exactly "t") and keep its values.
for cname, cvals in var.coords.items():
if "time" in cname.lower() or "t" == cname.lower():
dates = cvals
# Lazily discover lon/lat grids on first read; 1-D lon implies a regular grid
# that is expanded to 2-D with meshgrid.
if self.lons is None:
need_to_create_meshgrid = False
for cname, cvals in var.coords.items():
if "lon" in cname.lower():
lons = cvals.values
if lons.ndim == 1:
need_to_create_meshgrid = True
if "lat" in cname.lower():
lats = cvals.values
if need_to_create_meshgrid:
lats, lons = np.meshgrid(lats, lons)
self.lons, self.lats = lons, lats
# if still could not find longitudes and latitudes, search the data variables too
if self.lons is None:
for vname, ncvar in ds.items():
if "lon" in vname.lower():
self.lons = ncvar.values
if "lat" in vname.lower():
self.lats = ncvar.values
# if still could not find => raise an exception
if self.lons is None:
raise IOError(f"Could not find lon/lat fields in the\n {ds}")
# 4-D variables: select the configured vertical level, leaving a (t, x, y) slab.
if var.ndim > 3:
var = var[:, self.level_mapping[varname_internal].value, :, :]
# If the trailing dims already match the lon grid shape, take values as-is;
# otherwise transpose the last two axes (assumes a mismatch means (y, x) order — TODO confirm).
if var.shape[-2:] == self.lons.shape:
data_list = var.values
else:
if var.ndim == 3:
data_list = np.transpose(var.values, axes=(0, 2, 1))
elif var.ndim == 2:
data_list = np.transpose(var.values)
else:
raise Exception(f"{var.ndim}-dimensional variables are not supported")
# close the dataset
ds.close()
else:
raise NotImplementedError(
"reading of the layout type {} is not implemented yet.".format(self.data_source_type))
# print(dates[0], dates[1], "...", dates[-1], len(dates))
# Construct a dictionary for xarray.DataArray ...
vardict = {
"coords": {
"t": {"dims": "t", "data": dates},
"lon": {"dims": ("x", "y"), "data": self.lons},
"lat": {"dims": ("x", "y"), "data": self.lats},
},
"dims": ("t", "x", "y"),
"data": data_list,
"name": varname_internal
}
# Nothing read for the requested period => fail loudly with context.
if len(data_list) == 0:
print("retreived dates: {}".format(dates))
raise IOError(
"Could not find any {} data for the period {}..{} in {}".format(self.varname_mapping[varname_internal],
period.start, period.end,
self.base_folder))
# Convert units based on supplied mappings
return self.multipliers[varname_internal] * DataArray.from_dict(vardict) + self.offsets[varname_internal]
示例3: read_data_for_period_3d
# Required module: from xarray import DataArray  [as alias]
# Or: from xarray.DataArray import from_dict  [as alias]
def read_data_for_period_3d(self, period: Period, varname_internal: str) -> DataArray:
    """
    Read 3D fields (time, level, x, y) for the given period.

    :param period: the period to read; must expose ``range("months")``, ``start`` and ``end``
    :param varname_internal: internal variable name, resolved through the
        ``varname_to_file_prefix`` / ``varname_mapping`` dictionaries
    :return: a DataArray with dims ("t", "z", "x", "y"), unit-converted via
        the configured multipliers/offsets
    :raises IOError: if no data could be found for the period
    :raises NotImplementedError: for unsupported data source layouts
    """
    data_list = []
    dates = []
    vert_levels = None
    vert_level_units = None

    if self.data_source_type == data_source_types.ALL_VARS_IN_A_FOLDER_OF_RPN_FILES:
        raise NotImplementedError()
    elif self.data_source_type == data_source_types.SAMPLES_FOLDER_FROM_CRCM_OUTPUT:
        assert varname_internal in self.varname_to_file_prefix, \
            f"Could not find {varname_internal} in {self.varname_to_file_prefix}"

        filename_prefix = self.varname_to_file_prefix[varname_internal]

        # "dp" files carry pressure levels; record their units (millibars)
        if filename_prefix in ["dp", ]:
            vert_level_units = "mb"

        for month_start in period.range("months"):
            year, m = month_start.year, month_start.month

            # Skip years or months that are not available
            if (year, m) not in self.yearmonth_to_path:
                print(f"Skipping {year}-{m}")
                continue

            month_dir = self.yearmonth_to_path[(year, m)]

            for f in sorted(month_dir.iterdir()):
                # Skip the file for time step 0 (name ends in eight zeros before the last char)
                if f.name[-9:-1] == "0" * 8:
                    continue

                # read only files with the specified prefix
                if not f.name.startswith(filename_prefix):
                    continue

                with RPN(str(f)) as r:
                    print(f"Reading {self.varname_mapping[varname_internal]} from {f}")

                    data_rvar = r.variables[self.varname_mapping[varname_internal]]
                    assert isinstance(data_rvar, rpn.RPNVariable)

                    dates.extend(data_rvar.sorted_dates)

                    # vertical levels are assumed identical across files: take the first
                    if vert_levels is None:
                        vert_levels = data_rvar.sorted_levels

                    data_list.append(data_rvar[:])

                    # lazily initialize lon/lat/projection info from the first file read
                    if self.lons is None:
                        self.__update_bmp_info_from_rpnfile_obj(r)
    else:
        raise NotImplementedError()

    data_list = np.concatenate(data_list, axis=0)
    print(f"data_list.shape={data_list.shape}, var_name={varname_internal}")
    # data_list = np.transpose(data_list, axes=(0, 2, 3, 1))

    # Construct a dictionary for xarray.DataArray ...
    vardict = {
        "coords": {
            "t": {"dims": "t", "data": dates},
            "lon": {"dims": ("x", "y"), "data": self.lons},
            "lat": {"dims": ("x", "y"), "data": self.lats},
            "lev": {"dims": ("z",), "data": vert_levels}
        },
        "dims": ("t", "z", "x", "y"),
        "data": data_list,
        "name": varname_internal
    }

    if vert_level_units is not None:
        vardict["coords"]["lev"].update({"attrs": {"units": vert_level_units}})

    # Nothing read for the requested period => fail loudly with context.
    if len(data_list) == 0:
        print("retrieved dates: {}".format(dates))
        raise IOError(
            "Could not find any {} data for the period {}..{} in {}".format(self.varname_mapping[varname_internal],
                                                                            period.start, period.end,
                                                                            self.base_folder))

    # Convert units based on supplied mappings
    return self.multipliers[varname_internal] * DataArray.from_dict(vardict) + self.offsets[varname_internal]