

Python Dataset.drop Method Code Examples

This article collects typical usage examples of the xarray.Dataset.drop method in Python. If you are wondering what Dataset.drop does, how to call it, or what it looks like in real code, the hand-picked examples below should help. You may also want to look at further usage examples for the containing class, xarray.Dataset.


Six code examples of the Dataset.drop method are shown below, sorted by popularity by default.
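
Before the examples, here is a minimal sketch of what the method does: Dataset.drop returns a new dataset with the named variables or coordinates removed, leaving the original untouched. The dataset and variable names below are made up for illustration; note that more recent xarray releases deprecate drop in favour of drop_vars and drop_sel.

import numpy as np
import xarray as xr

# A small illustrative dataset (names are hypothetical)
ds = xr.Dataset(
    {'temperature': (('time', 'lat'), np.zeros((2, 3)))},
    coords={'time': [0, 1], 'lat': [10., 20., 30.]})

without_var = ds.drop('temperature')    # drop a data variable
without_coord = ds.drop('time')         # drop the 'time' coordinate (the dimension remains)

print(list(without_var.data_vars))      # []
print('time' in without_coord.coords)   # False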

Example 1: diff

# Required import: from xarray import Dataset [as alias]
# Or: from xarray.Dataset import drop [as alias]
def diff(ds: xr.Dataset,
         ds2: xr.Dataset,
         monitor: Monitor = Monitor.NONE) -> xr.Dataset:
    """
    Calculate the difference of two datasets (ds - ds2). This is done by
    matching variable names in the two datasets against each other and taking
    the difference of matching variables.

    If lat/lon/time extents differ between the datasets, the default behavior
    is to take the intersection of the datasets and run subtraction on that.
    However, broadcasting is possible. E.g. ds(lat/lon/time) - ds(lat/lon) is
    valid. In this case the subtrahend will be stretched to the size of
    ds(lat/lon/time) so that it can be subtracted. This also works if the
    subtrahend is a single time slice of arbitrary temporal position. In this
    case, the time dimension will be squeezed out leaving a lat/lon dataset.

    :param ds: The minuend dataset
    :param ds2: The subtrahend dataset
    :param monitor: a progress monitor.
    :return: The difference dataset
    """
    try:
        # Times do not intersect
        if 0 == len(ds.time - ds2.time) and \
                len(ds.time) == len(ds2.time):  # Times are the same length
            # If the datasets don't intersect in time dimension, a naive difference
            # would return empty data variables. Hence, the time coordinate has to
            # be dropped beforehand
            ds = ds.drop('time')
            ds2 = ds2.drop('time')
            return ds - ds2
    except AttributeError:
        # It is likely that the one operand is a lat/lon array that can be
        # broadcast against the other operand
        pass

    try:
        if 1 == len(ds2.time):
            # The subtrahend is a single time slice -> squeeze the 'time' dimension so
            # it can be broadcast along the minuend
            ds2 = ds2.squeeze('time', drop=True)
    except AttributeError:
        # Doesn't have a time dimension already
        pass
    except TypeError as e:
        if 'unsized object' in str(e):
            # The 'time' variable is a scalar
            pass
        else:
            raise TypeError(str(e))

    with monitor.observing("Subtract datasets"):
        diff = ds - ds2

    return diff
Developer: CCI-Tools, Project: ect-core, Lines: 57, Source: arithmetics.py
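
The early-return branch above relies on xarray aligning datasets on their coordinate labels before arithmetic: when the two time axes share no labels, a naive ds - ds2 produces empty variables. The standalone sketch below (synthetic data, not part of ect-core) illustrates why the 'time' coordinate is dropped first.

import numpy as np
import pandas as pd
import xarray as xr

ds = xr.Dataset({'tas': ('time', np.array([1.0, 2.0, 3.0]))},
                coords={'time': pd.date_range('2000-01-01', periods=3)})
ds2 = xr.Dataset({'tas': ('time', np.array([0.5, 0.5, 0.5]))},
                 coords={'time': pd.date_range('2001-01-01', periods=3)})

naive = ds - ds2                              # aligns on 'time' -> no common labels
print(naive.tas.size)                         # 0

diffed = ds.drop('time') - ds2.drop('time')   # same length, so subtraction is positional
print(diffed.tas.values)                      # [0.5 1.5 2.5]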

Example 2: test_coordinates_encoding

# Required import: from xarray import Dataset [as alias]
# Or: from xarray.Dataset import drop [as alias]
    def test_coordinates_encoding(self):
        def equals_latlon(obj):
            return obj == 'lat lon' or obj == 'lon lat'

        original = Dataset({'temp': ('x', [0, 1]), 'precip': ('x', [0, -1])},
                           {'lat': ('x', [2, 3]), 'lon': ('x', [4, 5])})
        with self.roundtrip(original) as actual:
            self.assertDatasetIdentical(actual, original)
        with create_tmp_file() as tmp_file:
            original.to_netcdf(tmp_file)
            with open_dataset(tmp_file, decode_coords=False) as ds:
                self.assertTrue(equals_latlon(ds['temp'].attrs['coordinates']))
                self.assertTrue(equals_latlon(ds['precip'].attrs['coordinates']))
                self.assertNotIn('coordinates', ds.attrs)
                self.assertNotIn('coordinates', ds['lat'].attrs)
                self.assertNotIn('coordinates', ds['lon'].attrs)

        modified = original.drop(['temp', 'precip'])
        with self.roundtrip(modified) as actual:
            self.assertDatasetIdentical(actual, modified)
        with create_tmp_file() as tmp_file:
            modified.to_netcdf(tmp_file)
            with open_dataset(tmp_file, decode_coords=False) as ds:
                self.assertTrue(equals_latlon(ds.attrs['coordinates']))
                self.assertNotIn('coordinates', ds['lat'].attrs)
                self.assertNotIn('coordinates', ds['lon'].attrs)
Developer: ashang, Project: xarray, Lines: 28, Source: test_backends.py
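
The behaviour exercised by this test can be reproduced outside the test harness. The sketch below is a hedged, standalone version (file names are illustrative, and details may vary between xarray versions): it writes the dataset to netCDF and checks where the CF 'coordinates' attribute ends up once the data variables have been dropped.

import xarray as xr

original = xr.Dataset({'temp': ('x', [0, 1])},
                      {'lat': ('x', [2, 3]), 'lon': ('x', [4, 5])})
original.to_netcdf('with_vars.nc')
with xr.open_dataset('with_vars.nc', decode_coords=False) as ds:
    print(ds['temp'].attrs['coordinates'])    # 'lat lon' (or 'lon lat')

# With all data variables dropped, the attribute moves to the global attributes
coords_only = original.drop('temp')
coords_only.to_netcdf('coords_only.nc')
with xr.open_dataset('coords_only.nc', decode_coords=False) as ds:
    print(ds.attrs['coordinates'])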

Example 3: _normalize_lat_lon_2d

# Required import: from xarray import Dataset [as alias]
# Or: from xarray.Dataset import drop [as alias]
def _normalize_lat_lon_2d(ds: xr.Dataset) -> xr.Dataset:
    """
    Detect 2D 'lat', 'lon' variables that span an equirectangular grid. Then:
    Drop the original 'lat', 'lon' variables.
    Rename the original dimension names of the 'lat', 'lon' variables, usually ('y', 'x'), to ('lat', 'lon').
    Insert new 1D 'lat', 'lon' coordinate variables with dimensions 'lat' and 'lon', respectively.
    :param ds: some xarray dataset
    :return: a normalized xarray dataset, or the original one
    """
    if not ('lat' in ds and 'lon' in ds):
        return ds

    lat_var = ds['lat']
    lon_var = ds['lon']

    lat_dims = lat_var.dims
    lon_dims = lon_var.dims
    if lat_dims != lon_dims:
        return ds

    spatial_dims = lon_dims
    if len(spatial_dims) != 2:
        return ds

    x_dim_name = spatial_dims[-1]
    y_dim_name = spatial_dims[-2]

    lat_data_1 = lat_var[:, 0]
    lat_data_2 = lat_var[:, -1]
    lon_data_1 = lon_var[0, :]
    lon_data_2 = lon_var[-1, :]

    equal_lat = np.allclose(lat_data_1, lat_data_2, equal_nan=True)
    equal_lon = np.allclose(lon_data_1, lon_data_2, equal_nan=True)

    # Drop lat/lon in any case. If not (equal_lat and equal_lon), subset_spatial_impl will subsequently
    # fail with a ValidationError.

    ds = ds.drop(['lon', 'lat'])

    if not (equal_lat and equal_lon):
        return ds

    ds = ds.rename({
        x_dim_name: 'lon',
        y_dim_name: 'lat',
    })

    ds = ds.assign_coords(lon=np.array(lon_data_1), lat=np.array(lat_data_1))

    return ds
Developer: CCI-Tools, Project: ect-core, Lines: 53, Source: opimpl.py
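
For reference, here is a standalone sketch of the normalization this function performs, using synthetic data rather than anything from ect-core: build 2-D lat/lon variables on an equirectangular grid, then drop them, rename the spatial dimensions, and attach 1-D coordinate variables.

import numpy as np
import xarray as xr

lon_1d = np.array([10., 20., 30.])
lat_1d = np.array([50., 60.])
lon_2d, lat_2d = np.meshgrid(lon_1d, lat_1d)      # both shaped ('y', 'x')

ds = xr.Dataset({'sst': (('y', 'x'), np.random.rand(2, 3)),
                 'lat': (('y', 'x'), lat_2d),
                 'lon': (('y', 'x'), lon_2d)})

# The same steps _normalize_lat_lon_2d takes once it has verified the grid:
ds = ds.drop(['lon', 'lat'])                      # drop the 2-D variables
ds = ds.rename({'x': 'lon', 'y': 'lat'})          # rename the spatial dimensions
ds = ds.assign_coords(lon=lon_1d, lat=lat_1d)     # attach 1-D coordinate variables
print(ds.coords)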

Example 4: test_roundtrip_coordinates

# Required import: from xarray import Dataset [as alias]
# Or: from xarray.Dataset import drop [as alias]
    def test_roundtrip_coordinates(self):
        original = Dataset({'foo': ('x', [0, 1])},
                           {'x': [2, 3], 'y': ('a', [42]), 'z': ('x', [4, 5])})

        with self.roundtrip(original) as actual:
            self.assertDatasetIdentical(original, actual)

        expected = original.drop('foo')
        with self.roundtrip(expected) as actual:
            self.assertDatasetIdentical(expected, actual)

        expected = original.copy()
        expected.attrs['coordinates'] = 'something random'
        with self.assertRaisesRegexp(ValueError, 'cannot serialize'):
            with self.roundtrip(expected):
                pass

        expected = original.copy(deep=True)
        expected['foo'].attrs['coordinates'] = 'something random'
        with self.assertRaisesRegexp(ValueError, 'cannot serialize'):
            with self.roundtrip(expected):
                pass
Developer: ashang, Project: xarray, Lines: 24, Source: test_backends.py

Example 5: normalize_missing_time

# Required import: from xarray import Dataset [as alias]
# Or: from xarray.Dataset import drop [as alias]
def normalize_missing_time(ds: xr.Dataset) -> xr.Dataset:
    """
    Add a time coordinate variable and its associated bounds coordinate variable
    if temporal CF attributes ``time_coverage_start`` and ``time_coverage_end``
    are given but the time dimension is missing.

    The new time coordinate variable will be named ``time`` with dimension ['time'] and shape [1].
    The time bounds coordinate variable will be named ``time_bnds`` with dimensions ['time', 'bnds'] and shape [1, 2].
    Both are of data type ``datetime64``.

    :param ds: Dataset to adjust
    :return: Adjusted dataset
    """
    time_coverage_start = ds.attrs.get('time_coverage_start')
    if time_coverage_start is not None:
        # noinspection PyBroadException
        try:
            time_coverage_start = pd.to_datetime(time_coverage_start)
        except BaseException:
            pass

    time_coverage_end = ds.attrs.get('time_coverage_end')
    if time_coverage_end is not None:
        # noinspection PyBroadException
        try:
            time_coverage_end = pd.to_datetime(time_coverage_end)
        except BaseException:
            pass

    if not time_coverage_start and not time_coverage_end:
        # Can't do anything
        return ds

    if 'time' in ds:
        time = ds.time
        if not time.dims:
            ds = ds.drop('time')
        elif len(time.dims) == 1:
            time_dim_name = time.dims[0]
            is_time_used_as_dim = any([(time_dim_name in ds[var_name].dims) for var_name in ds.data_vars])
            if is_time_used_as_dim:
                # It seems we already have valid time coordinates
                return ds
            time_bnds_var_name = time.attrs.get('bounds')
            if time_bnds_var_name in ds:
                ds = ds.drop(time_bnds_var_name)
            ds = ds.drop('time')
            ds = ds.drop([var_name for var_name in ds.coords if time_dim_name in ds.coords[var_name].dims])

    if time_coverage_start or time_coverage_end:
        # noinspection PyBroadException
        try:
            ds = ds.expand_dims('time')
        except BaseException as e:
            warnings.warn(f'failed to add time dimension: {e}')

        if time_coverage_start and time_coverage_end:
            time_value = time_coverage_start + 0.5 * (time_coverage_end - time_coverage_start)
        else:
            time_value = time_coverage_start or time_coverage_end

        new_coord_vars = dict(time=xr.DataArray([time_value], dims=['time']))

        if time_coverage_start and time_coverage_end:
            has_time_bnds = 'time_bnds' in ds.coords or 'time_bnds' in ds
            if not has_time_bnds:
                new_coord_vars.update(time_bnds=xr.DataArray([[time_coverage_start, time_coverage_end]],
                                                             dims=['time', 'bnds']))

        ds = ds.assign_coords(**new_coord_vars)

        ds.coords['time'].attrs['long_name'] = 'time'
        ds.coords['time'].attrs['standard_name'] = 'time'
        ds.coords['time'].encoding['units'] = 'days since 1970-01-01'
        if 'time_bnds' in ds.coords:
            ds.coords['time'].attrs['bounds'] = 'time_bnds'
            ds.coords['time_bnds'].attrs['long_name'] = 'time'
            ds.coords['time_bnds'].attrs['standard_name'] = 'time'
            ds.coords['time_bnds'].encoding['units'] = 'days since 1970-01-01'

    return ds
Developer: CCI-Tools, Project: ect-core, Lines: 83, Source: opimpl.py
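
To see the effect on a concrete dataset, the sketch below (attribute values are invented) applies the core steps by hand, mirroring what normalize_missing_time does: parse the CF coverage attributes, take the midpoint of the coverage period, expand a 'time' dimension, and attach the new coordinate variables.

import numpy as np
import pandas as pd
import xarray as xr

ds = xr.Dataset({'sst': (('lat', 'lon'), np.zeros((2, 2)))},
                coords={'lat': [0., 1.], 'lon': [10., 11.]},
                attrs={'time_coverage_start': '2000-01-01',
                       'time_coverage_end': '2000-02-01'})

start = pd.to_datetime(ds.attrs['time_coverage_start'])
end = pd.to_datetime(ds.attrs['time_coverage_end'])
time_value = start + 0.5 * (end - start)          # centre of the coverage period

ds = ds.expand_dims('time')
ds = ds.assign_coords(time=xr.DataArray([time_value], dims=['time']),
                      time_bnds=xr.DataArray([[start, end]], dims=['time', 'bnds']))
print(ds.time.values, ds.time_bnds.values)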

Example 6: __plot_vals

# Required import: from xarray import Dataset [as alias]
# Or: from xarray.Dataset import drop [as alias]

#......... part of the code omitted here .........
        _storage = np.ma.masked_where(np.isnan(_storage), _storage)


        if plot_deviations:
            cs = bmap.pcolormesh(xx, yy, _storage - bankfull_storage, norm=norm, cmap=cmap)
        else:
            cs = bmap.pcolormesh(xx, yy, _storage, norm=norm, cmap=cmap)


        ext = "both" if plot_deviations else "max"


        cb = bmap.colorbar(cs, location="bottom", format=FuncFormatter(__storage_cb_format_ticklabels), extend=ext)
        cb.ax.set_visible(show_cb)
        cb.ax.set_xlabel(r"${\rm m^3}$")
        ax.set_title(prefix)
        axes.append(ax)
        return _storage




    clevs_timings = range(13)
    norm_timings = BoundaryNorm(clevs_timings, len(clevs_timings) - 1)
    cmap_timings = cm.get_cmap("Spectral", len(clevs_timings) - 1)


    def __plot_timings(prefix, show_cb=False, row=0, col=0, the_storage=None):
        _dates = ds["{}_dates.month".format(prefix)][:]

        ax = fig.add_subplot(gs[row, col])

        if the_storage is not None:
            _dates = _dates.where(~np.isnan(the_storage))
            _dates = np.ma.masked_where(the_storage.mask, _dates)

        _dates = maskoceans(lons2d_, lats2d, _dates)

        cs = bmap.pcolormesh(xx, yy, _dates, norm=norm_timings, cmap=cmap_timings)
        cb = bmap.colorbar(cs, location="bottom", format=FuncFormatter(__timing_cb_format_ticklabels))

        if show_cb:
            cb.ax.set_xlabel("month")

            maj_locator = cb.ax.xaxis.get_major_locator()


            print("old tick locs = {}".format(maj_locator.locs))
            maj_locator.locs = __get_new_tick_locs_middle(maj_locator.locs, len(clevs_timings) - 1, shift_direction=-1)
            print("new tick locs = {}".format(maj_locator.locs))


            for tick_line in cb.ax.xaxis.get_ticklines():
                tick_line.set_visible(False)

        cb.ax.set_visible(show_cb)
        ax.set_title("{} timing".format(prefix))
        axes.append(ax)


    # minimum storage
    min_storage = __plot_storage("min", show_cb=True, row=0, col=0, plot_deviations=plot_deviations_from_bankfull_storage)

    # maximum storage
    max_storage = __plot_storage("max", row=0, col=1, plot_deviations=plot_deviations_from_bankfull_storage)

    # average storage
    avg_storage = __plot_storage("avg", row=0, col=2, plot_deviations=plot_deviations_from_bankfull_storage)


    # bankfull storage (if provided)
    if bankfull_storage is not None:
        bf_storage_varname = "bankfull_{}".format(storage_var_name)

        ds[bf_storage_varname] = xarray.DataArray(data=bankfull_storage, dims=("x", "y"))
        __plot_storage("bankfull", row=1, col=2, plot_deviations=False, show_cb=True)
        ds = ds.drop(bf_storage_varname)  # drop() returns a new Dataset, so re-assign to actually remove the temporary variable

    # tmin
    dates_clevs = range(0, 13)
    __plot_timings("min", row=1, col=0, show_cb=True, the_storage=min_storage)


    # tmax
    __plot_timings("max", row=1, col=1, the_storage=max_storage)


    for i, ax in enumerate(axes):
        bmap.drawcoastlines(linewidth=0.1, ax=ax)
        if region_of_interest_shp is not None:
            bmap.readshapefile(region_of_interest_shp[:-4], "basin", ax=ax, color="k", linewidth=2)



    if plot_deviations_from_bankfull_storage:
        label = label + "_bf_storage_anomalies"

    img_file = img_dir / "{}.png".format(label)
    fig.savefig(str(img_file), dpi=400, bbox_inches="tight")
    plt.close(fig)
Developer: guziy, Project: RPN, Lines: 104, Source: calculate_flood_storage.py


Note: The xarray.Dataset.drop examples in this article were compiled by 纯净天空 from open-source code and documentation hosted on platforms such as GitHub and MSDocs. The snippets are taken from open-source projects contributed by their respective authors, who retain the copyright; please consult the corresponding project's license before distributing or using the code, and do not repost without permission.