This page collects typical usage examples of the Python method iris.cube.CubeList.append, gathered from open-source projects. If you are unsure what CubeList.append does, how to call it, or what it looks like in real code, the curated examples below should help. You can also explore further usage of the containing class, iris.cube.CubeList.
Six code examples of CubeList.append are shown below, ordered by popularity by default.
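Before the project examples, here is a minimal self-contained sketch of the pattern most of them follow: append cubes that differ only in a scalar coordinate to a CubeList, then merge them into a single cube (all names below are illustrative, not taken from the examples):

import numpy as np
from iris.cube import Cube, CubeList
from iris.coords import DimCoord

cubes = CubeList()
for i in range(3):
    c = Cube(np.array(float(i)), long_name='sample')  # scalar cube
    c.add_aux_coord(DimCoord(i, long_name='index'))   # scalar coord, differs per cube
    cubes.append(c)

merged = cubes.merge_cube()  # the scalar 'index' coords become a new dimension
print(merged.shape)          # -> (3,)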
Example 1: _add_levels

# Required import: from iris.cube import CubeList
# Method used: CubeList.append
from iris.cube import CubeList
from iris.coords import DimCoord

def _add_levels(cube, levels=13):
    clist = CubeList()
    for level in range(levels):
        # Tag each copy of the cube with a scalar model-level coordinate.
        mln = DimCoord(level, standard_name='model_level_number')
        other = cube.copy()
        other.add_aux_coord(mln)
        clist.append(other)
    # Merging promotes the differing scalar coords into a new dimension.
    return clist.merge_cube()
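A quick way to sanity-check _add_levels (a toy sketch using the imports above; the input cube here is made up):

import numpy as np
from iris.cube import Cube

base = Cube(np.zeros((3, 4)), long_name='toy_field')
stacked = _add_levels(base, levels=5)
print(stacked.shape)  # -> (5, 3, 4): model_level_number is the new leading dimension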
Example 2: _create_cube

# Required import: from iris.cube import CubeList
# Method used: CubeList.append
def _create_cube(self, filenames, variable):
    import logging
    import numpy as np
    from cis.data_io.hdf import _read_hdf4
    from cis.data_io import hdf_vd
    from iris.cube import Cube, CubeList
    from iris.coords import DimCoord, AuxCoord
    from cis.time_util import calculate_mid_time, cis_standard_time_unit
    from cis.data_io.hdf_sd import get_metadata
    from cf_units import Unit

    variables = ['XDim:GlobalGrid', 'YDim:GlobalGrid', variable]
    logging.info("Listing coordinates: " + str(variables))

    cube_list = CubeList()
    # Read each file individually; let Iris do the merging at the end.
    for f in filenames:
        sdata, vdata = _read_hdf4(f, variables)

        lat_points = np.linspace(-90., 90., hdf_vd.get_data(vdata['YDim:GlobalGrid']))
        lon_points = np.linspace(-180., 180., hdf_vd.get_data(vdata['XDim:GlobalGrid']))
        lat_coord = DimCoord(lat_points, standard_name='latitude', units='degrees')
        lon_coord = DimCoord(lon_points, standard_name='longitude', units='degrees')

        # Create the time coordinate using the midpoint between the start and end dates.
        start_datetime = self._get_start_date(f)
        end_datetime = self._get_end_date(f)
        mid_datetime = calculate_mid_time(start_datetime, end_datetime)
        logging.debug("Using {} as datetime for file {}".format(mid_datetime, f))
        time_coord = AuxCoord(mid_datetime, standard_name='time', units=cis_standard_time_unit,
                              bounds=[start_datetime, end_datetime])

        var = sdata[variable]
        metadata = get_metadata(var)
        try:
            units = Unit(metadata.units)
        except ValueError:
            logging.warning("Unable to parse units '{}' in {} for {}.".format(metadata.units, f, variable))
            units = None

        cube = Cube(_get_MODIS_SDS_data(sdata[variable]),
                    dim_coords_and_dims=[(lon_coord, 1), (lat_coord, 0)],
                    aux_coords_and_dims=[(time_coord, None)],
                    var_name=metadata._name, long_name=metadata.long_name, units=units)
        cube_list.append(cube)

    # Merge the cube list across the scalar time coordinates before returning a single cube.
    return cube_list.merge_cube()
Example 3: Test_extract

# Required import: from iris.cube import CubeList
# Method used: CubeList.append
import iris
import iris.tests as tests
from iris.cube import Cube, CubeList

class Test_extract(tests.IrisTest):
    def setUp(self):
        self.scalar_cubes = CubeList()
        for i in range(5):
            for letter in 'abcd':
                self.scalar_cubes.append(Cube(i, long_name=letter))

    def test_scalar_cube_name_constraint(self):
        # Test name-based extraction from a CubeList of scalar cubes.
        res = self.scalar_cubes.extract('a')
        expected = CubeList([Cube(i, long_name='a') for i in range(5)])
        self.assertEqual(res, expected)

    def test_scalar_cube_data_constraint(self):
        # Test extraction from a CubeList of scalar cubes using a cube_func.
        val = 2
        constraint = iris.Constraint(cube_func=lambda c: c.data == val)
        res = self.scalar_cubes.extract(constraint)
        expected = CubeList([Cube(val, long_name=letter) for letter in 'abcd'])
        self.assertEqual(res, expected)
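The same extraction behaviour can be seen outside a test harness; a compact sketch with invented names:

import iris
from iris.cube import Cube, CubeList

cubes = CubeList([Cube(i, long_name=letter) for i in range(3) for letter in 'ab'])
print(len(cubes.extract('a')))                                               # -> 3 cubes named 'a'
print(len(cubes.extract(iris.Constraint(cube_func=lambda c: c.data == 2))))  # -> 2 cubes with data == 2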
Example 4: create_data_object

# Required import: from iris.cube import CubeList
# Method used: CubeList.append
def create_data_object(self, filenames, variable, index_offset=1):
    import logging
    from cis import utils
    from cis.data_io import hdf, hdf_sd
    from cis.data_io.hdf_vd import get_data, VDS
    from pyhdf.error import HDF4Error
    from iris.coords import DimCoord, AuxCoord
    from iris.cube import Cube, CubeList
    from cis.data_io.gridded_data import GriddedData
    from cis.time_util import cis_standard_time_unit
    from datetime import datetime
    from iris.util import new_axis
    import numpy as np

    logging.debug("Creating data object for variable " + variable)
    variables = ["Pressure_Mean"]
    logging.info("Listing coordinates: " + str(variables))
    variables.append(variable)

    # Read the data from all the files.
    sdata = {}
    for filename in filenames:
        try:
            sds_dict = hdf_sd.read(filename, variables)
        except HDF4Error as e:
            raise IOError(str(e))
        for var in list(sds_dict.keys()):
            utils.add_element_to_list_in_dict(sdata, var, sds_dict[var])

    # Work out the size of the data arrays; the coordinate variables will be
    # reshaped to match.
    # NOTE: This assumes that all Caliop_L1 files share the same altitudes.
    # If that is not the case, the following line will need to be changed to
    # concatenate the data from all the files rather than arbitrarily picking
    # the altitudes from the first file.
    alt_data = self._get_calipso_data(hdf_sd.HDF_SDS(filenames[0], 'Altitude_Midpoint'))[0, :]
    alt_coord = DimCoord(alt_data, standard_name='altitude', units='km')
    alt_coord.convert_units('m')
    lat_data = self._get_calipso_data(hdf_sd.HDF_SDS(filenames[0], 'Latitude_Midpoint'))[0, :]
    lat_coord = DimCoord(lat_data, standard_name='latitude', units='degrees_north')
    lon_data = self._get_calipso_data(hdf_sd.HDF_SDS(filenames[0], 'Longitude_Midpoint'))[0, :]
    lon_coord = DimCoord(lon_data, standard_name='longitude', units='degrees_east')

    cubes = CubeList()
    for f in filenames:
        t = get_data(VDS(f, "Nominal_Year_Month"), True)[0]
        time_data = cis_standard_time_unit.date2num(datetime(int(t[0:4]), int(t[4:6]), 15))
        time_coord = AuxCoord(time_data, long_name='Profile_Time', standard_name='time',
                              units=cis_standard_time_unit)

        # Retrieve the data and its metadata.
        var = sdata[variable]
        metadata = hdf.read_metadata(var, "SD")

        data = self._get_calipso_data(hdf_sd.HDF_SDS(f, variable))
        pres_data = self._get_calipso_data(hdf_sd.HDF_SDS(f, 'Pressure_Mean'))
        pres_coord = AuxCoord(pres_data, standard_name='air_pressure', units='hPa')

        if data.ndim == 2:
            # pres_coord = new_axis()
            cube = Cube(data, long_name=metadata.long_name or variable, units=self.clean_units(metadata.units),
                        dim_coords_and_dims=[(lat_coord, 0), (lon_coord, 1)],
                        aux_coords_and_dims=[(time_coord, ())])
            # Promote the scalar time coord to a length-one dimension.
            new_cube = new_axis(cube, 'time')
            cubes.append(new_cube)
        elif data.ndim == 3:
            # pres_coord = new_axis()
            cube = Cube(data, long_name=metadata.long_name or variable, units=self.clean_units(metadata.units),
                        dim_coords_and_dims=[(lat_coord, 0), (lon_coord, 1), (alt_coord, 2)],
                        aux_coords_and_dims=[(time_coord, ())])
            # Promote the scalar time coord to a length-one dimension.
            new_cube = new_axis(cube, 'time')
            # Then add the (extended) pressure coord so that it is explicitly a function of time.
            new_cube.add_aux_coord(pres_coord[np.newaxis, ...], (0, 1, 2, 3))
            cubes.append(new_cube)
        else:
            raise ValueError("Unexpected number of dimensions for CALIOP data: {}".format(data.ndim))

    # Concatenate the cubes from each file into a single GriddedData object.
    gd = GriddedData.make_from_cube(cubes.concatenate_cube())
    return gd
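Note the contrast with Examples 1 and 2: merge_cube builds a new dimension from scalar coordinates, whereas concatenate_cube joins cubes along an existing dimension, which is why new_axis is used above to first promote the scalar time coordinate to a length-one dimension. A toy sketch of that promote-then-concatenate pattern (illustrative names, not CALIOP data):

import numpy as np
from iris.cube import Cube, CubeList
from iris.coords import DimCoord
from iris.util import new_axis

cubes = CubeList()
for day in (1.0, 2.0):
    y = DimCoord(np.arange(2.0), long_name='y')
    x = DimCoord(np.arange(3.0), long_name='x')
    c = Cube(np.zeros((2, 3)), long_name='toy_var',
             dim_coords_and_dims=[(y, 0), (x, 1)])
    c.add_aux_coord(DimCoord(day, long_name='day'))  # scalar time-like coord
    cubes.append(new_axis(c, 'day'))                 # promote to a length-1 leading dim

print(cubes.concatenate_cube().shape)                # expected: (2, 2, 3)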
Example 5: parameter

# Required import: from iris.cube import CubeList
# Method used: CubeList.append
# Note: helpers such as iscube, iscube_and_not_scalar, check_coords,
# clean_pressure_coord, coords, grid, mcalc, np, Cube and CubeList come
# from the surrounding module, whose imports are omitted in this snippet.
class AtmosFlow:
    """
    Atmospheric flow.

    Used to calculate meteorological parameters from the given cubes.
    Derived quantities are stored as cached properties to save
    computational time.

    Quantities that involve horizontal derivatives are only correct in
    Cartesian coordinates, e.g. on the output from a LAM model with
    constant grid spacing. Use `prepare_cube_on_model_levels` to prepare
    cubes for `AtmosFlow` on a Cartesian grid.

    Attributes
    ----------
    cubes: iris.cube.CubeList
        list of cubes representing meteorological parameters
    main_cubes: iris.cube.CubeList
        list of non-scalar cubes
    wind_cmpnt: iris.cube.CubeList
        list of u,v,w-wind components
    {x,y,z}coord: iris.coords.Coord
        coordinates in the respective dimensions
    pres: iris.cube.Cube
        pressure created from a coordinate, if possible
    lats: iris.cube.Cube
        latitudes
    fcor: iris.cube.Cube
        Coriolis parameter (taken at 45N by default)
    d{x,y}: iris.cube.Cube
        grid spacing (if cartesian=True)
    """
    def __init__(self, cartesian=True, **kw_vars):
        """
        Parameters
        ----------
        cartesian: bool (default True)
            Cartesian coordinate system flag
        **kw_vars: dict of iris cubes
            meteorological parameters

        Examples
        --------
        Initialise an `AtmosFlow` object with 3 wind components

        >>> AF = AtmosFlow(u=u_cart, v=v_cart, w=w_cart)

        and calculate relative vorticity:

        >>> rv = AF.rel_vort
        """
        self.__dict__.update(kw_vars)
        self.cartesian = cartesian
        self.cubes = CubeList(filter(iscube, self.__dict__.values()))
        self.main_cubes = CubeList(filter(iscube_and_not_scalar,
                                          self.__dict__.values()))
        self.wind_cmpnt = CubeList(filter(None,
                                          [getattr(self, 'u', None),
                                           getattr(self, 'v', None),
                                           getattr(self, 'w', None)]))
        thecube = self.main_cubes[0]
        check_coords(self.main_cubes)

        # Get the dim_coord, or None if none exists, for the x-y-z dimensions.
        self.xcoord = thecube.coord(axis='X', dim_coords=True)
        self.ycoord = thecube.coord(axis='Y', dim_coords=True)
        self.zcoord = thecube.coord(axis='Z')
        if self.zcoord.units.is_convertible('Pa'):
            # The vertical coordinate is pressure.
            self.zmode = 'pcoord'
            for cube in self.main_cubes:
                if self.zcoord in cube.dim_coords:
                    clean_pressure_coord(cube)
            self.pres = coords.pres_coord_to_cube(thecube)
            self.cubes.append(self.pres)

        if not hasattr(self, 'lats'):
            try:
                _, lats = grid.unrotate_lonlat_grids(thecube)
            except (ValueError, AttributeError):
                lats = np.array([45.])
            self.lats = Cube(lats,
                             units='degrees',
                             standard_name='latitude')
        self.fcor = mcalc.coriolis_parameter(self.lats)
        self.fcor.convert_units('s-1')

        if self.cartesian:
            for ax, rot_name in zip(('x', 'y'),
                                    ('grid_longitude', 'grid_latitude')):
                for cube in self.cubes:
                    if rot_name in [i.name() for i in cube.coords(axis=ax)]:
                        cube.remove_coord(rot_name)
            try:
                _dx = thecube.attributes['um_res'].to_flt('m')
            except KeyError:
                _dx = 1.
            self.dx = Cube(_dx, units='m')
            self.dy = Cube(_dx, units='m')
# ......... (the rest of the code is omitted here) .........
Example 6: create_data_object

# Required import: from iris.cube import CubeList
# Method used: CubeList.append
def create_data_object(self, filenames, variable):
    from netCDF4 import Dataset
    from biggus import OrthoArrayAdapter                  # only needed for the lazy path commented out below
    from iris.cube import Cube, CubeList
    from iris.coords import DimCoord
    from iris.fileformats.netcdf import NetCDFDataProxy   # only needed for the lazy path commented out below
    from datetime import datetime
    from os.path import basename
    from cis.time_util import cis_standard_time_unit
    from cis.data_io.gridded_data import make_from_cube
    import numpy as np

    cubes = CubeList()
    for f in filenames:
        # Open the file.
        ds = Dataset(f)
        # E.g. 'NO2.COLUMN.VERTICAL.TROPOSPHERIC.CS30_BACKSCATTER.SOLAR'
        v = ds.variables[variable]
        # Get the coords.
        lat = ds.variables['LATITUDE']
        lon = ds.variables['LONGITUDE']

        # Work out the dtype the data would have once scale/offset are applied
        # (needed for the commented-out lazy biggus adaptor below).
        scale_factor = getattr(v, 'scale_factor', None)
        add_offset = getattr(v, 'add_offset', None)
        if scale_factor is None and add_offset is None:
            v_dtype = v.datatype
        elif scale_factor is not None:
            v_dtype = scale_factor.dtype
        else:
            v_dtype = add_offset.dtype
        # proxy = NetCDFDataProxy(v.shape, v_dtype, f, variable, float(v.VAR_FILL_VALUE))
        # a = OrthoArrayAdapter(proxy)

        # Mask out all invalid values (NaN, Inf, etc).
        a = np.ma.masked_invalid(v[:])
        # Mask out all negative values as well.
        a = np.ma.masked_less(a, 0.0)

        # Just read the lat and lon in directly.
        lat_coord = DimCoord(lat[:], standard_name='latitude', units='degrees', long_name=lat.VAR_DESCRIPTION)
        lon_coord = DimCoord(lon[:], standard_name='longitude', units='degrees', long_name=lon.VAR_DESCRIPTION)

        # Pull the date out of the filename.
        fname = basename(f)
        dt = datetime.strptime(fname[:10], "%Y_%m_%d")
        t_coord = DimCoord(cis_standard_time_unit.date2num(dt), standard_name='time', units=cis_standard_time_unit)

        c = Cube(a, long_name=getattr(v, "VAR_DESCRIPTION", None), units=getattr(v, "VAR_UNITS", None),
                 dim_coords_and_dims=[(lat_coord, 0), (lon_coord, 1)])
        c.add_aux_coord(t_coord)

        # Close the file.
        ds.close()
        cubes.append(c)

    # We have a scalar time coord and no conflicting metadata, so this should create exactly one cube...
    merged = cubes.merge_cube()

    # Return as a CIS GriddedData object.
    return make_from_cube(merged)
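One caveat worth knowing when following these examples: merge_cube (like concatenate_cube) insists on producing exactly one cube and raises otherwise, so conflicting metadata surfaces as an error rather than a silent partial merge. A small illustrative sketch:

from iris.cube import Cube, CubeList
from iris.exceptions import MergeError

pair = CubeList([Cube(0, long_name='a'), Cube(1, long_name='b')])
try:
    pair.merge_cube()  # names differ -> cannot merge into a single cube
except MergeError as err:
    print('merge failed:', err)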