This article collects typical usage examples of the Python class iris.cube.CubeList. If you have been wondering what exactly CubeList does, how to use it, or where to find examples of it in use, the hand-picked class examples here should help.
Fifteen code examples of the CubeList class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Python code examples.
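Before the examples, here is a minimal, self-contained sketch (not taken from any of the examples below) of the core pattern most of them rely on: collect cubes that differ only in a scalar coordinate into a CubeList, then merge them into a single cube with a new dimension.

import numpy as np
from iris.cube import Cube, CubeList
from iris.coords import AuxCoord

cubes = CubeList()
for level in range(3):
    cube = Cube(np.zeros((2, 2)), long_name='air_temperature', units='K')
    # A scalar coordinate that differs between the cubes...
    cube.add_aux_coord(AuxCoord(level, long_name='level'))
    cubes.append(cube)

# ...becomes a new leading dimension after the merge.
merged = cubes.merge_cube()
print(merged.shape)  # (3, 2, 2)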
Example 1: get_cube

def get_cube(url, name_list=None, bbox=None, callback=None,
             time=None, units=None, constraint=None):
    cubes = iris.load_raw(url, callback=callback)
    if constraint:
        cubes = cubes.extract(constraint)
    if name_list:
        in_list = lambda cube: cube.standard_name in name_list
        cubes = CubeList([cube for cube in cubes if in_list(cube)])
        if not cubes:
            raise ValueError('Cube does not contain {!r}'.format(name_list))
        else:
            cube = cubes.merge_cube()
    if bbox:
        cube = intersection(cube, bbox)
    if time:
        if isinstance(time, datetime):
            start, stop = time, None
        elif isinstance(time, tuple):
            start, stop = time[0], time[1]
        else:
            raise ValueError('Time must be start or (start, stop).'
                             ' Got {!r}'.format(time))
        cube = time_slice(cube, start, stop)
    if units:
        if cube.units != units:
            cube.convert_units(units)
    return cube
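A hypothetical call of the function above. The URL and dates are placeholders, and the `intersection` and `time_slice` helpers it uses are assumed to be defined elsewhere in the same module:

from datetime import datetime

cube = get_cube('http://example.com/thredds/dodsC/model.nc',
                name_list=['sea_water_temperature'],
                bbox=[-87.40, 24.25, -74.70, 36.70],
                time=(datetime(2014, 7, 1), datetime(2014, 7, 7)),
                units='celsius')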
Example 2: get_cube

def get_cube(url, name_list, bbox=None, time=None, units=None, callback=None,
             constraint=None):
    """Only `url` and `name_list` are mandatory. The kw args are:
    `bbox`, `callback`, `time`, `units`, `constraint`."""
    cubes = iris.load_raw(url, callback=callback)
    in_list = lambda cube: cube.standard_name in name_list
    cubes = CubeList([cube for cube in cubes if in_list(cube)])
    if not cubes:
        raise ValueError('Cube does not contain {!r}'.format(name_list))
    else:
        cube = cubes.merge_cube()
    if constraint:
        cube = cube.extract(constraint)
        if not cube:
            raise ValueError('No cube using {!r}'.format(constraint))
    if bbox:
        cube = subset(cube, bbox)
        if not cube:
            raise ValueError('No cube using {!r}'.format(bbox))
    if time:
        if isinstance(time, datetime):
            start, stop = time, None
        elif isinstance(time, tuple):
            start, stop = time[0], time[1]
        else:
            raise ValueError('Time must be start or (start, stop).'
                             ' Got {!r}'.format(time))
        cube = time_slice(cube, start, stop)
    if units:
        if cube.units != units:
            cube.convert_units(units)
    return cube
Example 3: _add_levels

def _add_levels(cube, levels=13):
    clist = CubeList()
    for level in range(levels):
        mln = DimCoord(level, standard_name='model_level_number')
        other = cube.copy()
        other.add_aux_coord(mln)
        clist.append(other)
    return clist.merge_cube()
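A sketch of how _add_levels might be used, assuming a 2-D (y, x) input cube: the copies differ only in their scalar model_level_number coordinate, so the merge stacks them along a new leading dimension.

import numpy as np
from iris.cube import Cube

cube = Cube(np.zeros((2, 2)), long_name='air_temperature', units='K')
stacked = _add_levels(cube, levels=3)
print(stacked.shape)  # (3, 2, 2)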
Example 4: test_realise_data

def test_realise_data(self):
    # Simply check that calling CubeList.realise_data is calling
    # _lazy_data.co_realise_cubes.
    mock_cubes_list = [mock.Mock(ident=count) for count in range(3)]
    test_cubelist = CubeList(mock_cubes_list)
    call_patch = self.patch('iris._lazy_data.co_realise_cubes')
    test_cubelist.realise_data()
    # Check it was called once, passing cubes as *args.
    self.assertEqual(call_patch.call_args_list,
                     [mock.call(*mock_cubes_list)])
Example 5: Test_xml

class Test_xml(tests.IrisTest):
    def setUp(self):
        self.cubes = CubeList([Cube(np.arange(3)),
                               Cube(np.arange(3))])

    def test_byteorder_default(self):
        self.assertIn('byteorder', self.cubes.xml())

    def test_byteorder_false(self):
        self.assertNotIn('byteorder', self.cubes.xml(byteorder=False))

    def test_byteorder_true(self):
        self.assertIn('byteorder', self.cubes.xml(byteorder=True))
Example 6: _create_cube

def _create_cube(self, filenames, variable):
    import numpy as np
    from cis.data_io.hdf import _read_hdf4
    from cis.data_io import hdf_vd
    from iris.cube import Cube, CubeList
    from iris.coords import DimCoord, AuxCoord
    from cis.time_util import calculate_mid_time, cis_standard_time_unit
    from cis.data_io.hdf_sd import get_metadata
    from cf_units import Unit

    variables = ['XDim:GlobalGrid', 'YDim:GlobalGrid', variable]
    logging.info("Listing coordinates: " + str(variables))

    cube_list = CubeList()
    # Read each file individually, let Iris do the merging at the end.
    for f in filenames:
        sdata, vdata = _read_hdf4(f, variables)

        lat_points = np.linspace(-90., 90., hdf_vd.get_data(vdata['YDim:GlobalGrid']))
        lon_points = np.linspace(-180., 180., hdf_vd.get_data(vdata['XDim:GlobalGrid']))

        lat_coord = DimCoord(lat_points, standard_name='latitude', units='degrees')
        lon_coord = DimCoord(lon_points, standard_name='longitude', units='degrees')

        # Create a time coordinate using the midpoint of the time delta
        # between the start date and the end date.
        start_datetime = self._get_start_date(f)
        end_datetime = self._get_end_date(f)
        mid_datetime = calculate_mid_time(start_datetime, end_datetime)
        logging.debug("Using {} as datetime for file {}".format(mid_datetime, f))
        time_coord = AuxCoord(mid_datetime, standard_name='time', units=cis_standard_time_unit,
                              bounds=[start_datetime, end_datetime])

        var = sdata[variable]
        metadata = get_metadata(var)

        try:
            units = Unit(metadata.units)
        except ValueError:
            logging.warning("Unable to parse units '{}' in {} for {}.".format(metadata.units, f, variable))
            units = None

        cube = Cube(_get_MODIS_SDS_data(sdata[variable]),
                    dim_coords_and_dims=[(lon_coord, 1), (lat_coord, 0)],
                    aux_coords_and_dims=[(time_coord, None)],
                    var_name=metadata._name, long_name=metadata.long_name, units=units)
        cube_list.append(cube)

    # Merge the cube list across the scalar time coordinates before
    # returning a single cube.
    return cube_list.merge_cube()
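The design choice here is worth noting: rather than concatenating arrays by hand, each file becomes a cube carrying a scalar time coordinate (an AuxCoord with bounds), and CubeList.merge_cube() is left to promote that scalar coordinate into a time dimension spanning the files.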
Example 7: test_orthogonal_with_realization

def test_orthogonal_with_realization(self):
    # => fp: 2; rt: 2; t: 2; realization: 2
    triples = ((0, 10, 1),
               (0, 10, 2),
               (0, 11, 1),
               (0, 11, 2),
               (1, 10, 1),
               (1, 10, 2),
               (1, 11, 1),
               (1, 11, 2))
    en1_cubes = [self._make_cube(*triple, realization=1) for
                 triple in triples]
    en2_cubes = [self._make_cube(*triple, realization=2) for
                 triple in triples]
    cubes = CubeList(en1_cubes) + CubeList(en2_cubes)
    cube, = cubes.merge()
    self.assertCML(cube, checksum=False)
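The unpacking `cube, = cubes.merge()` doubles as an assertion: CubeList.merge() returns a CubeList, and the test fails unless merging produced exactly one cube. A self-contained sketch of the same mechanism, using realization as the only varying coordinate:

import numpy as np
from iris.cube import Cube, CubeList
from iris.coords import AuxCoord

cubes = CubeList()
for r in (1, 2):
    c = Cube(np.zeros(3), long_name='air_temperature', units='K')
    c.add_aux_coord(AuxCoord(r, standard_name='realization'))
    cubes.append(c)

cube, = cubes.merge()  # exactly one cube, with a new realization dimension
print(cube.shape)  # (2, 3)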
Example 8: test_combination_with_realization

def test_combination_with_realization(self):
    # => fp, rt, t: 8; realization: 2
    triples = ((0, 10, 1),
               (0, 10, 2),
               (0, 11, 1),
               (0, 11, 3),  # This '3' breaks the pattern.
               (1, 10, 1),
               (1, 10, 2),
               (1, 11, 1),
               (1, 11, 2))
    en1_cubes = [self._make_cube(*triple, realization=1) for
                 triple in triples]
    en2_cubes = [self._make_cube(*triple, realization=2) for
                 triple in triples]
    cubes = CubeList(en1_cubes) + CubeList(en2_cubes)
    cube, = cubes.merge()
    self.assertCML(cube, checksum=False)
Example 9: test_combination_with_extra_triple

def test_combination_with_extra_triple(self):
    # => fp, rt, t, realization: 17
    triples = ((0, 10, 1),
               (0, 10, 2),
               (0, 11, 1),
               (0, 11, 2),
               (1, 10, 1),
               (1, 10, 2),
               (1, 11, 1),
               (1, 11, 2))
    en1_cubes = [self._make_cube(*triple, realization=1) for
                 triple in triples]
    # Add an extra time triple on the end.
    en2_cubes = [self._make_cube(*triple, realization=2) for
                 triple in triples + ((1, 11, 3),)]
    cubes = CubeList(en1_cubes) + CubeList(en2_cubes)
    cube, = cubes.merge()
    self.assertCML(cube, checksum=False)
Example 10: test_combination_with_extra_realization

def test_combination_with_extra_realization(self):
    # => fp, rt, t, realization: 17
    triples = ((0, 10, 1),
               (0, 10, 2),
               (0, 11, 1),
               (0, 11, 2),
               (1, 10, 1),
               (1, 10, 2),
               (1, 11, 1),
               (1, 11, 2))
    en1_cubes = [self._make_cube(*triple, realization=1) for
                 triple in triples]
    en2_cubes = [self._make_cube(*triple, realization=2) for
                 triple in triples]
    # Add an extra cube that duplicates one of the time triples
    # but with a different realization.
    en3_cubes = [self._make_cube(0, 10, 2, realization=3)]
    cubes = CubeList(en1_cubes) + CubeList(en2_cubes) + CubeList(en3_cubes)
    cube, = cubes.merge()
    self.assertCML(cube, checksum=False)
Example 11: Test_extract

class Test_extract(tests.IrisTest):
    def setUp(self):
        self.scalar_cubes = CubeList()
        for i in range(5):
            for letter in 'abcd':
                self.scalar_cubes.append(Cube(i, long_name=letter))

    def test_scalar_cube_name_constraint(self):
        # Test the name based extraction of a CubeList containing scalar cubes.
        res = self.scalar_cubes.extract('a')
        expected = CubeList([Cube(i, long_name='a') for i in range(5)])
        self.assertEqual(res, expected)

    def test_scalar_cube_data_constraint(self):
        # Test the extraction of a CubeList containing scalar cubes
        # when using a cube_func.
        val = 2
        constraint = iris.Constraint(cube_func=lambda c: c.data == val)
        res = self.scalar_cubes.extract(constraint)
        expected = CubeList([Cube(val, long_name=letter) for letter in 'abcd'])
        self.assertEqual(res, expected)
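The same extraction outside the test harness, as a self-contained sketch; note that extract() on a CubeList filters it and returns a new CubeList, it never merges:

import iris
from iris.cube import Cube, CubeList

cubes = CubeList([Cube(i, long_name=letter)
                  for i in range(3) for letter in 'ab'])
only_a = cubes.extract(iris.Constraint(name='a'))
print(len(only_a))  # 3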
Example 12: test_FAIL_phenomena_nostash

def test_FAIL_phenomena_nostash(self):
    # If we remove the 'STASH' attributes, certain phenomena can still be
    # successfully encoded+decoded by standard load using LBFC values.
    # Structured loading gets this wrong, because it does not use LBFC in
    # characterising phenomena.
    flds = self.fields(c_t='1122', phn='0101')
    for fld in flds:
        del fld.attributes['STASH']
    file = self.save_fieldcubes(flds)
    results = iris.load(file)
    if not self.do_fast_loads:
        # This is what we'd LIKE to get (what iris.load gives).
        expected = CubeList(flds).merge()
    else:
        # At present, we get a cube incorrectly combined together over all
        # 4 timepoints, with the same phenomenon for all (!wrong!).
        # It's a bit tricky to arrange the existing data like that.
        # Do it by hacking the time values to allow merge, and then fixing
        # up the time values afterwards.
        old_t1, old_t2 = (fld.coord('time').points[0]
                          for fld in (flds[0], flds[2]))
        for i_fld, fld in enumerate(flds):
            # Hack the phenomena to all look like the first one.
            fld.rename('air_temperature')
            fld.units = 'K'
            # Hack the time points so the 4 cubes can merge into one.
            fld.coord('time').points = [old_t1 + i_fld]
        one_cube = CubeList(flds).merge_cube()
        # Replace the time dim with an anonymous dim.
        co_t_fake = one_cube.coord('time')
        one_cube.remove_coord(co_t_fake)
        # Reconstruct + add back the expected auxiliary time coord.
        co_t_new = AuxCoord([old_t1, old_t1, old_t2, old_t2],
                            standard_name='time', units=co_t_fake.units)
        one_cube.add_aux_coord(co_t_new, 0)
        expected = [one_cube]
    self.assertEqual(results, expected)
Example 13: CubeList

bbox = [-87.40, 24.25, -74.70, 36.70]
units = iris.unit.Unit('celsius')
name_list = ['sea_water_temperature',
             'sea_surface_temperature',
             'sea_water_potential_temperature',
             'equivalent_potential_temperature',
             'sea_water_conservative_temperature',
             'pseudo_equivalent_potential_temperature']

url = "http://crow.marine.usf.edu:8080/thredds/dodsC/FVCOM-Nowcast-Agg.nc"
cubes = iris.load_raw(url)

in_list = lambda cube: cube.standard_name in name_list
cubes = CubeList([cube for cube in cubes if in_list(cube)])
cube = cubes.merge_cube()

lat = iris.Constraint(latitude=lambda cell: bbox[1] <= cell < bbox[3])
lon = iris.Constraint(longitude=lambda cell: bbox[0] <= cell <= bbox[2])
cube = cube.extract(lon & lat)

istart = time_near(cube, start)
istop = time_near(cube, stop)
cube = cube[istart:istop, ...]

print(cube)
Example 14: create_data_object

def create_data_object(self, filenames, variable):
    from netCDF4 import Dataset
    from biggus import OrthoArrayAdapter
    from iris.cube import Cube, CubeList
    from iris.coords import DimCoord
    from iris.fileformats.netcdf import NetCDFDataProxy
    from datetime import datetime
    from os.path import basename
    from cis.time_util import cis_standard_time_unit
    from cis.data_io.gridded_data import make_from_cube
    import numpy as np

    cubes = CubeList()

    for f in filenames:
        # Open the file
        ds = Dataset(f)
        # E.g. 'NO2.COLUMN.VERTICAL.TROPOSPHERIC.CS30_BACKSCATTER.SOLAR'
        v = ds.variables[variable]
        # Get the coords
        lat = ds.variables['LATITUDE']
        lon = ds.variables['LONGITUDE']

        # Create a biggus adaptor over the data
        scale_factor = getattr(v, 'scale_factor', None)
        add_offset = getattr(v, 'add_offset', None)
        if scale_factor is None and add_offset is None:
            v_dtype = v.datatype
        elif scale_factor is not None:
            v_dtype = scale_factor.dtype
        else:
            v_dtype = add_offset.dtype
        # proxy = NetCDFDataProxy(v.shape, v_dtype, f, variable, float(v.VAR_FILL_VALUE))
        # a = OrthoArrayAdapter(proxy)
        # Mask out all invalid values (NaN, Inf, etc)
        a = np.ma.masked_invalid(v[:])

        # Mask out all negative values as well
        a = np.ma.masked_less(a, 0.0)

        # Just read the lat and lon in directly
        lat_coord = DimCoord(lat[:], standard_name='latitude', units='degrees', long_name=lat.VAR_DESCRIPTION)
        lon_coord = DimCoord(lon[:], standard_name='longitude', units='degrees', long_name=lon.VAR_DESCRIPTION)

        # Pull the date out of the filename
        fname = basename(f)
        dt = datetime.strptime(fname[:10], "%Y_%m_%d")
        t_coord = DimCoord(cis_standard_time_unit.date2num(dt), standard_name='time', units=cis_standard_time_unit)

        c = Cube(a, long_name=getattr(v, "VAR_DESCRIPTION", None), units=getattr(v, "VAR_UNITS", None),
                 dim_coords_and_dims=[(lat_coord, 0), (lon_coord, 1)])

        c.add_aux_coord(t_coord)

        # Close the file
        ds.close()

        cubes.append(c)

    # We have a scalar time coord and no conflicting metadata so this
    # should just create one cube...
    merged = cubes.merge_cube()

    # Return as a CIS GriddedData object
    return make_from_cube(merged)
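One design choice stands out here: the commented-out NetCDFDataProxy/OrthoArrayAdapter lines would have kept the data lazy via biggus, but the example instead reads each variable eagerly with v[:] so that invalid and negative values can be masked up front before the cubes are merged.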
Example 15: setUp

def setUp(self):
    self.scalar_cubes = CubeList()
    for i in range(5):
        for letter in 'abcd':
            self.scalar_cubes.append(Cube(i, long_name=letter))