本文整理汇总了Python中iris.cube.CubeList.merge_cube方法的典型用法代码示例。如果您正苦于以下问题:Python CubeList.merge_cube方法的具体用法?Python CubeList.merge_cube怎么用?Python CubeList.merge_cube使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类iris.cube.CubeList
的用法示例。
在下文中一共展示了CubeList.merge_cube方法的6个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: get_cube
# 需要导入模块: from iris.cube import CubeList [as 别名]
# 或者: from iris.cube.CubeList import merge_cube [as 别名]
def get_cube(url, name_list, bbox=None, time=None, units=None, callback=None,
             constraint=None):
    """Load a single cube from `url` whose standard_name is in `name_list`.

    Only `url` and `name_list` are mandatory. The kw args are:
    `bbox`, `callback`, `time`, `units`, `constraint`.

    Raises ValueError if no cube matches `name_list`, `constraint`, or
    `bbox`, or if `time` is neither a datetime nor a (start, stop) tuple.
    """
    cubes = iris.load_raw(url, callback=callback)
    # Keep only the requested phenomena; a set makes the membership
    # test O(1) per cube instead of O(len(name_list)).
    names = set(name_list)
    cubes = CubeList([cube for cube in cubes if cube.standard_name in names])
    if not cubes:
        raise ValueError('Cube does not contain {!r}'.format(name_list))
    cube = cubes.merge_cube()
    if constraint:
        # Cube.extract returns None when the constraint matches nothing.
        cube = cube.extract(constraint)
        if cube is None:
            raise ValueError('No cube using {!r}'.format(constraint))
    if bbox:
        cube = subset(cube, bbox)
        if cube is None:
            raise ValueError('No cube using {!r}'.format(bbox))
    if time:
        if isinstance(time, datetime):
            start, stop = time, None
        elif isinstance(time, tuple):
            start, stop = time[0], time[1]
        else:
            raise ValueError('Time must be start or (start, stop).'
                             ' Got {!r}'.format(time))
        cube = time_slice(cube, start, stop)
    # Convert only when the units actually differ.
    if units and cube.units != units:
        cube.convert_units(units)
    return cube
示例2: get_cube
# 需要导入模块: from iris.cube import CubeList [as 别名]
# 或者: from iris.cube.CubeList import merge_cube [as 别名]
def get_cube(url, name_list=None, bbox=None, callback=None,
             time=None, units=None, constraint=None):
    """Load a single cube from `url`, optionally filtered and subset.

    All filters are optional: `constraint` (iris Constraint), `name_list`
    (standard_name whitelist), `bbox` (lon/lat intersection), `time`
    (datetime or (start, stop) tuple) and `units` (target units).

    Raises ValueError if `name_list` matches no cube, or `time` has the
    wrong type.
    """
    cubes = iris.load_raw(url, callback=callback)
    if constraint:
        cubes = cubes.extract(constraint)
    if name_list:
        # Keep only the requested phenomena.
        names = set(name_list)
        cubes = CubeList([cube for cube in cubes if cube.standard_name in names])
        if not cubes:
            raise ValueError('Cube does not contain {!r}'.format(name_list))
    # BUG FIX: merge unconditionally.  Previously the merge only happened
    # inside the `if name_list:` branch, so calling with the default
    # name_list=None raised NameError on `cube` below.
    cube = cubes.merge_cube()
    if bbox:
        cube = intersection(cube, bbox)
    if time:
        if isinstance(time, datetime):
            start, stop = time, None
        elif isinstance(time, tuple):
            start, stop = time[0], time[1]
        else:
            raise ValueError('Time must be start or (start, stop).'
                             ' Got {!r}'.format(time))
        cube = time_slice(cube, start, stop)
    # Convert only when the units actually differ.
    if units and cube.units != units:
        cube.convert_units(units)
    return cube
示例3: _add_levels
# 需要导入模块: from iris.cube import CubeList [as 别名]
# 或者: from iris.cube.CubeList import merge_cube [as 别名]
def _add_levels(cube, levels=13):
    """Stack `levels` copies of `cube` along a model_level_number coord.

    Each copy is tagged with a scalar model_level_number aux coord
    (0..levels-1); merging the copies promotes it to a new dimension.
    """
    def _tagged_copy(level):
        layer = cube.copy()
        layer.add_aux_coord(DimCoord(level, standard_name='model_level_number'))
        return layer

    return CubeList([_tagged_copy(level) for level in range(levels)]).merge_cube()
示例4: _create_cube
# 需要导入模块: from iris.cube import CubeList [as 别名]
# 或者: from iris.cube.CubeList import merge_cube [as 别名]
def _create_cube(self, filenames, variable):
    """Build one gridded cube for `variable` from MODIS HDF4 files.

    Each file becomes a cube with lat/lon dim coords and a scalar time
    coord (midpoint of the file's date range, full range as bounds);
    Iris then merges the per-file cubes into a single cube along time.
    """
    import numpy as np
    from cis.data_io.hdf import _read_hdf4
    from cis.data_io import hdf_vd
    from iris.cube import Cube, CubeList
    from iris.coords import DimCoord, AuxCoord
    from cis.time_util import calculate_mid_time, cis_standard_time_unit
    from cis.data_io.hdf_sd import get_metadata
    from cf_units import Unit

    variables = ['XDim:GlobalGrid', 'YDim:GlobalGrid', variable]
    logging.info("Listing coordinates: " + str(variables))

    cube_list = CubeList()
    # Read each file individually, let Iris do the merging at the end.
    for f in filenames:
        sdata, vdata = _read_hdf4(f, variables)

        # Grid sizes come from the file's vdata; build regular global grids.
        lat_points = np.linspace(-90., 90., hdf_vd.get_data(vdata['YDim:GlobalGrid']))
        lon_points = np.linspace(-180., 180., hdf_vd.get_data(vdata['XDim:GlobalGrid']))
        lat_coord = DimCoord(lat_points, standard_name='latitude', units='degrees')
        lon_coord = DimCoord(lon_points, standard_name='longitude', units='degrees')

        # Scalar time coordinate: the midpoint of the time delta between
        # the start date and the end date, keeping the range as bounds.
        start_datetime = self._get_start_date(f)
        end_datetime = self._get_end_date(f)
        mid_datetime = calculate_mid_time(start_datetime, end_datetime)
        logging.debug("Using {} as datetime for file {}".format(mid_datetime, f))
        time_coord = AuxCoord(mid_datetime, standard_name='time', units=cis_standard_time_unit,
                              bounds=[start_datetime, end_datetime])

        var = sdata[variable]
        metadata = get_metadata(var)
        try:
            units = Unit(metadata.units)
        except ValueError:
            # Best effort: fall back to unitless rather than failing the read.
            logging.warning("Unable to parse units '{}' in {} for {}.".format(metadata.units, f, variable))
            units = None

        # Reuse `var` rather than performing a second sdata[variable] lookup.
        cube = Cube(_get_MODIS_SDS_data(var),
                    dim_coords_and_dims=[(lon_coord, 1), (lat_coord, 0)],
                    aux_coords_and_dims=[(time_coord, None)],
                    var_name=metadata._name, long_name=metadata.long_name, units=units)
        cube_list.append(cube)

    # Merge the cube list across the scalar time coordinates before
    # returning a single cube.
    return cube_list.merge_cube()
示例5: create_data_object
# 需要导入模块: from iris.cube import CubeList [as 别名]
# 或者: from iris.cube.CubeList import merge_cube [as 别名]
def create_data_object(self, filenames, variable):
    """Read `variable` from each file into a cube and merge along time.

    Returns a CIS GriddedData object wrapping the merged cube.  The date
    for each file's scalar time coordinate is parsed from the first ten
    characters of its filename (YYYY_MM_DD).
    """
    from netCDF4 import Dataset
    from iris.cube import Cube, CubeList
    from iris.coords import DimCoord
    from datetime import datetime
    from os.path import basename
    from cis.time_util import cis_standard_time_unit
    from cis.data_io.gridded_data import make_from_cube
    import numpy as np

    cubes = CubeList()
    for f in filenames:
        ds = Dataset(f)
        # try/finally fixes a resource leak: previously an exception
        # anywhere below left the dataset open.
        try:
            # E.g. 'NO2.COLUMN.VERTICAL.TROPOSPHERIC.CS30_BACKSCATTER.SOLAR'
            v = ds.variables[variable]
            # Get the coords
            lat = ds.variables['LATITUDE']
            lon = ds.variables['LONGITUDE']

            # Mask all invalid values (NaN, Inf, etc), then additionally
            # mask everything negative.  `v[:]` reads the data eagerly,
            # so the dataset can be closed before the cube is used.
            a = np.ma.masked_invalid(v[:])
            a = np.ma.masked_less(a, 0.0)

            # Just read the lat and lon in directly
            lat_coord = DimCoord(lat[:], standard_name='latitude', units='degrees', long_name=lat.VAR_DESCRIPTION)
            lon_coord = DimCoord(lon[:], standard_name='longitude', units='degrees', long_name=lon.VAR_DESCRIPTION)

            # Pull the date out of the filename
            fname = basename(f)
            dt = datetime.strptime(fname[:10], "%Y_%m_%d")
            t_coord = DimCoord(cis_standard_time_unit.date2num(dt), standard_name='time',
                               units=cis_standard_time_unit)

            c = Cube(a, long_name=getattr(v, "VAR_DESCRIPTION", None), units=getattr(v, "VAR_UNITS", None),
                     dim_coords_and_dims=[(lat_coord, 0), (lon_coord, 1)])
            c.add_aux_coord(t_coord)
        finally:
            # Close the file even when reading or parsing raises.
            ds.close()
        cubes.append(c)

    # We have a scalar time coord and no conflicting metadata so this
    # should just create one cube...
    merged = cubes.merge_cube()
    # Return as a CIS GriddedData object
    return make_from_cube(merged)
示例6: CubeList
# 需要导入模块: from iris.cube import CubeList [as 别名]
# 或者: from iris.cube.CubeList import merge_cube [as 别名]
# NOTE(review): this is a Python 2 notebook snippet (`print cube` statement).
# `bbox`, `start`, and `stop` are free variables defined outside this
# excerpt — presumably a (lon_min, lat_min, lon_max, lat_max) bounding box
# and two datetimes; confirm against the full notebook.
units = iris.unit.Unit('celsius')
# Accept any of the common CF temperature standard names.
name_list = ['sea_water_temperature',
'sea_surface_temperature',
'sea_water_potential_temperature',
'equivalent_potential_temperature',
'sea_water_conservative_temperature',
'pseudo_equivalent_potential_temperature']
url = "http://crow.marine.usf.edu:8080/thredds/dodsC/FVCOM-Nowcast-Agg.nc"
cubes = iris.load_raw(url)
# Keep only cubes whose standard_name is one of the temperatures above,
# then merge them into a single cube.
in_list = lambda cube: cube.standard_name in name_list
cubes = CubeList([cube for cube in cubes if in_list(cube)])
cube = cubes.merge_cube()
# Spatial subset: bbox[1]/bbox[3] bound latitude, bbox[0]/bbox[2] longitude.
lat = iris.Constraint(latitude=lambda cell: bbox[1] <= cell < bbox[3])
lon = iris.Constraint(longitude=lambda cell: bbox[0] <= cell <= bbox[2])
cube = cube.extract(lon & lat)
# Temporal subset: indices of the time points nearest `start` and `stop`.
istart = time_near(cube, start)
istop = time_near(cube, stop)
cube = cube[istart:istop, ...]
# <codecell>
print cube
# <codecell>