This article collects typical usage examples of the Python method iris.cube.CubeList.concatenate_cube. If you are asking yourself how CubeList.concatenate_cube works, how to use it, or where to find real-world examples of it, the curated code example below may help. You can also explore further usage examples of its containing class, iris.cube.CubeList.
The section below shows 1 code example of CubeList.concatenate_cube. Examples are sorted by popularity by default; upvoting the ones you like or find useful helps the system recommend better Python code examples.
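Before the example, here is a minimal, self-contained sketch (not taken from the example below; names and values are illustrative only) of what CubeList.concatenate_cube does: it joins cubes along an existing dimension coordinate and returns exactly one cube, raising an error if the inputs cannot be combined into a single result.

import numpy as np
from iris.coords import DimCoord
from iris.cube import Cube, CubeList

def make_cube(start):
    # Each cube covers two consecutive days along an existing 'time' dimension
    time = DimCoord(np.arange(start, start + 2, dtype=np.float64),
                    standard_name='time', units='days since 2000-01-01')
    return Cube(np.zeros(2), long_name='example_data',
                dim_coords_and_dims=[(time, 0)])

cubes = CubeList([make_cube(0), make_cube(2)])
result = cubes.concatenate_cube()  # one cube with a time dimension of length 4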
Example 1: create_data_object
# Required import: from iris.cube import CubeList [as alias]
# Or: from iris.cube.CubeList import concatenate_cube [as alias]
def create_data_object(self, filenames, variable, index_offset=1):
    from cis.data_io.hdf_vd import get_data
    from cis.data_io.hdf_vd import VDS
    from pyhdf.error import HDF4Error
    from cis.data_io import hdf_sd
    from iris.coords import DimCoord, AuxCoord
    from iris.cube import Cube, CubeList
    from cis.data_io.gridded_data import GriddedData
    from cis.time_util import cis_standard_time_unit
    from datetime import datetime
    from iris.util import new_axis
    import numpy as np
    # The following three names are also used below; they are assumed to be
    # module-level imports in the original source
    import logging
    from cis import utils
    from cis.data_io import hdf

    logging.debug("Creating data object for variable " + variable)
    variables = ["Pressure_Mean"]
    logging.info("Listing coordinates: " + str(variables))
    variables.append(variable)

    # Read the data from all of the files
    sdata = {}
    for filename in filenames:
        try:
            sds_dict = hdf_sd.read(filename, variables)
        except HDF4Error as e:
            raise IOError(str(e))
        for var in list(sds_dict.keys()):
            utils.add_element_to_list_in_dict(sdata, var, sds_dict[var])

    # Work out the size of the data arrays; the coordinate variables will be
    # reshaped to match.
    # NOTE: This assumes that all Caliop_L1 files have the same altitudes.
    # If this is not the case, then the following line will need to be changed
    # to concatenate the data from all the files rather than arbitrarily picking
    # the altitudes from the first file.
    alt_data = self._get_calipso_data(hdf_sd.HDF_SDS(filenames[0], 'Altitude_Midpoint'))[0, :]
    alt_coord = DimCoord(alt_data, standard_name='altitude', units='km')
    alt_coord.convert_units('m')
    lat_data = self._get_calipso_data(hdf_sd.HDF_SDS(filenames[0], 'Latitude_Midpoint'))[0, :]
    lat_coord = DimCoord(lat_data, standard_name='latitude', units='degrees_north')
    lon_data = self._get_calipso_data(hdf_sd.HDF_SDS(filenames[0], 'Longitude_Midpoint'))[0, :]
    lon_coord = DimCoord(lon_data, standard_name='longitude', units='degrees_east')

    cubes = CubeList()
    for f in filenames:
        t = get_data(VDS(f, "Nominal_Year_Month"), True)[0]
        time_data = cis_standard_time_unit.date2num(datetime(int(t[0:4]), int(t[4:6]), 15))
        time_coord = AuxCoord(time_data, long_name='Profile_Time', standard_name='time',
                              units=cis_standard_time_unit)

        # Retrieve the data and its metadata
        var = sdata[variable]
        metadata = hdf.read_metadata(var, "SD")
        data = self._get_calipso_data(hdf_sd.HDF_SDS(f, variable))

        pres_data = self._get_calipso_data(hdf_sd.HDF_SDS(f, 'Pressure_Mean'))
        pres_coord = AuxCoord(pres_data, standard_name='air_pressure', units='hPa')

        if data.ndim == 2:
            # pres_coord = new_axis()
            cube = Cube(data, long_name=metadata.long_name or variable, units=self.clean_units(metadata.units),
                        dim_coords_and_dims=[(lat_coord, 0), (lon_coord, 1)],
                        aux_coords_and_dims=[(time_coord, ())])
            # Promote the scalar time coordinate to a length-one dimension
            new_cube = new_axis(cube, 'time')
            cubes.append(new_cube)
        elif data.ndim == 3:
            # pres_coord = new_axis()
            cube = Cube(data, long_name=metadata.long_name or variable, units=self.clean_units(metadata.units),
                        dim_coords_and_dims=[(lat_coord, 0), (lon_coord, 1), (alt_coord, 2)],
                        aux_coords_and_dims=[(time_coord, ())])
            # Promote the scalar time coordinate to a length-one dimension
            new_cube = new_axis(cube, 'time')
            # Then add the (extended) pressure coordinate so that it is explicitly a function of time
            new_cube.add_aux_coord(pres_coord[np.newaxis, ...], (0, 1, 2, 3))
            cubes.append(new_cube)
        else:
            raise ValueError("Unexpected number of dimensions for CALIOP data: {}".format(data.ndim))

    # Concatenate the cubes from each file into a single GriddedData object
    gd = GriddedData.make_from_cube(cubes.concatenate_cube())
    return gd
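Note the pattern this example relies on: each file yields a cube with a scalar time coordinate, which iris.util.new_axis promotes to a length-one time dimension. CubeList.concatenate_cube can then join the per-file cubes along that existing time dimension, which is exactly its role (by contrast, merge_cube builds a new dimension out of scalar coordinates). The final call raises an error if the per-file cubes cannot be combined into a single cube, for example if their time points overlap or their metadata disagree.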