This article collects typical usage examples of the Python method cis.data_io.ungridded_data.UngriddedData.units. If you are wondering exactly what UngriddedData.units does or how to use it, the curated example below may help. You can also read further about the enclosing class cis.data_io.ungridded_data.UngriddedData.
One code example of the UngriddedData.units method is shown below.
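Before looking at the full example, here is a minimal sketch of how the attribute is typically read. It assumes `ungridded` is an UngriddedData instance obtained elsewhere (for example from one of the CIS data readers); the helper `describe_units` is purely illustrative and not part of the library.

from cis.data_io.ungridded_data import UngriddedData

def describe_units(ungridded):
    """Illustrative helper: report and return the units of an UngriddedData object."""
    if not isinstance(ungridded, UngriddedData):
        raise TypeError("expected an UngriddedData instance")
    # `units` is read as a plain attribute, exactly as the collocate() example below reads `data.units`.
    print("Variable {} has units {}".format(ungridded.name(), ungridded.units))
    return ungridded.units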
Example 1: collocate
# Required import: from cis.data_io.ungridded_data import UngriddedData [as alias]
# Or: from cis.data_io.ungridded_data.UngriddedData import units [as alias]
def collocate(self, points, data, constraint, kernel):
    """
    This collocator takes a list of HyperPoints and a data object (currently either Ungridded
    data or a Cube) and returns one new LazyData object with the values as determined by the
    constraint and kernel objects. The metadata for the output LazyData object is copied from
    the input data object.

    :param points: UngriddedData or UngriddedCoordinates defining the sample points
    :param data: An UngriddedData object or Cube, or any other object containing metadata that
        the constraint object can read. May also be a list of objects, in which case a list will
        be returned
    :param constraint: An instance of a Constraint subclass which takes a data object and
        returns a subset of that data based on its internal parameters
    :param kernel: An instance of a Kernel subclass which takes a number of points and returns
        a single value
    :return: A single LazyData object
    """
    log_memory_profile("GeneralUngriddedCollocator Initial")

    if isinstance(data, list):
        # Indexing and constraints (for SepConstraintKdTree) will only take place on the first iteration,
        # so we really can just call this method recursively if we've got a list of data.
        output = UngriddedDataList()
        for var in data:
            output.extend(self.collocate(points, var, constraint, kernel))
        return output

    metadata = data.metadata
    sample_points = points.get_all_points()

    # Convert ungridded data to a list of points if the kernel needs it.
    # Special-case checks for kernels that use a cube - this could be done more elegantly.
    if isinstance(kernel, nn_gridded) or isinstance(kernel, li):
        if hasattr(kernel, "interpolator"):
            # If the kernel has an interpolator we need to reset it, as it depends on the actual values
            # as well as the coordinates.
            kernel.interpolator = None
            kernel.coord_names = []
        if not isinstance(data, iris.cube.Cube):
            raise ValueError("Ungridded data cannot be used with kernel nn_gridded or li")
        if constraint is not None and not isinstance(constraint, DummyConstraint):
            raise ValueError("A constraint cannot be specified with kernel nn_gridded or li")
        data_points = data
    else:
        data_points = data.get_non_masked_points()

    # First fix the sample points so that they all fall within the same 360 degree longitude range.
    _fix_longitude_range(points.coords(), sample_points)
    # Then fix the data points so that they fall onto the same 360 degree longitude range as the sample points.
    _fix_longitude_range(points.coords(), data_points)

    log_memory_profile("GeneralUngriddedCollocator after data retrieval")

    # Create an index if the constraint and/or kernel require one.
    coord_map = None
    data_index.create_indexes(constraint, points, data_points, coord_map)
    data_index.create_indexes(kernel, points, data_points, coord_map)
    log_memory_profile("GeneralUngriddedCollocator after indexing")

    logging.info("--> Collocating...")

    # Create the output arrays.
    self.var_name = data.name()
    self.var_long_name = metadata.long_name
    self.var_standard_name = metadata.standard_name
    self.var_units = data.units
    var_set_details = kernel.get_variable_details(self.var_name, self.var_long_name,
                                                  self.var_standard_name, self.var_units)

    sample_points_count = len(sample_points)
    values = np.zeros((len(var_set_details), sample_points_count)) + self.fill_value
    log_memory_profile("GeneralUngriddedCollocator after output array creation")

    logging.info("    {} sample points".format(sample_points_count))

    # Apply the constraint and/or kernel to each sample point.
    cell_count = 0
    total_count = 0
    for i, point in sample_points.enumerate_non_masked_points():
        # Log progress periodically.
        cell_count += 1
        if cell_count == 1000:
            total_count += cell_count
            cell_count = 0
            logging.info("    Processed {} points of {}".format(total_count, sample_points_count))

        if constraint is None:
            con_points = data_points
        else:
            con_points = constraint.constrain_points(point, data_points)

        try:
            value_obj = kernel.get_value(point, con_points)
            # The kernel returns either a single value or a tuple of values to insert into each output variable.
            if isinstance(value_obj, tuple):
                for idx, val in enumerate(value_obj):
                    if not np.isnan(val):
                        values[idx, i] = val
            else:
                values[0, i] = value_obj
        except CoordinateMultiDimError as e:
            raise NotImplementedError(e)
# ......... remainder of this method omitted .........
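The loop above also documents the contract that collocate() expects from its kernel argument: get_variable_details(...) describes the output variable(s), with its length sizing the output array, and get_value(point, con_points) returns either a single value or a tuple of values for each sample point. The sketch below is a hypothetical kernel written only against the calls visible in this snippet; it does not derive from the real CIS Kernel base class, and both the return layout of get_variable_details and the `.vals` attribute on the constrained points are assumptions that may need adapting to the library's actual interface.

import numpy as np

class FirstValueKernel(object):
    """Hypothetical kernel returning the value of the first constrained data point."""

    def get_variable_details(self, var_name, var_long_name, var_standard_name, var_units):
        # One output variable. collocate() only relies on len() of this return value
        # to size the `values` array; the tuple layout used here is an assumption.
        return [(var_name, var_long_name, var_standard_name, var_units)]

    def get_value(self, point, con_points):
        # con_points is the (possibly constrained) collection of data points for this sample point.
        # Assumption: the collection exposes its numeric values through a `.vals` attribute.
        vals = con_points.vals
        if len(vals) == 0:
            return np.nan
        return vals[0]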