本文整理汇总了Python中anuga.file.netcdf.NetCDFFile.order方法的典型用法代码示例。如果您正苦于以下问题:Python NetCDFFile.order方法的具体用法?Python NetCDFFile.order怎么用?Python NetCDFFile.order使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类anuga.file.netcdf.NetCDFFile
的用法示例。
在下文中一共展示了NetCDFFile.order方法的4个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: _sww_merge_parallel_non_smooth
# 需要导入模块: from anuga.file.netcdf import NetCDFFile [as 别名]
# 或者: from anuga.file.netcdf.NetCDFFile import order [as 别名]
# NOTE(review): this snippet was extracted from anuga/file/sww_merge.py with all
# indentation stripped and the body truncated below — it is not runnable as-is.
# Python 2 syntax (print statements) throughout.
def _sww_merge_parallel_non_smooth(swwfiles, output, verbose=False, delete_old=False):
"""
Merge a list of sww files into a single file.
Used to merge files created by parallel runs.
The sww files to be merged must have exactly the same timesteps.
It is assumed that the separate sww files have been stored in non_smooth
format.
Note that some advanced information and custom quantities may not be
exported.
swwfiles is a list of .sww files to merge.
output is the output filename, including .sww extension.
verbose True to log output information
"""
if verbose:
print "MERGING SWW Files"
first_file = True
tri_offset = 0
for filename in swwfiles:
if verbose:
print 'Reading file ', filename, ':'
fid = NetCDFFile(filename, netcdf_mode_r)
# First file only: read global mesh metadata and allocate the merged output arrays.
if first_file:
times = fid.variables['time'][:]
n_steps = len(times)
number_of_timesteps = fid.dimensions['number_of_timesteps']
#print n_steps, number_of_timesteps
starttime = int(fid.starttime)
# Accumulators for static/dynamic, vertex-based and centroid-based quantities.
out_s_quantities = {}
out_d_quantities = {}
out_s_c_quantities = {}
out_d_c_quantities = {}
xllcorner = fid.xllcorner
yllcorner = fid.yllcorner
number_of_global_triangles = int(fid.number_of_global_triangles)
number_of_global_nodes = int(fid.number_of_global_nodes)
# Non-smooth storage keeps 3 distinct vertices per triangle (no vertex sharing).
number_of_global_triangle_vertices = 3*number_of_global_triangles
order = fid.order
# NOTE(review): xllcorner/yllcorner are re-read redundantly here; trailing
# semicolons are a style artifact in the original source.
xllcorner = fid.xllcorner;
yllcorner = fid.yllcorner ;
zone = fid.zone;
false_easting = fid.false_easting;
false_northing = fid.false_northing;
datum = fid.datum;
projection = fid.projection;
# Global connectivity is trivially [0,1,2],[3,4,5],... since vertices are not shared.
g_volumes = num.arange(number_of_global_triangles*3).reshape(-1,3)
g_x = num.zeros((number_of_global_triangle_vertices,),num.float32)
g_y = num.zeros((number_of_global_triangle_vertices,),num.float32)
g_points = num.zeros((number_of_global_triangle_vertices,2),num.float32)
#=======================================
# Deal with the vertex based variables
#=======================================
quantities = set(['elevation', 'friction', 'stage', 'xmomentum',
'ymomentum', 'xvelocity', 'yvelocity', 'height'])
variables = set(fid.variables.keys())
# Only consider the known quantities that actually exist in this file.
quantities = list(quantities & variables)
static_quantities = []
dynamic_quantities = []
for quantity in quantities:
# Test if elevation is static
# A quantity with one row per timestep is dynamic; otherwise it is static.
if n_steps == fid.variables[quantity].shape[0]:
dynamic_quantities.append(quantity)
else:
static_quantities.append(quantity)
# Static Quantities are stored as a 1D array
for quantity in static_quantities:
out_s_quantities[quantity] = num.zeros((3*number_of_global_triangles,),num.float32)
#=======================================
# Deal with the centroid based variables
#=======================================
quantities = set(['elevation_c', 'friction_c', 'stage_c', 'xmomentum_c',
'ymomentum_c', 'xvelocity_c', 'yvelocity_c', 'height_c'])
#......... remainder of this function omitted in the extract .........
示例2: _sww_merge_parallel_smooth
# 需要导入模块: from anuga.file.netcdf import NetCDFFile [as 别名]
# 或者: from anuga.file.netcdf.NetCDFFile import order [as 别名]
# NOTE(review): extracted snippet — indentation stripped, body truncated below.
# Differs from the non-smooth variant in that arrays are sized by the number of
# global NODES (vertices are shared between triangles) rather than 3*triangles.
def _sww_merge_parallel_smooth(swwfiles, output, verbose=False, delete_old=False):
"""
Merge a list of sww files into a single file.
Use to merge files created by parallel runs.
The sww files to be merged must have exactly the same timesteps.
It is assumed that the separate sww files have been stored in non_smooth
format.
Note that some advanced information and custom quantities may not be
exported.
swwfiles is a list of .sww files to merge.
output is the output filename, including .sww extension.
verbose True to log output information
"""
if verbose:
print "MERGING SWW Files"
first_file = True
tri_offset = 0
for filename in swwfiles:
if verbose:
print 'Reading file ', filename, ':'
fid = NetCDFFile(filename, netcdf_mode_r)
# First file only: read global mesh metadata and allocate the merged output arrays.
if first_file:
times = fid.variables['time'][:]
n_steps = len(times)
#number_of_timesteps = fid.dimensions['number_of_timesteps']
#print n_steps, number_of_timesteps
starttime = int(fid.starttime)
# Accumulators for static/dynamic, vertex-based and centroid-based quantities.
out_s_quantities = {}
out_d_quantities = {}
out_s_c_quantities = {}
out_d_c_quantities = {}
xllcorner = fid.xllcorner
yllcorner = fid.yllcorner
number_of_global_triangles = int(fid.number_of_global_triangles)
number_of_global_nodes = int(fid.number_of_global_nodes)
order = fid.order
# NOTE(review): xllcorner/yllcorner re-read redundantly; trailing semicolons
# are a style artifact in the original source.
xllcorner = fid.xllcorner;
yllcorner = fid.yllcorner ;
zone = fid.zone;
false_easting = fid.false_easting;
false_northing = fid.false_northing;
datum = fid.datum;
projection = fid.projection;
# Smooth storage: shared vertices, so coordinates are per global node.
g_volumes = num.zeros((number_of_global_triangles,3),num.int)
g_x = num.zeros((number_of_global_nodes,),num.float32)
g_y = num.zeros((number_of_global_nodes,),num.float32)
g_points = num.zeros((number_of_global_nodes,2),num.float32)
#=====================================
# Deal with the vertex based variables
#=====================================
quantities = set(['elevation', 'friction', 'stage', 'xmomentum',
'ymomentum', 'xvelocity', 'yvelocity', 'height'])
variables = set(fid.variables.keys())
# Only consider the known quantities that actually exist in this file.
quantities = list(quantities & variables)
static_quantities = []
dynamic_quantities = []
for quantity in quantities:
# Test if quantity is static
# A quantity with one row per timestep is dynamic; otherwise it is static.
if n_steps == fid.variables[quantity].shape[0]:
dynamic_quantities.append(quantity)
else:
static_quantities.append(quantity)
for quantity in static_quantities:
out_s_quantities[quantity] = num.zeros((number_of_global_nodes,),num.float32)
# Quantities are stored as a 2D array of timesteps x data.
for quantity in dynamic_quantities:
out_d_quantities[quantity] = \
num.zeros((n_steps,number_of_global_nodes),num.float32)
#=======================================
# Deal with the centroid based variables
#=======================================
quantities = set(['elevation_c', 'friction_c', 'stage_c', 'xmomentum_c',
'ymomentum_c', 'xvelocity_c', 'yvelocity_c', 'height_c'])
variables = set(fid.variables.keys())
#......... remainder of this function omitted in the extract .........
示例3: _sww_merge
# 需要导入模块: from anuga.file.netcdf import NetCDFFile [as 别名]
# 或者: from anuga.file.netcdf.NetCDFFile import order [as 别名]
# NOTE(review): extracted snippet — indentation stripped, body truncated below.
# Simple (non-parallel-aware) merge: meshes are concatenated, not stitched, so
# coincident points/edges from different files remain duplicated in the output.
def _sww_merge(swwfiles, output, verbose=False):
"""
Merge a list of sww files into a single file.
May be useful for parallel runs. Note that colinear points and
edges are not merged: there will essentially be multiple meshes within
the one sww file.
The sww files to be merged must have exactly the same timesteps. Note
that some advanced information and custom quantities may not be
exported.
swwfiles is a list of .sww files to merge.
output is the output filename, including .sww extension.
verbose True to log output information
"""
if verbose:
print "MERGING SWW Files"
# Fixed quantity sets for this simple merge (unlike the parallel variants,
# which discover quantities from the file contents).
static_quantities = ['elevation']
dynamic_quantities = ['stage', 'xmomentum', 'ymomentum']
first_file = True
tri_offset = 0
for filename in swwfiles:
if verbose:
print 'Reading file ', filename, ':'
fid = NetCDFFile(filename, netcdf_mode_r)
tris = fid.variables['volumes'][:]
# First file: initialise accumulators and copy its mesh/metadata verbatim.
if first_file:
times = fid.variables['time'][:]
x = []
y = []
out_tris = list(tris)
out_s_quantities = {}
out_d_quantities = {}
xllcorner = fid.xllcorner
yllcorner = fid.yllcorner
order = fid.order
# NOTE(review): xllcorner/yllcorner re-read redundantly; trailing semicolons
# are a style artifact in the original source.
xllcorner = fid.xllcorner;
yllcorner = fid.yllcorner ;
zone = fid.zone;
false_easting = fid.false_easting;
false_northing = fid.false_northing;
datum = fid.datum;
projection = fid.projection;
for quantity in static_quantities:
out_s_quantities[quantity] = []
# Quantities are stored as a 2D array of timesteps x data.
for quantity in dynamic_quantities:
out_d_quantities[quantity] = [ [] for _ in range(len(times))]
description = 'merged:' + getattr(fid, 'description')
first_file = False
else:
# Subsequent files: shift triangle vertex indices past the points
# already appended from earlier files.
for tri in tris:
# Advance new tri indices to point at newly appended points.
verts = [vertex+tri_offset for vertex in tri]
out_tris.append(verts)
# The two NetCDF backends expose dimensions differently.
try: # works with netcdf4
num_pts = len(fid.dimensions['number_of_points'])
except: # works with scientific.io.netcdf
num_pts = int(fid.dimensions['number_of_points'])
tri_offset += num_pts
if verbose:
print ' new triangle index offset is ', tri_offset
x.extend(list(fid.variables['x'][:]))
y.extend(list(fid.variables['y'][:]))
# Grow the list of static quantities associated with the x,y points
for quantity in static_quantities:
out_s_quantities[quantity].extend(fid.variables[quantity][:])
#Collate all dynamic quantities according to their timestep
for quantity in dynamic_quantities:
time_chunks = fid.variables[quantity][:]
for i, time_chunk in enumerate(time_chunks):
out_d_quantities[quantity][i].extend(time_chunk)
# Mash all points into a single big list
#......... remainder of this function omitted in the extract .........
示例4: esri2sww
# 需要导入模块: from anuga.file.netcdf import NetCDFFile [as 别名]
# 或者: from anuga.file.netcdf.NetCDFFile import order [as 别名]
#......... beginning of this function (esri2sww) omitted in the extract .........
# NOTE(review): fragment of esri2sww — converts a directory of ESRI grid files
# into an .sww NetCDF file. Indentation stripped; both ends truncated.
# Grid geometry: one point per lat/lon cell, two triangles per grid cell.
number_of_latitudes = len(latitudes)
number_of_longitudes = len(longitudes)
number_of_times = len(os.listdir(elevation_dir))
number_of_points = number_of_latitudes * number_of_longitudes
number_of_volumes = (number_of_latitudes - 1) * (number_of_longitudes - 1) * 2
# Work out the times
if len(elevation_files) > 1:
# Assume: The time period is less than 24hrs.
# Timestep is derived from the hour field encoded in the last 3 characters
# of the filenames, converted to seconds.
time_period = (int(elevation_files[1][-3:]) - int(elevation_files[0][-3:])) * 60 * 60
times = [x * time_period for x in range(len(elevation_files))]
else:
times = [0.0]
if verbose:
log.critical("------------------------------------------------")
log.critical("Statistics:")
log.critical(" Extent (lat/lon):")
log.critical(" lat in [%f, %f], len(lat) == %d" % (min(latitudes), max(latitudes), len(latitudes)))
log.critical(" lon in [%f, %f], len(lon) == %d" % (min(longitudes), max(longitudes), len(longitudes)))
log.critical(" t in [%f, %f], len(t) == %d" % (min(times), max(times), len(times)))
######### WRITE THE SWW FILE #############
# NetCDF file definition
outfile = NetCDFFile(sww_file, netcdf_mode_w)
# Create new file
outfile.institution = "Geoscience Australia"
outfile.description = "Converted from XXX"
# For sww compatibility
outfile.smoothing = "Yes"
outfile.order = 1
# Start time in seconds since the epoch (midnight 1/1/1970)
outfile.starttime = starttime = times[0]
# dimension definitions
outfile.createDimension("number_of_volumes", number_of_volumes)
outfile.createDimension("number_of_vertices", 3)
outfile.createDimension("number_of_points", number_of_points)
outfile.createDimension("number_of_timesteps", number_of_times)
# variable definitions
outfile.createVariable("x", precision, ("number_of_points",))
outfile.createVariable("y", precision, ("number_of_points",))
outfile.createVariable("elevation", precision, ("number_of_points",))
# FIXME: Backwards compatibility
# outfile.createVariable('z', precision, ('number_of_points',))
#################################
outfile.createVariable("volumes", netcdf_int, ("number_of_volumes", "number_of_vertices"))
outfile.createVariable("time", precision, ("number_of_timesteps",))
outfile.createVariable("stage", precision, ("number_of_timesteps", "number_of_points"))
outfile.createVariable("xmomentum", precision, ("number_of_timesteps", "number_of_points"))
outfile.createVariable("ymomentum", precision, ("number_of_timesteps", "number_of_points"))
# Store
from anuga.coordinate_transforms.redfearn import redfearn