本文整理汇总了Python中netCDF4.Dataset.getncattr方法的典型用法代码示例。如果您正苦于以下问题:Python Dataset.getncattr方法的具体用法?Python Dataset.getncattr怎么用?Python Dataset.getncattr使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类netCDF4.Dataset的用法示例。
在下文中一共展示了Dataset.getncattr方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: __init__
# 需要导入模块: from netCDF4 import Dataset [as 别名]
# 或者: from netCDF4.Dataset import getncattr [as 别名]
def __init__(self, filename, gdal_dataset, gdal_metadata, date=None,
             ds=None, bands=None, cachedir=None, *args, **kwargs):
    """Create a VRT for a Lambert-projected MET Norway netCDF file.

    Raises WrongMapperError when the file has no 'projection_lambert'
    variable, i.e. it is not handled by this mapper.
    """
    self.test_mapper(filename)
    # Prefer the caller-supplied date; otherwise parse it from the file name
    timestamp = date or self.get_date(filename)
    ds = Dataset(filename)
    try:
        self.srcDSProjection = NSR(ds.variables['projection_lambert'].proj4)
    except KeyError:
        raise WrongMapperError
    self.create_vrt(filename, gdal_dataset, gdal_metadata, timestamp, ds, bands, cachedir)
    # GCMD metadata entries, set in the same order as before
    metadata_items = [
        ('instrument', json.dumps(pti.get_gcmd_instrument('Computer'))),
        ('platform', json.dumps(pti.get_gcmd_platform('Earth Observation Satellites'))),
        ('Data Center', 'NO/MET'),
        ('Entry Title', str(ds.getncattr('title'))),
    ]
    for key, value in metadata_items:
        self.dataset.SetMetadataItem(key, value)
    try:
        # 'summary' is optional; skip the field when the attribute is absent
        self.dataset.SetMetadataItem('summary', str(ds.getncattr('summary')))
    except AttributeError:
        pass
示例2: _compare_netcdf_files
# 需要导入模块: from netCDF4 import Dataset [as 别名]
# 或者: from netCDF4.Dataset import getncattr [as 别名]
def _compare_netcdf_files(self, original, new, ext="nc"):
    """
    Compare the contents of two netcdf files
    """
    path_original = os.path.join(self.readDirectory, '%s.%s' % (original, ext))
    path_new = os.path.join(self.writeDirectory, '%s.%s' % (new, ext))
    dO = Dataset(path_original)
    dN = Dataset(path_new)
    # (variable name, decimal precision) pairs, compared in this order
    checks = (('time', 5),
              ('lon', 5),
              ('lat', 5),
              ('precipitation', 5),
              ('pressure', 4),
              ('relative_humidity', 4),
              ('wind_speed', 5),
              ('direct_radiation', 4),
              ('diffusive_radiation', 5),
              ('temperature', 5),
              ('cloud_cover', 5))
    for name, places in checks:
        assert_almost_equal(dO.variables[name][:], dN.variables[name][:], decimal=places)
    # global attributes must match as well
    self.assertEqual(dO.getncattr("proj4"), dN.getncattr("proj4"))
    assert_almost_equal(dO.getncattr("geotransform"), dN.getncattr("geotransform"))
    dO.close()
    dN.close()
示例3: wrf_copy_attributes
# 需要导入模块: from netCDF4 import Dataset [as 别名]
# 或者: from netCDF4.Dataset import getncattr [as 别名]
def wrf_copy_attributes( infilename, outfilename, nlevs ):
    '''
    Copies the netCDF attributes of one file into another file
    that is created by this function. This is so that information
    like the model start date, dx, and namelist options are data
    attributes in post-processed netCDF files. This will assume that
    the grid domain in both files is the same, and will use the west-
    east and south-north values from the input file.
    Parameters
    ----------
    infilename: The name/path of the input file to be read
    outfilename: The name/path of the output file to be written
    nlevs: The number of vertical levels that the output file should have
    '''
    ## open the files
    infile = Dataset( infilename )
    outfile = Dataset( outfilename, 'w', format='NETCDF4' )
    try:
        ## create dimensions: fixed vertical level count, unlimited time,
        ## and horizontal extents taken from the input's global attributes.
        ## (The return values were previously bound to unused — and
        ## misleadingly named — locals; they are not needed.)
        outfile.createDimension( 'bottom_top', nlevs )
        outfile.createDimension( 'time', None )
        outfile.createDimension( 'south_north', infile.getncattr('SOUTH-NORTH_PATCH_END_UNSTAG') )
        outfile.createDimension( 'west_east', infile.getncattr('WEST-EAST_PATCH_END_UNSTAG') )
        ## copy the global attributes to the new file
        for attr in infile.ncattrs():
            outfile.setncattr( attr, infile.getncattr( attr ) )
    finally:
        ## close both files even if an attribute copy fails (fixes a
        ## handle leak on the error path)
        infile.close()
        outfile.close()
示例4: netcdf_to_parameter
# 需要导入模块: from netCDF4 import Dataset [as 别名]
# 或者: from netCDF4.Dataset import getncattr [as 别名]
def netcdf_to_parameter(inputFileName, outputFileName, event_emitter=None, **kwargs):
    """Convert a netCDF parameter file into a text parameter file.

    Parameters
    ----------
    inputFileName: path of the netCDF file to read
    outputFileName: path of the text parameter file to write
    event_emitter: optional emitter; 'progress' events are fired at 0% and 100%
    """
    fileHandle = Dataset(inputFileName, 'r')
    temporaryFileHandle = open(outputFileName, 'w')
    try:
        # header lines: the 'title' and 'version' global attributes
        attributes = fileHandle.ncattrs()
        for attribute in attributes:
            if attribute == 'title':
                attributeValue = repr(str(fileHandle.getncattr(attribute))).replace("'", "")
                temporaryFileHandle.write(attributeValue+'\n')
            if attribute == 'version':
                attributeValue = repr(str(fileHandle.getncattr(attribute))).replace("'", "")
                temporaryFileHandle.write(attributeValue+'\n')
            if attribute == 'number_of_hrus':
                # NOTE(review): numberOfHruCells stays unbound when this
                # attribute is missing and write_variable_data_to_file would
                # then raise — confirm the producer always writes it.
                numberOfHruCells = int(repr(str(fileHandle.getncattr(attribute))).replace("'", ""))
        # dimensions section
        dim = find_dimensions(fileHandle)
        dimensionNames = dim[0]
        dimensionValues = dim[1]
        temporaryFileHandle.write('** Dimensions **\n')
        for index in range(len(dimensionNames)):
            temporaryFileHandle.write('####\n'+dimensionNames[index]+'\n'+str(dimensionValues[index])+'\n')
        # variables as stored in the file
        varFromFile = find_variables_from_file(fileHandle)
        variableNamesFromFile = varFromFile[0]
        variableDimensionsFromFile = varFromFile[1]
        variableTypesFromFile = varFromFile[2]
        # variables to emit
        var = find_variables(variableNamesFromFile, variableDimensionsFromFile, variableTypesFromFile)
        variableNames = var[0]
        variableDimensions = var[1]
        variableTypes = var[2]
        numberOfParameterValues = find_number_of_parameter_values(variableDimensions, dimensionNames, dimensionValues)
        countOfDimensions = find_count_of_dimensions(variableDimensions)
        sizeOfLatitudeVariable = find_size_of_latitude_variable(fileHandle)
        # progress: start
        kwargs['event_name'] = 'nc_to_parameter'
        kwargs['event_description'] = 'creating input parameter file from netcdf file'
        kwargs['progress_value'] = 0.00
        if event_emitter:
            event_emitter.emit('progress', **kwargs)
        write_variable_data_to_file(temporaryFileHandle, fileHandle, variableNames, \
            variableDimensions, countOfDimensions, sizeOfLatitudeVariable, \
            numberOfParameterValues, variableTypes, numberOfHruCells, event_emitter=event_emitter, **kwargs)
        # progress: done
        kwargs['event_name'] = 'nc_to_parameter'
        kwargs['event_description'] = 'creating input parameter file from output netcdf file'
        kwargs['progress_value'] = 100
        if event_emitter:
            event_emitter.emit('progress', **kwargs)
    finally:
        # fix: both handles were previously leaked
        temporaryFileHandle.close()
        fileHandle.close()
示例5: extract_row_column_hru_information
# 需要导入模块: from netCDF4 import Dataset [as 别名]
# 或者: from netCDF4.Dataset import getncattr [as 别名]
def extract_row_column_hru_information(parameterFile):
    """Read the HRU grid shape from a netCDF parameter file.

    Returns a (numberOfHruCells, numberOfRows, numberOfColumns) tuple taken
    from the global attributes 'number_of_hrus', 'number_of_rows' and
    'number_of_columns'.

    Raises AttributeError when any of the three attributes is missing
    (previously this surfaced as UnboundLocalError at the return).
    """
    fileHandle = Dataset(parameterFile, 'r')
    try:
        # Direct attribute lookups replace the previous linear scan over
        # ncattrs() and the roundabout int(repr(str(...))) conversion,
        # which only stripped the quotes repr() itself added.
        numberOfHruCells = int(fileHandle.getncattr('number_of_hrus'))
        numberOfRows = int(fileHandle.getncattr('number_of_rows'))
        numberOfColumns = int(fileHandle.getncattr('number_of_columns'))
    finally:
        # fix: the dataset handle was never closed
        fileHandle.close()
    return numberOfHruCells, numberOfRows, numberOfColumns
示例6: read_one_file
# 需要导入模块: from netCDF4 import Dataset [as 别名]
# 或者: from netCDF4.Dataset import getncattr [as 别名]
def read_one_file(self, fname, **kwargs):
    """Read one netCDF file and initialise this object from its contents.

    Populates self.dims, self.vars, self.var_dims, self.var_aver_methods
    and self.attrs, then calls self.recheck_time().

    Raises RuntimeError when the file has no (or an empty) TIME dimension.
    """
    self.init_clean()  # reset all containers before loading
    f = Dataset(fname, **kwargs)
    # dimension name -> length
    for d in f.dimensions:
        self.dims[str(d)] = len(f.dimensions[d])
    if 'TIME' not in self.dims or self.dims['TIME'] == 0:
        raise RuntimeError("%s contains no data" % fname)
    for v in f.variables:
        strv = str(v)
        ncv = f.variables[v]
        v_arr = ncv[:]
        # datetime handling: the 'datetime' variable is converted to
        # datetime objects whether it is stored as formatted strings or
        # as numeric offsets
        if v == 'datetime':
            datetime_type = type(v_arr[0])
            if datetime_type in (np.string_ , str, unicode):
                # string form, parsed with the module-level _std_datetime_fmt
                v_arr = np.array([datetime.strptime(datestr, _std_datetime_fmt) for datestr in v_arr])
            else:
                # numeric form, interpreted relative to _std_datetime_units
                v_arr = num2date(v_arr, units=_std_datetime_units)
        self.vars[strv] = v_arr
        self.var_dims[strv] = tuple([str(dimname) for dimname in ncv.dimensions])
        # aver_method: prefer the variable's own attribute, then the
        # module-level _special_avermethods table, else default to 'mean'
        if 'aver_method' in ncv.ncattrs():
            self.var_aver_methods[strv] = str(ncv.getncattr('aver_method'))
        elif v in _special_avermethods:
            self.var_aver_methods[strv] = _special_avermethods[v]
        else:
            self.var_aver_methods[strv] = 'mean'
    # copy global attributes verbatim
    for a in f.ncattrs():
        self.attrs[str(a)] = f.getncattr(a)
    self.recheck_time()
    f.close()
示例7: wrf_copy_attributes
# 需要导入模块: from netCDF4 import Dataset [as 别名]
# 或者: from netCDF4.Dataset import getncattr [as 别名]
def wrf_copy_attributes( infilename, outfilename ):
    '''
    Create `outfilename` and copy all global netCDF attributes from
    `infilename` into it, sizing the horizontal dimensions from the input
    file's patch-extent attributes. Unlike the three-argument variant, the
    vertical dimension is left unlimited.
    Parameters
    ----------
    infilename: The name/path of the input file to be read
    outfilename: The name/path of the output file to be written
    '''
    ## open the files
    infile = Dataset( infilename )
    outfile = Dataset( outfilename, 'w', format='NETCDF4' )
    try:
        ## create dimensions (return values are unused, so not bound)
        outfile.createDimension( 'bottom_top', None )
        outfile.createDimension( 'time', None )
        outfile.createDimension( 'south_north', infile.getncattr('SOUTH-NORTH_PATCH_END_UNSTAG') )
        outfile.createDimension( 'west_east', infile.getncattr('WEST-EAST_PATCH_END_UNSTAG') )
        ## copy the global attributes to the new file
        for attr in infile.ncattrs():
            outfile.setncattr( attr, infile.getncattr( attr ) )
    finally:
        ## close both files even on error (fixes a handle leak)
        infile.close()
        outfile.close()
示例8: WRFterrain_P
# 需要导入模块: from netCDF4 import Dataset [as 别名]
# 或者: from netCDF4.Dataset import getncattr [as 别名]
def WRFterrain_P(fileselection='control',dom=1,slice=0):
    """Load surface pressure (PSFC) and coordinates from a hard-coded WRF file.

    Parameters
    ----------
    fileselection: 'nouintah', 'withuintah', or anything else for the control run
    dom: WRF domain number used to build the file name
    slice: when 1, reduce xlong/xlat to the centre row/column 1-D slices
           (name shadows the builtin `slice`; kept for caller compatibility)

    Returns (terrain, xlong, xlat).
    """
    if fileselection=='nouintah':
        fname = '/uufs/chpc.utah.edu/common/home/horel-group/lawson/wrfout/1/NAM/2011112906_nouintah/wrfout_d0'+str(dom)+'_2011-11-29_06:00:00'
    elif fileselection=='withuintah':
        fname = '/uufs/chpc.utah.edu/common/home/horel-group/lawson/wrfout/1/NAM/2011112906_withuintah/wrfout_d0'+str(dom)+'_2011-11-29_06:00:00'
    else:
        fname = '/uufs/chpc.utah.edu/common/home/horel-group/lawson/WRFV3/test/em_real/wrfout_1.3_wasatch/wrfout_d0'+str(dom)+'_2011-12-01_00:00:00'
    nc = Dataset(fname,'r')
    terrain = nc.variables['PSFC'][0,:,:]
    xlong = nc.variables['XLONG'][0]
    xlat = nc.variables['XLAT'][0]
    if slice==1:
        Nx = nc.getncattr('WEST-EAST_GRID_DIMENSION')-1
        Ny = nc.getncattr('SOUTH-NORTH_GRID_DIMENSION')-1
        # fix: floor division — Ny/2 is a float under Python 3 and breaks indexing
        xlong = xlong[Ny//2,:]
        xlat = xlat[:,Nx//2]
    nc.close()  # fix: dataset handle was never closed
    return terrain, xlong, xlat
示例9: main
# 需要导入模块: from netCDF4 import Dataset [as 别名]
# 或者: from netCDF4.Dataset import getncattr [as 别名]
def main(args):
dsin = Dataset(args.infile)
dsout = Dataset(args.outfile, 'w')
# Copy global attributes
for att in dsin.ncattrs():
dsout.setncattr(att, dsin.getncattr(att))
# Determine variables to copy
if args.variable:
if not set(args.variable).issubset(set(dsin.variables.keys())):
raise AssertionError('Specified variables are not available in the input file')
vars_to_copy = set(args.variable)
# Vars as exclusion list?
if args.exclude:
vars_to_copy = set(dsin.variables.keys()).difference(vars_to_copy)
else:
vars_to_copy = dsin.variables.keys()
# Determine dimensions to copy
dims_to_copy = set()
for v in vars_to_copy:
dims_to_copy = dims_to_copy.union(set(dsin.variables[v].dimensions))
# Add associate dimvars (Assumes dimvars have same name as dimension)
if not all([x in dsin.variables.keys() for x in dims_to_copy]):
raise AssertionError('Not all dimenions being copied have associated dimension variables')
print 'Copying variables: {}'.format(vars_to_copy)
print 'Copying dimensions: {}'.format(dims_to_copy)
# Copy Dimensions
for dname, dim in dsin.dimensions.items():
if dname not in dims_to_copy:
continue
print dname, len(dim)
dsout.createDimension(dname, len(dim) if not dim.isunlimited() else None)
# Copy Variables
for v_name, varin in dsin.variables.items():
if v_name not in vars_to_copy:
continue
outVar = dsout.createVariable(v_name, varin.datatype, varin.dimensions)
print v_name, varin.datatype, varin.dimensions, varin.shape, len(varin.shape)
# Copy all variable attributes
outVar.setncatts({k: varin.getncattr(k) for k in varin.ncattrs()})
# Itteratively write variables with 3+ dimensions
if len(varin.shape) > 2:
count = float(varin.shape[0])
for i in range(varin.shape[0]):
if args.progress:
sys.stdout.write("\r{:.2%}".format(i/count))
outVar[i,:,:] = varin[i,:,:]
else:
outVar[:] = varin[:]
示例10: SiteList1
# 需要导入模块: from netCDF4 import Dataset [as 别名]
# 或者: from netCDF4.Dataset import getncattr [as 别名]
def SiteList1(self):
    '''
    Build the TCCON site list: for every file in self.FileList append a
    formatted line with site name, longitude, latitude, and the start and
    end dates (reformatted from YYYY/MM/DD to YYYY-MM-DD) to self.FileSave.
    '''
    # Coordinates are read from the NC files themselves (a txt-based
    # regex approach was abandoned; see history).
    for fname in self.FileList:
        ncFile = Dataset(fname, 'r', format='NETCDF3_CLASSIC')  # 'NCETCDF4'
        lon = ncFile.variables['long_deg'][0]
        lat = ncFile.variables['lat_deg'][0]
        site_name = ncFile.getncattr('longName')
        raw_start = ncFile.getncattr('start_date')
        raw_end = ncFile.getncattr('end_date')
        ncFile.close()
        # normalise the date format
        start_date = datetime.strptime(raw_start, '%Y/%m/%d').strftime('%Y-%m-%d')
        end_date = datetime.strptime(raw_end, '%Y/%m/%d').strftime('%Y-%m-%d')
        self.FileSave.append('%17s %15.6f %15.6f %15s %15s\n' %
                             (site_name, lon, lat, start_date, end_date))
示例11: read_attr
# 需要导入模块: from netCDF4 import Dataset [as 别名]
# 或者: from netCDF4.Dataset import getncattr [as 别名]
def read_attr(filename,attribute_name,varname=None):
    """Read a netCDF attribute via whichever library is configured.

    When varname is None the file's global attribute is returned,
    otherwise the named variable's attribute. The module-level `nclib`
    switch selects between the netCDF4 and Nio back ends.

    NOTE(review): if nclib matches neither NETCDF4 nor NIO, `ncfile` and
    `att_val` are never bound and this raises an UnboundLocalError.
    """
    if nclib==NETCDF4:
        ncfile=Dataset(filename)
        if varname is None:
            # global attribute
            att_val=ncfile.getncattr(attribute_name)
        else:
            att_val=ncfile.variables[varname].getncattr(attribute_name)
    elif nclib==NIO:
        ncfile=Nio.open_file(filename,format="nc")
        if varname is None:
            # Nio exposes attributes through the object __dict__
            att_val=ncfile.__dict__[attribute_name]
        else:
            att_val=ncfile.variables[varname].__dict__[attribute_name]
    ncfile.close()
    return att_val
示例12: __init__
# 需要导入模块: from netCDF4 import Dataset [as 别名]
# 或者: from netCDF4.Dataset import getncattr [as 别名]
def __init__(self, filename, gdal_dataset, gdal_metadata, date=None,
             ds=None, bands=None, cachedir=None, *args, **kwargs):
    """Create a VRT from a UK Met Office ocean netCDF file and attach
    GCMD/ISO metadata items to the resulting dataset."""
    self.test_mapper(filename)
    # Prefer the caller-supplied date; otherwise parse it from the file name
    timestamp = date or self.get_date(filename)
    ds = Dataset(filename)
    self.create_vrt(filename, gdal_dataset, gdal_metadata, timestamp, ds, bands, cachedir)
    set_item = self.dataset.SetMetadataItem
    set_item('entry_title', str(ds.getncattr('title')))
    set_item('data_center', json.dumps(pti.get_gcmd_provider('UK/MOD/MET')))
    set_item('ISO_topic_category',
             pti.get_iso19115_topic_category('oceans')['iso_topic_category'])
    set_item('gcmd_location', json.dumps(pti.get_gcmd_location('sea surface')))
    # platform/instrument pairs are derived from the file contents
    set_item('platform/instrument',
             json.dumps(self.get_platform_and_instrument_list(ds)))
示例13: _handler
# 需要导入模块: from netCDF4 import Dataset [as 别名]
# 或者: from netCDF4.Dataset import getncattr [as 别名]
def _handler(self, request, response):
    """PyWPS handler: dump every global attribute of the input dataset
    (given either as an OPeNDAP URL or an uploaded file) to out.txt and
    attach it as the process output."""
    # TODO: can't set default value for input otherwise I will always get
    # both dataset and dataset_opendap
    response.update_status('PyWPS Process started.', 0)
    if 'dataset_opendap' in request.inputs:
        inpt = request.inputs['dataset_opendap'][0]
        resource = inpt.url
    else:
        inpt = request.inputs['dataset'][0]
        resource = inpt.file
    ds = Dataset(resource)
    try:
        with open(os.path.join(self.workdir, 'out.txt'), "w") as fp:
            response.outputs['output'].file = fp.name
            # NOTE(review): inpt.url is also written for the file branch —
            # presumably PyWPS inputs expose .url in both cases; confirm.
            fp.write("URL: {}\n\n".format(inpt.url))
            fp.write("MIME Type: {}\n\n".format(inpt.data_format.mime_type))
            for attr in ds.ncattrs():
                fp.write("{}: {}\n\n".format(attr, ds.getncattr(attr)))
    finally:
        # fix: the netCDF handle was never closed
        ds.close()
    response.update_status('PyWPS Process completed.', 100)
    return response
示例14: get_nc_data
# 需要导入模块: from netCDF4 import Dataset [as 别名]
# 或者: from netCDF4.Dataset import getncattr [as 别名]
def get_nc_data(fname):
""" using netcdf4 to import data
Usage
-----
>>>fname = ../data/test_ctd.nc
>>> get_nc_data(fname)
"""
f = Dataset(fname,'r')
global_attrs = {}
for i, v in enumerate(f.ncattrs()):
global_attrs[v] = f.getncattr(v)
#index may be 'dep', 'depth', or 'pres'
try:
cast = np.zeros( ( f.variables['dep'][:].shape[0],len(f.variables.keys()) ) )
except KeyError:
try:
cast = np.zeros( ( f.variables['depth'][:].shape[0],len(f.variables.keys()) ) )
except KeyError:
try:
cast = np.zeros( ( f.variables['pres'][:].shape[0],len(f.variables.keys()) ) )
except:
print " ERROR: NetCDF vertical coordinate not recognized. No dep, depth, or pres key in file \n"
sys.exit(1)
variable_names = {}
for j, v in enumerate( f.variables.keys() ):
try: #non coord dims have 4 axis
cast[:,j] = f.variables[v][0,:,0,0]
variable_names[v] = j
except ValueError: #coord dims have only one axis
cast[:,j] = f.variables[v][:]
variable_names[v] = j
return (global_attrs, variable_names, cast)
示例15: TrajectoryNetCDFWriter
# 需要导入模块: from netCDF4 import Dataset [as 别名]
# 或者: from netCDF4.Dataset import getncattr [as 别名]
#.........这里部分代码省略.........
Input:
- glider: Name of the glider deployed.
- deployment_date: String or DateTime of when glider was
first deployed.
"""
if 'trajectory' not in self._nc.variables:
# Setup Trajectory Dimension
self._nc.createDimension('traj_strlen', len(trajectory_string))
# Setup Trajectory Variable
trajectory_var = self._nc.createVariable(
u'trajectory',
'S1',
('traj_strlen',),
zlib=True,
complevel=self._comp_level
)
attrs = {
'cf_role': 'trajectory_id',
'long_name': 'Trajectory/Deployment Name', # NOQA
'comment': 'A trajectory is a single deployment of a glider and may span multiple data files.' # NOQA
}
for key, value in sorted(attrs.items()):
trajectory_var.setncattr(key, value)
else:
trajectory_var = self._nc.variables['trajectory']
# Set the trajectory variable data
trajectory_var[:] = stringtoarr(trajectory_string, len(trajectory_string))
if not self._nc.getncattr('id').strip():
self._nc.id = trajectory_string # Global id variable
def set_source_file_var(self, source_file_string, attrs=None):
""" Sets the trajectory dimension and variable for the dataset and the
global id attribute
Input:
- glider: Name of the glider deployed.
- deployment_date: String or DateTime of when glider was
first deployed.
"""
if 'source_file' not in self._nc.variables:
# Setup Trajectory Dimension
self._nc.createDimension('source_file_strlen', len(source_file_string))
# Setup Trajectory Variable
source_file_var = self._nc.createVariable(
u'source_file',
'S1',
('source_file_strlen',),
zlib=True,
complevel=self._comp_level
)
if attrs:
attrs['long_name'] = 'Source data file'
attrs['comment'] = 'Name of the source data file and associated file metadata'
for key, value in sorted(attrs.items()):
source_file_var.setncattr(key, value)
else:
source_file_var = self._nc.variables['source_file']