This article collects typical usage examples of the DatasetNetCDF.load method from the Python module geodata.netcdf. If you are unsure what DatasetNetCDF.load does, how to call it, or what it looks like in practice, the curated code example below may help. You can also explore further usage examples of the containing class, geodata.netcdf.DatasetNetCDF.
One code example of the DatasetNetCDF.load method is shown below.
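Before the full example, here is a minimal sketch of the underlying pattern: construct a DatasetNetCDF from one or more NetCDF files and call load() to read the selected variables into memory. The folder, file name, and variable name below are placeholders rather than values taken from the example; only the constructor keywords and the load() call mirror what the example itself uses.

from geodata.netcdf import DatasetNetCDF

# Placeholder folder, file name, and variable -- substitute an actual observational dataset.
dataset = DatasetNetCDF(name='GPCC', folder='/data/GPCC/', filelist=['gpcc_clim_1979-2009.nc'],
                        varlist=['precip'], multifile=False, ncformat='NETCDF4')
dataset.load()  # read the variable data from the NetCDF file(s) into memory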
Example 1: loadObservations
# Required module import: from geodata.netcdf import DatasetNetCDF [as alias]
# Or: from geodata.netcdf.DatasetNetCDF import load [as alias]
def loadObservations(name=None, folder=None, period=None, grid=None, station=None, shape=None, lencl=False,
                     varlist=None, varatts=None, filepattern=None, filelist=None, resolution=None,
                     projection=None, geotransform=None, axes=None, lautoregrid=None, mode='climatology'):
  ''' A function to load standardized observational datasets. '''
  # prepare input
  if mode.lower() == 'climatology': # post-processed climatology files
    # transform period
    if period is None or period == '':
      if name not in ('PCIC','PRISM','GPCC','NARR'):
        raise ValueError("A period is required to load observational climatologies.")
    elif isinstance(period,basestring):
      period = tuple([int(prd) for prd in period.split('-')])
    elif not isinstance(period,(int,np.integer)) and not ( isinstance(period,tuple) and len(period) == 2 ):
      raise TypeError(period)
  elif mode.lower() in ('time-series','timeseries'): # concatenated time-series files
    period = None # to indicate time-series (but for safety, the input must be more explicit)
    if lautoregrid is None: lautoregrid = False # this can take very long!
  # cast/copy varlist
  if isinstance(varlist,basestring): varlist = [varlist] # cast as list
  elif varlist is not None: varlist = list(varlist) # make copy to avoid interference
  # figure out station and shape options
  if station and shape: raise ArgumentError()
  elif station or shape:
    if grid is not None: raise NotImplementedError('Currently observational station data can only be loaded from the native grid.')
    if lautoregrid: raise GDALError('Station data can not be regridded, since it is not map data.')
    lstation = bool(station); lshape = bool(shape)
    grid = station if lstation else shape
    # add station/shape parameters
    if varlist:
      params = stn_params if lstation else shp_params
      for param in params:
        if param not in varlist: varlist.append(param)
  else:
    lstation = False; lshape = False
  # varlist (varlist = None means all variables)
  if varatts is None: varatts = default_varatts.copy()
  if varlist is not None: varlist = translateVarNames(varlist, varatts)
  # filelist
  if filelist is None:
    filename = getFileName(name=name, resolution=resolution, period=period, grid=grid, filepattern=filepattern)
    # check existence
    filepath = '{:s}/{:s}'.format(folder,filename)
    if not os.path.exists(filepath):
      nativename = getFileName(name=name, resolution=resolution, period=period, grid=None, filepattern=filepattern)
      nativepath = '{:s}/{:s}'.format(folder,nativename)
      if os.path.exists(nativepath):
        if lautoregrid:
          from processing.regrid import performRegridding # causes circular reference if imported earlier
          griddef = loadPickledGridDef(grid=grid, res=None, folder=grid_folder)
          dataargs = dict(period=period, resolution=resolution)
          performRegridding(name, 'climatology', griddef, dataargs) # default kwargs
        else: raise IOError("The dataset '{:s}' for the selected grid ('{:s}') is not available - use the regrid module to generate it.".format(filename,grid))
      else: raise IOError("The dataset file '{:s}' does not exist!\n('{:s}')".format(filename,filepath))
    filelist = [filename] # use the auto-generated file name
  # load dataset
  dataset = DatasetNetCDF(name=name, folder=folder, filelist=filelist, varlist=varlist, varatts=varatts,
                          axes=axes, multifile=False, ncformat='NETCDF4')
  # mask all shapes that are incomplete in the dataset
  if shape and lencl and 'shp_encl' in dataset:
    dataset.load() # need to load data before masking; is cheap for shape averages, anyway
    dataset.mask(mask='shp_encl', invert=True, skiplist=shp_params)
  # correct ordinal number of shape (should start at 1, not 0)
  if lshape:
    if dataset.hasAxis('shapes'): raise AxisError("Axis 'shapes' should be renamed to 'shape'!")
    if not dataset.hasAxis('shape'):
      raise AxisError()
    if dataset.shape.coord[0] == 0: dataset.shape.coord += 1
  # figure out grid
  if not lstation and not lshape:
    if grid is None or grid == name:
      dataset = addGDALtoDataset(dataset, projection=projection, geotransform=geotransform, gridfolder=grid_folder)
    elif isinstance(grid,basestring): # load from pickle file
      # griddef = loadPickledGridDef(grid=grid, res=None, filename=None, folder=grid_folder)
      # add GDAL functionality to dataset
      dataset = addGDALtoDataset(dataset, griddef=grid, gridfolder=grid_folder)
    else: raise TypeError(dataset)
    # N.B.: projection should be auto-detected, if geographic (lat/lon)
  return dataset
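Note that this function is an excerpt from a larger module: helpers such as getFileName, translateVarNames, loadPickledGridDef, and addGDALtoDataset, along with stn_params, shp_params, default_varatts, and grid_folder, are assumed to be defined alongside it, and os and numpy (as np) must be imported. As a rough illustration only, a call for a post-processed climatology might look like the sketch below; the dataset name, folder, resolution, period, and variable list are placeholder values, not taken from the original example.

# Hypothetical invocation based on the signature above -- all argument values are placeholders.
obs = loadObservations(name='GPCC', folder='/data/GPCC/gpccavg/', period='1979-2009',
                       resolution='05', varlist=['precip'], mode='climatology')
obs.load()  # DatasetNetCDF.load reads the selected variables into memory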