本文整理匯總了Python中geodata.netcdf.DatasetNetCDF.unload方法的典型用法代碼示例。如果您正苦於以下問題:Python DatasetNetCDF.unload方法的具體用法?Python DatasetNetCDF.unload怎麽用?Python DatasetNetCDF.unload使用的例子?那麽, 這裏精選的方法代碼示例或許可以為您提供幫助。您也可以進一步了解該方法所在類geodata.netcdf.DatasetNetCDF
的用法示例。
在下文中一共展示了DatasetNetCDF.unload方法的1個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於係統推薦出更棒的Python代碼示例。
示例1: performExtraction
# 需要導入模塊: from geodata.netcdf import DatasetNetCDF [as 別名]
# 或者: from geodata.netcdf.DatasetNetCDF import unload [as 別名]
#......... part of the code is omitted here (the function signature and earlier body are not shown in this excerpt) .........
# NOTE(review): this is the (truncated) body of performExtraction(); based on the calls below
# (CPU.Extract(template=stndata, ...), DatasetNetCDF sink) it extracts point data at station
# locations from a gridded source dataset and optionally writes it to a NetCDF file.
# Python-2 statement syntax (raise Exc, "msg") is used throughout — do not run under Python 3.
## extract meta data from arguments
module, dataargs, loadfct, filepath, datamsgstr = getMetaData(dataset, mode, dataargs)
dataset_name = dataargs.dataset_name; periodstr = dataargs.periodstr; avgfolder = dataargs.avgfolder
# load template dataset (provides the station locations to extract at)
stndata = stnfct() # load station dataset from function
if not isinstance(stndata, Dataset): raise TypeError
# N.B.: the loading function is necessary, because DatasetNetCDF instances do not pickle well
# determine age of source file (used below to decide whether an existing target is up to date)
if not loverwrite: sourceage = datetime.fromtimestamp(os.path.getmtime(filepath))
# get filename for target dataset and do some checks
filename = getTargetFile(stndata.name, dataset, mode, module, dataargs, lwrite)
if ldebug: filename = 'test_' + filename
if not os.path.exists(avgfolder): raise IOError, "Dataset folder '{:s}' does not exist!".format(avgfolder)
lskip = False # else just go ahead
if lwrite:
  if lreturn:
    tmpfilename = filename # no temporary file if dataset is passed on (can't rename the file while it is open!)
  else:
    if lparallel: tmppfx = 'tmp_exstns_{:s}_'.format(pidstr[1:-1])
    else: tmppfx = 'tmp_exstns_'.format(pidstr[1:-1])
    # NOTE(review): the .format() call on the branch just above is a no-op — the string has no placeholder
    tmpfilename = tmppfx + filename
  filepath = avgfolder + filename
  tmpfilepath = avgfolder + tmpfilename
  if os.path.exists(filepath):
    if not loverwrite:
      age = datetime.fromtimestamp(os.path.getmtime(filepath))
      # if source file is newer than sink file or if sink file is a stub, recompute, otherwise skip
      if age > sourceage and os.path.getsize(filepath) > 1e5: lskip = True
      # N.B.: NetCDF files smaller than 100kB are usually incomplete header fragments from a previous crashed run
    if not lskip: os.remove(filepath) # recompute
# depending on last modification time of file or overwrite setting, start computation, or skip
if lskip:
  # print message
  skipmsg = "\n{:s} >>> Skipping: file '{:s}' in dataset '{:s}' already exists and is newer than source file.".format(pidstr,filename,dataset_name)
  skipmsg += "\n{:s} >>> ('{:s}')\n".format(pidstr,filepath)
  logger.info(skipmsg)
else:
  ## actually load datasets
  source = loadfct() # load source
  # check period (guards against mixing data from different climatology periods)
  if 'period' in source.atts and dataargs.periodstr != source.atts.period: # a NetCDF attribute
    raise DateError, "Specifed period is inconsistent with netcdf records: '{:s}' != '{:s}'".format(periodstr,source.atts.period)
  # print message
  if lclim: opmsgstr = "Extracting '{:s}'-type Point Data from Climatology ({:s})".format(stndata.name, periodstr)
  elif lts: opmsgstr = "Extracting '{:s}'-type Point Data from Time-series".format(stndata.name)
  else: raise NotImplementedError, "Unrecognized Mode: '{:s}'".format(mode)
  # print feedback to logger
  logger.info('\n{0:s} *** {1:^65s} *** \n{0:s} *** {2:^65s} *** \n'.format(pidstr,datamsgstr,opmsgstr))
  if not lparallel and ldebug: logger.info('\n'+str(source)+'\n')
  ## create new sink/target file
  # set attributes (copied from source, then overridden with station metadata)
  atts=source.atts.copy()
  atts['period'] = dataargs.periodstr if dataargs.periodstr else 'time-series'
  atts['name'] = dataset_name; atts['station'] = stndata.name
  atts['title'] = '{:s} (Stations) from {:s} {:s}'.format(stndata.title,dataset_name,mode.title())
  # make new dataset
  if lwrite: # write to NetCDF file (via the temporary filename computed above)
    if os.path.exists(tmpfilepath): os.remove(tmpfilepath) # remove old temp files
    sink = DatasetNetCDF(folder=avgfolder, filelist=[tmpfilename], atts=atts, mode='w')
  else: sink = Dataset(atts=atts) # only create dataset in memory
  # initialize processing
  CPU = CentralProcessingUnit(source, sink, varlist=varlist, tmp=False, feedback=ldebug)
  # extract data at station locations
  CPU.Extract(template=stndata, flush=True)
  # get results
  CPU.sync(flush=True)
  # print dataset
  if not lparallel and ldebug:
    logger.info('\n'+str(sink)+'\n')
  # write results to file
  if lwrite:
    sink.sync()
    writemsg = "\n{:s} >>> Writing to file '{:s}' in dataset {:s}".format(pidstr,filename,dataset_name)
    writemsg += "\n{:s} >>> ('{:s}')\n".format(pidstr,filepath)
    logger.info(writemsg)
    # rename file to proper name (atomic swap of temp file into place)
    if not lreturn:
      sink.unload(); sink.close(); del sink # destroy all references
      if os.path.exists(filepath): os.remove(filepath) # remove old file
      os.rename(tmpfilepath,filepath)
      # N.B.: there is no temporary file if the dataset is returned, because an open file can't be renamed
  # clean up and return
  source.unload(); del source#, CPU
  if lreturn:
    return sink # return dataset for further use (netcdf file still open!)
  else:
    return 0 # "exit code"