This article collects and summarizes typical usage examples of the Python method netCDF4.Dataset.history. If you are wondering what Dataset.history does, how to use it, or what it looks like in real code, the curated examples below may help. You can also explore further examples of the containing class, netCDF4.Dataset.

Fifteen code examples of Dataset.history are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Python code examples.
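Before the numbered examples, here is a minimal sketch of the attribute itself (the file name 'demo.nc' and the message strings are invented for illustration): history is an ordinary global attribute on a netCDF4.Dataset, set and read by plain attribute access, and a common convention, used in Example 1 below, is to prepend a timestamped line each time the file is modified.

import time
from netCDF4 import Dataset

nc = Dataset('demo.nc', 'w', format='NETCDF4')                        # hypothetical file name
nc.history = 'Created ' + time.ctime(time.time())                     # set the global attribute
nc.history = time.ctime(time.time()) + ': regridded\n' + nc.history  # prepend a new entry
if 'history' in nc.ncattrs():                                         # existence check, as in Example 1
    print(nc.history)
nc.close()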
Example 1: process
# Module to import: from netCDF4 import Dataset [as alias]
# Or alternatively: from netCDF4.Dataset import history [as alias]
def process(input, output, direction, collapse):
    """Process the file 'input', expanding or collapsing data along
    'direction' according to 'collapse'. Saves the result in 'output'."""
    try:
        nc = CDF(input)
    except:
        print "ERROR: Can't open %s" % input
        exit(1)
    try:
        out = CDF(output, 'w', format="NETCDF3_CLASSIC")
    except:
        print "ERROR: Can't open %s" % output
        exit(1)
    copy_attributes(nc, out)
    for name in nc.dimensions.keys():
        copy_dim(nc, out, name, direction)
    if collapse:
        for name in nc.variables.keys():
            if name == direction:
                continue
            collapse_var(nc, out, name, direction)
        message = "Collapsed using flowline.py"
    else:
        out.createDimension(direction, 3)
        # expanding along x takes the grid spacing from y, and vice versa
        if direction == 'x':
            dim = 'y'
        else:
            dim = 'x'
        var1 = nc.variables[dim]
        delta = np.diff(var1[:])[0]
        var2 = out.createVariable(direction, 'f8', (direction,))
        var2.axis = "%s" % direction.upper()
        var2.long_name = "%s-coordinate in Cartesian system" % direction.upper()
        var2.standard_name = "projection_%s_coordinate" % direction
        var2.units = var1.units
        var2[:] = [-delta, 0, delta]
        for name in nc.variables.keys():
            expand_var(nc, out, name, direction)
        message = asctime() + ': ' + ' '.join(argv) + '\n'
    if 'history' in out.ncattrs():
        out.history = message + out.history  # prepend to history string
    else:
        out.history = message
    out.close()
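A hypothetical invocation of Example 1 (the file names are illustrative; CDF is presumably this script's alias for netCDF4.Dataset, and copy_attributes, copy_dim, collapse_var and expand_var are helpers defined elsewhere in flowline.py): process('flowline_in.nc', 'flowline_out.nc', 'x', collapse=True)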
Example 2: max
# Module to import: from netCDF4 import Dataset [as alias]
# Or alternatively: from netCDF4.Dataset import history [as alias]
def max(time_0, x, y, n, set_point):  # n is the number of periods in a day
    nc = Dataset('/Users/Bora/Desktop/REU/Bond/DDCode/03.nc', 'r')
    for i in nc.variables:
        print ([i, nc.variables[i].units, nc.variables[i].shape])
    # Define the variables in the netCDF file and assign them
    long = np.array(nc.variables['longitude'][:], dtype=np.float32)
    lats = np.array(nc.variables['latitude'][:], dtype=np.float32)
    time = np.array(nc.variables['time'][:], dtype=np.float32)
    # Convert the requested years into time-step offsets into the base data (which covers 1996-2015)
    x_new = ((x % time_0) * (n * 365))
    y_new = ((y % time_0) * (n * 365))
    number_of_years = y - x
    time_period = time[x_new:y_new]  # time axis for the period specified
    temperature = np.array(nc.variables['temp'][x_new:y_new][:][:], dtype=np.float32)
    temperature_new = np.empty((len(time_period) / n, len(lats), len(long)), dtype=np.float32)
    for i in range(len(temperature_new)):
        temperature_new[i] = np.max(temperature[(i * n):((i + 1) * n)], axis=0)
    # accumulated across years below, so start from zeros rather than np.empty
    temperature_max = np.zeros((365, len(lats), len(long)), dtype=np.float32)
    for i in range(365):
        for j in range(number_of_years):
            temperature_max[i] = temperature_new[i + 365 * j] + temperature_max[i]
    temperature_max = temperature_max / number_of_years
    temperature_max = np.subtract(temperature_max, (273.16 + set_point))
    time_for_max = np.arange(1, 366, 1)
    nc.close()
    data4max = Dataset('/Users/Bora/Desktop/REU/Bond/DDCode/max_' + str(set_point) + '.nc', 'w', format='NETCDF4')
    data4max.close()
    data4max = Dataset('/Users/Bora/Desktop/REU/Bond/DDCode/max_' + str(set_point) + '.nc', 'a')
    time = data4max.createDimension('time', None)
    lat = data4max.createDimension('lat', 241)
    lon = data4max.createDimension('lon', 480)
    times = data4max.createVariable('time', 'f4', ('time',))
    latitudes = data4max.createVariable('latitude', 'f4', ('lat',))
    longitudes = data4max.createVariable('longitude', 'f4', ('lon',))
    temp = data4max.createVariable('temp', 'f4', ('time', 'lat', 'lon',))
    import time  # rebinds 'time' (previously the dimension handle) to the time module
    data4max.description = 'Max Temperature values from 1996-2016 excluding February 29th'
    data4max.source = 'netCDF4 python'
    data4max.history = 'Created ' + time.ctime(time.time())
    latitudes.units = 'degrees north'
    longitudes.units = 'degrees east'
    temp.units = 'K'
    times.units = 'days in a gregorian calendar'
    latitudes[:] = lats
    longitudes[:] = long
    times[:] = time_for_max
    temp[:] = temperature_max
    data4max.close()
    new_data = Dataset('/Users/Bora/Desktop/REU/Bond/DDCode/max_' + str(set_point) + '.nc', 'r')
    for i in new_data.variables:
        print([i, new_data.variables[i].units, new_data.variables[i].shape])
    new_data.close()
Example 3: setUp
# Module to import: from netCDF4 import Dataset [as alias]
# Or alternatively: from netCDF4.Dataset import history [as alias]
def setUp(self):
    self.files = [tempfile.NamedTemporaryFile(suffix='.nc', delete=False).name for nfile in range(2)]
    for nfile, file in enumerate(self.files):
        f = Dataset(file, 'w', format='NETCDF4_CLASSIC')
        f.createDimension('time', None)
        f.createDimension('y', ydim)
        f.createDimension('z', zdim)
        f.history = 'created today'
        time = f.createVariable('time', 'f', ('time',))
        #time.units = 'days since {0}-01-01'.format(1979+nfile)
        yr = 1979 + nfile
        time.units = 'days since %s-01-01' % yr
        time.calendar = 'standard'
        x = f.createVariable('x', 'f', ('time', 'y', 'z'))
        x.units = 'potatoes per square mile'
        nx1 = self.ninc * nfile
        nx2 = self.ninc * (nfile + 1)
        time[:] = np.arange(self.ninc)
        x[:] = np.arange(nx1, nx2).reshape(self.ninc, 1, 1) * np.ones((1, ydim, zdim))
        f.close()
Example 4: setUp
# Module to import: from netCDF4 import Dataset [as alias]
# Or alternatively: from netCDF4.Dataset import history [as alias]
def setUp(self):
    """Check that the AIMS system or this script hasn't been modified.
    This function checks that a downloaded file still has the same md5.
    """
    logging_aims()
    channel_id = '8365'
    from_date = '2008-09-30T00:27:27Z'
    thru_date = '2008-09-30T00:30:00Z'
    level_qc = 1
    aims_rss_val = 100
    xml_url = 'http://data.aims.gov.au/gbroosdata/services/rss/netcdf/level%s/%s' % (str(level_qc), str(aims_rss_val))
    aims_xml_info = parse_aims_xml(xml_url)
    channel_id_info = aims_xml_info[channel_id]
    self.netcdf_tmp_file_path = download_channel(channel_id, from_date, thru_date, level_qc)
    modify_soop_trv_netcdf(self.netcdf_tmp_file_path, channel_id_info)
    # force values of attributes which change all the time
    netcdf_file_obj = Dataset(self.netcdf_tmp_file_path, 'a', format='NETCDF4')
    netcdf_file_obj.date_created = "1970-01-01T00:00:00Z"
    netcdf_file_obj.history = 'data validation test only'
    netcdf_file_obj.close()
    shutil.move(self.netcdf_tmp_file_path, remove_creation_date_from_filename(self.netcdf_tmp_file_path))
    self.netcdf_tmp_file_path = remove_creation_date_from_filename(self.netcdf_tmp_file_path)
Example 5: new
# Module to import: from netCDF4 import Dataset [as alias]
# Or alternatively: from netCDF4.Dataset import history [as alias]
def new(self, secs):
    """
    Creates a new seNorge netCDF file.
    Convention: Climate and Forecast (CF) version 1.4

    @param secs: time in seconds since 1970-01-01 00:00:00
    """
    # create new file
    rootgrp = Dataset(self.filename, 'w')  # create new file using netcdf4
    # rootgrp = netcdf_file(self.filename, 'w')  # create new file using scipy.io
    # add root dimensions
    rootgrp.createDimension('time', size=self.default_senorge_time)
    rootgrp.createDimension('x', size=self.default_senorge_width)
    rootgrp.createDimension('y', size=self.default_senorge_height)
    # add root attributes
    rootgrp.Conventions = "CF-1.4"
    rootgrp.institution = "Norwegian Water Resources and Energy Directorate (NVE)"
    rootgrp.source = ""
    rootgrp.history = ""
    rootgrp.references = ""
    rootgrp.comment = "Data distributed via www.senorge.no"
    self.rootgrp = rootgrp
    # add coordinates
    time = self.rootgrp.createVariable('time', 'f8', ('time',))
    time.units = 'seconds since 1970-01-01 00:00:00 +00:00'
    time.long_name = 'time'
    time.standard_name = 'time'
    time[:] = secs
    self._set_utm()
    self._set_latlon()
Example 6: fix_netcdf
# Module to import: from netCDF4 import Dataset [as alias]
# Or alternatively: from netCDF4.Dataset import history [as alias]
def fix_netcdf(infile, outfile):
    """
    Write a new netCDF file, this time creating the coordinate variables correctly.
    """
    rootgrp = Dataset(outfile, 'w', format='NETCDF3_64BIT')
    data, targetAttrs = read_netcdf(infile, vars=('Prec', 'Wind', 'Tmax', 'Tmin', 'time', 'nav_lat', 'nav_lon'))
    res = 0.5
    # set dimensions
    lon = rootgrp.createDimension('lon', data['Prec'].shape[2])
    lat = rootgrp.createDimension('lat', data['Prec'].shape[1])
    time = rootgrp.createDimension('time', data['Prec'].shape[0])
    # do vars
    times = rootgrp.createVariable('time', 'f8', ('time',))
    times[:] = np.arange(data['Prec'].shape[0]) * 86400
    times.units = targetAttrs['time']['units']
    times.long_name = targetAttrs['time']['long_name']
    lat = rootgrp.createVariable('lat', 'f8', ('lat',))
    lat[:] = np.arange(data['nav_lat'].min(), data['nav_lat'].max() + res, res)
    lat.units = 'degrees_north'
    lat.long_name = 'Latitude'
    lon = rootgrp.createVariable('lon', 'f8', ('lon',))
    lon[:] = np.arange(data['nav_lon'].min(), data['nav_lon'].max() + res, res)
    lon.units = 'degrees_east'
    lon.long_name = 'Longitude'
    Precip = rootgrp.createVariable('Precip', 'f8', ('time', 'lat', 'lon',), fill_value=data['Prec'].fill_value)
    Precip[:, :, :] = data['Prec']
    Precip.units = targetAttrs['Prec']['units']
    Precip.long_name = targetAttrs['Prec']['long_name']
    Tmax = rootgrp.createVariable('Tmax', 'f8', ('time', 'lat', 'lon',), fill_value=data['Tmax'].fill_value)
    Tmax[:, :, :] = data['Tmax']
    Tmax.units = targetAttrs['Tmax']['units']
    Tmax.long_name = targetAttrs['Tmax']['long_name']
    Tmin = rootgrp.createVariable('Tmin', 'f8', ('time', 'lat', 'lon',), fill_value=data['Tmin'].fill_value)
    Tmin[:, :, :] = data['Tmin']
    Tmin.units = targetAttrs['Tmin']['units']
    Tmin.long_name = targetAttrs['Tmin']['long_name']
    Wind = rootgrp.createVariable('Wind', 'f8', ('time', 'lat', 'lon',), fill_value=data['Wind'].fill_value)
    Wind[:, :, :] = data['Wind']
    Wind.units = targetAttrs['Wind']['units']
    Wind.long_name = targetAttrs['Wind']['long_name']
    rootgrp.description = 'Global 1/2 Degree Gridded Meteorological VIC Forcing Data Set'
    rootgrp.history = 'Created: {}\n'.format(tm.ctime(tm.time()))
    rootgrp.source = sys.argv[0]  # records the name of the script used
    rootgrp.institution = "University of Washington Dept. of Civil and Environmental Engineering"
    rootgrp.sources = "UDel (Willmott and Matsuura 2007), CRU (Mitchell et al., 2004), NCEP/NCAR (Kalnay et al. 1996)"
    rootgrp.projection = "Geographic"
    rootgrp.surfSng_convention = "Traditional"
    rootgrp.close()
Example 7: write_netcdf
# Module to import: from netCDF4 import Dataset [as alias]
# Or alternatively: from netCDF4.Dataset import history [as alias]
def write_netcdf(ncfile, domain, period, ncvarinfo, format, data):
    if verbose:
        print "Writing {}".format(ncfile)
    nc = Dataset(ncfile, 'w', format=format)
    nc.createDimension('time', None)
    nc.createDimension('lon', domain['nx'])
    nc.createDimension('lat', domain['ny'])
    # coordinate variables
    time = nc.createVariable('time', 'f4', 'time')
    lon = nc.createVariable('lon', 'f4', 'lon')
    lat = nc.createVariable('lat', 'f4', 'lat')
    time[:] = np.arange(0, period['nsteps'], period['interval'].days)
    lon[:] = domain['west'] + np.arange(0.5, domain['nx']) * domain['lonres']
    lat[:] = domain['south'] + np.arange(0.5, domain['ny']) * domain['latres']
    for var in data.iterkeys():
        if verbose:
            print 'var: {}'.format(var)
        ncvar = nc.createVariable(var, 'f4', ('time', 'lat', 'lon',),
                                  fill_value=default_fillvals['f4'])
        ncvar[:] = data[var]
    attrtable = {
        'time': {
            'long_name': 'time',
            'units': 'days since {}'.format(period['start']),
            'calendar': 'standard',
        },
        'lon': {
            'long_name': 'longitude',
            'units': 'degrees_east',
        },
        'lat': {
            'long_name': 'latitude',
            'units': 'degrees_north',
        },
    }
    for var in ncvarinfo.keys():
        attrtable[ncvarinfo[var]['name']] = {
            'long_name': ncvarinfo[var]['longname'],
            'units': ncvarinfo[var]['units'],
            '_Fillvalue': default_fillvals['f4']
        }
    for var in attrtable.iterkeys():
        for attr in attrtable[var].iterkeys():
            nc.variables[var].setncattr(attr, attrtable[var][attr])
    nc.history = 'Created: {}\n'.format(tm.ctime(tm.time()))
    nc.history += ' '.join(sys.argv) + '\n'
    nc.close()
Example 8: prep
# Module to import: from netCDF4 import Dataset [as alias]
# Or alternatively: from netCDF4.Dataset import history [as alias]
def prep(infile, outfile):
    """
    Function to preprocess FFDAS netCDF files.
    Converts the 24 hourly variables into a single
    flux variable varying along the t dimension.
    """
    # read in input netCDF file
    datain = Dataset(infile, 'r')
    # open output netCDF file
    dataout = Dataset(outfile, "w", format="NETCDF4")
    # get dimensions from input file
    lat1 = datain.variables['latitude'][:]
    lon1 = datain.variables['longitude'][:]
    # get data from input file
    flux = np.zeros((24, len(lat1), len(lon1)))
    # read each hourly variable (flux_h01 ... flux_h24) and stack into one array
    for i in range(1, 25):
        tmpflux = datain.variables['flux_h%02d' % i][:]
        flux[i - 1, :, :] = tmpflux
    # set up output file
    timeout = dataout.createDimension("time", None)
    lat2 = dataout.createDimension("latitude", len(lat1))
    lon2 = dataout.createDimension("longitude", len(lon1))
    times = dataout.createVariable("hour", "i2", ("time",))
    latitudes = dataout.createVariable("latitude", "f4", ("latitude",))
    longitudes = dataout.createVariable("longitude", "f4", ("longitude",))
    outflux = dataout.createVariable("flux", "f8", ("time", "latitude", "longitude",))
    timearr = np.arange(1, 25, 1)
    # add some metadata
    dataout.description = "Converted Hourly FFDAS flux netCDF file"
    dataout.history = "Created " + time.ctime(time.time())
    dataout.source = "convert_ffdas_hrly.py - C. Martin - Univ. of MD - 2/2016"
    latitudes.units = "degrees north"
    longitudes.units = "degrees east"
    outflux.units = "kgC/cell/h"
    times.units = "hour of day"
    # write to file
    latitudes[:] = lat1
    longitudes[:] = lon1
    times[:] = timearr
    outflux[:] = flux
    # close files
    datain.close()
    dataout.close()
Example 9: writeCMIP5File
# Module to import: from netCDF4 import Dataset [as alias]
# Or alternatively: from netCDF4.Dataset import history [as alias]
def writeCMIP5File(modelName, scenario, myvarname, lon, lat, time, mydata, mydataanomaly, outfilename):
    myformat = 'NETCDF3_CLASSIC'
    if os.path.exists(outfilename):
        os.remove(outfilename)
    print "Results written to netCDF file: %s" % (outfilename)
    if myvarname == "sic":
        myvar = "SIC"
    f1 = Dataset(outfilename, mode='w', format=myformat)
    f1.title = "IPCC AR5 %s" % (myvar)
    f1.description = "IPCC AR5 running averages of %s for model %s for scenario %s" % (myvar, modelName, scenario)
    f1.history = "Created " + str(datetime.now())
    f1.source = "Trond Kristiansen ([email protected])"
    f1.type = "File in NetCDF3 format created using iceExtract.py"
    f1.Conventions = "CF-1.0"
    # Define dimensions
    f1.createDimension('x', len(lon))
    f1.createDimension('y', len(lat))
    f1.createDimension('time', None)
    vnc = f1.createVariable('longitude', 'd', ('x',), zlib=False)
    vnc.long_name = 'Longitude'
    vnc.units = 'degree_east'
    vnc.standard_name = 'longitude'
    vnc[:] = lon
    vnc = f1.createVariable('latitude', 'd', ('y',), zlib=False)
    vnc.long_name = 'Latitude'
    vnc.units = 'degree_north'
    vnc.standard_name = 'latitude'
    vnc[:] = lat
    v_time = f1.createVariable('time', 'd', ('time',), zlib=False)
    v_time.long_name = 'Years'
    v_time.units = 'Years'
    v_time.field = 'time, scalar, series'
    v_time[:] = time
    v_temp = f1.createVariable('SIC', 'd', ('time', 'y', 'x',), zlib=False)
    v_temp.long_name = "Sea-ice area fraction (%)"
    v_temp.units = "%"
    v_temp.time = "time"
    v_temp.field = "SIC, scalar, series"
    v_temp.missing_value = 1e20
    if myvarname == 'sic':
        f1.variables['SIC'][:, :, :] = mydata
    f1.close()
Example 10: write_netcdf
# Module to import: from netCDF4 import Dataset [as alias]
# Or alternatively: from netCDF4.Dataset import history [as alias]
def write_netcdf(ncfile, format, start, ndays, domain, model, data):
    print "Writing {}".format(ncfile)
    nc = Dataset(ncfile, 'w', format=format)
    nc.createDimension('time', None)
    nc.createDimension('lon', domain['nx'])
    nc.createDimension('lat', domain['ny'])
    # coordinate variables
    time = nc.createVariable('time', 'f4', 'time')
    lon = nc.createVariable('lon', 'f4', 'lon')
    lat = nc.createVariable('lat', 'f4', 'lat')
    time[:] = np.arange(0, ndays, 1)
    lon[:] = domain['west'] + np.arange(0, domain['nx']) * domain['lonres']
    lat[:] = domain['south'] + np.arange(0, domain['ny']) * domain['latres']
    for var in data.iterkeys():
        ncvar = nc.createVariable(var, 'f4', ('time', 'lat', 'lon',),
                                  fill_value=default_fillvals['f4'])
        ncvar[:] = data[var]
    attrtable = {
        'time': {
            'long_name': 'time',
            'units': 'days since {}'.format(start),
            'calendar': 'standard',
        },
        'lon': {
            'long_name': 'longitude',
            'units': 'degrees_east',
        },
        'lat': {
            'long_name': 'latitude',
            'units': 'degrees_north',
        },
        'sm': {
            'long_name': 'Soil moisture percentile',
            'units': '-',
            'valid_min': 0.,
            'valid_max': 1.,
        },
    }
    for var in attrtable.iterkeys():
        for attr in attrtable[var].iterkeys():
            nc.variables[var].setncattr(attr, attrtable[var][attr])
    nc.model = model
    nc.history = 'Created: {}\n'.format(tm.ctime(tm.time()))
    nc.history += ' '.join(sys.argv) + '\n'
    nc.close()
Example 11: tonetcdf
# Module to import: from netCDF4 import Dataset [as alias]
# Or alternatively: from netCDF4.Dataset import history [as alias]
def tonetcdf(self, varname='d', fn=''):
    """Save a numpy array to a netCDF file."""
    import sys, os, time
    from netCDF4 import Dataset
    if fn == '':
        fn = sys.argv[0].split('.')[0] + '_tmp.nc'
    if os.path.exists(fn):
        os.remove(fn)
    rootgrp = Dataset(fn, 'w', format='NETCDF4')
    rootgrp.history = 'Created by ' + sys.argv[0] + ' ' + time.ctime(time.time())
    # pad the data out to 4 dimensions: (time, depth, lat, lon)
    dims = self.data.shape
    if len(dims) == 2:
        self.data = self.data.reshape(1, 1, dims[0], dims[1])
    if len(dims) == 3:
        self.data = self.data.reshape(1, dims[-3], dims[-2], dims[-1])
    dims = self.data.shape  # refresh so the (time, depth, lat, lon) indices below are always valid
    depth = rootgrp.createDimension('depth', None)
    time = rootgrp.createDimension('time', None)  # rebinds 'time' from the module to the dimension handle
    lat = rootgrp.createDimension('lat', dims[-2])
    lon = rootgrp.createDimension('lon', dims[-1])
    times = rootgrp.createVariable('time', 'f8', ('time',))
    depths = rootgrp.createVariable('depth', 'f4', ('depth',))
    latitudes = rootgrp.createVariable('latitude', 'f4', ('lat',))
    longitudes = rootgrp.createVariable('longitude', 'f4', ('lon',))
    latitudes.units = 'degrees north'
    longitudes.units = 'degrees east'
    times.units = 'hours since 0001-01-01 00:00:00.0'
    times.calendar = 'gregorian'
    # fall back to index coordinates when none were supplied
    if len(self.time) == 1:
        self.time = range(dims[0])
    if len(self.depth) == 1:
        self.depth = range(dims[1])
    if len(self.lat) == 1:
        self.lat = range(dims[2])
    if len(self.lon) == 1:
        self.lon = range(dims[3])
    depths[:] = self.depth
    times[:] = self.time
    latitudes[:] = self.lat
    longitudes[:] = self.lon
    temp = rootgrp.createVariable(varname, 'f4', ('time', 'depth', 'lat', 'lon',))
    temp[:] = self.data
    rootgrp.close()
Example 12: tamoc_nc_file
# Module to import: from netCDF4 import Dataset [as alias]
# Or alternatively: from netCDF4.Dataset import history [as alias]
def tamoc_nc_file(fname, title, summary, source):
    """
    Write the header metadata to a netCDF file for TAMOC output.

    The TAMOC suite stores its output by default in a netCDF dataset file.
    This function writes the standard TAMOC metadata to the header of the
    netCDF file.

    Parameters
    ----------
    fname : str
        File name of the file to write
    title : str
        String stating the TAMOC module where the data originated and the
        type of data contained.
    summary : str
        String summarizing what is contained in the dataset or information
        needed to interpret the dataset
    source : str
        String describing the source of the data in the dataset or of related
        datasets

    Returns
    -------
    nc : `netCDF4.Dataset` object
        The `netCDF4.Dataset` object containing the open netCDF4 file where
        the data should be stored.
    """
    # Create the netCDF dataset object
    nc = Dataset(fname, 'w', format='NETCDF4_CLASSIC')
    # Write the netCDF header data for a TAMOC suite output
    nc.Conventions = 'TAMOC Modeling Suite Output File'
    nc.Metadata_Conventions = 'TAMOC Python Model'
    nc.featureType = 'profile'
    nc.cdm_data_type = 'Profile'
    nc.nodc_template_version = \
        'NODC_NetCDF_Profile_Orthogonal_Template_v1.0'
    nc.title = title
    nc.summary = summary
    nc.source = source
    nc.creator_url = 'http://github.com/socolofs/tamoc'
    nc.date_created = datetime.today().isoformat(' ')
    nc.date_modified = datetime.today().isoformat(' ')
    nc.history = 'Creation'
    # Return the netCDF dataset
    return nc
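A hedged usage sketch for Example 12 (the file name and metadata strings below are invented for illustration, not taken from TAMOC): tamoc_nc_file returns the still-open Dataset, so the caller adds its own dimensions and variables and closes the file itself.

nc = tamoc_nc_file('sbm_output.nc', 'Single bubble model output',
                   'Profile of simulated bubble state variables',
                   'TAMOC simulation')
nc.createDimension('z', None)                        # caller-defined dimension (hypothetical)
z = nc.createVariable('z', 'f8', ('z',))
z[:] = [0.0, 10.0, 20.0]                             # illustrative depth data
nc.date_modified = datetime.today().isoformat(' ')   # refresh before closing
nc.close()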
Example 13: main
# Module to import: from netCDF4 import Dataset [as alias]
# Or alternatively: from netCDF4.Dataset import history [as alias]
def main(JSONArray, outputFileName, wavelength=None, spectrum=None, recordTime=None, commandLine=None):
    '''
    Main netCDF handler; writes the data to the indicated netCDF file.
    '''
    netCDFHandler = Dataset(outputFileName, 'w', format='NETCDF4')
    dataMemberList = [JSONMembers[u"environment_sensor_set_reading"]
                      for JSONMembers in JSONArray]
    timeStampList = [JSONMembers[u'timestamp']
                     for JSONMembers in dataMemberList]
    timeDimension = netCDFHandler.createDimension("time", None)
    tempTimeVariable = netCDFHandler.createVariable(
        'time', str, ('time',), chunksizes=(1,))
    for i in range(len(timeStampList)):  # Assign times
        tempTimeVariable[i] = timeStampList[i]
    for data in dataMemberList[0]:
        if data != 'spectrometer' and type(dataMemberList[0][data]) not in (str, unicode):
            tempVariable = netCDFHandler.createVariable(
                renameTheValue(data), 'f4', ('time',))
            tempVariable[:] = getListOfValue(
                dataMemberList, data)  # Assign "values"
            if 'unit' in dataMemberList[0][data]:  # Assign units
                setattr(tempVariable, 'units', _UNIT_DICTIONARY[
                    dataMemberList[0][data]['unit']])
            if 'rawValue' in dataMemberList[0][data]:  # Assign "rawValues"
                netCDFHandler.createVariable(renameTheValue(data) + '_rawValue', 'f4', ('time',))[:] = \
                    getListOfRawValue(dataMemberList, data)
        elif type(dataMemberList[0][data]) in (str, unicode):
            netCDFHandler.createVariable(renameTheValue(data), str)[0] = dataMemberList[0][data]
        if data == 'spectrometer':  # Special care for spectrometers :)
            netCDFHandler.createVariable('Spectrometer_maxFixedIntensity', 'f4', ('time',))[:] = \
                getSpectrometerInformation(dataMemberList)[0]
            netCDFHandler.createVariable('Spectrometer_Integration_Time_In_Microseconds', 'f4', ('time',))[:] = \
                getSpectrometerInformation(dataMemberList)[1]
    if wavelength and spectrum:
        netCDFHandler.createDimension("wavelength", len(wavelength[0]))
        netCDFHandler.createVariable("wavelength", 'f4', ('wavelength',))[:] = wavelength[0]
        netCDFHandler.createVariable("spectrum", 'f4', ('time', 'wavelength'))[:, :] = spectrum
    netCDFHandler.history = recordTime + ': python ' + commandLine
    netCDFHandler.close()
Example 14: generate_nc
# Module to import: from netCDF4 import Dataset [as alias]
# Or alternatively: from netCDF4.Dataset import history [as alias]
def generate_nc(parser_context):
    parser = XLSParser()
    with open(parser_context.filepath, 'r') as f:
        doc = f.read()
    info = parser.extract_worksheets(doc)
    nccl = info[parser_context.worksheet]
    #header_line = 3
    #columns = nccl[header_line]
    #data_range = (4, 66)
    data_rows = nccl[parser_context.data_range[0]:parser_context.data_range[1]]
    print 'Generating', parser_context.output_file
    nc = Dataset(parser_context.output_file, 'w')
    nc.createDimension('time', len(data_rows) * 12)
    nc.GDAL = "GDAL 1.9.2, released 2012/10/08"
    nc.history = "Created dynamically in IPython Notebook 2013-11-14"
    nc.title = nccl[0][0]
    nc.summary = nccl[1][0]
    nc.naming_authority = 'GLOS'
    nc.source = 'GLERL'
    nc.standard_name_vocabulary = "http://www.cgd.ucar.edu/cms/eaton/cf-metadata/standard_name.html"
    nc.project = 'GLOS'
    nc.Conventions = "CF-1.6"
    time = nc.createVariable('time', 'f8', ('time',))
    time.standard_name = 'time'
    time.units = 'seconds since 1970-01-01'
    time.long_name = 'Time'
    time.axis = 'T'
    precip = nc.createVariable(parser_context.variable, 'f8', ('time',), fill_value=parser_context.fill_value)
    #precip.standard_name = 'precipitation_amount'
    precip.standard_name = parser_context.standard_name
    precip.units = parser_context.units
    # each data row holds one year: column 0 is the year, columns 1-12 the monthly values
    for i, row in enumerate(data_rows):
        for j in xrange(12):
            the_date = datetime(row[0], j + 1, 1)
            timestamp = calendar.timegm(the_date.utctimetuple())
            time[i * 12 + j] = timestamp
            try:
                value = float(row[j + 1])
            except ValueError:
                continue
            except TypeError:
                continue
            precip[i * 12 + j] = value
    nc.close()
Example 15: savenetcdf4
# Module to import: from netCDF4 import Dataset [as alias]
# Or alternatively: from netCDF4.Dataset import history [as alias]
def savenetcdf4(v, fn='', ndims=4, lons=[], lats=[], levels=[], records=[]):
    """Save numpy arrays to a netCDF file.
    v = {vname: vvalue, vname: vvalue}
    """
    import sys, os
    import time as mod_time
    from netCDF4 import Dataset
    if fn == '':
        fn = sys.argv[0].split('.')[0] + '_tmp.nc'
    if os.path.exists(fn):
        os.remove(fn)
    rootgrp = Dataset(fn, 'w', format='NETCDF4')
    rootgrp.history = 'Created by ' + sys.argv[0] + ' ' + mod_time.ctime(mod_time.time())
    if len(levels) != 0:
        depth = rootgrp.createDimension('depth', len(levels))
        depths = rootgrp.createVariable('depth', 'f8', ('depth',))
        depths[:] = levels
    if len(records) != 0:
        time = rootgrp.createDimension('time', None)
        times = rootgrp.createVariable('time', 'f8', ('time',))
        times.units = 'hours since 0001-01-01 00:00:00.0'
        times.calendar = 'gregorian'
        times[:] = records
    rootgrp.createDimension('lat', len(lats))
    latitude = rootgrp.createVariable('lat', 'f8', ('lat',))
    latitude.units = 'degrees north'
    latitude[:] = lats[:]
    rootgrp.createDimension('lon', len(lons))
    longitude = rootgrp.createVariable('lon', 'f8', ('lon',))
    longitude.units = 'degrees east'
    longitude[:] = lons[:]
    for varname in v.keys():
        if ndims == 4:
            rootgrp.createVariable(varname, 'f8', ('time', 'depth', 'lat', 'lon',), fill_value=99999)[:] = v[varname][:]
        elif ndims == 3:
            rootgrp.createVariable(varname, 'f8', ('depth', 'lat', 'lon',), fill_value=99999)[:] = v[varname][:]
        else:
            rootgrp.createVariable(varname, 'f8', ('lat', 'lon',), fill_value=99999)[:] = v[varname][:]
    rootgrp.close()