This page collects typical usage examples of the Dataset.variables['time'][:] method from the Python netCDF4 package. If you are wondering what Dataset.variables['time'][:] does, how to call it, or where to find worked examples, the curated samples below should help. You can also read further about the containing class, netCDF4.Dataset.
The 14 code examples of Dataset.variables['time'][:] below are sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Python code samples.
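Before the examples, a minimal self-contained sketch of the pattern itself may help; the file name and the values below are placeholders, not taken from any of the examples:

from netCDF4 import Dataset
import numpy as np

# Create a file with an unlimited time dimension ('example.nc' is a placeholder path)
nc = Dataset('example.nc', 'w', format='NETCDF4_CLASSIC')
nc.createDimension('time', None)                    # unlimited dimension
tvar = nc.createVariable('time', 'f8', ('time',))
tvar.units = 'days since 1800-01-01 00:00:00'
tvar.calendar = 'gregorian'

# Assign the whole axis in one slice; this is the idiom the examples below build on
nc.variables['time'][:] = np.arange(0, 365, dtype='f8')
nc.close()

# Re-open in append mode, as most of the examples do, and read the values back
nc = Dataset('example.nc', 'a')
print(nc.variables['time'][:].shape)                # (365,)
nc.close()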
Example 1: process
# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import variables['time'][:] [as alias]
from numpy import array  # needed for days_per_month below

def process(directory, head, tail, year_start):
    days_per_month = array([31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31])
    # Loop through the four years
    for year in range(year_start, year_start+4):
        # Check if this is a leap year and update days in February
        if year % 4 == 0:
            days_per_month[1] = 29
        else:
            days_per_month[1] = 28
        # Make a time axis with data on the 15th of every month
        # First get the number of days between year_start and the current year
        start_day = 365*(year-year_start)
        if year > year_start:
            for year_tmp in range(year_start, year):
                if year_tmp % 4 == 0:
                    # A leap year has occurred
                    start_day += 1
        # Start on Jan 15th at midnight
        time = [start_day + 14]
        # Loop over months
        for month in range(1, 12):
            time.append(start_day + sum(days_per_month[0:month]) + 14)
        file = directory + head + str(year) + tail
        print('Processing ' + file)
        id = Dataset(file, 'a')
        id.variables['time'][:] = time
        # Set the cycle_length to 4 years
        id.variables['time'].cycle_length = 365.0*4 + 1
        id.close()
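For context, a hypothetical call is shown below; the directory, prefix and suffix are illustrative assumptions, and files named <head><year><tail> must already exist since they are opened in append mode:

# Hypothetical invocation: rewrites the time axes of forcing_1992.nc ... forcing_1995.nc
# under /data/forcing/ (all paths and names are made up for illustration).
process('/data/forcing/', 'forcing_', '.nc', 1992)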
Example 2: filter2nc
# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import variables['time'][:] [as alias]
def filter2nc(self, outfile, tstart, tend, substep=12, varlist=None, **kwargs):
    """
    Filters the variables in the list, varlist, and outputs the results to netcdf
    """
    self.__dict__.update(kwargs)
    if tstart == -1:
        self.tstep = np.arange(0, self.Nt, 1)
    else:
        self.tstep = self.getTstep(tstart, tend)
    if varlist is None:
        varlist = ['eta', 'uc', 'vc', 'w']
    # Create the output file
    self.writeNC(outfile)
    # Create the output variables
    for vv in varlist:
        print('Creating variable: %s' % vv)
        self.create_nc_var(outfile, vv, ugrid[vv]['dimensions'], ugrid[vv]['attributes'],
            dtype=ugrid[vv]['dtype'], zlib=ugrid[vv]['zlib'],
            complevel=ugrid[vv]['complevel'], fill_value=ugrid[vv]['fill_value'])
    self.create_nc_var(outfile, 'time', ugrid['time']['dimensions'], ugrid['time']['attributes'])
    # Loop through and filter each variable (do layer by layer on 3D variables for sake of memory)
    nc = Dataset(outfile, 'a')
    # Create the time variable first
    nctime = othertime.SecondsSince(self.time[self.tstep[0]:self.tstep[-1]:substep])
    nc.variables['time'][:] = nctime
    for vv in varlist:
        print('Filtering variable: %s' % vv)
        if len(ugrid[vv]['dimensions']) == 2:
            dataf = self.__call__(tstart, tend, varname=vv)
            nc.variables[vv][:] = dataf[::substep, :].copy()
        elif len(ugrid[vv]['dimensions']) == 3:
            for kk in range(0, self.Nkmax):
                print('  layer: %d' % kk)
                self.klayer = [kk]
                dataf = self.__call__(tstart, tend, varname=vv)
                nc.variables[vv][:, kk, :] = dataf[::substep, :].copy()
    nc.close()
    print('#####\nComplete - Filtered data written to: \n%s \n#####' % outfile)
Example 3: writeNC
# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import variables['time'][:] [as alias]
def writeNC(self, outfile, dv=None):
    """
    Export the results to a netcdf file
    """
    from suntans_ugrid import ugrid
    from netCDF4 import Dataset
    # Fill in the depths with zero
    #if not self.__dict__.has_key('dv'):
    if dv is None:
        self.dv = np.zeros((self.Nc,))
    else:
        self.dv = dv
    if 'Nk' not in self.__dict__:
        self.Nk = self.Nkmax*np.ones((self.Nc,))
    Grid.writeNC(self, outfile)
    # write the time variable
    t = othertime.SecondsSince(self.time)
    self.create_nc_var(outfile, 'time', ugrid['time']['dimensions'], ugrid['time']['attributes'])
    # Create the other variables
    self.create_nc_var(outfile, 'eta', ('time','Nc'),
        {'long_name':'Sea surface elevation','units':'metres','coordinates':'time yv xv'})
    self.create_nc_var(outfile, 'uc', ('time','Nk','Nc'),
        {'long_name':'Eastward water velocity component','units':'metre second-1','coordinates':'time z_r yv xv'})
    self.create_nc_var(outfile, 'vc', ('time','Nk','Nc'),
        {'long_name':'Northward water velocity component','units':'metre second-1','coordinates':'time z_r yv xv'})
    self.create_nc_var(outfile, 'salt', ('time','Nk','Nc'),
        {'long_name':'Salinity','units':'ppt','coordinates':'time z_r yv xv'})
    self.create_nc_var(outfile, 'temp', ('time','Nk','Nc'),
        {'long_name':'Water temperature','units':'degrees C','coordinates':'time z_r yv xv'})
    self.create_nc_var(outfile, 'agec', ('time','Nk','Nc'),
        {'long_name':'Age concentration','units':''})
    self.create_nc_var(outfile, 'agealpha', ('time','Nk','Nc'),
        {'long_name':'Age alpha parameter','units':'seconds','coordinates':'time z_r yv xv'})
    self.create_nc_var(outfile, 'agesource', ('Nk','Nc'),
        {'long_name':'Age source grid cell (>0 = source)',
         'units':'', 'coordinates':'z_r yv xv'})
    # now write the variables...
    nc = Dataset(outfile, 'a')
    nc.variables['time'][:] = t
    nc.variables['eta'][:] = self.h
    nc.variables['uc'][:] = self.uc
    nc.variables['vc'][:] = self.vc
    nc.variables['salt'][:] = self.S
    nc.variables['temp'][:] = self.T
    nc.variables['agec'][:] = self.agec
    nc.variables['agealpha'][:] = self.agealpha
    nc.variables['agesource'][:] = self.agesource
    nc.close()
    print('Initial condition file written to: %s' % outfile)
Example 4: create_test_file
# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import variables['time'][:] [as alias]
def create_test_file():
    '''
    Creates a test file. Don't use.
    '''
    import numpy.random as random
    import numpy as np
    from netCDF4 import Dataset
    d = Dataset('c:/temp/input/big_gridded_big_ref.nc', 'a', format='NETCDF4_CLASSIC')
    d.variables['time'][:] = np.arange(1261440000, 1261465000, 50)
    d.variables['depth'][:] = np.arange(0.001, 0.005, 0.001)
    d.variables['lat'][:] = np.linspace(-90, 90, 200)
    d.variables['lon'][:] = np.linspace(-180, 180, 400)
    d.variables['time_ref'][:] = np.arange(1261440000, 1261940000, 100)
    d.variables['depth_ref'][:] = random.rand(5000)
    d.variables['lat_ref'][:] = random.rand(5000) * 180 - 90
    d.variables['lon_ref'][:] = random.rand(5000) * 360 - 180
    d.variables['chl_ref'][:] = random.rand(5000)
    d.variables['chl'][:] = random.rand(500, 4, 200, 400)
    d.variables['sst'][:] = random.rand(500, 4, 200, 400) + 1
    d.variables['sst_ref'][:] = random.rand(500, 4, 200, 400) * 0.5 + 1
    d.close()
Example 5: dataToNC
# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import variables['time'][:] [as alias]
def dataToNC(yr, ip, subset):
    yr = str(yr)
    loc = ips[ip]['loc']
    sass_netfilename = os.path.join(ncpath, loc, loc+'_'+yr+'_raw.nc')
    # print "dataToNC", sass_netfilename
    if not os.path.isfile(sass_netfilename):
        ncfile = Dataset(sass_netfilename, 'w', format='NETCDF4_CLASSIC')
        ncfile = createNCshell(ncfile, ip)
        # print "added init nc meta", loc+'_'+yr+'.nc'
        ncfile.variables['time'][:] = subset.index.astype('int64') // 10**9
        for attr in attrArr:
            # ncfile.variables['sst'][:] = subset['temperature'].values
            ncfile.variables[attr][:] = subset[attr].values
    else:
        ncfile = Dataset(sass_netfilename, 'a', format='NETCDF4_CLASSIC')
        timeLen = len(ncfile.variables['time'][:])
        # length should be the same for time & all attributes
        ncfile.variables['time'][timeLen:] = subset.index.astype('int64') // 10**9
        for attr in attrArr:
            #atLen = len(ncfile.variables[attr][:])
            ncfile.variables[attr][timeLen:] = subset[attr].values
    NCtimeMeta(ncfile, ip)
    ncfile.close()
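The else branch above relies on a netCDF4 behaviour worth calling out: assigning past the current end of a variable along an unlimited dimension grows the file. A minimal sketch of that behaviour, with made-up file name and values:

from netCDF4 import Dataset
import numpy as np

nc = Dataset('append_demo.nc', 'w', format='NETCDF4_CLASSIC')  # placeholder file
nc.createDimension('time', None)                               # unlimited
tvar = nc.createVariable('time', 'f8', ('time',))
tvar[:] = np.arange(5.0)                                       # initial 5 records
nc.close()

nc = Dataset('append_demo.nc', 'a')
n = len(nc.variables['time'][:])
nc.variables['time'][n:] = np.arange(5.0, 8.0)                 # grows the axis to 8 records
print(nc.variables['time'][:])                                 # [0. 1. 2. 3. 4. 5. 6. 7.]
nc.close()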
Example 6: setattr
# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import variables['time'][:] [as alias]
setattr(dst, "geospatial_lon_min", "-116.6056")
setattr(dst, "geospatial_lon_max", "-103.5225")
if fn[-47:-35] == '_historical_':
    setattr(dst, "time_coverage_start", "1971-01-01T00:0")
    setattr(dst, "time_coverage_end", "2000-12-31T00:0")
# Create dimensions
dst.createDimension('time', size=None)
dst.createDimension('lon', size=len(lons))
dst.createDimension('lat', size=len(lats))
dst.createDimension('crs', size=1)
# Copy variables
for v_name, varin in data.variables.items():
    outVar = dst.createVariable(v_name, varin.datatype, varin.dimensions)
    outVar.setncatts({k: varin.getncattr(k) for k in varin.ncattrs()})
    if v_name == "crs":
        outVar[:] = varin[:]
dst.variables['lat'][:] = lats
dst.variables['lon'][:] = lons
dst.variables['time'][:] = time
dst.variables[varname][:] = var
dst.close()
data.close()
print("Done processing " + dst_name)
Example 7: power
# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import variables['time'][:] [as alias]
#......... part of the code omitted here .........
        if not os.path.exists(pcurve_file):
            #logger.debug("Power curve: %s not found, skipping" % pcurve_file)
            pdata[:,l,:,:] = np.ma.masked
            continue
        logger.info('Predicting power output for %s' % loc)
        #
        # Open power curve
        #
        use_locs.append(l)
        pcurve = from_file(pcurve_file)
        for h in range(nheight):
            speed = dataset_in.variables['SPEED'][:,l,h]
            direction = dataset_in.variables['DIRECTION'][:,l,h]
            #pwr = pcurve.power(speed,direction)
            # pdist will create a distribution for each timestep based on sampling
            # n times from a normal distribution.
            pdist = pcurve.power_dist(speed, direction, sstd=sstd, dstd=dstd, n=n, normalise=pnorm)
            pmean = np.mean(pdist, axis=1)
            pquants = scipy.stats.mstats.mquantiles(pdist, prob=quantiles/100.0, axis=1, alphap=0.5, betap=0.5)
            pdata[:,l,h,0] = pmean
            pdata[:,l,h,1:] = pquants[:,:]
        logger.info('finished %s' % loc)
    use_inds = np.array(use_locs)
    logger.debug(use_inds)
    logger.debug(pdata.shape)
    logger.debug(pdata[:,use_inds,:,:].shape)
    if dataset_out != dataset_in:
        dataset_out.createDimension('time', None)
        dataset_out.createVariable('time', 'float', ('time',))
        dataset_out.variables['time'][:] = nctime[:]
        dataset_out.variables['time'].units = nctime.units
        dataset_out.variables['time'].calendar = nctime.calendar
        dataset_out.createDimension('location', len(use_locs))
        dataset_out.createDimension('loc_str_length', loc_str_len)
        loc_data = np.array([list(l.ljust(loc_str_len, ' ')) for l in location])
        dataset_out.createVariable('location', 'c', ('location', 'loc_str_length'))
        dataset_out.variables['location'][:] = loc_data[use_inds,:]
        dataset_out.createDimension('height', nheight)
        dataset_out.createVariable('height', 'i', ('height',))
        dataset_out.variables['height'][:] = height[:]
        dataset_out.GRID_ID = dataset_in.GRID_ID
        dataset_out.DX = dataset_in.DX
        dataset_out.DY = dataset_in.DY
        try:
            dataset_out.variables['height'].units = height.units
        except Exception:
            logger.warn("height units missing")
        pdata = pdata[:, use_inds, :, :]
    for key in metadata.keys():
        key = key.upper()
        logger.debug(key)
        dataset_out.setncattr(key, dataset_in.getncattr(key))
    pavg = dataset_out.createVariable('POWER', 'f', ('time','location','height'))
    pavg.units = 'kW'
    pavg.description = 'forecast power output'
    pavg[:] = pdata[:,:,:,0]
    for q, qval in enumerate(quantiles):
        varname = 'POWER.P%02d' % qval
        logger.debug("creating variable %s" % varname)
        var = dataset_out.createVariable(varname, 'f', ('time','location','height'))
        if pnorm:
            var.units = 'ratio'
        else:
            var.units = 'kW'
        var.description = 'forecast power output'
        print(pdata[:,:,:,q+1])
        var[:] = pdata[:,:,:,q+1]
    dataset_in.close()
    if dataset_out != dataset_in:
        dataset_out.close()
Example 8:
# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import variables['time'][:] [as alias]
var_out_lon = ncOutput.createVariable('lon', ncInput.variables['lon'].dtype, ('lon',))
for ncattr in ncInput.variables['lon'].ncattrs():
    var_out_lon.setncattr(ncattr, ncInput.variables['lon'].getncattr(ncattr))
ncOutput.variables['lon'][:] = lon_subset
# Add lat Variable
var_out_lat = ncOutput.createVariable('lat', ncInput.variables['lat'].dtype, ('lat',))
for ncattr in ncInput.variables['lat'].ncattrs():
    var_out_lat.setncattr(ncattr, ncInput.variables['lat'].getncattr(ncattr))
ncOutput.variables['lat'][:] = lat_subset
# Add time Variable
var_out_time = ncOutput.createVariable('time', ncInput.variables['time'].dtype, ('time',))
for ncattr in ncInput.variables['time'].ncattrs():
    var_out_time.setncattr(ncattr, ncInput.variables['time'].getncattr(ncattr))
ncOutput.variables['time'][:] = time
# Add data Variable
var_out_data = ncOutput.createVariable(varDataName, ncInput.variables[varDataName].dtype, ("time","lat","lon",))
for ncattr in ncInput.variables[varDataName].ncattrs():
    var_out_data.setncattr(ncattr, ncInput.variables[varDataName].getncattr(ncattr))
ncOutput.variables[varDataName][:] = var_subset
# attr
ncOutput.history = "CLIP Created datetime " + datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S") + " by CAREERI wuxb"
ncOutput.source = "netCDF4 1.1.9 python"
###################################################################################
# write close
###################################################################################
# close
ncOutput.close()  # close the new file
Example 9: ncwrite_climgrid
# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import variables['time'][:] [as alias]
def ncwrite_climgrid(filename, climdata, climname, descrip, long_name, missing, climunits,
                     time, lon, lat, time_units, time_cal):
    '''
    ncwrite_climgrid(filename, climdata, descrip, long_name):
    Must be an input array of climate data of the form climdata(time, lat, lon).
    Input vectors of time, lon and lat must be provided. Time must be in the format to
    write (not datetime format).
    time_units - must be a string in the format of <time units> since <reference time>.
    For example, "days since 1800-1-1 00:00:0.0"
    '''
    import numpy as np
    from netCDF4 import Dataset  # http://code.google.com/p/netcdf4-python/
    # Create NetCDF file to write
    w_nc = Dataset(filename, 'w', format='NETCDF4')
    # File description
    w_nc.description = "%s" % (descrip)
    # File dimensions for TIME
    w_nc.createDimension('time', len(time))
    w_nc_time = w_nc.createVariable('time', time.dtype, ('time',))
    w_nc_time.setncatts({'long_name': 'time',
                         'units': time_units, 'calendar': time_cal})
    # Assign the dimension data to the new NetCDF file.
    w_nc.variables['time'][:] = time
    # File dimensions for LAT
    w_nc.createDimension('lat', len(lat))
    w_nc_lat = w_nc.createVariable('lat', lat.dtype, ('lat',))
    w_nc_lat.setncatts({'long_name': 'Latitude',
                        'units': 'Degrees North'})
    # Assign the dimension data to the new NetCDF file.
    w_nc.variables['lat'][:] = lat
    # File dimensions for LON
    w_nc.createDimension('lon', len(lon))
    w_nc_lon = w_nc.createVariable('lon', lon.dtype, ('lon',))
    w_nc_lon.setncatts({'long_name': 'Longitude',
                        'units': 'Degrees East'})
    # Assign the dimension data to the new NetCDF file.
    w_nc.variables['lon'][:] = lon
    # Assign the climate variable
    w_nc_var = w_nc.createVariable(climname, 'f', ('time','lat','lon'))
    w_nc_var.setncatts({'long_name': long_name,
                        'units': climunits,
                        'missing_value': missing})
    w_nc.variables[climname][:] = climdata
    w_nc.close()
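A hypothetical call with synthetic data, matching the documented signature (every value below is made up for illustration):

import numpy as np

nt, ny, nx = 12, 10, 20
time = np.arange(nt, dtype='f8')                 # e.g. months since the reference date
lat = np.linspace(-45.0, 45.0, ny)
lon = np.linspace(0.0, 360.0, nx, endpoint=False)
climdata = np.random.rand(nt, ny, nx).astype('f4')

ncwrite_climgrid('clim_demo.nc', climdata, 'precip', 'Demo climate grid',
                 'Monthly precipitation', -9999.0, 'mm',
                 time, lon, lat, 'days since 1800-1-1 00:00:0.0', 'standard')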
Example 10: chop_netcdf
# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import variables['time'][:] [as alias]
def chop_netcdf(fname, out_fname, begin, end, air_pressure=False):
    """Truncate the data in a netCDF file between two indices"""
    if os.path.exists(out_fname):
        os.remove(out_fname)
    length = end - begin
    if not air_pressure:
        p = get_pressure(fname)[begin:end]
    else:
        p = get_air_pressure(fname)[begin:end]
    # get station id for the station_id dimension
    stn_site_id = get_global_attribute(fname, 'stn_station_number')
    t = get_time(fname)[begin:end]
    flags = get_flags(fname)[begin:end]
    alt = get_variable_data(fname, 'altitude')
    lat = get_variable_data(fname, 'latitude')
    long = get_variable_data(fname, 'longitude')
    d = Dataset(fname)
    output = Dataset(out_fname, 'w', format='NETCDF4_CLASSIC')
    output.createDimension('time', length)
    # copy globals
    for att in d.ncattrs():
        setattr(output, att, d.__dict__[att])
    og_uuid = get_global_attribute(fname, 'uuid')
    setattr(output, 'uuid', str(uuid.uuid4()))
    # copy variables
    for key in d.variables:
        name = key
        if name == 'station_id':
            output.createDimension("station_id", len(stn_site_id))
        datatype = d.variables[key].datatype
        dim = d.variables[key].dimensions
        if datatype == "int32":
            var = output.createVariable(name, datatype, dim)
        else:
            var = output.createVariable(name, datatype, dim, fill_value=FILL_VALUE)
        for att in d.variables[key].ncattrs():
            if att != '_FillValue':
                setattr(var, att, d.variables[key].__dict__[att])
        # add uuid of previous netCDF file to pressure variable
        if name == 'sea_pressure':
            setattr(var, 'sea_uuid', og_uuid)
        if name == 'air_pressure':
            setattr(var, 'air_uuid', og_uuid)
    output.variables['time'][:] = t
    if not air_pressure:
        output.variables['sea_pressure'][:] = p
    else:
        output.variables['air_pressure'][:] = p
    output.variables['pressure_qc'][:] = flags
    output.variables['altitude'][:] = alt
    output.variables['longitude'][:] = long
    output.variables['latitude'][:] = lat
    setattr(output, "time_coverage_start",
            unit_conversion.convert_ms_to_datestring(t[0], pytz.utc))
    setattr(output, "time_coverage_end",
            unit_conversion.convert_ms_to_datestring(t[-1], pytz.utc))
    setattr(output, "time_coverage_duration",
            unit_conversion.get_time_duration(t[-1] - t[0]))
    d.close()
    output.close()
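Usage is a single call; a hypothetical invocation keeping records 100 through 4999 of a sea-pressure file might look like this (file names are illustrative):

chop_netcdf('station_raw.nc', 'station_trimmed.nc', begin=100, end=5000)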
Example 11: custom_copy
# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import variables['time'][:] [as alias]
def custom_copy(fname, out_fname, begin, end, mode="storm_surge", step=1):
    if os.path.exists(out_fname):
        os.remove(out_fname)
    # get station id for the station_id dimension
    stn_site_id = get_global_attribute(fname, 'stn_station_number')
    t = get_time(fname)[begin:end:step]
    flags = get_flags(fname)[begin:end:step]
    alt = get_variable_data(fname, 'altitude')
    lat = get_variable_data(fname, 'latitude')
    long = get_variable_data(fname, 'longitude')
    d = Dataset(fname)
    output = Dataset(out_fname, 'w', format='NETCDF4_CLASSIC')
    output.createDimension('time', len(t))
    output.createDimension("station_id", len(stn_site_id))
    # copy globals
    for att in d.ncattrs():
        setattr(output, att, d.__dict__[att])
    setattr(output, 'uuid', str(uuid.uuid4()))
    has_station_id = False
    # copy variables
    for key in d.variables:
        # skip adding pressure qc if the mode is storm surge
        if mode == 'storm_surge' and (key == 'pressure_qc' or key == 'sea_pressure'):
            continue
        if key == 'station_id':
            has_station_id = True
        name = key
        datatype = d.variables[key].datatype
        dim = d.variables[key].dimensions
        if datatype == "int32":
            var = output.createVariable(name, datatype, dim)
        else:
            var = output.createVariable(name, datatype, dim, fill_value=FILL_VALUE)
        for att in d.variables[key].ncattrs():
            if att != '_FillValue':
                setattr(var, att, d.variables[key].__dict__[att])
        # if name == 'sea_pressure':
        #     setattr(var, 'sea_uuid', sea_uuid)
    output.variables['time'][:] = t
    if mode != 'storm_surge':
        output.variables['pressure_qc'][:] = flags
        p = get_pressure(fname)[begin:end]
        output.variables['sea_pressure'][:] = p
    output.variables['altitude'][:] = alt
    output.variables['longitude'][:] = long
    output.variables['latitude'][:] = lat
    # output.variables['sea_pressure'][:] = 0
    if not has_station_id:
        # the following changes are essential in case the air and sea gui files are processed
        # with older versions of the script
        st_id = output.createVariable('station_id', 'S1', ('station_id'))
        st_id.setncattr('cf_role', 'time_series_id')
        st_id.setncattr('long_name', 'station identifier')
        st_id[:] = list(stn_site_id)
    deployment_time = unit_conversion.convert_ms_to_datestring(t[0], pytz.utc)
    retrieval_time = unit_conversion.convert_ms_to_datestring(t[-1], pytz.utc)
    set_global_attribute(out_fname, 'deployment_time', deployment_time)
    set_global_attribute(out_fname, 'retrieval_time', retrieval_time)
    set_global_attribute(out_fname, 'salinity_ppm', 'unused')
    set_global_attribute(out_fname, 'device_depth', 'unused')
    set_global_attribute(out_fname, 'geospatial_lon_min', np.float64(-180))
    set_global_attribute(out_fname, 'geospatial_lon_max', np.float64(180))
    set_global_attribute(out_fname, 'geospatial_lat_min', np.float64(-90))
    set_global_attribute(out_fname, 'geospatial_lat_max', np.float64(90))
    set_global_attribute(out_fname, 'geospatial_vertical_min', np.float64(0))
    set_global_attribute(out_fname, 'geospatial_vertical_max', np.float64(0))
    first, last = get_sensor_orifice_elevation(out_fname)
    set_global_attribute(out_fname, 'sensor_orifice_elevation_at_deployment_time',
                         np.float64("{0:.4f}".format(first)))
    set_global_attribute(out_fname, 'sensor_orifice_elevation_at_retrieval_time',
                         np.float64("{0:.4f}".format(last)))
    set_global_attribute(out_fname, 'sensor_orifice_elevation_units', 'meters')
    first_land, last_land = get_land_surface_elevation(out_fname)
    set_global_attribute(out_fname, 'initial_land_surface_elevation',
                         np.float64("{0:.4f}".format(first_land)))
    set_global_attribute(out_fname, 'final_land_surface_elevation',
                         np.float64("{0:.4f}".format(last_land)))
    set_global_attribute(out_fname, 'land_surface_elevation_units', 'meters')
    # first_depth = get_initial_water_depth(out_fname)
#......... part of the code omitted here .........
Example 12: Dataset
# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import variables['time'][:] [as alias]
# Open a new NetCDF file to write the data to. For format, you can choose from
# 'NETCDF3_CLASSIC', 'NETCDF3_64BIT', 'NETCDF4_CLASSIC', and 'NETCDF4'
w_nc_fid = Dataset('darwin_2012.nc', 'w', format='NETCDF4')
w_nc_fid.description = "NCEP/NCAR Reanalysis %s from its value at %s. %s" % \
    (nc_fid.variables['air'].var_desc.lower(),
     darwin['name'], nc_fid.description)
# Using our previous dimension info, we can create the new time dimension
# Even though we know the size, we are going to set the size to unknown
w_nc_fid.createDimension('time', None)
w_nc_dim = w_nc_fid.createVariable('time', nc_fid.variables['time'].dtype,
                                   ('time',))
# You can do this step yourself but someone else did the work for us.
for ncattr in nc_fid.variables['time'].ncattrs():
    w_nc_dim.setncattr(ncattr, nc_fid.variables['time'].getncattr(ncattr))
# Assign the dimension data to the new NetCDF file.
w_nc_fid.variables['time'][:] = time
w_nc_var = w_nc_fid.createVariable('air', 'f8', ('time',))
w_nc_var.setncatts({'long_name': u"mean Daily Air temperature",
                    'units': u"degK", 'level_desc': u'Surface',
                    'var_desc': u"Air temperature",
                    'statistic': u'Mean\nM'})
w_nc_fid.variables['air'][:] = air[time_idx, lat_idx, lon_idx]
w_nc_fid.close()  # close the new file
# A plot of the temperature profile for Darwin in 2012
fig = plt.figure()
plt.plot(dt_time, air[:, lat_idx, lon_idx], c='r')
plt.plot(dt_time[time_idx], air[time_idx, lat_idx, lon_idx], c='b', marker='o')
plt.text(dt_time[time_idx], air[time_idx, lat_idx, lon_idx], cur_time,
         ha='right')
fig.autofmt_xdate()
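To sanity-check a file written this way, the stored time axis can be decoded back into datetimes with netCDF4.num2date; a short sketch, assuming the darwin_2012.nc file above was just created:

from netCDF4 import Dataset, num2date

nc = Dataset('darwin_2012.nc')
tvar = nc.variables['time']
# The units/calendar attributes were copied from the source file above
dates = num2date(tvar[:], units=tvar.units, calendar=getattr(tvar, 'calendar', 'standard'))
print(dates[0], dates[-1])        # first and last timestamps of the series
nc.close()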
Example 13: ConvertNCCF
# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import variables['time'][:] [as alias]
#......... part of the code omitted here .........
    moo = np.where(np.array(nc_dims) == 'longitude')
    goo = np.where(np.array(nc_vars) == 'longitude')
    if not (goo[0] >= 0): goo = np.where(np.array(nc_vars) == 'lon')   # Look for mistakes in HadISDH
    if (moo[0] >= 0) & (goo[0] >= 0):
        ncfw.createDimension(nc_dims[moo[0]], ncf.variables[nc_vars[goo[0]]].size)
    else:
        ncfw.createDimension('longitude', TheCLongs)
    data['longitude'] = ncfw.createVariable('longitude', 'f8', ('longitude',))
    data['longitude'].setncattr('standard_name', u'longitude')
    data['longitude'].setncattr('long_name', u'longitude')
    data['longitude'].setncattr('units', u'degrees_east')
    data['longitude'].setncattr('point_spacing', u'even')
    data['longitude'].setncattr('axis', u'X')
    makemonth = 0
    moo = np.where(np.array(nc_dims) == 'month')
    goo = np.where(np.array(nc_vars) == 'month')
    if not (goo[0] >= 0): goo = np.where(np.array(nc_vars) == 'months')   # Look for mistakes in HadISDH
    if (moo[0] >= 0) & (goo[0] >= 0):
        makemonth = 1
        ncfw.createDimension('month', 12)
        data['month'] = ncfw.createVariable('month', 'i', ('month',))
        data['month'].setncattr('standard_name', u'month')
        data['month'].setncattr('long_name', u'month')
        data['month'].setncattr('units', u'days since 1973-1-1 00:00:00')
        data['month'].setncattr('calendar', u'gregorian')
        data['month'].setncattr('start_year', u'1973s')
        data['month'].setncattr('end_year', u'1973s')
        data['month'].setncattr('start_month', u'1s')
        data['month'].setncattr('end_month', u'12s')
        data['month'].setncattr('axis', u'T')
    # Now set up the variables
    # stop()
    for loo in range(nvars):   # miss out time, lat and lon - and month at the end
        print(loo)
        if (nc_vars[loo] != 'time') & (nc_vars[loo] != 'latitude') & (nc_vars[loo] != 'longitude') & (nc_vars[loo] != 'month') & \
           (nc_vars[loo] != 'times') & (nc_vars[loo] != 'latitudes') & (nc_vars[loo] != 'longitudes') & (nc_vars[loo] != 'months') & \
           (nc_vars[loo] != 'lat') & (nc_vars[loo] != 'lon'):
            print(nc_vars[loo])
            ncfw_var = ncfw.createVariable(nc_vars[loo], ncf.variables[nc_vars[loo]].dtype, ncf.variables[nc_vars[loo]].dimensions)
            if (any(np.where(np.array(ncf.variables[nc_vars[loo]].ncattrs()) == '_FillValue'))):
                ncfw_var.setncattr('_FillValue', ncf.variables[nc_vars[loo]].getncattr('_FillValue'))
            elif (any(np.where(np.array(ncf.variables[nc_vars[loo]].ncattrs()) == 'missing_value'))):
                ncfw_var.setncattr('_FillValue', ncf.variables[nc_vars[loo]].getncattr('missing_value'))
            else:
                ncfw_var.setncattr('_FillValue', TheMissing)
            if (any(np.where(np.array(ncf.variables[nc_vars[loo]].ncattrs()) == 'missing_value'))):
                ncfw_var.setncattr('missing_value', ncf.variables[nc_vars[loo]].getncattr('missing_value'))
            elif (any(np.where(np.array(ncf.variables[nc_vars[loo]].ncattrs()) == '_FillValue'))):
                ncfw_var.setncattr('missing_value', ncf.variables[nc_vars[loo]].getncattr('_FillValue'))
            else:
                ncfw_var.setncattr('missing_value', TheMissing)
            if (any(np.where(np.array(ncf.variables[nc_vars[loo]].ncattrs()) == 'valid_min'))):
                ncfw_var.setncattr('valid_min', ncf.variables[nc_vars[loo]].getncattr('valid_min'))
            else:
                ncfw_var.setncattr('valid_min', min(ncf.variables[nc_vars[0]][np.where(ncf.variables[nc_vars[0]][:] != TheMissing)]))
            if (any(np.where(np.array(ncf.variables[nc_vars[loo]].ncattrs()) == 'valid_max'))):
                ncfw_var.setncattr('valid_max', ncf.variables[nc_vars[loo]].getncattr('valid_max'))
            else:
                ncfw_var.setncattr('valid_max', max(ncf.variables[nc_vars[0]][np.where(ncf.variables[nc_vars[0]][:] != TheMissing)]))
            if (any(np.where(np.array(ncf.variables[nc_vars[loo]].ncattrs()) == 'reference_period'))):
                ncfw_var.setncattr('reference_period', ncf.variables[nc_vars[loo]].getncattr('reference_period'))
            else:
                ncfw_var.setncattr('reference_period', ClimPeriod)
            ncfw_var.setncatts({'long_name': ncf.variables[nc_vars[loo]].getncattr('long_name'),
                                'units': ncf.variables[nc_vars[loo]].getncattr('units')})
    # Now fill the variables
    ncfw.variables['time'][:] = TheDaysArray
    ncfw.variables['latitude'][:] = ncf.variables[nc_vars[1]][:]
    ncfw.variables['longitude'][:] = ncf.variables[nc_vars[2]][:]
    if makemonth == 1:
        ncfw.variables['month'][:] = TheDaysArray[0:12]
    for loo in range(nvars):   # miss out time, lat and lon
        print(loo)
        if (nc_vars[loo] != 'time') & (nc_vars[loo] != 'latitude') & (nc_vars[loo] != 'longitude') & (nc_vars[loo] != 'month') & \
           (nc_vars[loo] != 'times') & (nc_vars[loo] != 'latitudes') & (nc_vars[loo] != 'longitudes') & (nc_vars[loo] != 'months') & \
           (nc_vars[loo] != 'lat') & (nc_vars[loo] != 'lon'):
            print(nc_vars[loo])
            ncfw.variables[nc_vars[loo]][:] = ncf.variables[nc_vars[loo]][:]
    ncfw.close()
    return   # ConvertNCCF
Example 14: _time_axis_processing
# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import variables['time'][:] [as alias]
def _time_axis_processing(ctx):
    """Time axis process in three steps: rebuild, check and rewrite."""
    # Extract start and end dates from filename
    start_date, end_date = _dates_from_filename(ctx.filename, ctx.calendar)
    start = _date2num(start_date, units=ctx.funits, calendar=ctx.calendar)
    # Set time length, True/False instant axis and incrementation in frequency units
    data = Dataset(ctx.root + ctx.filename, 'r+')
    length = data.variables['time'].shape[0]
    instant = _is_instant_time_axis(ctx.filename, ctx.realm)
    inc = _time_inc(ctx.frequency)
    # Instantiate an object to display axis status
    status = _AxisStatus()
    status.directory = ctx.root
    status.file = ctx.filename
    status.start = _date_print(start_date)
    status.end = _date_print(end_date)
    status.steps = length
    status.frequency = ctx.frequency
    status.calendar = ctx.calendar
    status.tunits = ctx.tunits
    status.units = _control_time_units(data.variables['time'].units)
    if instant:
        status.instant = True
    # Rebuild a proper time axis
    axis_hp, last_hp = _rebuild_time_axis(start, length, instant, inc, ctx)  # High precision
    axis_lp, last_lp = _rebuild_time_axis(trunc(start, 5), length, instant, inc, ctx)  # Low precision avoiding float precision issues
    # Control consistency between last time date and end date from filename
    if not _last_date_checker(_date_print(last_hp), _date_print(end_date)) and \
       not _last_date_checker(_date_print(last_lp), _date_print(end_date)):
        status.control.append('003')
        logging.warning('ERR003 - Inconsistent last and end dates for {0}'.format(ctx.filename))
    else:
        if _last_date_checker(_date_print(last_hp), _date_print(end_date)):
            status.last = _date_print(last_hp)
            axis = axis_hp
        elif _last_date_checker(_date_print(last_lp), _date_print(end_date)):
            status.last = _date_print(last_lp)
            axis = axis_lp
    # Control consistency between instant time and time boundaries
    if instant and ('time_bnds' in data.variables.keys()):
        status.control.append('004')
        logging.warning('ERR004 - Inconsistent time_bnds with instant time for {0}'.format(ctx.filename))
        # Delete time bounds and bounds attribute from file
        if ctx.write or ctx.force:
            del data.variables['time'].bounds
            data.close()
            _nc_var_delete(ctx.root, ctx.filename, 'time_bnds')
            # Compute checksum
            status.checksum = _checksum(ctx.root + ctx.filename)
            data = Dataset(ctx.root + ctx.filename, 'r+')
    # Check time axis squareness
    if ctx.check or ctx.write:
        time = data.variables['time'][:]
        if _time_checker(axis, time):
            status.control.append('000')
        else:
            status.control.append('001')
            logging.warning('ERR001 - Wrong time axis for {0}'.format(ctx.filename))
        # Rebuild, read and check time boundaries squareness if needed
        if 'time_bnds' in data.variables.keys():
            axis_bnds = _rebuild_time_bnds(start, length, inc, ctx)
            time_bnds = data.variables['time_bnds'][:, :]
            if _time_checker(axis_bnds, time_bnds):
                status.bnds = True
            else:
                status.bnds = False
        # Rewrite time axis depending on checking
        if (ctx.write and not _time_checker(axis, time)) or ctx.force:
            data.variables['time'][:] = axis
            # Rewrite time units according to CMIP5 requirements (i.e., same units for all files)
            data.variables['time'].units = ctx.tunits
            # Rewrite time boundaries if needed
            if 'time_bnds' in data.variables.keys():
                data.variables['time_bnds'][:, :] = axis_bnds
            # Compute checksum
            status.checksum = _checksum(ctx.root + ctx.filename)
    # Control consistency between time units
    if ctx.tunits != status.units:
        status.control.append('002')
        logging.warning('ERR002 - Changing time units for {0}'.format(ctx.filename))
    # Close file
    data.close()
    # Return file status
    return status
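The _rebuild_time_axis helper above is project-specific, but the underlying idea, regenerating the expected axis from the start date, the step count and the frequency increment and then comparing it with what the file holds, can be sketched with plain netCDF4 utilities. In the sketch below the daily increment, the start date and the file name are assumptions for illustration:

import numpy as np
from netCDF4 import Dataset, date2num
from datetime import datetime, timedelta

def rebuild_daily_axis(start, length, units, calendar='standard'):
    """Expected time values for a daily, non-instant (cell-midpoint) axis."""
    dates = [start + timedelta(days=i + 0.5) for i in range(length)]  # cell midpoints
    return date2num(dates, units=units, calendar=calendar)

nc = Dataset('some_cmip_file.nc', 'r')                 # placeholder file name
tvar = nc.variables['time']
expected = rebuild_daily_axis(datetime(1850, 1, 1), tvar.shape[0], tvar.units)
ok = np.allclose(expected, tvar[:], atol=1e-5)         # squareness check, cf. _time_checker
print('time axis OK' if ok else 'time axis mismatch')
nc.close()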