This article collects typical usage examples of the Python method netCDF4.Dataset.variables['lon'][:]. If you have been wondering what Dataset.variables['lon'][:] does, how to call it, or what real-world uses look like, the curated examples below should help. You can also explore the containing class, netCDF4.Dataset, for further usage examples.
The following presents 10 code examples of Dataset.variables['lon'][:], sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Python code samples.
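All ten examples share the same core pattern: open a file with Dataset, then read or assign the longitude coordinate through variables['lon'][:]. As a warm-up, here is a minimal self-contained sketch of that pattern; the file name sample.nc and the 128-point grid are placeholders, not taken from the examples below.

import numpy as np
from netCDF4 import Dataset

# Create a small file with a 'lon' coordinate variable.
with Dataset('sample.nc', 'w', format='NETCDF4') as ds:
    ds.createDimension('lon', 128)
    ds.createVariable('lon', 'f8', ('lon',))
    ds.variables['lon'][:] = np.linspace(-180., 180., 128)  # write every value

# Reopen the file and read the values back into memory.
with Dataset('sample.nc') as ds:
    lons = ds.variables['lon'][:]  # [:] loads the whole array
    print(lons.min(), lons.max())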
Example 1: create_test_file
# Required imports: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import variables['lon'][:] [as alias]
def create_test_file():
    '''
    Creates a test file by filling an existing NETCDF4_CLASSIC file
    with synthetic data. Don't use.
    '''
    import numpy.random as random
    import numpy as np
    from netCDF4 import Dataset
    # 'a' (append) mode assumes the file and its variables already exist.
    d = Dataset('c:/temp/input/big_gridded_big_ref.nc', 'a', format='NETCDF4_CLASSIC')
    d.variables['time'][:] = np.arange(1261440000, 1261465000, 50)
    d.variables['depth'][:] = np.arange(0.001, 0.005, 0.001)
    d.variables['lat'][:] = np.linspace(-90, 90, 200)
    d.variables['lon'][:] = np.linspace(-180, 180, 400)
    d.variables['time_ref'][:] = np.arange(1261440000, 1261940000, 100)
    d.variables['depth_ref'][:] = random.rand(5000)
    d.variables['lat_ref'][:] = random.rand(5000) * 180 - 90
    d.variables['lon_ref'][:] = random.rand(5000) * 360 - 180
    d.variables['chl_ref'][:] = random.rand(5000)
    d.variables['chl'][:] = random.rand(500, 4, 200, 400)
    d.variables['sst'][:] = random.rand(500, 4, 200, 400) + 1
    d.variables['sst_ref'][:] = random.rand(500, 4, 200, 400) * 0.5 + 1
    d.close()
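Because create_test_file opens the file in append mode, its dimensions and variables must already be defined. A minimal sketch of what that skeleton could look like, with sizes inferred from the arrays above; the dimension names, especially 'obs', are assumptions, not taken from the example:

from netCDF4 import Dataset

d = Dataset('c:/temp/input/big_gridded_big_ref.nc', 'w', format='NETCDF4_CLASSIC')
for name, size in [('time', 500), ('depth', 4), ('lat', 200), ('lon', 400), ('obs', 5000)]:
    d.createDimension(name, size)
for name in ['time', 'depth', 'lat', 'lon']:  # coordinate variables
    d.createVariable(name, 'f8', (name,))
for name in ['chl', 'sst', 'sst_ref']:  # 4-D gridded fields
    d.createVariable(name, 'f8', ('time', 'depth', 'lat', 'lon'))
for name in ['time_ref', 'depth_ref', 'lat_ref', 'lon_ref', 'chl_ref']:  # 1-D reference data
    d.createVariable(name, 'f8', ('obs',))
d.close()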
Example 2: setattr
# Required imports: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import variables['lon'][:] [as alias]
setattr(dst, "geospatial_lon_min", "-116.6056")
setattr(dst, "geospatial_lon_max", "-103.5225")
if fn[-47:-35] == '_historical_':
    setattr(dst, "time_coverage_start", "1971-01-01T00:0")
    setattr(dst, "time_coverage_end", "2000-12-31T00:0")
# Create dimensions
dst.createDimension('time', size=None)
dst.createDimension('lon', size=len(lons))
dst.createDimension('lat', size=len(lats))
dst.createDimension('crs', size=1)
# Copy variables
for v_name, varin in data.variables.items():
    outVar = dst.createVariable(v_name, varin.datatype, varin.dimensions)
    outVar.setncatts({k: varin.getncattr(k) for k in varin.ncattrs()})
    if v_name == "crs":
        outVar[:] = varin[:]
dst.variables['lat'][:] = lats
dst.variables['lon'][:] = lons
dst.variables['time'][:] = time
dst.variables[varname][:] = var
dst.close()
data.close()
print("Done processing " + dst_name)
Example 3: transform_osi_saf_netcdf
# Required imports: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import variables['lon'][:] [as alias]
import numpy
from netCDF4 import Dataset

def transform_osi_saf_netcdf(input_file, new_data_set, digits, debug=False):
    """ Copy the lat/lon coordinates of an OSI SAF netCDF file into new_data_set, rounded to the given precision. """
    print("================== Start transforming netCDF coordinates (precision %s) ==================" % (digits))
    old_dataset = Dataset(input_file, 'a')
    new_dataset = Dataset(new_data_set, 'a')
    o_lat = old_dataset.variables['lat']
    o_lon = old_dataset.variables['lon']
    o_lat_data = o_lat[:]
    o_lon_data = o_lon[:]
    # get dimensions from the old dataset
    nj_max = len(old_dataset.dimensions['nj'])
    ni_max = len(old_dataset.dimensions['ni'])
    # need to create a numpy array with the right dimensions and fill it
    # with the rounded lat values, and then the lon values
    #n_data = numpy.zeros((nj_max, ni_max), dtype=int)
    n_data = numpy.zeros((nj_max, ni_max), dtype=float)
    nj = 0
    ni = 0
    print("== Start lat transformation \n")
    while nj < nj_max:
        while ni < ni_max:
            #n_data[nj][ni] = round(o_lat_data[nj][ni], digits)*pow(10,digits)
            n_data[nj][ni] = round(o_lat_data[nj][ni], digits)
            ni += 1
        if debug and (nj % 10) == 0:
            print("debug: In nj loop %d\n" % (nj))
        ni = 0
        nj += 1
    print("== End of lat transformation \n")
    new_dataset.variables['lat'][:] = n_data
    new_dataset.sync()
    print("== Start lon transformation \n")
    #n_data = numpy.zeros((nj_max, ni_max), dtype=int)
    n_data = numpy.zeros((nj_max, ni_max), dtype=float)
    # reset ni, nj
    ni = 0
    nj = 0
    while nj < nj_max:
        while ni < ni_max:
            #n_data[nj][ni] = round(o_lon_data[nj][ni], digits)*pow(10,digits)
            n_data[nj][ni] = round(o_lon_data[nj][ni], digits)
            ni += 1
        if debug and (nj % 10) == 0:
            print("debug: In nj loop %d\n" % (nj))
        ni = 0
        nj += 1
    print("== End of lon transformation \n")
    new_dataset.variables['lon'][:] = n_data
    new_dataset.sync()
    new_dataset.close()
    old_dataset.sync()
    old_dataset.close()
    print("================== End of transforming netCDF coordinates ==================")
Example 4: Dataset
# Required imports: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import variables['lon'][:] [as alias]
import numpy
from netCDF4 import Dataset

if __name__ == '__main__':
    dir = '/homespace/gaubert/ifremer-data'
    input_files = [
        #'20110502-EUR-L2P_GHRSST-SSTsubskin-AVHRR_METOP_A-eumetsat_sstmgr_metop02_20110502_220403-v01.7-fv01.0.nc',
        '20110426-EUR-L2P_GHRSST-SSTsubskin-AVHRR_METOP_A-eumetsat_sstmgr_metop02_20110426_111003-v01.7-fv01.0.nc',
        '20110420-EUR-L2P_GHRSST-SSTsubskin-AVHRR_METOP_A-eumetsat_sstmgr_metop02_20110420_064903-v01.7-fv01.0.nc',
        '20110414-EUR-L2P_GHRSST-SSTsubskin-AVHRR_METOP_A-eumetsat_sstmgr_metop02_20110414_025203-v01.7-fv01.0.nc'
    ]
    for input_file in input_files:
        # open in append mode so the rounded coordinates overwrite the originals
        dataset = Dataset('%s/%s' % (dir, input_file), 'a')
        lat = dataset.variables['lat']
        lon = dataset.variables['lon']
        lat_data = lat[:]
        lon_data = lon[:]
        lat_data = numpy.around(lat_data, 3)
        lon_data = numpy.around(lon_data, 3)
        dataset.variables['lat'][:] = lat_data
        dataset.variables['lon'][:] = lon_data
        dataset.sync()
        dataset.close()
Example 5: generate_seasonal_averages
# Required imports: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import variables['lon'][:] [as alias]
import multiprocessing
from netCDF4 import Dataset
# RAPIDDataset and generate_single_seasonal_average come from the
# surrounding RAPIDpy module (exact import path not shown in the excerpt).

def generate_seasonal_averages(qout_file, seasonal_average_file,
                               num_cpus=multiprocessing.cpu_count()):
    """
    This function loops through a CF compliant rapid streamflow
    file to produce a netCDF file with a seasonal average for
    365 days a year
    """
    with RAPIDDataset(qout_file) as qout_nc_file:
        print("Generating seasonal average file ...")
        seasonal_avg_nc = Dataset(seasonal_average_file, 'w')
        seasonal_avg_nc.createDimension('rivid', qout_nc_file.size_river_id)
        seasonal_avg_nc.createDimension('day_of_year', 365)
        timeSeries_var = seasonal_avg_nc.createVariable('rivid', 'i4', ('rivid',))
        timeSeries_var.long_name = 'unique identifier for each river reach'
        average_flow_var = seasonal_avg_nc.createVariable('average_flow', 'f8', ('rivid', 'day_of_year'))
        average_flow_var.long_name = 'seasonal average streamflow'
        average_flow_var.units = 'm3/s'
        std_dev_flow_var = seasonal_avg_nc.createVariable('std_dev_flow', 'f8', ('rivid', 'day_of_year'))
        std_dev_flow_var.long_name = 'seasonal std. dev. streamflow'
        std_dev_flow_var.units = 'm3/s'
        lat_var = seasonal_avg_nc.createVariable('lat', 'f8', ('rivid',),
                                                 fill_value=-9999.0)
        lat_var.long_name = 'latitude'
        lat_var.standard_name = 'latitude'
        lat_var.units = 'degrees_north'
        lat_var.axis = 'Y'
        lon_var = seasonal_avg_nc.createVariable('lon', 'f8', ('rivid',),
                                                 fill_value=-9999.0)
        lon_var.long_name = 'longitude'
        lon_var.standard_name = 'longitude'
        lon_var.units = 'degrees_east'
        lon_var.axis = 'X'
        # copy each river reach's coordinates from the source file
        seasonal_avg_nc.variables['lat'][:] = qout_nc_file.qout_nc.variables['lat'][:]
        seasonal_avg_nc.variables['lon'][:] = qout_nc_file.qout_nc.variables['lon'][:]
        river_id_list = qout_nc_file.get_river_id_array()
        seasonal_avg_nc.variables['rivid'][:] = river_id_list
        seasonal_avg_nc.close()

    # generate multiprocessing jobs, one per day of the year
    mp_lock = multiprocessing.Manager().Lock()
    job_combinations = []
    for day_of_year in range(1, 366):
        job_combinations.append((qout_file,
                                 seasonal_average_file,
                                 day_of_year,
                                 mp_lock
                                 ))
    pool = multiprocessing.Pool(num_cpus)
    pool.map(generate_single_seasonal_average,
             job_combinations)
    pool.close()
    pool.join()
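A hypothetical invocation; both file names below are placeholders, and the input must be a CF-compliant RAPID Qout file:

generate_seasonal_averages('Qout_rapid_watershed.nc',
                           'seasonal_average_rapid_watershed.nc',
                           num_cpus=4)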
Example 6: Dataset
# Required imports: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import variables['lon'][:] [as alias]
OutputFileDirName = os.path.join(OutputFileDir, OutfileName)
# create file
ncOutput = Dataset(OutputFileDirName, 'w', format='NETCDF3_CLASSIC')
ncOutput.description = "Extract Data %s from CFMD by QTP shapefile. %s" % \
    (ncInput.variables[varDataName].long_name.lower(), ncInput.description)
# Using our previous dimension info, we can create the new time dimension.
# Even though we know the size, we are going to set the size to unknown.
ncOutput.createDimension('time', None)
ncOutput.createDimension('lon', ncols)
ncOutput.createDimension('lat', nrows)
# Add lon variable
var_out_lon = ncOutput.createVariable('lon', ncInput.variables['lon'].dtype, ('lon',))
for ncattr in ncInput.variables['lon'].ncattrs():
    var_out_lon.setncattr(ncattr, ncInput.variables['lon'].getncattr(ncattr))
ncOutput.variables['lon'][:] = lon_subset
# Add lat variable
var_out_lat = ncOutput.createVariable('lat', ncInput.variables['lat'].dtype, ('lat',))
for ncattr in ncInput.variables['lat'].ncattrs():
    var_out_lat.setncattr(ncattr, ncInput.variables['lat'].getncattr(ncattr))
ncOutput.variables['lat'][:] = lat_subset
# Add time variable
var_out_time = ncOutput.createVariable('time', ncInput.variables['time'].dtype, ('time',))
for ncattr in ncInput.variables['time'].ncattrs():
    var_out_time.setncattr(ncattr, ncInput.variables['time'].getncattr(ncattr))
ncOutput.variables['time'][:] = time
# Add data variable
var_out_data = ncOutput.createVariable(varDataName, ncInput.variables[varDataName].dtype, ("time", "lat", "lon",))
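The lon, lat, and time blocks above repeat one copy pattern; a compact equivalent sketch, reusing only names already present in the excerpt:

for name, values in [('lon', lon_subset), ('lat', lat_subset), ('time', time)]:
    src = ncInput.variables[name]
    out = ncOutput.createVariable(name, src.dtype, (name,))
    out.setncatts({k: src.getncattr(k) for k in src.ncattrs()})  # copy all attributes
    out[:] = values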
Example 7: ncwrite_climgrid
# Required imports: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import variables['lon'][:] [as alias]
def ncwrite_climgrid(filename, climdata, climname, descrip, long_name, missing, climunits,
                     time, lon, lat, time_units, time_cal):
    '''
    ncwrite_climgrid(filename, climdata, descrip, long_name):
    Must be an input array of climate data of the form climdata(time, lat, lon).
    Input vectors of time, lon and lat must be provided. Time must be in the format to
    write (not datetime format).
    time_units - must be a string in the format of <time units> since <reference time>.
    For example, "days since 1800-1-1 00:00:0.0"
    '''
    import numpy as np
    from netCDF4 import Dataset  # http://code.google.com/p/netcdf4-python/
    # Create NetCDF file to write
    w_nc = Dataset(filename, 'w', format='NETCDF4')
    # File description
    w_nc.description = "%s" % (descrip)
    # File dimensions for TIME
    w_nc.createDimension('time', len(time))
    w_nc_time = w_nc.createVariable('time', time.dtype, ('time',))
    w_nc_time.setncatts({'long_name': 'time',
                         'units': time_units, 'calendar': time_cal})
    # Assign the dimension data to the new NetCDF file.
    w_nc.variables['time'][:] = time
    # File dimensions for LAT
    w_nc.createDimension('lat', len(lat))
    w_nc_lat = w_nc.createVariable('lat', lat.dtype, ('lat',))
    w_nc_lat.setncatts({'long_name': 'Latitude',
                        'units': 'Degrees North'})
    w_nc.variables['lat'][:] = lat
    # File dimensions for LON
    w_nc.createDimension('lon', len(lon))
    w_nc_lon = w_nc.createVariable('lon', lon.dtype, ('lon',))
    w_nc_lon.setncatts({'long_name': 'Longitude',
                        'units': 'Degrees East'})
    w_nc.variables['lon'][:] = lon
    # Assign the climate variable
    w_nc_var = w_nc.createVariable(climname, 'f', ('time', 'lat', 'lon'))
    w_nc_var.setncatts({'long_name': long_name,
                        'units': climunits,
                        'missing_value': missing})
    w_nc.variables[climname][:] = climdata
    w_nc.close()
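A hypothetical call with synthetic data; every array, value, and the output file name below are made-up placeholders:

import numpy as np
time = np.arange(12, dtype='f8')
lat = np.linspace(-90., 90., 73)
lon = np.linspace(0., 357.5, 144)
climdata = np.random.rand(12, 73, 144)
ncwrite_climgrid('precip_clim.nc', climdata, 'precip', 'Synthetic climatology',
                 'monthly precipitation', -9999.0, 'mm/day',
                 time, lon, lat, 'months since 1800-01-01 00:00:00', 'standard')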
Example 8: Dataset
# Required imports: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import variables['lon'][:] [as alias]
s = np.squeeze(s)
t = np.squeeze(t)
setnan = np.logical_and(s[:] == 0., t[:] == 0.)  # in place of mask
s[setnan] = np.nan
t[setnan] = np.nan
lat = np.squeeze(lat)
lon = np.squeeze(lon)
ds = Dataset('nemo.nc', 'w', format='NETCDF4')
ds.createDimension('z', nz)
ds.createDimension('y', ny)
ds.createDimension('x', nx)
ds.createVariable('lat', 'd', ('z', 'y', 'x'))
ds.createVariable('lon', 'd', ('z', 'y', 'x'))
#ds.createVariable('p', 'd', ('z', 'y', 'x'))
ds.createVariable('s', 'f', ('z', 'y', 'x'))  # single precision!
ds.createVariable('tpot', 'f', ('z', 'y', 'x'))
ds.variables['lat'][:] = lat
ds.variables['lon'][:] = lon
#ds.variables['p'][:] = p
ds.variables['s'][:] = s
ds.variables['tpot'][:] = t
ds.close()
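The setnan workaround above ("in place of mask") emulates masking with NaN sentinels. netCDF4 can handle this natively with masked arrays, which it converts to the variable's fill value on write; a sketch of that alternative, assuming it replaces the NaN assignment and reuses s and t from the excerpt:

import numpy.ma as ma
mask = np.logical_and(s == 0., t == 0.)
ds.variables['s'][:] = ma.masked_where(mask, s)      # masked points become the fill value
ds.variables['tpot'][:] = ma.masked_where(mask, t)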
Example 9:
# Required imports: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import variables['lon'][:] [as alias]
"%s from its value at %s. %s" %\
(nc_fid.variables['air'].var_desc.lower(),\
darwin['name'], nc_fid.description)
# Using our previous dimension information, we can create the new dimensions
data = {}
for dim in nc_dims:
w_nc_fid.createDimension(dim, nc_fid.variables[dim].size)
data[dim] = w_nc_fid.createVariable(dim, nc_fid.variables[dim].dtype,\
(dim,))
# You can do this step yourself but someone else did the work for us.
for ncattr in nc_fid.variables[dim].ncattrs():
data[dim].setncattr(ncattr, nc_fid.variables[dim].getncattr(ncattr))
# Assign the dimension data to the new NetCDF file.
w_nc_fid.variables['time'][:] = time
w_nc_fid.variables['lat'][:] = lats
w_nc_fid.variables['lon'][:] = lons
# Ok, time to create our departure variable
w_nc_var = w_nc_fid.createVariable('air_dep', 'f8', ('time', 'lat', 'lon'))
w_nc_var.setncatts({'long_name': u"mean Daily Air temperature departure",\
'units': u"degK", 'level_desc': u'Surface',\
'var_desc': u"Air temperature departure",\
'statistic': u'Mean\nM'})
w_nc_fid.variables['air_dep'][:] = departure
w_nc_fid.close() # close the new file
# Rounded maximum absolute value of the departure used for contouring
max_dep = np.round(np.abs(departure[time_idx, :, :]).max()+5., decimals=-1)
# Generate a figure of the departure for a single day
fig = plt.figure()
Example 10: coarse_grain_horizontal
# Required imports: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import variables['lon'][:] [as alias]
import numpy as np
from netCDF4 import Dataset
# spherical_tools, replicate_netcdf_file, replicate_netcdf_var and
# full_average are helpers from the surrounding module (not shown here).

def coarse_grain_horizontal(options):
    data = Dataset(options.in_file)
    output = Dataset(options.out_file, 'w')
    replicate_netcdf_file(output, data)
    lengths_high = spherical_tools.coords(data)
    for var in ['time', 'lev', 'slev']:
        replicate_netcdf_var(output, data, var)
    # halve the longitude resolution (integer division keeps sizes integral)
    output.createDimension('slon', len(data.dimensions['slon']) // 2)
    output.createVariable('slon', np.float64, ('slon',))
    output.variables['slon'][:] = data.variables['slon'][::2]
    output.createDimension('lon', len(data.dimensions['lon']) // 2)
    output.createVariable('lon', np.float64, ('lon',))
    output.variables['lon'][:] = (data.variables['lon'][::2] + data.variables['lon'][1::2]) / 2
    # reduce the latitude resolution by a factor of three
    output.createDimension('slat', len(data.dimensions['slat']) // 3)
    output.createVariable('slat', np.float64, ('slat',))
    output.variables['slat'][:] = data.variables['slat'][1::3]
    output.createDimension('lat', len(data.dimensions['slat']) // 3 + 1)
    output.createVariable('lat', np.float64, ('lat',))
    output.variables['lat'][1:-1] = (data.variables['lat'][2:-2:3] + data.variables['lat'][3:-2:3] + data.variables['lat'][4:-2:3]) / 3
    output.variables['lat'][0] = -90.0
    output.variables['lat'][-1] = 90.0
    lengths_low = spherical_tools.coords(output)
    # 'ua' and 'va' are summed (not averaged) across the merged cells
    var = 'ua'
    output.createVariable(var, np.float64, ('time', 'lev', 'lat', 'slon'))
    output.variables[var][:, :, 1:-1, :] = (data.variables[var][:, :, 2:-2:3, ::2] +
                                            data.variables[var][:, :, 3:-2:3, ::2] +
                                            data.variables[var][:, :, 4:-2:3, ::2])
    output.variables[var][:, :, 0, :] = 0.0
    output.variables[var][:, :, -1, :] = 0.0
    var = 'va'
    output.createVariable(var, np.float64, ('time', 'lev', 'slat', 'lon'))
    output.variables[var][:] = (data.variables[var][:, :, 1::3, ::2] +
                                data.variables[var][:, :, 1::3, 1::2])
    output.sync()
    for var in ['wa']:
        output.createVariable(var, np.float64, ('time', 'slev', 'lat', 'lon'))
        output.variables[var][:] = full_average(data.variables[var][:], output.variables[var].shape)
    output.sync()
    for var in ['mass']:
        output.createVariable(var, np.float64, ('time', 'lev', 'lat', 'lon'))
        output.variables[var][:] = full_average(data.variables[var][:], output.variables[var].shape)
    output.sync()
    for var in ['ta', 'hus', 'pa']:
        # mass-weighted average of the intensive variables
        output.createVariable(var, np.float64, ('time', 'lev', 'lat', 'lon'))
        #temp=data.variables[var][:]*np.reshape(lengths_high.area_lat_lon,(1,1,)+lengths_high.area_lat_lon.shape)*data.variables['dpa'][:]
        #output.variables[var][:]=full_average(temp,output.variables[var].shape)/(output.variables['dpa']*np.reshape(lengths_low.area_lat_lon,(1,1,)+lengths_low.area_lat_lon.shape))
        output.variables[var][:] = full_average(data.variables[var][:] * data.variables['mass'][:], output.variables[var].shape) / output.variables['mass']
    output.sync()
    test_divergence = False
    if test_divergence:
        # Retrieve data and create output:
        vars_space = dict()
        for var in ['ua', 'va', 'wa']:
            vars_space[var] = output.variables[var][0, :, :, :].astype(np.float64, copy=False)
        vars_space['dmassdt'] = (output.variables['mass'][1, :, :, :].astype(np.float64, copy=False) -
                                 output.variables['mass'][0, :, :, :].astype(np.float64, copy=False))
        # Compute spherical lengths:
        lengths = spherical_tools.coords(output)
        # Create vector calculus space:
        vector_calculus = spherical_tools.vector_calculus_spherical(vars_space['dmassdt'].shape, lengths)
        # FOR MERRA:
        vars_space['ua'] /= lengths.mer_len_lat_slon
        vars_space['va'] /= lengths.zon_len_slat_lon
        vars_space['wa'] /= np.reshape(lengths.area_lat_lon, (1,) + vars_space['wa'].shape[1:])
        # Compute the mass divergence:
        DIV = vars_space['dmassdt'] + vector_calculus.DIV_from_UVW(*[vars_space[var] for var in ['ua', 'va', 'wa']])
        output.createVariable('dmass', np.float64, ('time', 'lev', 'lat', 'lon'))
        output.variables['dmass'][:] = DIV
    output.close()
    data.close()
    return
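A hypothetical invocation, using an argparse-style namespace for options; the attribute names in_file and out_file come from the function body, while the file names are placeholders:

from argparse import Namespace
coarse_grain_horizontal(Namespace(in_file='high_res.nc', out_file='coarse_grained.nc'))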