本文整理汇总了Python中netCDF4.Dataset.variables['latitude'][:]方法的典型用法代码示例。如果您正苦于以下问题:Python Dataset.variables['latitude'][:]方法的具体用法?Python Dataset.variables['latitude'][:]怎么用?Python Dataset.variables['latitude'][:]使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类netCDF4.Dataset
的用法示例。
在下文中一共展示了Dataset.variables['latitude'][:]方法的6个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: wave_copy
# 需要导入模块: from netCDF4 import Dataset [as 别名]
# 或者: from netCDF4.Dataset import variables['latitude'][:] [as 别名]
def wave_copy(fname, out_fname, begin,end, stat_dict,mode="storm_surge", step = 1):
    """Copy a wave-statistics netCDF file to a new file, rewriting dimensions,
    variables and global attributes from ``stat_dict``.

    NOTE(review): this block was recovered from an extraction that stripped
    all indentation and omitted the tail of the function — the structure
    below is reconstructed and must be verified against the original source.

    fname     -- path of the input netCDF file
    out_fname -- path of the output netCDF file (removed first if it exists)
    begin/end -- appear unused in the visible portion of the body
    stat_dict -- mapping of variable name -> data; must contain a 'time' key
    mode      -- processing mode (default 'storm_surge')
    step      -- appears unused in the visible portion of the body
    """
    # Overwrite any stale output file.
    if os.path.exists(out_fname):
        os.remove(out_fname)
    #get station id for the station_id dimension
    stn_site_id = get_global_attribute(fname, 'stn_station_number')
    alt = get_variable_data(fname, 'altitude')
    lat = get_variable_data(fname, 'latitude')
    long = get_variable_data(fname, 'longitude')
    d = Dataset(fname)
    output = Dataset(out_fname, 'w', format='NETCDF4_CLASSIC')
    # 'time' length comes from the statistics, not the input file.
    output.createDimension('time', len(stat_dict['time']))
    output.createDimension("station_id", len(stn_site_id))
    # copy globals
    for att in d.ncattrs():
        setattr(output, att, d.__dict__[att])
    # The copy gets its own identity.
    setattr(output, 'uuid', str(uuid.uuid4()))
    has_station_id = False
    # copy variables
    for key in d.variables:
        #skip adding pressure qc if the mode is storm surge
        # NOTE(review): the condition below looks mangled by the extraction —
        # as written it skips EVERY variable unless mode == 'station_id',
        # which contradicts the comment above and the default mode
        # 'storm_surge'. It likely tested `key`, not `mode`. Verify against
        # the original repository before relying on this.
        if mode != 'station_id':
            continue
        else:
            has_station_id = True
        name = key
        datatype = d.variables[key].datatype
        dim = d.variables[key].dimensions
        # int32 variables keep the library default fill value; everything
        # else is created with the project-wide FILL_VALUE.
        if datatype == "int32":
            var = output.createVariable(name, datatype, dim)
        else:
            var = output.createVariable(name, datatype, dim, fill_value=FILL_VALUE)
        # Copy per-variable attributes except _FillValue (set at creation).
        for att in d.variables[key].ncattrs():
            if att != '_FillValue':
                setattr(var, att, d.variables[key].__dict__[att])
    # Append every statistics series as a variable of the output file.
    for x in stat_dict:
        append_variable(out_fname, x, stat_dict[x], x, x)
    output.variables['altitude'][:] = alt
    output.variables['longitude'][:] = long
    output.variables['latitude'][:] = lat
    # output.variables['sea_pressure'][:] = 0
    if has_station_id == False:
        #the following changes are essential in case the air and sea gui files are processed
        #with older versions of the script
        st_id = output.createVariable('station_id','S1',('station_id'))
        st_id.setncattr('cf_role', 'time_series_id')
        st_id.setncattr('long_name', 'station identifier')
        st_id[:] = list(stn_site_id)
    #I have to keep this hunk of garbage until enough time has passed for all
    #data files to be properly formatted,
    deployment_time = unit_conversion.convert_ms_to_datestring(stat_dict['time'][0], pytz.utc)
    retrieval_time = unit_conversion.convert_ms_to_datestring(stat_dict['time'][-1], pytz.utc)
    set_global_attribute(out_fname, 'deployment_time', deployment_time)
    set_global_attribute(out_fname, 'retrieval_time', retrieval_time)
    set_global_attribute(out_fname, 'salinity_ppm', 'unused')
    set_global_attribute(out_fname, 'device_depth', 'unused')
    # Geospatial bounds are intentionally set to the whole globe / zero depth.
    set_global_attribute(out_fname, 'geospatial_lon_min', np.float64(-180))
    set_global_attribute(out_fname, 'geospatial_lon_max', np.float64(180))
    set_global_attribute(out_fname, 'geospatial_lat_min', np.float64(-90))
    set_global_attribute(out_fname, 'geospatial_lat_max', np.float64(90))
    set_global_attribute(out_fname, 'geospatial_vertical_min', np.float64(0))
    set_global_attribute(out_fname, 'geospatial_vertical_max', np.float64(0))
    first, last = get_sensor_orifice_elevation(out_fname)
    set_global_attribute(out_fname, 'sensor_orifice_elevation_at_deployment_time', \
    np.float64("{0:.4f}".format(first)))
    set_global_attribute(out_fname, 'sensor_orifice_elevation_at_retrieval_time', \
    np.float64("{0:.4f}".format(last)))
    set_global_attribute(out_fname, 'sensor_orifice_elevation_units', 'meters')
    first_land, last_land = get_land_surface_elevation(out_fname)
    set_global_attribute(out_fname, 'initial_land_surface_elevation', \
    np.float64("{0:.4f}".format(first_land)))
    set_global_attribute(out_fname, 'final_land_surface_elevation', \
    np.float64("{0:.4f}".format(last_land)))
    set_global_attribute(out_fname, 'land_surface_elevation_units', 'meters')
    # first_depth = get_initial_water_depth(out_fname)
    # set_global_attribute(out_fname, 'initial_water_depth', \
    # np.float64("{0:.4f}".format(first_depth)))
    # final_depth = get_final_water_depth(out_fname)
    # set_global_attribute(out_fname, 'final_water_depth', \
    # np.float64("{0:.4f}".format(final_depth)))
    # set_global_attribute(out_fname, 'water_depth_units', 'meters')
    set_global_attribute(out_fname, 'featureType', 'timeSeries')
    set_var_attribute(out_fname, 'latitude', 'valid_max', np.float64(90))
#.........这里部分代码省略.........
示例2: chop_netcdf
# 需要导入模块: from netCDF4 import Dataset [as 别名]
# 或者: from netCDF4.Dataset import variables['latitude'][:] [as 别名]
def chop_netcdf(fname, out_fname, begin, end, air_pressure = False):
    """Truncate the data in a netCDF file between two indices.

    Copies dimensions, variables, and attributes from ``fname`` into a new
    NETCDF4_CLASSIC file ``out_fname``, keeping only samples in the
    half-open index range ``[begin, end)`` for time, pressure, and QC flags.

    NOTE(review): indentation was lost in extraction; block structure was
    reconstructed from the control-flow keywords — verify against the
    original repository.

    fname        -- path of the source netCDF file
    out_fname    -- path of the truncated output file (replaced if present)
    begin, end   -- sample indices delimiting the slice to keep
    air_pressure -- if True, slice/write 'air_pressure' instead of
                    'sea_pressure'
    """
    if os.path.exists(out_fname):
        os.remove(out_fname)
    length = end - begin
    # Choose which pressure series to carry over.
    if air_pressure == False:
        p = get_pressure(fname)[begin:end]
    else:
        p = get_air_pressure(fname)[begin:end]
    #get station id for the station_id dimension
    stn_site_id = get_global_attribute(fname, 'stn_station_number')
    t = get_time(fname)[begin:end]
    flags = get_flags(fname)[begin:end]
    # These coordinate variables are copied whole (not sliced).
    alt = get_variable_data(fname, 'altitude')
    lat = get_variable_data(fname, 'latitude')
    long = get_variable_data(fname, 'longitude')
    d = Dataset(fname)
    output = Dataset(out_fname, 'w', format='NETCDF4_CLASSIC')
    output.createDimension('time', length)
    # copy globals
    for att in d.ncattrs():
        setattr(output, att, d.__dict__[att])
    # Remember the source file's uuid, then give the copy its own.
    og_uuid = get_global_attribute(fname, 'uuid')
    setattr(output, 'uuid', str(uuid.uuid4()))
    # copy variables
    for key in d.variables:
        name = key
        # The station_id dimension is only created if the source has the
        # variable (older files may not).
        if name == 'station_id':
            output.createDimension("station_id", len(stn_site_id))
        datatype = d.variables[key].datatype
        dim = d.variables[key].dimensions
        # int32 variables keep the default fill; others get FILL_VALUE.
        if datatype == "int32":
            var = output.createVariable(name, datatype, dim)
        else:
            var = output.createVariable(name, datatype, dim, fill_value=FILL_VALUE)
        for att in d.variables[key].ncattrs():
            if att != '_FillValue':
                setattr(var, att, d.variables[key].__dict__[att])
        #add uuid of previous netCDF file to pressure variable
        if name == 'sea_pressure':
            setattr(var, 'sea_uuid', og_uuid)
        if name == 'air_pressure':
            setattr(var, 'air_uuid', og_uuid)
    # Fill the copied variables with the sliced data.
    output.variables['time'][:] = t
    if air_pressure == False:
        output.variables['sea_pressure'][:] = p
    else:
        output.variables['air_pressure'][:] = p
    output.variables['pressure_qc'][:] = flags
    output.variables['altitude'][:] = alt
    output.variables['longitude'][:] = long
    output.variables['latitude'][:] = lat
    # Refresh the time-coverage globals to match the truncated range.
    setattr(output,"time_coverage_start", \
    unit_conversion.convert_ms_to_datestring(t[0], pytz.utc))
    setattr(output,"time_coverage_end", \
    unit_conversion.convert_ms_to_datestring(t[-1], pytz.utc))
    setattr(output,"time_coverage_duration", \
    unit_conversion.get_time_duration(t[-1] - t[0]))
    d.close()
    output.close()
示例3: replace_cfradial_metadata
# 需要导入模块: from netCDF4 import Dataset [as 别名]
# 或者: from netCDF4.Dataset import variables['latitude'][:] [as 别名]
def replace_cfradial_metadata( stdtape_filepath ):
    """Replace position metadata (lat/lon/altitude) in every 'cfrad.*' file in
    the current directory with values from the flight standard-tape file.

    NOTE(review): Python 2 syntax (print statements). This block was
    recovered from an extraction that stripped indentation and omitted the
    tail of the function — the structure below is reconstructed and must be
    verified against the original source.

    stdtape_filepath -- path of the standard-tape netCDF file whose
                        base_time/Time/LAT/LON/GEOPOT_ALT/PRES_ALT variables
                        supply the replacement values
    """
    # open standard tape file for reading
    stdtape_file = Dataset(stdtape_filepath,'r')
    # get stdtape timestamp: seconds offsets plus the epoch base_time
    base_time=stdtape_file.variables['base_time'][:]
    stdtape_secs=stdtape_file.variables['Time'][:]
    stdtape_timestamp=pd.to_datetime(stdtape_secs+base_time,unit='s')
    stdtape_lats=stdtape_file.variables['LAT'][:]
    stdtape_lons=stdtape_file.variables['LON'][:]
    stdtape_geo_alt=stdtape_file.variables['GEOPOT_ALT'][:]
    stdtape_pres_alt=stdtape_file.variables['PRES_ALT'][:]
    # close the file
    stdtape_file.close()
    # creates dictionary
    dict_stdtape={ 'lats':stdtape_lats,
    'lons':stdtape_lons,
    'galt': stdtape_geo_alt,
    'palt': stdtape_pres_alt}
    # pandas dataframe for standard tape, indexed by timestamp
    df_stdtape=pd.DataFrame(data=dict_stdtape,index=stdtape_timestamp)
    # get a list of cfradial files in the current directory
    # (bash script changes current directory)
    nclist = glob.glob('cfrad.*')
    nlist=len(nclist)
    print "Folder contains ",str(nlist)," cfradial files"
    for f in range(nlist):
        # open cfradial file for reading and writing
        print 'Processing: '+nclist[f]
        cfrad_file = Dataset(nclist[f],'r+')
        # get cfradial timestamp; time_coverage_start is stored as a char
        # array, so join its elements into an ISO-8601 string first
        start_datetime_nparray=cfrad_file.variables['time_coverage_start'][0:20]
        strpattern=''
        start_datetime_str=strpattern.join(str(v) for v in start_datetime_nparray)
        time_format="%Y-%m-%dT%H:%M:%SZ"
        cfrad_start_datetime=pd.to_datetime(start_datetime_str,format=time_format)
        cfrad_time = cfrad_file.variables['time'][:]
        # whole-second timestamps (fractional part truncated)
        cfrad_secs=pd.to_timedelta(cfrad_time.astype(int),unit='s')
        cfrad_timestamp=cfrad_start_datetime+cfrad_secs
        # create timestamp2 to milliseconds precision
        foo=[math.modf(x) for x in cfrad_time]
        # NOTE(review): this rebinds the loop variable `f`. Harmless here
        # because `f` is only read at the top of each iteration, but it is
        # confusing — worth renaming in the original source.
        f=zip(*foo) # unpair list of tuples
        ms = np.asarray(f[0])*1000
        s = f[1]
        cfrad_ms=pd.to_timedelta(ms,unit='ms')
        cfrad_s=pd.to_timedelta(s,unit='s')
        cfrad_timestamp2=cfrad_start_datetime+cfrad_s+cfrad_ms
        # remove duplicated timestamps (str type)
        unique_timestamp=cfrad_timestamp.drop_duplicates()
        nstamps=unique_timestamp.nunique()
        # cfradial information
        cfrad_lats = cfrad_file.variables['latitude'][:]
        cfrad_lons = cfrad_file.variables['longitude'][:]
        cfrad_altitude = cfrad_file.variables['altitude'][:]
        cfrad_altitude_agl = cfrad_file.variables['altitude_agl'][:]
        # creates dictionary
        dict_cfrad = { 'lats':cfrad_lats,
        'lons':cfrad_lons,
        'alt': cfrad_altitude,
        'alt_agl': cfrad_altitude_agl }
        # pandas dataframe for cfradial file
        df_cfrad=pd.DataFrame(data=dict_cfrad,index=cfrad_timestamp)
        df_cfrad_new=df_cfrad.copy()
        for t in range(nstamps):
            timestamp=str(unique_timestamp[t])
            # from std_tape
            # NOTE(review): df_stdtape[timestamp] relies on old-pandas
            # partial-string row indexing via __getitem__; bare `except:`
            # below also swallows every exception, not just lookup failures.
            try:
                new_lats=df_stdtape[timestamp]['lats']
                new_lons=df_stdtape[timestamp]['lons']
                new_galts=df_stdtape[timestamp]['galt']
                new_palts=df_stdtape[timestamp]['palt']
            except:
                print "\nERROR: check STDTAPE file is correct"
                cfrad_file.close()
                sys.exit(1)
            # to cfradial
            # since the 'altitude' field does not exist in dict_cfrad
            # pandas creates a new column (field). See if it's more
            # efficient creating a dict_cfrad with 'altitude' and
            # 'altitude_agl' fields
            df_cfrad_new.loc[timestamp,'lats']=new_lats
            df_cfrad_new.loc[timestamp,'lons']=new_lons
            df_cfrad_new.loc[timestamp,'altitude']=new_palts
            df_cfrad_new.loc[timestamp,'altitude_agl']=new_galts
#.........这里部分代码省略.........
示例4: replace_cfradial_coords
# 需要导入模块: from netCDF4 import Dataset [as 别名]
# 或者: from netCDF4.Dataset import variables['latitude'][:] [as 别名]
def replace_cfradial_coords( stdtape_filepath ):
    """Overwrite the latitude/longitude coordinates of every 'cfrad.*' file in
    the current directory with values from the flight standard-tape file.

    NOTE(review): Python 2 syntax (print statements). Indentation was lost in
    extraction; the structure below is reconstructed — verify against the
    original source.

    stdtape_filepath -- path of the standard-tape netCDF file supplying
                        base_time/Time/LAT/LON
    """
    from netCDF4 import Dataset
    import glob
    import numpy as np
    import pandas as pd
    # open standard tape file for reading
    stdtape_file = Dataset(stdtape_filepath,'r')
    # get stdtape timestamp: seconds offsets plus the epoch base_time
    base_time=stdtape_file.variables['base_time'][:]
    stdtape_secs=stdtape_file.variables['Time'][:]
    stdtape_timestamp=pd.to_datetime(stdtape_secs+base_time,unit='s')
    stdtape_lats=stdtape_file.variables['LAT'][:]
    stdtape_lons=stdtape_file.variables['LON'][:]
    # close the file
    stdtape_file.close()
    # pandas dataframe for standard tape, indexed by timestamp
    d={'lats':stdtape_lats,'lons':stdtape_lons}
    df_stdtape=pd.DataFrame(data=d,index=stdtape_timestamp)
    # get a list of cfradial files
    nclist = glob.glob('cfrad.*')
    nlist=len(nclist)
    print "Folder contains ",str(nlist)," cfradial files"
    for f in np.arange(nlist):
        # open cfradial file for reading and writing
        print 'Processing: '+nclist[f]
        cfrad_file = Dataset(nclist[f],'r+')
        # get cfradial timestamp; time_coverage_start is a char array, so
        # join its elements into an ISO-8601 string before parsing
        start_datetime_nparray=cfrad_file.variables['time_coverage_start'][0:20]
        strpattern=''
        start_datetime_str=strpattern.join(str(v) for v in start_datetime_nparray)
        time_format="%Y-%m-%dT%H:%M:%SZ"
        cfrad_start_datetime=pd.to_datetime(start_datetime_str,format=time_format)
        cfrad_time = cfrad_file.variables['time'][:]
        # whole-second timestamps (fractional part truncated)
        cfrad_secs=pd.to_timedelta(cfrad_time.astype(int),unit='s')
        cfrad_timestamp=cfrad_start_datetime+cfrad_secs
        # remove duplicated timestamps (str)
        unique_timestamp=cfrad_timestamp.drop_duplicates()
        nstamps=unique_timestamp.nunique()
        # cfradial coordinates
        cfrad_lats = cfrad_file.variables['latitude'][:]
        cfrad_lons = cfrad_file.variables['longitude'][:]
        # pandas dataframe for cfradial file
        d={'lats':cfrad_lats,'lons':cfrad_lons}
        df_cfrad=pd.DataFrame(data=d,index=cfrad_timestamp)
        df_cfrad_new=df_cfrad.copy()
        # replace each unique second's coordinates with standard-tape values
        # NOTE(review): df_stdtape[timestamp] relies on old-pandas
        # partial-string row indexing via __getitem__ — confirm the pinned
        # pandas version supports it.
        for t in np.arange(nstamps):
            timestamp=str(unique_timestamp[t])
            new_lats=df_stdtape[timestamp]['lats']
            new_lons=df_stdtape[timestamp]['lons']
            df_cfrad_new.loc[timestamp,'lats']=new_lats
            df_cfrad_new.loc[timestamp,'lons']=new_lons
        # write the corrected coordinates back into the cfradial file
        cfrad_file.variables['latitude'][:]=df_cfrad_new['lats'].values
        cfrad_file.variables['longitude'][:]=df_cfrad_new['lons'].values
        # close the file.
        cfrad_file.close()
示例5: ConvertNCCF
# 需要导入模块: from netCDF4 import Dataset [as 别名]
# 或者: from netCDF4.Dataset import variables['latitude'][:] [as 别名]
#.........这里部分代码省略.........
moo=np.where(np.array(nc_dims) == 'longitude')
goo=np.where(np.array(nc_vars) == 'longitude')
if not(goo[0] >= 0): goo=np.where(np.array(nc_vars) == 'lon') # Look for mistakes in HadISDH
if (moo[0] >= 0) & (goo[0] >= 0):
ncfw.createDimension(nc_dims[moo[0]],ncf.variables[nc_vars[goo[0]]].size)
else:
ncfw.createDimension('longitude',TheCLongs)
data['longitude']=ncfw.createVariable('longitude','f8',('longitude',))
data['longitude'].setncattr('standard_name',u'longitude')
data['longitude'].setncattr('long_name',u'longitude')
data['longitude'].setncattr('units',u'degrees_east')
data['longitude'].setncattr('point_spacing',u'even')
data['longitude'].setncattr('axis',u'X')
makemonth=0
moo=np.where(np.array(nc_dims) == 'month')
goo=np.where(np.array(nc_vars) == 'month')
if not(goo[0] >= 0): goo=np.where(np.array(nc_vars) == 'months') # Look for mistakes in HadISDH
if (moo[0] >= 0) & (goo[0] >= 0):
makemonth=1
ncfw.createDimension('month',12)
data['month']=ncfw.createVariable('month','i',('month',))
data['month'].setncattr('standard_name',u'month')
data['month'].setncattr('long_name',u'month')
data['month'].setncattr('units',u'days since 1973-1-1 00:00:00')
data['month'].setncattr('calendar',u'gregorian')
data['month'].setncattr('start_year',u'1973s')
data['month'].setncattr('end_year',u'1973s')
data['month'].setncattr('start_month',u'1s')
data['month'].setncattr('end_month',u'12s')
data['month'].setncattr('axis',u'T')
# Now set up the variables
# stop()
for loo in range(nvars): # miss out time, lat and lon - and month at the end
print(loo)
if (nc_vars[loo] != 'time') & (nc_vars[loo] != 'latitude') & (nc_vars[loo] != 'longitude') & (nc_vars[loo] != 'month') & \
(nc_vars[loo] != 'times') & (nc_vars[loo] != 'latitudes') & (nc_vars[loo] != 'longitudes') & (nc_vars[loo] != 'months') & \
(nc_vars[loo] != 'lat') & (nc_vars[loo] != 'lon'):
print(nc_vars[loo])
ncfw_var=ncfw.createVariable(nc_vars[loo],ncf.variables[nc_vars[loo]].dtype,ncf.variables[nc_vars[loo]].dimensions)
if (any(np.where(np.array(ncf.variables[nc_vars[loo]].ncattrs()) == '_FillValue'))):
ncfw_var.setncattr('_FillValue',ncf.variables[nc_vars[loo]].getncattr('_FillValue'))
elif (any(np.where(np.array(ncf.variables[nc_vars[loo]].ncattrs()) == 'missing_value'))):
ncfw_var.setncattr('_FillValue',ncf.variables[nc_vars[loo]].getncattr('missing_value'))
else:
ncfw_var.setncattr('_FillValue',TheMissing)
if (any(np.where(np.array(ncf.variables[nc_vars[loo]].ncattrs()) == 'missing_value'))):
ncfw_var.setncattr('missing_value',ncf.variables[nc_vars[loo]].getncattr('missing_value'))
elif (any(np.where(np.array(ncf.variables[nc_vars[loo]].ncattrs()) == '_FillValue'))):
ncfw_var.setncattr('missing_value',ncf.variables[nc_vars[loo]].getncattr('_FillValue'))
else:
ncfw_var.setncattr('missing_value',TheMissing)
if (any(np.where(np.array(ncf.variables[nc_vars[loo]].ncattrs()) == 'valid_min'))):
ncfw_var.setncattr('valid_min',ncf.variables[nc_vars[loo]].getncattr('valid_min'))
else:
ncfw_var.setncattr('valid_min',min(ncf.variables[nc_vars[0]][np.where(ncf.variables[nc_vars[0]][:] != TheMissing)]))
if (any(np.where(np.array(ncf.variables[nc_vars[loo]].ncattrs()) == 'valid_max'))):
ncfw_var.setncattr('valid_max',ncf.variables[nc_vars[loo]].getncattr('valid_max'))
else:
ncfw_var.setncattr('valid_max',max(ncf.variables[nc_vars[0]][np.where(ncf.variables[nc_vars[0]][:] != TheMissing)]))
if (any(np.where(np.array(ncf.variables[nc_vars[loo]].ncattrs()) == 'reference_period'))):
ncfw_var.setncattr('reference_period',ncf.variables[nc_vars[loo]].getncattr('reference_period'))
else:
ncfw_var.setncattr('reference_period',ClimPeriod)
ncfw_var.setncatts({'long_name':ncf.variables[nc_vars[loo]].getncattr('long_name'),
'units':ncf.variables[nc_vars[loo]].getncattr('units')})
# Now fill the variables
ncfw.variables['time'][:]=TheDaysArray
ncfw.variables['latitude'][:]=ncf.variables[nc_vars[1]][:]
ncfw.variables['longitude'][:]=ncf.variables[nc_vars[2]][:]
if (makemonth == 1):
ncfw.variables['month'][:]=TheDaysArray[0:12]
for loo in range((nvars)): # miss out time, lat and lon
print(loo)
if (nc_vars[loo] != 'time') & (nc_vars[loo] != 'latitude') & (nc_vars[loo] != 'longitude') & (nc_vars[loo] != 'month') & \
(nc_vars[loo] != 'times') & (nc_vars[loo] != 'latitudes') & (nc_vars[loo] != 'longitudes') & (nc_vars[loo] != 'months') & \
(nc_vars[loo] != 'lat') & (nc_vars[loo] != 'lon'):
print(nc_vars[loo])
ncfw.variables[nc_vars[loo]][:]=ncf.variables[nc_vars[loo]][:]
ncfw.close()
return # ConvertNCCF
示例6: range
# 需要导入模块: from netCDF4 import Dataset [as 别名]
# 或者: from netCDF4.Dataset import variables['latitude'][:] [as 别名]
# Reshape the flat temp/salt records into 3-D (depth, lat, lon) arrays and
# write them to a NetCDF file with coordinate variables.
#
# NOTE(review): this is a fragment of a larger Python 2 script — num_lon,
# num_lat, num_depth, temp, salt, temp_3d, salt_3d, posn, lon, lat, depth,
# and out_file are all defined earlier in the script, outside this view.
# Indentation was lost in extraction and has been reconstructed.
#
# The read order below implies the flat arrays are ordered with depth
# varying fastest, then latitude, then longitude — confirm against the
# producer of `temp`/`salt`.
for i in range(num_lon):
    for j in range(num_lat):
        for k in range(num_depth):
            temp_3d[k,j,i] = temp[posn]
            salt_3d[k,j,i] = salt[posn]
            # posn walks sequentially through the flat input records
            posn = posn+1
# Output to NetCDF file
print 'Writing NetCDF file'
# NOTE(review): `id` shadows the builtin of the same name for the rest of
# the script.
id = Dataset(out_file, 'w')
id.createDimension('longitude', num_lon)
id.createDimension('latitude', num_lat)
id.createDimension('depth', num_depth)
id.createVariable('longitude', 'f8', ('longitude'))
id.variables['longitude'].units = 'degrees'
id.variables['longitude'][:] = lon
id.createVariable('latitude', 'f8', ('latitude'))
id.variables['latitude'].units = 'degrees'
id.variables['latitude'][:] = lat
id.createVariable('depth', 'f8', ('depth'))
id.variables['depth'].units = 'metres'
id.variables['depth'][:] = depth
id.createVariable('temp', 'f8', ('depth', 'latitude', 'longitude'))
id.variables['temp'].units = 'C'
id.variables['temp'][:,:,:] = temp_3d
id.createVariable('salt', 'f8', ('depth', 'latitude', 'longitude'))
id.variables['salt'].units = 'psu'
id.variables['salt'][:,:,:] = salt_3d
id.close()