本文整理汇总了Python中netCDF4.Dataset.contact方法的典型用法代码示例。如果您正苦于以下问题:Python Dataset.contact方法的具体用法?Python Dataset.contact怎么用?Python Dataset.contact使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类netCDF4.Dataset
的用法示例。
在下文中一共展示了Dataset.contact方法的5个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: write_nc_file
# 需要导入模块: from netCDF4 import Dataset [as 别名]
# 或者: from netCDF4.Dataset import contact [as 别名]
def write_nc_file(daily_results, filename, nc, anom_mode=False):
    """Write daily z500 results to a new CF-1.6 netCDF file.

    Parameters
    ----------
    daily_results : array-like
        Daily field with shape (time, lat, lon); written as the data variable.
    filename : str
        Path of the netCDF file to create.
    nc : netCDF4.Dataset
        Source dataset; its 'time' coordinate is subsampled (every 4th value,
        i.e. 6-hourly -> daily — confirm source cadence against the caller).
    anom_mode : bool, optional
        If True the data variable is named 'daily_anom' (anomaly vs the
        1981-2010 climatology); otherwise 'daily_mean'.
    """
    # Grab every 4th time value to represent daily.
    daily_time_var = nc.variables['time'][::4]
    nc_out = Dataset(filename, mode='w', format='NETCDF4')
    try:
        # Dimensions; LONS / LATS / MONTHS_YEAR are module-level constants.
        nc_out.createDimension('lon', LONS)
        nc_out.createDimension('lat', LATS)
        nc_out.createDimension('time', None)  # UNLIMITED
        nc_out.createDimension('month', MONTHS_YEAR)
        # Global attributes.
        nc_out.title = ''
        nc_out.institution = ''
        nc_out.project = ''
        nc_out.contact = '[email protected]'
        nc_out.Conventions = "CF-1.6"
        # Coordinate variables: 2-degree global grid.
        longitude = nc_out.createVariable('lon', 'f8', ('lon',))
        longitude.standard_name = 'longitude'
        longitude.long_name = 'longitude'
        longitude.units = 'degrees_east'
        longitude.modulo = 360.0
        longitude.axis = 'X'
        longitude[:] = np.arange(0, 360.0, 2.0)
        latitude = nc_out.createVariable('lat', 'f8', ('lat',))
        latitude.standard_name = 'latitude'
        latitude.long_name = 'latitude'
        latitude.units = 'degrees_north'
        latitude.axis = 'Y'
        latitude[:] = np.arange(-90.0, 92.0, 2.0)  # inclusive of +90
        time = nc_out.createVariable('time', 'f8', ('time',))
        time.units = 'hours since 1-1-1 0:0:0'
        time.calendar = 'standard'  # Gregorian
        time[:] = daily_time_var
        # Data variable: name and long_name depend on anom_mode.
        if anom_mode:
            daily_mean = nc_out.createVariable('daily_anom', 'f8', ('time', 'lat', 'lon'))
            daily_mean.long_name = 'z500 daily anomaly vs 1981-2010'
        else:
            daily_mean = nc_out.createVariable('daily_mean', 'f8', ('time', 'lat', 'lon'))
            daily_mean.long_name = 'z500 daily mean'
        daily_mean[:] = daily_results
    finally:
        # Close in a finally so the file is released even if a write fails
        # (original leaked the handle on any exception above).
        nc_out.close()
示例2: makenetcdf_
# 需要导入模块: from netCDF4 import Dataset [as 别名]
# 或者: from netCDF4.Dataset import contact [as 别名]
#.........这里部分代码省略.........
# NOTE(review): excerpt of makenetcdf_ — the loop header and the variable /
# array setup preceding this fragment are omitted in the source listing.
# Empty CSV fields are replaced with sentinels: -9999 for numeric values and
# -128 for the QC flag — presumably the variables' fill values; confirm
# against the createVariable calls in the omitted part.
if fields[4] == "":
sals[i, 0] = -9999
else:
sals[i, 0] = fields[4]
if fields[5] == "":
fco2s[i, 0] = -9999
else:
fco2s[i, 0] = fields[5]
if len(fields[6]) == 0:
fco2qcs[i, 0] = -128
else:
fco2qcs[i, 0] = makeqcvalue_(int(fields[6]))
# Copy the assembled in-memory arrays into the netCDF variables in bulk.
depthvar[:,:] = depths
positionvar[:,:] = positions
sstvar[:,:] = temps
sssvar[:,:] = sals
fco2var[:,:] = fco2s
fco2qcvar[:,:] = fco2qcs
# All data-mode variables share the same dms array.
depthdmvar[:,:] = dms
sstdmvar[:,:] = dms
sssdmvar[:,:] = dms
fco2dmvar[:,:] = dms
# Global attributes
# OceanSITES / Copernicus InSituTAC metadata for the trajectory file.
nc.id = filenameroot
nc.data_type = "OceanSITES trajectory data"
nc.netcdf_version = "netCDF-4 classic model"
nc.format_version = "1.2"
nc.Conventions = "CF-1.6 OceanSITES-Manual-1.2 Copernicus-InSituTAC-SRD-1.3 "\
+ "Copernicus-InSituTAC-ParametersList-3.1.0"
nc.cdm_data_type = "Trajectory"
nc.data_mode = "R"
nc.area = "Global Ocean"
# Bounding box of the trajectory; stored as strings per the conventions used here.
nc.geospatial_lat_min = str(minlat)
nc.geospatial_lat_max = str(maxlat)
nc.geospatial_lon_min = str(minlon)
nc.geospatial_lon_max = str(maxlon)
nc.geospatial_vertical_min = "5.00"
nc.geospatial_vertical_max = "5.00"
nc.last_latitude_observation = lats[-1]
nc.last_longitude_observation = lons[-1]
nc.last_date_observation = endtime.strftime("%Y-%m-%dT%H:%M:%SZ")
nc.time_coverage_start = starttime.strftime("%Y-%m-%dT%H:%M:%SZ")
nc.time_coverage_end = endtime.strftime("%Y-%m-%dT%H:%M:%SZ")
#datasetdate = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ")
#nc.date_update = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ")
#nc.history = datasetdate + " : Creation"
nc.update_interval = "daily"
nc.data_assembly_center = "BERGEN"
nc.institution = "University of Bergen / Geophysical Institute"
nc.institution_edmo_code = "4595"
nc.institution_references = " "
nc.contact = "[email protected]"
nc.title = "Global Ocean - In Situ near-real time carbon observation"
nc.author = "cmems-service"
nc.naming_authority = "Copernicus"
# Platform identifiers are derived from the call sign, not the raw code.
nc.platform_code = getplatformcallsign_(platform_code)
nc.site_code = getplatformcallsign_(platform_code)
# For buoys -> Mooring observation.
platform_category_code = getplatformcategorycode_(platform_code)
nc.platform_name = getplatformname_(platform_code)
nc.source_platform_category_code = platform_category_code
nc.source = PLATFORM_CODES[platform_category_code]
nc.quality_control_indicator = "6" # "Not used"
nc.quality_index = "0"
nc.comment = " "
nc.summary = " "
nc.reference = "http://marine.copernicus.eu/, https://www.icos-cp.eu/"
nc.citation = "These data were collected and made freely available by the " \
+ "Copernicus project and the programs that contribute to it."
nc.distribution_statement = "These data follow Copernicus standards; they " \
+ "are public and free of charge. User assumes all risk for use of data. " \
+ "User must display citation in any publication or product using data. " \
+ "User must contact PI prior to any commercial use of data."
# Write the netCDF
nc.close()
# Read the netCDF file into memory
with open(ncpath, "rb") as ncfile:
ncbytes = ncfile.read()
# Delete the temp netCDF file
os.remove(ncpath)
# Return the file name root plus the raw bytes of the finished file.
return [filenameroot, ncbytes]
示例3: corrected
# 需要导入模块: from netCDF4 import Dataset [as 别名]
# 或者: from netCDF4.Dataset import contact [as 别名]
# Fragment: finalize the bias-corrected global-radiation dataset.
# Dataset/variable handles (dataset, var, crs, lats, lons, times) and the
# data (ds, param, lat1d, lon1d, startdate) are created earlier in the file.
dataset.title='Daily global radiation'
dataset.comment='Daily global radiation bias corrected (scaled distribution mapping) data of the EURO-CORDEX model. The reference period is 1981-2010, the years 2006-2010 are taken from the corresponding rcp4.5 scenario.'
var.grid_mapping = 'latitude_longitude'
# projection information (WGS 84 ellipsoid parameters)
crs.longitude_of_prime_meridian = 0.0
crs.semi_major_axis = 6378137.0
crs.inverse_flattening = 298.257223563
crs.comment = 'Latitude and longitude on the WGS 1984 datum'
# write data to netCDF variable
var[:] = ds[param].data
lats[:] = lat1d
lons[:] = lon1d
# fill in times: one value per day starting at startdate.
dates = [startdate+k*timedelta(days=1) for k in range(ds[param].data.shape[0])]
times[:] = date2num(dates, units=times.units, calendar=times.calendar)
# global attributes
dataset.project= "Climaproof, funded by the Austrian Development Agency (ADA) and co-funded by the United Nations Environmental Programme (UNEP)"
dataset.source = 'Bias Correction Method: Switanek et al., 2017, doi.org/10.5194/hess-21-2649-2017, Regridding Method: Higher-order patch recovery (patch) by Earth System Modelling Framework (ESMF) software ESMF_RegridWeightGen (http://www.earthsystemmodeling.org/esmf_releases/public/last/ESMF_refdoc/)'
dataset.contact = 'Maria Wind <[email protected]>, Herbert Formayer <[email protected]>'
dataset.institution = 'Institute of Meteorology, University of Natural Resources and Life Sciences, Vienna, Austria'
# Fixed: was 'referencees' (typo), which wrote a misspelled global attribute.
dataset.references = 'https://data.ccca.ac.at/group/climaproof'
# Fixed: CF requires the capitalized 'Conventions' attribute name;
# lowercase 'conventions' is not recognized by CF-aware tools.
dataset.Conventions = 'CF-1.6'
# close dataset
dataset.close()
示例4: write_netcdf
# 需要导入模块: from netCDF4 import Dataset [as 别名]
# 或者: from netCDF4.Dataset import contact [as 别名]
def write_netcdf(ncfile, format, domain, data):
    """Write soil-saturation monitor fields to a netCDF file.

    ncfile: output path; format: netCDF format string passed to Dataset;
    domain: dict with grid sizes 'nx' and 'ny'; data: dict of 2-D arrays
    keyed by the variable names written below.
    NOTE(review): this listing is truncated ("部分代码省略") — the tail of
    the function, including any nc.close(), is not visible here.
    """
# (Original indentation was lost in this listing; code kept byte-identical.)
nc = Dataset(ncfile, 'w', format=format)
nc.createDimension('x', domain['nx'])
nc.createDimension('y', domain['ny'])
# Coordinate and data variables, all float32 on the (y, x) grid.
nav_lon = nc.createVariable('nav_lon', 'f4', ('y','x',))
nav_lat = nc.createVariable('nav_lat', 'f4', ('y','x',))
soilsat_inst = nc.createVariable('soilsat_inst', 'f4', ('y','x',))
soilsat_mean = nc.createVariable('soilsat_mean', 'f4', ('y','x',))
soilsat_anom = nc.createVariable('soilsat_anom', 'f4', ('y','x',))
soilsat_perc = nc.createVariable('soilsat_perc', 'f4', ('y','x',))
# Bulk-copy the input arrays.
nav_lon[:] = data['nav_lon']
nav_lat[:] = data['nav_lat']
soilsat_inst[:] = data['soilsat_inst']
soilsat_mean[:] = data['soilsat_mean']
soilsat_anom[:] = data['soilsat_anom']
soilsat_perc[:] = data['soilsat_perc']
nav_lon.units = 'degrees_east'
nav_lat.units = 'degrees_north'
# Saturation variables are dimensionless fractions.
soilsat_inst.units = '-'
soilsat_mean.units = '-'
soilsat_anom.units = '-'
soilsat_perc.units = '-'
nav_lon.long_name = 'Longitude'
nav_lat.long_name = 'Latitude'
soilsat_inst.long_name = 'Soil saturation'
soilsat_mean.long_name = 'Soil saturation mean'
soilsat_anom.long_name = 'Soil saturation anomaly'
soilsat_perc.long_name = 'Soil saturation percentile'
# NOTE(review): 'FillValue' is NOT the reserved netCDF '_FillValue'
# attribute, so readers will not treat it as a fill value; the intended
# mechanism is fill_value=... in createVariable — confirm with the author.
nav_lon.FillValue = default_fillvals['f4']
nav_lat.FillValue = default_fillvals['f4']
soilsat_inst.FillValue = default_fillvals['f4']
soilsat_mean.FillValue = default_fillvals['f4']
soilsat_anom.FillValue = default_fillvals['f4']
soilsat_perc.FillValue = default_fillvals['f4']
nav_lon.missing_value = 1.e+20
nav_lat.missing_value = 1.e+20
soilsat_inst.missing_value = 1.e+20
soilsat_mean.missing_value = 1.e+20
soilsat_anom.missing_value = 1.e+20
soilsat_perc.missing_value = 1.e+20
# NOTE(review): CF 'axis' is normally a single letter per coordinate
# variable; 'YX' here is unconventional — verify downstream consumers.
nav_lon.axis = 'YX'
nav_lat.axis = 'YX'
soilsat_inst.axis = 'YX'
soilsat_mean.axis = 'YX'
soilsat_anom.axis = 'YX'
soilsat_perc.axis = 'YX'
nav_lon.description = 'Longitude of grid cell center'
nav_lat.description = 'Latitude of grid cell center'
soilsat_inst.description = ('Simulated total column soil saturation for '
'a specific date')
soilsat_mean.description = ('Long-term mean simulated total colunm soil '
'saturation for a specific day of the year. '
'The long-term mean is calculated as the 5 day '
'moving average centered on the current day. '
'The averaging pariod is 1916-2004')
soilsat_anom.description = ('Total column soil saturation anomaly. Calculated '
'as soilsat_inst - soilsat_anom')
soilsat_perc.description = ('Total column soil saturation percentile. This '
'value shows how often during the 1916-2004 '
'reference period the soil saturation on this '
'day of the year (using a 5 day centered '
'window) was less than soilsat_inst')
# Valid ranges: saturation in [0, 1], anomaly in [-1, 1].
nav_lon.valid_min = -180.
nav_lat.valid_min = -90.
soilsat_inst.valid_min = 0.
soilsat_mean.valid_min = 0.
soilsat_anom.valid_min = -1.
soilsat_perc.valid_min = 0.
nav_lon.valid_max = 180.
nav_lat.valid_max = 90.
soilsat_inst.valid_max = 1.
soilsat_mean.valid_max = 1.
soilsat_anom.valid_max = 1.
soilsat_perc.valid_max = 1.
nav_lon.modulo = 360.
nc.history = 'Created ' + time.ctime(time.time())
nc.description = ('Soil saturation data from the Variable Infiltration Model '
'as part of the operational surface water monitor. Note '
'that since this data is from the operational surface '
'water monitor, there may be occassional data problems. '
'Please check the data carefully')
nc.source = ('Surface Water Monitor, Surface Water Hydrology Group, '
'University of Washington, Seattle, Washington, USA')
nc.website = 'http://www.hydro.washington.edu/forecast/monitor'
nc.contact = 'Bart Nijssen, email: [email protected]'
# NOTE(review): this second assignment overwrites the 'Created ...'
# history set above — likely one of the two was meant to append.
nc.history = ' '.join(sys.argv)
nc.projection = 'Geographic'
nc.resolution = ('Spatial resolution: Longitude ({} degrees), latitude ({} '
#.........这里部分代码省略.........
示例5: Dataset
# 需要导入模块: from netCDF4 import Dataset [as 别名]
# 或者: from netCDF4.Dataset import contact [as 别名]
# Create output NetCDF file
# --------------------------
# NOTE(review): script fragment — outfilename, outlevels, lon, lat and the
# input dataset f0 are defined earlier in the (omitted) file, and the
# listing continues past this excerpt.
if os.path.exists(outfilename): os.remove(outfilename)
f1 = Dataset(outfilename, mode='w', format='NETCDF4_CLASSIC')
# Global attributes
f1.Conventions = "CF-1.6"
f1.title = "Monthly-mean (full water column) fields"
# NOTE(review): 'projectionsby' in the history string looks like a missing
# space ('projections by'); left unchanged here as it is a runtime string.
f1.history = "Simulations were done using a 8x8 km polar stereographic grid projection, however the final " \
"data are presented using a reference grid. Conversion between grid " \
"projectionsby grid2lonlatZ.py"
f1.source = "IMR, ROMSv3.7, IS4DVAR, NorthSea-8km reanalysis"
f1.institution = "Institute of Marine Research, Norway"
f1.references = "http://www.imr.no"
f1.product_version = "1.0"
f1.contact = "[email protected]"
f1.netcdf_version_id = "netCDF-4 Classic"
# Define dimensions
f1.createDimension('time', None)
f1.createDimension('depth', len(outlevels))
f1.createDimension('longitude', len(lon))
f1.createDimension('latitude', len(lat))
# Time coordinate copied from the input dataset f0.
v = f1.createVariable('time', 'd', ('time',))
v0 = f0.variables['time']
v.long_name = 'time'
v.units = "Days since 1948-01-01 00:00:00"
v.calendar = "Gregorian"
ntimes = len(f0.dimensions['time'])
v[:ntimes] = v0[:]