本文整理汇总了Python中netCDF4.Dataset.summary方法的典型用法代码示例。如果您正苦于以下问题:Python Dataset.summary方法的具体用法?Python Dataset.summary怎么用?Python Dataset.summary使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类netCDF4.Dataset
的用法示例。
在下文中一共展示了Dataset.summary方法的6个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: tamoc_nc_file
# 需要导入模块: from netCDF4 import Dataset [as 别名]
# 或者: from netCDF4.Dataset import summary [as 别名]
def tamoc_nc_file(fname, title, summary, source):
    """
    Create a netCDF4 dataset and stamp it with the standard TAMOC header.

    The TAMOC suite stores its output by default in a netCDF dataset
    file; this helper opens that file for writing and records the
    metadata attributes shared by every TAMOC output file.

    Parameters
    ----------
    fname : str
        File name of the file to write
    title : str
        String stating the TAMOC module where the data originated and
        the type of data contained.
    summary : str
        String summarizing what is contained in the dataset or
        information needed to interpret the dataset
    source : str
        String describing the source of the data in the dataset or of
        related datasets

    Returns
    -------
    nc : `netCDF4.Dataset` object
        The open `netCDF4.Dataset` object where the data should be
        stored.
    """
    # Open the output file in the classic netCDF4 format
    nc = Dataset(fname, 'w', format='NETCDF4_CLASSIC')

    # Standard TAMOC header attributes, written in a fixed order
    header = {
        'Conventions': 'TAMOC Modeling Suite Output File',
        'Metadata_Conventions': 'TAMOC Python Model',
        'featureType': 'profile',
        'cdm_data_type': 'Profile',
        'nodc_template_version':
            'NODC_NetCDF_Profile_Orthogonal_Template_v1.0',
        'title': title,
        'summary': summary,
        'source': source,
        'creator_url': 'http://github.com/socolofs/tamoc',
        'date_created': datetime.today().isoformat(' '),
        'date_modified': datetime.today().isoformat(' '),
        'history': 'Creation',
    }
    for name, value in header.items():
        setattr(nc, name, value)

    # Hand the open dataset back to the caller
    return nc
示例2: generate_nc
# 需要导入模块: from netCDF4 import Dataset [as 别名]
# 或者: from netCDF4.Dataset import summary [as 别名]
def generate_nc(parser_context):
    """
    Generate a CF-1.6 netCDF time-series file from one XLS worksheet.

    Each data row holds a year in column 0 followed by twelve monthly
    values; every (row, month) pair becomes one entry on the ``time``
    dimension, timestamped as epoch seconds for the first of that month
    (UTC). Cells that cannot be parsed as floats are skipped so the
    variable's fill value remains in place.

    Parameters
    ----------
    parser_context : object
        Carries ``filepath``, ``worksheet``, ``data_range`` (start/stop
        row indices), ``output_file``, ``variable``, ``standard_name``,
        ``units`` and ``fill_value``.
    """
    parser = XLSParser()
    with open(parser_context.filepath, 'r') as f:
        doc = f.read()
    info = parser.extract_worksheets(doc)
    nccl = info[parser_context.worksheet]
    data_rows = nccl[parser_context.data_range[0]:parser_context.data_range[1]]
    # Fix: was a Python 2 print statement (syntax error on Python 3)
    print('Generating', parser_context.output_file)

    nc = Dataset(parser_context.output_file, 'w')
    nc.createDimension('time', len(data_rows) * 12)

    # Global attributes; title/summary come from the worksheet's first rows
    nc.GDAL = "GDAL 1.9.2, released 2012/10/08"
    nc.history = "Created dynamically in IPython Notebook 2013-11-14"
    nc.title = nccl[0][0]
    nc.summary = nccl[1][0]
    nc.naming_authority = 'GLOS'
    nc.source = 'GLERL'
    nc.standard_name_vocabulary = "http://www.cgd.ucar.edu/cms/eaton/cf-metadata/standard_name.html"
    nc.project = 'GLOS'
    nc.Conventions = "CF-1.6"

    # Coordinate variable: epoch seconds
    time = nc.createVariable('time', 'f8', ('time',))
    time.standard_name = 'time'
    time.units = 'seconds since 1970-01-01'
    time.long_name = 'Time'
    time.axis = 'T'

    # Data variable; name and metadata are supplied by the caller
    precip = nc.createVariable(parser_context.variable, 'f8', ('time',),
                               fill_value=parser_context.fill_value)
    precip.standard_name = parser_context.standard_name
    precip.units = parser_context.units

    for i, row in enumerate(data_rows):
        for j in range(12):  # fix: was Python 2 xrange
            the_date = datetime(row[0], j + 1, 1)
            time[i * 12 + j] = calendar.timegm(the_date.utctimetuple())
            try:
                value = float(row[j + 1])
            except (ValueError, TypeError):
                # Missing or non-numeric cell: keep the fill value
                continue
            precip[i * 12 + j] = value
    nc.close()
示例3: makenetcdf_
# 需要导入模块: from netCDF4 import Dataset [as 别名]
# 或者: from netCDF4.Dataset import summary [as 别名]
#.........这里部分代码省略.........
# NOTE(review): fragment of makenetcdf_ — the function header and the loop
# defining `i` and `fields` are omitted from this excerpt ("部分代码省略").
# Empty input fields are mapped to sentinel fill values:
# -9999 for data columns, -128 for QC flags.
if fields[4] == "":
    sals[i, 0] = -9999
else:
    sals[i, 0] = fields[4]
if fields[5] == "":
    fco2s[i, 0] = -9999
else:
    fco2s[i, 0] = fields[5]
if len(fields[6]) == 0:
    fco2qcs[i, 0] = -128
else:
    # makeqcvalue_ maps the input QC flag to the file's QC convention
    fco2qcs[i, 0] = makeqcvalue_(int(fields[6]))

# Copy the assembled in-memory arrays into the netCDF variables
depthvar[:,:] = depths
positionvar[:,:] = positions
sstvar[:,:] = temps
sssvar[:,:] = sals
fco2var[:,:] = fco2s
fco2qcvar[:,:] = fco2qcs
depthdmvar[:,:] = dms
sstdmvar[:,:] = dms
sssdmvar[:,:] = dms
fco2dmvar[:,:] = dms

# Global attributes following the OceanSITES / Copernicus InSituTAC
# conventions named in nc.Conventions below
nc.id = filenameroot
nc.data_type = "OceanSITES trajectory data"
nc.netcdf_version = "netCDF-4 classic model"
nc.format_version = "1.2"
nc.Conventions = "CF-1.6 OceanSITES-Manual-1.2 Copernicus-InSituTAC-SRD-1.3 "\
    + "Copernicus-InSituTAC-ParametersList-3.1.0"
nc.cdm_data_type = "Trajectory"
nc.data_mode = "R"
nc.area = "Global Ocean"
# Geographic extent, stored as strings
nc.geospatial_lat_min = str(minlat)
nc.geospatial_lat_max = str(maxlat)
nc.geospatial_lon_min = str(minlon)
nc.geospatial_lon_max = str(maxlon)
nc.geospatial_vertical_min = "5.00"
nc.geospatial_vertical_max = "5.00"
nc.last_latitude_observation = lats[-1]
nc.last_longitude_observation = lons[-1]
nc.last_date_observation = endtime.strftime("%Y-%m-%dT%H:%M:%SZ")
nc.time_coverage_start = starttime.strftime("%Y-%m-%dT%H:%M:%SZ")
nc.time_coverage_end = endtime.strftime("%Y-%m-%dT%H:%M:%SZ")
#datasetdate = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ")
#nc.date_update = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ")
#nc.history = datasetdate + " : Creation"
nc.update_interval = "daily"
nc.data_assembly_center = "BERGEN"
nc.institution = "University of Bergen / Geophysical Institute"
nc.institution_edmo_code = "4595"
nc.institution_references = " "
nc.contact = "[email protected]"
nc.title = "Global Ocean - In Situ near-real time carbon observation"
nc.author = "cmems-service"
nc.naming_authority = "Copernicus"
# Platform identity is derived from the platform_code argument
nc.platform_code = getplatformcallsign_(platform_code)
nc.site_code = getplatformcallsign_(platform_code)
# For buoys -> Mooring observation.
platform_category_code = getplatformcategorycode_(platform_code)
nc.platform_name = getplatformname_(platform_code)
nc.source_platform_category_code = platform_category_code
nc.source = PLATFORM_CODES[platform_category_code]
nc.quality_control_indicator = "6"  # "Not used"
nc.quality_index = "0"
nc.comment = " "
nc.summary = " "
nc.reference = "http://marine.copernicus.eu/, https://www.icos-cp.eu/"
nc.citation = "These data were collected and made freely available by the " \
    + "Copernicus project and the programs that contribute to it."
nc.distribution_statement = "These data follow Copernicus standards; they " \
    + "are public and free of charge. User assumes all risk for use of data. " \
    + "User must display citation in any publication or product using data. " \
    + "User must contact PI prior to any commercial use of data."
# Write the netCDF
nc.close()
# Read the netCDF file into memory
with open(ncpath, "rb") as ncfile:
    ncbytes = ncfile.read()
# Delete the temp netCDF file
os.remove(ncpath)
# Return the base filename together with the raw netCDF bytes
return [filenameroot, ncbytes]
示例4: Dataset
# 需要导入模块: from netCDF4 import Dataset [as 别名]
# 或者: from netCDF4.Dataset import summary [as 别名]
# Assume that /projects/CHARIS is sshfs mounted on this machine, and
# that the user has write permission
fid = Dataset('~/projects/CHARIS/snow_cover/modice.v0.4/min05yr_nc/MODICE.v0.4.1test.nc', 'w', format='NETCDF4')
fid.Conventions = "CF-1.6"
# NOTE(review): rebinding `fid` here leaves the MODICE dataset above open
# and never closed; the two snippets look pasted together — confirm which
# output file is actually intended before running.
fid = Dataset('/home/vagrant/measures-byu/src/prod/cetb_file/templates/cetb_global_template.nc', 'w', format='NETCDF4')
fid.Conventions = "CF-1.6"
fid.title = "MODICE mask for a minimum number of years"
fid.product_version = "v0.4"
#fid.software_version_id = "TBD"
#fid.software_repository = "[email protected]:nsidc/measures-byu.git"
fid.source = "MODICE"
fid.source_version_id = "v04"
fid.history = ""
fid.comment = "Mask locations with 2 indicate MODICE for >= min_years."
fid.references = "Painter, T. H., Brodzik, M. J., A. Racoviteanu, R. Armstrong. 2012. Automated mapping of Earth's annual minimum exposed snow and ice with MODIS. Geophysical Research Letters, 39(20):L20501, doi:10.1029/2012GL053340."
fid.summary = ["An improved, enhanced-resolution, gridded passive microwave Earth System Data Record \n",
               "for monitoring cryospheric and hydrologic time series\n"]
# Bug fix: this assignment was fused onto the closing bracket of the
# previous line ("]fid.title = ..."), which is a syntax error; it is now
# its own statement (and deliberately overwrites the earlier title, as
# the de-fused original code would have done).
fid.title = "MEaSUREs Calibrated Passive Microwave Daily EASE-Grid 2.0 Brightness Temperature ESDR"
fid.institution = ["National Snow and Ice Data Center\n",
                   "Cooperative Institute for Research in Environmental Sciences\n",
                   "University of Colorado at Boulder\n",
                   "Boulder, CO"]
fid.publisher = ["National Snow and Ice Data Center\n",
                 "Cooperative Institute for Research in Environmental Sciences\n",
                 "University of Colorado at Boulder\n",
                 "Boulder, CO"]
fid.publisher_url = "http://nsidc.org/charis"
fid.publisher_email = "[email protected]"
fid.project = "CHARIS"
fid.standard_name_vocabulary = "CF Standard Name Table (v27, 28 September 2013)"
fid.cdm_data_type = "grid"
fid.keywords = "EARTH SCIENCE > SPECTRAL/ENGINEERING > MICROWAVE > BRIGHTNESS TEMPERATURE"
fid.keywords_vocabulary = "NASA Global Change Master Directory (GCMD) Earth Science Keywords, Version 8.1"
示例5: range
# 需要导入模块: from netCDF4 import Dataset [as 别名]
# 或者: from netCDF4.Dataset import summary [as 别名]
# NOTE(review): fragment — `nc_file`, `nc_lat_var`, `nc_time`,
# `nc_station_names` and the nc_* metadata values (nc_title, nc_summary,
# etc.) are created in the omitted portion above this excerpt.
nc_lat_var.long_name = 'Latitude'
#nc_lat_var.standard_name = 'latitude'
# lon variable attributes
nc_lon_var = nc_file.createVariable('lon', 'f4',('station_nm',))
nc_lon_var.units = 'degree_east'
nc_lon_var.long_name = 'Longitude'
#nc_lon_var.standard_name = 'longitude'
# Create ncdf attributes
nc_file.WML_Conventions = 'CF-1.6'
nc_file.WML_featureType = 'timeSeries'
nc_file.WML_cdm_data_type = 'Station'
nc_file.WML_standard_name_vocabulary = 'CF-1.6'
nc_file.title = nc_title
nc_file.summary = nc_summary
nc_file.id = 'testing_id'
# NOTE(review): attribute name looks misspelled — presumably
# 'naming_authority'; confirm before renaming, since readers of the
# produced files may look up the misspelled attribute.
nc_file.naming_authory = 'testing_authority'
nc_file.WML_date_created = nc_date_create
nc_file.WML_creator_name = nc_creator_name
nc_file.creator_email = nc_creator_email
nc_file.project = nc_project
nc_file.processing_level = nc_proc_level
nc_file.WML_profile = 'single variable'
# data: 12 half-day steps starting 2001-03-01, converted with the time
# variable's own units/calendar
dates = [datetime(2001,3,1)+n*timedelta(hours=12) for n in range(12)]
nc_time[:] = date2num(dates,units=nc_time.units,calendar=nc_time.calendar)
#nc_station_names[:] = [stringtoarr("aaaa",4),stringtoarr("bbbb",4)]
# Placeholder station names, padded to 4 characters
dummy = [stringtoarr("aaaa",4),stringtoarr("bbbb",4)]
nc_station_names[:] = dummy
示例6: initialize_output
# 需要导入模块: from netCDF4 import Dataset [as 别名]
# 或者: from netCDF4.Dataset import summary [as 别名]
def initialize_output(filename, id_dim_name, time_len,
                      id_len, time_step_seconds):
    """Creates netCDF file with CF dimensions and variables, but no data.

    Arguments:
    filename -- full path and filename for output netCDF file
    id_dim_name -- name of Id dimension and variable, e.g., COMID
    time_len -- (integer) length of time dimension (number of time steps)
    id_len -- (integer) length of Id dimension (number of time series)
    time_step_seconds -- (integer) number of seconds per time step

    NOTE(review): the function body continues past this excerpt; the
    returned value is not visible here.
    """
    # NETCDF3_CLASSIC keeps the output readable by legacy tools
    cf_nc = Dataset(filename, 'w', format='NETCDF3_CLASSIC')
    # Create global attributes
    log('    globals', 'DEBUG')
    cf_nc.featureType = 'timeSeries'
    cf_nc.Metadata_Conventions = 'Unidata Dataset Discovery v1.0'
    cf_nc.Conventions = 'CF-1.6'
    cf_nc.cdm_data_type = 'Station'
    cf_nc.nodc_template_version = (
        'NODC_NetCDF_TimeSeries_Orthogonal_Template_v1.1')
    cf_nc.standard_name_vocabulary = ('NetCDF Climate and Forecast (CF) ' +
                                      'Metadata Convention Standard Name ' +
                                      'Table v28')
    cf_nc.title = 'RAPID Result'
    cf_nc.summary = ("Results of RAPID river routing simulation. Each river " +
                     "reach (i.e., feature) is represented by a point " +
                     "feature at its midpoint, and is identified by the " +
                     "reach's unique NHDPlus COMID identifier.")
    cf_nc.time_coverage_resolution = 'point'
    # Geospatial extents start at 0.0 — presumably placeholders updated
    # once real lat/lon data are written; confirm in the omitted remainder
    cf_nc.geospatial_lat_min = 0.0
    cf_nc.geospatial_lat_max = 0.0
    cf_nc.geospatial_lat_units = 'degrees_north'
    cf_nc.geospatial_lat_resolution = 'midpoint of stream feature'
    cf_nc.geospatial_lon_min = 0.0
    cf_nc.geospatial_lon_max = 0.0
    cf_nc.geospatial_lon_units = 'degrees_east'
    cf_nc.geospatial_lon_resolution = 'midpoint of stream feature'
    cf_nc.geospatial_vertical_min = 0.0
    cf_nc.geospatial_vertical_max = 0.0
    cf_nc.geospatial_vertical_units = 'm'
    cf_nc.geospatial_vertical_resolution = 'midpoint of stream feature'
    cf_nc.geospatial_vertical_positive = 'up'
    cf_nc.project = 'National Flood Interoperability Experiment'
    cf_nc.processing_level = 'Raw simulation result'
    cf_nc.keywords_vocabulary = ('NASA/Global Change Master Directory ' +
                                 '(GCMD) Earth Science Keywords. Version ' +
                                 '8.0.0.0.0')
    cf_nc.keywords = 'DISCHARGE/FLOW'
    cf_nc.comment = 'Result time step (seconds): ' + str(time_step_seconds)
    timestamp = datetime.utcnow().isoformat() + 'Z'
    cf_nc.date_created = timestamp
    cf_nc.history = (timestamp + '; added time, lat, lon, z, crs variables; ' +
                     'added metadata to conform to NODC_NetCDF_TimeSeries_' +
                     'Orthogonal_Template_v1.1')
    # Create dimensions
    log('    dimming', 'DEBUG')
    cf_nc.createDimension('time', time_len)
    cf_nc.createDimension(id_dim_name, id_len)
    # Create variables
    log('    timeSeries_var', 'DEBUG')
    timeSeries_var = cf_nc.createVariable(id_dim_name, 'i4', (id_dim_name,))
    timeSeries_var.long_name = (
        'Unique NHDPlus COMID identifier for each river reach feature')
    timeSeries_var.cf_role = 'timeseries_id'
    log('    time_var', 'DEBUG')
    time_var = cf_nc.createVariable('time', 'i4', ('time',))
    time_var.long_name = 'time'
    time_var.standard_name = 'time'
    time_var.units = 'seconds since 1970-01-01 00:00:00 0:00'
    time_var.axis = 'T'
    log('    lat_var', 'DEBUG')
    # -9999.0 marks stations with no coordinate information
    lat_var = cf_nc.createVariable('lat', 'f8', (id_dim_name,),
                                   fill_value=-9999.0)
    lat_var.long_name = 'latitude'
    lat_var.standard_name = 'latitude'
    lat_var.units = 'degrees_north'
    lat_var.axis = 'Y'
    log('    lon_var', 'DEBUG')
    lon_var = cf_nc.createVariable('lon', 'f8', (id_dim_name,),
                                   fill_value=-9999.0)
    lon_var.long_name = 'longitude'
    lon_var.standard_name = 'longitude'
    lon_var.units = 'degrees_east'
    lon_var.axis = 'X'
    log('    z_var', 'DEBUG')
    z_var = cf_nc.createVariable('z', 'f8', (id_dim_name,),
                                 fill_value=-9999.0)
    z_var.long_name = ('Elevation referenced to the North American ' +
                       'Vertical Datum of 1988 (NAVD88)')
    z_var.standard_name = 'surface_altitude'
    z_var.units = 'm'
#.........这里部分代码省略.........