本文整理汇总了Python中netCDF4.Dataset.cdm_data_type方法的典型用法代码示例。如果您正苦于以下问题:Python Dataset.cdm_data_type方法的具体用法?Python Dataset.cdm_data_type怎么用?Python Dataset.cdm_data_type使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类netCDF4.Dataset
的用法示例。
在下文中一共展示了Dataset.cdm_data_type方法的7个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: tamoc_nc_file
# 需要导入模块: from netCDF4 import Dataset [as 别名]
# 或者: from netCDF4.Dataset import cdm_data_type [as 别名]
def tamoc_nc_file(fname, title, summary, source):
    """
    Create a netCDF file and write the standard TAMOC header metadata.

    The TAMOC suite stores its output by default in a netCDF dataset
    file.  This helper opens that file for writing and stamps the
    standard TAMOC metadata into its header.

    Parameters
    ----------
    fname : str
        File name of the file to write
    title : str
        String stating the TAMOC module where the data originated and
        the type of data contained.
    summary : str
        String summarizing what is contained in the dataset or
        information needed to interpret the dataset
    source : str
        String describing the source of the data in the dataset or of
        related datasets

    Returns
    -------
    nc : `netCDF4.Dataset` object
        The open `netCDF4.Dataset` object where the data should be
        stored.
    """
    # Open a new classic-model netCDF4 file for writing
    nc = Dataset(fname, 'w', format='NETCDF4_CLASSIC')

    # Standard TAMOC header attributes.  Collecting them in a dict keeps
    # the fixed and caller-supplied metadata together; insertion order
    # preserves the original attribute-creation order in the file.
    header = {
        'Conventions': 'TAMOC Modeling Suite Output File',
        'Metadata_Conventions': 'TAMOC Python Model',
        'featureType': 'profile',
        'cdm_data_type': 'Profile',
        'nodc_template_version':
            'NODC_NetCDF_Profile_Orthogonal_Template_v1.0',
        'title': title,
        'summary': summary,
        'source': source,
        'creator_url': 'http://github.com/socolofs/tamoc',
        'date_created': datetime.today().isoformat(' '),
        'date_modified': datetime.today().isoformat(' '),
        'history': 'Creation',
    }
    for attr_name, attr_value in header.items():
        setattr(nc, attr_name, attr_value)

    # Hand the open dataset back to the caller for data writing
    return nc
示例2: makenetcdf_
# 需要导入模块: from netCDF4 import Dataset [as 别名]
# 或者: from netCDF4.Dataset import cdm_data_type [as 别名]
#.........这里部分代码省略.........
# Continuation of makenetcdf_: populate one row of the per-record data
# arrays from the split input line (`fields`), then copy the arrays into
# the netCDF variables and write the OceanSITES global attributes.
# Empty input fields become fill values (-9999 for data, -128 for QC).
if fields[4] == "":
sals[i, 0] = -9999
else:
sals[i, 0] = fields[4]
if fields[5] == "":
fco2s[i, 0] = -9999
else:
fco2s[i, 0] = fields[5]
if len(fields[6]) == 0:
fco2qcs[i, 0] = -128
else:
# Map the source QC flag onto the output QC convention
fco2qcs[i, 0] = makeqcvalue_(int(fields[6]))
# Copy the assembled arrays into their netCDF variables
depthvar[:,:] = depths
positionvar[:,:] = positions
sstvar[:,:] = temps
sssvar[:,:] = sals
fco2var[:,:] = fco2s
fco2qcvar[:,:] = fco2qcs
# All data-mode variables share the same values (data_mode "R" below)
depthdmvar[:,:] = dms
sstdmvar[:,:] = dms
sssdmvar[:,:] = dms
fco2dmvar[:,:] = dms
# Global attributes
nc.id = filenameroot
nc.data_type = "OceanSITES trajectory data"
nc.netcdf_version = "netCDF-4 classic model"
nc.format_version = "1.2"
nc.Conventions = "CF-1.6 OceanSITES-Manual-1.2 Copernicus-InSituTAC-SRD-1.3 "\
+ "Copernicus-InSituTAC-ParametersList-3.1.0"
nc.cdm_data_type = "Trajectory"
nc.data_mode = "R"
nc.area = "Global Ocean"
# Geospatial bounding box of the trajectory (stored as strings)
nc.geospatial_lat_min = str(minlat)
nc.geospatial_lat_max = str(maxlat)
nc.geospatial_lon_min = str(minlon)
nc.geospatial_lon_max = str(maxlon)
# Fixed measurement depth of 5 m for this platform
nc.geospatial_vertical_min = "5.00"
nc.geospatial_vertical_max = "5.00"
nc.last_latitude_observation = lats[-1]
nc.last_longitude_observation = lons[-1]
nc.last_date_observation = endtime.strftime("%Y-%m-%dT%H:%M:%SZ")
nc.time_coverage_start = starttime.strftime("%Y-%m-%dT%H:%M:%SZ")
nc.time_coverage_end = endtime.strftime("%Y-%m-%dT%H:%M:%SZ")
#datasetdate = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ")
#nc.date_update = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ")
#nc.history = datasetdate + " : Creation"
nc.update_interval = "daily"
# Data-centre / contact metadata
nc.data_assembly_center = "BERGEN"
nc.institution = "University of Bergen / Geophysical Institute"
nc.institution_edmo_code = "4595"
nc.institution_references = " "
nc.contact = "[email protected]"
nc.title = "Global Ocean - In Situ near-real time carbon observation"
nc.author = "cmems-service"
nc.naming_authority = "Copernicus"
# Platform identification resolved from the platform code
nc.platform_code = getplatformcallsign_(platform_code)
nc.site_code = getplatformcallsign_(platform_code)
# For buoys -> Mooring observation.
platform_category_code = getplatformcategorycode_(platform_code)
nc.platform_name = getplatformname_(platform_code)
nc.source_platform_category_code = platform_category_code
nc.source = PLATFORM_CODES[platform_category_code]
nc.quality_control_indicator = "6" # "Not used"
nc.quality_index = "0"
nc.comment = " "
nc.summary = " "
nc.reference = "http://marine.copernicus.eu/, https://www.icos-cp.eu/"
nc.citation = "These data were collected and made freely available by the " \
+ "Copernicus project and the programs that contribute to it."
nc.distribution_statement = "These data follow Copernicus standards; they " \
+ "are public and free of charge. User assumes all risk for use of data. " \
+ "User must display citation in any publication or product using data. " \
+ "User must contact PI prior to any commercial use of data."
# Write the netCDF
nc.close()
# Read the netCDF file into memory
with open(ncpath, "rb") as ncfile:
ncbytes = ncfile.read()
# Delete the temp netCDF file
os.remove(ncpath)
# Return the filename root together with the raw bytes of the file
return [filenameroot, ncbytes]
示例3: modify_soop_trv_netcdf
# 需要导入模块: from netCDF4 import Dataset [as 别名]
# 或者: from netCDF4.Dataset import cdm_data_type [as 别名]
def modify_soop_trv_netcdf(netcdf_file_path, channel_id_info):
    """
    Modify the downloaded NetCDF file so it passes both CF and IMOS checker.

    Parameters
    ----------
    netcdf_file_path : str
        Path of the netCDF file to modify in place.
    channel_id_info : tuple
        Information from the XML for the channel (provides
        'channel_id' and 'trip_id' entries).

    Returns
    -------
    bool
        True on success; False when the ship code is unknown or the
        CF-to-IMOS time conversion fails.
    """
    logger = logging_aims()
    modify_aims_netcdf(netcdf_file_path, channel_id_info)

    netcdf_file_obj = Dataset(netcdf_file_path, 'a', format='NETCDF4')
    ship_code = netcdf_file_obj.platform_code
    vessel_name = ship_callsign(ship_code)
    if vessel_name is None:
        # Without a known vessel we cannot populate the required
        # global attributes; close the file and abort.
        logger.error(' UNKNOWN SHIP - channel %s' % str(channel_id_info['channel_id']))
        netcdf_file_obj.close()
        return False

    # Add global attributes to the netCDF.
    # (Fix: the original assigned cdm_data_type twice — once with
    # single quotes and once with double quotes; one assignment is
    # sufficient.)
    netcdf_file_obj.cdm_data_type = 'Trajectory'
    netcdf_file_obj.vessel_name = vessel_name
    netcdf_file_obj.trip_id = channel_id_info['trip_id']
    coordinates_att = "TIME LATITUDE LONGITUDE DEPTH"

    # depth: set CF vertical-axis attributes, then rename to the IMOS
    # upper-case convention
    depth = netcdf_file_obj.variables['depth']
    depth.positive = 'down'
    depth.axis = 'Z'
    depth.reference_datum = 'sea surface'
    depth.valid_max = 30.0
    depth.valid_min = -10.0
    netcdf_file_obj.renameVariable('depth', 'DEPTH')

    # latitude / longitude: link ancillary QC variables and label them
    latitude = netcdf_file_obj.variables['LATITUDE']
    latitude.ancillary_variables = 'LATITUDE_quality_control'
    longitude = netcdf_file_obj.variables['LONGITUDE']
    longitude.ancillary_variables = 'LONGITUDE_quality_control'

    latitude_qc = netcdf_file_obj.variables['LATITUDE_quality_control']
    latitude_qc.long_name = 'LATITUDE quality control'
    latitude_qc.standard_name = 'latitude status_flag'
    longitude_qc = netcdf_file_obj.variables['LONGITUDE_quality_control']
    longitude_qc.long_name = 'LONGITUDE quality control'
    longitude_qc.standard_name = 'longitude status_flag'
    netcdf_file_obj.close()

    # Reopen the file (as the original did) before attaching the
    # coordinates attribute to the main measured variable.
    netcdf_file_obj = Dataset(netcdf_file_path, 'a', format='NETCDF4')
    main_var = get_main_soop_trv_var(netcdf_file_path)
    netcdf_file_obj.variables[main_var].coordinates = coordinates_att
    netcdf_file_obj.close()

    if not convert_time_cf_to_imos(netcdf_file_path):
        return False
    remove_dimension_from_netcdf(netcdf_file_path)  # last modification to do !
    return True
示例4: create_mhl_sst_ncfile
# 需要导入模块: from netCDF4 import Dataset [as 别名]
# 或者: from netCDF4.Dataset import cdm_data_type [as 别名]
def create_mhl_sst_ncfile(txtfile, site_code_short, data,
                          time, dtime, spatial_data):
    """
    Create a NetCDF file for MHL sea surface temperature (SST) data.

    (Fix: the original docstring said "MHL Wave data", but this
    function writes the sea water temperature product.)

    Parameters
    ----------
    txtfile : str
        Path of the source text file; its basename is recorded in the
        ``sourceFilename`` global attribute.
    site_code_short : str
        Short site key used to look up the site code and name in
        ``site_list``.
    data : DataFrame-like
        Parsed data table; must support ``fillna`` and provide a
        'SEA_TEMP' column.
    time : sequence
        Numeric time values written to the TIME variable.
    dtime : sequence of datetime
        Datetimes used for the output filename and the time-coverage
        attributes.
    spatial_data : sequence
        (deployment number, latitude, longitude) for this deployment.
    """
    site_code = site_list[site_code_short][0]
    netcdf_filename = create_netcdf_filename(site_code, data, dtime)
    netcdf_filepath = os.path.join(
        output_folder, "%s.nc") % netcdf_filename
    ncfile = Dataset(netcdf_filepath, "w", format="NETCDF4")

    # generate site and deployment specific attributes
    ncfile.title = ("IMOS - ANMN New South Wales(NSW) %s "
                    "Sea water temperature (%s) -"
                    "Deployment No. %s %s to %s") % (
        site_list[site_code_short][1], site_code,
        spatial_data[0], min(dtime).strftime("%d-%m-%Y"),
        max(dtime).strftime("%d-%m-%Y"))
    ncfile.institution = 'Manly Hydraulics Laboratory'
    ncfile.keywords = ('Oceans | Ocean temperature |'
                       'Sea Surface Temperature')
    ncfile.principal_investigator = 'Mark Kulmar'
    ncfile.cdm_data_type = 'Station'
    ncfile.platform_code = site_code

    abstract_default = ("The sea water temperature is measured by a thermistor mounted in the "
                        "buoy hull approximately 400 mm below the water "
                        "surface. The thermistor has a resolution of 0.05 "
                        "Celsius and an accuracy of 0.2 Celsius. The "
                        "measurements are transmitted to a shore station "
                        "where it is stored on a PC before routine transfer "
                        "to Manly Hydraulics Laboratory via email.")

    # Site-specific abstract: these four sites are described without an
    # offshore-distance figure; all others include it.
    if site_code_short in ['COF', 'CRH', 'EDE', 'PTK']:
        abstract_specific = ("This dataset contains sea water temperature "
                             "data collected by a wave monitoring buoy moored off %s. ") % site_list[site_code_short][1]
    else:
        abstract_specific = ("This dataset contains sea water temperature "
                             "data collected by a wave monitoring buoy moored off %s "
                             "approximately %s kilometres from the coastline. ") % (
            site_list[site_code_short][1], site_list[site_code_short][2])
    ncfile.abstract = abstract_specific + abstract_default
    ncfile.comment = ("The sea water temperature data (SST) is routinely quality controlled (usually twice per week) "
                      "using a quality control program developed by Manly Hydraulics Laboratory. The SST data gathered "
                      "by the buoy is regularly compared to the latest available satellite derived sea SST images available "
                      "from the Bluelink ocean forecasting web pages to ensure the integrity of the dataset. Erroneous SST "
                      "records are removed and good quality data is flagged as \'Quality Controlled\' in the "
                      "Manly Hydraulics Laboratory SST database.")
    ncfile.sourceFilename = os.path.basename(txtfile)
    ncfile.date_created = datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ")
    ncfile.time_coverage_start = min(dtime).strftime("%Y-%m-%dT%H:%M:%SZ")
    ncfile.time_coverage_end = max(dtime).strftime("%Y-%m-%dT%H:%M:%SZ")
    # Point measurement: min/max bounds collapse to the buoy position
    ncfile.geospatial_lat_min = spatial_data[1]
    ncfile.geospatial_lat_max = spatial_data[1]
    ncfile.geospatial_lon_min = spatial_data[2]
    ncfile.geospatial_lon_max = spatial_data[2]
    ncfile.geospatial_vertical_max = 0.
    ncfile.geospatial_vertical_min = 0.
    ncfile.deployment_number = str(spatial_data[0])

    # add dimension and variables
    ncfile.createDimension('TIME', len(time))

    TIME = ncfile.createVariable('TIME', "d", 'TIME')
    TIMESERIES = ncfile.createVariable('TIMESERIES', "i")
    LATITUDE = ncfile.createVariable(
        'LATITUDE', "d", fill_value=99999.)
    LONGITUDE = ncfile.createVariable(
        'LONGITUDE', "d", fill_value=99999.)
    TEMP = ncfile.createVariable('TEMP', "f", 'TIME', fill_value=99999.)

    # add global attributes and variable attributes stored in config files
    config_file = os.path.join(os.getcwd(), 'global_att_sst.att')
    generate_netcdf_att(ncfile, config_file,
                        conf_file_point_of_truth=False)

    # replace nans with fillvalue in dataframe
    data = data.fillna(value=float(99999.))

    TIME[:] = time
    TIMESERIES[:] = 1
    LATITUDE[:] = spatial_data[1]
    LONGITUDE[:] = spatial_data[2]
    TEMP[:] = data['SEA_TEMP'].values
    ncfile.close()
示例5: Table
# 需要导入模块: from netCDF4 import Dataset [as 别名]
# 或者: from netCDF4.Dataset import cdm_data_type [as 别名]
# Global attributes for the MEaSUREs calibrated passive-microwave
# brightness-temperature ESDR file (`fid` is the open netCDF4.Dataset,
# created earlier in this example).
fid.references = "Painter, T. H., Brodzik, M. J., A. Racoviteanu, R. Armstrong. 2012. Automated mapping of Earth's annual minimum exposed snow and ice with MODIS. Geophysical Research Letters, 39(20):L20501, doi:10.1029/2012GL053340."
fid.summary = ["An improved, enhanced-resolution, gridded passive microwave Earth System Data Record \n",
               "for monitoring cryospheric and hydrologic time series\n"]
# Fix: in the original, "fid.title = ..." was fused onto the end of the
# summary list's closing bracket (a copy/paste artifact that made the
# code a syntax error); it must be its own statement.
fid.title = "MEaSUREs Calibrated Passive Microwave Daily EASE-Grid 2.0 Brightness Temperature ESDR"
fid.institution = ["National Snow and Ice Data Center\n",
                   "Cooperative Institute for Research in Environmental Sciences\n",
                   "University of Colorado at Boulder\n",
                   "Boulder, CO"]
fid.publisher = ["National Snow and Ice Data Center\n",
                 "Cooperative Institute for Research in Environmental Sciences\n",
                 "University of Colorado at Boulder\n",
                 "Boulder, CO"]
fid.publisher_url = "http://nsidc.org/charis"
fid.publisher_email = "[email protected]"
fid.project = "CHARIS"
fid.standard_name_vocabulary = "CF Standard Name Table (v27, 28 September 2013)"
fid.cdm_data_type = "grid"
fid.keywords = "EARTH SCIENCE > SPECTRAL/ENGINEERING > MICROWAVE > BRIGHTNESS TEMPERATURE"
fid.keywords_vocabulary = "NASA Global Change Master Directory (GCMD) Earth Science Keywords, Version 8.1"
# Placeholder values still to be filled in before release
fid.platform = "TBD"
fid.sensor = "TBD"
fid.naming_authority = "org.doi.dx"
fid.id = "10.5067/MEASURES/CRYOSPHERE/nsidc-0630.001"
fid.date_created = "TBD"
fid.acknowledgement = ["This data set was created with funding from NASA MEaSUREs Grant #NNX13AI23A.\n",
                       "Data archiving and distribution is supported by the NASA NSIDC Distributed Active Archive Center (DAAC)."]
fid.license = "No constraints on data access or use"
fid.processing_level = "Level 3"
fid.creator_name = "Mary J. Brodzik"
fid.creator_email = "[email protected]"
fid.creator_url = "http://nsidc.org/charis"
fid.contributor_name = "T. H. Painter, M. J. Brodzik, R. L. Armstrong"
示例6: create_mhl_wave_ncfile
# 需要导入模块: from netCDF4 import Dataset [as 别名]
# 或者: from netCDF4.Dataset import cdm_data_type [as 别名]
def create_mhl_wave_ncfile(txtfile, site_code_short, data,
time, dtime, spatial_data):
"""
Create a NetCDF file for MHL wave data.

txtfile -- source text file; its basename is stored as sourceFilename
site_code_short -- short site key used to look up site_list
data -- parsed data table, used when building the netCDF filename
time -- numeric time values for the TIME dimension/variable
dtime -- datetimes used for filename and time-coverage attributes
spatial_data -- (deployment number, latitude, longitude)
"""
site_code = site_list[site_code_short][0]
netcdf_filename = create_netcdf_filename(site_code, data, dtime)
netcdf_filepath = os.path.join(
output_folder, "%s.nc") % netcdf_filename
ncfile = Dataset(netcdf_filepath, "w", format="NETCDF4")
# add IMOS1.4 global attributes and variable attributes stored in config
# files
config_file = os.path.join(os.getcwd(),'mhl_wave_library', 'global_att_wave.att')
generate_netcdf_att(ncfile, config_file,
conf_file_point_of_truth=False)
# Additional attributes are either retrieved from the original netcdf
# file (if it exists) or defined below
original_netcdf_file_path = os.path.join(
input_folder, "%s.nc") % netcdf_filename
if os.path.exists(original_netcdf_file_path):
# get global attributes from the original netcdf file
parse_nc_attribute(original_netcdf_file_path, ncfile)
else:
# generate site and deployment specific attributes
# NOTE(review): "(% s)" below uses the old-style space flag, which is
# harmless for %s but looks like a typo for "(%s)" — confirm intended
# output before changing the string.
ncfile.title = ("IMOS - ANMN New South Wales(NSW) %s"
"Offshore Wave Data (% s) -"
"Deployment No. %s %s to %s") % (
site_list[site_code_short][1], site_code,
spatial_data[0], min(dtime).strftime("%d-%m-%Y"),
max(dtime).strftime("%d-%m-%Y"))
ncfile.institution = 'Manly Hydraulics Laboratory'
ncfile.keywords = ('Oceans | Ocean Waves |'
'Significant Wave Height, Oceans | Ocean Waves'
'| Wave Period, Oceans | Ocean Waves |'
'Wave Spectra, Oceans | Ocean Waves |'
'Wave Speed / direction')
ncfile.principal_investigator = 'Mark Kulmar'
ncfile.cdm_data_type = 'Station'
ncfile.platform_code = site_code
ncfile.site_name = site_list[site_code_short][1]
# choose the site-specific abstract config file
if site_code in ['WAVEPOK', 'WAVECOH', 'WAVECRH', 'WAVEEDN']:
config_file = os.path.join(
os.getcwd(), 'common', 'abstract_WAVE_default.att')
elif site_code == 'WAVEBAB':
config_file = os.path.join(os.getcwd(),'common', 'abstract_WAVEBAB.att')
elif site_code == 'WAVEBYB':
config_file = os.path.join(os.getcwd(), 'common', 'abstract_WAVEBYB.att')
else: # WAVESYD
config_file = os.path.join(os.getcwd(), 'common', 'abstract_WAVESYD.att')
generate_netcdf_att(ncfile, config_file,
conf_file_point_of_truth=False)
ncfile.sourceFilename = os.path.basename(txtfile)
ncfile.date_created = datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ")
ncfile.time_coverage_start = min(dtime).strftime("%Y-%m-%dT%H:%M:%SZ")
ncfile.time_coverage_end = max(dtime).strftime("%Y-%m-%dT%H:%M:%SZ")
# Point location: lat/lon min and max collapse to the buoy position
ncfile.geospatial_lat_min = spatial_data[1]
ncfile.geospatial_lat_max = spatial_data[1]
ncfile.geospatial_lon_min = spatial_data[2]
ncfile.geospatial_lon_max = spatial_data[2]
ncfile.geospatial_vertical_max = 0.
ncfile.geospatial_vertical_min = 0.
ncfile.deployment_number = str(spatial_data[0])
# add dimension and variables
ncfile.createDimension('TIME', len(time))
TIME = ncfile.createVariable('TIME', "d", 'TIME')
TIMESERIES = ncfile.createVariable('TIMESERIES', "i")
LATITUDE = ncfile.createVariable(
'LATITUDE', "d", fill_value=99999.)
LONGITUDE = ncfile.createVariable(
'LONGITUDE', "d", fill_value=99999.)
# Wave-parameter variables (WHTH significant height, WMXH maximum
# height, WPPE peak period, etc. — names per the IMOS convention)
WHTH = ncfile.createVariable('WHTH', "f", 'TIME', fill_value=99999.)
WMSH = ncfile.createVariable('WMSH', "f", 'TIME', fill_value=99999.)
HRMS = ncfile.createVariable('HRMS', "f", 'TIME', fill_value=99999.)
WHTE = ncfile.createVariable('WHTE', "f", 'TIME', fill_value=99999.)
WMXH = ncfile.createVariable('WMXH', "f", 'TIME', fill_value=99999.)
TCREST = ncfile.createVariable('TCREST', "f", 'TIME', fill_value=99999.)
WPMH = ncfile.createVariable('WPMH', "f", 'TIME', fill_value=99999.)
WPTH = ncfile.createVariable('WPTH', "f", 'TIME', fill_value=99999.)
YRMS = ncfile.createVariable('YRMS', "f", 'TIME', fill_value=99999.)
WPPE = ncfile.createVariable('WPPE', "f", 'TIME', fill_value=99999.)
TP2 = ncfile.createVariable('TP2', "f", 'TIME', fill_value=99999.)
M0 = ncfile.createVariable('M0', "f", 'TIME', fill_value=99999.)
WPDI = ncfile.createVariable('WPDI', "f", 'TIME', fill_value=99999.)
# add global attributes and variable attributes stored in config files
config_file = os.path.join(os.getcwd(),'mhl_wave_library', 'global_att_wave.att')
generate_netcdf_att(ncfile, config_file,
conf_file_point_of_truth=True)
# coerce the valid_min/valid_max read from the config into float32 so
# they match the "f" (float32) variable type
for nc_var in [WPTH, WPPE, WPMH, WPDI, WMXH,WMSH, WHTH, WHTE, TP2, TCREST]:
nc_var.valid_max = np.float32(nc_var.valid_max)
nc_var.valid_min = np.float32(nc_var.valid_min)
# replace nans with fillvalue in dataframe
#......... remainder of this function omitted in this excerpt .........
示例7: initialize_output
# 需要导入模块: from netCDF4 import Dataset [as 别名]
# 或者: from netCDF4.Dataset import cdm_data_type [as 别名]
def initialize_output(filename, id_dim_name, time_len,
id_len, time_step_seconds):
"""Creates netCDF file with CF dimensions and variables, but no data.

Arguments:
filename -- full path and filename for output netCDF file
id_dim_name -- name of Id dimension and variable, e.g., COMID
time_len -- (integer) length of time dimension (number of time steps)
id_len -- (integer) length of Id dimension (number of time series)
time_step_seconds -- (integer) number of seconds per time step
"""
# NETCDF3_CLASSIC keeps the output readable by legacy netCDF-3 tools
cf_nc = Dataset(filename, 'w', format='NETCDF3_CLASSIC')
# Create global attributes
log(' globals', 'DEBUG')
cf_nc.featureType = 'timeSeries'
cf_nc.Metadata_Conventions = 'Unidata Dataset Discovery v1.0'
cf_nc.Conventions = 'CF-1.6'
cf_nc.cdm_data_type = 'Station'
cf_nc.nodc_template_version = (
'NODC_NetCDF_TimeSeries_Orthogonal_Template_v1.1')
cf_nc.standard_name_vocabulary = ('NetCDF Climate and Forecast (CF) ' +
'Metadata Convention Standard Name ' +
'Table v28')
cf_nc.title = 'RAPID Result'
cf_nc.summary = ("Results of RAPID river routing simulation. Each river " +
"reach (i.e., feature) is represented by a point " +
"feature at its midpoint, and is identified by the " +
"reach's unique NHDPlus COMID identifier.")
cf_nc.time_coverage_resolution = 'point'
# Geospatial bounds start at 0.0 placeholders; presumably updated once
# the actual lat/lon data are written — confirm against the caller.
cf_nc.geospatial_lat_min = 0.0
cf_nc.geospatial_lat_max = 0.0
cf_nc.geospatial_lat_units = 'degrees_north'
cf_nc.geospatial_lat_resolution = 'midpoint of stream feature'
cf_nc.geospatial_lon_min = 0.0
cf_nc.geospatial_lon_max = 0.0
cf_nc.geospatial_lon_units = 'degrees_east'
cf_nc.geospatial_lon_resolution = 'midpoint of stream feature'
cf_nc.geospatial_vertical_min = 0.0
cf_nc.geospatial_vertical_max = 0.0
cf_nc.geospatial_vertical_units = 'm'
cf_nc.geospatial_vertical_resolution = 'midpoint of stream feature'
cf_nc.geospatial_vertical_positive = 'up'
cf_nc.project = 'National Flood Interoperability Experiment'
cf_nc.processing_level = 'Raw simulation result'
cf_nc.keywords_vocabulary = ('NASA/Global Change Master Directory ' +
'(GCMD) Earth Science Keywords. Version ' +
'8.0.0.0.0')
cf_nc.keywords = 'DISCHARGE/FLOW'
cf_nc.comment = 'Result time step (seconds): ' + str(time_step_seconds)
# Record the creation time (UTC) in both date_created and history
timestamp = datetime.utcnow().isoformat() + 'Z'
cf_nc.date_created = timestamp
cf_nc.history = (timestamp + '; added time, lat, lon, z, crs variables; ' +
'added metadata to conform to NODC_NetCDF_TimeSeries_' +
'Orthogonal_Template_v1.1')
# Create dimensions
log(' dimming', 'DEBUG')
cf_nc.createDimension('time', time_len)
cf_nc.createDimension(id_dim_name, id_len)
# Create variables
log(' timeSeries_var', 'DEBUG')
timeSeries_var = cf_nc.createVariable(id_dim_name, 'i4', (id_dim_name,))
timeSeries_var.long_name = (
'Unique NHDPlus COMID identifier for each river reach feature')
timeSeries_var.cf_role = 'timeseries_id'
log(' time_var', 'DEBUG')
time_var = cf_nc.createVariable('time', 'i4', ('time',))
time_var.long_name = 'time'
time_var.standard_name = 'time'
time_var.units = 'seconds since 1970-01-01 00:00:00 0:00'
time_var.axis = 'T'
log(' lat_var', 'DEBUG')
lat_var = cf_nc.createVariable('lat', 'f8', (id_dim_name,),
fill_value=-9999.0)
lat_var.long_name = 'latitude'
lat_var.standard_name = 'latitude'
lat_var.units = 'degrees_north'
lat_var.axis = 'Y'
log(' lon_var', 'DEBUG')
lon_var = cf_nc.createVariable('lon', 'f8', (id_dim_name,),
fill_value=-9999.0)
lon_var.long_name = 'longitude'
lon_var.standard_name = 'longitude'
lon_var.units = 'degrees_east'
lon_var.axis = 'X'
log(' z_var', 'DEBUG')
# Elevation variable, referenced to NAVD88 per its long_name
z_var = cf_nc.createVariable('z', 'f8', (id_dim_name,),
fill_value=-9999.0)
z_var.long_name = ('Elevation referenced to the North American ' +
'Vertical Datum of 1988 (NAVD88)')
z_var.standard_name = 'surface_altitude'
z_var.units = 'm'
#......... remainder of this function omitted in this excerpt .........