This article collects typical usage examples of the Python method netCDF4.Dataset.date_created. If you are wondering what Dataset.date_created does, how to use it, or want concrete examples, the curated code samples below may help. You can also explore further usage examples of the containing class, netCDF4.Dataset.
Below are 11 code examples of Dataset.date_created, sorted by popularity by default. You can vote up the examples you like or find useful; your feedback helps the system recommend better Python code samples.
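Before the examples, here is a minimal, self-contained sketch of the attribute itself: date_created is simply a global attribute on an open netCDF4.Dataset, conventionally holding an ISO 8601 UTC timestamp. The file name below is a placeholder.

from datetime import datetime
from netCDF4 import Dataset

nc = Dataset('example.nc', 'w', format='NETCDF4')  # placeholder path
nc.date_created = datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ")  # ISO 8601, UTC
print(nc.date_created)
nc.close()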
Example 1: setUp
# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import date_created [as alias]
def setUp(self):
    """ Check that the AIMS system or this script hasn't been modified.
    This function checks that a downloaded file still has the same md5.
    """
    logging_aims()
    channel_id = '8365'
    from_date = '2008-09-30T00:27:27Z'
    thru_date = '2008-09-30T00:30:00Z'
    level_qc = 1
    aims_rss_val = 100
    xml_url = 'http://data.aims.gov.au/gbroosdata/services/rss/netcdf/level%s/%s' % (str(level_qc), str(aims_rss_val))

    aims_xml_info = parse_aims_xml(xml_url)
    channel_id_info = aims_xml_info[channel_id]
    self.netcdf_tmp_file_path = download_channel(channel_id, from_date, thru_date, level_qc)
    modify_soop_trv_netcdf(self.netcdf_tmp_file_path, channel_id_info)

    # force values of attributes which change all the time
    netcdf_file_obj = Dataset(self.netcdf_tmp_file_path, 'a', format='NETCDF4')
    netcdf_file_obj.date_created = "1970-01-01T00:00:00Z"
    netcdf_file_obj.history = 'data validation test only'
    netcdf_file_obj.close()

    shutil.move(self.netcdf_tmp_file_path, remove_creation_date_from_filename(self.netcdf_tmp_file_path))
    self.netcdf_tmp_file_path = remove_creation_date_from_filename(self.netcdf_tmp_file_path)
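The docstring mentions verifying that the downloaded file still has the same md5, but the assertion itself is not shown in this excerpt. A minimal sketch of such a check, assuming a reference digest recorded when the test was written (the hash value and file name below are placeholders):

import hashlib

def md5_of(path, chunk_size=8192):
    """Compute the md5 hex digest of a file, reading in chunks."""
    md5 = hashlib.md5()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(chunk_size), b''):
            md5.update(chunk)
    return md5.hexdigest()

EXPECTED_MD5 = 'd41d8cd98f00b204e9800998ecf8427e'  # placeholder reference value
assert md5_of('downloaded_file.nc') == EXPECTED_MD5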
Example 2: tamoc_nc_file
# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import date_created [as alias]
def tamoc_nc_file(fname, title, summary, source):
    """
    Write the header metadata to a netCDF file for TAMOC output

    The TAMOC suite stores its output by default in a netCDF dataset file.
    This function writes the standard TAMOC metadata to the header of the
    netCDF file.

    Parameters
    ----------
    fname : str
        File name of the file to write
    title : str
        String stating the TAMOC module where the data originated and the
        type of data contained.
    summary : str
        String summarizing what is contained in the dataset or information
        needed to interpret the dataset
    source : str
        String describing the source of the data in the dataset or of related
        datasets

    Returns
    -------
    nc : `netCDF4.Dataset` object
        The `netCDF4.Dataset` object containing the open netCDF4 file where
        the data should be stored.

    """
    # Create the netCDF dataset object
    nc = Dataset(fname, 'w', format='NETCDF4_CLASSIC')

    # Write the netCDF header data for a TAMOC suite output
    nc.Conventions = 'TAMOC Modeling Suite Output File'
    nc.Metadata_Conventions = 'TAMOC Python Model'
    nc.featureType = 'profile'
    nc.cdm_data_type = 'Profile'
    nc.nodc_template_version = \
        'NODC_NetCDF_Profile_Orthogonal_Template_v1.0'
    nc.title = title
    nc.summary = summary
    nc.source = source
    nc.creator_url = 'http://github.com/socolofs/tamoc'
    nc.date_created = datetime.today().isoformat(' ')
    nc.date_modified = datetime.today().isoformat(' ')
    nc.history = 'Creation'

    # Return the netCDF dataset
    return nc
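A hedged usage sketch of the function above; the file name and metadata strings are illustrative, not taken from the TAMOC sources:

nc = tamoc_nc_file('sbm_output.nc',
                   title='Single bubble model output',
                   summary='Trajectory and state variables for one simulated bubble',
                   source='Inputs created for this illustration')
# ... create dimensions and variables, write data ...
nc.close()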
Example 3: create_mhl_sst_ncfile
# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import date_created [as alias]
def create_mhl_sst_ncfile(txtfile, site_code_short, data,
                          time, dtime, spatial_data):
    """
    create NetCDF file for MHL SST data
    """
    site_code = site_list[site_code_short][0]
    netcdf_filename = create_netcdf_filename(site_code, data, dtime)
    netcdf_filepath = os.path.join(
        output_folder, "%s.nc") % netcdf_filename
    ncfile = Dataset(netcdf_filepath, "w", format="NETCDF4")

    # generate site and deployment specific attributes
    ncfile.title = ("IMOS - ANMN New South Wales(NSW) %s "
                    "Sea water temperature (%s) -"
                    "Deployment No. %s %s to %s") % (
        site_list[site_code_short][1], site_code,
        spatial_data[0], min(dtime).strftime("%d-%m-%Y"),
        max(dtime).strftime("%d-%m-%Y"))
    ncfile.institution = 'Manly Hydraulics Laboratory'
    ncfile.keywords = ('Oceans | Ocean temperature |'
                       'Sea Surface Temperature')
    ncfile.principal_investigator = 'Mark Kulmar'
    ncfile.cdm_data_type = 'Station'
    ncfile.platform_code = site_code

    abstract_default = ("The sea water temperature is measured by a thermistor mounted in the "
                        "buoy hull approximately 400 mm below the water "
                        "surface. The thermistor has a resolution of 0.05 "
                        "Celsius and an accuracy of 0.2 Celsius. The "
                        "measurements are transmitted to a shore station "
                        "where it is stored on a PC before routine transfer "
                        "to Manly Hydraulics Laboratory via email.")

    if site_code_short in ['COF', 'CRH', 'EDE', 'PTK']:
        abstract_specific = ("This dataset contains sea water temperature "
                             "data collected by a wave monitoring buoy moored off %s. ") % site_list[site_code_short][1]
    else:
        abstract_specific = ("This dataset contains sea water temperature "
                             "data collected by a wave monitoring buoy moored off %s "
                             "approximately %s kilometres from the coastline. ") % (
            site_list[site_code_short][1], site_list[site_code_short][2])
    ncfile.abstract = abstract_specific + abstract_default
    ncfile.comment = ("The sea water temperature data (SST) is routinely quality controlled (usually twice per week) "
                      "using a quality control program developed by Manly Hydraulics Laboratory. The SST data gathered "
                      "by the buoy is regularly compared to the latest available satellite derived sea SST images available "
                      "from the Bluelink ocean forecasting web pages to ensure the integrity of the dataset. Erroneous SST "
                      "records are removed and good quality data is flagged as \'Quality Controlled\' in the "
                      "Manly Hydraulics Laboratory SST database.")
    ncfile.sourceFilename = os.path.basename(txtfile)
    ncfile.date_created = datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ")
    ncfile.time_coverage_start = min(dtime).strftime("%Y-%m-%dT%H:%M:%SZ")
    ncfile.time_coverage_end = max(dtime).strftime("%Y-%m-%dT%H:%M:%SZ")
    ncfile.geospatial_lat_min = spatial_data[1]
    ncfile.geospatial_lat_max = spatial_data[1]
    ncfile.geospatial_lon_min = spatial_data[2]
    ncfile.geospatial_lon_max = spatial_data[2]
    ncfile.geospatial_vertical_max = 0.
    ncfile.geospatial_vertical_min = 0.
    ncfile.deployment_number = str(spatial_data[0])

    # add dimension and variables
    ncfile.createDimension('TIME', len(time))

    TIME = ncfile.createVariable('TIME', "d", 'TIME')
    TIMESERIES = ncfile.createVariable('TIMESERIES', "i")
    LATITUDE = ncfile.createVariable(
        'LATITUDE', "d", fill_value=99999.)
    LONGITUDE = ncfile.createVariable(
        'LONGITUDE', "d", fill_value=99999.)
    TEMP = ncfile.createVariable('TEMP', "f", 'TIME', fill_value=99999.)

    # add global attributes and variable attributes stored in config files
    config_file = os.path.join(os.getcwd(), 'global_att_sst.att')
    generate_netcdf_att(ncfile, config_file,
                        conf_file_point_of_truth=False)

    # replace nans with fillvalue in dataframe
    data = data.fillna(value=float(99999.))

    TIME[:] = time
    TIMESERIES[:] = 1
    LATITUDE[:] = spatial_data[1]
    LONGITUDE[:] = spatial_data[2]
    TEMP[:] = data['SEA_TEMP'].values
    ncfile.close()
Example 4: create_pigment_tss_nc
# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import date_created [as alias]
def create_pigment_tss_nc(metadata, data, output_folder):
    """ create a netcdf file for pigment or TSS data """
    netcdf_filepath = os.path.join(output_folder, "%s.nc" % create_filename_output(metadata, data))
    output_netcdf_obj = Dataset(netcdf_filepath, "w", format="NETCDF4")

    # read gatts from input, add them to output. Some gatts will be overwritten
    input_gatts = metadata['gatts']
    check_vessel_name(input_gatts['vessel_name'])  # this raises a warning only
    if input_gatts['vessel_name'].strip() == '':
        input_gatts['vessel_name'] = 'UNKNOWN VESSEL'

    gatt_to_dispose = ['geospatial_lat_min', 'geospatial_lat_max', 'geospatial_lon_min',
                       'geospatial_lon_max', 'geospatial_vertical_min', 'geospatial_vertical_max',
                       'conventions', 'local_time_zone']

    for gatt in input_gatts.keys():
        if gatt not in gatt_to_dispose:
            if input_gatts[gatt] != '':
                setattr(output_netcdf_obj, gatt, input_gatts[gatt])
    setattr(output_netcdf_obj, 'input_xls_filename', os.path.basename(metadata['filename_input']))

    if 'local_time_zone' in input_gatts.keys():
        if input_gatts['local_time_zone'] != '':
            setattr(output_netcdf_obj, 'local_time_zone', np.float(input_gatts['local_time_zone']))

    output_netcdf_obj.date_created = datetime.now().strftime("%Y-%m-%dT%H:%M:%SZ")
    output_netcdf_obj.geospatial_vertical_min = data.Depth.min()
    output_netcdf_obj.geospatial_vertical_max = data.Depth.max()

    output_netcdf_obj.createDimension("obs", data.shape[0])
    output_netcdf_obj.createDimension("station", len(data.Station_Code.unique()))
    output_netcdf_obj.createDimension('name_strlen', 50)

    # a profile is defined by a time/station combination. Two profiles at the same
    # time but at different locations can exist. In order to find the unique
    # profiles, the unique values of a string array of 'time-station' are counted
    time_station_arr = ['%s_%s' % (a, b) for a, b in zip(data.index, data.Station_Code.values)]
    len_prof = len(np.unique(time_station_arr))
    output_netcdf_obj.createDimension("profile", len_prof)

    var_time = output_netcdf_obj.createVariable("TIME", "d", "profile", fill_value=get_imos_parameter_info('TIME', '_FillValue'))
    var_lat = output_netcdf_obj.createVariable("LATITUDE", "f4", "station", fill_value=get_imos_parameter_info('LATITUDE', '_FillValue'))
    var_lon = output_netcdf_obj.createVariable("LONGITUDE", "f4", "station", fill_value=get_imos_parameter_info('LONGITUDE', '_FillValue'))
    var_station_name = output_netcdf_obj.createVariable("station_name", "S1", (u'station', u'name_strlen'))
    var_station_idx = output_netcdf_obj.createVariable("station_index", "i4", "profile")
    var_profile = output_netcdf_obj.createVariable("profile", "i4", "profile")
    var_rowsize = output_netcdf_obj.createVariable("row_size", "i4", "profile")
    var_depth = output_netcdf_obj.createVariable("DEPTH", "f4", "obs", fill_value=get_imos_parameter_info('DEPTH', '_FillValue'))

    var = 'DEPTH'
    if metadata['varatts']['Depth']['Comments'] != '' and metadata['varatts']['Depth']['Comments'] != 'positive down':
        setattr(output_netcdf_obj[var], 'comments', metadata['varatts']['Depth']['Comments'].replace('positive down', ''))

    # creation of rest of variables
    var_to_dispose = ['Latitude', 'Longitude', 'Depth', 'Time', 'Station_Code']
    for var in data.columns:
        if var not in var_to_dispose:
            if metadata['varatts'][var]['Fill value'] == '':
                fillvalue = -999
            else:
                fillvalue = metadata['varatts'][var]['Fill value']

            output_netcdf_obj.createVariable(var, "d", "obs", fill_value=fillvalue)
            if metadata['varatts'][var]['IMOS long_name'] != '':
                setattr(output_netcdf_obj[var], 'long_name', metadata['varatts'][var]['IMOS long_name'])
            if metadata['varatts'][var]['Units'] != '':
                setattr(output_netcdf_obj[var], 'units', metadata['varatts'][var]['Units'])
            if metadata['varatts'][var]['Comments'] != '':
                setattr(output_netcdf_obj[var], 'comments', metadata['varatts'][var]['Comments'])
            # SPM is wrongly given a standard_name in the original xls files
            if 'SPM' not in var:
                if metadata['varatts'][var]['CF standard_name'] != '':
                    setattr(output_netcdf_obj[var], 'standard_name', metadata['varatts'][var]['CF standard_name'])
            if 'Sample_Number' in var:
                setattr(output_netcdf_obj[var], 'units', 1)

            if np.dtype(data[var]) == 'O':
                os.remove(netcdf_filepath)
                _error('Incorrect values for variable \"%s\"' % var)
            output_netcdf_obj[var][:] = np.array(data[var].values).astype(np.double)

    # Contiguous ragged array representation of Stations netcdf 1.5
    # add gatts and variable attributes as stored in config files
    conf_file_generic = os.path.join(os.path.dirname(__file__), 'generate_nc_file_att')
    generate_netcdf_att(output_netcdf_obj, conf_file_generic, conf_file_point_of_truth=True)

    # lat lon depth
    _, idx_station_uniq = np.unique(data.Station_Code, return_index=True)
    idx_station_uniq.sort()
    var_lat[:] = data.Latitude.values[idx_station_uniq].astype(np.float)
    var_lon[:] = data.Longitude.values[idx_station_uniq].astype(np.float)
    if np.dtype(data.Depth) == 'O':
        try:
            var_depth[:] = data.Depth.values.astype(np.float)
        except ValueError:
            os.remove(netcdf_filepath)
            _error('Incorrect depth value')
    else:
        #......... (some code omitted here) .........
Example 5: create_absorption_nc
# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import date_created [as alias]
def create_absorption_nc(metadata, data, output_folder):
    """ create a netcdf file for absorption data """
    netcdf_filepath = os.path.join(output_folder, "%s.nc" % create_filename_output(metadata, data))
    output_netcdf_obj = Dataset(netcdf_filepath, "w", format="NETCDF4")

    data_dict = data[1]
    data_df = data[0]

    # read gatts from input, add them to output. Some gatts will be overwritten
    input_gatts = metadata['gatts']
    check_vessel_name(input_gatts['vessel_name'])  # this raises a warning only
    if input_gatts['vessel_name'].strip() == '':
        input_gatts['vessel_name'] = 'UNKNOWN VESSEL'

    gatt_to_dispose = ['geospatial_lat_min', 'geospatial_lat_max', 'geospatial_lon_min',
                       'geospatial_lon_max', 'geospatial_vertical_min', 'geospatial_vertical_max',
                       'conventions', 'local_time_zone']

    for gatt in input_gatts.keys():
        if gatt not in gatt_to_dispose:
            if input_gatts[gatt] != '':
                setattr(output_netcdf_obj, gatt, input_gatts[gatt])
    setattr(output_netcdf_obj, 'input_xls_filename', os.path.basename(metadata['filename_input']))

    if 'local_time_zone' in input_gatts.keys():
        if input_gatts['local_time_zone'] != '':
            setattr(output_netcdf_obj, 'local_time_zone', np.float(input_gatts['local_time_zone']))

    output_netcdf_obj.date_created = datetime.now().strftime("%Y-%m-%dT%H:%M:%SZ")
    output_netcdf_obj.geospatial_vertical_min = min(data_dict['Depth'])
    output_netcdf_obj.geospatial_vertical_max = max(data_dict['Depth'])

    output_netcdf_obj.createDimension("obs", data_df.shape[1])
    output_netcdf_obj.createDimension("station", len(np.unique(data_dict['Station_Code'])))
    output_netcdf_obj.createDimension('name_strlen', 50)
    output_netcdf_obj.createDimension('wavelength', data_df.shape[0])

    # a profile is defined by a time/station combination. Two profiles at the same
    # time but at different locations can exist. In order to find the unique
    # profiles, the unique values of a string array of 'time-station' are counted
    time_station_arr = ['%s_%s' % (a, b) for a, b in zip(data_dict['Dates'], data_dict['Station_Code'])]
    len_prof = len(np.unique(time_station_arr))
    output_netcdf_obj.createDimension("profile", len_prof)

    var_time = output_netcdf_obj.createVariable("TIME", "d", "profile", fill_value=get_imos_parameter_info('TIME', '_FillValue'))
    var_lat = output_netcdf_obj.createVariable("LATITUDE", "f", "station", fill_value=get_imos_parameter_info('LATITUDE', '_FillValue'))
    var_lon = output_netcdf_obj.createVariable("LONGITUDE", "f", "station", fill_value=get_imos_parameter_info('LONGITUDE', '_FillValue'))
    var_station_name = output_netcdf_obj.createVariable("station_name", "S1", (u'station', u'name_strlen'))
    var_station_idx = output_netcdf_obj.createVariable("station_index", "i4", "profile")
    var_profile = output_netcdf_obj.createVariable("profile", "i4", "profile")
    var_rowsize = output_netcdf_obj.createVariable("row_size", "i4", "profile")
    var_depth = output_netcdf_obj.createVariable("DEPTH", "f", "obs", fill_value=get_imos_parameter_info('DEPTH', '_FillValue'))
    var_wavelength = output_netcdf_obj.createVariable("wavelength", "f", "wavelength")

    var = data_dict['main_var_name'][0]
    output_netcdf_obj.createVariable(var, "d", ("obs", "wavelength"), fill_value=metadata['varatts_col'][var]['Fill value'])
    if metadata['varatts_col'][var]['IMOS long_name'] != '':
        setattr(output_netcdf_obj[var], 'long_name', metadata['varatts_col'][var]['IMOS long_name'])
    if metadata['varatts_col'][var]['Units'] != '':
        setattr(output_netcdf_obj[var], 'units', metadata['varatts_col'][var]['Units'])
    if metadata['varatts_col'][var]['Comments'] != '':
        setattr(output_netcdf_obj[var], 'comments', metadata['varatts_col'][var]['Comments'])
    if metadata['varatts_col'][var]['CF standard_name'] != '':
        setattr(output_netcdf_obj[var], 'standard_name', metadata['varatts_col'][var]['CF standard_name'])

    data_val = data_df.transpose()
    output_netcdf_obj[var][:] = np.array(data_val.values)

    # Contiguous ragged array representation of Stations netcdf 1.5
    # add gatts and variable attributes as stored in config files
    conf_file_generic = os.path.join(os.path.dirname(__file__), 'generate_nc_file_att')
    generate_netcdf_att(output_netcdf_obj, conf_file_generic, conf_file_point_of_truth=True)

    # lat lon depth
    _, idx_station_uniq = np.unique(data_dict['Station_Code'], return_index=True)
    idx_station_uniq.sort()
    var_lat[:] = np.array(data_dict['Latitude'])[idx_station_uniq]
    var_lon[:] = np.array(data_dict['Longitude'])[idx_station_uniq]
    var_depth[:] = data_dict['Depth']
    var_depth.positive = 'down'

    # time
    _, idx_time_station_uniq = np.unique(time_station_arr, return_index=True)
    idx_time_station_uniq.sort()
    time_values = (data_dict['Dates'][idx_time_station_uniq]).to_pydatetime()
    time_val_dateobj = date2num(time_values, output_netcdf_obj['TIME'].units, output_netcdf_obj['TIME'].calendar)
    var_time[:] = time_val_dateobj

    # wavelength
    var = 'Wavelength'
    var_wavelength[:] = data_dict['Wavelength']
    if metadata['varatts_col'][var]['IMOS long_name'] != '':
        setattr(var_wavelength, 'long_name', metadata['varatts_col'][var]['IMOS long_name'])
    if metadata['varatts_col'][var]['Units'] != '':
        setattr(var_wavelength, 'units', metadata['varatts_col'][var]['Units'])
    if metadata['varatts_col'][var]['Comments'] != '':
        setattr(var_wavelength, 'comments', metadata['varatts_col'][var]['Comments'])
    if metadata['varatts_col'][var]['CF standard_name'] != '':
        setattr(var_wavelength, 'standard_name', metadata['varatts_col'][var]['CF standard_name'])
    #......... (some code omitted here) .........
Example 6: Funceme
# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import date_created [as alias]
foo.createDimension('time', None)
foo.createDimension('latitude', pcp.shape[1])
foo.createDimension('longitude', pcp.shape[2])
foo.institution = 'Climate Hazards Group. University of California at Santa Barbara'
foo.creator_name = 'Pete Peterson'
foo.history = 'created by Climate Hazards Group - Modified by Funceme (NetCDF3 - South America)'
foo.title = 'CHIRPS Version 2.0'
foo.creator_email = '[email protected]'
foo.documentation = 'http://pubs.usgs.gov/ds/832/'
foo.comments = 'time variable denotes the first day of the given month.'
foo.ftp_url = 'ftp://chg-ftpout.geog.ucsb.edu/pub/org/chg/products/CHIRPS-latest/'
foo.website = 'http://chg.geog.ucsb.edu/data/chirps/index.html'
foo.faq = 'http://chg-wiki.geog.ucsb.edu/wiki/CHIRPS_FAQ'
foo.version = 'Version 2.0'
foo.date_created = '2015-12-02'
lats = foo.createVariable('latitude', 'f4', ('latitude'))
lats.units = 'degrees_north'
lats.long_name = 'latitude'
lats.axis = "Y"
lats[:] = lat[:]
lons = foo.createVariable('longitude', 'f4', ('longitude'))
lons.units = 'degrees_east'
lons.long_name = 'longitude'
lons.axis = "X"
lons[:] = lon[:]
times = foo.createVariable('time', 'f4', ('time'))
times.units = 'days since 1981-01-01 00:00:00'
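The snippet stops before the time values are written. Since the time variable uses CF-style units ('days since 1981-01-01 00:00:00'), one way to fill it is netCDF4.date2num; the dates below are illustrative monthly stamps (the comments attribute above notes that time denotes the first day of each month):

from datetime import datetime
from netCDF4 import date2num

dates = [datetime(1981, 1, 1), datetime(1981, 2, 1), datetime(1981, 3, 1)]  # placeholder months
times[:] = date2num(dates, units='days since 1981-01-01 00:00:00', calendar='standard')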
Example 7: modify_aims_netcdf
# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import date_created [as alias]
def modify_aims_netcdf(netcdf_file_path, channel_id_info):
    """ Modify the downloaded netCDF file so it passes both CF and IMOS checker
    input:
        netcdf_file_path(str) : path of netcdf file to modify
        channel_id_info(dict) : information from xml for the channel
    """
    imos_env_path = os.path.join(os.environ.get('DATA_SERVICES_DIR'), 'lib', 'netcdf', 'imos_env')
    if not os.path.isfile(imos_env_path):
        logger = logging_aims()
        logger.error('%s is not accessible' % imos_env_path)
        close_logger(logger)
        sys.exit(1)

    dotenv.load_dotenv(imos_env_path)
    netcdf_file_obj = Dataset(netcdf_file_path, 'a', format='NETCDF4')
    netcdf_file_obj.naming_authority = 'IMOS'

    # add gatts to NetCDF
    netcdf_file_obj.aims_channel_id = int(channel_id_info['channel_id'])
    if not (channel_id_info['metadata_uuid'] == 'Not Available'):
        netcdf_file_obj.metadata_uuid = channel_id_info['metadata_uuid']

    if not netcdf_file_obj.instrument_serial_number:
        del(netcdf_file_obj.instrument_serial_number)

    # add CF gatts, values stored in lib/netcdf/imos_env
    netcdf_file_obj.Conventions = os.environ.get('CONVENTIONS')
    netcdf_file_obj.data_centre_email = os.environ.get('DATA_CENTRE_EMAIL')
    netcdf_file_obj.data_centre = os.environ.get('DATA_CENTRE')
    netcdf_file_obj.project = os.environ.get('PROJECT')
    netcdf_file_obj.acknowledgement = os.environ.get('ACKNOWLEDGEMENT')
    netcdf_file_obj.distribution_statement = os.environ.get('DISTRIBUTION_STATEMENT')

    netcdf_file_obj.date_created = strftime("%Y-%m-%dT%H:%M:%SZ", gmtime())
    netcdf_file_obj.quality_control_set = 1
    imos_qc_convention = 'IMOS standard set using the IODE flags'
    netcdf_file_obj.author = 'laurent besnard'
    netcdf_file_obj.author_email = '[email protected]'

    rename_netcdf_attribute(netcdf_file_obj, 'geospatial_LAT_max', 'geospatial_lat_max')
    rename_netcdf_attribute(netcdf_file_obj, 'geospatial_LAT_min', 'geospatial_lat_min')
    rename_netcdf_attribute(netcdf_file_obj, 'geospatial_LON_max', 'geospatial_lon_max')
    rename_netcdf_attribute(netcdf_file_obj, 'geospatial_LON_min', 'geospatial_lon_min')

    # variables modifications
    time = netcdf_file_obj.variables['time']
    time.calendar = 'gregorian'
    time.axis = 'T'
    time.valid_min = 0.0
    time.valid_max = 9999999999.0
    netcdf_file_obj.renameDimension('time', 'TIME')
    netcdf_file_obj.renameVariable('time', 'TIME')

    netcdf_file_obj.time_coverage_start = num2date(time[:], time.units, time.calendar).min().strftime('%Y-%m-%dT%H:%M:%SZ')
    netcdf_file_obj.time_coverage_end = num2date(time[:], time.units, time.calendar).max().strftime('%Y-%m-%dT%H:%M:%SZ')

    # latitude longitude
    latitude = netcdf_file_obj.variables['LATITUDE']
    latitude.axis = 'Y'
    latitude.valid_min = -90.0
    latitude.valid_max = 90.0
    latitude.reference_datum = 'geographical coordinates, WGS84 projection'
    latitude.standard_name = 'latitude'
    latitude.long_name = 'latitude'

    longitude = netcdf_file_obj.variables['LONGITUDE']
    longitude.axis = 'X'
    longitude.valid_min = -180.0
    longitude.valid_max = 180.0
    longitude.reference_datum = 'geographical coordinates, WGS84 projection'
    longitude.standard_name = 'longitude'
    longitude.long_name = 'longitude'

    # handle masked arrays
    lon_array = longitude[:]
    lat_array = latitude[:]
    if type(lon_array) != numpy.ma.core.MaskedArray or len(lon_array) == 1:
        netcdf_file_obj.geospatial_lon_min = min(lon_array)
        netcdf_file_obj.geospatial_lon_max = max(lon_array)
    else:
        netcdf_file_obj.geospatial_lon_min = numpy.ma.MaskedArray.min(lon_array)
        netcdf_file_obj.geospatial_lon_max = numpy.ma.MaskedArray.max(lon_array)

    if type(lat_array) != numpy.ma.core.MaskedArray or len(lat_array) == 1:
        netcdf_file_obj.geospatial_lat_min = min(lat_array)
        netcdf_file_obj.geospatial_lat_max = max(lat_array)
    else:
        netcdf_file_obj.geospatial_lat_min = numpy.ma.MaskedArray.min(lat_array)
        netcdf_file_obj.geospatial_lat_max = numpy.ma.MaskedArray.max(lat_array)

    # Change variable name, standard name, long name, units ...
    if 'Seawater_Intake_Temperature' in netcdf_file_obj.variables.keys():
        var = netcdf_file_obj.variables['Seawater_Intake_Temperature']
        var.units = 'Celsius'
        netcdf_file_obj.renameVariable('Seawater_Intake_Temperature', 'TEMP')
        netcdf_file_obj.renameVariable('Seawater_Intake_Temperature_quality_control', 'TEMP_quality_control')
        var.ancillary_variables = 'TEMP_quality_control'
    #......... (some code omitted here) .........
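A side note on the masked-array handling above: the type-based branching can be avoided, because numpy.ma.min and numpy.ma.max accept plain ndarrays as well as masked arrays and ignore masked elements. A minimal branch-free sketch with illustrative values:

import numpy

lon_array = numpy.ma.masked_invalid([151.2, 151.3, numpy.nan])  # masked input
lat_array = numpy.array([-33.8, -33.9])                         # plain ndarray also works

# one code path covers masked and unmasked arrays alike
geospatial_lon_min = float(numpy.ma.min(lon_array))
geospatial_lon_max = float(numpy.ma.max(lon_array))
geospatial_lat_min = float(numpy.ma.min(lat_array))
geospatial_lat_max = float(numpy.ma.max(lat_array))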
Example 8: Table
# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import date_created [as alias]
fid.publisher = ["National Snow and Ice Data Center\n",
                 "Cooperative Institute for Research in Environmental Sciences\n",
                 "University of Colorado at Boulder\n",
                 "Boulder, CO"]
fid.publisher_url = "http://nsidc.org/charis"
fid.publisher_email = "[email protected]"
fid.project = "CHARIS"
fid.standard_name_vocabulary = "CF Standard Name Table (v27, 28 September 2013)"
fid.cdm_data_type = "grid"
fid.keywords = "EARTH SCIENCE > SPECTRAL/ENGINEERING > MICROWAVE > BRIGHTNESS TEMPERATURE"
fid.keywords_vocabulary = "NASA Global Change Master Directory (GCMD) Earth Science Keywords, Version 8.1"
fid.platform = "TBD"
fid.sensor = "TBD"
fid.naming_authority = "org.doi.dx"
fid.id = "10.5067/MEASURES/CRYOSPHERE/nsidc-0630.001"
fid.date_created = "TBD"
fid.acknowledgement = ["This data set was created with funding from NASA MEaSUREs Grant #NNX13AI23A.\n",
                       "Data archiving and distribution is supported by the NASA NSIDC Distributed Active Archive Center (DAAC)."]
fid.license = "No constraints on data access or use"
fid.processing_level = "Level 3"
fid.creator_name = "Mary J. Brodzik"
fid.creator_email = "[email protected]"
fid.creator_url = "http://nsidc.org/charis"
fid.contributor_name = "T. H. Painter, M. J. Brodzik, R. L. Armstrong"
fid.contributor_role = "Principal Investigator, Co-Investigator, Co-Investigator"
fid.citation = ["Brodzik, M. J., D. G. Long, M. A. Hardman, A. C. Paget. 2015.\n",
                "MEaSUREs Calibrated Passive Microwave Daily EASE-Grid 2.0 Brightness Temperature ESDR.\n",
                "Version 0.01.\n",
                "[Indicate subset used].\n",
                "Boulder, Colorado USA: NASA DAAC at the National Snow and Ice Data Center."]
Example 9: create_burst_average_netcdf
# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import date_created [as alias]
def create_burst_average_netcdf(input_netcdf_file_path, output_dir):
    """
    generate the burst netcdf file for the WQM product.
    see variable conf_file if editing of gatts and var atts needs to be done
    """
    input_file_rel_path = get_input_file_rel_path(input_netcdf_file_path)
    input_netcdf_obj = Dataset(input_netcdf_file_path, 'r')
    global INSTRUMENT_SAMPLE_INTERVAL
    INSTRUMENT_SAMPLE_INTERVAL = getattr(input_netcdf_obj, 'instrument_sample_interval', 1)
    burst_vars = create_burst_average_var(input_netcdf_obj)
    time_burst_vals = burst_vars.values()[0]['time_mean']
    tmp_netcdf_dir = tempfile.mkdtemp()
    output_netcdf_file_path = os.path.join(tmp_netcdf_dir, generate_netcdf_burst_filename(input_netcdf_file_path, burst_vars))
    output_netcdf_obj = Dataset(output_netcdf_file_path, "w", format="NETCDF4")

    # read gatts from input, add them to output. Some gatts will be overwritten
    input_gatts = input_netcdf_obj.__dict__.keys()
    gatt_to_dispose = ['author', 'file_version_quality_control', 'quality_control_set',
                       'compliance_checker_version', 'compliance_checker_last_updated',
                       'quality_control_log']

    for gatt in input_gatts:
        if gatt not in gatt_to_dispose:
            setattr(output_netcdf_obj, gatt, getattr(input_netcdf_obj, gatt))

    if 'WQM' in output_netcdf_obj.instrument:
        output_netcdf_obj.title = 'Burst-averaged biogeochemical measurements at %s' % (input_netcdf_obj.site_code)
    elif 'CTD' in output_netcdf_obj.instrument:
        output_netcdf_obj.title = 'Burst-averaged moored CTD measurements at %s' % (input_netcdf_obj.site_code)

    m = re.match('.*\.nc', input_file_rel_path)
    output_netcdf_obj.input_file = m.group()
    output_netcdf_obj.date_created = DATE_UTC_NOW.strftime("%Y-%m-%dT%H:%M:%SZ")

    depth_burst_mean_val = burst_vars['DEPTH']['var_mean']
    if np.isnan(depth_burst_mean_val).all():
        output_netcdf_obj.geospatial_vertical_min = np.double(input_netcdf_obj['NOMINAL_DEPTH'][:])
        output_netcdf_obj.geospatial_vertical_max = np.double(input_netcdf_obj['NOMINAL_DEPTH'][:])
    else:
        output_netcdf_obj.geospatial_vertical_min = np.nanmin(depth_burst_mean_val)
        output_netcdf_obj.geospatial_vertical_max = np.nanmax(depth_burst_mean_val)

    # set up dimensions and variables
    output_netcdf_obj.createDimension("TIME", len(time_burst_vals))
    var_time = output_netcdf_obj.createVariable("TIME", input_netcdf_obj["TIME"].dtype,
                                                ("TIME",))

    dimensionless_var = list_dimensionless_var(input_netcdf_obj)
    # No FillValue for dimensions as per IMOS conventions
    for var in dimensionless_var:
        output_netcdf_obj.createVariable(var, input_netcdf_obj[var].dtype)
        output_netcdf_obj[var][:] = input_netcdf_obj[var][:]

    for var in burst_vars.keys():
        var_dtype = input_netcdf_obj[var].dtype
        fillvalue = getattr(input_netcdf_obj[var], '_FillValue', None)
        output_var_mean = output_netcdf_obj.createVariable(var, var_dtype, ("TIME",), fill_value=fillvalue)
        output_var_min = output_netcdf_obj.createVariable('%s_burst_min' % var, var_dtype, ("TIME",), fill_value=fillvalue)
        output_var_max = output_netcdf_obj.createVariable('%s_burst_max' % var, var_dtype, ("TIME",), fill_value=fillvalue)
        output_var_sd = output_netcdf_obj.createVariable('%s_burst_sd' % var, var_dtype, ("TIME",), fill_value=fillvalue)
        output_var_num_obs = output_netcdf_obj.createVariable('%s_num_obs' % var, "i4", ("TIME",))

        # set up 'bonus' var atts from original FV01 file into FV02
        input_var_object = input_netcdf_obj[var]
        input_var_list_att = input_var_object.__dict__.keys()
        var_att_disposable = ['name', 'long_name',
                              '_FillValue', 'ancillary_variables',
                              'ChunkSize', 'coordinates']
        for var_att in [att for att in input_var_list_att if att not in var_att_disposable]:
            setattr(output_netcdf_obj[var], var_att, getattr(input_netcdf_obj[var], var_att))
            if var_att != 'comment':
                setattr(output_var_min, var_att, getattr(input_netcdf_obj[var], var_att))
                setattr(output_var_max, var_att, getattr(input_netcdf_obj[var], var_att))
                setattr(output_var_sd, var_att, getattr(input_netcdf_obj[var], var_att))

        # make sure the standard_deviation variable doesn't have a standard_name attribute
        if hasattr(output_var_sd, 'standard_name'):
            delattr(output_var_sd, 'standard_name')

        setattr(output_var_mean, 'coordinates', getattr(input_netcdf_obj[var], 'coordinates', ''))
        setattr(output_var_mean, 'ancillary_variables', ('%s_num_obs %s_burst_sd %s_burst_min %s_burst_max' % (var, var, var, var)))

        setattr(output_var_mean, 'cell_methods', 'TIME: mean')
        setattr(output_var_min, 'cell_methods', 'TIME: minimum')
        setattr(output_var_max, 'cell_methods', 'TIME: maximum')
        setattr(output_var_sd, 'cell_methods', 'TIME: standard_deviation')

        setattr(output_var_sd, 'long_name', 'Standard deviation of values in burst, after rejection of flagged data')
        setattr(output_var_num_obs, 'long_name', 'Number of observations included in the averaging process')
        setattr(output_var_min, 'long_name', 'Minimum data value in burst, after rejection of flagged data')
        setattr(output_var_max, 'long_name', 'Maximum data value in burst, after rejection of flagged data')
        setattr(output_var_mean, 'long_name', 'Mean of %s values in burst, after rejection of flagged data' % (getattr(input_netcdf_obj[var], 'standard_name',
                                                                                                                       getattr(input_netcdf_obj[var], 'long_name', ''))))
        output_var_num_obs.units = "1"
    #......... (some code omitted here) .........
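The burst statistics themselves come from create_burst_average_var, which is not shown in this excerpt. A minimal, hypothetical sketch of the idea, grouping samples into fixed-length bursts and reducing each group (in the real product, burst boundaries come from timestamps and QC-flag rejection, not a fixed count):

import numpy as np

def burst_stats(values, samples_per_burst):
    """Mean/min/max/sd/count per fixed-length burst (illustrative only)."""
    n_bursts = len(values) // samples_per_burst
    bursts = np.asarray(values[:n_bursts * samples_per_burst],
                        dtype=float).reshape(n_bursts, samples_per_burst)
    return {'var_mean': np.nanmean(bursts, axis=1),
            'var_min': np.nanmin(bursts, axis=1),
            'var_max': np.nanmax(bursts, axis=1),
            'var_sd': np.nanstd(bursts, axis=1, ddof=1),
            'num_obs': np.sum(~np.isnan(bursts), axis=1)}

stats = burst_stats([20.1, 20.2, 20.0, 19.9, 20.3, 20.1], samples_per_burst=3)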
Example 10: create_mhl_wave_ncfile
# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import date_created [as alias]
def create_mhl_wave_ncfile(txtfile, site_code_short, data,
                           time, dtime, spatial_data):
    """
    create NetCDF file for MHL Wave data
    """
    site_code = site_list[site_code_short][0]
    netcdf_filename = create_netcdf_filename(site_code, data, dtime)
    netcdf_filepath = os.path.join(
        output_folder, "%s.nc") % netcdf_filename
    ncfile = Dataset(netcdf_filepath, "w", format="NETCDF4")

    # add IMOS1.4 global attributes and variable attributes stored in config
    # files
    config_file = os.path.join(os.getcwd(), 'mhl_wave_library', 'global_att_wave.att')
    generate_netcdf_att(ncfile, config_file,
                        conf_file_point_of_truth=False)

    # Additional attributes either retrieved from the original netcdf file
    # (if it exists) or defined below
    original_netcdf_file_path = os.path.join(
        input_folder, "%s.nc") % netcdf_filename
    if os.path.exists(original_netcdf_file_path):
        # get glob attributes from original netcdf files.
        parse_nc_attribute(original_netcdf_file_path, ncfile)
    else:
        # generate site and deployment specific attributes
        ncfile.title = ("IMOS - ANMN New South Wales(NSW) %s "
                        "Offshore Wave Data (%s) -"
                        "Deployment No. %s %s to %s") % (
            site_list[site_code_short][1], site_code,
            spatial_data[0], min(dtime).strftime("%d-%m-%Y"),
            max(dtime).strftime("%d-%m-%Y"))
        ncfile.institution = 'Manly Hydraulics Laboratory'
        ncfile.keywords = ('Oceans | Ocean Waves |'
                           'Significant Wave Height, Oceans | Ocean Waves'
                           '| Wave Period, Oceans | Ocean Waves |'
                           'Wave Spectra, Oceans | Ocean Waves |'
                           'Wave Speed / direction')
        ncfile.principal_investigator = 'Mark Kulmar'
        ncfile.cdm_data_type = 'Station'
        ncfile.platform_code = site_code
        ncfile.site_name = site_list[site_code_short][1]
        if site_code in ['WAVEPOK', 'WAVECOH', 'WAVECRH', 'WAVEEDN']:
            config_file = os.path.join(
                os.getcwd(), 'common', 'abstract_WAVE_default.att')
        elif site_code == 'WAVEBAB':
            config_file = os.path.join(os.getcwd(), 'common', 'abstract_WAVEBAB.att')
        elif site_code == 'WAVEBYB':
            config_file = os.path.join(os.getcwd(), 'common', 'abstract_WAVEBYB.att')
        else:  # WAVESYD
            config_file = os.path.join(os.getcwd(), 'common', 'abstract_WAVESYD.att')
        generate_netcdf_att(ncfile, config_file,
                            conf_file_point_of_truth=False)

    ncfile.sourceFilename = os.path.basename(txtfile)
    ncfile.date_created = datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ")
    ncfile.time_coverage_start = min(dtime).strftime("%Y-%m-%dT%H:%M:%SZ")
    ncfile.time_coverage_end = max(dtime).strftime("%Y-%m-%dT%H:%M:%SZ")
    ncfile.geospatial_lat_min = spatial_data[1]
    ncfile.geospatial_lat_max = spatial_data[1]
    ncfile.geospatial_lon_min = spatial_data[2]
    ncfile.geospatial_lon_max = spatial_data[2]
    ncfile.geospatial_vertical_max = 0.
    ncfile.geospatial_vertical_min = 0.
    ncfile.deployment_number = str(spatial_data[0])

    # add dimension and variables
    ncfile.createDimension('TIME', len(time))

    TIME = ncfile.createVariable('TIME', "d", 'TIME')
    TIMESERIES = ncfile.createVariable('TIMESERIES', "i")
    LATITUDE = ncfile.createVariable(
        'LATITUDE', "d", fill_value=99999.)
    LONGITUDE = ncfile.createVariable(
        'LONGITUDE', "d", fill_value=99999.)
    WHTH = ncfile.createVariable('WHTH', "f", 'TIME', fill_value=99999.)
    WMSH = ncfile.createVariable('WMSH', "f", 'TIME', fill_value=99999.)
    HRMS = ncfile.createVariable('HRMS', "f", 'TIME', fill_value=99999.)
    WHTE = ncfile.createVariable('WHTE', "f", 'TIME', fill_value=99999.)
    WMXH = ncfile.createVariable('WMXH', "f", 'TIME', fill_value=99999.)
    TCREST = ncfile.createVariable('TCREST', "f", 'TIME', fill_value=99999.)
    WPMH = ncfile.createVariable('WPMH', "f", 'TIME', fill_value=99999.)
    WPTH = ncfile.createVariable('WPTH', "f", 'TIME', fill_value=99999.)
    YRMS = ncfile.createVariable('YRMS', "f", 'TIME', fill_value=99999.)
    WPPE = ncfile.createVariable('WPPE', "f", 'TIME', fill_value=99999.)
    TP2 = ncfile.createVariable('TP2', "f", 'TIME', fill_value=99999.)
    M0 = ncfile.createVariable('M0', "f", 'TIME', fill_value=99999.)
    WPDI = ncfile.createVariable('WPDI', "f", 'TIME', fill_value=99999.)

    # add global attributes and variable attributes stored in config files
    config_file = os.path.join(os.getcwd(), 'mhl_wave_library', 'global_att_wave.att')
    generate_netcdf_att(ncfile, config_file,
                        conf_file_point_of_truth=True)

    for nc_var in [WPTH, WPPE, WPMH, WPDI, WMXH, WMSH, WHTH, WHTE, TP2, TCREST]:
        nc_var.valid_max = np.float32(nc_var.valid_max)
        nc_var.valid_min = np.float32(nc_var.valid_min)

    # replace nans with fillvalue in dataframe
    #......... (some code omitted here) .........
Example 11: initialize_output
# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import date_created [as alias]
def initialize_output(filename, id_dim_name, time_len,
                      id_len, time_step_seconds):
    """Creates netCDF file with CF dimensions and variables, but no data.

    Arguments:
        filename -- full path and filename for output netCDF file
        id_dim_name -- name of Id dimension and variable, e.g., COMID
        time_len -- (integer) length of time dimension (number of time steps)
        id_len -- (integer) length of Id dimension (number of time series)
        time_step_seconds -- (integer) number of seconds per time step
    """
    cf_nc = Dataset(filename, 'w', format='NETCDF3_CLASSIC')

    # Create global attributes
    log(' globals', 'DEBUG')
    cf_nc.featureType = 'timeSeries'
    cf_nc.Metadata_Conventions = 'Unidata Dataset Discovery v1.0'
    cf_nc.Conventions = 'CF-1.6'
    cf_nc.cdm_data_type = 'Station'
    cf_nc.nodc_template_version = (
        'NODC_NetCDF_TimeSeries_Orthogonal_Template_v1.1')
    cf_nc.standard_name_vocabulary = ('NetCDF Climate and Forecast (CF) ' +
                                      'Metadata Convention Standard Name ' +
                                      'Table v28')
    cf_nc.title = 'RAPID Result'
    cf_nc.summary = ("Results of RAPID river routing simulation. Each river " +
                     "reach (i.e., feature) is represented by a point " +
                     "feature at its midpoint, and is identified by the " +
                     "reach's unique NHDPlus COMID identifier.")
    cf_nc.time_coverage_resolution = 'point'
    cf_nc.geospatial_lat_min = 0.0
    cf_nc.geospatial_lat_max = 0.0
    cf_nc.geospatial_lat_units = 'degrees_north'
    cf_nc.geospatial_lat_resolution = 'midpoint of stream feature'
    cf_nc.geospatial_lon_min = 0.0
    cf_nc.geospatial_lon_max = 0.0
    cf_nc.geospatial_lon_units = 'degrees_east'
    cf_nc.geospatial_lon_resolution = 'midpoint of stream feature'
    cf_nc.geospatial_vertical_min = 0.0
    cf_nc.geospatial_vertical_max = 0.0
    cf_nc.geospatial_vertical_units = 'm'
    cf_nc.geospatial_vertical_resolution = 'midpoint of stream feature'
    cf_nc.geospatial_vertical_positive = 'up'
    cf_nc.project = 'National Flood Interoperability Experiment'
    cf_nc.processing_level = 'Raw simulation result'
    cf_nc.keywords_vocabulary = ('NASA/Global Change Master Directory ' +
                                 '(GCMD) Earth Science Keywords. Version ' +
                                 '8.0.0.0.0')
    cf_nc.keywords = 'DISCHARGE/FLOW'
    cf_nc.comment = 'Result time step (seconds): ' + str(time_step_seconds)

    timestamp = datetime.utcnow().isoformat() + 'Z'
    cf_nc.date_created = timestamp
    cf_nc.history = (timestamp + '; added time, lat, lon, z, crs variables; ' +
                     'added metadata to conform to NODC_NetCDF_TimeSeries_' +
                     'Orthogonal_Template_v1.1')

    # Create dimensions
    log(' dimming', 'DEBUG')
    cf_nc.createDimension('time', time_len)
    cf_nc.createDimension(id_dim_name, id_len)

    # Create variables
    log(' timeSeries_var', 'DEBUG')
    timeSeries_var = cf_nc.createVariable(id_dim_name, 'i4', (id_dim_name,))
    timeSeries_var.long_name = (
        'Unique NHDPlus COMID identifier for each river reach feature')
    timeSeries_var.cf_role = 'timeseries_id'

    log(' time_var', 'DEBUG')
    time_var = cf_nc.createVariable('time', 'i4', ('time',))
    time_var.long_name = 'time'
    time_var.standard_name = 'time'
    time_var.units = 'seconds since 1970-01-01 00:00:00 0:00'
    time_var.axis = 'T'

    log(' lat_var', 'DEBUG')
    lat_var = cf_nc.createVariable('lat', 'f8', (id_dim_name,),
                                   fill_value=-9999.0)
    lat_var.long_name = 'latitude'
    lat_var.standard_name = 'latitude'
    lat_var.units = 'degrees_north'
    lat_var.axis = 'Y'

    log(' lon_var', 'DEBUG')
    lon_var = cf_nc.createVariable('lon', 'f8', (id_dim_name,),
                                   fill_value=-9999.0)
    lon_var.long_name = 'longitude'
    lon_var.standard_name = 'longitude'
    lon_var.units = 'degrees_east'
    lon_var.axis = 'X'

    log(' z_var', 'DEBUG')
    z_var = cf_nc.createVariable('z', 'f8', (id_dim_name,),
                                 fill_value=-9999.0)
    z_var.long_name = ('Elevation referenced to the North American ' +
                       'Vertical Datum of 1988 (NAVD88)')
    z_var.standard_name = 'surface_altitude'
    z_var.units = 'm'
    #......... (some code omitted here) .........
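A hedged usage sketch of initialize_output; the path and sizes are placeholders, and the log helper must exist in the caller's module, as it does in the original source:

cf_nc = initialize_output('rapid_result.nc', id_dim_name='COMID',
                          time_len=24, id_len=100, time_step_seconds=3600)
# ... populate time, lat, lon, z and the flow variable, then:
cf_nc.close()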