This article collects typical usage examples of netCDF4.Dataset.time_coverage_start in Python: what it is, how it is used, and what real-world code that sets it looks like. For background, see also the documentation for the netCDF4.Dataset class. Strictly speaking, time_coverage_start is not a method but an ACDD global attribute, created by ordinary attribute assignment on an open Dataset.
The 11 code examples below are drawn from open-source projects and ordered by popularity.
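As a minimal sketch of the mechanics before diving into the examples (an in-memory dataset, so nothing touches disk): assigning an unknown name on a Dataset creates a global netCDF attribute, which is exactly how the examples below set time_coverage_start.
import os
from netCDF4 import Dataset

ds = Dataset(os.devnull, 'w', diskless=True)    # throwaway in-memory file
# Attribute assignment on a Dataset creates a global (file-level) attribute.
ds.time_coverage_start = '2016-01-02T00:00:00Z'
ds.time_coverage_end = '2016-01-03T00:00:00Z'
print(ds.time_coverage_start)                   # '2016-01-02T00:00:00Z'
print('time_coverage_start' in ds.ncattrs())    # True
ds.close()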
Example 1: test_time_extents
# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import time_coverage_start [as alias]
def test_time_extents(self):
'''
Test that the time extents are being checked
'''
result = self.acdd.check_time_extents(self.ds)
self.assert_result_is_good(result)
empty_ds = Dataset(os.devnull, 'w', diskless=True)
self.addCleanup(empty_ds.close)
# The dataset needs at least one variable to check that it's missing
# all the required attributes.
empty_ds.createDimension('time', 1)
time_var = empty_ds.createVariable('time', 'float32', ('time',))
time_var.units = 'seconds since 1970-01-01 00:00:00 UTC'
time_var[:] = [1451692800] # 20160102T000000Z in seconds since epoch
empty_ds.time_coverage_start = '20160102T000000Z'
empty_ds.time_coverage_end = '20160102T000000Z'
result = self.acdd.check_time_extents(empty_ds)
self.assert_result_is_good(result)
# try the same thing with time offsets
time_var.units = 'seconds since 1970-01-01 00:00:00-10:00'
empty_ds.time_coverage_start = '20160102T000000-1000'
empty_ds.time_coverage_end = '20160102T000000-1000'
result = self.acdd.check_time_extents(empty_ds)
self.assert_result_is_good(result)
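For context, self.acdd.check_time_extents comes from the IOOS compliance-checker's ACDD suite; the details live in that library, but the gist is a comparison of the attributes against the actual range of the time variable. A hedged sketch, not the checker's real implementation:
from datetime import timezone
import dateutil.parser
from netCDF4 import num2date

def check_time_extents_sketch(ds, tolerance_seconds=1):
    # time_coverage_start/end should match the first/last time values.
    tvar = ds.variables['time']
    t_min = num2date(tvar[:].min(), tvar.units,
                     only_use_cftime_datetimes=False, only_use_python_datetimes=True)
    t_max = num2date(tvar[:].max(), tvar.units,
                     only_use_cftime_datetimes=False, only_use_python_datetimes=True)

    def parse_utc(s):
        dt = dateutil.parser.parse(s)
        if dt.tzinfo is not None:
            # num2date returns naive UTC datetimes; normalise to match.
            dt = dt.astimezone(timezone.utc).replace(tzinfo=None)
        return dt

    start, end = parse_utc(ds.time_coverage_start), parse_utc(ds.time_coverage_end)
    return (abs((start - t_min).total_seconds()) <= tolerance_seconds and
            abs((end - t_max).total_seconds()) <= tolerance_seconds)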
Example 2: makenetcdf_
# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import time_coverage_start [as alias]
#......... some of the code is omitted here .........
if fields[4] == "":
sals[i, 0] = -9999
else:
sals[i, 0] = fields[4]
if fields[5] == "":
fco2s[i, 0] = -9999
else:
fco2s[i, 0] = fields[5]
if len(fields[6]) == 0:
fco2qcs[i, 0] = -128
else:
fco2qcs[i, 0] = makeqcvalue_(int(fields[6]))
depthvar[:,:] = depths
positionvar[:,:] = positions
sstvar[:,:] = temps
sssvar[:,:] = sals
fco2var[:,:] = fco2s
fco2qcvar[:,:] = fco2qcs
depthdmvar[:,:] = dms
sstdmvar[:,:] = dms
sssdmvar[:,:] = dms
fco2dmvar[:,:] = dms
# Global attributes
nc.id = filenameroot
nc.data_type = "OceanSITES trajectory data"
nc.netcdf_version = "netCDF-4 classic model"
nc.format_version = "1.2"
nc.Conventions = "CF-1.6 OceanSITES-Manual-1.2 Copernicus-InSituTAC-SRD-1.3 "\
+ "Copernicus-InSituTAC-ParametersList-3.1.0"
nc.cdm_data_type = "Trajectory"
nc.data_mode = "R"
nc.area = "Global Ocean"
nc.geospatial_lat_min = str(minlat)
nc.geospatial_lat_max = str(maxlat)
nc.geospatial_lon_min = str(minlon)
nc.geospatial_lon_max = str(maxlon)
nc.geospatial_vertical_min = "5.00"
nc.geospatial_vertical_max = "5.00"
nc.last_latitude_observation = lats[-1]
nc.last_longitude_observation = lons[-1]
nc.last_date_observation = endtime.strftime("%Y-%m-%dT%H:%M:%SZ")
nc.time_coverage_start = starttime.strftime("%Y-%m-%dT%H:%M:%SZ")
nc.time_coverage_end = endtime.strftime("%Y-%m-%dT%H:%M:%SZ")
#datasetdate = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ")
#nc.date_update = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ")
#nc.history = datasetdate + " : Creation"
nc.update_interval = "daily"
nc.data_assembly_center = "BERGEN"
nc.institution = "University of Bergen / Geophysical Institute"
nc.institution_edmo_code = "4595"
nc.institution_references = " "
nc.contact = "[email protected]"
nc.title = "Global Ocean - In Situ near-real time carbon observation"
nc.author = "cmems-service"
nc.naming_authority = "Copernicus"
nc.platform_code = getplatformcallsign_(platform_code)
nc.site_code = getplatformcallsign_(platform_code)
# For buoys -> Mooring observation.
platform_category_code = getplatformcategorycode_(platform_code)
nc.platform_name = getplatformname_(platform_code)
nc.source_platform_category_code = platform_category_code
nc.source = PLATFORM_CODES[platform_category_code]
nc.quality_control_indicator = "6" # "Not used"
nc.quality_index = "0"
nc.comment = " "
nc.summary = " "
nc.reference = "http://marine.copernicus.eu/, https://www.icos-cp.eu/"
nc.citation = "These data were collected and made freely available by the " \
+ "Copernicus project and the programs that contribute to it."
nc.distribution_statement = "These data follow Copernicus standards; they " \
+ "are public and free of charge. User assumes all risk for use of data. " \
+ "User must display citation in any publication or product using data. " \
+ "User must contact PI prior to any commercial use of data."
# Write the netCDF
nc.close()
# Read the netCDF file into memory
with open(ncpath, "rb") as ncfile:
ncbytes = ncfile.read()
# Delete the temp netCDF file
os.remove(ncpath)
return [filenameroot, ncbytes]
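The byte string returned above can be handed straight back to netCDF4 without touching the filesystem again, via the memory keyword (available in netCDF4-python builds with in-memory I/O support); a short usage sketch:
from netCDF4 import Dataset

# filenameroot, ncbytes as returned by makenetcdf_ above
nc = Dataset(filenameroot, mode='r', memory=ncbytes)
print(nc.time_coverage_start, nc.time_coverage_end)
nc.close()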
Example 3: str
# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import time_coverage_start [as alias]
y_rho = rootgrp.createVariable("y_rho","f8",("eta_rho","xi_rho",))
x_u = rootgrp.createVariable("x_u","f8",("eta_u", "xi_u",))
y_u = rootgrp.createVariable("y_u","f8",("eta_u", "xi_u",))
x_v = rootgrp.createVariable("x_v","f8",("eta_v", "xi_v",))
y_v = rootgrp.createVariable("y_v","f8",("eta_v", "xi_v",))
#angle = rootgrp.createVariable("angle","f8",("lat_rho","lon_rho",))
##lon_vert = rootgrp.createVariable("x_vert","f8",("lat","lon"))
##lat_vert = rootgrp.createVariable("y_vert","f8",("lat","lon"))
zp = rootgrp.createVariable("zp", "f8",("eta_rho","xi_rho"))
z = rootgrp.createVariable("z", "f8",("ocean_time","s_rho","eta_rho","xi_rho",),fill_value = 9.999999933815813e+36)
import time as t
# global
rootgrp.description = "Mikes Globcurrent subset + Grid Data"
rootgrp.history = "Created " + t.ctime(t.time())
rootgrp.time_coverage_start = "1-Jan-2008"
rootgrp.time_coverage_end = "31-Aug-2008"
rootgrp.spatialResolution = str(float(ds.spatial_resolution)) #RESOLUTION OF GLOBCURRENT!
# by variable
# longitudes
lon_v.units = "degrees_east"
lon_v.long_name = "longitude of V-points"
lon_v.standard_name = "longitude"
lon_v.field = "lon_v, scalar"
lon_u.units = "degrees_east"
lon_u.long_name = "longitude of U-points"
lon_u.standard_name = "longitude"
lon_u.field = "lon_u, scalar"
# latitudes
lat_v.units = "degrees_north"
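A side note on Example 3: "1-Jan-2008" is not the ISO 8601 form that ACDD recommends for time_coverage_start. A small sketch of normalising such strings (assuming day-first dates and English month abbreviations):
from datetime import datetime

raw = "1-Jan-2008"
iso = datetime.strptime(raw, "%d-%b-%Y").strftime("%Y-%m-%dT%H:%M:%SZ")
print(iso)   # 2008-01-01T00:00:00Z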
Example 4: create_mhl_sst_ncfile
# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import time_coverage_start [as alias]
def create_mhl_sst_ncfile(txtfile, site_code_short, data,
time, dtime, spatial_data):
"""
create NetCDF file for MHL Wave data
"""
site_code = site_list[site_code_short][0]
netcdf_filename = create_netcdf_filename(site_code, data, dtime)
netcdf_filepath = os.path.join(
output_folder, "%s.nc") % netcdf_filename
ncfile = Dataset(netcdf_filepath, "w", format="NETCDF4")
# generate site and deployment specific attributes
ncfile.title = ("IMOS - ANMN New South Wales(NSW) %s "
"Sea water temperature (%s) -"
"Deployment No. %s %s to %s") % (
site_list[site_code_short][1], site_code,
spatial_data[0], min(dtime).strftime("%d-%m-%Y"),
max(dtime).strftime("%d-%m-%Y"))
ncfile.institution = 'Manly Hydraulics Laboratory'
ncfile.keywords = ('Oceans | Ocean temperature |'
'Sea Surface Temperature')
ncfile.principal_investigator = 'Mark Kulmar'
ncfile.cdm_data_type = 'Station'
ncfile.platform_code = site_code
abstract_default = ("The sea water temperature is measured by a thermistor mounted in the "
"buoy hull approximately 400 mm below the water "
"surface. The thermistor has a resolution of 0.05 "
"Celsius and an accuracy of 0.2 Celsius. The "
"measurements are transmitted to a shore station "
"where it is stored on a PC before routine transfer "
"to Manly Hydraulics Laboratory via email.")
if site_code_short in ['COF', 'CRH', 'EDE', 'PTK']:
abstract_specific = ("This dataset contains sea water temperature "
"data collected by a wave monitoring buoy moored off %s. ") % site_list[site_code_short][1]
else:
abstract_specific = ("This dataset contains sea water temperature "
"data collected by a wave monitoring buoy moored off %s "
"approximately %s kilometres from the coastline. ") % (
site_list[site_code_short][1], site_list[site_code_short][2])
ncfile.abstract = abstract_specific + abstract_default
ncfile.comment = ("The sea water temperature data (SST) is routinely quality controlled (usually twice per week) "
"using a quality control program developed by Manly Hydraulics Laboratory. The SST data gathered "
"by the buoy is regularly compared to the latest available satellite derived sea SST images available "
"from the Bluelink ocean forecasting web pages to ensure the integrity of the dataset. Erroneous SST "
"records are removed and good quality data is flagged as \'Quality Controlled\' in the "
"Manly Hydraulics Laboratory SST database.")
ncfile.sourceFilename = os.path.basename(txtfile)
ncfile.date_created = datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ")
ncfile.time_coverage_start = min(dtime).strftime("%Y-%m-%dT%H:%M:%SZ")
ncfile.time_coverage_end = max(dtime).strftime("%Y-%m-%dT%H:%M:%SZ")
ncfile.geospatial_lat_min = spatial_data[1]
ncfile.geospatial_lat_max = spatial_data[1]
ncfile.geospatial_lon_min = spatial_data[2]
ncfile.geospatial_lon_max = spatial_data[2]
ncfile.geospatial_vertical_max = 0.
ncfile.geospatial_vertical_min = 0.
ncfile.deployment_number = str(spatial_data[0])
# add dimension and variables
ncfile.createDimension('TIME', len(time))
TIME = ncfile.createVariable('TIME', "d", 'TIME')
TIMESERIES = ncfile.createVariable('TIMESERIES', "i")
LATITUDE = ncfile.createVariable(
'LATITUDE', "d", fill_value=99999.)
LONGITUDE = ncfile.createVariable(
'LONGITUDE', "d", fill_value=99999.)
TEMP = ncfile.createVariable('TEMP', "f", 'TIME', fill_value=99999.)
# add global attributes and variable attributes stored in config files
config_file = os.path.join(os.getcwd(), 'global_att_sst.att')
generate_netcdf_att(ncfile, config_file,
conf_file_point_of_truth=False)
# replace nans with fillvalue in dataframe
data = data.fillna(value=float(99999.))
TIME[:] = time
TIMESERIES[:] = 1
LATITUDE[:] = spatial_data[1]
LONGITUDE[:] = spatial_data[2]
TEMP[:] = data['SEA_TEMP'].values
ncfile.close()
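create_mhl_sst_ncfile receives both numeric times (time) and datetime objects (dtime); the conversion between them is not shown. A sketch of how a caller might derive one from the other with netCDF4.date2num, assuming the IMOS convention of days since 1950-01-01 UTC (the real units come from the global_att_sst.att config, which is not shown):
from datetime import datetime
from netCDF4 import date2num

dtime = [datetime(2016, 1, 2, 0, 0), datetime(2016, 1, 2, 1, 0)]   # hypothetical
time = date2num(dtime, 'days since 1950-01-01 00:00:00 UTC')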
Example 5: create_pigment_tss_nc
# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import time_coverage_start [as alias]
#......... some of the code is omitted here .........
var_station_idx = output_netcdf_obj.createVariable("station_index", "i4", "profile")
var_profile = output_netcdf_obj.createVariable("profile", "i4", "profile")
var_rowsize = output_netcdf_obj.createVariable("row_size", "i4", "profile")
var_depth = output_netcdf_obj.createVariable("DEPTH", "f4", "obs", fill_value=get_imos_parameter_info('DEPTH', '_FillValue'))
var = 'DEPTH'
if metadata['varatts']['Depth']['Comments'] != '' and metadata['varatts']['Depth']['Comments'] != 'positive down':
setattr(output_netcdf_obj[var], 'comments', metadata['varatts']['Depth']['Comments'].replace('positive down', ''))
# creation of rest of variables
var_to_dispose = ['Latitude', 'Longitude', 'Depth', 'Time', 'Station_Code']
for var in data.columns:
if var not in var_to_dispose:
if metadata['varatts'][var]['Fill value'] == '':
fillvalue = -999
else:
fillvalue = metadata['varatts'][var]['Fill value']
output_netcdf_obj.createVariable(var, "d", "obs", fill_value=fillvalue)
if metadata['varatts'][var]['IMOS long_name'] != '':
setattr(output_netcdf_obj[var], 'long_name', metadata['varatts'][var]['IMOS long_name'])
if metadata['varatts'][var]['Units'] != '':
setattr(output_netcdf_obj[var], 'units', metadata['varatts'][var]['Units'])
if metadata['varatts'][var]['Comments'] != '':
setattr(output_netcdf_obj[var], 'comments', metadata['varatts'][var]['Comments'])
# SPM is wrongly set as a standard_name in the original xls files
if 'SPM' not in var:
if metadata['varatts'][var]['CF standard_name'] != '':
setattr(output_netcdf_obj[var], 'standard_name', metadata['varatts'][var]['CF standard_name'])
if 'Sample_Number' in var:
setattr(output_netcdf_obj[var], 'units', 1)
if np.dtype(data[var]) == 'O':
os.remove(netcdf_filepath)
_error('Incorrect values for variable \"%s\"' % var)
output_netcdf_obj[var][:] = np.array(data[var].values).astype(np.double)
# Contiguous ragged array representation of Stations netcdf 1.5
# add gatts and variable attributes as stored in config files
conf_file_generic = os.path.join(os.path.dirname(__file__), 'generate_nc_file_att')
generate_netcdf_att(output_netcdf_obj, conf_file_generic, conf_file_point_of_truth=True)
# lat lon depth
_, idx_station_uniq = np.unique(data.Station_Code, return_index=True)
idx_station_uniq.sort()
var_lat[:] = data.Latitude.values[idx_station_uniq].astype(np.float)
var_lon[:] = data.Longitude.values[idx_station_uniq].astype(np.float)
if np.dtype(data.Depth) == 'O':
try:
var_depth[:] = data.Depth.values.astype(np.float)
except ValueError:
os.remove(netcdf_filepath)
_error('Incorrect depth value')
else:
var_depth[:] = data.Depth.values.astype(np.float)
var_depth.positive = 'down'
# time
_, idx_time_station_uniq = np.unique(time_station_arr, return_index=True)
idx_time_station_uniq.sort()
time_values = (data.index[idx_time_station_uniq]).to_pydatetime()
time_val_dateobj = date2num(time_values, output_netcdf_obj['TIME'].units, output_netcdf_obj['TIME'].calendar)
var_time[:] = time_val_dateobj.astype(np.double)
# station
var_station_name[:] = stringtochar(np.array(data.Station_Code.values[idx_station_uniq], 'S50'))
# compute number of observations per profile
if len_prof == 1:
var_rowsize[:] = data.shape[0]
else:
n_obs_per_prof = []
for i in range(len_prof - 1):
n_obs_per_prof.append(idx_time_station_uniq[i + 1] - idx_time_station_uniq[i])
n_obs_per_prof.append(len(data.index.values) - idx_time_station_uniq[-1])
var_rowsize[:] = n_obs_per_prof
# compute association between profile number and station name
# which station this profile is for
aa = np.array(data.Station_Code)[idx_station_uniq].tolist()
bb = np.array(data.Station_Code)[idx_time_station_uniq].tolist()
var_station_idx[:] = [aa.index(b) + 1 for b in bb]
# profile
var_profile[:] = range(1, len_prof + 1)
output_netcdf_obj.geospatial_vertical_min = output_netcdf_obj['DEPTH'][:].min()
output_netcdf_obj.geospatial_vertical_max = output_netcdf_obj['DEPTH'][:].max()
output_netcdf_obj.geospatial_lat_min = output_netcdf_obj['LATITUDE'][:].min()
output_netcdf_obj.geospatial_lat_max = output_netcdf_obj['LATITUDE'][:].max()
output_netcdf_obj.geospatial_lon_min = output_netcdf_obj['LONGITUDE'][:].min()
output_netcdf_obj.geospatial_lon_max = output_netcdf_obj['LONGITUDE'][:].max()
output_netcdf_obj.time_coverage_start = min(time_values).strftime('%Y-%m-%dT%H:%M:%SZ')
output_netcdf_obj.time_coverage_end = max(time_values).strftime('%Y-%m-%dT%H:%M:%SZ')
output_netcdf_obj.close()
return netcdf_filepath
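The row_size bookkeeping above is the CF contiguous ragged array representation: one observation count per profile. Since the observations are already grouped by profile, the loop can also be written as a single numpy.diff over the profile start offsets; a sketch with hypothetical numbers:
import numpy as np

starts = np.array([0, 4, 9])      # profile start offsets (idx_time_station_uniq)
n_obs_total = 12                  # length of the obs dimension
row_size = np.diff(np.append(starts, n_obs_total))
print(row_size)                   # [4 5 3] -> observations per profile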
Example 6: create_absorption_nc
# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import time_coverage_start [as alias]
#......... some of the code is omitted here .........
output_netcdf_obj.createDimension('name_strlen', 50)
output_netcdf_obj.createDimension('wavelength', data_df.shape[0])
# a profile is defined by a time/station combo. Two profiles at the same time
# but at different locations can exist. In order to find the unique
# profiles, the unique values of a string array of 'time-station' are counted
time_station_arr = ['%s_%s' % (a, b) for a, b in zip(data_dict['Dates'], data_dict['Station_Code'])]
len_prof = len(np.unique(time_station_arr))
output_netcdf_obj.createDimension("profile", len_prof)
var_time = output_netcdf_obj.createVariable("TIME", "d", "profile", fill_value=get_imos_parameter_info('TIME', '_FillValue'))
var_lat = output_netcdf_obj.createVariable("LATITUDE", "f", "station", fill_value=get_imos_parameter_info('LATITUDE', '_FillValue'))
var_lon = output_netcdf_obj.createVariable("LONGITUDE", "f", "station", fill_value=get_imos_parameter_info('LONGITUDE', '_FillValue'))
var_station_name = output_netcdf_obj.createVariable("station_name", "S1", (u'station', u'name_strlen'))
var_station_idx = output_netcdf_obj.createVariable("station_index", "i4", "profile")
var_profile = output_netcdf_obj.createVariable("profile", "i4", "profile")
var_rowsize = output_netcdf_obj.createVariable("row_size", "i4", "profile")
var_depth = output_netcdf_obj.createVariable("DEPTH", "f", "obs", fill_value=get_imos_parameter_info('DEPTH', '_FillValue'))
var_wavelength = output_netcdf_obj.createVariable("wavelength", "f", "wavelength")
var = data_dict['main_var_name'][0]
output_netcdf_obj.createVariable(var, "d", ("obs", "wavelength"), fill_value=metadata['varatts_col'][var]['Fill value'])
if metadata['varatts_col'][var]['IMOS long_name'] != '':
setattr(output_netcdf_obj[var], 'long_name', metadata['varatts_col'][var]['IMOS long_name'])
if metadata['varatts_col'][var]['Units'] != '':
setattr(output_netcdf_obj[var], 'units', metadata['varatts_col'][var]['Units'])
if metadata['varatts_col'][var]['Comments'] != '':
setattr(output_netcdf_obj[var], 'comments', metadata['varatts_col'][var]['Comments'])
if metadata['varatts_col'][var]['CF standard_name'] != '':
setattr(output_netcdf_obj[var], 'standard_name', metadata['varatts_col'][var]['CF standard_name'])
data_val = data_df.transpose()
output_netcdf_obj[var][:] = np.array(data_val.values)
# Contiguous ragged array representation of Stations netcdf 1.5
# add gatts and variable attributes as stored in config files
conf_file_generic = os.path.join(os.path.dirname(__file__), 'generate_nc_file_att')
generate_netcdf_att(output_netcdf_obj, conf_file_generic, conf_file_point_of_truth=True)
# lat lon depth
_, idx_station_uniq = np.unique(data_dict['Station_Code'], return_index=True)
idx_station_uniq.sort()
var_lat[:] = np.array(data_dict['Latitude'])[idx_station_uniq]
var_lon[:] = np.array(data_dict['Longitude'])[idx_station_uniq]
var_depth[:] = data_dict['Depth']
var_depth.positive = 'down'
# time
_, idx_time_station_uniq = np.unique(time_station_arr, return_index=True)
idx_time_station_uniq.sort()
time_values = (data_dict['Dates'][idx_time_station_uniq]).to_pydatetime()
time_val_dateobj = date2num(time_values, output_netcdf_obj['TIME'].units, output_netcdf_obj['TIME'].calendar)
var_time[:] = time_val_dateobj
# wavelength
var = 'Wavelength'
var_wavelength[:] = data_dict['Wavelength']
if metadata['varatts_col'][var]['IMOS long_name'] != '':
setattr(var_wavelength, 'long_name', metadata['varatts_col'][var]['IMOS long_name'])
if metadata['varatts_col'][var]['Units'] != '':
setattr(var_wavelength, 'units', metadata['varatts_col'][var]['Units'])
if metadata['varatts_col'][var]['Comments'] != '':
setattr(var_wavelength, 'comments', metadata['varatts_col'][var]['Comments'])
if metadata['varatts_col'][var]['CF standard_name'] != '':
setattr(var_wavelength, 'standard_name', metadata['varatts_col'][var]['CF standard_name'])
# stations
var_station_name[:] = stringtochar(np.array(data_dict['Station_Code'], 'S50')[np.sort(idx_station_uniq)])
# compute number of observations per profile
if len_prof == 1:
var_rowsize[:] = data_df.shape[1]
else:
n_obs_per_prof = []
for i in range(len_prof - 1):
n_obs_per_prof.append(idx_time_station_uniq[i + 1] - idx_time_station_uniq[i])
n_obs_per_prof.append(data_df.shape[1] - idx_time_station_uniq[-1])
var_rowsize[:] = n_obs_per_prof
# compute association between profile number and station name
# which station this profile is for
aa = np.array(data_dict['Station_Code'])[idx_station_uniq].tolist()
bb = np.array(data_dict['Station_Code'])[idx_time_station_uniq].tolist()
var_station_idx[:] = [aa.index(b) + 1 for b in bb]
# profile
var_profile[:] = range(1, len_prof + 1)
output_netcdf_obj.geospatial_vertical_min = output_netcdf_obj['DEPTH'][:].min()
output_netcdf_obj.geospatial_vertical_max = output_netcdf_obj['DEPTH'][:].max()
output_netcdf_obj.geospatial_lat_min = output_netcdf_obj['LATITUDE'][:].min()
output_netcdf_obj.geospatial_lat_max = output_netcdf_obj['LATITUDE'][:].max()
output_netcdf_obj.geospatial_lon_min = output_netcdf_obj['LONGITUDE'][:].min()
output_netcdf_obj.geospatial_lon_max = output_netcdf_obj['LONGITUDE'][:].max()
output_netcdf_obj.time_coverage_start = min(time_values).strftime('%Y-%m-%dT%H:%M:%SZ')
output_netcdf_obj.time_coverage_end = max(time_values).strftime('%Y-%m-%dT%H:%M:%SZ')
output_netcdf_obj.close()
return netcdf_filepath
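The unique-profile trick shared by Examples 5 and 6 (a profile is a time/station pair, encoded as a single string key) is worth isolating; a minimal sketch with hypothetical values:
import numpy as np

dates = ['2016-01-02', '2016-01-02', '2016-01-03']
stations = ['NSI', 'PHB', 'NSI']
keys = ['%s_%s' % (d, s) for d, s in zip(dates, stations)]   # time-station key
_, first_idx = np.unique(keys, return_index=True)
first_idx.sort()                 # keep profiles in file order
print(len(first_idx))            # 3 distinct profiles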
Example 7: datetime
# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import time_coverage_start [as alias]
station_list = ["S001", "S002", "S003", "S004", "S005", "S006", "S007", "S008","S009", "S010", "S011", "S012"]
lat_list = [-150.0, -120.0, -90.0, -60.0, -30.0, 0.0, 30.0, 60.0, 90.0, 120.0, 150.0, 180.0]
lon_list = [-150.0, -120.0, -90.0, -60.0, -30.0, 0.0, 30.0, 60.0, 90.0, 120.0, 150.0, 180.0]
data_values = [17.0, 26.0, 58.0, 37.0, 45.0, 66.0, 27.0, -2.0, 7.0, 9.0, 23.0, 14.0]
# Main
try:
# global
root_grp.description = "Test"
root_grp.cdm_datatype = "Station"
root_grp.stationDimension = "station_nm"
root_grp.featureType = "TimeSeries"
root_grp.conventions = "CF-1.6"
baseDate = datetime(2001,3,1)
root_grp.time_coverage_start = "2001-03-01 12:00:00"
# dimensions
root_grp.createDimension('time', None)
root_grp.createDimension('station_nm', None) # stations unlimited = http://cf-pcmdi.llnl.gov/documents/cf-conventions/1.6/cf-conventions.html#idp8314368
# variables
times = root_grp.createVariable('time', 'f8', ('time',))
times.units = 'hours since 2001-03-01 12:00:00'
#times.units = 'hours since 0001-01-01 00:00:00.0'
times.calendar = 'gregorian'
times.standard_name = 'time'
data = root_grp.createVariable('data', 'f4', ('station_nm','time',))
data.coordinates='time lat lon'
data.long_name = "Streamflow"
data.units = "ft^3 / sec"
stations = root_grp.createVariable('stations', 'c', ('station_nm','time',))
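The snippet is cut off before any values are written; a sketch of filling the time coordinate consistently with the units and calendar declared above, using netCDF4.date2num and hypothetical hourly observations:
from datetime import timedelta
from netCDF4 import date2num

obs_dates = [baseDate + timedelta(hours=h) for h in range(12)]   # hypothetical
times[:] = date2num(obs_dates, units=times.units, calendar=times.calendar)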
Example 8: modify_aims_netcdf
# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import time_coverage_start [as alias]
def modify_aims_netcdf(netcdf_file_path, channel_id_info):
""" Modify the downloaded netCDF file so it passes both CF and IMOS checker
input:
netcdf_file_path(str) : path of netcdf file to modify
channel_id_info(dict) : information from the xml for the channel
"""
imos_env_path = os.path.join(os.environ.get('DATA_SERVICES_DIR'), 'lib', 'netcdf', 'imos_env')
if not os.path.isfile(imos_env_path):
logger = logging_aims()
logger.error('%s is not accessible' % imos_env_path)
close_logger(logger)
sys.exit(1)
dotenv.load_dotenv(imos_env_path)
netcdf_file_obj = Dataset(netcdf_file_path, 'a', format='NETCDF4')
netcdf_file_obj.naming_authority = 'IMOS'
# add gatts to NetCDF
netcdf_file_obj.aims_channel_id = int(channel_id_info['channel_id'])
if not (channel_id_info['metadata_uuid'] == 'Not Available'):
netcdf_file_obj.metadata_uuid = channel_id_info['metadata_uuid']
if not netcdf_file_obj.instrument_serial_number:
del(netcdf_file_obj.instrument_serial_number)
# add CF gatts, values stored in lib/netcdf/imos_env
netcdf_file_obj.Conventions = os.environ.get('CONVENTIONS')
netcdf_file_obj.data_centre_email = os.environ.get('DATA_CENTRE_EMAIL')
netcdf_file_obj.data_centre = os.environ.get('DATA_CENTRE')
netcdf_file_obj.project = os.environ.get('PROJECT')
netcdf_file_obj.acknowledgement = os.environ.get('ACKNOWLEDGEMENT')
netcdf_file_obj.distribution_statement = os.environ.get('DISTRIBUTION_STATEMENT')
netcdf_file_obj.date_created = strftime("%Y-%m-%dT%H:%M:%SZ", gmtime())
netcdf_file_obj.quality_control_set = 1
imos_qc_convention = 'IMOS standard set using the IODE flags'
netcdf_file_obj.author = 'laurent besnard'
netcdf_file_obj.author_email = '[email protected]'
rename_netcdf_attribute(netcdf_file_obj, 'geospatial_LAT_max', 'geospatial_lat_max')
rename_netcdf_attribute(netcdf_file_obj, 'geospatial_LAT_min', 'geospatial_lat_min')
rename_netcdf_attribute(netcdf_file_obj, 'geospatial_LON_max', 'geospatial_lon_max')
rename_netcdf_attribute(netcdf_file_obj, 'geospatial_LON_min', 'geospatial_lon_min')
# variables modifications
time = netcdf_file_obj.variables['time']
time.calendar = 'gregorian'
time.axis = 'T'
time.valid_min = 0.0
time.valid_max = 9999999999.0
netcdf_file_obj.renameDimension('time', 'TIME')
netcdf_file_obj.renameVariable('time', 'TIME')
netcdf_file_obj.time_coverage_start = num2date(time[:], time.units, time.calendar).min().strftime('%Y-%m-%dT%H:%M:%SZ')
netcdf_file_obj.time_coverage_end = num2date(time[:], time.units, time.calendar).max().strftime('%Y-%m-%dT%H:%M:%SZ')
# latitude longitude
latitude = netcdf_file_obj.variables['LATITUDE']
latitude.axis = 'Y'
latitude.valid_min = -90.0
latitude.valid_max = 90.0
latitude.reference_datum = 'geographical coordinates, WGS84 projection'
latitude.standard_name = 'latitude'
latitude.long_name = 'latitude'
longitude = netcdf_file_obj.variables['LONGITUDE']
longitude.axis = 'X'
longitude.valid_min = -180.0
longitude.valid_max = 180.0
longitude.reference_datum = 'geographical coordinates, WGS84 projection'
longitude.standard_name = 'longitude'
longitude.long_name = 'longitude'
# handle masked arrays
lon_array = longitude[:]
lat_array = latitude[:]
if type(lon_array) != numpy.ma.core.MaskedArray or len(lon_array) == 1:
netcdf_file_obj.geospatial_lon_min = min(lon_array)
netcdf_file_obj.geospatial_lon_max = max(lon_array)
else:
netcdf_file_obj.geospatial_lon_min = numpy.ma.MaskedArray.min(lon_array)
netcdf_file_obj.geospatial_lon_max = numpy.ma.MaskedArray.max(lon_array)
if type(lat_array) != numpy.ma.core.MaskedArray or len(lat_array) == 1:
netcdf_file_obj.geospatial_lat_min = min(lat_array)
netcdf_file_obj.geospatial_lat_max = max(lat_array)
else:
netcdf_file_obj.geospatial_lat_min = numpy.ma.MaskedArray.min(lat_array)
netcdf_file_obj.geospatial_lat_max = numpy.ma.MaskedArray.max(lat_array)
# Change variable name, standard name, long name, units ....
if 'Seawater_Intake_Temperature' in netcdf_file_obj.variables.keys():
var = netcdf_file_obj.variables['Seawater_Intake_Temperature']
var.units = 'Celsius'
netcdf_file_obj.renameVariable('Seawater_Intake_Temperature', 'TEMP')
netcdf_file_obj.renameVariable('Seawater_Intake_Temperature_quality_control', 'TEMP_quality_control')
var.ancillary_variables = 'TEMP_quality_control'
#......... some of the code is omitted here .........
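The masked/unmasked branching in the latitude/longitude handling above can be collapsed, since numpy.ma reductions ignore masked entries and also accept plain ndarrays; a sketch:
import numpy

def geospatial_bounds(values):
    # One code path for MaskedArray and plain ndarray inputs alike.
    return float(numpy.ma.min(values)), float(numpy.ma.max(values))

# e.g. netcdf_file_obj.geospatial_lon_min, netcdf_file_obj.geospatial_lon_max = \
#     geospatial_bounds(longitude[:])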
Example 9: create_burst_average_netcdf
# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import time_coverage_start [as alias]
#......... some of the code is omitted here .........
output_var_num_obs = output_netcdf_obj.createVariable('%s_num_obs' % var, "i4", ("TIME",))
# set up 'bonus' var att from original FV01 file into FV02
input_var_object = input_netcdf_obj[var]
input_var_list_att = input_var_object.__dict__.keys()
var_att_disposable = ['name', 'long_name', \
'_FillValue', 'ancillary_variables', \
'ChunkSize', 'coordinates']
for var_att in [att for att in input_var_list_att if att not in var_att_disposable]:
setattr(output_netcdf_obj[var], var_att, getattr(input_netcdf_obj[var], var_att))
if var_att != 'comment':
setattr(output_var_min, var_att, getattr(input_netcdf_obj[var], var_att))
setattr(output_var_max, var_att, getattr(input_netcdf_obj[var], var_att))
setattr(output_var_sd, var_att, getattr(input_netcdf_obj[var], var_att))
# make sure the standard_deviation variable doesn't have a standard_name attr
if hasattr(output_var_sd, 'standard_name'):
delattr(output_var_sd, 'standard_name')
setattr(output_var_mean, 'coordinates', getattr(input_netcdf_obj[var], 'coordinates', ''))
setattr(output_var_mean, 'ancillary_variables', ('%s_num_obs %s_burst_sd %s_burst_min %s_burst_max' % (var, var, var, var)))
setattr(output_var_mean, 'cell_methods', 'TIME: mean')
setattr(output_var_min, 'cell_methods', 'TIME: minimum')
setattr(output_var_max, 'cell_methods', 'TIME: maximum')
setattr(output_var_sd, 'cell_methods', 'TIME: standard_deviation')
setattr(output_var_sd, 'long_name', 'Standard deviation of values in burst, after rejection of flagged data')
setattr(output_var_num_obs, 'long_name', 'Number of observations included in the averaging process')
setattr(output_var_min, 'long_name', 'Minimum data value in burst, after rejection of flagged data')
setattr(output_var_max, 'long_name', 'Maximum data value in burst, after rejection of flagged data')
setattr(output_var_mean, 'long_name', 'Mean of %s values in burst, after rejection of flagged data' % (getattr(input_netcdf_obj[var], 'standard_name',
getattr(input_netcdf_obj[var], 'long_name', ''))))
output_var_num_obs.units = "1"
var_units = getattr(input_netcdf_obj[var], 'units')
if var_units:
output_var_mean.units = var_units
output_var_min.units = var_units
output_var_max.units = var_units
output_var_sd.units = var_units
var_stdname = getattr(input_netcdf_obj[var], 'standard_name', '')
if var_stdname != '':
output_var_num_obs.standard_name = "%s number_of_observations" % var_stdname
# set up var values
output_var_mean[:] = np.ma.masked_invalid(burst_vars[var]['var_mean'])
output_var_min[:] = np.ma.masked_invalid(burst_vars[var]['var_min'])
output_var_max[:] = np.ma.masked_invalid(burst_vars[var]['var_max'])
output_var_sd[:] = np.ma.masked_invalid(burst_vars[var]['var_sd'])
output_var_num_obs[:] = np.ma.masked_invalid(burst_vars[var]['var_num_obs'])
# add gatts and variable attributes as stored in config files
conf_file_generic = os.path.join(os.path.dirname(__file__), 'generate_nc_file_att')
generate_netcdf_att(output_netcdf_obj, conf_file_generic, conf_file_point_of_truth=True)
# set up original varatts for the following dim, var
varnames = dimensionless_var
varnames.append('TIME')
for varname in varnames:
for varatt in input_netcdf_obj[varname].__dict__.keys():
output_netcdf_obj.variables[varname].setncattr(varatt, getattr(input_netcdf_obj[varname], varatt))
time_comment = '%s. Time stamp corresponds to the middle of the burst measurement.' % getattr(input_netcdf_obj['TIME'], 'comment', '')
output_netcdf_obj.variables['TIME'].comment = time_comment.lstrip('. ')
time_burst_val_dateobj = num2date(time_burst_vals, input_netcdf_obj['TIME'].units, input_netcdf_obj['TIME'].calendar)
output_netcdf_obj.time_coverage_start = time_burst_val_dateobj.min().strftime('%Y-%m-%dT%H:%M:%SZ')
output_netcdf_obj.time_coverage_end = time_burst_val_dateobj.max().strftime('%Y-%m-%dT%H:%M:%SZ')
# append original gatt to burst average gatt
gatt = 'comment'
if hasattr(input_netcdf_obj, gatt):
setattr(output_netcdf_obj, gatt, getattr(input_netcdf_obj, gatt))
gatt = 'history'
setattr(output_netcdf_obj, gatt, ('%s. %s' % (getattr(input_netcdf_obj, gatt, ''), 'Created %s' % time.ctime(time.time()))).lstrip('. '))
gatt = 'abstract'
setattr(output_netcdf_obj, gatt, ('%s. %s' % (getattr(output_netcdf_obj, gatt, ''), \
'Data from the bursts have been cleaned and averaged to create data products. This file is one such product.')).lstrip('. '))
# add burst keywords
gatt = 'keywords'
keywords_burst = 'AVERAGED, BINNED'
setattr(output_netcdf_obj, gatt, ('%s, %s' % (getattr(input_netcdf_obj, gatt, ''), keywords_burst)).lstrip(', '))
# add values to variables
output_netcdf_obj['TIME'][:] = np.ma.masked_invalid(time_burst_vals)
github_comment = 'Product created with %s' % get_git_revision_script_url(os.path.realpath(__file__))
output_netcdf_obj.lineage = ('%s. %s' % (getattr(output_netcdf_obj, 'lineage', ''), github_comment)).lstrip('. ')
output_netcdf_obj.close()
input_netcdf_obj.close()
shutil.move(output_netcdf_file_path, output_dir)
shutil.rmtree(tmp_netcdf_dir)
return os.path.join(output_dir, os.path.basename(output_netcdf_file_path))
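burst_vars arrives precomputed in this snippet. A hedged sketch of how the per-burst statistics it holds (var_mean, var_min, var_max, var_sd, var_num_obs) might be produced for one variable, assuming a burst-label array of the same length as the data:
import numpy as np

def burst_stats_sketch(values, burst_ids):
    stats = {'var_mean': [], 'var_min': [], 'var_max': [],
             'var_sd': [], 'var_num_obs': []}
    for b in np.unique(burst_ids):
        v = values[burst_ids == b]          # QC-accepted samples of one burst
        stats['var_mean'].append(v.mean())
        stats['var_min'].append(v.min())
        stats['var_max'].append(v.max())
        stats['var_sd'].append(v.std())
        stats['var_num_obs'].append(v.size)
    return stats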
Example 10: create_mhl_wave_ncfile
# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import time_coverage_start [as alias]
def create_mhl_wave_ncfile(txtfile, site_code_short, data,
time, dtime, spatial_data):
"""
create NetCDF file for MHL Wave data
"""
site_code = site_list[site_code_short][0]
netcdf_filename = create_netcdf_filename(site_code, data, dtime)
netcdf_filepath = os.path.join(
output_folder, "%s.nc") % netcdf_filename
ncfile = Dataset(netcdf_filepath, "w", format="NETCDF4")
# add IMOS1.4 global attributes and variable attributes stored in config
# files
config_file = os.path.join(os.getcwd(),'mhl_wave_library', 'global_att_wave.att')
generate_netcdf_att(ncfile, config_file,
conf_file_point_of_truth=False)
# Additional attributes either retrieved from the original netcdf file
# (if it exists) or defined below
original_netcdf_file_path = os.path.join(
input_folder, "%s.nc") % netcdf_filename
if os.path.exists(original_netcdf_file_path):
# get glob attributes from original netcdf files.
parse_nc_attribute(original_netcdf_file_path, ncfile)
else:
# generate site and deployment specific attributes
ncfile.title = ("IMOS - ANMN New South Wales(NSW) %s"
"Offshore Wave Data (% s) -"
"Deployment No. %s %s to %s") % (
site_list[site_code_short][1], site_code,
spatial_data[0], min(dtime).strftime("%d-%m-%Y"),
max(dtime).strftime("%d-%m-%Y"))
ncfile.institution = 'Manly Hydraulics Laboratory'
ncfile.keywords = ('Oceans | Ocean Waves |'
'Significant Wave Height, Oceans | Ocean Waves'
'| Wave Period, Oceans | Ocean Waves |'
'Wave Spectra, Oceans | Ocean Waves |'
'Wave Speed / direction')
ncfile.principal_investigator = 'Mark Kulmar'
ncfile.cdm_data_type = 'Station'
ncfile.platform_code = site_code
ncfile.site_name = site_list[site_code_short][1]
if site_code in ['WAVEPOK', 'WAVECOH', 'WAVECRH', 'WAVEEDN']:
config_file = os.path.join(
os.getcwd(), 'common', 'abstract_WAVE_default.att')
elif site_code == 'WAVEBAB':
config_file = os.path.join(os.getcwd(),'common', 'abstract_WAVEBAB.att')
elif site_code == 'WAVEBYB':
config_file = os.path.join(os.getcwd(), 'common', 'abstract_WAVEBYB.att')
else: # WAVESYD
config_file = os.path.join(os.getcwd(), 'common', 'abstract_WAVESYD.att')
generate_netcdf_att(ncfile, config_file,
conf_file_point_of_truth=False)
ncfile.sourceFilename = os.path.basename(txtfile)
ncfile.date_created = datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ")
ncfile.time_coverage_start = min(dtime).strftime("%Y-%m-%dT%H:%M:%SZ")
ncfile.time_coverage_end = max(dtime).strftime("%Y-%m-%dT%H:%M:%SZ")
ncfile.geospatial_lat_min = spatial_data[1]
ncfile.geospatial_lat_max = spatial_data[1]
ncfile.geospatial_lon_min = spatial_data[2]
ncfile.geospatial_lon_max = spatial_data[2]
ncfile.geospatial_vertical_max = 0.
ncfile.geospatial_vertical_min = 0.
ncfile.deployment_number = str(spatial_data[0])
# add dimension and variables
ncfile.createDimension('TIME', len(time))
TIME = ncfile.createVariable('TIME', "d", 'TIME')
TIMESERIES = ncfile.createVariable('TIMESERIES', "i")
LATITUDE = ncfile.createVariable(
'LATITUDE', "d", fill_value=99999.)
LONGITUDE = ncfile.createVariable(
'LONGITUDE', "d", fill_value=99999.)
WHTH = ncfile.createVariable('WHTH', "f", 'TIME', fill_value=99999.)
WMSH = ncfile.createVariable('WMSH', "f", 'TIME', fill_value=99999.)
HRMS = ncfile.createVariable('HRMS', "f", 'TIME', fill_value=99999.)
WHTE = ncfile.createVariable('WHTE', "f", 'TIME', fill_value=99999.)
WMXH = ncfile.createVariable('WMXH', "f", 'TIME', fill_value=99999.)
TCREST = ncfile.createVariable('TCREST', "f", 'TIME', fill_value=99999.)
WPMH = ncfile.createVariable('WPMH', "f", 'TIME', fill_value=99999.)
WPTH = ncfile.createVariable('WPTH', "f", 'TIME', fill_value=99999.)
YRMS = ncfile.createVariable('YRMS', "f", 'TIME', fill_value=99999.)
WPPE = ncfile.createVariable('WPPE', "f", 'TIME', fill_value=99999.)
TP2 = ncfile.createVariable('TP2', "f", 'TIME', fill_value=99999.)
M0 = ncfile.createVariable('M0', "f", 'TIME', fill_value=99999.)
WPDI = ncfile.createVariable('WPDI', "f", 'TIME', fill_value=99999.)
# add global attributes and variable attributes stored in config files
config_file = os.path.join(os.getcwd(),'mhl_wave_library', 'global_att_wave.att')
generate_netcdf_att(ncfile, config_file,
conf_file_point_of_truth=True)
for nc_var in [WPTH, WPPE, WPMH, WPDI, WMXH, WMSH, WHTH, WHTE, TP2, TCREST]:
nc_var.valid_max = np.float32(nc_var.valid_max)
nc_var.valid_min = np.float32(nc_var.valid_min)
# replace nans with fillvalue in dataframe
#......... some of the code is omitted here .........
Example 11: change_dataformat
# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import time_coverage_start [as alias]
# Global attributes
rootgrp.data_type = 'EGO glider time-series data'
rootgrp.format_version = '1.0'
rootgrp.platform_code = '99999'
rootgrp.date_update = change_dataformat(rootgrp.date_modified) # should be converted from rootgrp.date_modified
rootgrp.data_mode = data_mode_dic[rootgrp.data_mode]
rootgrp.naming_authority = 'EGO'
rootgrp.id = outputfile.split('.')[0] # taken from file name... maybe something better to do
rootgrp.source = "Glider observation"
rootgrp.Conventions = "CF-1.4 EGO-1.0"
rootgrp.geospatial_lat_min = str(rootgrp.geospatial_lat_min)
rootgrp.geospatial_lat_max = str(rootgrp.geospatial_lat_max)
rootgrp.geospatial_lon_min = str(rootgrp.geospatial_lon_min)
rootgrp.geospatial_lon_max = str(rootgrp.geospatial_lon_max)
rootgrp.time_coverage_start = change_dataformat(rootgrp.time_coverage_start)
rootgrp.time_coverage_end = change_dataformat(rootgrp.time_coverage_end)
rootgrp.renameVariable('depth', 'DEPTH')
# Rename TIME variable and add attributes
rootgrp.renameVariable('time', 'TIME')
TIME = rootgrp.variables['TIME']
TIME.long_name = "Epoch time"
TIME.units = "seconds since 1970-01-01T00:00:00Z"
TIME.valid_min = "0." # problem to be mentioned
TIME.valid_max = "9000000000" # problem to be mentioned
TIME.QC_procedure = "1"
TIME.comment = " "
TIME.ancillary_variable = "TIME_QC"
TIME.sdn_parameter_urn = "SDN:P01::ELTMEP01"
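change_dataformat itself is not included in the snippet; a plausible sketch, assuming its job is to re-emit the source timestamps in the ISO 8601 form used for time_coverage_start and time_coverage_end above:
from dateutil import parser

def change_dataformat(datestring):
    # Hypothetical implementation: parse whatever timestamp format the
    # source file used and re-emit it as ISO 8601 UTC.
    return parser.parse(datestring).strftime('%Y-%m-%dT%H:%M:%SZ')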