This page collects typical usage examples of the netCDF4.Dataset.naming_authority attribute in Python. If you are unsure how Dataset.naming_authority is used in practice, the curated code examples below should help; you can also look further into usage examples of the containing class, netCDF4.Dataset.
The following presents 5 code examples involving Dataset.naming_authority, ordered roughly by popularity.
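Before the full examples, note that naming_authority is not called like a method: it is a netCDF global attribute, set by plain attribute assignment on an open Dataset and read back the same way. A minimal sketch (the file name and attribute values here are placeholders, not taken from the examples below):

from netCDF4 import Dataset

# Create a new netCDF file; 'example.nc' is a placeholder path.
nc = Dataset('example.nc', 'w', format='NETCDF4')

# Global attributes are simply assigned on the Dataset object.
nc.naming_authority = 'org.example'   # placeholder authority identifier
nc.title = 'naming_authority demo'

print(nc.naming_authority)            # attribute access reads it back
nc.close()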
Example 1: generate_nc
# Required import: from netCDF4 import Dataset
import calendar
from datetime import datetime

from netCDF4 import Dataset
# XLSParser is a project-specific helper; its import is not shown in the original snippet.


def generate_nc(parser_context):
    # Parse the source spreadsheet and pick out the requested worksheet.
    parser = XLSParser()
    with open(parser_context.filepath, 'r') as f:
        doc = f.read()
    info = parser.extract_worksheets(doc)
    nccl = info[parser_context.worksheet]
    #header_line = 3
    #columns = nccl[header_line]
    #data_range = (4, 66)
    data_rows = nccl[parser_context.data_range[0]:parser_context.data_range[1]]

    print('Generating', parser_context.output_file)
    nc = Dataset(parser_context.output_file, 'w')
    nc.createDimension('time', len(data_rows) * 12)

    # Global attributes
    nc.GDAL = "GDAL 1.9.2, released 2012/10/08"
    nc.history = "Created dynamically in IPython Notebook 2013-11-14"
    nc.title = nccl[0][0]
    nc.summary = nccl[1][0]
    nc.naming_authority = 'GLOS'
    nc.source = 'GLERL'
    nc.standard_name_vocabulary = "http://www.cgd.ucar.edu/cms/eaton/cf-metadata/standard_name.html"
    nc.project = 'GLOS'
    nc.Conventions = "CF-1.6"

    # Coordinate variable
    time = nc.createVariable('time', 'f8', ('time',))
    time.standard_name = 'time'
    time.units = 'seconds since 1970-01-01'
    time.long_name = 'Time'
    time.axis = 'T'

    # Data variable
    precip = nc.createVariable(parser_context.variable, 'f8', ('time',), fill_value=parser_context.fill_value)
    #precip.standard_name = 'precipitation_amount'
    precip.standard_name = parser_context.standard_name
    precip.units = parser_context.units

    # Each data row holds one year of monthly values; column 0 is the year.
    for i, row in enumerate(data_rows):
        for j in range(12):
            the_date = datetime(row[0], j + 1, 1)
            timestamp = calendar.timegm(the_date.utctimetuple())
            time[i * 12 + j] = timestamp
            try:
                value = float(row[j + 1])
            except (ValueError, TypeError):
                continue
            precip[i * 12 + j] = value

    nc.close()
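The snippet above only shows which fields it reads from parser_context (filepath, worksheet, data_range, output_file, variable, fill_value, standard_name, units), not how that object is built. A minimal, hypothetical way to call it is sketched below; the namedtuple, file names and metadata values are placeholders rather than part of the original code.

from collections import namedtuple

# Hypothetical stand-in for the real parser-context object; the field list is
# inferred from the attributes generate_nc accesses.
ParserContext = namedtuple('ParserContext', [
    'filepath', 'worksheet', 'data_range', 'output_file',
    'variable', 'fill_value', 'standard_name', 'units',
])

ctx = ParserContext(
    filepath='monthly_precip.xls',     # placeholder input spreadsheet
    worksheet='Precipitation',         # placeholder worksheet name
    data_range=(4, 66),                # matches the commented-out default above
    output_file='monthly_precip.nc',   # placeholder output path
    variable='precip',
    fill_value=-9999.0,
    standard_name='precipitation_amount',
    units='kg m-2',                    # canonical units for precipitation_amount
)

generate_nc(ctx)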
Example 2: makenetcdf_
# Required import: from netCDF4 import Dataset
#......... part of the code is omitted here .........
    # (These assignments run inside a loop over the parsed input records, whose
    #  header is part of the omitted code above; `fields` is the current record
    #  split into columns and `i` is its index. Empty columns become fill values.)
    if fields[4] == "":
        sals[i, 0] = -9999
    else:
        sals[i, 0] = fields[4]

    if fields[5] == "":
        fco2s[i, 0] = -9999
    else:
        fco2s[i, 0] = fields[5]

    if len(fields[6]) == 0:
        fco2qcs[i, 0] = -128
    else:
        fco2qcs[i, 0] = makeqcvalue_(int(fields[6]))

# Copy the assembled arrays into the netCDF variables.
depthvar[:, :] = depths
positionvar[:, :] = positions
sstvar[:, :] = temps
sssvar[:, :] = sals
fco2var[:, :] = fco2s
fco2qcvar[:, :] = fco2qcs
depthdmvar[:, :] = dms
sstdmvar[:, :] = dms
sssdmvar[:, :] = dms
fco2dmvar[:, :] = dms

# Global attributes
nc.id = filenameroot
nc.data_type = "OceanSITES trajectory data"
nc.netcdf_version = "netCDF-4 classic model"
nc.format_version = "1.2"
nc.Conventions = "CF-1.6 OceanSITES-Manual-1.2 Copernicus-InSituTAC-SRD-1.3 " \
    + "Copernicus-InSituTAC-ParametersList-3.1.0"
nc.cdm_data_type = "Trajectory"
nc.data_mode = "R"
nc.area = "Global Ocean"

nc.geospatial_lat_min = str(minlat)
nc.geospatial_lat_max = str(maxlat)
nc.geospatial_lon_min = str(minlon)
nc.geospatial_lon_max = str(maxlon)
nc.geospatial_vertical_min = "5.00"
nc.geospatial_vertical_max = "5.00"

nc.last_latitude_observation = lats[-1]
nc.last_longitude_observation = lons[-1]
nc.last_date_observation = endtime.strftime("%Y-%m-%dT%H:%M:%SZ")
nc.time_coverage_start = starttime.strftime("%Y-%m-%dT%H:%M:%SZ")
nc.time_coverage_end = endtime.strftime("%Y-%m-%dT%H:%M:%SZ")

#datasetdate = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ")
#nc.date_update = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ")
#nc.history = datasetdate + " : Creation"

nc.update_interval = "daily"
nc.data_assembly_center = "BERGEN"
nc.institution = "University of Bergen / Geophysical Institute"
nc.institution_edmo_code = "4595"
nc.institution_references = " "
nc.contact = "[email protected]"
nc.title = "Global Ocean - In Situ near-real time carbon observation"
nc.author = "cmems-service"
nc.naming_authority = "Copernicus"

nc.platform_code = getplatformcallsign_(platform_code)
nc.site_code = getplatformcallsign_(platform_code)

# For buoys -> Mooring observation.
platform_category_code = getplatformcategorycode_(platform_code)
nc.platform_name = getplatformname_(platform_code)
nc.source_platform_category_code = platform_category_code
nc.source = PLATFORM_CODES[platform_category_code]

nc.quality_control_indicator = "6"  # "Not used"
nc.quality_index = "0"
nc.comment = " "
nc.summary = " "
nc.reference = "http://marine.copernicus.eu/, https://www.icos-cp.eu/"
nc.citation = "These data were collected and made freely available by the " \
    + "Copernicus project and the programs that contribute to it."
nc.distribution_statement = "These data follow Copernicus standards; they " \
    + "are public and free of charge. User assumes all risk for use of data. " \
    + "User must display citation in any publication or product using data. " \
    + "User must contact PI prior to any commercial use of data."

# Write the netCDF
nc.close()

# Read the netCDF file back into memory
with open(ncpath, "rb") as ncfile:
    ncbytes = ncfile.read()

# Delete the temp netCDF file
os.remove(ncpath)

return [filenameroot, ncbytes]
Example 3: modify_aims_netcdf
# Required import: from netCDF4 import Dataset
import os
import sys
from time import gmtime, strftime

import dotenv
import numpy
from netCDF4 import Dataset, num2date

# logging_aims, close_logger and rename_netcdf_attribute are project-specific
# helpers; their imports are not shown in this snippet.


def modify_aims_netcdf(netcdf_file_path, channel_id_info):
    """ Modify the downloaded netCDF file so it passes both the CF and IMOS checkers.
    input:
        netcdf_file_path(str) : path of the netCDF file to modify
        channel_id_info(dict) : information from the XML for the channel
    """
    imos_env_path = os.path.join(os.environ.get('DATA_SERVICES_DIR'), 'lib', 'netcdf', 'imos_env')
    if not os.path.isfile(imos_env_path):
        logger = logging_aims()
        logger.error('%s is not accessible' % imos_env_path)
        close_logger(logger)
        sys.exit(1)

    dotenv.load_dotenv(imos_env_path)
    netcdf_file_obj = Dataset(netcdf_file_path, 'a', format='NETCDF4')
    netcdf_file_obj.naming_authority = 'IMOS'

    # add gatts to NetCDF
    netcdf_file_obj.aims_channel_id = int(channel_id_info['channel_id'])

    if channel_id_info['metadata_uuid'] != 'Not Available':
        netcdf_file_obj.metadata_uuid = channel_id_info['metadata_uuid']

    if not netcdf_file_obj.instrument_serial_number:
        del netcdf_file_obj.instrument_serial_number

    # add CF gatts, values stored in lib/netcdf/imos_env
    netcdf_file_obj.Conventions = os.environ.get('CONVENTIONS')
    netcdf_file_obj.data_centre_email = os.environ.get('DATA_CENTRE_EMAIL')
    netcdf_file_obj.data_centre = os.environ.get('DATA_CENTRE')
    netcdf_file_obj.project = os.environ.get('PROJECT')
    netcdf_file_obj.acknowledgement = os.environ.get('ACKNOWLEDGEMENT')
    netcdf_file_obj.distribution_statement = os.environ.get('DISTRIBUTION_STATEMENT')

    netcdf_file_obj.date_created = strftime("%Y-%m-%dT%H:%M:%SZ", gmtime())
    netcdf_file_obj.quality_control_set = 1
    imos_qc_convention = 'IMOS standard set using the IODE flags'
    netcdf_file_obj.author = 'laurent besnard'
    netcdf_file_obj.author_email = '[email protected]'

    rename_netcdf_attribute(netcdf_file_obj, 'geospatial_LAT_max', 'geospatial_lat_max')
    rename_netcdf_attribute(netcdf_file_obj, 'geospatial_LAT_min', 'geospatial_lat_min')
    rename_netcdf_attribute(netcdf_file_obj, 'geospatial_LON_max', 'geospatial_lon_max')
    rename_netcdf_attribute(netcdf_file_obj, 'geospatial_LON_min', 'geospatial_lon_min')

    # variables modifications
    time = netcdf_file_obj.variables['time']
    time.calendar = 'gregorian'
    time.axis = 'T'
    time.valid_min = 0.0
    time.valid_max = 9999999999.0
    netcdf_file_obj.renameDimension('time', 'TIME')
    netcdf_file_obj.renameVariable('time', 'TIME')

    netcdf_file_obj.time_coverage_start = num2date(time[:], time.units, time.calendar).min().strftime('%Y-%m-%dT%H:%M:%SZ')
    netcdf_file_obj.time_coverage_end = num2date(time[:], time.units, time.calendar).max().strftime('%Y-%m-%dT%H:%M:%SZ')

    # latitude longitude
    latitude = netcdf_file_obj.variables['LATITUDE']
    latitude.axis = 'Y'
    latitude.valid_min = -90.0
    latitude.valid_max = 90.0
    latitude.reference_datum = 'geographical coordinates, WGS84 projection'
    latitude.standard_name = 'latitude'
    latitude.long_name = 'latitude'

    longitude = netcdf_file_obj.variables['LONGITUDE']
    longitude.axis = 'X'
    longitude.valid_min = -180.0
    longitude.valid_max = 180.0
    longitude.reference_datum = 'geographical coordinates, WGS84 projection'
    longitude.standard_name = 'longitude'
    longitude.long_name = 'longitude'

    # handle masked arrays
    lon_array = longitude[:]
    lat_array = latitude[:]
    if type(lon_array) != numpy.ma.core.MaskedArray or len(lon_array) == 1:
        netcdf_file_obj.geospatial_lon_min = min(lon_array)
        netcdf_file_obj.geospatial_lon_max = max(lon_array)
    else:
        netcdf_file_obj.geospatial_lon_min = numpy.ma.MaskedArray.min(lon_array)
        netcdf_file_obj.geospatial_lon_max = numpy.ma.MaskedArray.max(lon_array)

    if type(lat_array) != numpy.ma.core.MaskedArray or len(lat_array) == 1:
        netcdf_file_obj.geospatial_lat_min = min(lat_array)
        netcdf_file_obj.geospatial_lat_max = max(lat_array)
    else:
        netcdf_file_obj.geospatial_lat_min = numpy.ma.MaskedArray.min(lat_array)
        netcdf_file_obj.geospatial_lat_max = numpy.ma.MaskedArray.max(lat_array)

    # Change variable name, standard name, long name, units ...
    if 'Seawater_Intake_Temperature' in netcdf_file_obj.variables.keys():
        var = netcdf_file_obj.variables['Seawater_Intake_Temperature']
        var.units = 'Celsius'
        netcdf_file_obj.renameVariable('Seawater_Intake_Temperature', 'TEMP')
        netcdf_file_obj.renameVariable('Seawater_Intake_Temperature_quality_control', 'TEMP_quality_control')
        var.ancillary_variables = 'TEMP_quality_control'
#......... part of the code is omitted here .........
Example 4: Table
# Required import: from netCDF4 import Dataset
"University of Colorado at Boulder\n",
"Boulder, CO"]
fid.publisher = ["National Snow and Ice Data Center\n",
"Cooperative Institute for Research in Environmental Sciences\n",
"University of Colorado at Boulder\n",
"Boulder, CO"]
fid.publisher_url = "http://nsidc.org/charis"
fid.publisher_email = "[email protected]"
fid.project = "CHARIS"
fid.standard_name_vocabulary = "CF Standard Name Table (v27, 28 September 2013)"
fid.cdm_data_type = "grid"
fid.keywords = "EARTH SCIENCE > SPECTRAL/ENGINEERING > MICROWAVE > BRIGHTNESS TEMPERATURE"
fid.keywords_vocabulary = "NASA Global Change Master Directory (GCMD) Earth Science Keywords, Version 8.1"
fid.platform = "TBD"
fid.sensor = "TBD"
fid.naming_authority = "org.doi.dx"
fid.id = "10.5067/MEASURES/CRYOSPHERE/nsidc-0630.001"
fid.date_created = "TBD"
fid.acknowledgement = ["This data set was created with funding from NASA MEaSUREs Grant #NNX13AI23A.\n",
"Data archiving and distribution is supported by the NASA NSIDC Distributed Active Archive Center (DAAC)."]
fid.license = "No constraints on data access or use"
fid.processing_level = "Level 3"
fid.creator_name = "Mary J. Brodzik"
fid.creator_email = "[email protected]"
fid.creator_url = "http://nsidc.org/charis"
fid.contributor_name = "T. H. Painter, M. J. Brodzik, R. L. Armstrong"
fid.contributor_role = "Principal Investigator, Co-Investigator, Co-Investigator"
fid.citation = ["Brodzik, M. J., D. G. Long, M. A. Hardman, A. C. Paget. 2015.\n",
"MEaSUREs Calibrated Passive Microwave Daily EASE-Grid 2.0 Brightness Temperature ESDR.\n",
"Version 0.01.\n",
"[Indicate subset used].\n",
Example 5: Dataset
# Required import: from netCDF4 import Dataset
shutil.copy2(gliderdir + gliderfile, outputdir + outputfile)
# Open netCDF file to modify names and attributes
rootgrp = Dataset(outputdir + outputfile, 'r+', format='NETCDF4')
# Dimension
rootgrp.renameDimension('time', 'TIME')
# Global attributes
rootgrp.data_type = 'EGO glider time-series data'
rootgrp.format_version = '1.0'
rootgrp.platform_code = '99999'
rootgrp.date_update = change_dataformat(rootgrp.date_modified) # should be converted from rootgrp.date_modified
rootgrp.data_mode = data_mode_dic[rootgrp.data_mode]
rootgrp.naming_authority = 'EGO'
rootgrp.id = outputfile.split('.')[0] # taken from file name... maybe something better to do
rootgrp.source = "Glider observation"
rootgrp.Conventions = "CF-1.4 EGO-1.0"
rootgrp.geospatial_lat_min = str(rootgrp.geospatial_lat_min)
rootgrp.geospatial_lat_max = str(rootgrp.geospatial_lat_max)
rootgrp.geospatial_lon_min = str(rootgrp.geospatial_lon_min)
rootgrp.geospatial_lon_max = str(rootgrp.geospatial_lon_max)
rootgrp.time_coverage_start = change_dataformat(rootgrp.time_coverage_start)
rootgrp.time_coverage_end = change_dataformat(rootgrp.time_coverage_end)
rootgrp.renameVariable('depth', 'DEPTH')
# Rename TIME variable and add attributes
rootgrp.renameVariable('time', 'TIME')