

Python Dataset.title Method Code Examples

This article collects typical usage examples of the Python netCDF4.Dataset.title method. If you are wondering how exactly Dataset.title is used, how to call it, or what real-world code looks like, the curated examples below may help. You can also explore further usage examples of netCDF4.Dataset, the class this method belongs to.


The following presents 15 code examples of the Dataset.title method, sorted by popularity by default.
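
Before the collected examples, here is a minimal, self-contained sketch of what Dataset.title actually is: a global attribute that you set by plain attribute assignment on an open netCDF4.Dataset and read back the same way. The file name below is arbitrary and used only for illustration.

from netCDF4 import Dataset

# Write a file and set the 'title' global attribute
nc = Dataset('title_demo.nc', 'w', format='NETCDF4')
nc.title = 'Demo dataset'
nc.close()

# Reopen the file and read the attribute back
nc = Dataset('title_demo.nc', 'r')
print(nc.title)   # -> Demo dataset
nc.close()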

Example 1: netcdfSIT

# Module to import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import title [as alias]
def netcdfSIT(lats,lons,var):
    directory = '/home/zlabe/Surtsey/seaice_obs/Thk/March/'
    name = 'icesatG_regrid_March_20032008.nc'
    filename = directory + name
    ncfile = Dataset(filename,'w',format='NETCDF4')
    ncfile.description = 'Sea ice thickness processed by NASA-G and now ' \
                         'regridded on an EASE2.0 100 km grid for the ' \
                         'period of March 2003-2008'
    
    ### Dimensions
    ncfile.createDimension('years',var.shape[0])
    ncfile.createDimension('lat',var.shape[1])
    ncfile.createDimension('lon',var.shape[2])
    
    ### Variables
    years = ncfile.createVariable('years','f4',('years'))
    latitude = ncfile.createVariable('lat','f4',('lat','lat'))
    longitude = ncfile.createVariable('lon','f4',('lon','lon'))
    varns = ncfile.createVariable('sit','f4',('years','lat','lon'))
    
    ### Units
    varns.units = 'meters'
    ncfile.title = 'ICESat-G'
    ncfile.institution = 'Dept. ESS at University of California, Irvine'
    ncfile.source = 'NASA-G'
    ncfile.references = 'Donghui Yi, H. Zwally'
    
    ### Data
    years[:] = list(xrange(var.shape[0]))
    latitude[:] = lats
    longitude[:] = lons
    varns[:] = var
    
    ncfile.close()
    print 'Completed: Created netCDF4 File!'
Developer: zmlabe, Project: SeaIceThickness, Lines of code: 37, Source file: calc_IcesatG.py

Example 2: netcdfPiomas

# Module to import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import title [as alias]
def netcdfPiomas(lats,lons,var,directory):
    name = 'piomas_regrid_March_19792015.nc'
    filename = directory + name
    ncfile = Dataset(filename,'w',format='NETCDF4')
    ncfile.description = 'PIOMAS Sea ice thickness reanalysis from 1979-2015 ' \
                        'interpolated on a 180x180 grid (latxlon) ' \
                        'of NSIDC EASE100'
    
    ### Dimensions
    ncfile.createDimension('years',var.shape[0])
    ncfile.createDimension('lat',var.shape[1])
    ncfile.createDimension('lon',var.shape[2])
    
    ### Variables
    years = ncfile.createVariable('years','f4',('years'))
    latitude = ncfile.createVariable('lat','f4',('lat','lat'))
    longitude = ncfile.createVariable('lon','f4',('lon','lon'))
    varns = ncfile.createVariable('thick','f4',('years','lat','lon'))
    
    ### Metrics
    varns.units = 'meters'
    ncfile.title = 'PIOMAS March SIT'
    ncfile.institution = 'Dept. ESS at University of California, Irvine'
    ncfile.source = 'University of Washington'
    ncfile.references = '[Zhang and Rothrock, 2003]'
    
    ### Data
    years[:] = list(xrange(var.shape[0]))
    latitude[:] = lats
    longitude[:] = lons
    varns[:] = var
    
    ncfile.close()
    print 'Completed: Created netCDF4 File!'
Developer: zmlabe, Project: SeaIceThickness, Lines of code: 36, Source file: calc_MarchSIT_timeseries.py

Example 3: writeCMIP5File

# Module to import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import title [as alias]
def writeCMIP5File(modelName,scenario,myvarname,lon,lat,time,mydata,mydataanomaly,outfilename):
    
     myformat='NETCDF3_CLASSIC'
     
     if os.path.exists(outfilename):
          os.remove(outfilename)
     print "Results written to netcdf file: %s"%(outfilename)
     if myvarname=="sic": myvar="SIC"
     
     f1 = Dataset(outfilename, mode='w', format=myformat)
     f1.title       = "IPCC AR5 %s"%(myvar)
     f1.description = "IPCC AR5 running averages of %s for model %s for scenario %s"%(myvar,modelName,scenario)
     f1.history     = "Created " + str(datetime.now())
     f1.source      = "Trond Kristiansen ([email protected])"
     f1.type        = "File in NetCDF3 format created using iceExtract.py"
     f1.Conventions = "CF-1.0"

     """Define dimensions"""
     f1.createDimension('x',  len(lon))
     f1.createDimension('y', len(lat))
     f1.createDimension('time', None)
        
     vnc = f1.createVariable('longitude', 'd', ('x',),zlib=False)
     vnc.long_name = 'Longitude'
     vnc.units = 'degree_east'
     vnc.standard_name = 'longitude'
     vnc[:] = lon

     vnc = f1.createVariable('latitude', 'd', ('y',),zlib=False)
     vnc.long_name = 'Latitude'
     vnc.units = 'degree_north'
     vnc.standard_name = 'latitude'
     vnc[:] = lat

     v_time = f1.createVariable('time', 'd', ('time',),zlib=False)
     v_time.long_name = 'Years'
     v_time.units = 'Years'
     v_time.field = 'time, scalar, series'
     v_time[:]=time     
     
     v_temp=f1.createVariable('SIC', 'd', ('time', 'y', 'x',),zlib=False)
     v_temp.long_name = "Sea-ice area fraction (%)"
     v_temp.units = "%"
     v_temp.time = "time"
     v_temp.field="SIC, scalar, series"
     v_temp.missing_value = 1e20
     
    
     if myvarname=='sic':
          f1.variables['SIC'][:,:,:]  = mydata
          
     f1.close()
Developer: trondkr, Project: OceanLight, Lines of code: 54, Source file: IOwrite.py
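
A hedged usage sketch for writeCMIP5File as excerpted above: every value here (model name, scenario, grid, years, and the random data) is invented for illustration, and the function itself is Python 2 code that expects os, datetime, and netCDF4.Dataset to be imported in its module.

import numpy as np

lon = np.linspace(0.0, 359.0, 360)
lat = np.linspace(-89.5, 89.5, 180)
time = np.arange(2006, 2101)                                   # years
sic = np.random.rand(len(time), len(lat), len(lon)) * 100.0    # fake sea-ice fraction (%)

# mydataanomaly is accepted but unused in the function body, so None is fine here
writeCMIP5File('GFDL-ESM2M', 'rcp85', 'sic',
               lon, lat, time, sic, None,
               'sic_GFDL-ESM2M_rcp85.nc')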

Example 4: tamoc_nc_file

# Module to import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import title [as alias]
def tamoc_nc_file(fname, title, summary, source):
    """
    Write the header metadata to a netCDF file for a TAMOC output
    
    The TAMOC suite stores its output by default in a netCDF dataset file.  
    This function writes the standard TAMOC metadata to the header of the 
    netCDF file.  
    
    Parameters
    ----------
    fname : str
        File name of the file to write
    title:  str
        String stating the TAMOC module where the data originated and the 
        type of data contained.  
    summary : str
        String summarizing what is contained in the dataset or information
        needed to interpret the dataset
    source : str
        String describing the source of the data in the dataset or of related
        datasets
    
    Returns
    -------
    nc : `netCDF4.Dataset` object
        The `netCDF4.Dataset` object containing the open netCDF4 file where
        the data should be stored.
    
    """
    
    # Create the netCDF dataset object
    nc = Dataset(fname, 'w', format='NETCDF4_CLASSIC')
    
    # Write the netCDF header data for a TAMOC suite output
    nc.Conventions = 'TAMOC Modeling Suite Output File'
    nc.Metadata_Conventions = 'TAMOC Python Model'
    nc.featureType = 'profile'
    nc.cdm_data_type = 'Profile'
    nc.nodc_template_version = \
        'NODC_NetCDF_Profile_Orthogonal_Template_v1.0'
    nc.title = title
    nc.summary = summary
    nc.source = source
    nc.creator_url = 'http://github.com/socolofs/tamoc'
    nc.date_created = datetime.today().isoformat(' ')
    nc.date_modified = datetime.today().isoformat(' ')
    nc.history = 'Creation'
    
    # Return the netCDF dataset
    return nc
Developer: socolofs, Project: tamoc, Lines of code: 52, Source file: model_share.py
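
A short, hypothetical call to tamoc_nc_file as defined above; the file name and metadata strings are invented for illustration. The returned Dataset is deliberately left open so the caller can add dimensions and variables before closing it.

nc = tamoc_nc_file('tamoc_demo.nc',
                   title='example TAMOC module output',
                   summary='Synthetic example metadata, for illustration only',
                   source='No real data source; values are placeholders')

# ...create dimensions and variables on nc here...
nc.close()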

Example 5: generate_nc

# Module to import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import title [as alias]
def generate_nc(parser_context):
    parser = XLSParser()
    with open(parser_context.filepath, 'r') as f:
        doc = f.read()
    info = parser.extract_worksheets(doc)
    nccl = info[parser_context.worksheet]
    #header_line = 3
    #columns = nccl[header_line]
    #data_range = (4, 66)
    data_rows = nccl[parser_context.data_range[0]:parser_context.data_range[1]]
    print 'Generating',parser_context.output_file
    nc = Dataset(parser_context.output_file, 'w')
    nc.createDimension('time', len(data_rows)*12)
    nc.GDAL = "GDAL 1.9.2, released 2012/10/08"
    nc.history = "Created dynamically in IPython Notebook 2013-11-14"
    nc.title = nccl[0][0]
    nc.summary = nccl[1][0]
    nc.naming_authority = 'GLOS'
    nc.source = 'GLERL'
    nc.standard_name_vocabulary = "http://www.cgd.ucar.edu/cms/eaton/cf-metadata/standard_name.html"
    nc.project = 'GLOS'
    nc.Conventions = "CF-1.6"
    time = nc.createVariable('time', 'f8', ('time',))
    time.standard_name = 'time'
    time.units = 'seconds since 1970-01-01'
    time.long_name = 'Time'
    time.axis = 'T'
    precip = nc.createVariable(parser_context.variable, 'f8', ('time',), fill_value=parser_context.fill_value)
    #precip.standard_name = 'precipitation_amount'
    precip.standard_name = parser_context.standard_name

    precip.units = parser_context.units
    for i,row in enumerate(data_rows):
        for j in xrange(12):
            the_date = datetime(row[0], j+1, 1)
            timestamp = calendar.timegm(the_date.utctimetuple())
            time[i*12 + j] = timestamp
            try:
                value = float(row[j+1])
            except ValueError:
                continue
            except TypeError:
                continue

            precip[i*12 + j] = value
    nc.close() 
Developer: lukecampbell, Project: glos, Lines of code: 48, Source file: hydro.py

Example 6: write_nc_file

# Module to import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import title [as alias]
def write_nc_file(daily_results, filename, nc, anom_mode=False):
    #Grab every 4th time value to represent daily
    daily_time_var = nc.variables['time'][::4]

    nc_out = Dataset(filename, mode='w', format='NETCDF4') 
    nc_out.createDimension('lon', LONS)
    nc_out.createDimension('lat', LATS)
    nc_out.createDimension('time', None) #UNLIMITED
    nc_out.createDimension('month', MONTHS_YEAR)
    nc_out.title = ''
    nc_out.institution = ''
    nc_out.project = ''
    nc_out.contact = '[email protected]'
    nc_out.Conventions = "CF-1.6"
    
    longitude = nc_out.createVariable('lon', 'f8', ('lon',))
    longitude.standard_name = 'longitude'
    longitude.long_name = 'longitude'
    longitude.units = 'degrees_east'
    longitude.modulo = 360.0
    longitude.axis = 'X'
    longitude[:] = np.arange(0, 360.0, 2.0)
    
    latitude = nc_out.createVariable('lat', 'f8', ('lat',))
    latitude.standard_name = 'latitude'
    latitude.long_name = 'latitude'
    latitude.units = 'degrees_north'
    latitude.axis = 'Y'
    latitude[:] = np.arange(-90.0, 92.0, 2.0)
    
    time = nc_out.createVariable('time', 'f8', ('time',))
    time.units = 'hours since 1-1-1 0:0:0' 
    time.calendar = 'standard' #Gregorian
    time[:] = daily_time_var 
    
    if anom_mode:
        daily_mean = nc_out.createVariable('daily_anom', 'f8', ('time', 'lat', 'lon'))
        daily_mean.long_name = 'z500 daily anomaly vs 1981-2010'
    else:
        daily_mean = nc_out.createVariable('daily_mean', 'f8', ('time', 'lat', 'lon'))
        daily_mean.long_name = 'z500 daily mean'

    daily_mean[:] = daily_results
    nc_out.close()
Developer: abuddenb, Project: agu2015, Lines of code: 46, Source file: calc_daily_mean_current.py

Example 7: netcdfSIT

# Module to import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import title [as alias]
def netcdfSIT(lats,lons,var,varmean):
    directory = '/home/zlabe/Surtsey/seaice_obs/Thk/March/'
    name = 'sub_regrid_March_19861994.nc'
    filename = directory + name
    ncfile = Dataset(filename,'w',format='NETCDF4')
    ncfile.description = 'Sea ice thickness processed by submarine ' \
                         'data although record is spotty throughout ' \
                         '1986-1994 reference period. Mean thickness ' \
                         'over the period is also included.'
    
    ### Dimensions
    ncfile.createDimension('years',var.shape[0])
    ncfile.createDimension('lat',var.shape[1])
    ncfile.createDimension('lon',var.shape[2])
    
    ### Variables
    years = ncfile.createVariable('years','f4',('years'))
    latitude = ncfile.createVariable('lat','f4',('lat','lat'))
    longitude = ncfile.createVariable('lon','f4',('lon','lon'))
    varns = ncfile.createVariable('sit','f4',('years','lat','lon'))
    varnsmean = ncfile.createVariable('meansit','f4',('lat','lon'))
    
    ### Units
    varns.units = 'meters'
    varnsmean.units = 'meters'
    ncfile.title = 'Submarine Data'
    ncfile.institution = 'Dept. ESS at University of California, Irvine'
    ncfile.source = 'NSIDC, J. Maslanik & A.P. Barrett'
    ncfile.created_by = 'Zachary Labe ([email protected])'
#    ncfile.references = ''
    
    ### Data
    years[:] = list(xrange(var.shape[0]))
    latitude[:] = lats
    longitude[:] = lons
    varns[:] = var
    varnsmean[:] = varmean
    
    ncfile.close()
    print 'Completed: Created netCDF4 File!'
Developer: zmlabe, Project: SeaIceThickness, Lines of code: 42, Source file: calc_Submarine.py

Example 8: netcdfSIT

# Module to import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import title [as alias]
def netcdfSIT(lats,lons,var,varmean):
    directory = '/home/zlabe/Surtsey/seaice_obs/Thk/March/'
    name = 'icebridge_regrid_March_20092014.nc'
    filename = directory + name
    ncfile = Dataset(filename,'w',format='NETCDF4')
    ncfile.description = 'Sea ice thickness processed by iceBridge onto ' \
                         'EASE2.0 100km grids for spring 2009-2014. ' \
                         'Mean thickness also included for period over ' \
                         '2009-2014.'
    
    ### Dimensions
    ncfile.createDimension('years',var.shape[0])
    ncfile.createDimension('lat',var.shape[1])
    ncfile.createDimension('lon',var.shape[2])
    
    ### Variables
    years = ncfile.createVariable('years','f4',('years'))
    latitude = ncfile.createVariable('lat','f4',('lat','lat'))
    longitude = ncfile.createVariable('lon','f4',('lon','lon'))
    varns = ncfile.createVariable('sit','f4',('years','lat','lon'))
    varnsmean = ncfile.createVariable('meansit','f4',('lat','lon'))
    
    ### Units
    varns.units = 'meters'
    varnsmean.units = 'meters'
    ncfile.title = 'IceBridge'
    ncfile.institution = 'Dept. ESS at University of California, Irvine'
    ncfile.source = 'NSIDC, Andrew P. Barrett'
#    ncfile.references = ''
    
    ### Data
    years[:] = list(xrange(var.shape[0]))
    latitude[:] = lats
    longitude[:] = lons
    varns[:] = var
    varnsmean[:] = varmean
    
    ncfile.close()
    print 'Completed: Created netCDF4 File!'
Developer: zmlabe, Project: SeaIceThickness, Lines of code: 41, Source file: calc_Icebridge.py

Example 9: netcdfSatelliteG

# Module to import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import title [as alias]
def netcdfSatelliteG(lats,lons,var,directory):
    name = 'satelliteG_regrid_March_20032015.nc'
    filename = directory + name
    ncfile = Dataset(filename,'w',format='NETCDF4')
    ncfile.description = 'Satellite data for ICESat-G (2003-2008) and ' \
                         'CryoSat-2 (2011-2015) that have been regridded ' \
                         'on 180x180 EASE2.0 100km grids. Years 2009-2010 are ' \
                         'available in the data but have been filled with ' \
                         'nan values to be inclusive. Shape is therefore ' \
                         '[13,180,180]'
    
    ### Dimensions
    ncfile.createDimension('years',var.shape[0])
    ncfile.createDimension('lat',var.shape[1])
    ncfile.createDimension('lon',var.shape[2])
    
    ### Variables
    years = ncfile.createVariable('years','f4',('years'))
    latitude = ncfile.createVariable('lat','f4',('lat','lat'))
    longitude = ncfile.createVariable('lon','f4',('lon','lon'))
    varns = ncfile.createVariable('thick','f4',('years','lat','lon'))
    
    ### Metrics
    varns.units = 'meters'
    ncfile.title = 'ICESat_G/CryoSat March SIT'
    ncfile.institution = 'Dept. ESS at University of California, Irvine'
    ncfile.source = 'NASA-G/ESA Products'
    ncfile.references = 'Donghui Yi, H. Zwally, S. Laxon'
    
    ### Data
    years[:] = list(xrange(var.shape[0]))
    latitude[:] = lats
    longitude[:] = lons
    varns[:] = var
    
    ncfile.close()
    print 'Completed: Created netCDF4 File!'
Developer: zmlabe, Project: SeaIceThickness, Lines of code: 39, Source file: calc_MarchSIT_timeseries.py

Example 10: len

# Module to import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import title [as alias]
    yfv[:]   = np.arange(njf)
    lonsa[:]  = lons_anal
    latsa[:]  = lats_anal
    lonsf[:]  = lons_fcst
    latsf[:]  = lats_fcst

    conusmask[:] = conusmask_ccpa

    thrnumv[:] = xc[:]
    thrvalv[:] = thresh[:]
 
    rootgrp.latcorners = [lats_anal[0,0], lats_anal[0,-1], lats_anal[-1,0], lats_anal[-1,-1]]
    rootgrp.loncorners = [lons_anal[0,0], lons_anal[0,-1], lons_anal[-1,0], lons_anal[-1,-1]]

    rootgrp.stream = "s4" # ????
    rootgrp.title = "Reforecast V2 accum. ensemble-mean precip forecast and analyzed CDF + rank correlation"
    rootgrp.Conventions = "CF-1.0"  # ????
    rootgrp.history = "Revised Mar 2016 by Hamill" 
    rootgrp.institution = \
        "Reforecast from ERSL/PSD using NCEP/EMC GEFS, circa 2012"
    rootgrp.platform = "Model" 
    rootgrp.references = "http://www.esrl.noaa.gov/psd/forecasts/reforecast2/" 
    
    # ---- open ensemble data file for each year, read in data, and augment cdf 
    #      information for that year if the sample is within the month of interest
    #      or the neighboring month

    rankcorr_fa = -99.99*np.ones((nja,nia),dtype=np.float)    
    nyears = len(range(2002,2016))
    print 'nsamps = ',92*nyears
    precipa = np.zeros((nja,nia),dtype=np.float)
Developer: ThomasMoreHamill, Project: analog, Lines of code: 33, Source file: compute_precip_CCPAgrid_cdfs.py

Example 11: ConvertNCCF

# Module to import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import title [as alias]
def ConvertNCCF(TheFileIn,TheFileOut,TheTimes,TheDaysArray,TheCLats,TheCLongs,TheClimPeriod,TheMissing,TheType):
    ''' Discover what is in the file '''
    ''' Open and read in all bits '''
    ''' Write out in cf compliant style '''

    ncf=Dataset(TheFileIn,'r')
    nc_dims = list(ncf.dimensions)	# list of dimensions [dim for dim in ncf.dimensions]
    nc_vars = list(ncf.variables)  # list of nc variables [var for var in ncf.variables]
    nc_attrs = ncf.ncattrs()		# list of global attributes

    ndims=len(nc_dims)
    nvars=len(nc_vars)
    ngatts=len(nc_attrs)

# Get all global attributes
    TheGAtts=np.empty(ngatts,dtype=object)	# an empty array with the right number of string elements
    for (noo,att) in enumerate(nc_attrs):	# enumerate and use elements of the list
        TheGAtts[noo]=ncf.getncattr(att)	# get each global attribute and populate array

# Get all dimensions
    TheDims=np.empty(ndims)	# an empty array with the right number of string elements
    for (noo,dim) in enumerate(nc_dims):	# enumerate and use elements of the list
        TheDims[noo]=len(ncf.dimensions[dim])	# get length of each dimension
# NO DIMENSION ATTRIBUTES - 
#    TheDimAttrNames=[[] for i in xrange(ndims)]		# create list of lists - one for the attribute names of each dimension
#    TheDimAttrs=[[] for i in xrange(ndims)]		# create list of lists - one for the attributes of each dimension
#    for (noo,dim) in enumerate(nc_dims):	# enumerate and use elements of the list
#        TheDimAttrNames[noo]=ncf.dimensions[dim].ncattrs()	# fill names
#        for (nee,nats) in enumerate(TheDimAttrNames[noo]):      # loop through each name and get the attribute   
#            TheDimAttrs[noo][nee]=f.dimensions[dim].getncattr(nats)	

# Get all variables, and their attributes
    TheVarAttrNames=[[] for i in xrange(nvars)]		# create list of lists - one for the attribute names of each dimension
    TheVarAttrs=[[] for i in xrange(nvars)]		# create list of lists - one for the attributes of each dimension
    TheVars=[[] for i in xrange(nvars)]		# create list of lists - one for the attributes of each dimension
    for (noo,var) in enumerate(nc_vars):	# enumerate and use elements of the list
        TheVarAttrNames[noo]=ncf.variables[var].ncattrs()	# fill names
        for (nee,nats) in enumerate(TheVarAttrNames[noo]):      # loop through each name and get the attribute   
            TheVarAttrs[noo].append(ncf.variables[var].getncattr(nats))	
        TheVars[noo]=ncf.variables[nc_vars[noo]][:]


# Now write out, checking if the standard stuff is not there, and if not, then add in
    ncfw=Dataset(TheFileOut,'w',format='NETCDF3_CLASSIC')
    
# Set up the global attributes
# Is there a description?
    moo=np.where(np.array(nc_attrs) == 'description')
    if (moo[0] >= 0):
        ncfw.description=TheGAtts[moo[0]]
    else:
        ncfw.description="HadISDH monthly mean land surface "+TheType+" climate monitoring product from 1973 onwards. Quality control, homogenisation, uncertainty estimation, averaging over gridboxes (no smoothing or interpolation)."
# Is there a title?
    moo=np.where(np.array(nc_attrs) == 'title')
    if (moo[0] >= 0):
        ncfw.title=TheGAtts[moo[0]]
    else:
        ncfw.title="HadISDH monthly mean land surface "+TheType+" climate monitoring product from 1973 onwards."
# Is there an institution?
    moo=np.where(np.array(nc_attrs) == 'institution')
    if (moo[0] >= 0):
        ncfw.institution=TheGAtts[moo[0]]
    else:
        ncfw.institution="Met Office Hadley Centre (UK), National Climatic Data Centre (USA), Climatic Research Unit (UK), National Physical Laboratory (UK), Bjerknes Centre for Climate Research (Norway)"
# Is there a history?
    moo=np.where(np.array(nc_attrs) == 'history')
    if (moo[0] >= 0):
        ncfw.history=TheGAtts[moo[0]]
    else:
        ncfw.history="Updated 4 February 2014"
# Is there a source?
    moo=np.where(np.array(nc_attrs) == 'source')
    if (moo[0] >= 0):
        ncfw.source=TheGAtts[moo[0]]
    else:
        ncfw.source="HadISD.1.0.2.2013f (Dunn et al., 2012)"
# Is there a comment?
    moo=np.where(np.array(nc_attrs) == 'comment')
    if (moo[0] >= 0):
        ncfw.comment=TheGAtts[moo[0]]
    else:
        ncfw.comment=""
# Is there a reference?
    moo=np.where(np.array(nc_attrs) == 'reference')
    if (moo[0] >= 0):
        ncfw.reference=TheGAtts[moo[0]]
    else:
        ncfw.reference="Willett, K. M., Dunn, R. J. H., Thorne, P. W., Bell, S., de Podesta, M., Parker, D. E., Jones, P. D., and Williams Jr., C. N.: HadISDH land surface multi-variable humidity and temperature record for climate monitoring, Clim. Past, 10, 1983-2006, doi:10.5194/cp-10-1983-2014, 2014."
# Is there a version?
    moo=np.where(np.array(nc_attrs) == 'version')
    if (moo[0] >= 0):
        ncfw.version=TheGAtts[moo[0]]
    else:
        ncfw.version="HadISDH.2.0.0.2013p"
# Is there a Conventions?
    moo=np.where(np.array(nc_attrs) == 'Conventions')
    if (moo[0] >= 0):
        ncfw.Conventions=TheGAtts[moo[0]]
    else:
        ncfw.Conventions="CF-1.0"
#......... remainder of the code omitted .........
Developer: Kate-Willett, Project: Climate_Explorer, Lines of code: 103, Source file: Convert_cfnc_AUG2014.py

Example 12: write_exodus_file

# Module to import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import title [as alias]
def write_exodus_file(filename, cells, vertices, shape="SHELL4"):
    """
    Write Exodus-II file compatible with CUBIT.

    cells is a 0-based array (ncells, ncorners).

    vertices is (nvertices, dim).

    All cells are placed in a single block.

    Requires netCDF4 module.
    """
    import numpy
    from netCDF4 import Dataset

    len_string = 33

    root = Dataset(filename, 'w', format='NETCDF3_CLASSIC')

    # Set global attributes
    root.api_version = 4.98
    root.version = 4.98
    root.floating_point_word_size = 8
    root.file_size = 0
    root.title = "cubit"

    # Setup dimensions

    # Generic information
    root.createDimension('len_string', len_string)
    root.createDimension('len_line', 81)
    root.createDimension('four', 4)
    root.createDimension('num_qa_rec', 1)
    root.createDimension('time_step', None)

    # Mesh specific information
    (ncells, ncorners) = cells.shape
    (nvertices, dim) = vertices.shape
    root.createDimension('num_dim', dim)
    root.createDimension('num_el_blk', 1)
    root.createDimension('num_nod_per_el1', ncorners)
    root.createDimension('num_att_in_blk1', 1)

    root.createDimension('num_nodes', nvertices)
    root.createDimension('num_elem', ncells)
    root.createDimension('num_el_in_blk1', ncells)

    # Setup variables
    connect1 = root.createVariable('connect1', numpy.int32,
                                   ('num_el_in_blk1', 'num_nod_per_el1',))

    coord = root.createVariable('coord', numpy.float64,
                                ('num_dim', 'num_nodes',))
    
    time_whole = root.createVariable('time_whole', numpy.float64,
                                     ('time_step',))
    
    coor_names = root.createVariable('coor_names', 'S1',
                                     ('num_dim', 'len_string',))
    
    qa_records = root.createVariable('qa_records', 'S1',
                                     ('num_qa_rec', 'four', 'len_string',))
    
    eb_names = root.createVariable('eb_names', 'S1',
                                   ('num_el_blk', 'len_string',))

    elem_map = root.createVariable('elem_map', numpy.int32,
                                   ('num_elem',))

    eb_status = root.createVariable('eb_status', numpy.int32,
                                    ('num_el_blk',))

    eb_prop1 = root.createVariable('eb_prop1', numpy.int32,
                                   ('num_el_blk',))

    attrib1 = root.createVariable('attrib1', numpy.float64,
                                  ('num_el_in_blk1', 'num_att_in_blk1',))

    # Set variable values
    connect1[:] = 1+cells[:]
    connect1.elem_type = shape

    coord[:] = vertices.transpose()[:]

    from netCDF4 import stringtoarr
    if dim == 2:
        coor_names[0,:] = stringtoarr("x", len_string)
        coor_names[1,:] = stringtoarr("y", len_string)
    elif dim == 3:
        coor_names[0,:] = stringtoarr("x", len_string)
        coor_names[1,:] = stringtoarr("y", len_string)
        coor_names[2,:] = stringtoarr("z", len_string)


    qa_records[0,0,:] = stringtoarr("CUBIT", len_string)
    qa_records[0,1,:] = stringtoarr("11.0", len_string)
    qa_records[0,2,:] = stringtoarr("01/01/2000", len_string)
    qa_records[0,3,:] = stringtoarr("12:00:00", len_string)

    elem_map[:] = numpy.arange(1, ncells+1, dtype=numpy.int32)[:]
#......... remainder of the code omitted .........
Developer: geodynamics, Project: pylith_benchmarks, Lines of code: 103, Source file: cubit_io.py

Example 13: Dataset

# Module to import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import title [as alias]
# In[2]:

from netCDF4 import Dataset
import json


# In[ ]:

# Assume that /projects/CHARIS is sshfs mounted on this machine, and
# that the user has write permission
fid = Dataset('~/projects/CHARIS/snow_cover/modice.v0.4/min05yr_nc/MODICE.v0.4.1test.nc', 'w', format='NETCDF4')
fid.Conventions = "CF-1.6"
fid = Dataset('/home/vagrant/measures-byu/src/prod/cetb_file/templates/cetb_global_template.nc', 'w', format='NETCDF4')
fid.Conventions = "CF-1.6"
fid.title = "MODICE mask for a minimum number of years"
fid.product_version = "v0.4"
#fid.software_version_id = "TBD"
#fid.software_repository = "[email protected]:nsidc/measures-byu.git"
fid.source = "MODICE"
fid.source_version_id = "v04"
fid.history = ""
fid.comment = "Mask locations with 2 indicate MODICE for >= min_years."
fid.references = "Painter, T. H., Brodzik, M. J., A. Racoviteanu, R. Armstrong. 2012. Automated mapping of Earth's annual minimum exposed snow and ice with MODIS. Geophysical Research Letters, 39(20):L20501, doi:10.1029/2012GL053340."
fid.summary = ["An improved, enhanced-resolution, gridded passive microwave Earth System Data Record \n",
               "for monitoring cryospheric and hydrologic time series\n" ]
fid.title = "MEaSUREs Calibrated Passive Microwave Daily EASE-Grid 2.0 Brightness Temperature ESDR"
fid.institution = ["National Snow and Ice Data Center\n",
                   "Cooperative Institute for Research in Environmental Sciences\n",
                   "University of Colorado at Boulder\n",
                   "Boulder, CO"]
fid.publisher = ["National Snow and Ice Data Center\n",
Developer: mjbrodzik, Project: ipython_notebooks, Lines of code: 32, Source file: make_MODICEv04_min05yr_netcdf.py

Example 14: create_mhl_sst_ncfile

# Module to import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import title [as alias]
def create_mhl_sst_ncfile(txtfile, site_code_short, data,
                          time, dtime, spatial_data):
    """
    create NetCDF file for MHL Wave data
    """
    site_code = site_list[site_code_short][0]
    netcdf_filename = create_netcdf_filename(site_code, data, dtime)
    netcdf_filepath = os.path.join(
        output_folder, "%s.nc") % netcdf_filename
    ncfile = Dataset(netcdf_filepath, "w", format="NETCDF4")


    # generate site and deployment specific attributes
    ncfile.title = ("IMOS - ANMN New South Wales(NSW) %s "
                    "Sea water temperature (%s) -"
                    "Deployment No. %s %s to %s") % (
            site_list[site_code_short][1], site_code,
            spatial_data[0], min(dtime).strftime("%d-%m-%Y"),
            max(dtime).strftime("%d-%m-%Y"))
    ncfile.institution = 'Manly Hydraulics Laboratory'
    ncfile.keywords = ('Oceans | Ocean temperature | '
                           'Sea Surface Temperature')
    ncfile.principal_investigator = 'Mark Kulmar'
    ncfile.cdm_data_type = 'Station'
    ncfile.platform_code = site_code

    abstract_default = ("The sea water temperature is measured by a thermistor mounted in the "
                        "buoy hull approximately 400 mm below the water "
                        "surface.  The thermistor has a resolution of 0.05 "
                        "Celsius and an accuracy of 0.2 Celsius.  The "
                        "measurements are transmitted to a shore station "
                        "where it is stored on a PC before routine transfer "
                        "to Manly Hydraulics Laboratory via email.")

    if site_code_short in ['COF', 'CRH', 'EDE', 'PTK']:

        abstract_specific = ("This dataset contains sea water temperature "
                             "data collected by a wave monitoring buoy moored off %s. ") % site_list[site_code_short][1]
    else:
        abstract_specific = ("This dataset contains sea water temperature "
                             "data collected by a wave monitoring buoy moored off %s "
                             "approximately %s kilometres from the coastline. ") % (

                          site_list[site_code_short][1], site_list[site_code_short][2])

    ncfile.abstract = abstract_specific + abstract_default
    ncfile.comment = ("The sea water temperature data (SST) is routinely quality controlled (usually twice per week) "
                      "using a quality control program developed by Manly Hydraulics Laboratory.  The SST data gathered "
                      "by the buoy is regularly compared to the latest available satellite derived sea SST images available "
                      "from the Bluelink ocean forecasting web pages to ensure the integrity of the dataset.  Erroneous SST "
                      "records are removed and good quality data is flagged as \'Quality Controlled\' in the "
                      "Manly Hydraulics Laboratory SST database.") 
    ncfile.sourceFilename = os.path.basename(txtfile)
    ncfile.date_created = datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ")
    ncfile.time_coverage_start = min(dtime).strftime("%Y-%m-%dT%H:%M:%SZ")
    ncfile.time_coverage_end = max(dtime).strftime("%Y-%m-%dT%H:%M:%SZ")
    ncfile.geospatial_lat_min = spatial_data[1]
    ncfile.geospatial_lat_max = spatial_data[1]
    ncfile.geospatial_lon_min = spatial_data[2]
    ncfile.geospatial_lon_max = spatial_data[2]
    ncfile.geospatial_vertical_max = 0.
    ncfile.geospatial_vertical_min = 0.
    ncfile.deployment_number = str(spatial_data[0])

    # add dimension and variables
    ncfile.createDimension('TIME', len(time))

    TIME = ncfile.createVariable('TIME', "d", 'TIME')
    TIMESERIES = ncfile.createVariable('TIMESERIES', "i")
    LATITUDE = ncfile.createVariable(
        'LATITUDE', "d", fill_value=99999.)
    LONGITUDE = ncfile.createVariable(
        'LONGITUDE', "d", fill_value=99999.)
    TEMP = ncfile.createVariable('TEMP', "f", 'TIME', fill_value=99999.)

    # add global attributes and variable attributes stored in config files
    config_file = os.path.join(os.getcwd(), 'global_att_sst.att')
    generate_netcdf_att(ncfile, config_file,
                        conf_file_point_of_truth=False)
    
    # replace nans with fillvalue in dataframe
    data = data.fillna(value=float(99999.))

    TIME[:] = time
    TIMESERIES[:] = 1
    LATITUDE[:] = spatial_data[1]
    LONGITUDE[:] = spatial_data[2]
    TEMP[:] = data['SEA_TEMP'].values
    ncfile.close()
Developer: aodn, Project: data-services, Lines of code: 91, Source file: process_MHLsst_from_txt.py

Example 15: write_netcdf

# Module to import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import title [as alias]
def write_netcdf(filename,varnames,data,title):
	r"""
	Parameters
	----------
	filename: string
		path + filename of netCDF file e. g., '~/Desktop/netcdf_file.nc'

	varnames: list
		names of the variables, e.g., ['var1','var2','var3'] (each name must exactly match
		an attribute of the data object)
	
	data: class 
		class containing the variables to be saved

	title: string 
		Title or any brief description of the data set 
	"""
	from netCDF4 import Dataset
	import numpy as np 

	# Opening file
	filenc = Dataset(filename,'w',clobber=True)


	# Global Attributes of the file
	filenc.title = title
	filenc.institution = 'The Rosenstiel School of Marine and Atmospheric Science, University of Miami'
	filenc.author = 'MSc Tiago Carrilho Bilo'

	## Creating variables
	for v in xrange(len(varnames)):
		exec("tp = data."+varnames[v]+".dtype")
		exec("d = len(data."+varnames[v]+".shape)")

		if d == 1:
			exec("filenc.createDimension('i"+np.str(v)+"', None)")
			exec("var"+np.str(v)+" = filenc.createVariable(varnames[v], tp.name, ('i"+np.str(v)+"'), zlib=True)")
			exec("var"+np.str(v)+"[:] = data."+varnames[v])			
		elif d == 2:		
			exec("filenc.createDimension('i"+np.str(v)+"', None)")
			exec("filenc.createDimension('j"+np.str(v)+"', None)")
			exec("var"+np.str(v)+" = filenc.createVariable(varnames[v], tp.name, ('i"+np.str(v)+"','j"+np.str(v)+"'), zlib=True)")
			exec("var"+np.str(v)+"[:,:] = data."+varnames[v])			
		elif d == 3:		
			exec("filenc.createDimension('i"+np.str(v)+"', None)")
			exec("filenc.createDimension('j"+np.str(v)+"', None)")
			exec("filenc.createDimension('k"+np.str(v)+"', None)")
			exec("var"+np.str(v)+" = filenc.createVariable(varnames[v], tp.name, ('i"+np.str(v)+"','j"+np.str(v)+"','k"+np.str(v)+"'), zlib=True)")
			exec("var"+np.str(v)+"[:,:,:] = data."+varnames[v])			
		elif d == 4:
			exec("filenc.createDimension('i"+np.str(v)+"', None)")
			exec("filenc.createDimension('j"+np.str(v)+"', None)")
			exec("filenc.createDimension('k"+np.str(v)+"', None)")
			exec("filenc.createDimension('t"+np.str(v)+"', None)")
			exec("var"+np.str(v)+" = filenc.createVariable(varnames[v], tp.name, ('i"+np.str(v)+"','j"+np.str(v)+"','k"+np.str(v)+"','t"+np.str(v)+"'), zlib=True)")			
			exec("var"+np.str(v)+"[:,:,:,:] = data."+varnames[v])

	filenc.close()
	print "File "+filename+" created"

	return
Developer: tiagobilo, Project: ModelSubset, Lines of code: 63, Source file: models.py
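
A hypothetical call to write_netcdf as defined above. The container class, variable names, and values are all invented for illustration; note that the excerpt itself is Python 2-era code (print statement, xrange, np.str), so this sketch assumes a matching environment.

import numpy as np

class ModelOutput(object):
    pass

out = ModelOutput()
out.temp = np.random.rand(10, 5)              # 2-D array -> dimensions ('i0', 'j0')
out.depth = np.arange(10, dtype='float64')    # 1-D array -> dimension ('i1',)

write_netcdf('model_output_demo.nc', ['temp', 'depth'], out,
             'Synthetic temperature section, for illustration only')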


Note: The netCDF4.Dataset.title method examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by various developers; copyright of the source code remains with the original authors. For distribution and use, please refer to the License of the corresponding project; do not reproduce without permission.