

Python Dataset.setncattr Method Code Examples

This article collects typical usage examples of the Python netCDF4.Dataset.setncattr method. If you are wondering what Dataset.setncattr does, how to call it, or what real-world uses look like, the curated examples below should help. You can also explore further usage examples of the containing class, netCDF4.Dataset.


The following presents 15 code examples of the Dataset.setncattr method, sorted by popularity by default.
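As a quick orientation before the collected examples, here is a minimal sketch of how setncattr attaches global and per-variable attributes. The file name, dimension, and attribute values are illustrative only and do not come from any of the projects cited below.

from netCDF4 import Dataset

# Create a small file and attach attributes with setncattr (file name is hypothetical)
nc = Dataset('example.nc', 'w', format='NETCDF4')
nc.setncattr('title', 'setncattr demo')                 # global attribute
nc.setncattr('history', 'created as an illustration')   # another global attribute

nc.createDimension('x', 3)
var = nc.createVariable('data', 'f8', ('x',))
var.setncattr('units', 'metres')                         # attribute on a variable
var[:] = [1.0, 2.0, 3.0]
nc.close()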

Example 1: init_nc

# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import setncattr [as alias]
def init_nc(outfile,variables,dims,globalatts):
    """
    Initialises the output netcdf file for writing
    """
    print "Generating file: %s..."%outfile    
    nc = Dataset(outfile,'w',format='NETCDF4_CLASSIC') 
    
    # Write the global attributes
    for gg in globalatts.keys():
        nc.setncattr(gg,globalatts[gg])
            
    # Create the dimensions
    for dd in dims:
        nc.createDimension(dd,dims[dd])
    
    # Create the variables
    for vv in variables:
        if vv['isFilled']:
            tmpvar=nc.createVariable(vv['Name'],vv['dtype'],vv['Dimensions'],zlib=True,complevel=2,fill_value=99999.0)
        else:
            tmpvar=nc.createVariable(vv['Name'],vv['dtype'],vv['Dimensions'])
    
        # Create the attributes
        for aa in vv['Attributes'].keys():
            tmpvar.setncattr(aa,vv['Attributes'][aa]) 
                   
    nc.close()    
Author: jadelson, Project: suntanspy, Lines: 29, Source: joinsun.py

Example 2: wrf_copy_attributes

# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import setncattr [as alias]
def wrf_copy_attributes( infilename, outfilename, nlevs ):
    '''
    Copies the netCDF attributes of one file into another file
    that is created by this function. This is so that information
    like the model start date, dx, and namelist options are data
    attributes in post-processed netCDF files. This will assume that
    the grid domain in both files is the same, and will use the west-
    east and south-north values from the input file.

    Parameters
    ----------
    infilename: The name/path of the input file to be read
    outfilename: The name/path of the output file to be written
    nlevs: The number of vertical levels that the output file should have


    '''
    ## open the files
    infile = Dataset( infilename )
    outfile = Dataset( outfilename, 'w', format='NETCDF4' )
    
    ## create dimensions
    level = outfile.createDimension( 'bottom_top', nlevs )
    time = outfile.createDimension( 'time', None )
    lon = outfile.createDimension( 'south_north', infile.getncattr('SOUTH-NORTH_PATCH_END_UNSTAG') )
    lat = outfile.createDimension( 'west_east', infile.getncattr('WEST-EAST_PATCH_END_UNSTAG') )
    
    ## copy the global attributes to the new file
    inattrs = infile.ncattrs()
    for attr in inattrs:
        outfile.setncattr( attr, infile.getncattr( attr ) )
    ## close both files
    infile.close()
    outfile.close()
Author: islenv, Project: wrftools-1, Lines: 36, Source: attributes.py

Example 3: save

# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import setncattr [as alias]
 def save(self,outfile='DEM.nc'):
     """ Saves the DEM to a netcdf file"""
     
     # Create the global attributes
     if self.isnorth:
         proj = "UTM %d (%s) in northern hemisphere."%(self.utmzone,self.CS)
     else:
         proj = "UTM %d (%s) in southern hemisphere."%(self.utmzone,self.CS)
     
     intparamstr = 'Interpolation Type: %s, Number of neighbours: %d, Maximum search distance: %3.1f m'%(self.interptype,self.NNear,self.maxdist)
     if self.interptype=='idw':
         intparamstr += ', IDW power: %2.1f'%self.p
     elif self.interptype=='kriging':
         intparamstr += ', Variogram model: %s, sill: %3.1f, nugget: %3.1f, range: %3.1f'%(self.varmodel,self.sill,self.nugget,self.vrange)
         
     globalatts = {'title':'DEM model',\
     'history':'Created on '+time.ctime(),\
     'Input dataset':self.infile,\
     'Projection':proj,\
     'Interpolation Parameters':intparamstr}
     
     
     nc = Dataset(outfile, 'w', format='NETCDF4')
     # Write the global attributes
     for gg in globalatts.keys():
         nc.setncattr(gg,globalatts[gg])
         
     # Create the dimensions
     dimnamex = 'nx'
     dimlength = self.grd.nx
     nc.createDimension(dimnamex,dimlength)
     dimnamey = 'ny'
     dimlength = self.grd.ny
     nc.createDimension(dimnamey,dimlength)
     
     # Create the lat lon variables
     tmpvarx=nc.createVariable('X','f8',(dimnamex,))
     tmpvary=nc.createVariable('Y','f8',(dimnamey,))
     tmpvarx[:] = self.grd.X[0,:]
     tmpvary[:] = self.grd.Y[:,0]
     # Create the attributes
     tmpvarx.setncattr('long_name','Easting')
     tmpvarx.setncattr('units','metres')
     tmpvary.setncattr('long_name','Northing')
     tmpvary.setncattr('units','metres')
     
     # Write the topo data
     tmpvarz=nc.createVariable('topo','f8',(dimnamey,dimnamex),zlib=True,least_significant_digit=1)
     tmpvarz[:] = self.Z
     tmpvarz.setncattr('long_name','Topographic elevation')
     tmpvarz.setncattr('units','metres')
     tmpvarz.setncattr('coordinates','X, Y')
     tmpvarz.setncattr('positive','up')
     tmpvarz.setncattr('datum',self.vdatum)
     
     nc.close()
     
     print 'DEM saved to %s.'%outfile
Author: jadelson, Project: suntanspy, Lines: 60, Source: demBuilder.py

Example 4: main

# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import setncattr [as alias]
def main(args):
    dsin = Dataset(args.infile)
    dsout = Dataset(args.outfile, 'w')

    # Copy global attributes
    for att in dsin.ncattrs():
        dsout.setncattr(att, dsin.getncattr(att))

    # Determine variables to copy
    if args.variable:
        if not set(args.variable).issubset(set(dsin.variables.keys())):
            raise AssertionError('Specified variables are not available in the input file')
        vars_to_copy = set(args.variable)
        
        # Vars as exclusion list?
        if args.exclude:
            vars_to_copy = set(dsin.variables.keys()).difference(vars_to_copy)
    else:
        vars_to_copy = dsin.variables.keys()

    # Determine dimensions to copy
    dims_to_copy = set()
    for v in vars_to_copy:
        dims_to_copy = dims_to_copy.union(set(dsin.variables[v].dimensions))
    # Add associated dimvars (assumes dimvars have the same name as their dimension)
    if not all([x in dsin.variables.keys() for x in dims_to_copy]):
        raise AssertionError('Not all dimensions being copied have associated dimension variables')

    print 'Copying variables: {}'.format(vars_to_copy)
    print 'Copying dimensions: {}'.format(dims_to_copy)

    # Copy Dimensions
    for dname, dim in dsin.dimensions.items():
        if dname not in dims_to_copy:
            continue
        print dname, len(dim)
        dsout.createDimension(dname, len(dim) if not dim.isunlimited() else None)

    # Copy Variables
    for v_name, varin in dsin.variables.items():
        if v_name not in vars_to_copy:
            continue
        outVar = dsout.createVariable(v_name, varin.datatype, varin.dimensions)
        print v_name, varin.datatype, varin.dimensions, varin.shape, len(varin.shape)

        # Copy all variable attributes
        outVar.setncatts({k: varin.getncattr(k) for k in varin.ncattrs()})

        # Iteratively write variables with 3+ dimensions
        if len(varin.shape) > 2:
            count = float(varin.shape[0])
            for i in range(varin.shape[0]):
                if args.progress: 
                    sys.stdout.write("\r{:.2%}".format(i/count))
                outVar[i,:,:] = varin[i,:,:]
        else:
            outVar[:] = varin[:]
Author: pacificclimate, Project: netcdf-pycli, Lines: 59, Source: ncvarsubset.py

Example 5: setUp

# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import setncattr [as alias]
 def setUp(self):
     self.netcdf_file = FILE_NAME
     nc = Dataset(self.netcdf_file,'w',format='NETCDF3_64BIT_DATA')
     # create a 64-bit dimension
     d = nc.createDimension('dim',dimsize) # 64-bit dimension
     # create an 8-bit unsigned integer variable
     v = nc.createVariable('var',np.uint8,'dim')
     v[:ndim] = arrdata
     # create a 64-bit integer attribute (issue #878)
     nc.setncattr('int64_attr', np.int64(-9223372036854775806))
     nc.close()
Author: Unidata, Project: netcdf4-python, Lines: 13, Source: tst_cdf5.py

Example 6: wrf_copy_attributes

# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import setncattr [as alias]
def wrf_copy_attributes( infilename, outfilename ):
    ## open the files
    infile = Dataset( infilename )
    outfile = Dataset( outfilename, 'w', format='NETCDF4' )
    
    ## create dimensions
    level = outfile.createDimension( 'bottom_top', None )
    time = outfile.createDimension( 'time', None )
    lon = outfile.createDimension( 'south_north', infile.getncattr('SOUTH-NORTH_PATCH_END_UNSTAG') )
    lat = outfile.createDimension( 'west_east', infile.getncattr('WEST-EAST_PATCH_END_UNSTAG') )
    
    ## copy the global attributes to the new file
    inattrs = infile.ncattrs()
    for attr in inattrs:
        outfile.setncattr( attr, infile.getncattr( attr ) )
    infile.close()
    outfile.close()
Author: dljuly3, Project: python_scripts, Lines: 19, Source: parallel_interp_pv.py

Example 7: savenc

# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import setncattr [as alias]
 def savenc(self,outfile='DEM.nc'):
     """ Saves the DEM to a netcdf file"""
     
     # Create the global attributes
     
     globalatts = {'title':'DEM model',\
     'history':'Created on '+time.ctime(),\
     'Input dataset':self.infile}
     
     
     nc = Dataset(outfile, 'w', format='NETCDF4')
     # Write the global attributes
     for gg in globalatts.keys():
         nc.setncattr(gg,globalatts[gg])
         
     # Create the dimensions
     dimnamex = 'nx'
     dimlength = self.nx
     nc.createDimension(dimnamex,dimlength)
     dimnamey = 'ny'
     dimlength = self.ny
     nc.createDimension(dimnamey,dimlength)
     
     # Create the lat lon variables
     tmpvarx=nc.createVariable('X','f8',(dimnamex,))
     tmpvary=nc.createVariable('Y','f8',(dimnamey,))
     tmpvarx[:] = self.X[0,:]
     tmpvary[:] = self.Y[:,0]
     # Create the attributes
     tmpvarx.setncattr('long_name','Easting')
     tmpvarx.setncattr('units','metres')
     tmpvary.setncattr('long_name','Northing')
     tmpvary.setncattr('units','metres')
     
     # Write the topo data
     tmpvarz=nc.createVariable('topo','f8',(dimnamey,dimnamex),zlib=True,least_significant_digit=1)
     tmpvarz[:] = self.Z
     tmpvarz.setncattr('long_name','Topographic elevation')
     tmpvarz.setncattr('units','metres')
     tmpvarz.setncattr('coordinates','X, Y')
     tmpvarz.setncattr('positive','up')
     
     nc.close()
     
     print 'DEM saved to %s.'%outfile
Author: jadelson, Project: suntanspy, Lines: 47, Source: dem.py

Example 8: save

# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import setncattr [as alias]
 def save(self, fname, use_datetime_str=True):
     """Save into a netCDF4 file"""
     if os.path.exists(fname):
         if os.path.islink(fname):
             os.unlink(fname)
         else:
             os.remove(fname)
     f = Dataset(fname, 'w', format='NETCDF4')
     for dname, length in self.dims.items():
         if dname == 'TIME':
             f.createDimension(dname)
         else:
             f.createDimension(dname, length)
     for vname in self.vars:
         dimnames = self.var_dims[vname]
         t = self.vars[vname].dtype.str.lstrip('<>|=')
         if type(vname) is unicode:
             vname = str(vname)
         if vname == 'datetime':
             if use_datetime_str:
                 f.createVariable(vname, str, dimnames)
                 for i, dt in enumerate(self.vars[vname]):
                     f.variables[vname][i] = self.vars[vname][i].strftime(_std_datetime_fmt)
             else:
                 f.createVariable(vname, 'i4', dimnames)
                 f.variables[vname].setncattr('units', _std_datetime_units)
                 f.variables[vname][:] = date2num(self.vars[vname], _std_datetime_units)
         else:
             if t.startswith('O'):
                 t = str
             f.createVariable(vname, t, dimnames)
             f.variables[vname][:] = self.vars[vname]
         f.variables[vname].setncattr('aver_method', self.var_aver_methods[vname])
 
     for attr in self.attrs:
         if type(attr) is unicode:
             attr = str(attr)
         if isinstance(self.attrs[attr], datetime):
             f.setncattr(attr, self.attrs[attr].strftime(_std_datetime_fmt))
         else:
             a = str(self.attrs[attr]) if type(self.attrs[attr]) is unicode else self.attrs[attr]
             f.setncattr(attr, a)
     f.close()
Author: yeyuguo, Project: metamet, Lines: 45, Source: lidar.py

Example 9: copy_mmd_dataset_writeable

# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import setncattr [as alias]
def copy_mmd_dataset_writeable(old_mmd_dataset):
    dataset_path = old_mmd_dataset.filepath().replace(".nc", "-corrected.nc4")
    print("creating file " + dataset_path)
    new_mmd_dataset = Dataset(dataset_path, mode="w")

    for dimension_name in old_mmd_dataset.dimensions:
        dimension = old_mmd_dataset.dimensions[dimension_name]
        new_mmd_dataset.createDimension(dimension_name, len(dimension))
    for variable_name in old_mmd_dataset.variables:
        print("  creating variable " + variable_name)
        old_variable = old_mmd_dataset.variables[variable_name]
        new_variable = new_mmd_dataset.createVariable(variable_name, old_variable.dtype,
                                                      dimensions=old_variable.dimensions, zlib=True)
        if variable_name.find("brightness_temperature") != -1:
            for attribute_name in old_variable.ncattrs():
                print("    creating attribute " + attribute_name)
                if attribute_name == "valid_min":
                    attribute_value = -26000
                elif attribute_name == "valid_max":
                    attribute_value = 28000
                elif attribute_name == "scale_factor":
                    attribute_value = 0.005
                elif attribute_name == "add_offset":
                    attribute_value = 180.0
                else:
                    attribute_value = old_variable.getncattr(attribute_name)
                new_variable.setncattr(attribute_name, attribute_value)
        else:
            for attribute_name in old_variable.ncattrs():
                print("    creating attribute " + attribute_name)
                attribute_value = old_variable.getncattr(attribute_name)
                new_variable.setncattr(attribute_name, attribute_value)
    for attribute_name in old_mmd_dataset.ncattrs():
        print("  creating global attribute " + attribute_name)
        attribute_value = old_mmd_dataset.getncattr(attribute_name)
        new_mmd_dataset.setncattr(attribute_name, attribute_value)

    return new_mmd_dataset
Author: bcdev, Project: sst-cci-toolbox, Lines: 40, Source: fixmmd6b.py

Example 10: copy_mmd_dataset_writeable

# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import setncattr [as alias]
def copy_mmd_dataset_writeable(old_mmd_dataset):
    dataset_path = old_mmd_dataset.filepath().replace(".nc", "-corrected.nc4")
    print("creating file " + dataset_path)
    new_mmd_dataset = Dataset(dataset_path, mode="w")

    for dimension_name in old_mmd_dataset.dimensions:
        dimension = old_mmd_dataset.dimensions[dimension_name]
        new_mmd_dataset.createDimension(dimension_name, len(dimension))
    for variable_name in old_mmd_dataset.variables:
        print("  creating variable " + variable_name)
        old_variable = old_mmd_dataset.variables[variable_name]
        new_variable = new_mmd_dataset.createVariable(variable_name, old_variable.dtype,
                                                      dimensions=old_variable.dimensions, zlib=True)
        for attribute_name in old_variable.ncattrs():
            print("    creating attribute " + attribute_name)
            attribute_value = old_variable.getncattr(attribute_name)
            new_variable.setncattr(attribute_name, attribute_value)
    for attribute_name in old_mmd_dataset.ncattrs():
        print("  creating global attribute " + attribute_name)
        attribute_value = old_mmd_dataset.getncattr(attribute_name)
        new_mmd_dataset.setncattr(attribute_name, attribute_value)

    return new_mmd_dataset
Author: bcdev, Project: sst-cci-toolbox, Lines: 25, Source: fixmmd.py

Example 11: TrajectoryNetCDFWriter

# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import setncattr [as alias]

#......... part of the code omitted here .........
            
        if not self._nc:
            self._logger.error('The NetCDF file has not been initialized')
            return
            
        if not self._nc.isopen():
            self._logger.warning('The NetCDF file is already closed: {:s}'.format(self._output_path))
            return
            
        # Update global geospatial attributes
        self._update_geospatial_global_attributes()
        # Update global time_coverage attributes
        self._update_time_coverage_global_attributes()
        
        self._nc.close()
        
        #output_nc = self._output_path
        
        self._nc = None
        #self._output_path = None
        
        return self._out_nc
            
        
    def update_history(self, message):
        """ Updates the global history attribute with the message appended to
        and ISO8601:2004 timestamp
        """

        # Get timestamp for this access
        now_time_ts = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ")
        history_string = '{:s}: {:s}\n'.format(now_time_ts, message)
        if 'history' not in self._nc.ncattrs():
            self._nc.setncattr('history', history_string)
            return
        
        previous_history = self._nc.history.strip()
        if not previous_history:
            self._nc.history = history_string
        else:
            self._nc.history += history_string
        
        
    def set_platform(self):
        """ Creates a variable that describes the glider
        """

        self.set_scalar('platform')
        for key, value in sorted(self._attributes['deployment']['platform'].items()):
            self._nc.variables['platform'].setncattr(key, value)
        
    def set_global_attributes(self):
        """ Sets a dictionary of values as global attributes
        """

        for key, value in sorted(self._attributes['global'].items()):
            self._nc.setncattr(key, value)
            
    
    def _update_time_coverage_global_attributes(self):
        """Update all global time_coverage attributes.  The following global
        attributes are created/updated:
            time_coverage_start
            time_coverage_end
            time_coverage_duration
        """
Author: benjwadams, Project: gncutils, Lines: 70, Source: TrajectoryNetCDFWriter.py

Example 12: Dataset

# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import setncattr [as alias]
            b[0][b[1].index(field)][record,:] = np.transpose(vert.vert_int(np.transpose(b[0][b[1].index(field)][record,:], (1,2,0)),z_ri,z_ro,undef,mask_rho,mask_rho,[b[0][b[1].index("zeta")][0,:].shape[1],b[0][b[1].index("zeta")][0,:].shape[0],Ni,No]), (2,0,1))[:]    
        
#    except:
#        print "not a mask_rho?"
#        sys.exit()



if args.field:
    if not args.outf:
        nc = Dataset(args.inf+"_vert", 'w', format='NETCDF3_64BIT')
    else:
        nc = Dataset(args.outf, 'w', format='NETCDF3_64BIT')
    for i in f.ncattrs():
        print i, f.getncattr(i)
        nc.setncattr(i, f.getncattr(i))
    nc.delncattr('history')
    nc.setncattr('history', f.getncattr('history')+'\n Modified by '+str(os.path.basename(__file__))+' '+str(tm.strftime("%c")))


    for i in f.dimensions.keys():
        print i
        nc.createDimension(i, len(f.dimensions[i]))

    for i in f.variables.keys():
        print i, f.variables[i].dimensions
        w = nc.createVariable(i, 'f8',  f.variables[i].dimensions)

        for j in f.variables[i].ncattrs():
       #     print j, ": ", f.variables[i].getncattr(j), f.variables[i].dimensions
            w.setncattr(j, f.variables[i].getncattr(j))
Author: aleatorius, Project: roms_mypytools, Lines: 33, Source: clim_vert_grid4.py

Example 13: run

# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import setncattr [as alias]
  def run(self):
	if type(self.fnsi) == type('abc'):
		self.fnsi = glob(self.fnsi)
		  
	if not exists(self.fnsi[0]):
		print 'mergeNC:\tERROR:\tinputfile name does not exists:', self.fnsi[0]
		return
	if self.debug: print 'mergeNC:\tINFO:\topening dataset:\t', self.fnsi[0]	
	nci = Dataset(self.fnsi[0],'r')#Quiet =True)
	
	if self.timeAverage:
		print 'mergeNC:\tWARNING:\ttimeAverage is not yet debugged. '# are no use:', self.vars
		#return		
	if not self.vars:
		if self.debug: print 'mergeNC:\tINFO:\tvariables to save are empty, saving all.'
		self.vars = nci.variables.keys()

	if self.vars == 'all':
		if self.debug: print 'mergeNC:\tINFO:\tvariables to save:  \'all\' requested. '
		self.vars = nci.variables.keys()
				
	if self.cal != 'standard':
		if self.debug: print 'mergeNC:\tINFO:\tUsing non-standard calendar:', self.cal
				
	
	#check that there are some overlap between input vars and nci:
	for v in self.vars:
		if v in nci.variables.keys():continue
		print 'mergeNC:\tERROR:\tvariable,' ,v,', not found in ',self.fnsi[0]
		return
		
	#create dataset and header.
	if self.debug: print 'mergeNC:\tINFO:\tCreating a new dataset:\t', self.fno
	nco = Dataset(self.fno,'w')
	for a in nci.ncattrs():
		try:
		    if self.debug: print 'mergeNC:\tINFO:\tcopying attribute: \t\"'+str(a)+'\":\t', str(nci.getncattr(a))
		    nco.setncattr(a,nci.getncattr(a))
		except:
		    if self.debug: print 'changeNC:\twarning:\tThat attribute probably isn\'t using ASCII characters!'			
	appendToDesc= 'Reprocessed on '+todaystr()+' by '+getuser()+' using mergeNC.py'
	try: nco.Notes = nci.Notes + '\n\t\t'+appendToDesc
	except: nco.Notes = appendToDesc
	
	# list of variables to save, assuming some conventions
	alwaysInclude = ['time', 'lat','lon', 'latbnd', 'lonbnd', 'latitude', 'longitude', 't','nav_lat','nav_lon', 'time_counter', 'deptht','depth','depthu','depthv','depthw','z','month',]
	alwaysInclude = intersection(nci.variables.keys(),alwaysInclude) 
	save = list(set(sorted(alwaysInclude + self.vars)))
	time = intersection(['time', 't','time_counter','month',], alwaysInclude)
	if len(time) ==1: tvar=time[0]
	else: tvar = 'time'
	
	missing = {}
	if self.fullCheck:
	    if self.debug: print 'mergeNC:\tINFO:\tPerforming full check for missing entries'
	    for t,fni in enumerate(self.fnsi):
		#if self.debug: print 'mergeNC:\tINFO:\tOpening ', fni, ' ...', t   
		nci = Dataset(fni,'r')
		keys =nci.variables.keys()
		for s in save:
			if s in alwaysInclude:continue
			if s not in keys:
				print 'mergeNC:\tWARNING:\tFull check: ',s,' is missing from ', fni
				try: missing[s].append(fni)
				except:missing[s] = [fni,]
	    	nci.close()

 	    for s in missing.keys():
	        #remove key: 	    
	    	#print 'mergeNC:\tWARNING:\tFull check:\tremoving',s,' from ',save
	    	#save.remove(s)
	        
	        #remove missing files:
	    	for fn in missing[s]: 
		    	print 'mergeNC:\tWARNING:\tFull check:\tremoving',fni,' from files'	    	
	    		try:self.fnsi.remove(fn)
	    		except: print 'mergeNC:\tWARNING:\tFull check:\t',fni,' already removed from files'


  
	
	# create dimensions:
	nci = Dataset(self.fnsi[0],'r')#Quiet =True)	
	for d in nci.dimensions.keys():
	  if nci.dimensions[d].isunlimited() or d.lower() in ['time','time_counter',time]: dimSize = None
	  else:	  dimSize=len(nci.dimensions[d])
	  nco.createDimension(d, dimSize)	
	  if self.debug: print 'mergeNC:\tINFO:\tCreating Dimension:', d,dimSize

	# create Variables:
	for var in save:
		dt = nci.variables[var].dtype
		
		if self.debug: 
				print 'mergeNC:\tINFO:\tCreating Variable:',var,nci.variables[var].dtype,nci.variables[var].dimensions,
				print "zlib=True,complevel=5,fill_value=",default_fillvals['f8']

	  	nco.createVariable(var, nci.variables[var].dtype, nci.variables[var].dimensions,zlib=True,complevel=5,fill_value=default_fillvals['f8'])

	# Long Names:
#......... part of the code omitted here .........
Author: ledm, Project: NetCDF_manip, Lines: 103, Source: mergeNC.py

Example 14: regrid2

# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import setncattr [as alias]
def regrid2(infile,geofilename,datestr):
	"""
	Regrid FFDAS input .nc files to the specified
	WRF domain and outputs the proper file
	C. Martin - 7/2016
	- This version will get domain grid information from WRF geogrid files
	"""
	# read in input netCDF file
	datain = Dataset(infile,'r')
	# read in input geogrid file
	geofile = Dataset(geofilename,'r')
	# get domain info from Geogrid file
	latwrf = geofile.variables['XLAT_M'][0]
	lonwrf = geofile.variables['XLONG_M'][0]
	# get the four corners of the domain
	lat_s = latwrf[0,0];lat_n = latwrf[-1,-1]
	lon_w = lonwrf[0,0];lon_e = lonwrf[-1,-1]
	# get the resolution in degrees
	res = (lat_n - lat_s)/len(latwrf[0,:])
	# get input lat and lon
	lat1 = datain.variables['latitude'][:]
	lon1 = datain.variables['longitude'][:]
	# indices of data within WRF domain
	lat_inds = np.where(( lat1 >= lat_s-0.1) & (lat1 <= lat_n+0.1))
	lon_inds = np.where(( lon1 >= lon_w-0.1) & (lon1 <= lon_e+0.1))
	# subset the data
	flux = datain.variables['flux'][:]
	fluxsubset = flux[:,lat_inds[0],:]
	fluxsubset = fluxsubset[:,:,lon_inds[0]]
	lons, lats = np.meshgrid(lon1[lon_inds[0]], lat1[lat_inds[0]])
	# the FFDAS data is in units kg/cell/hr
	# must convert it to mol/km^2/hr for WRF
	mass_C = 12.
	area_div = 11**2 # assuming 11km for each 0.1 degree and squaring it for km^2
	# open the output file for writing
	# get the path to the output file
	# wrf looks for wrfchemi_dXX_YYYY-MM-DD_HH:MM:SS
	yearstr = datestr[0:4]; monstr = datestr[4:6]; daystr = datestr[6:8]
	geodir = geofilename.split('/')
	del geodir[-1]
	wrfdir = ''
	if geodir[0] == '':
		for a in range(len(geodir)-1):
	  		wrfdir = wrfdir+'/'+geodir[a+1]
	else:
		for a in range(len(geodir)):
			wrfdir = wrfdir+'/'+geodir[a]
	domain = geofilename[-5:]
	domain = domain[0:2]
	emissfile = 'wrfchemi_d'+domain+'_'+yearstr+'-'+monstr+'-'+daystr+'_00:00:00' # NOTE change this later to be more flexible with start time?
	# open output netCDF file
	dataout = Dataset(wrfdir+'/'+emissfile, "w", format="NETCDF4")
	# set up the regridded data
	newflux = np.zeros((24,1,len(latwrf[:,0]),len(latwrf[0])))
	# regrid the data
	for i in range(0,24):
		regridded = interpolate.griddata((lons.flatten(),lats.flatten()),fluxsubset[i].flatten(),(lonwrf,latwrf),method='linear')
		newflux[i,0,:,:] = (regridded*1000./mass_C)/area_div # convert to g and then moles then divide to get km^2
	# set up output file
	timeout = dataout.createDimension("Time", None)
	StrLength = dataout.createDimension("StrLength", 19)
	lat2 = dataout.createDimension("latitude", len(latwrf[:,0]))
	lon2 = dataout.createDimension("longitude", len(latwrf[0]))
	ez = dataout.createDimension("emissions_zdim",1)
	times = dataout.createVariable("Times", "S1", ("Time","StrLength"))
	co2 = dataout.createVariable("E_CO2","f4",("Time","emissions_zdim","latitude","longitude"))
	co2.setncattr("Sector","PMCH")
	co2.setncattr("FieldType", 104)
	timearr = []
	for i in range(24):
		exec("timestr1 = yearstr+'-'+monstr+'-'+daystr+'_%02d:00:00' % i")
		timearr.append(list(timestr1))
	# add some metadata
	dataout.description = "Regridded FFDAS flux netCDF file - "+str(res)+" degree"
	dataout.history = "Created " + time.ctime(time.time())
	dataout.source = "ffdas2wrf.py - C. Martin - Univ. of MD - 7/2016"
	co2.units = "mol/km^2/hr"
	dataout.setncattr("MMINLU", "USGS")
	dataout.setncattr("NUM_LAND_CAT", 24)
	# write to file
	times[:] = timearr
	co2[:] = newflux
	# close files
	datain.close()
	geofile.close()
	dataout.close()
Author: martin2098, Project: WRF-CO2, Lines: 88, Source: ffdas2wrf.py

Example 15: regrid

# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import setncattr [as alias]
def regrid(infile,outfile,lat_s,lat_n,lon_w,lon_e,res,datestr):
  # read in input netCDF file
  datain = Dataset(infile,'r')

  # open output netCDF file
  dataout = Dataset(outfile, "w", format="NETCDF4")

  # get dimensions from input file
  lat1 = datain.variables['latitude'][:]
  lon1 = datain.variables['longitude'][:]

  # indices
  lat_inds = np.where(( lat1 >= lat_s-0.1) & (lat1 <= lat_n+0.1))
  lon_inds = np.where(( lon1 >= lon_w-0.1) & (lon1 <= lon_e+0.1))
  
  # subset the data
  flux = datain.variables['flux'][:]
  fluxsubset = flux[:,lat_inds[0],:]
  fluxsubset = fluxsubset[:,:,lon_inds[0]]

  # regrid the data
  newlat = np.arange(lat_s+res/2.,lat_n-res/2.,res)
  #newlon = np.arange(lon_w+res/2.,lon_e-res/2.,res)
  newlon = np.arange(lon_w+res/2.,lon_e+res,res)

  # the FFDAS data is in units kg/cell/hr
  # must convert it to mol/km^2/hr for WRF
  mass_C = 12.
  area_div = (0.1/res)**2 # change in resolution squared to divide by (I think this is right?)

  lons, lats = np.meshgrid(lon1[lon_inds[0]], lat1[lat_inds[0]])
  newlons, newlats = np.meshgrid(newlon, newlat)
  newflux = np.zeros((24,1,len(newlat),len(newlon)))
  ppm400 = np.zeros((24,1,len(newlat),len(newlon)))

  for i in range(0,24):
    regridded = interpolate.griddata((lons.flatten(),lats.flatten()),fluxsubset[i].flatten(),(newlons,newlats),method='linear')
    newflux[i,0,:,:] = (regridded*1000./mass_C)/area_div # convert to g and then moles then divide by new area
    ppm400[i,0,:,:] = 400.


  # set up output file
  timeout = dataout.createDimension("Time", None)
  StrLength = dataout.createDimension("StrLength", 19)
  lat2 = dataout.createDimension("latitude", len(newlat))
  #lat2 = dataout.createDimension("south_north", len(newlat))
  #lon2 = dataout.createDimension("west_east", len(newlon))
  lon2 = dataout.createDimension("longitude", len(newlon))
  ez = dataout.createDimension("emissions_zdim",1)
  times = dataout.createVariable("Times", "S1", ("Time","StrLength"))
  #latitudes = dataout.createVariable("latitude","f4",("latitude",))
  #longitudes = dataout.createVariable("longitude","f4",("longitude",))
  co2 = dataout.createVariable("E_CO2","f4",("Time","emissions_zdim","latitude","longitude"))
  #co2_2 = dataout.createVariable("CO2_ANT","f4",("Time","emissions_zdim","latitude","longitude"))
  #co2 = dataout.createVariable("E_CO2","f4",("Time","emissions_zdim","south_north","west_east"))

  co2.setncattr("Sector","PMCH")
  co2.setncattr("FieldType", 104)
  #co2_2.setncattr("Sector","PMCH")
  #co2_2.setncattr("FieldType", 104)

  timearr = []

  yearstr = datestr[0:4]; monstr = datestr[4:6]; daystr = datestr[6:8]

  for i in range(24):
    exec("timestr1 = yearstr+'-'+monstr+'-'+daystr+'_%02d:00:00' % i")
    timearr.append(list(timestr1))

  # add some metadata
  dataout.description = "Regridded FFDAS flux netCDF file - "+str(res)+" degree"
  dataout.history = "Created " + time.ctime(time.time())
  dataout.source = "convert_ffdas_hrly.py - C. Martin - Univ. of MD - 2/2016"
  co2.units = "kgC/cell/h"
  #co2_2.units = "kgC/cell/h"

  dataout.setncattr("MMINLU", "USGS")
  dataout.setncattr("NUM_LAND_CAT", 24)

  # write to file
  times[:] = timearr
  co2[:] = newflux
  #co2_2[:] = ppm400

  # close files
  datain.close()
  dataout.close()
Author: martin2098, Project: WRF-CO2, Lines: 89, Source: ffdas2wrf.py


Note: The netCDF4.Dataset.setncattr method examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The code snippets were selected from open-source projects contributed by various developers; copyright in the source code belongs to the original authors. For distribution and use, please refer to the License of the corresponding project. Do not reproduce without permission.