

Python Dataset.variables[var][:] method code examples

This article collects and summarizes typical usage examples of the netCDF4.Dataset.variables[var][:] method in Python. If you are wondering how to use Dataset.variables[var][:], what it does, or what real code that uses it looks like, the curated examples below should help. You can also explore further usage examples of netCDF4.Dataset, the class this method belongs to.


The following presents 15 code examples of the Dataset.variables[var][:] method, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
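Before turning to the collected examples, here is a minimal, self-contained sketch of the pattern they all rely on: slicing Dataset.variables[var][:] reads a variable's data as a (masked) NumPy array, and assigning to the same slice writes data back into the file. The file name example.nc and the variable and dimension names below are hypothetical, chosen only for illustration.

# A minimal sketch of reading and writing through Dataset.variables[var][:].
# The file name 'example.nc' and the variable/dimension names are hypothetical.
import numpy as np
from netCDF4 import Dataset

# Create a small file with one variable so the pattern can be shown end to end.
with Dataset('example.nc', 'w') as nc:
    nc.createDimension('time', None)   # unlimited dimension
    nc.createDimension('lat', 4)
    var = nc.createVariable('temperature', 'f4', ('time', 'lat'))
    var.units = 'K'
    # Assigning beyond the current length grows the unlimited time dimension.
    var[0:3, :] = np.arange(12, dtype='f4').reshape(3, 4)

# Re-open the file and use the variables[var][:] pattern to read and update it.
with Dataset('example.nc', 'r+') as nc:
    varname = 'temperature'
    data = nc.variables[varname][:]          # read the full array (a masked array)
    nc.variables[varname][:] = data + 1.0    # write the modified array back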

Example 1: multiply_by_area

# Required module import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import variables[var][:] [as alias]
def multiply_by_area(options):
    data=Dataset(options.in_file)
    output=Dataset(options.out_file,'w')
    replicate_netcdf_file(output,data)
    lengths=spherical_tools.coords(data)

    var_list=['wa','mass','weight_wa']
    for var in var_list:
        replicate_netcdf_var(output,data,var)
        if var=='wa' and options.no_wa:
            output.variables[var][:]=data.variables[var][:]
        else:
            output.variables[var][:]=data.variables[var][:]*np.reshape(lengths.area_lat_lon,(1,1,)+data.variables[var].shape[-2:])
        output.sync()
    data.close()
    return output
Developer ID: laliberte, Project: pydiv, Lines of code: 18, Source file: pydiv.py
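multiply_by_area() expects an options object carrying in_file, out_file and no_wa, and it also depends on helpers defined elsewhere in the pydiv project (replicate_netcdf_file, replicate_netcdf_var, spherical_tools). The argparse wiring below is only an assumption about how such an object might be built; the attribute names are inferred from the function body and the file names are placeholders.

# Hypothetical driver for multiply_by_area(); attribute names inferred from the function body.
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('in_file')
parser.add_argument('out_file')
parser.add_argument('--no_wa', action='store_true')
options = parser.parse_args(['input.nc', 'output.nc'])   # placeholder file names

out = multiply_by_area(options)   # returns the still-open output Dataset
out.close()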

Example 2: wa_from_div

# Required module import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import variables[var][:] [as alias]
def wa_from_div(options):
    data=Dataset(options.in_file)
    output=Dataset(options.out_file,'w')
    replicate_netcdf_file(output,data)

    #Retrieve data and create output:
    vars_space=dict()
    for var in ['div','wa']:
        if var=='wa': replicate_netcdf_var(output,data,var)
        vars_space[var]=data.variables[var][:,:,:,:].astype(np.float,copy=False)
    for var in ['mass']:
        vars_space[var]=(data.variables[var][1:,:,:,:].astype(np.float,copy=False) -
                         data.variables[var][:-1,:,:,:].astype(np.float,copy=False))
    
    data.close()
    
    #Compute the mass divergence:
    DIV = vars_space['mass'] + vars_space['div']
    vars_space['wa'][:,1:,...]=-np.cumsum(np.ma.array(DIV).anom(1),axis=1) 
    #vars_space['wa'][1:-1,1:,:]=np.ma.array(DIV).anom(0) 
    vars_space['wa'][:,0,...]=0.0
    for var in ['wa']:
        output.variables[var][:]=vars_space[var]

    output.sync()
    output.close()
    return
Developer ID: laliberte, Project: pydiv, Lines of code: 29, Source file: pydiv.py

Example 3: createGUI

# Required module import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import variables[var][:] [as alias]
def createGUI(edits_file, topo_file, var='depth'):

  # Open netcdf file with list of edits
  try: rge = Dataset(edits_file, 'r')
  except: error('There was a problem opening "'+edits_file+'".')

  if not ( 'iEdit' in rge.variables and 'jEdit' in rge.variables and 'zEdit' in rge.variables):
    print edits_file,'does not have any recorded edits'
    return

  try:
    iEdit = rge.variables['iEdit'][:]
    jEdit = rge.variables['jEdit'][:]
    zEdit = rge.variables['zEdit'][:]
    zEdit_units = rge.variables['zEdit'].units
    eni = rge.variables['ni'][:]
    enj = rge.variables['nj'][:]
    rge.close()
  except: raise Exception('There was a problem reading '+edits_file)

  rg = Dataset(topo_file, 'r+')

  # Sanity check
  if rg.variables[var].units != zEdit_units: raise Exception('Units mismatch!')
  if rg.variables[var].shape[0] != enj: raise Exception('j-dimension mismatch!')
  if rg.variables[var].shape[1] != eni: raise Exception('i-dimension mismatch!')

  depth = rg.variables[var][:,:]

  if ( 'iEdit' in rg.variables and 'jEdit' in rg.variables and 'zEdit' in rg.variables):
    # Undo existing edits
    for n,(i,j,z) in enumerate(zip(rg.variables['iEdit'][:], rg.variables['jEdit'][:], rg.variables['zEdit'][:])):
      depth[j,i] = z
  else:
    # Create new edit variables
    rg.createDimension('nEdits', None)
    nc_iEdit = rg.createVariable('iEdit', 'i', ('nEdits',))
    nc_jEdit = rg.createVariable('jEdit', 'i', ('nEdits',))
    nc_zEdit = rg.createVariable('zEdit', 'f', ('nEdits',))
    nc_iEdit.long_name = 'i-index of edited data'
    nc_jEdit.long_name = 'j-index of edited data'
    nc_zEdit.long_name = 'Original value of edited data'
    nc_zEdit.units = zEdit_units

  n = rg.variables['iEdit'].shape[0]
  if n > zEdit.shape[0]:
    raise Exception('List of existing edits is longer than list of new edits')

  # Apply edits
  old_depths = numpy.zeros( zEdit.shape[0] )
  for n,(i,j,z) in enumerate(zip(iEdit, jEdit, zEdit)):
    old_depths[n] = depth[j,i]
    depth[j,i] = z

  rg.variables[var][:] = depth
  rg.variables['iEdit'][:] = iEdit
  rg.variables['jEdit'][:] = jEdit
  rg.variables['zEdit'][:] = old_depths

  rg.close()
Developer ID: feiliuesmf, Project: MOM6, Lines of code: 62, Source file: apply_edits.py

Example 4: writeback

# Required module import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import variables[var][:] [as alias]
    def writeback(self):
        if(self.average):
            nc2 = Dataset(self.ncfile+'.averaged.nc','w')

            # Copy back dimensions
            for dim in self.nc.dimensions:
                if(dim != 't'):
                    nc2.createDimension(dim,len(self.nc.dimensions[dim]))

            # Copy back averaged data 
            for var in self.nc.variables:
                dim = self.nc.variables[var].dimensions[-1]
                if(dim != 't'):
                    nc2.createVariable(var,'f8',(dim,),fill_value=default_fillvals['f8'])
                    nc2.variables[var].units = self.nc.variables[var].units
                    # TO-DO MicroHH: fix long_name / longname problem 
                    #nc2.variables[var].long_name = self.nc.variables[var].long_name
                    nc2.variables[var][:] = getattr(self,var)
            nc2.close()
Developer ID: Chiil, Project: microhh, Lines of code: 21, Source file: readmicrohh.py

Example 5: write

# Required module import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import variables[var][:] [as alias]
def write(nc_params, data):
    """Update netCDF file with new parameters"""

    f = Dataset(nc_params, 'r+')

    note1 = 'ARNO baseflow parameter'
    variables = ['Ds', 'Dsmax', 'Ws', 'c']
    for var in variables:
        print('Writing updated param: {}'.format(var))
        f.variables[var][:] = data[var]
        f.variables[var].note = note1

    note2 = 'Converted NIJSSEN2001 baseflow params to ARNO baseflow params'
    try:
        f.history += note2
    except:
        f.history = note2

    f.close()

    return
Developer ID: orianac, Project: tonic, Lines of code: 23, Source file: NIJSSEN2001_to_ARNO.py
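A hypothetical call to the write() function above: it assumes params.nc already contains variables named Ds, Dsmax, Ws and c, and that data maps each name to an array matching the existing shape. The file name, shape and fill value below are placeholders.

# Hypothetical usage of write(); names, shapes and values are placeholders.
import numpy as np

data = {var: np.full((10, 10), 0.5) for var in ['Ds', 'Dsmax', 'Ws', 'c']}
write('params.nc', data)   # overwrites the existing parameter variables in place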

Example 6: average_two_similar_netcdfs

# Required module import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import variables[var][:] [as alias]
def average_two_similar_netcdfs(file1, file2, outfile):
    '''Take two input files and average every grid cell
    of every variable. This requires that both files have
    the exact same dimensions and variables.'''
    # copy file1 to the end file location
    copyfile(file1, outfile)

    # open your new output file in read/write mode
    root = Dataset(outfile, 'r+')

    # open the second input file in read mode
    root2 = Dataset(file2, 'r')

    # verify that your two files have the same dimensions
    if sorted(root.dimensions.keys()) != sorted(root2.dimensions.keys()):
        raise Exception('These two input files have different dimensions.')

    # verify that your dimensions all have the same length
    for dim in root.dimensions.keys():
        if len(root.dimensions[dim]) != len(root2.dimensions[dim]):
            raise Exception('The dimension ' + dim + ' is not the same length in both files.')

    # verify that your two files have the same variables
    if sorted(root.variables.keys()) != sorted(root2.variables.keys()):
        raise Exception('These two input files have different variables.')

    # verify that your variables all have the same shape
    for var in root.variables.keys():
        if root.variables[var].shape != root2.variables[var].shape:
            raise Exception('The variable ' + var + ' is not the same shape in both files.')

    # loop through all the variables in the outfile and perform the average
    for var in root.variables.keys():
        root.variables[var][:] = (root.variables[var][:] + root2.variables[var][:]) / 2.0

    root.close()
Developer ID: SemanticPrincess, Project: python_for_scientists, Lines of code: 38, Source file: average_two_netcdfs.py
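A hypothetical invocation of average_two_similar_netcdfs(), assuming monthly_a.nc and monthly_b.nc are two structurally identical files; the file names are placeholders.

# Hypothetical usage; the two input files must share dimensions, variables and shapes.
average_two_similar_netcdfs('monthly_a.nc', 'monthly_b.nc', 'monthly_mean.nc')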

Example 7: run

# Required module import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import variables[var][:] [as alias]

#......... part of the code omitted here .........
	  	    
	# manipulate old dimensions:
	for dim in dimensions:
	  newDim = dim
	  dimSize=len(nci.dimensions[dim])
	  if nci.dimensions[dim].isunlimited(): dimSize = None
	  if self.av['dim'][dim]['name']: newDim=self.av['dim'][dim]['name']
	  if self.av['dim'][dim]['newSize']: dimSize = self.av['dim'][dim]['newSize']
	  nco.createDimension(newDim, dimSize)
	  if self.debug: print 'changeNC:\tINFO:\tadding dimension: ',dim,'-->', newDim, '\t(',dimSize,')'


	# list of variables to save
	keys = nci.variables.keys()

	try:	newVars = self.av['newVar'].keys()
	except:	newVars = []

	
	# create Variables:
	for var in newVars:
		dimensions 	= self.av['newVar'][var]['newDims']
		vartype		= self.av['newVar'][var]['dtype']
		nco.createVariable(var, vartype, tuple(dimensions),zlib=True,complevel=5)
		if self.debug: print 'changeNC:\tINFO:\tadding new variable: ',var, '\t(',dimensions,')'
	
	for var in keys:
		if var in newVars:continue 	
		newname = var
		if self.av[var]['name']:newname = self.av[var]['name']
		if newname.lower() in ['false', 'none','remove', 'delete', 0]:
			if self.debug: print 'changeNC:\tINFO:\tremoving variable: ',var
			continue
		dimensions 	= list(nci.variables[var].dimensions)
		vartype 	= nci.variables[var].dtype
		
		for d,dim in enumerate(dimensions):
			if self.av['dim'][dim]['name']: dimensions[d] = self.av['dim'][dim]['name']
			
		if self.av[var]['newDims']: dimensions = self.av[var]['newDims']

      		nco.createVariable(newname, vartype, tuple(dimensions),zlib=True,complevel=5)
	  	if self.debug: print 'changeNC:\tINFO:\tadding variable: ',var,'-->', newname, '\t(',dimensions,')'
	  
	# Long Names:
	for var in newVars:
		nco.variables[var].long_name = self.av['newVar'][var]['long_name']
		if self.debug: print 'changeNC:\tINFO:\tadding new long_name: ',var, '\t(',self.av['newVar'][var]['long_name'],')'
				
	for var in keys:
		if var in newVars:continue 
		long_name = ''
		newname = var	
		if self.av[var]['name']:newname = self.av[var]['name']		
		if newname.lower() in ['false', 'none','remove', 'delete', 0]:continue		
		if self.av[var]['long_name']: long_name= self.av[var]['long_name']
		else: 
		  try:  	long_name=nci.variables[var].long_name
		  except:	print 'changeNC:\tWarning:\tNo long_name for ', var
		if long_name: nco.variables[newname].long_name=long_name
		if self.debug: print 'changeNC:\tINFO:\tadding long_name: ',var,'-->', newname, '\t(',long_name,')'
		
	# Units:
	for var in newVars:
		nco.variables[var].units = self.av['newVar'][var]['units']
		if self.debug: print 'changeNC:\tINFO:\tadding units: ',var, '\t(', self.av['newVar'][var]['units'],')'				
	for var in keys: 
		if var in newVars:continue 
		units = ''
		newname = var
		if self.av[var]['name']:newname = self.av[var]['name']
		if newname.lower() in ['false', 'none','remove', 'delete', 0]:continue		
		if self.av[var]['units']: units= self.av[var]['units']
		else: 
		  try:  	units=nci.variables[var].units
		  except:	print 'changeNC:\tWarning:\tNo units for ', var
		if units: nco.variables[newname].units=units
		if self.debug: print 'changeNC:\tINFO:\tadding units: ',var,'-->', newname, '\t(',units,')'
						
	# Fill Values:
	for var in newVars:
		nco.variables[var][:] = self.av['newVar'][var]['newData']
		if self.debug: print 'changeNC:\tINFO:\tFilling ', var, ' ...',self.av['newVar'][var]['newData']
	for var in keys:
		if var in newVars:continue 	
		newname = var
		func = lambda  x: x
		if self.av[var]['name']:newname = self.av[var]['name'] 
		if newname.lower() in ['false', 'none','remove', 'delete', 0]:continue		
		if self.av[var]['convert']:func = self.av[var]['convert']
		if len(self.av[var]['newData']): arr=self.av[var]['newData']
		else: 	arr = nci.variables[var][:]		
		if self.debug: print 'changeNC:\tINFO:\tCopying ', var, ' ...' ,newname, arr.shape,
		nco.variables[newname][:] =func(arr)
		if self.debug: print '->', nco.variables[newname][:].shape
	# Close netcdfs:
	nco.close()
	nci.close()
	print 'changeNC:\tINFO:\tsuccessfully created:\t', self.fno
	return
Developer ID: ledm, Project: NetCDF_manip, Lines of code: 104, Source file: changeNC.py

Example 8: run

# Required module import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import variables[var][:] [as alias]

#......... part of the code omitted here .........
	    		except: print 'mergeNC:\tWARNING:\tFull check:\t',fni,' already removed from files'


  
	
	# create dimensions:
	nci = Dataset(self.fnsi[0],'r')#Quiet =True)	
	for d in nci.dimensions.keys():
	  if nci.dimensions[d].isunlimited() or d.lower() in ['time','time_counter',time]: dimSize = None
	  else:	  dimSize=len(nci.dimensions[d])
	  nco.createDimension(d, dimSize)	
	  if self.debug: print 'mergeNC:\tINFO:\tCreating Dimension:', d,dimSize

	# create Variables:
	for var in save:
		dt = nci.variables[var].dtype
		
		if self.debug: 
				print 'mergeNC:\tINFO:\tCreating Variable:',var,nci.variables[var].dtype,nci.variables[var].dimensions,
				print "zlib=True,complevel=5,fill_value=",default_fillvals['f8']

	  	nco.createVariable(var, nci.variables[var].dtype, nci.variables[var].dimensions,zlib=True,complevel=5,fill_value=default_fillvals['f8'])

	# Long Names:
	for var in save: 
		try:  	nco.variables[var].long_name=nci.variables[var].long_name
		except:	
			if self.debug: print 'mergeNC:\tWarning:\tNo long_name for ', var
		  
	# Units:
	for var in save:
	    #if var in time and self.timeAverage: nco.variables[var].units='Month'
	    #else:
		try:  	nco.variables[var].units=nci.variables[var].units
		except: 
			if self.debug: print 'mergeNC:\tWarning:\tNo units for ', var	
	
	# Fill Values:
	for var in alwaysInclude:
		if var in time:continue
		if self.debug: print 'mergeNC:\tINFO:\tCopying ', var, ' ...', nci.variables[var][:].shape
		try:nco.variables[var][:] = nci.variables[var][:].data
		except:nco.variables[var][:] = nci.variables[var][:]
	nci.close()
	
	a={}
	a[tvar] = []
	for var in save:
		if var in alwaysInclude: continue
		a[var]=[]

	for t,fni in enumerate(self.fnsi):
		if self.debug: print 'mergeNC:\tINFO:\tOpening ', fni, ' ...', t   
		nci = Dataset(fni,'r')
		
		#times:
		try:
		  tval = num2date(nci.variables[tvar][:],nci.variables[tvar].units,calendar=self.cal)		
		  a[tvar].extend( date2num(tval,nco.variables[tvar].units,calendar=self.cal))
		except:
		  a[tvar].extend(nci.variables[tvar][:])
		
		if self.debug: print 'mergeNC:\tINFO:\tTIME:',t, tvar, array(a[tvar]).shape

		# not time:
		for var in a.keys():
		  if var in time:continue
		  if var in nci.variables.keys(): arr = nci.variables[var][:]
		  else:
	  	    if self.debug:print 'mergeNC:\tWARNING:', fni,' is missing variable:',var, nco.variables[var][0,:].shape

	  	    arr = masked_all(nco.variables[var][0,:].shape)
		  if not self.timeAverage:
		  	if not len(a[var]): a[var]=arr
			else:    a[var] = append(a[var], arr, axis=0)
		  else:
		  	if not len(a[var]): a[var]=arr
			else:    a[var] += arr
						
		  if self.debug: print 'mergeNC:\tINFO\tvar:', t, var, 'len:',len(a[var]), arr.shape,a[var].shape

		nci.close()
	
	if self.timeAverage: 
	    for var in a.keys():
		if self.debug: print "mergeNC:\tINFO\tTime Average:", var 
		if var == tvar:
			nco.variables[tvar][:] = [mean(a[var]),]	
		else:
			nco.variables[var][:] = array(a[var])[None,:]/float(len(self.fnsi))
			
	else: # No time averaging.
	    for var in a.keys():
		if self.debug: print 'mergeNC:\tINFO:\tsaving ', var, ' ...',nco.variables[var][:].shape,array(a[var]).shape, nco.variables[var].dimensions #, a[var][0]
		nco.variables[var][:] = array(a[var])
		
	# Close output netcdfs:
	nco.close()
	if self.debug: print 'mergeNC:\tINFO:\tsuccessfully created:\t', self.fno
	return
Developer ID: ledm, Project: NetCDF_manip, Lines of code: 104, Source file: mergeNC.py

Example 9: run

# Required module import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import variables[var][:] [as alias]

#......... part of the code omitted here .........
		else:	
			#1D latbnd,lonbnd 
			la = getIndex(self.loc[0], nci.variables['latbnd'])
			lo = getIndex(self.loc[1], nci.variables['lonbnd'])		
		self.slice = [la,lo]
	if self.debug: print 'Saving the location: ',self.slice

	# special key word to save all the variables
	if self.vars == 'all':
		self.vars = nci.variables.keys()
		
	#check that there are some overlap between input vars and nci:
	for v in self.vars:
		if v in nci.variables.keys():continue
		print 'depthProfileNC:\tERROR:\tvariable,' ,v,', not found in ',self.fni
		nci.close()
		return
	
	
	
	#create dataset and header.
	if self.debug: print 'depthProfileNC:\tINFO:\tCreating a new dataset:\t', self.fno
	nco = Dataset(self.fno,'w')
	for a in nci.ncattrs():
		if self.debug: print 'depthProfileNC:\tINFO:\tcopying attribute: \t\"'+a+'\":\t', nci.getncattr(a)
		nco.setncattr(a,nci.getncattr(a))	
	appendToDesc= 'Reprocessed on '+todaystr()+' by '+getuser()+' using depthProfileNC.py'
	try: nco.Notes = nci.Notes + '\n\t\t'+appendToDesc
	except: nco.Notes = appendToDesc
	
	# list of variables to save, assuming some conventions
	alwaysInclude = ['time', 'lat','lon', 'latbnd', 'lonbnd', 'nav_lat','nav_lat', 'time_counter', 'deptht',]
	timeNames = ['time', 'time_counter', 't']
	latNames = ['lat', 'latbnd','nav_lat','x']
	lonNames = ['lon', 'lonbnd','nav_lon','y']
	save =   list(set(nci.variables.keys()).intersection(set(alwaysInclude) ) ) 
	save = list(set(sorted(save + self.vars)))
	
	# create dimensions:
	for d in nci.dimensions.keys():
	  if d in timeNames:  nco.createDimension(d, None)
	  elif d in latNames: nco.createDimension(d, 1)
	  elif d in lonNames: nco.createDimension(d, 1)	  
	  else:		      nco.createDimension(d, len(nci.dimensions[d]))

	# create Variables:
	for var in save:  nco.createVariable(var, nci.variables[var].dtype, nci.variables[var].dimensions,zlib=True,complevel=5)

	# Long Names:
	for var in save: 
		try:  	long_name=nci.variables[var].long_name
		except:	
		  if self.debug: print 'depthProfileNC:\tWarning:\tNo long_name for ', var
		  long_name = var
		  
		#if self.timemean: long_name.replace('Daily', 'Monthly')	
		nco.variables[var].long_name=long_name
		if self.debug: print 'depthProfileNC:\t Adding long_name for ', var, long_name
		  
	# Units:
	for var in save: 
		try:  	nco.variables[var].units=nci.variables[var].units
		except: print 'depthProfileNC:\tWarning:\tNo units for ', var	
		
	# Fill Values:
	for var in save:
		if self.debug: print 'depthProfileNC:\tINFO:\tCopying ', var, ' ...'
		shape  = nci.variables[var].shape
		if len(shape) == 4:
			arr = nci.variables[var][:,:,self.slice[0],self.slice[1]]
			arr = arr[:,:,None,None] #add extra empty dimensions:
		elif len(nci.variables[var].shape) == 3:
			try:
				arr = nci.variables[var][:,self.slice[0],self.slice[1]]
				arr = arr[:,None,None] #add extra empty dimensions:
			except: 
				print 'depthProfileNC:\tERROR:\tCopying failed due to unusual shape', var,shape, self.slice
				return
		elif var in latNames or var in lonNames:
			arr = nci.variables[var][self.slice[0],self.slice[1]]
			arr = arr[None,None]
		else:
			arr = nci.variables[var][:]
		
		#if self.timemean and len(intersection(['time','t'], nci.variables[var].dimensions)):
		#	if self.debug: print 'depthProfileNC:\tInfo:\tSaving time averaged var:',var
		#	arr = marray([arr.mean(0),])
		#	while len(arr.shape) < len(nci.variables[var].dimensions): arr = marray(arr[None,:])
			
		if self.debug: 
			print 'depthProfileNC:\tInfo:\tSaving var:',var, arr.shape, 
			print '[nco is expecting:',nco.variables[var][:].shape,']\tdims:', nci.variables[var].dimensions
		
		nco.variables[var][:] =arr

	# Close netcdfs:
	nco.close()
	nci.close()
	if self.debug: print 'depthProfileNC:\tINFO:\tsuccessfully created:\t', self.fno
	return
Developer ID: pwcazenave, Project: NetCDF_manip, Lines of code: 104, Source file: depthProfileNC.py

Example 10: run

# Required module import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import variables[var][:] [as alias]
  def run(self):	
	if not self.vars:
		print 'depthManipNC:\tERROR:\tvariables to save are no use:', self.vars
		return
	if not exists(self.fni):
		print 'depthManipNC:\tERROR:\tinputfile name does not exists:', self.fni
		return
	
	if self.depthFlags =='':
		print 'depthManipNC:\tWARNING:\tNo depth flags given, assuming surface values only.'
		self.depthFlags = zeros(len(self.vars),  dtype=int)
		
	if len(self.vars)!= len(self.depthFlags):
		print 'depthManipNC:\tERROR:\tVariables do not match depth flags:', len(self.vars), '!=', len(self.depthFlags)
		return
	self.varflag={}
	for var,flag in zip(self.vars, self.depthFlags):self.varflag[var] = flag
	
	if self.debug: print 'depthManipNC:\tINFO:\topening dataset:\t', self.fni	
	nci = Dataset(self.fni,'r')#Quiet =True)
	#if self.depthFlags and 'zbnd' not in nci.variables.keys():
	#	print 'depthManipNC:\tERROR:\tdepthFlags is ',self.depthFlags,'but inputfile name does contain \'zbnd\''
	#	return
	
	#check that there are some overlap between input vars and nci:
	for v in self.vars:
		if v in nci.variables.keys():continue
		print 'depthManipNC:\tERROR:\tvariable,' ,v,', not found in ',self.fni
		return
		
	#create dataset and header.
	if self.debug: print 'depthManipNC:\tINFO:\tCreating a new dataset:\t', self.fno
	nco = Dataset(self.fno,'w')
	for a in nci.ncattrs():
		if self.debug: print 'depthManipNC:\tINFO:\tcopying attribute: \t\"'+a+'\":\t', nci.getncattr(a)
		nco.setncattr(a,nci.getncattr(a))	
	appendToDesc= 'Reprocessed on '+todaystr()+' by '+getuser()+' using depthManipNC.py'
	try: nco.Notes = nci.Notes + '\n\t\t'+appendToDesc
	except: nco.Notes = appendToDesc
	
	# list of variables to save, assuming some conventions
	alwaysInclude = ['time', 'lat','lon', 'latbnd', 'lonbnd']
	save = list(set(nci.variables.keys()).intersection(set(alwaysInclude) ) ) 
	save = list(set(sorted(save + self.vars)))
	
	# create dimensions:
	for d in nci.dimensions.keys():
	  if d in ['time',]: nco.createDimension(d, None)
	  elif d in ['depth', 'z',]: nco.createDimension(d, 1)
	  else:		     nco.createDimension(d, len(nci.dimensions[d]))

	# create Variables:
	for var in save:  nco.createVariable(var, nci.variables[var].dtype, nci.variables[var].dimensions,zlib=True,complevel=5)

	
	# Long Names:
	for var in save:
		varln = ''
		long_name = ''
		try:  	long_name=nci.variables[var].long_name
		except:	
			if self.debug: print 'depthManipNC:\tWarning:\tNo long_name for ', var
		if var in self.vars:
			long_name += ' '+self.depthStrings[str(self.varflag[var])]
		if self.timemean: long_name.replace('Daily', '')
		if self.timemean: long_name.replace('Monthly', '')
		nco.variables[var].long_name = long_name
		if self.debug: print 'depthManipNC:\tInfo:\tAdding long_name:',var,long_name
	# Units:
	for var in save: 
		units = ''
		try:  	units=nci.variables[var].units
		except: 
			if self.debug: print 'depthManipNC:\tWarning:\tNo units for ', var
		if var in self.vars:
		  if  self.varflag[var] == 1: units = units.replace('m^3', 'm^2')
		  
		nco.variables[var].units=units
		if self.debug: print 'depthManipNC:\tInfo:\tAdding units:',var,units

	if 'zbnd' in nci.variables.keys(): self.zbnd =nci.variables['zbnd'][:]
	if 'bathymetry' in nci.variables.keys(): self.bathy =nci.variables['bathymetry'][:]	
	
	# Fill Values:
	for var in save:
		if var not in self.vars: #no change
			 arr=nci.variables[var][:] 
		else: 
			flag = self.varflag[var]
			if self.debug: print 'depthManipNC:\tInfo:\tFilling var:',var, 'flag:', flag
			if flag == 1:	
				arr = (nci.variables[var][:] * abs((self.zbnd[:,:,:,:,1]-self.zbnd[:,:,:,:,0]))).sum(1)			
				arr= arr[:,None,:,:]
			elif flag in [-2,-1,0]: arr =nci.variables[var][:,flag,:,:]
			elif flag in [-15,]:	arr =self.bottomLayer(nci, var)
			else: 	
				arr =nci.variables[var][:,flag,:,:].mean(1)
				arr= arr[:,None,:,:]
			
			#while len(arr.shape) < len(nci.variables[var].dimensions): arr = marray(arr[None,:])
#......... part of the code omitted here .........
Developer ID: ledm, Project: NetCDF_manip, Lines of code: 103, Source file: depthManipNC.py

Example 11: run

# Required module import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import variables[var][:] [as alias]

#......... part of the code omitted here .........
		
	# create dimensions:
	#for d in nci.dimensions.keys():
	#  if d in ['time',]: nco.createDimension(d, None)
	#  else:		     nco.createDimension(d, len(nci.dimensions[d]))
	nco.createDimension('index', None)

	# create Variables:

	nco.createVariable('index', int64, ['index',],zlib=True,complevel=5)#,chunksizes=10000)	
	nco.createVariable('index_t', int64, ['index',],zlib=True,complevel=5)#,chunksizes=10000)		
	nco.createVariable('index_z', int64, ['index',],zlib=True,complevel=5)#,chunksizes=10000)		
	nco.createVariable('index_y', int64, ['index',],zlib=True,complevel=5)#,chunksizes=10000)		
	nco.createVariable('index_x', int64, ['index',],zlib=True,complevel=5)#,chunksizes=10000)
	for var in save:
		nco.createVariable(var, nci.variables[var].dtype, ['index',],zlib=True,complevel=5)#,chunksizes=10000)
	
	# Long Names:
	nco.variables['index'].long_name='index'
	nco.variables['index_t'].long_name='index - time'	
	nco.variables['index_z'].long_name='index - depth'
	nco.variables['index_y'].long_name='index - latitude'
	nco.variables['index_x'].long_name='index - longitude'
			
	for var in save: 
		try:  	long_name=nci.variables[var].long_name
		except:	
		  if self.debug: print 'convertToOneDNC:\tWarning:\tNo long_name for ', var
		  long_name = var
		  
		nco.variables[var].long_name=long_name
		if self.debug: print 'convertToOneDNC:\t Adding long_name for ', var, long_name
		  
	# Units:
	nco.variables['index'].units=''
	nco.variables['index_t'].units=''
	nco.variables['index_z'].units=''
	nco.variables['index_y'].units=''
	nco.variables['index_x'].units=''
					
	for var in save: 
		try:  	nco.variables[var].units=nci.variables[var].units
		except: print 'convertToOneDNC:\tWarning:\tNo units for ', var	
		
	# Fill Values:
	sorted_Coords = sorted(CoordsToKeep.iteritems(), key=itemgetter(1))

	data={}
	if self.debug: print 'convertToOneDNC:\tINFO:\tCopying index  ...' , len(sorted_Coords)	
#	nco.variables['index'][:] = [ int(a[1]) for a in sorted_Coords]	
	nco.variables['index'][:] = array([ a[1] for a in sorted_Coords])
	nco.sync()	
	if self.debug: print 'convertToOneDNC:\tINFO:\tCopying index t ...' 	
	nco.variables['index_t'][:] = array([a[0][0] for a in sorted_Coords])
	nco.sync()
	if self.debug: print 'convertToOneDNC:\tINFO:\tCopying index z ...' 			
	nco.variables['index_z'][:] = array([a[0][1] for a in sorted_Coords])
	nco.sync()		
	if self.debug: print 'convertToOneDNC:\tINFO:\tCopying index y ...' 	
	nco.variables['index_y'][:] = array([a[0][2] for a in sorted_Coords])
	nco.sync()		
	if self.debug: print 'convertToOneDNC:\tINFO:\tCopying index x ...' 	
	nco.variables['index_x'][:] = array([a[0][3] for a in sorted_Coords])
	nco.sync()	
		
	
	for var in save:
		if self.debug: print 'convertToOneDNC:\tINFO:\tCopying ', var, ' ...' 
		arr = nci.variables[var][:]
		outarr = []
		if arr.ndim ==1:
			if var.lower() in ['time','time_counter','t']:	d = 0
			if var.lower() in ['depth','deptht',]:		d = 1
			if var.lower() in ['latbnd','lat','latitude']:	d = 2			
			if var.lower() in ['lonbnd','lon','longitude']:d = 3
			#for c in (CoordsToKeep.keys()):	
			for c in sorted_Coords:
				outarr.append(arr[c[0][d]])
			try: print var, d
			except: print var, "not found"
		elif arr.ndim ==2:
			if var.lower() in ['nav_lat','nav_lon']:	d = (2,3)
			print var, 'length : 2', d
			
			for c in sorted_Coords:
			#for c in sorted(CoordsToKeep.keys()):	
				outarr.append(arr[(c[0][2:])])				
		else:
		    #for c in sorted(CoordsToKeep.keys()):
		    	for c in sorted_Coords:
				outarr.append(arr[c[0]])
		outarr= marray(outarr)
		if self.debug: print 'convertToOneDNC:\tINFO:\tSaving var:',var, arr.shape, '->', outarr.shape , 'coords:',len(sorted_Coords)
		nco.variables[var][:] =outarr
		nco.sync()	
	# Close netcdfs:
	nco.close()
	nci.close()
	if self.debug: print 'convertToOneDNC:\tINFO:\tsuccessfully created:\t', self.fno
	return				
Developer ID: pwcazenave, Project: NetCDF_manip, Lines of code: 104, Source file: convertToOneDNC.py

Example 12: correct_mass_fluxes

# Required module import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import variables[var][:] [as alias]
def correct_mass_fluxes(options):
    data=Dataset(options.in_file)
    output=Dataset(options.out_file,'w')
    replicate_netcdf_file(output,data)

    #Retrieve data and create output:
    type=np.float
    vars_space=dict()
    for var in ['ua','va','wa']:
        replicate_netcdf_var(output,data,var)
        vars_space[var]=data.variables[var][:].astype(type,copy=False)
    for var in ['mass']:
        replicate_netcdf_var(output,data,var)
        vars_space[var]=data.variables[var][:].astype(type,copy=False)
        output.variables[var][:]=vars_space[var]
    for var in ['dmassdt']:
        vars_space[var]=(vars_space['mass'][1:,...]-vars_space['mass'][:-1,...]).astype(type,copy=False)
    
    
    #Compute spherical lengths:
    lengths=spherical_tools.coords(data)
    #Create vector calculus space:
    vector_calculus=spherical_tools.vector_calculus_spherical(vars_space['dmassdt'].shape[1:],lengths)

    for id in [0,-1]:
        if np.abs(data.variables['lat'][id])==90.0:
            vars_space['ua'][:,:,id,:]=0.0
    
    #Compute the mass divergence:
    DIV=np.zeros_like(vars_space['dmassdt'])
    for time_id, time in enumerate(range(len(data.variables['time']))):
        DIV[time_id,...] = (vars_space['dmassdt'][time_id,...] + 
                vector_calculus.DIV_from_UVW_mass(*[vars_space[var][time_id,...] for var in ['ua','va','wa']])
                )

    for time_id, time in enumerate(range(len(data.variables['time']))):
        #Compute the velocity potential of the residual:
        Chi = vector_calculus.inverse_laplacian(-DIV[time_id,...],maxiter=options.maxiter)

        #Compute the velocities corrections and record to output:
        for var, correction in zip(['ua','va','wa'],vector_calculus.UVW_mass_from_Chi(Chi)):
            vars_space[var][time_id,...]-=correction

    dmass=np.zeros_like(vars_space['dmassdt'])
    for time_id in range(len(data.variables['time'])):
        dmass[time_id,...] = (vars_space['dmassdt'][time_id,...] + 
                        vector_calculus.DIV_from_UVW_mass(*[vars_space[var][time_id,...] for var in ['ua','va','wa']])
                             )

    #Fix vertical velocity:
    vars_space['wa'][:,1:-1,...]-=np.cumsum(np.ma.array(dmass).anom(1),axis=1)[:,:-1,:,:]

    for var in ['ua','va','wa']:
        output.variables[var][:]=vars_space[var]

    if options.check_output:
        output.createVariable('dmass_old',type,('time','lev','lat','lon'))
        output.variables['dmass_old'][:,...] = DIV

        output.createVariable('dmass',type,('time','lev','lat','lon'))
        for time_id in range(len(data.variables['time'])):
            dmass[time_id,...] = (vars_space['dmassdt'][time_id,...] + 
                                                        vector_calculus.DIV_from_UVW_mass(*[vars_space[var][time_id,...] for var in ['ua','va','wa']])
                                                        )
        output.variables['dmass'][:,...] = dmass

    output.sync()
    output.close()
    data.close()

    return
Developer ID: laliberte, Project: pydiv, Lines of code: 73, Source file: pydiv.py

Example 13: coarse_grain_horizontal

# Required module import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import variables[var][:] [as alias]
def coarse_grain_horizontal(options):
    data=Dataset(options.in_file)
    output=Dataset(options.out_file,'w')
    replicate_netcdf_file(output,data)

    lengths_high=spherical_tools.coords(data)

    for var in ['time','lev','slev']:
        replicate_netcdf_var(output,data,var)

    output.createDimension('slon',len(data.dimensions['slon'])/2)
    output.createVariable('slon',np.float,('slon',))
    output.variables['slon'][:]=data.variables['slon'][::2]
    output.createDimension('lon',len(data.dimensions['lon'])/2)
    output.createVariable('lon',np.float,('lon',))
    output.variables['lon'][:]=(data.variables['lon'][::2]+data.variables['lon'][1::2])/2

    output.createDimension('slat',len(data.dimensions['slat'])/3)
    output.createVariable('slat',np.float,('slat',))
    output.variables['slat'][:]=data.variables['slat'][1::3]
    output.createDimension('lat',len(data.dimensions['slat'])/3+1)
    output.createVariable('lat',np.float,('lat',))
    output.variables['lat'][1:-1]=(data.variables['lat'][2:-2:3]+data.variables['lat'][3:-2:3]+data.variables['lat'][4:-2:3])/3
    output.variables['lat'][0]=-90.0
    output.variables['lat'][-1]=90.0

    lengths_low=spherical_tools.coords(output)
    
    var='ua'
    output.createVariable(var,np.float,('time','lev','lat','slon'))
    output.variables[var][:,:,1:-1,:]=(data.variables[var][:,:,2:-2:3,::2]+data.variables[var][:,:,3:-2:3,::2]+data.variables[var][:,:,4:-2:3,::2])
    output.variables[var][:,:,0,:]=0.0
    output.variables[var][:,:,-1,:]=0.0

    var='va'
    output.createVariable(var,np.float,('time','lev','slat','lon'))
    output.variables[var][:]=(data.variables[var][:,:,1::3,::2]+data.variables[var][:,:,1::3,1::2])
    output.sync()

    for var in ['wa']:
        output.createVariable(var,np.float,('time','slev','lat','lon'))
        output.variables[var][:]=full_average(data.variables[var][:],output.variables[var].shape)
    output.sync()

    for var in ['mass']:
        output.createVariable(var,np.float,('time','lev','lat','lon'))
        output.variables[var][:]=full_average(data.variables[var][:],output.variables[var].shape)
    output.sync()

    for var in ['ta','hus','pa']:
        output.createVariable(var,np.float,('time','lev','lat','lon'))
        #temp=data.variables[var][:]*np.reshape(lengths_high.area_lat_lon,(1,1,)+lengths_high.area_lat_lon.shape)*data.variables['dpa'][:]
        #output.variables[var][:]=full_average(temp,output.variables[var].shape)/(output.variables['dpa']*np.reshape(lengths_low.area_lat_lon,(1,1,)+lengths_low.area_lat_lon.shape))
        output.variables[var][:]=full_average(data.variables[var][:]*data.variables['mass'][:],output.variables[var].shape)/output.variables['mass']
    output.sync()

    test_divergence=False
    if test_divergence:
        #Retrieve data and create output:
        vars_space=dict()
        for var in ['ua','va','wa']:
            vars_space[var]=output.variables[var][0,:,:,:].astype(np.float,copy=False)

        vars_space['dmassdt']=(output.variables['mass'][1,:,:,:].astype(np.float,copy=False)-
                               output.variables['mass'][0,:,:,:].astype(np.float,copy=False)) 

        #Compute spherical lengths:
        lengths=spherical_tools.coords(output)
        #Create vector calculus space:
        vector_calculus=spherical_tools.vector_calculus_spherical(vars_space['dpadt'].shape,lengths)

        #FOR MERRA:
        vars_space['ua']/=lengths.mer_len_lat_slon
        vars_space['va']/=lengths.zon_len_slat_lon
        vars_space['wa']/=np.reshape(lengths.area_lat_lon,(1,)+ vars_space['wa'].shape[1:])
        ####

        #Compute the mass divergence:
        DIV = vars_space['dmassdt'] + vector_calculus.DIV_from_UVW(*[vars_space[var] for var in ['ua','va','wa']])

        output.createVariable('dmass',np.float,('time','lev','lat','lon'))
        output.variables['dmass'][:]=DIV
    output.close()
    data.close()
    return
Developer ID: laliberte, Project: pydiv, Lines of code: 87, Source file: pydiv.py

示例14: print

# Required module import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import variables[var][:] [as alias]
    print('  processing file #%3i of %3i:'%(n+1,nfiles))
    print('    %s\n'%filelist[n])
    # compute monthly averages
    m = int(datergx.search(filelist[n]).group()[-2:])-1 # infer month from filename (for climatology)
    xtime[n] = n+1 # month since start 
    xmon[m] += 1 # one more item
    for var in varlist:
      #ncvar = varmap.get(var,var)
      # N.B.: these are already monthly means, but for some reason they still have a singleton time dimension
      print var,climdata[var][m,...].shape, cesmout.variables[var][0,...].shape, cesmout.variables[var].dtype
      climdata[var][m,...] = climdata[var][m,...] + cesmout.variables[var][0,...] # accumulate climatology
      # N.B.: in-place operations are not possible, otherwise array masks are not preserved
    # close file
    cesmout.close()
  
  # normalize climatology
  if n < nmons: xmon[xmon==0] = 1 # avoid division by zero 
  for var in varlist:
    for i in xrange(len(xmon)):
      if xmon[i] > 0:
        climdata[var][i,...] = climdata[var][i,...] / xmon[i] # 'None" indicates a singleton dimension
    
  ## finish
  # save to files
  print(' Done. Writing output to:\n  %s'%(dstdir,))
  for var in varlist:
    clim.variables[var][:] = climdata[var] 
  # close files
  clim.close()
  print('    %s'%(climfile,))
Developer ID: andrewsoong, Project: WRF-Tools, Lines of code: 32, Source file: cesm_average.py

Example 15: run

# Required module import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import variables[var][:] [as alias]
  def run(self):	
	if not self.vars:
		print 'pruneNC:\tERROR:\tvariables to save are no use:', self.vars
		return
	if not exists(self.fni):
		print 'pruneNC:\tERROR:\tinputfile name does not exists:', self.fni
		return
				
	nci = Dataset(self.fni,'r')#Quiet =True)
	
	if self.depthInt: 
		print 'FAIL: maybe you should look at the depthManip.py class instead. This one only removes variables from a netcdf.'
		return
		
	
	#check that there are some overlap between input vars and nci:
	for v in self.vars:
		if v in nci.variables.keys():continue
		print 'pruneNC:\tERROR:\tvariable,' ,v,', not found in ',self.fni
		return
		
	#create dataset and header.
	if self.debug: print 'pruneNC:\tINFO:\tCreating a new dataset:\t', self.fno
	nco = Dataset(self.fno,'w')
	for a in nci.ncattrs():
		if self.debug: print 'pruneNC:\tINFO:\tcopying attribute: \t\"'+a+'\":\t', nci.getncattr(a)
		nco.setncattr(a,nci.getncattr(a))	
	appendToDesc= 'Reprocessed on '+todaystr()+' by '+getuser()+' using pruneNC.py'
	try: nco.Notes = nci.Notes + '\n\t\t'+appendToDesc
	except: nco.Notes = appendToDesc
	
	# list of variables to save, assuming some conventions
	alwaysInclude = ['time', 'lat','lon', 'latbnd', 'lonbnd']
	save =   list(set(nci.variables.keys()).intersection(set(alwaysInclude) ) ) 
	save = list(set(sorted(save + self.vars)))
	
	# create dimensions:
	for d in nci.dimensions.keys():
	  if d in ['time',]: nco.createDimension(d, None)
	  else:		     nco.createDimension(d, len(nci.dimensions[d]))

	# create Variables:
	for var in save:  nco.createVariable(var, nci.variables[var].dtype, nci.variables[var].dimensions,zlib=True,complevel=5)

	# Long Names:
	for var in save: 
		try:  	long_name=nci.variables[var].long_name
		except:	
		  print 'pruneNC:\tWarning:\tNo long_name for ', var
		  long_name = var
		  
		if self.timemean: long_name.replace('Daily', 'Monthly')	
		nco.variables[var].long_name=long_name
		if self.debug: print 'pruneNC:\t Adding long_name for ', var, long_name
		  
	# Units:
	for var in save: 
		try:  	nco.variables[var].units=nci.variables[var].units
		except: print 'pruneNC:\tWarning:\tNo units for ', var	
		
	# Fill Values:
	for var in save:
		if self.debug: print 'pruneNC:\tINFO:\tCopying ', var, ' ...' 
		arr = nci.variables[var][:]
		
		if self.timemean and len(intersection(['time','t'], nci.variables[var].dimensions)):
			if self.debug: print 'pruneNC:\tInfo:\tSaving time averaged var:',var
			arr = marray([arr.mean(0),])
			while len(arr.shape) < len(nci.variables[var].dimensions): arr = marray(arr[None,:])
			
		if self.debug: print 'pruneNC:\tInfo:\tSaving var:',var, arr.shape, '\tdims:', nci.variables[var].dimensions
		nco.variables[var][:] =arr

	# Close netcdfs:
	nco.close()
	nci.close()
	if self.debug: print 'pruneNC:\tINFO:\tsuccessfully created:\t', self.fno
	return
Developer ID: ledm, Project: NetCDF_manip, Lines of code: 80, Source file: pruneNC.py


Note: The netCDF4.Dataset.variables[var][:] method examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by many developers; copyright of the source code remains with the original authors. For distribution and use, please follow the license of the corresponding project; do not reproduce without permission.