

Python Dataset.conventions Code Examples

This article collects typical usage examples of netCDF4.Dataset.conventions in Python. Strictly speaking, conventions is not a method: assigning dataset.conventions = '...' on a netCDF4.Dataset simply creates a global attribute named conventions in the file, and most projects use it to record the metadata convention the file follows (e.g. CF or COARDS). If you are wondering how Dataset.conventions is used in practice, the curated examples below should help; you can also explore other usage examples of netCDF4.Dataset in the same way.


The sections below present 7 code examples of Dataset.conventions, listed roughly in order of popularity.
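
Before the project excerpts, here is a minimal, self-contained sketch of the pattern they all share: open a Dataset in write mode and assign conventions, which netCDF4 stores as a global attribute of that name. The output file name and variable layout below are illustrative assumptions rather than code from any of the projects. Note also that the CF metadata standard spells its attribute Conventions with a capital C, so the lowercase spelling used throughout these examples simply produces a global attribute named conventions.

# A minimal sketch, assuming a hypothetical output file 'example.nc'
from netCDF4 import Dataset
import numpy as np

dataset = Dataset('example.nc', 'w', format='NETCDF4')
dataset.description = 'Minimal Dataset.conventions example'
dataset.conventions = 'CF-1.6'    # stored as the global attribute 'conventions'

dataset.createDimension('time', None)
times = dataset.createVariable('time', 'f8', ('time',))
times.units = 'hours since 2001-01-01 00:00:00'
times[:] = np.arange(3)

print(dataset.conventions)        # prints: CF-1.6
dataset.close()

The same assignment pattern appears in every example that follows, whether the value is 'COARDS', 'CF-1.0', or 'CF-1.6'.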

Example 1: check_merg_file_differences

# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import conventions [as alias]
def check_merg_file_differences(baselinePath1, baselinePath2,  outputDir):
    '''
        Purpose:: To compare the MERG cloud element files from two directories

        Input:: baselinePath1: a string representing the path to the first set of files
                baselinePath2: a string representing the path to the second set of files
                outputDir: a string representing the path where the results are stored

        Output::
                diffDir: the path to a directory containing the differences
                diffDirText: a text file logging the differences. This is stored in diffDir

        Assumptions::
                a unittestResults.txt file exists in each path (to check for the nodes in the graphs)

    '''
    dir1Filenames = []
    dir2Filenames = []
    diffNames = []

    if not os.path.exists(baselinePath1):
        print 'Please check inputs. No folder named %s ' % baselinePath1
        return
    else:
        dir1Filenames = [os.path.basename(x) for x in glob.glob(baselinePath1+'/*.nc')]
        
    if not os.path.exists(baselinePath2):
        print 'Please check inputs. No folder named %s ' % baselinePath2
        return
    else:
        dir2Filenames = [os.path.basename(x) for x in glob.glob(baselinePath2+'/*.nc')]
        
    if not os.path.exists(outputDir):
        os.mkdir(outputDir)

    diffLog = open(outputDir+'/diffLog.txt','wb')
    diffLog.write('\n baselinePath1 is %s' % baselinePath1)
    diffLog.write('\n baselinePath2 is %s' % baselinePath2)

    # 1. Check if there are differences in the filenames
    diffNames = list(set(dir1Filenames) - set(dir2Filenames))
    if len(diffNames) == 0:
        diffLog.write('\n Differences in filenames: None\n')
        print 'Differences in filenames: None'
    else:
        diffLog.write('\n Differences in filenames: \n')
        print 'Differences in filenames: \n'

        for eachFile in diffNames:
            diffLog.write('%s\n' % eachFile)
            print '%s' % eachFile
        diffLog.write(('*')*80)
        print ('*')*80
        return

    # 2. Check the content of each pair of similarly named files by diffing the data arrays
    print 'checking the content \n'
    for i in xrange(len(dir1Filenames)):
        file_one = '%s/%s' % (baselinePath1, dir1Filenames[i])
        file_two = '%s/%s' % (baselinePath2, dir2Filenames[i])

        # Open the two files and diff the arrays
        file1 = Dataset(file_one, 'r', format='NETCDF4')
        file1Data = file1.variables['brightnesstemp'][:, :, :]

        file2 = Dataset(file_two, 'r', format='NETCDF4')
        file2Data = file2.variables['brightnesstemp'][:, :, :]

        if np.array_equal(file1Data, file2Data):
            message = '%s and %s are equal \n' % (file_one, file_two)
            diffLog.write(message)
            print message
        else:
            message = '\n** %s and %s are NOT equal. Check the outputDir for the diff file.\n' % (file_one, file_two)
            diffLog.write(message)
            print message
            file1 = Dataset(baselinePath1+'/'+dir1Filenames[i], 'r', format='NETCDF4')
            alllatsraw = file1.variables['latitude'][:]
            alllonsraw = file1.variables['longitude'][:]
            timesraw = file1.variables['time'][:]
            file1.close()

            LON, LAT = np.meshgrid(alllonsraw, alllatsraw)

            diff = Dataset(outputDir+'/'+dir1Filenames[i], 'w', format='NETCDF4')
            diff.description = 'Difference between '+dir1Filenames[i]+' & '+dir2Filenames[i]
            diff.calendar = 'standard'
            diff.conventions = 'COARDS'
            diff.createDimension('time', None)
            diff.createDimension('lat', len(LAT[:,0]))
            diff.createDimension('lon', len(LON[0,:]))

            tempDims = ('time', 'lat', 'lon',)
            times = diff.createVariable('time', 'f8', ('time',))
            latitude = diff.createVariable('latitude', 'f8', ('lat',))
            longitude = diff.createVariable('longitude', 'f8', ('lon',))
            brightnesstemp = diff.createVariable('brightnesstemp', 'i16', tempDims)
            brightnesstemp.units = 'Kelvin'

            # write NETCDF data
#......... part of the code omitted here .........
Developer: caocampb, Project: grab-tag-graph, Lines: 103, Source: checkMERGBaseline.py

Example 2: Exception

# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import conventions [as alias]
    if (inRootGrp.start_year != startDate.year):
        raise Exception("Specified simulation start year ({0}) does not match with input vapor pressure data file data year.".format(startDate.year))
        exit()

     # DEBUG: print global attributes of the original file:
##    for gAttribute in inRootGrp.ncattrs():
##        print 'Global attribute name:', gAttribute, '=', getattr(inRootGrp,gAttribute)


    # add global file level attributes to the new netcdf file
    outRootGrp.start_year = inRootGrp.start_year
    outRootGrp.data_variable_name = inRootGrp.data_variable_name
    outRootGrp.data_time_step = inTimeStep
    outRootGrp.orginal_data_source = 'Daymet Software Version 2.0'
    outRootGrp.conventions = 'CF-1.0'
    outRootGrp.modified_data_source = 'CI Water System'
    outRootGrp.spatial_reference = 'NAD83_UTM_Zone_12N'
    outRootGrp.datum = 'D_North_America_1983'

    # get dimension values from the original netcdf file
    inputTimeVar = inRootGrp.variables['time']
    inputXvar = inRootGrp.variables['x']
    inputYvar = inRootGrp.variables['y']
    inputVpVar = inRootGrp.variables[inRootGrp.data_variable_name]

    print(inputTimeVar.shape[0])
    print(inputXvar.shape[0])
    print(inputYvar.shape[0])
    print(inputVpVar.shape)
Developer: CI-Water-DASYCIM, Project: UEBPythonPackage, Lines: 31, Source: GenerateWatershedDaymetMultipleVpdDataPointsPerDay.py

Example 3: datetime

# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import conventions [as alias]
		newList.append(stringtoarr(list[i], numchars))
	return newList

station_list = ["S001", "S002", "S003", "S004", "S005", "S006", "S007", "S008","S009", "S010", "S011", "S012"]
lat_list = [-150.0, -120.0, -90.0, -60.0, -30.0, 0.0, 30.0, 60.0, 90.0, 120.0, 150.0, 180.0]
lon_list = [-150.0, -120.0, -90.0, -60.0, -30.0, 0.0, 30.0, 60.0, 90.0, 120.0, 150.0, 180.0]
data_values = [17.0, 26.0, 58.0, 37.0, 45.0, 66.0, 27.0, -2.0, 7.0, 9.0, 23.0, 14.0]

# Main
try: 
    # global
    root_grp.description = "Test"
    root_grp.cdm_datatype = "Station"
    root_grp.stationDimension = "station_nm";
    root_grp.featureType = "TimeSeries";
    root_grp.conventions= "CF-1.6";
    baseDate = datetime(2001,3,1)
    root_grp.time_coverage_start = "2001-03-01 12:00:00";

    # dimensions
    root_grp.createDimension('time', None)
    root_grp.createDimension('station_nm', None) # stations unlimited = http://cf-pcmdi.llnl.gov/documents/cf-conventions/1.6/cf-conventions.html#idp8314368
    # variables
    times = root_grp.createVariable('time', 'f8', ('time',))
    times.units = 'hours since 2001-03-01 12:00:00'
    #times.units = 'hours since 0001-01-01 00:00:00.0'
    times.calendar = 'gregorian'
    times.standard_name= 'time'
    data = root_grp.createVariable('data', 'f4', ('station_nm','time',))
    data.coordinates='time lat lon'
    data.long_name = "Streamflow"
Developer: andreasdjokic, Project: wml2-ncdf, Lines: 33, Source: createNCDF.py

Example 4: plot_accu_TRMM

# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import conventions [as alias]
def plot_accu_TRMM(finalMCCList, MAIN_DIRECTORY):
    '''
    Purpose:: (1) generate a file with the accumulated precipitation for the MCS and (2) generate the corresponding image

    Input:: finalMCCList: a list of dictionaries, each representing a node of an MCC
            MAIN_DIRECTORY: a string representing the path to the main directory where the generated data is saved

    Returns:: a netCDF file containing the accumulated precipitation

    Generates:: a plot (produced in GrADS) of the accumulated precipitation

    '''

    os.chdir((MAIN_DIRECTORY+'/TRMMnetcdfCEs'))
    fname = ''
    imgFilename = ''
    firstPartName = ''
    firstTime = True
    replaceExpXDef = ''
    thisNode = ''

    subprocess.call('export DISPLAY=:0.0', shell=True)

    # Generate the file name using MCCTimes
    # if the file name exists, add it to the accTRMM file
    for path in finalMCCList:
        firstTime = True
        for eachNode in path:
            thisNode = mccSearch.this_dict(eachNode)
            fname = 'TRMM' + str(thisNode['cloudElementTime']).replace(' ', '_') + thisNode['uniqueID'] + '.nc'

            if os.path.isfile(fname):
                # Open NetCDF file add info to the accu
                cloudElementTRMMData = Dataset(fname, 'r', format='NETCDF4')
                precipRate = cloudElementTRMMData.variables['precipitation_Accumulation'][:]
                lats = cloudElementTRMMData.variables['latitude'][:]
                lons = cloudElementTRMMData.variables['longitude'][:]
                LONTRMM, LATTRMM = np.meshgrid(lons, lats)
                nygrdTRMM = len(LATTRMM[:, 0])
                nxgrdTRMM = len(LONTRMM[0, :])
                precipRate = ma.masked_array(precipRate, mask=(precipRate < 0.0))
                cloudElementTRMMData.close()

                if firstTime is True:
                    firstPartName = str(thisNode['uniqueID'])+str(thisNode['cloudElementTime']).replace(' ', '_')+'-'
                    accuPrecipRate = ma.zeros(precipRate.shape)
                    firstTime = False

                accuPrecipRate += precipRate

        imgFilename = MAIN_DIRECTORY+'/images/MCS_'+firstPartName+str(thisNode['cloudElementTime']).replace(' ', '_')+'.gif'
        # Create new netCDF file
        accuTRMMFile = MAIN_DIRECTORY+'/TRMMnetcdfCEs/accu'+firstPartName+str(thisNode['cloudElementTime']).replace(' ', '_')+'.nc'
        # Write the file
        accuTRMMData = Dataset(accuTRMMFile, 'w', format='NETCDF4')
        accuTRMMData.description = 'Accumulated precipitation data'
        accuTRMMData.calendar = 'standard'
        accuTRMMData.conventions = 'COARDS'
        # Dimensions
        accuTRMMData.createDimension('time', None)
        accuTRMMData.createDimension('lat', nygrdTRMM)
        accuTRMMData.createDimension('lon', nxgrdTRMM)

        # Variables
        TRMMprecip = ('time', 'lat', 'lon',)
        times = accuTRMMData.createVariable('time', 'f8', ('time',))
        times.units = 'hours since ' + str(thisNode['cloudElementTime']).replace(' ', '_')[:-6]
        latitude = accuTRMMData.createVariable('latitude', 'f8', ('lat',))
        longitude = accuTRMMData.createVariable('longitude', 'f8', ('lon',))
        rainFallacc = accuTRMMData.createVariable('precipitation_Accumulation', 'f8', TRMMprecip)
        rainFallacc.units = 'mm'

        longitude[:] = LONTRMM[0, :]
        longitude.units = 'degrees_east'
        longitude.long_name = 'Longitude'

        latitude[:] = LATTRMM[:, 0]
        latitude.units = 'degrees_north'
        latitude.long_name = 'Latitude'

        rainFallacc[:] = accuPrecipRate[:]

        accuTRMMData.close()

        # Generate the image with GrADS
        subprocess.call('rm acc.ctl', shell=True)
        subprocess.call('touch acc.ctl', shell=True)
        replaceExpDset = 'echo DSET ' + accuTRMMFile + ' >> acc.ctl'
        subprocess.call(replaceExpDset, shell=True)
        subprocess.call('echo "OPTIONS yrev little_endian template" >> acc.ctl', shell=True)
        subprocess.call('echo "DTYPE netcdf" >> acc.ctl', shell=True)
        subprocess.call('echo "UNDEF  0" >> acc.ctl', shell=True)
        subprocess.call('echo "TITLE  TRMM MCS accumulated precipitation" >> acc.ctl', shell=True)
        replaceExpXDef = 'echo XDEF ' + str(nxgrdTRMM) + ' LINEAR ' + str(min(lons)) + ' ' + \
            str((max(lons)-min(lons))/nxgrdTRMM) + ' >> acc.ctl'
        subprocess.call(replaceExpXDef, shell=True)
        replaceExpYDef = 'echo YDEF '+str(nygrdTRMM)+' LINEAR '+str(min(lats)) + ' ' + \
            str((max(lats)-min(lats))/nygrdTRMM)+' >>acc.ctl'
        subprocess.call(replaceExpYDef, shell=True)
        subprocess.call('echo "ZDEF   01 LEVELS 1" >> acc.ctl', shell=True)
#......... part of the code omitted here .........
Developer: caocampb, Project: grab-tag-graph, Lines: 103, Source: plotting.py

Example 5: plot_accu_in_time_range

# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import conventions [as alias]
def plot_accu_in_time_range(starttime, endtime, MAIN_DIRECTORY, tRes):
    '''
    Purpose:: Create an accumulated precipitation plot for a given time range, using all CEs

    Inputs:: starttime: a string representing the time to start the accumulations, format yyyy-mm-dd_hh:mm:ss
             endtime: a string representing the time to end the accumulations, format yyyy-mm-dd_hh:mm:ss
             MAIN_DIRECTORY: a string representing the path to the main directory where the generated data is saved
             tRes: a float representing the time resolution of the input data, e.g. 30 min = 0.5

    Returns:: a netCDF file containing the accumulated precipitation for the specified times and a gif (generated in GrADS)

    Generates:: a plot of the amount of precipitation accumulated between the two dates for the domain

    TODO: pass or pick up from the netCDF file the lat, lon and resolution for generating the ctl file
    '''

    os.chdir((MAIN_DIRECTORY+'/TRMMnetcdfCEs/'))
    # Just in case the X11 server is giving problems
    subprocess.call('export DISPLAY=:0.0', shell=True)

    imgFilename = ''
    firstTime = True

    fileList = []
    sTime = datetime.strptime(starttime.replace('_', ' '), '%Y-%m-%d %H:%M:%S')
    eTime = datetime.strptime(endtime.replace('_', ' '), '%Y-%m-%d %H:%M:%S')
    thisTime = sTime

    while thisTime <= eTime:
        fileList = filter(os.path.isfile, glob.glob(('TRMM' + str(thisTime).replace(' ', '_') + '*' + '.nc')))
        for fname in fileList:
            cloudElementTRMMData = Dataset(fname, 'r', format='NETCDF4')
            precipRate = cloudElementTRMMData.variables['precipitation_Accumulation'][:]
            lats = cloudElementTRMMData.variables['latitude'][:]
            lons = cloudElementTRMMData.variables['longitude'][:]
            LONTRMM, LATTRMM = np.meshgrid(lons, lats)
            nygrdTRMM = len(LATTRMM[:, 0])
            nxgrdTRMM = len(LONTRMM[0, :])
            precipRate = ma.masked_array(precipRate, mask=(precipRate < 0.0))
            cloudElementTRMMData.close()

            if firstTime is True:
                accuPrecipRate = ma.zeros(precipRate.shape)
                firstTime = False

            accuPrecipRate += precipRate

        # Increment the time
        thisTime += timedelta(hours=tRes)

    # Create new netCDF file
    accuTRMMFile = MAIN_DIRECTORY+'/TRMMnetcdfCEs/accu'+starttime+'-'+endtime+'.nc'
    print 'accuTRMMFile ', accuTRMMFile
    # Write the file
    accuTRMMData = Dataset(accuTRMMFile, 'w', format='NETCDF4')
    accuTRMMData.description = 'Accumulated precipitation data'
    accuTRMMData.calendar = 'standard'
    accuTRMMData.conventions = 'COARDS'
    # Dimensions
    accuTRMMData.createDimension('time', None)
    accuTRMMData.createDimension('lat', nygrdTRMM)
    accuTRMMData.createDimension('lon', nxgrdTRMM)

    # Variables
    TRMMprecip = ('time', 'lat', 'lon',)
    times = accuTRMMData.createVariable('time', 'f8', ('time',))
    times.units = 'hours since ' + starttime[:-6]
    latitude = accuTRMMData.createVariable('latitude', 'f8', ('lat',))
    longitude = accuTRMMData.createVariable('longitude', 'f8', ('lon',))
    rainFallacc = accuTRMMData.createVariable('precipitation_Accumulation', 'f8', TRMMprecip)
    rainFallacc.units = 'mm'

    longitude[:] = LONTRMM[0, :]
    longitude.units = 'degrees_east'
    longitude.long_name = 'Longitude'

    latitude[:] = LATTRMM[:, 0]
    latitude.units = 'degrees_north'
    latitude.long_name = 'Latitude'

    rainFallacc[:] = accuPrecipRate[:]

    accuTRMMData.close()

    # Generate the image with GrADS
    # the ctl file
    subprocess.call('rm acc.ctl', shell=True)
    subprocess.call('touch acc.ctl', shell=True)
    replaceExpDset = 'echo DSET ' + accuTRMMFile + ' >> acc.ctl'
    subprocess.call(replaceExpDset, shell=True)
    subprocess.call('echo "OPTIONS yrev little_endian template" >> acc.ctl', shell=True)
    subprocess.call('echo "DTYPE netcdf" >> acc.ctl', shell=True)
    subprocess.call('echo "UNDEF  0" >> acc.ctl', shell=True)
    subprocess.call('echo "TITLE  TRMM MCS accumulated precipitation" >> acc.ctl', shell=True)
    replaceExpXDef = 'echo XDEF ' + str(nxgrdTRMM) + ' LINEAR ' + str(min(lons)) + ' ' + \
        str((max(lons)-min(lons))/nxgrdTRMM) + ' >> acc.ctl'
    subprocess.call(replaceExpXDef, shell=True)
    replaceExpYDef = 'echo YDEF '+str(nygrdTRMM)+' LINEAR '+str(min(lats)) + ' ' + \
        str((max(lats)-min(lats))/nygrdTRMM)+' >>acc.ctl'
    subprocess.call(replaceExpYDef, shell=True)
#......... part of the code omitted here .........
Developer: caocampb, Project: grab-tag-graph, Lines: 103, Source: plotting.py

Example 6: corrected

# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import conventions [as alias]
    dataset.title='Daily global radiation'
    dataset.comment='Daily global radiation bias corrected (scaled distribution mapping) data of the EURO-CORDEX model. The reference period is 1981-2010, the years 2006-2010 are taken from the corresponding rcp4.5 scenario.'
    
var.grid_mapping = 'latitude_longitude'

# projection information
crs.longitude_of_prime_meridian = 0.0 
crs.semi_major_axis = 6378137.0
crs.inverse_flattening = 298.257223563
crs.comment = 'Latitude and longitude on the WGS 1984 datum'

# write data to netCDF variable
var[:] = ds[param].data
lats[:] = lat1d
lons[:] = lon1d

# fill in times
dates = [startdate+k*timedelta(days=1) for k in range(ds[param].data.shape[0])]
times[:] = date2num(dates, units=times.units, calendar=times.calendar)

# global attributes

dataset.project= "Climaproof, funded by the Austrian Development Agency (ADA) and co-funded by the United Nations Environmental Programme (UNEP)"
dataset.source = 'Bias Correction Method: Switanek et al., 2017, doi.org/10.5194/hess-21-2649-2017, Regridding Method: Higher-order patch recovery (patch) by Earth System Modelling Framework (ESMF) software ESMF_RegridWeightGen (http://www.earthsystemmodeling.org/esmf_releases/public/last/ESMF_refdoc/)'
dataset.contact = 'Maria Wind <[email protected]>, Herbert Formayer <[email protected]>'
dataset.institution = 'Institute of Meteorology, University of Natural Resources and Life Sciences, Vienna, Austria'
dataset.references = 'https://data.ccca.ac.at/group/climaproof'
dataset.conventions = 'CF-1.6'

# close dataset        
dataset.close()
Developer: wasserblum, Project: met, Lines: 33, Source: read_write_netcdf_MARIA.py

Example 7: Dataset

# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import conventions [as alias]
    # open the existing netCDF file in read-only mode; the new netCDF file will be created based on it
    inRootGrp = Dataset(inPrecpNetCDF_File, "r", format="NETCDF3_CLASSIC")

    # open a new blank netCDF file to which we will be writing data
    outRootGrp = Dataset(outWindNetCDF_File, "w", format="NETCDF3_CLASSIC")

    # DEBUG: print global attributes of the original file:
    ##    for gAttribute in inRootGrp.ncattrs():
    ##        print 'Global attribute name:', gAttribute, '=', getattr(inRootGrp,gAttribute)

    # add global file level attributes to the new netcdf file
    outRootGrp.start_year = inRootGrp.start_year
    outRootGrp.data_variable_name = "V"
    outRootGrp.data_time_step = inRootGrp.data_time_step
    outRootGrp.orginal_data_source = "CI Water Generated Data Source"
    outRootGrp.conventions = "CF-1.0"
    outRootGrp.modified_data_source = "CI Water System"
    outRootGrp.spatial_reference = "NAD83_UTM_Zone_12N"
    outRootGrp.datum = "D_North_America_1983"

    # get dimension values from the original netcdf file
    inputTimeVar = inRootGrp.variables["time"]
    inputXvar = inRootGrp.variables["x"]
    inputYvar = inRootGrp.variables["y"]
    inputPrecVar = inRootGrp.variables["Prec"]

    # DEBUG: print dimensions of variables in input netcdf file
    print(inputTimeVar.shape[0])
    print(inputXvar.shape[0])
    print(inputYvar.shape[0])
    print(inputPrecVar.shape)
Developer: CI-Water-DASYCIM, Project: UEBPythonPackage, Lines: 33, Source: GenerateWatershedDaymetMultipleWindDataPerDay.py


Note: The netCDF4.Dataset.conventions examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub/MSDocs. The code snippets are drawn from open-source projects contributed by their respective developers, and copyright remains with the original authors; for distribution and use, please refer to the corresponding project's License. Do not reproduce without permission.