

Python Dataset.close Method Code Examples

This article collects typical usage examples of the Python method netCDF4.Dataset.close. If you are wondering what Dataset.close does, how to call it, or what real-world uses look like, the curated examples below should help. You can also browse further usage examples of the netCDF4.Dataset class that this method belongs to.


Fifteen code examples of Dataset.close are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
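
Before the collected examples, here is a minimal sketch of the pattern they all share: open a netCDF4.Dataset, read or write variables, then release the file handle with close(). The file path and variable names below are placeholders.

from netCDF4 import Dataset

# Open an existing file read-only (the path is a placeholder).
nc = Dataset('example.nc', 'r')
try:
    print(nc.variables.keys())   # inspect the variables in the file
finally:
    nc.close()                   # always release the underlying file handle

# Dataset also implements the context-manager protocol, which calls
# close() automatically when the block exits:
with Dataset('example.nc', 'r') as nc:
    print(nc.dimensions.keys())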

Example 1: dataToNC

# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import close [as alias]
    def dataToNC(self, ncName, subset, lookup):
        """Take dataframe and put in netCDF (new file or append).
        Assumes there's a 'time' variable in data/ncfile

        .. note:: run in conda environment log2ncEnv3, due to line: appDF = subset[-exist]

        :param str ncName: name of netCDF with filepath
        :param dataframe subset: dataframe to be added to netCDF file
        :param lookup: a variable that might be needed for createNCshell, can be empty/Null

        """
        if not os.path.isfile(ncName):
            ncfile = self.createNCshell(ncName, lookup)
        ncfile = Dataset(ncName, 'a', format='NETCDF4')
        timeLen = len(ncfile.variables['time'][:])

        ## Add the following: remove any entries from the subset that already exist!!!!!!!
        # exist = subset.epoch.isin(ncDep.variables['time'][:]) #
        subset['epochs'] = subset.index.values.astype('int64') // 10**9
        # exist  = subset.index.isin(epochs) #wrong previously
        # environment numpy (1.11) didn't have 'isin' module
        exist = np.in1d(subset['epochs'], ncfile.variables['time'][:])
        appDF = subset[-exist]

        if len(appDF) > 0: # else all times are already in nc
            # length should be the same for time & all attributes
            ncfile.variables['time'][timeLen:] = appDF['epochs'].values
            # ncfile.variables['time'][timeLen:] = subset.index.values.astype(np.int64) // 10**9
            for attr in self.attrArr:
                #atLen = len(ncfile.variables[attr][:])
                ncfile.variables[attr][timeLen:] = appDF[attr].values
                self.attrMinMax(ncfile, attr)
            self.NCtimeMeta(ncfile)

        ncfile.close()
Author: sarahheim, Project: ncObjects, Lines: 37, Source: nc.py
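
The key step in this example is de-duplication before appending: epoch timestamps derived from the dataframe index are compared against the times already stored in the file, and only the missing rows are written. Below is a standalone sketch of that step with placeholder names; note that newer pandas/numpy releases require "~" rather than "-" to negate a boolean mask.

import numpy as np
from netCDF4 import Dataset

def append_new_rows(nc_path, subset):
    """Append only rows whose epoch time is not already in the file (sketch)."""
    nc = Dataset(nc_path, 'a')
    try:
        epochs = subset.index.values.astype('int64') // 10**9
        exist = np.in1d(epochs, nc.variables['time'][:])  # True where time already stored
        new_rows = subset[~exist]                          # '~' negates the boolean mask
        if len(new_rows) > 0:
            start = len(nc.variables['time'][:])
            nc.variables['time'][start:] = epochs[~exist]
        return new_rows
    finally:
        nc.close()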

Example 2: get_level

# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import close [as alias]
def get_level(resource, level):
  from flyingpigeon.ocgis_module import call
  from netCDF4 import Dataset
  from flyingpigeon.utils import get_variable
  from numpy import squeeze

  try:
    level_data = call(resource, level_range=[int(level),int(level)])
    if type(resource) == list:
      resource.sort()
    variable = get_variable(level_data)
    logger.info('found %s in file' % variable)
    ds = Dataset(level_data, mode='a')
    var = ds.variables.pop(variable)
    dims = var.dimensions
    new_var = ds.createVariable('z%s'% level, var.dtype, dimensions=(dims[0],dims[2],dims[3]))
    # i = where(var[:]==level)
    new_var[:,:,:] = squeeze(var[:,0,:,:])
    ds.close()
    logger.info('level %s extracted' % level)

    data = call(level_data , variable = 'z%s'%level)
    
  except Exception as e:
    logger.error('failed to extract level %s ' % e)
  return data
Author: KatiRG, Project: flyingpigeon, Lines: 28, Source: datafetch.py

Example 3: setUp

# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import close [as alias]
    def setUp(self):
        self.standardtime = self.TestTime(datetime(1950, 1, 1), 366, 24,
                                          'hours since 1900-01-01', 'standard')

        self.file = tempfile.NamedTemporaryFile(suffix='.nc', delete=False).name
        f = Dataset(self.file, 'w')
        f.createDimension('time', None)
        time = f.createVariable('time', float, ('time',))
        time.units = 'hours since 1900-01-01'
        time[:] = self.standardtime[:]
        f.createDimension('time2', 1)
        time2 = f.createVariable('time2', 'f8', ('time2',))
        time2.units = 'days since 1901-01-01'
        self.first_timestamp = datetime(2000, 1, 1)
        time2[0] = date2num(self.first_timestamp, time2.units)
        ntimes = 21
        f.createDimension("record", ntimes)
        time3 = f.createVariable("time3", numpy.int32, ("record", ))
        time3.units = "seconds since 1970-01-01 00:00:00"
        date = datetime(2037,1,1,0)
        dates = [date]
        for ndate in range(ntimes-1):
            date += (ndate+1)*timedelta(hours=1)
            dates.append(date)
        time3[:] = date2num(dates,time3.units)
        f.close()
Author: ckhroulev, Project: netcdf4-python, Lines: 28, Source: tst_netcdftime.py

Example 4: output_file

# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import close [as alias]
def output_file(fname, varname, dat, lat_variable = None, lon_variable = None):
    if len(dat.shape) == 2: dat = np.reshape(dat, [1, dat.shape[0], dat.shape[1]])
    if lat_variable is None: lat_variable = coor_range(-90 , 90 , dat.shape[1])
    if lon_variable is None: lon_variable = coor_range(0, 360, dat.shape[2])

    rootgrp = Dataset(fname, "w", format="NETCDF4")

    time = rootgrp.createDimension("time", dat.shape[0])
    lat  = rootgrp.createDimension("lat", dat.shape[1])
    lon  = rootgrp.createDimension("lon", dat.shape[2])

    times      = rootgrp.createVariable("time","f8",("time",))
    latitudes  = rootgrp.createVariable("lat","f4",("lat",))
    longitudes = rootgrp.createVariable("lon","f4",("lon",))

    longitudes.lon_name         = 'Longitude'
    longitudes.axis             = "X"
    longitudes.standard_name    = "longitude"
    longitudes.units            = "degrees_east"

    latitudes.lon_name          = 'Latitude'
    latitudes.axis              = "Y"
    latitudes.standard_name     = "latitude"
    latitudes.units             = "degrees_north"

    dims = ("time","lat","lon",)
    var = rootgrp.createVariable(varname, "f4", dims)

    latitudes [:] = lat_variable
    longitudes[:] = lon_variable
    var[:,:,:]    = dat
    
    rootgrp.close()

    return dat
Author: douglask3, Project: jules_inputs, Lines: 37, Source: jules_file_man.py

Example 5: main

# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import close [as alias]
def main(args):
    vars_ = '|'.join(args.variables)
    test_files = iter_matching(args.basedir, re.compile('.*({}).*(_rcp26|_rcp45|_rcp85|_historical_).*r1i1p1.*nc'.format(vars_)))

    if args.dry_run:
        for f in test_files:
            print f
        sys.exit(0)

    FileType = ClimdexFile if args.climdex else Cmip5File

    for fp in test_files:
        log.info(fp)

        nc = Dataset(fp)
        available_climo_periods = determine_climo_periods(nc)
        nc.close()
        file_ = FileType(fp)
        variable = file_.variable

        for _, t_range in available_climo_periods.items():

            # Create climatological period and update metadata
            log.info('Generating climo period %s to %s', d2s(t_range[0]), d2s(t_range[1]))
            out_fp = file_.generate_climo_fp(t_range, args.outdir)
            log.info('Output file: %s', format(out_fp))
            try:
                create_climo_file(fp, out_fp, t_range[0], t_range[1], variable)
            except:
                log.warn('Failed to create climatology file')
            else:
                update_climo_time_meta(out_fp, FileType)
Author: pacificclimate, Project: climate-explorer-backend, Lines: 34, Source: generate_climos.py

Example 6: load_era40

# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import close [as alias]
def load_era40(filename):
    era40 = Dataset(filename, mode="r")
    longitudes = era40.variables["longitude"][:]
    latitudes = era40.variables["latitude"][:]
    t = era40.variables["p2t"][0][:]
    era40.close()
    return longitudes, latitudes, t
Author: hertzsprung, Project: mpecdt-python, Lines: 9, Source: netcdf_plot.py

Example 7: create_netcdf

# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import close [as alias]
 def create_netcdf(self):
     #I'm going to name the output grid's
     #netcdf file after the first file in the grid
     #and add the total filelist as an attribute
     
     newfilename = (self.filelist[0]).strip("'")
     newfilename = newfilename.strip(".loa")
     newfilename += ("_grid.nc")
     if (len(self.filelist) > 8):
         newfilename = "large_grid.nc"
     
     newfile = Dataset(newfilename, mode='w', clobber=True)
     newfile.createDimension('naxes0', self.naxes0)
     newfile.createDimension('naxes1', self.naxes1)
     newfile.createDimension('naxes2', self.naxes2)
     
     var = newfile.createVariable('otfmap', numpy.dtype(numpy.float32), (('naxes2', 'naxes1', 'naxes0')))
     
     var[:] = self.T
     
     var.__setattr__('filenames', self.filelist)
     var.__setattr__('xmax', self.xmax)
     var.__setattr__('ymax', self.ymax)
     var.__setattr__('xmin', self.xmin)
     var.__setattr__('ymin', self.ymin)
     
     newfile.close()
Author: tkareta, Project: otfregrid, Lines: 29, Source: OTFRegrid_minimal.py

Example 8: read_nc

# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import close [as alias]
def read_nc(infile, varname, dimension=-1, is_time=0):
	'''Read a variable from a netCDF file

	Input:
		input file path
		variable name
		dimension: if < 0, read in all dimensions of the variable; if >= 0, only read in the [dimension]th of the variable (index starts from 0). For example, if the first dimension of the variable is time, and if dimension=2, then only reads in the 3rd time step.
		is_time: if the desired variable is time (1 for time; 0 for not time). If it is time, return an array of datetime object

	Return:
		var: a numpy array of the variable values (an array of datetime objects if is_time=1)
	''' 
	from netCDF4 import Dataset
	from netCDF4 import num2date

	nc = Dataset(infile, 'r')
	if is_time==0:  # if not time variable
		if dimension<0:
			var = nc.variables[varname][:]
		else:
			var = nc.variables[varname][dimension]
	if is_time==1:  # if time variable
		time = nc.variables[varname]
		if hasattr(time, 'calendar'):  # if time variable has 'calendar' attribute
			if dimension<0:
				var = num2date(time[:], time.units, time.calendar)
			else:
				var = num2date(time[dimension], time.units, time.calendar)
		else:  # if time variable does not have 'calendar' attribute
			if dimension<0:
				var = num2date(time[:], time.units)
			else:
				var = num2date(time[dimension], time.units)
	nc.close()
	return var
Author: YifanCheng, Project: RIPS, Lines: 37, Source: my_functions.py
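
A short usage sketch of read_nc; the file path and variable names are hypothetical:

prec = read_nc('forcing.nc', 'prec')                  # all time steps of 'prec'
prec_t0 = read_nc('forcing.nc', 'prec', dimension=0)  # first time step only
times = read_nc('forcing.nc', 'time', is_time=1)      # array of datetime objects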

Example 9: load_region

# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import close [as alias]
def load_region(region_id, local=False, return_regions=False):

    if local:
        _vr = Dataset(
            os.path.join(os.path.dirname(os.path.abspath(__file__)), r"data/terrain_parameters/VarslingsOmr_2017.nc"),
            "r")
        # flip up-down because Meps data is upside down
        #_regions = np.flipud(_vr.variables["LokalOmr_2018"][:])
        _regions = _vr.variables["LokalOmr_2018"][:]
    else:
        _vr = Dataset(
            os.path.join(os.path.dirname(os.path.abspath(__file__)), r"data/terrain_parameters/VarslingsOmr_2019.nc"),
            "r")
        # flip up-down because Meps data is upside down
        #_regions = np.flipud(_vr.variables["skredomr19_km"][:])
        _regions = _vr.variables["skredomr19_km"][:]
        print("Missing value: {mv}".format(mv=_vr.variables["skredomr19_km"].missing_value))

    _region_bounds = np.where(_regions == region_id)  # just to get the bounding box

    # get the lower left and upper right corner of a rectangle around the region
    y_min, y_max, x_min, x_max = min(_region_bounds[0].flatten()), max(_region_bounds[0].flatten()), \
                                 min(_region_bounds[1].flatten()), max(_region_bounds[1].flatten())

    #reg_mask = np.ma.masked_where(_regions[y_min:y_max, x_min:x_max] == region_id, _regions[y_min:y_max, x_min:x_max]).mask
    #reg_mask = np.where(_regions[y_min:y_max, x_min:x_max] == region_id, _regions[y_min:y_max, x_min:x_max], np.nan)
    reg_mask = np.where(_regions[y_min:y_max, x_min:x_max] == region_id, 1., np.nan)
    #reg_mask = np.ma.masked_where(_reg_mask == region_id).mask
    _vr.close()

    if return_regions:
        return _regions, reg_mask, y_min, y_max, x_min, x_max
    else:
        return reg_mask, y_min, y_max, x_min, x_max
Author: kmunve, Project: APS, Lines: 36, Source: load_region.py

Example 10: nbdry_grid_hack

# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import close [as alias]
def nbdry_grid_hack(grid_file, num_pts):

    # Read bathymetry and masks
    id = Dataset(grid_file, "a")
    h = id.variables["h"][:, :]
    mask_rho = id.variables["mask_rho"][:, :]
    mask_u = id.variables["mask_u"][:, :]
    mask_v = id.variables["mask_v"][:, :]
    mask_psi = id.variables["mask_psi"][:, :]

    # Loop over longitude
    for i in range(size(h, 1)):
        # Find the southernmost unmasked cell within "num_pts" of the
        # northern boundary and set all the points north of it to match
        found_pt = False
        for j in range(num_pts, -1, -1):
            if mask_rho[-j, i] == 1:
                if found_pt:
                    # Already found the right point
                    h[-j, i] = val
                else:
                    # This is the first unmasked point
                    found_pt = True
                    val = h[-j, i]

    # Save changes
    id.variables["h"][:, :] = h
    id.close()
Author: kaalexander, Project: roms_tools, Lines: 30, Source: nbdry_grid_hack.py

Example 11: save_alts_to_netcdf_file

# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import close [as alias]
def save_alts_to_netcdf_file(path="alt.nc",
                             data_path = "/home/huziy/skynet1_rech3/cordex/CORDEX_DIAG/NorthAmerica_0.44deg_MPI_B1",
                             year_range=None, coord_file=None):

    year_range = range(1950, 2101) if year_range is None else year_range
    ds = Dataset(path, mode="w", format="NETCDF3_CLASSIC")

    if coord_file is None:
        coord_file = os.path.join(data_path, "pmNorthAmerica_0.44deg_MPIHisto_B1_200009_moyenne")

    b, lons2d, lats2d = draw_regions.get_basemap_and_coords(file_path=coord_file)
    ds.createDimension('year', len(year_range))
    ds.createDimension('lon', lons2d.shape[0])
    ds.createDimension('lat', lons2d.shape[1])

    lon_variable = ds.createVariable('longitude', 'f4', ('lon', 'lat'))
    lat_variable = ds.createVariable('latitude', 'f4', ('lon', 'lat'))
    year_variable = ds.createVariable("year", "i4", ("year",))

    alt_variable = ds.createVariable("alt", "f4", ('year', 'lon', 'lat'))

    lon_variable[:, :] = lons2d[:, :]
    lat_variable[:, :] = lats2d[:, :]
    year_variable[:] = year_range

    dm = CRCMDataManager(data_folder=data_path)
    dm_list = len(year_range) * [dm]
    mean_types = len(year_range) * ["monthly"]
    pool = Pool(processes=6)
    alts = pool.map(get_alt_for_year, list(zip(year_range, dm_list, mean_types)))
    alts = np.array(alts)
    alt_variable[:, :, :] = alts[:, :, :]
    ds.close()
Author: guziy, Project: RPN, Lines: 35, Source: active_layer_thickness.py

Example 12: __init__

# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import close [as alias]
class EncodingExample:
    def __init__(self):
        self.nc = None


    def __enter__(self):
        self.nc = Dataset('/tmp/example.nc', 'w')
        return self

    def __exit__(self, type, value, traceback):
        try:
            self.nc.close()
        except:
            pass

    def make_file(self):
        self.nc.createDimension('time')
        t_var = self.nc.createVariable('temperature', np.float32, ('time',), fill_value=-9999.)
        t_var.standard_name = 'seawater_temperature'
        t_var.units = 'deg_C'
        t_var.long_name = 'Surface Temperature'
        qc_var = self.nc.createVariable('temperature_qc', np.uint8, ('time',), fill_value=np.uint8(9))
        qc_var.standard_name = 'seawater_temperature status_flag'
        qc_var.long_name = 'Surface Temperature Quality Flag'
        qc_var.flag_values = np.array([1, 2, 3, 4, 9], dtype=np.uint8)
        qc_var.flag_meanings = "quality_good not_evaluated suspect fail missing_data"

        qc_var = self.nc.createVariable('temperature_tests_qc', np.uint64, ('time',))
        qc_var.standard_name = 'seawater_temperature status_flag'
        qc_var.long_name = 'Seawater Temperature Test Flag'
        qc_var.flag_decode_instructions = 'http://www.ioos.noaa.gov/qartod/welcome.html'
Author: lukecampbell, Project: notebooks, Lines: 33, Source: encoding.py
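
Because EncodingExample defines __enter__ and __exit__, the Dataset is closed even if make_file() raises. A minimal usage sketch:

with EncodingExample() as example:
    example.make_file()   # close() is called by __exit__ when the block exits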

Example 13: flagStats_single

# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import close [as alias]
    def flagStats_single(self, fname):
        '''counter of all the primary and secondary flags

        '''
        import pandas as pd
        df = Dataset(fname, 'r')
        arr = [pd.Series({'time size': df['time'].size})]
        for vrbl in df.variables:
            if '_flagPrimary' in vrbl:
                dict = {}
                v = vrbl.split('_')[0]
                flagP = vrbl
                flagS = v+'_flagSecondary'
                pArr = df[flagP][:]
                for p in [1,2,3,4,9]:
                    # print flagP, p,':', df[flagP][:].tolist().count(p)
                    dict[flagP+'.'+str(p)] = df[flagP][:].tolist().count(p)
                for s in [1,2,3]:
                    # print flagS, s, ':', df[flagS][:].tolist().count(s)
                    pAtsArr = df[flagP][np.isin(df[flagS][:],s)]
                    # print flagS, s, '(3):', pAtsArr.tolist().count(3)
                    # print flagS, s, '(4):', pAtsArr.tolist().count(4)
                    dict[flagS+'.'+str(s)+'.3']=  pAtsArr.tolist().count(3)
                    dict[flagS+'.'+str(s)+'.4']=  pAtsArr.tolist().count(4)
                arr.append(pd.Series(dict))
        df.close()

        return pd.concat(arr)
Author: sarahheim, Project: ncObjects, Lines: 30, Source: nc.py

Example 14: updateNCmeta

# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import close [as alias]
 def updateNCmeta(self, ncName, newDir, lookup):
     '''createNCshell (which will put latest metadata in netcdf), then add all previous data
     File name will be the same so pass directory to put new files.
     '''
     # import xarray as xr
     fname  = os.path.join(self.ncpath, ncName)
     print os.path.isfile(fname), fname
     newName = os.path.join(newDir, ncName)
     print newName
     if fname != newName:
         self.createNCshell(newName, lookup)
         # ds = xr.open_dataset(fname)
         # df = ds.to_dataframe()
         df = Dataset(fname, 'r')
         # df['epochs'] = df.index.values.astype('int64') // 10**9
         ncfile = Dataset(newName, 'a', format='NETCDF4')
         # ncfile.variables['time'][0:] = df['epochs'].values
         # ncfile.variables['time'][0:] = df['time']
         for vrbl in df.variables:
             # print vrbl, df.variables[vrbl].size, df.variables[vrbl].dtype
             #atLen = len(ncfile.variables[vrbl][:])
             ncfile.variables[vrbl][0:] = df[vrbl][:]
             if (df.variables[vrbl].dtype != 'S1') and (vrbl != 'time'): self.attrMinMax(ncfile, vrbl)
         self.NCtimeMeta(ncfile)
         df.close()
         ncfile.close()
         print 'done', ncName
Author: sarahheim, Project: ncObjects, Lines: 29, Source: nc.py

Example 15: NetCDFData

# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import close [as alias]
class NetCDFData(Data):

    def __init__(self, url):
        self._dataset = None
        self.__timestamp_cache = TTLCache(1, 3600)
        super(NetCDFData, self).__init__(url)

    def __enter__(self):
        self._dataset = Dataset(self.url, 'r')

        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self._dataset.close()

    @property
    def timestamps(self):
        if self.__timestamp_cache.get("timestamps") is None:
            var = None
            for v in ['time', 'time_counter']:
                if v in self._dataset.variables:
                    var = self._dataset.variables[v]
                    break

            t = netcdftime.utime(var.units)
            timestamps = np.array(
                map(
                    lambda ts: t.num2date(ts).replace(tzinfo=pytz.UTC),
                    var[:]
                )
            )
            timestamps.flags.writeable = False
            self.__timestamp_cache["timestamps"] = timestamps

        return self.__timestamp_cache.get("timestamps")
Author: michaelsmit, Project: ocean-navigator, Lines: 37, Source: netcdf_data.py


Note: The netCDF4.Dataset.close examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by various developers; copyright of the source code belongs to the original authors, and any distribution or use should follow the corresponding project's license. Do not reproduce without permission.