

Python Dataset.createCompoundType Method Code Examples

This article collects typical usage examples of the Python method netCDF4.Dataset.createCompoundType. If you are wondering what Dataset.createCompoundType does, how to call it, or what real-world uses look like, the curated examples below should help. You can also explore further usage examples of the enclosing netCDF4.Dataset class.


Eight code examples of the Dataset.createCompoundType method are shown below, sorted by popularity by default.
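Before the individual examples, here is a minimal, self-contained sketch of the typical createCompoundType workflow: build a numpy structured dtype, register it on the Dataset, create a variable with the resulting type, and write a matching structured array. The file, dimension, and variable names are purely illustrative.

import numpy as np
from netCDF4 import Dataset

f = Dataset('compound_demo.nc', 'w')
# a compound type is described by a numpy structured dtype
wind_dtype = np.dtype([('speed', 'f4'), ('direction', 'i4')])
wind_t = f.createCompoundType(wind_dtype, 'wind_data')
f.createDimension('station', None)                  # unlimited dimension
v = f.createVariable('wind', wind_t, ('station',))
# data is written through a structured array with the same layout
arr = np.empty(1, wind_dtype)
arr['speed'] = 12.5
arr['direction'] = 270
v[:] = arr
f.close()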

Example 1: setUp

# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import createCompoundType [as alias]
 def setUp(self):
     self.file = FILE_NAME
     f  = Dataset(self.file, 'w')
     d = f.createDimension(DIM_NAME,None)
     g = f.createGroup(GROUP_NAME)
     wind_vector_type = f.createCompoundType(dtype, TYPE_NAME)
     wind_vectorunits_type = f.createCompoundType(dtypec, TYPE_NAMEC)
     v = f.createVariable(VAR_NAME,wind_vector_type, DIM_NAME)
     vv = g.createVariable(VAR_NAME2,wind_vector_type,DIM_NAME)
     v.missing_values = missvals
     v.units = windunits
     vv.missing_values = missvals
     vv.units = windunits
     f.close()
Developer: Unidata, Project: netcdf4-python, Lines: 16, Source: tst_compoundatt.py
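The setUp above depends on module-level constants from tst_compoundatt.py that the excerpt omits. A plausible reconstruction is shown below purely for context; the names FILE_NAME, DIM_NAME and so on, and the exact field definitions, are assumptions rather than the test's real values.

# Assumed module-level setup for Example 1 (the real values in
# tst_compoundatt.py may differ).
import tempfile
import numpy as np

FILE_NAME = tempfile.NamedTemporaryFile(suffix='.nc', delete=False).name
DIM_NAME, GROUP_NAME = 'obs', 'forecasts'
VAR_NAME, VAR_NAME2 = 'wind', 'wind2'
TYPE_NAME, TYPE_NAMEC = 'wind_vector_type', 'wind_vector_units_type'

# compound dtype for the data, plus a string-valued companion dtype used
# for the units and missing_values attributes
dtype = np.dtype([('speed', 'f4'), ('direction', 'f4')])
dtypec = np.dtype([('speed', 'S12'), ('direction', 'S12')])

missvals = np.empty(1, dtype)
missvals['speed'] = -999.
missvals['direction'] = -999.
windunits = np.empty(1, dtypec)
windunits['speed'] = 'm/s'
windunits['direction'] = 'degrees'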

Example 2: runTest

# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import createCompoundType [as alias]
 def runTest(self):
     """testing compound variables"""
     f = Dataset(self.file, 'r')
     v = f.variables[VAR_NAME]
     g = f.groups[GROUP_NAME]
     vv = g.variables[VAR_NAME]
     dataout = v[:]
     dataoutg = vv[:]
     # make sure data type is aligned
     assert (f.cmptypes['cmp4'] == dtype4a)
     assert(list(f.cmptypes.keys()) ==\
            [TYPE_NAME1,TYPE_NAME2,TYPE_NAME3,TYPE_NAME4,TYPE_NAME5])
     assert_array_equal(dataout['xxx']['xx']['i'],data['xxx']['xx']['i'])
     assert_array_equal(dataout['xxx']['xx']['j'],data['xxx']['xx']['j'])
     assert_array_almost_equal(dataout['xxx']['yy']['x'],data['xxx']['yy']['x'])
     assert_array_almost_equal(dataout['xxx']['yy']['y'],data['xxx']['yy']['y'])
     assert_array_almost_equal(dataout['yyy'],data['yyy'])
     assert_array_equal(dataoutg['x1']['i'],datag['x1']['i'])
     assert_array_equal(dataoutg['x1']['j'],datag['x1']['j'])
     assert_array_almost_equal(dataoutg['y1']['x'],datag['y1']['x'])
     assert_array_almost_equal(dataoutg['y1']['y'],datag['y1']['y'])
     f.close()
     # issue 773
     f = Dataset(self.file,'w')
     dtype = np.dtype([('observation', 'i4'),
                       ('station_name','S80')])
     dtype_nest = np.dtype([('observation', 'i4'),
                            ('station_name','S80'),
                            ('nested_observation',dtype)])
     station_data_t1 = f.createCompoundType(dtype,'station_data1')
     station_data_t2 = f.createCompoundType(dtype_nest,'station_data')
     f.createDimension('station',None)
     statdat = f.createVariable('station_obs', station_data_t2, ('station',))
     assert(statdat.dtype == station_data_t2.dtype)
     datain = np.empty(2,station_data_t2.dtype_view)
     datain['observation'][:] = (123,314)
     datain['station_name'][:] = ('Boulder','New York')
     datain['nested_observation']['observation'][:] = (-999,999)
     datain['nested_observation']['station_name'][:] = ('Boston','Chicago')
     statdat[:] = datain
     f.close()
     f = Dataset(self.file)
     dataout = f['station_obs'][:]
     assert(dataout.dtype == station_data_t2.dtype_view)
     assert_array_equal(datain, dataout)
     f.close()
Developer: Unidata, Project: netcdf4-python, Lines: 48, Source: tst_compoundvar.py
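The issue-773 part of Example 2 distinguishes station_data_t2.dtype from station_data_t2.dtype_view. As far as one can tell from the behaviour being tested, fixed-width string fields such as 'S80' are carried inside the compound type as arrays of single characters, while dtype_view re-exposes them as whole strings so that assignments like datain['station_name'][:] = ('Boulder', 'New York') work. The pure-numpy illustration below is an assumption about that layout, not code taken from the test.

import numpy as np

# "stored" layout: each S80 field as 80 single characters
packed = np.dtype([('observation', 'i4'), ('station_name', 'S1', 80)])
# "view" layout: the same bytes as one fixed-width string
viewed = np.dtype([('observation', 'i4'), ('station_name', 'S80')])
assert packed.itemsize == viewed.itemsize       # identical memory footprint

a = np.zeros(2, viewed)
a['station_name'] = ('Boulder', 'New York')     # whole-string assignment
chars = a.view(packed)['station_name']          # character-by-character view
print(chars[0][:3])                             # [b'B' b'o' b'u']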

Example 3: setUp

# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import createCompoundType [as alias]
 def setUp(self):
     self.file = FILE_NAME
     f  = Dataset(self.file, 'w')
     d = f.createDimension(DIM_NAME,DIM_SIZE)
     g = f.createGroup(GROUP_NAME)
     # simple compound types.
     cmptype1 = f.createCompoundType(dtype1, TYPE_NAME1)
     cmptype2 = f.createCompoundType(dtype2, TYPE_NAME2)
     # close and reopen the file to make sure compound
     # type info read back in correctly.
     f.close()
     f = Dataset(self.file,'r+')
     g = f.groups[GROUP_NAME]
     # multiply nested compound types
     cmptype3 = f.createCompoundType(dtype3, TYPE_NAME3)
     cmptype4 = f.createCompoundType(dtype4, TYPE_NAME4)
     cmptype5 = f.createCompoundType(dtype5, TYPE_NAME5)
     v = f.createVariable(VAR_NAME,cmptype4, DIM_NAME)
     vv = g.createVariable(VAR_NAME,cmptype5, DIM_NAME)
     v[:] = data
     vv[:] = datag
     f.close()
Developer: 8900, Project: netCDF4-Python, Lines: 24, Source: tst_compoundvar.py
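Examples 2 to 4 all rely on module-level definitions in tst_compoundvar.py that the excerpts leave out: the nested dtypes dtype1 to dtype5, an aligned variant dtype4a, and the data/datag arrays. The reconstruction below keeps the field names used by the assertions in Example 2 ('xx', 'yy', 'xxx', 'yyy', 'x1', 'y1', 'i', 'j', 'x', 'y') and the type names 'cmp1' to 'cmp5'; the scalar types, shapes, and fill values are guesses.

# Assumed module-level setup for Examples 2-4.
import numpy as np

FILE_NAME, DIM_NAME, DIM_SIZE = 'tst_compound.nc', 'phony_dim', 3
GROUP_NAME, VAR_NAME = 'phony_group', 'phony_var'
TYPE_NAME1, TYPE_NAME2, TYPE_NAME3, TYPE_NAME4, TYPE_NAME5 = \
    'cmp1', 'cmp2', 'cmp3', 'cmp4', 'cmp5'

dtype1 = np.dtype([('i', 'i2'), ('j', 'i8')])              # simple
dtype2 = np.dtype([('x', 'f4'), ('y', 'f8')])              # simple
dtype3 = np.dtype([('xx', dtype1), ('yy', dtype2)])        # nested
dtype4 = np.dtype([('xxx', dtype3), ('yyy', 'f8', (4,))])  # doubly nested
dtype5 = np.dtype([('x1', dtype1), ('y1', dtype2)])        # nested
dtype4a = np.dtype(dtype4.descr, align=True)               # aligned twin

data = np.empty(DIM_SIZE, dtype4)
data['xxx']['xx']['i'] = 1; data['xxx']['xx']['j'] = 2
data['xxx']['yy']['x'] = 1.5; data['xxx']['yy']['y'] = 2.5
data['yyy'] = 3.5
datag = np.empty(DIM_SIZE, dtype5)
datag['x1']['i'] = 1; datag['x1']['j'] = 2
datag['y1']['x'] = 1.5; datag['y1']['y'] = 2.5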

Example 4: setUp

# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import createCompoundType [as alias]
 def setUp(self):
     self.file = FILE_NAME
     f  = Dataset(self.file, 'w')
     d = f.createDimension(DIM_NAME,DIM_SIZE)
     g = f.createGroup(GROUP_NAME)
     # simple compound types.
     cmptype1 = f.createCompoundType(dtype1, TYPE_NAME1)
     cmptype2 = f.createCompoundType(dtype2, TYPE_NAME2)
     # close and reopen the file to make sure compound
     # type info read back in correctly.
     f.close()
     f = Dataset(self.file,'r+')
     g = f.groups[GROUP_NAME]
     # multiply nested compound types
     cmptype3 = f.createCompoundType(dtype3, TYPE_NAME3)
     cmptype4 = f.createCompoundType(dtype4, TYPE_NAME4)
     cmptype5 = f.createCompoundType(dtype5, TYPE_NAME5)
     v = f.createVariable(VAR_NAME,cmptype4, DIM_NAME)
     vv = g.createVariable(VAR_NAME,cmptype5, DIM_NAME)
     v[:] = data
     vv[:] = datag
     # try reading the data back before the file is closed
     dataout = v[:]
     dataoutg = vv[:]
     assert (cmptype4 == dtype4a) # data type should be aligned
     assert (dataout.dtype == dtype4a) # data type should be aligned
     assert(list(f.cmptypes.keys()) ==\
            [TYPE_NAME1,TYPE_NAME2,TYPE_NAME3,TYPE_NAME4,TYPE_NAME5])
     assert_array_equal(dataout['xxx']['xx']['i'],data['xxx']['xx']['i'])
     assert_array_equal(dataout['xxx']['xx']['j'],data['xxx']['xx']['j'])
     assert_array_almost_equal(dataout['xxx']['yy']['x'],data['xxx']['yy']['x'])
     assert_array_almost_equal(dataout['xxx']['yy']['y'],data['xxx']['yy']['y'])
     assert_array_almost_equal(dataout['yyy'],data['yyy'])
     assert_array_equal(dataoutg['x1']['i'],datag['x1']['i'])
     assert_array_equal(dataoutg['x1']['j'],datag['x1']['j'])
     assert_array_almost_equal(dataoutg['y1']['x'],datag['y1']['x'])
     assert_array_almost_equal(dataoutg['y1']['y'],datag['y1']['y'])
     f.close()
Developer: Unidata, Project: netcdf4-python, Lines: 40, Source: tst_compoundvar.py
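The "data type should be aligned" assertions compare against dtype4a, which by its name is an aligned variant of dtype4 (see the reconstruction after Example 3). The tiny, hypothetical illustration below shows what numpy's align=True option changes: field offsets are padded to natural alignment, so the aligned and packed dtypes compare unequal even though the fields are the same.

import numpy as np

fields = [('i', 'i2'), ('x', 'f8')]
packed = np.dtype(fields)                 # offsets 0 and 2, itemsize 10
aligned = np.dtype(fields, align=True)    # offsets 0 and 8, itemsize 16
print(packed.itemsize, aligned.itemsize)  # 10 16
print(packed == aligned)                  # False: layouts differ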

Example 5: MFDataset

# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import createCompoundType [as alias]
# (the excerpt starts inside a loop that writes a series of files sharing an
#  unlimited dimension; the missing setup is reconstructed here, mirroring
#  the same loop in Example 7, so the snippet runs on its own)
import numpy
from netCDF4 import Dataset
for nfile in range(10):
    f = Dataset('mftest'+repr(nfile)+'.nc', 'w', format='NETCDF4_CLASSIC')
    f.createDimension('x', None)
    x = f.createVariable('x', 'i', ('x',))
    x[0:10] = numpy.arange(nfile*10, 10*(nfile+1))
    f.close()
# now read all those files in at once, in one Dataset.
from netCDF4 import MFDataset
f = MFDataset('mftest*nc')
print(f.variables['x'][:])

# example showing how to save numpy complex arrays using compound types.
f = Dataset('complex.nc','w')
size = 3 # length of 1-d complex array
# create sample complex data.
datac = numpy.exp(1j*(1.+numpy.linspace(0, numpy.pi, size)))
print(datac.dtype)
# create complex128 compound data type.
complex128 = numpy.dtype([('real',numpy.float64),('imag',numpy.float64)])
complex128_t = f.createCompoundType(complex128,'complex128')
# create a variable with this data type, write some data to it.
f.createDimension('x_dim',None)
v = f.createVariable('cmplx_var',complex128_t,'x_dim')
data = numpy.empty(size,complex128) # numpy structured array
data['real'] = datac.real; data['imag'] = datac.imag
v[:] = data
# close and reopen the file, check the contents.
f.close()
f = Dataset('complex.nc')
print(f)
print(f.variables['cmplx_var'])
print(f.cmptypes)
print(f.cmptypes['complex128'])
v = f.variables['cmplx_var']
print(v.shape)
Developer: hhiester, Project: convert2vtk, Lines: 33, Source: tutorial.py
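Example 5 stops after inspecting the re-opened file. The read-back half of the round trip, shown in full within Example 7 below, rebuilds the complex array from the two real-valued fields; as a continuation of the code above it would look like this:

datain = v[:]                                   # numpy structured array
datac2 = numpy.empty(datain.shape, numpy.complex128)
datac2.real = datain['real']
datac2.imag = datain['imag']
print(datac2)                                   # matches the original datac
f.close()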

Example 6: range

# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import createCompoundType [as alias]
# (the excerpt relies on a Dataset handle f opened earlier in the source
#  file; a minimal prelude is reconstructed here, with an assumed file name)
import numpy
from netCDF4 import Dataset
f = Dataset('station_example.nc', 'w')  # file name is an assumption
# create an unlimited dimension called 'station'
f.createDimension('station',None)



NUMCHARS = 80 # number of characters to use in fixed-length strings.
winddtype = numpy.dtype([('speed','f4'),('direction','i4')])
statdtype = numpy.dtype([('latitude', 'f4'), ('longitude', 'f4'),
                         ('surface_wind',winddtype),
                         ('temp_sounding','f4',10),('press_sounding','i4',10),
                         ('location_name','S1',NUMCHARS)])
# use these data type definitions to create compound data types
# by calling the createCompoundType Dataset method.
# create a compound type for vector wind which will be nested inside
# the station data type. This must be done first!
wind_data_t = f.createCompoundType(winddtype,'wind_data')
# now that wind_data_t is defined, create the station data type.
station_data_t = f.createCompoundType(statdtype,'station_data')



statdat = f.createVariable('station_obs', station_data_t, ('station',))
# create a numpy structured array, assign data to it.
data = numpy.empty(2,station_data_t)
data['latitude'] = 40.
data['longitude'] = -105.
data['surface_wind']['speed'] = 12.5
data['surface_wind']['direction'] = 270
data['temp_sounding'] = (280.3,272.,270.,269.,266.,258.,254.1,250.,245.5,240.)
data['press_sounding'] = range(800,300,-50)
Developer: ctw, Project: ptsa_new, Lines: 32, Source: test_xray_serialization.py

Example 7: test_tutorial

# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import createCompoundType [as alias]
def test_tutorial():
# 2 unlimited dimensions.
#temp = rootgrp.createVariable('temp','f4',('time','level','lat','lon',))
# this makes the compression 'lossy' (preserving a precision of 1/1000)
# try it and see how much smaller the file gets.
    temp = rootgrp.createVariable('temp','f4',('time','level','lat','lon',),least_significant_digit=3)
# attributes.
    import time
    rootgrp.description = 'bogus example script'
    rootgrp.history = 'Created ' + time.ctime(time.time())
    rootgrp.source = 'netCDF4 python module tutorial'
    latitudes.units = 'degrees north'
    longitudes.units = 'degrees east'
    levels.units = 'hPa'
    temp.units = 'K'
    times.units = 'hours since 0001-01-01 00:00:00.0'
    times.calendar = 'gregorian'
    for name in rootgrp.ncattrs():
        print('Global attr', name, '=', getattr(rootgrp,name))
    print(rootgrp)
    print(rootgrp.__dict__)
    print(rootgrp.variables)
    print(rootgrp.variables['temp'])
    import numpy
# no unlimited dimension, just assign to slice.
    lats = numpy.arange(-90,91,2.5)
    lons = numpy.arange(-180,180,2.5)
    latitudes[:] = lats
    longitudes[:] = lons
    print('latitudes =\n',latitudes[:])
    print('longitudes =\n',longitudes[:])
# append along two unlimited dimensions by assigning to slice.
    nlats = len(rootgrp.dimensions['lat'])
    nlons = len(rootgrp.dimensions['lon'])
    print('temp shape before adding data = ',temp.shape)
    from numpy.random import uniform # random number generator.
    temp[0:5,0:10,:,:] = uniform(size=(5,10,nlats,nlons))
    print('temp shape after adding data = ',temp.shape)
# levels have grown, but no values yet assigned.
    print('levels shape after adding pressure data = ',levels.shape)
# assign values to levels dimension variable.
    levels[:] = [1000.,850.,700.,500.,300.,250.,200.,150.,100.,50.]
# fancy slicing
    tempdat = temp[::2, [1,3,6], lats>0, lons>0]
    print('shape of fancy temp slice = ',tempdat.shape)
    print(temp[0, 0, [0,1,2,3], [0,1,2,3]].shape)
# fill in times.
    from datetime import datetime, timedelta
    from netCDF4 import num2date, date2num, date2index
    dates = [datetime(2001,3,1)+n*timedelta(hours=12) for n in range(temp.shape[0])]
    times[:] = date2num(dates,units=times.units,calendar=times.calendar)
    print('time values (in units %s): ' % times.units+'\n',times[:])
    dates = num2date(times[:],units=times.units,calendar=times.calendar)
    print('dates corresponding to time values:\n',dates)
    rootgrp.close()
# create a series of netCDF files with a variable sharing
# the same unlimited dimension.
    for nfile in range(10):
        f = Dataset('mftest'+repr(nfile)+'.nc','w',format='NETCDF4_CLASSIC')
        f.createDimension('x',None)
        x = f.createVariable('x','i',('x',))
        x[0:10] = numpy.arange(nfile*10,10*(nfile+1))
    f.close()
# now read all those files in at once, in one Dataset.
    from netCDF4 import MFDataset
    f = MFDataset('mftest*nc')
    print(f.variables['x'][:])
# example showing how to save numpy complex arrays using compound types.
    f = Dataset('complex.nc','w')
    size = 3 # length of 1-d complex array
# create sample complex data.
    datac = numpy.exp(1j*(1.+numpy.linspace(0, numpy.pi, size)))
    print(datac.dtype)
# create complex128 compound data type.
    complex128 = numpy.dtype([('real',numpy.float64),('imag',numpy.float64)])
    complex128_t = f.createCompoundType(complex128,'complex128')
# create a variable with this data type, write some data to it.
    f.createDimension('x_dim',None)
    v = f.createVariable('cmplx_var',complex128_t,'x_dim')
    data = numpy.empty(size,complex128) # numpy structured array
    data['real'] = datac.real; data['imag'] = datac.imag
    v[:] = data
# close and reopen the file, check the contents.
    f.close()
    f = Dataset('complex.nc')
    print(f)
    print(f.variables['cmplx_var'])
    print(f.cmptypes)
    print(f.cmptypes['complex128'])
    v = f.variables['cmplx_var']
    print(v.shape)
    datain = v[:] # read in all the data into a numpy structured array
# create an empty numpy complex array
    datac2 = numpy.empty(datain.shape,numpy.complex128)
# .. fill it with contents of structured array.
    datac2.real = datain['real']
    datac2.imag = datain['imag']
    print(datac.dtype,datac)
    print(datac2.dtype,datac2)
# more complex compound type example.
#......... part of the code is omitted here .........
Developer: kmunve, Project: TSanalysis, Lines: 103, Source: crocus_forcing_nc.py

Example 8: range

# Required import: from netCDF4 import Dataset [as alias]
# Or: from netCDF4.Dataset import createCompoundType [as alias]
# (as in Example 6, the excerpt assumes an already-open Dataset f, the
#  NUMCHARS constant and the stringtoarr helper; an assumed prelude follows)
import numpy
from netCDF4 import Dataset, stringtoarr
f = Dataset("station_example.nc", "w")  # file name is an assumption
f.createDimension("station", None)
NUMCHARS = 80  # number of characters to use in fixed-length strings
winddtype = numpy.dtype([("speed", "f4"), ("direction", "i4")])
statdtype = numpy.dtype(
    [
        ("latitude", "f4"),
        ("longitude", "f4"),
        ("surface_wind", winddtype),
        ("temp_sounding", "f4", 10),
        ("press_sounding", "i4", 10),
        ("location_name", "S1", NUMCHARS),
    ]
)
# use these data type definitions to create compound data types
# by calling the createCompoundType Dataset method.
# create a compound type for vector wind which will be nested inside
# the station data type. This must be done first!
wind_data_t = f.createCompoundType(winddtype, "wind_data")
# now that wind_data_t is defined, create the station data type.
station_data_t = f.createCompoundType(statdtype, "station_data")


statdat = f.createVariable("station_obs", station_data_t, ("station",))
# create a numpy structured array, assign data to it.
data = numpy.empty(2, station_data_t)
data["latitude"] = 40.0
data["longitude"] = -105.0
data["surface_wind"]["speed"] = 12.5
data["surface_wind"]["direction"] = 270
data["temp_sounding"] = (280.3, 272.0, 270.0, 269.0, 266.0, 258.0, 254.1, 250.0, 245.5, 240.0)
data["press_sounding"] = range(800, 300, -50)

data["location_name"][0] = stringtoarr("Boulder, Colorado, USA", NUMCHARS)
Developer: maciekswat, Project: ptsa_new, Lines: 33, Source: test_xray_serialization.py
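Examples 6 and 8 both break off before the structured array is written to the file. A continuation sketch, modelled on the netCDF4 tutorial these snippets mirror (the reopened file name matches the assumed prelude, and the second station record is illustrative):

data["location_name"][1] = stringtoarr("New York, New York, USA", NUMCHARS)
statdat[:] = data                         # write both station records
f.close()

# read the data back and pull apart the nested compound fields
f = Dataset("station_example.nc")         # assumed file name (see prelude)
obs = f.variables["station_obs"][:]
print(obs["latitude"], obs["longitude"])
print(obs["surface_wind"]["speed"], obs["surface_wind"]["direction"])
print(obs["location_name"])               # fixed-length character data
f.close()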


Note: the netCDF4.Dataset.createCompoundType method examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are taken from open-source projects contributed by various developers; copyright remains with the original authors, and any redistribution or use should follow the corresponding project's license. Do not republish without permission.