本文整理汇总了Python中pyhdf.SD.SD.select方法的典型用法代码示例。如果您正苦于以下问题:Python SD.select方法的具体用法?Python SD.select怎么用?Python SD.select使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类pyhdf.SD.SD
的用法示例。
在下文中一共展示了SD.select方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: read_var_point
# 需要导入模块: from pyhdf.SD import SD [as 别名]
# 或者: from pyhdf.SD.SD import select [as 别名]
def read_var_point(filename, var_name, i, j, k, thetac, phic):
    """Read one grid-point value of *var_name* from an LFM-style HDF4 file.

    For the spherical field components ('br','btheta','bphi','vr','vtheta',
    'vphi') the Cartesian components are read and rotated using the cell
    angles thetac[j] and phic[k]; any other name is read directly from the
    dataset called ``var_name + '_'``.

    Parameters
    ----------
    filename : str
        Path of the HDF4 file.
    var_name : str
        Variable name (without the trailing underscore used in the file).
    i, j, k : int
        Grid indices; datasets are stored with axes ordered (k, j, i).
    thetac, phic : 1-D arrays
        Cell-center angles indexed by j and k respectively.

    Returns
    -------
    Scalar value of the requested variable at (i, j, k).
    """
    thetac = thetac[j]
    phic = phic[k]
    hdffile = SD(filename, SDC.READ)
    try:
        if var_name not in ('br', 'btheta', 'bphi', 'vr', 'vtheta', 'vphi'):
            # Native dataset: read the single cell directly.
            var = hdffile.select(var_name + '_').get(start=(k, j, i),
                                                     count=(1, 1, 1)).squeeze()
        else:
            # The rotation formulas are identical for the magnetic ('b') and
            # velocity ('v') fields, so the two duplicated branches of the
            # original are folded into one using the field prefix.
            p = var_name[0]  # 'b' or 'v'
            cx = hdffile.select(p + 'x_').get(start=(k, j, i), count=(1, 1, 1)).squeeze()
            cy = hdffile.select(p + 'y_').get(start=(k, j, i), count=(1, 1, 1)).squeeze()
            cz = hdffile.select(p + 'z_').get(start=(k, j, i), count=(1, 1, 1)).squeeze()
            if var_name.endswith('r'):
                var = cx*cos(phic)*sin(thetac) + cy*sin(phic)*sin(thetac) + cz*cos(thetac)
            elif var_name.endswith('theta'):
                var = cx*cos(phic)*cos(thetac) + cy*sin(phic)*cos(thetac) - cz*sin(thetac)
            else:  # phi component
                var = -cx*sin(phic) + cy*cos(phic)
    finally:
        # Always release the HDF file (the original leaked it on error).
        hdffile.end()
    return var
示例2: read_var_islice
# 需要导入模块: from pyhdf.SD import SD [as 别名]
# 或者: from pyhdf.SD.SD import select [as 别名]
def read_var_islice(filename, var_name, i, thetac, phic):
    """Read a full (k, j) slice at radial index *i* from an LFM-style HDF4 file.

    Spherical components ('br','btheta','bphi','vr','vtheta','vphi') are
    built by rotating the Cartesian components with the cell-center angle
    arrays; other names are read directly from ``var_name + '_'``.

    Parameters
    ----------
    filename : str
        Path of the HDF4 file.
    var_name : str
        Variable name (without the trailing underscore used in the file).
    i : int
        Radial index of the slice; datasets are stored as (k, j, i).
    thetac, phic : 1-D arrays
        Cell-center angles of length nj and nk respectively.

    Returns
    -------
    2-D array of shape (nk, nj).
    """
    nk = phic.size
    nj = thetac.size
    # Broadcast the angle vectors against each other: result is (nk, nj).
    phic = phic[:, None]
    thetac = thetac[None, :]
    hdffile = SD(filename, SDC.READ)
    try:
        if var_name not in ('br', 'btheta', 'bphi', 'vr', 'vtheta', 'vphi'):
            var = hdffile.select(var_name + '_').get(start=(0, 0, i),
                                                     count=(nk, nj, 1)).squeeze()
        else:
            # Same rotation for 'b' and 'v' fields; fold the original's
            # duplicated branches using the field prefix.
            p = var_name[0]  # 'b' or 'v'
            cx = hdffile.select(p + 'x_').get(start=(0, 0, i), count=(nk, nj, 1)).squeeze()
            cy = hdffile.select(p + 'y_').get(start=(0, 0, i), count=(nk, nj, 1)).squeeze()
            cz = hdffile.select(p + 'z_').get(start=(0, 0, i), count=(nk, nj, 1)).squeeze()
            if var_name.endswith('r'):
                var = cx*cos(phic)*sin(thetac) + cy*sin(phic)*sin(thetac) + cz*cos(thetac)
            elif var_name.endswith('theta'):
                var = cx*cos(phic)*cos(thetac) + cy*sin(phic)*cos(thetac) - cz*sin(thetac)
            else:  # phi component
                var = -cx*sin(phic) + cy*cos(phic)
    finally:
        # Always release the HDF file (the original leaked it on error).
        hdffile.end()
    return var
示例3: test_1000m_to_250m
# 需要导入模块: from pyhdf.SD import SD [as 别名]
# 或者: from pyhdf.SD.SD import select [as 别名]
def test_1000m_to_250m(self):
    """Test the 1 km to 250 meter interpolation facility.

    Prefers the real MOD03 granule; falls back to a stored npz section
    when the HDF file cannot be read, and skips silently when neither
    input is available.
    """
    gfilename_hdf = "testdata/MOD03_A12278_113638_2012278145123.hdf"
    gfilename = "testdata/250m_lonlat_section_input.npz"
    result_filename = "testdata/250m_lonlat_section_result.npz"
    from pyhdf.SD import SD
    from pyhdf.error import HDF4Error
    gdata = None
    try:
        gdata = SD(gfilename_hdf)
    except HDF4Error:
        # Parenthesised print works under both Python 2 and 3; the
        # original used a Python-2-only print statement.
        print("Failed reading eos-hdf file %s" % gfilename_hdf)
        try:
            indata = np.load(gfilename)
        except IOError:
            # Neither input available: nothing to test.
            return
    if gdata:
        lats = gdata.select("Latitude")[20:50, :]
        lons = gdata.select("Longitude")[20:50, :]
    else:
        # Stored values are scaled by 1000 in the npz fallback.
        lats = indata['lat'] / 1000.
        lons = indata['lon'] / 1000.
    verif = np.load(result_filename)
    vlons = verif['lon'] / 1000.
    vlats = verif['lat'] / 1000.
    tlons, tlats = modis1kmto250m(lons, lats)
    # assertTrue replaces the deprecated assert_ alias.
    self.assertTrue(np.allclose(tlons, vlons, atol=0.05))
    self.assertTrue(np.allclose(tlats, vlats, atol=0.05))
示例4: readMOD35L2
# 需要导入模块: from pyhdf.SD import SD [as 别名]
# 或者: from pyhdf.SD.SD import select [as 别名]
def readMOD35L2(fname, geoloc_only=False):
    """Read a MOD35_L2 cloud-mask granule.

    Parameters
    ----------
    fname : str
        File name, resolved relative to the module-level HDFDIR.
    geoloc_only : bool
        When true, only the geolocation arrays are read.

    Returns
    -------
    (lon, lat, cloud, land, coast) arrays, or (lon, lat) when
    *geoloc_only* is true.  cloud/land/coast are 0/1 masks.
    """
    hdf_file = SD(HDFDIR + fname)
    if not geoloc_only:
        cloud_mask = hdf_file.select('Cloud_Mask').get()
    lon = hdf_file.select('Longitude').get()
    lat = hdf_file.select('Latitude').get()
    hdf_file.end()
    if not geoloc_only:
        cld_msk = uint8(cloud_mask[0])
        cloud = cld_msk & 6    # cloud-confidence bits: 0, 2, 4, 6
        land = cld_msk & 192   # surface-type bits: 0, 64, 128, 192
        cloud[cloud == 0] = 1  # 0 -> cloud
        cloud[cloud != 1] = 0  # 2, 4, 6 -> no cloud
        # BUG FIX: the original did `coast = land`, which aliases the same
        # numpy array; the coast relabelling below then clobbered `land`,
        # so the returned land mask was identical to the coast mask.
        coast = land.copy()
        coast[coast == 64] = 1  # 64 -> coast
        coast[coast != 1] = 0   # 0, 128, 192 -> not coast
        land[land != 0] = 1     # 64, 128, 192 -> land; 0 -> water
        return lon, lat, cloud, land, coast
    return lon, lat
示例5: load_standard_lfm_hdf
# 需要导入模块: from pyhdf.SD import SD [as 别名]
# 或者: from pyhdf.SD.SD import select [as 别名]
def load_standard_lfm_hdf(filename):
    """Load the standard formatted hdf which we want to emulate.

    Opens the file, inspects the grid shape and prints the point counts.
    NOTE(review): nothing is returned; callers appear to use this only for
    its printed shape report — confirm before extending.
    """
    f = SD(filename, SDC.READ)
    X_grid = f.select('X_grid')
    Y_grid = f.select('Y_grid')
    Z_grid = f.select('Z_grid')
    # X_grid is stored as (nkp1, njp1, nip1): k varies slowest, i fastest.
    (nkp1, njp1, nip1) = X_grid[:].shape
    # The LFM reader expects i to vary fastest, then j, then k, while the
    # pre-converted LFM files store positions with k varying fastest
    # (column-major).  Saving in column-major format is recommended; if it
    # fails, the order can be switched.
    # Parenthesised prints with a single %-formatted string produce the same
    # output as the original Python-2 print statements, but also run under
    # Python 3.
    print('standard nip1,njp1,nkp1 = %d %d %d' % (nip1, njp1, nkp1))
    ni = nip1 - 1
    nj = njp1 - 1
    nk = nkp1 - 1
    print('standard ni,nj,nk = %d %d %d' % (ni, nj, nk))
示例6: run
# 需要导入模块: from pyhdf.SD import SD [as 别名]
# 或者: from pyhdf.SD.SD import select [as 别名]
def run(FILE_NAME):
    """Plot the 'dHat' field of FILE_NAME on a cylindrical map and save it
    as a PNG next to the script.

    Reads through netCDF4 or pyhdf depending on the module-level
    USE_NETCDF4 flag; both paths leave the data unscaled and the scale /
    offset is applied once afterwards.
    """
    DATAFIELD_NAME = 'dHat'
    if USE_NETCDF4:
        from netCDF4 import Dataset
        nc = Dataset(FILE_NAME)
        var = nc.variables[DATAFIELD_NAME]
        # The field carries scale_factor and add_offset attributes but no
        # fill value, so switch off automatic scaling and do it manually.
        var.set_auto_maskandscale(False)
        data = nc.variables[DATAFIELD_NAME][:].astype(np.float64)
        scale_factor = var.scale_factor
        add_offset = var.add_offset
        # Geolocation is packed as one (y, x, 2) dataset: [...,0]=lat, [...,1]=lon.
        latitude = nc.variables['geolocation'][:, :, 0]
        longitude = nc.variables['geolocation'][:, :, 1]
    else:
        from pyhdf.SD import SD, SDC
        hdf = SD(FILE_NAME, SDC.READ)
        ds = hdf.select(DATAFIELD_NAME)
        data = ds[:, :].astype(np.double)
        # attributes(full=1) entries are tuples; element 0 is the value.
        attrs = ds.attributes(full=1)
        scale_factor = attrs["scale_factor"][0]
        add_offset = attrs["add_offset"][0]
        geo = hdf.select('geolocation')
        latitude = geo[:, :, 0]
        longitude = geo[:, :, 1]
    # NOTE(review): this product divides by scale_factor (not the usual
    # multiply) — kept exactly as in the original.
    data = data / scale_factor + add_offset
    # Equidistant cylindrical projection with high-resolution coastlines.
    m = Basemap(projection='cyl', resolution='h',
                llcrnrlat=30, urcrnrlat=36,
                llcrnrlon=121, urcrnrlon=133)
    m.drawcoastlines(linewidth=0.5)
    m.drawparallels(np.arange(30, 37), labels=[1, 0, 0, 0])
    m.drawmeridians(np.arange(121, 133, 2), labels=[0, 0, 0, 1])
    m.pcolormesh(longitude, latitude, data, latlon=True)
    cb = m.colorbar()
    cb.set_label('Unit:mm')
    basename = os.path.basename(FILE_NAME)
    plt.title('{0}\n{1}'.format(basename, DATAFIELD_NAME))
    fig = plt.gcf()
    pngfile = "{0}.py.png".format(basename)
    fig.savefig(pngfile)
示例7: test_1000m_to_250m
# 需要导入模块: from pyhdf.SD import SD [as 别名]
# 或者: from pyhdf.SD.SD import select [as 别名]
def test_1000m_to_250m(self):
    """Test the 1 km to 250 meter interpolation facility.

    Skips quietly when the reference MOD03 granule is not readable.
    """
    gfilename = "/local_disk/src/python-geotiepoints/tests/MOD03_A12278_113638_2012278145123.hdf"
    result_filename = "/local_disk/src/python-geotiepoints/tests/250m_lonlat_results.npz"
    from pyhdf.SD import SD
    from pyhdf.error import HDF4Error
    try:
        gdata = SD(gfilename)
    except HDF4Error:
        print("Failed reading eos-hdf file %s" % gfilename)
        return
    lats = gdata.select("Latitude")[0:50, :]
    lons = gdata.select("Longitude")[0:50, :]
    verif = np.load(result_filename)
    vlons = verif['lons']
    vlats = verif['lats']
    tlons, tlats = modis1kmto250m(lons, lats)
    # assertTrue replaces the deprecated assert_ alias.
    self.assertTrue(np.allclose(tlons, vlons, atol=0.05))
    self.assertTrue(np.allclose(tlats, vlats, atol=0.05))
示例8: setup_grid
# 需要导入模块: from pyhdf.SD import SD [as 别名]
# 或者: from pyhdf.SD.SD import select [as 别名]
def setup_grid(self):
    """Set up the latitude/longitude grid for this reader.

    Downloads the grid file into ``self.datadir`` on first use, then
    caches the Latitude/Longitude arrays on the instance.
    """
    gridpath = self.datadir + self.gridfile
    if not os.path.isfile(gridpath):
        # NOTE(review): urllib.urlretrieve is the Python-2 API; under
        # Python 3 this would be urllib.request.urlretrieve — confirm the
        # target interpreter before porting.
        urllib.urlretrieve(self.dataurl + self.gridfile, gridpath)
    g = SD(gridpath, SDC.READ)
    self.llat = g.select('Latitude')[:]
    self.llon = g.select('Longitude')[:]
示例9: run
# 需要导入模块: from pyhdf.SD import SD [as 别名]
# 或者: from pyhdf.SD.SD import select [as 别名]
def run(FILE_NAME):
    """Plot 'Temperature_MW_A' at pressure level 11 from FILE_NAME on a
    global cylindrical map and save the figure as a PNG."""
    DATAFIELD_NAME = 'Temperature_MW_A'
    if USE_NETCDF4:
        from netCDF4 import Dataset
        nc = Dataset(FILE_NAME)
        # The variable has a fill value, so netCDF4 hands back a masked
        # float64 array directly.
        data = nc.variables[DATAFIELD_NAME][11, :, :]
        latitude = nc.variables['Latitude'][:]
        longitude = nc.variables['Longitude'][:]
    else:
        from pyhdf.SD import SD, SDC
        hdf = SD(FILE_NAME, SDC.READ)
        # hdf.datasets() lists the available SDS datasets if needed.
        data3D = hdf.select(DATAFIELD_NAME)
        data = data3D[11, :, :]
        latitude = hdf.select('Latitude')[:, :]
        longitude = hdf.select('Longitude')[:, :]
        # Mask the fill value by hand; attributes(full=1) entries are
        # tuples whose element 0 is the attribute value.
        fv = data3D.attributes(full=1)["_FillValue"][0]
        data[data == fv] = np.nan
        data = np.ma.masked_array(data, np.isnan(data))
    # Equidistant cylindrical projection, low-resolution coastlines.
    m = Basemap(projection='cyl', resolution='l',
                llcrnrlat=-90, urcrnrlat=90,
                llcrnrlon=-180, urcrnrlon=180)
    m.drawcoastlines(linewidth=0.5)
    m.drawparallels(np.arange(-90., 120., 30.), labels=[1, 0, 0, 0])
    m.drawmeridians(np.arange(-180., 181., 45.), labels=[0, 0, 0, 1])
    m.pcolormesh(longitude, latitude, data, latlon=True, alpha=0.90)
    cb = m.colorbar()
    cb.set_label('Unit:K')
    basename = os.path.basename(FILE_NAME)
    plt.title('{0}\n {1} at TempPrsLvls=11'.format(basename, DATAFIELD_NAME))
    fig = plt.gcf()
    pngfile = "{0}.{1}.py.png".format(basename, DATAFIELD_NAME)
    fig.savefig(pngfile)
示例10: run
# 需要导入模块: from pyhdf.SD import SD [as 别名]
# 或者: from pyhdf.SD.SD import select [as 别名]
def run(FILE_NAME):
    """Plot a subsampled 'SurfaceTemperature' field on a north-polar
    Lambert azimuthal equal-area map and save it as a PNG."""
    DATAFIELD_NAME = 'SurfaceTemperature'
    # The full dataset is 6144 x 6400.  Subsample by 6 in each direction to
    # keep it around 1K x 1K; otherwise the plot skips some regions.
    rows = slice(0, 6144, 6)
    cols = slice(0, 6400, 6)
    if USE_NETCDF4:
        from netCDF4 import Dataset
        nc = Dataset(FILE_NAME)
        data = nc.variables[DATAFIELD_NAME][rows, cols]
        latitude = nc.variables['Latitude'][rows, cols]
        longitude = nc.variables['Longitude'][rows, cols]
    else:
        from pyhdf.SD import SD, SDC
        hdf = SD(FILE_NAME, SDC.READ)
        data = hdf.select(DATAFIELD_NAME)[rows, cols]
        latitude = hdf.select('Latitude')[rows, cols]
        longitude = hdf.select('Longitude')[rows, cols]
    # There is no fill-value attribute; by inspection the valid minimum is
    # zero, so treat negative values as fill.
    data[data < 0] = np.nan
    data = np.ma.masked_array(data, np.isnan(data))
    # North-polar Lambert azimuthal equal-area projection.
    m = Basemap(projection='nplaea', resolution='l',
                boundinglat=60, lon_0=43)
    m.drawcoastlines(linewidth=0.5)
    m.drawparallels(np.arange(50, 90, 10), labels=[1, 0, 0, 1])
    m.drawmeridians(np.arange(-180, 180, 30))
    x, y = m(longitude, latitude)
    m.pcolormesh(x, y, data)
    cb = m.colorbar()
    cb.set_label('Unknown')
    basename = os.path.basename(FILE_NAME)
    plt.title('{0}\n{1}'.format(basename, DATAFIELD_NAME))
    fig = plt.gcf()
    pngfile = "{0}.py.png".format(basename)
    fig.savefig(pngfile)
示例11: run
# 需要导入模块: from pyhdf.SD import SD [as 别名]
# 或者: from pyhdf.SD.SD import select [as 别名]
def run(FILE_NAME):
    """Plot the 'bsst' sea-surface temperature (subsampled 8x) on a global
    cylindrical map and save it as a PNG."""
    # Identify the data field.
    DATAFIELD_NAME = 'bsst'
    if USE_NETCDF4:
        from netCDF4 import Dataset
        nc = Dataset(FILE_NAME)
        # Handle scaling ourselves: the offset attribute has the
        # non-standard name 'add_off', which defeats automatic scaling.
        var = nc.variables[DATAFIELD_NAME]
        var.set_auto_maskandscale(False)
        lat = nc.variables['lat']
        lon = nc.variables['lon']
    else:
        from pyhdf.SD import SD, SDC
        hdf = SD(FILE_NAME, SDC.READ)
        var = hdf.select(DATAFIELD_NAME)
        lat = hdf.select('lat')
        lon = hdf.select('lon')
    # Subsample to match the size of the swath geolocation fields.
    latitude = lat[::8]
    longitude = lon[::8]
    data = var[::8, ::8].astype(np.float64)
    # By inspection the fill value is 0; flag it as NaN *before* scaling so
    # the NaNs propagate through the scale/offset arithmetic.
    data[data == 0] = np.nan
    data = data * var.scale_factor + var.add_off
    datam = np.ma.masked_array(data, mask=np.isnan(data))
    m = Basemap(projection='cyl', resolution='l',
                llcrnrlat=-90, urcrnrlat=90,
                llcrnrlon=-180, urcrnrlon=180)
    m.drawcoastlines(linewidth=0.5)
    m.drawparallels(np.arange(-90, 91, 45))
    m.drawmeridians(np.arange(-180, 180, 45), labels=[True, False, False, True])
    m.pcolormesh(longitude, latitude, datam, latlon=True)
    cax = plt.axes([0.92, 0.3, 0.01, 0.4])
    cb = plt.colorbar(cax=cax)
    units = 'degrees-C'
    cb.set_label(units)
    basename = os.path.basename(FILE_NAME)
    fig = plt.gcf()
    long_name = 'Sea Surface Temperature ('+DATAFIELD_NAME+')'
    fig.suptitle('{0}\n{1}'.format(basename, long_name))
    pngfile = "{0}.py.png".format(basename)
    fig.savefig(pngfile)
示例12: run
# 需要导入模块: from pyhdf.SD import SD [as 别名]
# 或者: from pyhdf.SD.SD import select [as 别名]
def run(FILE_NAME):
    """Plot AIRS 'radiances' at channel 567 on a south-polar stereographic
    map and save it as a PNG."""
    # Identify the HDF-EOS2 swath data field.
    DATAFIELD_NAME = 'radiances'
    if USE_NETCDF4:
        from netCDF4 import Dataset
        nc = Dataset(FILE_NAME)
        data = nc.variables['radiances'][:, :, 567]
        latitude = nc.variables['Latitude'][:]
        longitude = nc.variables['Longitude'][:]
    else:
        from pyhdf.SD import SD, SDC
        hdf = SD(FILE_NAME, SDC.READ)
        data = hdf.select(DATAFIELD_NAME)[:, :, 567]
        latitude = hdf.select('Latitude')[:, :]
        longitude = hdf.select('Longitude')[:, :]
    # -9999 marks fill: replace with NaN, then mask.
    data[data == -9999] = np.nan
    datam = np.ma.masked_array(data, np.isnan(data))
    # South-polar stereographic projection, low-resolution coastlines.
    m = Basemap(projection='spstere', resolution='l',
                boundinglat=-65, lon_0=180)
    m.drawcoastlines(linewidth=0.5)
    m.drawparallels(np.arange(-80., -50., 5.))
    m.drawmeridians(np.arange(-180., 181., 20.), labels=[1, 0, 0, 1])
    x, y = m(longitude, latitude)
    m.pcolormesh(x, y, datam)
    # See page 101 of "AIRS Version 5.0 Released Files Description" for the
    # unit specification.
    units = 'mW/m**2/cm**-1/sr'
    cb = m.colorbar()
    cb.set_label('Unit:' + units)
    basename = os.path.basename(FILE_NAME)
    plt.title('{0}\n {1} at channel=567'.format(basename, DATAFIELD_NAME))
    fig = plt.gcf()
    pngfile = "{0}.py.png".format(basename)
    fig.savefig(pngfile)
示例13: get_lat_lon_modis
# 需要导入模块: from pyhdf.SD import SD [as 别名]
# 或者: from pyhdf.SD.SD import select [as 别名]
def get_lat_lon_modis(satscene, options):
    """Read lat and lon.

    Locates the best geolocation file for *satscene*, masks fill values,
    and interpolates from the coarse resolution to the requested one when
    they differ.  Returns (lat, lon) masked arrays.
    """
    filename_tmpl = satscene.time_slot.strftime(options["geofile"])
    file_list = glob.glob(os.path.join(options["dir"], filename_tmpl))
    if not file_list:
        # Fall back to the directory holding the data file itself.
        data_dir = os.path.split(options["filename"])[0]
        file_list = glob.glob(os.path.join(data_dir, filename_tmpl))
    if len(file_list) > 1:
        logger.warning("More than 1 geolocation file matching!")
        # Prefer the most recently modified candidate.
        filename = max(file_list, key=lambda x: os.stat(x).st_mtime)
        coarse_resolution = 1000
    elif not file_list:
        logger.warning("No geolocation file matching " + filename_tmpl
                       + " in " + options["dir"])
        logger.debug("Using 5km geolocation and interpolating")
        filename = options["filename"]
        coarse_resolution = 5000
    else:
        filename = file_list[0]
        coarse_resolution = 1000
    logger.debug("Loading geolocation file: " + str(filename)
                 + " at resolution " + str(coarse_resolution))
    resolution = options["resolution"]
    data = SD(str(filename))
    lat_sds = data.select("Latitude")
    lat = np.ma.masked_equal(lat_sds.get(), lat_sds.attributes()["_FillValue"])
    lon_sds = data.select("Longitude")
    lon = np.ma.masked_equal(lon_sds.get(), lon_sds.attributes()["_FillValue"])
    if resolution == coarse_resolution:
        return lat, lon
    cores = options["cores"]
    from geotiepoints import modis5kmto1km, modis1kmto500m, modis1kmto250m
    logger.debug("Interpolating from " + str(coarse_resolution)
                 + " to " + str(resolution))
    if coarse_resolution == 5000:
        lon, lat = modis5kmto1km(lon, lat)
    if resolution == 500:
        lon, lat = modis1kmto500m(lon, lat, cores)
    if resolution == 250:
        lon, lat = modis1kmto250m(lon, lat, cores)
    return lat, lon
示例14: read_var
# 需要导入模块: from pyhdf.SD import SD [as 别名]
# 或者: from pyhdf.SD.SD import select [as 别名]
def read_var(fname, varname, normalized=False):
    """Read a MAS variable and its (phi, theta, r) coordinate axes.

    Returns (phi, theta, r, var); when *normalized* is false the radius and
    variable are converted to physical units via the module-level
    ``mas_units`` table.
    """
    hdf = SD(fname, SDC.READ)
    phi = hdf.select('fakeDim0')[:]
    theta = hdf.select('fakeDim1')[:]
    r = hdf.select('fakeDim2')[:]
    var = hdf.select('Data-Set-2')[:]
    hdf.end()
    if normalized:
        # Code units, exactly as stored in the file.
        return (phi, theta, r, var)
    return (phi, theta, r * mas_units['length'], var * mas_units[varname])
示例15: load_hdf_spec
# 需要导入模块: from pyhdf.SD import SD [as 别名]
# 或者: from pyhdf.SD.SD import select [as 别名]
def load_hdf_spec(filename, ix, iy, data_name='MCD43GF_CMG'):
    """
    Purpose:
        Simple hdf loader for MCD43GF files; reads only the requested
        index window instead of the whole dataset.
    Input:
        filename: full path of the hdf file to load
        ix: scalar index, or contiguous array of indices, in the x direction
        iy: scalar index, or contiguous array of indices, in the y direction
    Output:
        dat: np.array of the requested data, scaled by 1/1000 with the
             fill value (32767) replaced by NaN
    Keywords:
        data_name: (defaults to MCD43GF_CMG) name of the dataset to load
    Dependencies:
        numpy, pyhdf
    Required files:
        filename
    Example:
        >>b = load_hdf_spec(fp+'MCD43GF_geo_shortwave_193_2007.hdf',[200,201,202],[503,504,505,506])
        >>b.shape
        (3, 4)
    Modification History:
        Written (v1.0): Samuel LeBlanc, 2017-03-22, Santa Cruz, CA
    """
    import numpy as np
    from pyhdf.SD import SD, SDC
    hdf = SD(filename, SDC.READ)
    if hasattr(ix, '__len__'):
        # The read below uses start/count, so the index arrays must be
        # contiguous runs (no gaps).
        if (len(ix) - 1) < (ix[-1] - ix[0]):
            raise ValueError('ix is not a contiguous array')
        if (len(iy) - 1) < (iy[-1] - iy[0]):
            raise ValueError('iy is not a contiguous array')
        dat = hdf.select(data_name).get(start=(ix[0], iy[0]),
                                        count=(ix[-1] - ix[0] + 1,
                                               iy[-1] - iy[0] + 1))
    else:
        # Scalar indices: read a single cell.
        dat = hdf.select(data_name).get(start=(ix, iy), count=(1, 1))
    dat = dat.astype(float)
    dat[dat == 32767] = np.nan  # 32767 is the fill value
    dat = dat / 1000.0
    return dat