本文整理汇总了Python中nansat.vrt.VRT.__init__方法的典型用法代码示例。如果您正苦于以下问题:Python VRT.__init__方法的具体用法?Python VRT.__init__怎么用?Python VRT.__init__使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类nansat.vrt.VRT
的用法示例。
在下文中一共展示了VRT.__init__方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: __init__
# 需要导入模块: from nansat.vrt import VRT [as 别名]
# 或者: from nansat.vrt.VRT import __init__ [as 别名]
def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs):
    ''' Create NCEP VRT

    Wraps a 2-band NCEP GRIB wind file: the two raw bands (eastward and
    northward wind) are exposed directly, and two pixel-function bands
    (wind speed and wind-from direction) are derived from them.

    Raises
    ------
    WrongMapperError
        If the dataset is missing, its geotransform is not the expected
        global 0.5-degree NCEP grid, or it does not have exactly 2 bands.
    '''
    if not gdalDataset:
        raise WrongMapperError
    geotransform = gdalDataset.GetGeoTransform()
    # Fingerprint the file by exact geotransform + band count
    if (geotransform != (-0.25, 0.5, 0.0, 90.25, 0.0, -0.5) or
            gdalDataset.RasterCount != 2):  # Not water proof
        raise WrongMapperError
    # NOTE(review): the derived speed/direction bands declare height
    # '2 m' while the source U/V bands declare '10 m' -- looks
    # inconsistent; confirm intended measurement height.
    metaDict = [{'src': {'SourceFilename': fileName,
                         'SourceBand': 1},
                 'dst': {'wkv': 'eastward_wind',
                         'height': '10 m'}},
                {'src': {'SourceFilename': fileName,
                         'SourceBand': 2},
                 'dst': {'wkv': 'northward_wind',
                         'height': '10 m'}},
                {'src': [{'SourceFilename': fileName,
                          'SourceBand': 1,
                          'DataType': gdalDataset.GetRasterBand(1).DataType
                          },
                         {'SourceFilename': fileName,
                          'SourceBand': 2,
                          'DataType': gdalDataset.GetRasterBand(2).DataType
                          }],
                 'dst': {'wkv': 'wind_speed',
                         'PixelFunctionType': 'UVToMagnitude',
                         'name': 'windspeed',
                         'height': '2 m'
                         }},
                {'src': [{'SourceFilename': fileName,
                          'SourceBand': 1,
                          'DataType': gdalDataset.GetRasterBand(1).DataType
                          },
                         {'SourceFilename': fileName,
                          'SourceBand': 2,
                          'DataType': gdalDataset.GetRasterBand(2).DataType
                          }],
                 'dst': {'wkv': 'wind_from_direction',
                         'PixelFunctionType': 'UVToDirectionFrom',
                         'name': 'winddirection',
                         'height': '2 m'
                         }
                 }]
    # create empty VRT dataset with geolocation only
    VRT.__init__(self, gdalDataset)
    # add bands with metadata and corresponding values to the empty VRT
    self._create_bands(metaDict)
    # Adding valid time from the GRIB file to dataset:
    # GRIB_VALID_TIME looks like "  1262304000 sec UTC" -- the first
    # whitespace-separated token is a unix timestamp.
    validTime = gdalDataset.GetRasterBand(1).\
        GetMetadata()['GRIB_VALID_TIME']
    self._set_time(datetime.datetime.
                   utcfromtimestamp(int(validTime.strip().split(' ')[0])))
    return
示例2: __init__
# 需要导入模块: from nansat.vrt import VRT [as 别名]
# 或者: from nansat.vrt.VRT import __init__ [as 别名]
def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs):
    ''' Create VRT

    Template mapper: parses a "<keywordBase>:YYYYmmddHHMM"-style file
    name, locates a matching wind file in a local archive, opens it
    with a regular Nansat mapper and copies its VRT dataset.

    NOTE(review): this is pseudo-code -- the "<.......>" placeholder
    below must be replaced with real archive-lookup logic, and
    keywordBase / localFolder must exist at module level; the function
    does not run as written.
    '''
    ##############
    # Get time
    ##############
    if fileName[0:len(keywordBase)] != keywordBase:
        raise AttributeError("Wrong mapper")
    timestr = fileName[len(keywordBase)+1::]
    time = datetime.strptime(timestr, '%Y%m%d%H%M')
    ######################################################
    # Find windFileName corresponding to a Nansat-readable
    # file in your local (or remote) file archive
    ######################################################
    windFileName = localFolder + <.......>
    ######################################################
    # Open file with any other Nansat mapper
    ######################################################
    w = Nansat(windFileName)
    VRT.__init__(self, vrtDataset=w.vrt.dataset)
    return
示例3: __init__
# 需要导入模块: from nansat.vrt import VRT [as 别名]
# 或者: from nansat.vrt.VRT import __init__ [as 别名]
def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs):
    '''Initialize a VRT for GTOPO30 global 30 arc-second elevation data
    (see https://lta.cr.usgs.gov/GTOPO30).

    Parameters:
    -----------
    fileName : string
        Either the name of a gtopo30 DEM file, or <path>/gtopo30.vrt. The
        latter is an aggregation of the DEM-files available with gtopo30
        except the Antarctic one, which is in polarstereographic
        projection. You can create your own gtopo30.vrt file with gdal:
        > gdalbuildvrt gtopo30.vrt [E,W]*.DEM
    '''
    baseName = os.path.basename(fileName)
    # Accept only the aggregated VRT or an individual .DEM tile
    if baseName != 'gtopo30.vrt' and os.path.splitext(baseName)[1] != '.DEM':
        raise WrongMapperError
    # Start from an empty VRT carrying only the source georeference
    VRT.__init__(self, gdalDataset)
    # Single elevation band taken straight from the source file
    self._create_bands([{'src': {'SourceFilename': fileName,
                                 'SourceBand': 1},
                         'dst': {'wkv': 'height_above_reference_ellipsoid'}}])
示例4: __init__
# 需要导入模块: from nansat.vrt import VRT [as 别名]
# 或者: from nansat.vrt.VRT import __init__ [as 别名]
def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs):
    '''Create a VRT for daily AMSR-E Arctic lead-area-fraction files.

    The mapper applies only when the metadata carries the exact
    expected "title" value; anything else raises WrongMapperError.
    '''
    if not gdalMetadata:
        raise WrongMapperError
    expectedTitle = "Daily AMSR-E Arctic lead area fraction [in percent]"
    titleFound = False
    for key, val in gdalMetadata.iteritems():
        if "title" not in key:
            continue
        if val != expectedTitle:
            raise WrongMapperError
        titleFound = True
    if not titleFound:
        raise WrongMapperError
    # initiate VRT for the NSIDC 10 km grid (polar stereographic, EPSG:3411)
    VRT.__init__(
        self,
        srcGeoTransform=(-3850000, 6250, 0.0, 5850000, 0.0, -6250),
        srcProjection=NSR(3411).wkt,
        srcRasterXSize=1216,
        srcRasterYSize=1792,
    )
    # single band: lead fraction from the NetCDF "lf" variable
    leadSrc = {"SourceFilename": 'NETCDF:"%s":lf' % fileName,
               "SourceBand": 1}
    leadDst = {"name": "leadFraction",
               "long_name": "AMSRE sea ice lead fraction"}
    self._create_band(leadSrc, leadDst)
    self.dataset.FlushCache()
示例5: __init__
# 需要导入模块: from nansat.vrt import VRT [as 别名]
# 或者: from nansat.vrt.VRT import __init__ [as 别名]
def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs):
    ''' Create LANDSAT VRT

    Opens a LANDSAT .tar/.tar.gz/.tgz archive, exposes every L*/M*
    *.TIF member as a TOA spectral radiance band through GDAL's
    /vsitar virtual file system, and shrinks any band whose raster
    size differs from band 1 (e.g. the double-size panchromatic band 8
    of LANDSAT8) down to the common size.
    '''
    # try to open .tar or .tar.gz or .tgz file with tar
    try:
        tarFile = tarfile.open(fileName)
    except:
        # Not a tar archive -> this mapper does not apply.
        # NOTE(review): bare except also swallows KeyboardInterrupt;
        # consider narrowing to (tarfile.TarError, IOError).
        raise WrongMapperError
    tarNames = tarFile.getnames()
    #print tarNames
    metaDict = []
    for tarName in tarNames:
        # band members are named like L..B<NN>.TIF / M..B<NN>.tif;
        # the two chars before the extension are the band number
        if ((tarName[0] == 'L' or tarName[0] == 'M') and
                (tarName[-4:] == '.TIF' or tarName[-4:] == '.tif')):
            #print tarName
            bandNo = tarName[-6:-4]
            metaDict.append({
                'src': {'SourceFilename': '/vsitar/%s/%s' % (fileName,
                                                             tarName),
                        'SourceBand': 1},
                'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                        'suffix': bandNo}})
    if not metaDict:
        raise WrongMapperError
    #print metaDict
    # collect indices of bands whose raster size differs from band 1
    sizeDiffBands = []
    for iFile in range(len(metaDict)):
        tmpName = metaDict[iFile]['src']['SourceFilename']
        gdalDatasetTmp = gdal.Open(tmpName)
        if iFile == 0:
            # the first band defines the reference dataset and size
            gdalDatasetTmp0 = gdalDatasetTmp
            xSize = gdalDatasetTmp.RasterXSize
            ySize = gdalDatasetTmp.RasterYSize
        elif (xSize != gdalDatasetTmp.RasterXSize or
                ySize != gdalDatasetTmp.RasterYSize):
            sizeDiffBands.append(iFile)
    # create empty VRT dataset with geolocation only
    VRT.__init__(self, gdalDatasetTmp0)
    # add bands with metadata and corresponding values to the empty VRT
    self._create_bands(metaDict)
    # 8th band of LANDSAT8 is a double size band.
    # Reduce the size to same as the 1st band.
    if len(sizeDiffBands) != 0:
        vrtXML = self.read_xml()
        node0 = Node.create(vrtXML)
        for iBand in sizeDiffBands:
            # shrink the destination rectangle of the oversized band
            # so it is resampled onto the common raster size
            iBandNode = node0.nodeList('VRTRasterBand')[iBand]
            iNodeDstRect = iBandNode.node('DstRect')
            iNodeDstRect.replaceAttribute('xSize', str(xSize))
            iNodeDstRect.replaceAttribute('ySize', str(ySize))
        self.write_xml(node0.rawxml())
示例6: __init__
# 需要导入模块: from nansat.vrt import VRT [as 别名]
# 或者: from nansat.vrt.VRT import __init__ [as 别名]
def __init__(self, inputFileName, gdalDataset, gdalMetadata, logLevel=30,
             **kwargs):
    '''Create a VRT for a multibeam bathymetry NetCDF (.mnt) file.

    Reads the grid geometry from NC_GLOBAL attributes, locates the
    DEPTH subdataset and exposes it as a single scaled 'depth' band.

    Raises
    ------
    WrongMapperError
        If metadata is missing, the extension is not '.mnt', required
        global attributes are absent or malformed, or no DEPTH
        subdataset with scale/offset is found.
    '''
    # check if mapper fits
    if not gdalMetadata:
        raise WrongMapperError
    if not os.path.splitext(inputFileName)[1] == '.mnt':
        raise WrongMapperError
    try:
        mbNorthLatitude = float(gdalMetadata['NC_GLOBAL#mbNorthLatitude'])
        mbSouthLatitude = float(gdalMetadata['NC_GLOBAL#mbSouthLatitude'])
        mbEastLongitude = float(gdalMetadata['NC_GLOBAL#mbEastLongitude'])
        mbWestLongitude = float(gdalMetadata['NC_GLOBAL#mbWestLongitude'])
        mbProj4String = gdalMetadata['NC_GLOBAL#mbProj4String']
        Number_lines = int(gdalMetadata['NC_GLOBAL#Number_lines'])
        Number_columns = int(gdalMetadata['NC_GLOBAL#Number_columns'])
        Element_x_size = float(gdalMetadata['NC_GLOBAL#Element_x_size'])
        Element_y_size = float(gdalMetadata['NC_GLOBAL#Element_y_size'])
    except (KeyError, ValueError):
        # FIX: narrowed from a bare "except:", which would also swallow
        # KeyboardInterrupt/SystemExit. Missing or malformed attributes
        # simply mean the file is not ours.
        raise WrongMapperError
    # find subdataset with DEPTH
    subDatasets = gdalDataset.GetSubDatasets()
    dSourceFile = None
    for subDataset in subDatasets:
        if subDataset[0].endswith('.mnt":DEPTH'):
            dSourceFile = subDataset[0]
    if dSourceFile is None:
        raise WrongMapperError
    dSubDataset = gdal.Open(dSourceFile)
    dMetadata = dSubDataset.GetMetadata()
    try:
        scale_factor = dMetadata['DEPTH#scale_factor']
        add_offset = dMetadata['DEPTH#add_offset']
    except KeyError:
        # FIX: narrowed from a bare "except:" (only a missing key can
        # occur here)
        raise WrongMapperError
    # north-up grid anchored at the north-west corner
    geoTransform = [mbWestLongitude, Element_x_size, 0,
                    mbNorthLatitude, 0, -Element_y_size]
    # create empty VRT dataset with geolocation only
    VRT.__init__(self, srcGeoTransform=geoTransform,
                 srcMetadata=gdalMetadata,
                 srcProjection=NSR(mbProj4String).wkt,
                 srcRasterXSize=Number_columns,
                 srcRasterYSize=Number_lines)
    metaDict = [{'src': {'SourceFilename': dSourceFile,
                         'SourceBand': 1,
                         'ScaleRatio' : scale_factor,
                         'ScaleOffset' : add_offset},
                 'dst': {'wkv': 'depth'}}]
    # add bands with metadata and corresponding values to the empty VRT
    self._create_bands(metaDict)
示例7: __init__
# 需要导入模块: from nansat.vrt import VRT [as 别名]
# 或者: from nansat.vrt.VRT import __init__ [as 别名]
def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs):
    ''' Create MODIS_L1 VRT

    Mapper for GOCI Level-1B HDF files: builds the GOCI orthographic
    projection from scene-header metadata and adds the 8 visible/NIR
    TOA radiance bands from the subdatasets.

    Raises
    ------
    WrongMapperError
        If metadata is missing or the scene title is not GOCI L1B.
    '''
    # raise error in case of not GOCI L1B
    try:
        title = gdalMetadata['HDFEOS_POINTS_Scene_Header_Scene_Title']
    except (TypeError, KeyError):
        raise WrongMapperError
    if not title == 'GOCI Level-1B Data':
        raise WrongMapperError
    # set GOCI projection parameters
    lat_0 = gdalMetadata['HDFEOS_POINTS_Map_Projection_Central_Latitude_(parallel)']
    lon_0 = gdalMetadata['HDFEOS_POINTS_Map_Projection_Central_Longitude_(meridian)']
    rasterXSize = int(gdalMetadata['HDFEOS_POINTS_Scene_Header_number_of_columns'].split(' ')[0])
    rasterYSize = int(gdalMetadata['HDFEOS_POINTS_Scene_Header_number_of_rows'].split(' ')[0])
    # BUGFIX: "units=m" lacked the "+" prefix required by proj-string
    # syntax, so the units parameter was not parsed
    proj4 = '+proj=ortho +lat_0=%s +lon_0=%s +units=m +ellps=WGS84 +datum=WGS84 +no_defs' % (lat_0, lon_0)
    srs = osr.SpatialReference()
    srs.ImportFromProj4(proj4)
    projection = srs.ExportToWkt()
    geoTransform = (-1391500.0, 500.0, 0.0, 1349500.0, 0.0, -500.0)
    # create empty VRT dataset with georeference only
    VRT.__init__(self, srcGeoTransform=geoTransform,
                 srcProjection=projection,
                 srcRasterXSize=rasterXSize,
                 srcRasterYSize=rasterYSize)
    # add bands from subdatasets
    subDatasets = gdalDataset.GetSubDatasets()
    metaDict = []
    # GOCI band centre wavelengths, nm
    wavelengths = ['412', '443', '490', '555', '660', '680', '745', '865']
    for subDSI in range(8):
        metaEntry = {'src': {'SourceFilename': subDatasets[subDSI][0],
                             'SourceBand': 1,
                             'ScaleRatio': 1e-6},
                     'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                             'wavelength': wavelengths[subDSI],
                             'suffix': wavelengths[subDSI]}}
        metaDict.append(metaEntry)
    # add bands with metadata and corresponding values to the empty VRT
    self._create_bands(metaDict)
示例8: __init__
# 需要导入模块: from nansat.vrt import VRT [as 别名]
# 或者: from nansat.vrt.VRT import __init__ [as 别名]
def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs):
    '''Create a VRT for the MOD44W (MODIS/Terra land-water mask) VRT file.

    Accepts only files named exactly 'MOD44W.vrt'; exposes one
    land_binary_mask band and attaches GCMD platform/instrument
    metadata.
    '''
    if os.path.basename(fileName) != 'MOD44W.vrt':
        raise WrongMapperError
    # create empty VRT dataset with geolocation only
    VRT.__init__(self, gdalDataset)
    # single land/water mask band taken directly from the source file
    self._create_bands([{'src': {'SourceFilename': fileName,
                                 'SourceBand': 1},
                         'dst': {'wkv': 'land_binary_mask'}}])
    # attach GCMD instrument/platform descriptors as JSON metadata
    instrument = pti.get_gcmd_instrument('MODIS')
    platform = pti.get_gcmd_platform('TERRA')
    self.dataset.SetMetadataItem('instrument', json.dumps(instrument))
    self.dataset.SetMetadataItem('platform', json.dumps(platform))
示例9: init_from_xml
# 需要导入模块: from nansat.vrt import VRT [as 别名]
# 或者: from nansat.vrt.VRT import __init__ [as 别名]
def init_from_xml(self, productXml):
    '''Fast initialisation from the Radarsat-2 product XML alone.

    Reads raster dimensions, builds GCPs from the geolocation grid and
    attaches time-coverage and GCMD platform/instrument metadata.
    '''
    rasterAttributes = (productXml
                        .node('imageAttributes')
                        .node('rasterAttributes'))
    nLines = int(rasterAttributes.node('numberOfLines').value)
    nSamples = int(rasterAttributes.node('numberOfSamplesPerLine').value)
    VRT.__init__(self, srcRasterXSize=nSamples, srcRasterYSize=nLines)
    # GCPs from the geolocation grid tie points (pixel/line <-> lon/lat)
    geogrid = (productXml
               .node('imageAttributes')
               .node('geographicInformation')
               .node('geolocationGrid'))
    gcps = []
    for tiePoint in geogrid.children:
        imageCoord = tiePoint.node('imageCoordinate')
        geodeticCoord = tiePoint.node('geodeticCoordinate')
        gcps.append(gdal.GCP(float(geodeticCoord.node('longitude').value),
                             float(geodeticCoord.node('latitude').value),
                             0,
                             float(imageCoord.node('pixel').value),
                             float(imageCoord.node('line').value)))
    self.dataset.SetGCPs(gcps, NSR().wkt)
    # time coverage is spanned by the orbit state vector time stamps
    stateVectors = (productXml
                    .node('sourceAttributes')
                    .node('orbitAndAttitude')
                    .node('orbitInformation')
                    .nodeList('stateVector'))
    dates = [parse(sv.node('timeStamp').value) for sv in stateVectors]
    self.dataset.SetMetadataItem('time_coverage_start', min(dates).isoformat())
    self.dataset.SetMetadataItem('time_coverage_end', max(dates).isoformat())
    self.dataset.SetMetadataItem('platform', json.dumps(pti.get_gcmd_platform('radarsat-2')))
    self.dataset.SetMetadataItem('instrument', json.dumps(pti.get_gcmd_instrument('SAR')))
    self.dataset.SetMetadataItem('Entry Title', 'Radarsat-2 SAR')
    self.dataset.SetMetadataItem('Data Center', 'CSA')
    self.dataset.SetMetadataItem('ISO Topic Category', 'Oceans')
    self.dataset.SetMetadataItem('Summary', 'Radarsat-2 SAR data')
示例10: __init__
# 需要导入模块: from nansat.vrt import VRT [as 别名]
# 或者: from nansat.vrt.VRT import __init__ [as 别名]
def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs):
    ''' Create VRT

    Mapper for 3-band GeoTIFF files whose names start with '101_' or
    '102_': exposes the bands as TOA radiance at 555/655/800 nm.

    Raises
    ------
    WrongMapperError
        If the name prefix, driver, extension or band count is wrong.
    '''
    # BUGFIX: the original combined these tests with "or", which is
    # true for EVERY file name (a prefix always differs from at least
    # one of the two values), so the mapper unconditionally raised
    # WrongMapperError. "and" rejects only names matching neither.
    if (os.path.split(fileName)[1][0:4] != '101_' and
            os.path.split(fileName)[1][0:4] != '102_'):
        raise WrongMapperError
    try:
        product = gdalDataset.GetDriver().LongName
    except AttributeError:
        # FIX: narrowed from a bare "except:"; only a None/invalid
        # dataset can fail here
        raise WrongMapperError
    if (product != 'GeoTIFF' or fileName[-3:] != 'tif' or
            gdalDataset.RasterCount != 3):
        raise WrongMapperError
    metaDict = [{'src': {'SourceFilename': fileName, 'SourceBand': 1},
                 'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                         'wavelength': '555'}},
                {'src': {'SourceFilename': fileName, 'SourceBand': 2},
                 'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                         'wavelength': '655'}},
                {'src': {'SourceFilename': fileName, 'SourceBand': 3},
                 'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                         'wavelength': '800'}}
                ]
    # from https://gsics.nesdis.noaa.gov/wiki/Development/StandardVariableNames
    # add DataType into 'src' and name into 'dst'
    for bandDict in metaDict:
        if 'DataType' not in bandDict['src']:
            bandDict['src']['DataType'] = 2
        if 'wavelength' in bandDict['dst']:
            bandDict['dst']['name'] = ('toa_radiance_' +
                                       bandDict['dst']['wavelength'])
    # create empty VRT dataset with geolocation only
    VRT.__init__(self, gdalDataset)
    # add bands with metadata and corresponding values to the empty VRT
    self._create_bands(metaDict)
示例11: __init__
# 需要导入模块: from nansat.vrt import VRT [as 别名]
# 或者: from nansat.vrt.VRT import __init__ [as 别名]
def __init__(self, fileName, gdalDataset, gdalMetadata,
             cache='', lons=None, lats=None, **kwargs):
    ''' Create NCEP VRT

    Opens one SST CCI product via OPeNDAP for the requested date and
    spatial domain and exposes it as a single band.

    Parameters:
        fileName : str
            sstcci_online:analysed_sst:2010-01-01
            sstcci_online:analysis_error:2010-01-01
            sstcci_online:sea_ice_fraction:2010-01-01
            sstcci_online:sea_ice_fraction_error:2010-01-01
            sstcci_online:mask:2010-01-01
        cache : str or bool
            if str - name of the caching directory
            If False or None - no caching
        lons : list
            minimum and maximum values of longitude
        lats : list
            minimum and maximum values of latitude
    '''
    keywordBase = 'sstcci_online'
    if not fileName.startswith(keywordBase):
        raise WrongMapperError
    # create caching directory ('' means: use the current directory)
    if cache == '':
        cache = os.path.curdir
    if cache and not os.path.exists(cache):
        os.mkdir(cache)
    # Get prod name, e.g. 'analysed_sst'
    prodName = fileName.split(':')[1]
    # Get date
    iDate = parse(fileName.split(':')[2])
    # create dataset URL
    dsURL = iDate.strftime(self.SST_CCI_URL_FORMAT)
    # get lon, lat, time dimensions from the OC CCI Dataset
    self.lon, self.lat = self.get_lon_lat(cache, dsURL)
    # get rows and cols that contain predefined spatial domain
    self.rows, self.cols, lons, lats, geoTransform = self.get_rows_cols(lons, lats)
    # create VRT with correct lon/lat (geotransform)
    VRT.__init__(self, srcProjection=NSR().wkt,
                 srcRasterXSize=len(self.cols),
                 srcRasterYSize=len(self.rows),
                 srcGeoTransform=geoTransform)
    # Get SourceFilename either from memory array or from cached file
    sourceFilename = self.get_sourcefilename(cache, dsURL, iDate, prodName, lons, lats)
    metaDict = [{'src': {
                     'SourceFilename': sourceFilename,
                     'SourceBand': 1,
                     },
                 'dst': {
                     'name': prodName,
                     }
                 }]
    self._create_bands(metaDict)
    # set time
    self.dataset.SetMetadataItem('time_coverage_start', iDate.isoformat())
示例12: __init__
# 需要导入模块: from nansat.vrt import VRT [as 别名]
# 或者: from nansat.vrt.VRT import __init__ [as 别名]
def __init__(self, fileName, gdalDataset, gdalMetadata, latlonGrid=None,
mask='', **kwargs):
''' Create MER2 VRT
Parameters
-----------
fileName : string
gdalDataset : gdal dataset
gdalMetadata : gdal metadata
latlonGrid : numpy 2 layered 2D array with lat/lons of desired grid
'''
# test if input files is GLOBCOLOUR L3B
iDir, iFile = os.path.split(fileName)
iFileName, iFileExt = os.path.splitext(iFile)
#print 'idir:', iDir, iFile, iFileName[0:5], iFileExt[0:8]
if iFileName[0:4] != 'L3b_' or iFileExt != '.nc':
raise WrongMapperError
# define shape of GLOBCOLOUR grid
GLOBCOLOR_ROWS = 180 * 24
GLOBCOLOR_COLS = 360 * 24
# define lon/lat grids for projected var
if latlonGrid is None:
latlonGrid = np.mgrid[90:-90:4320j,
-180:180:8640j].astype('float32')
#latlonGrid = np.mgrid[80:50:900j, -10:30:1200j].astype('float16')
#latlonGrid = np.mgrid[47:39:300j, 25:45:500j].astype('float32')
# create empty VRT dataset with geolocation only
VRT.__init__(self, lon=latlonGrid[1], lat=latlonGrid[0])
# get list of similar (same date) files in the directory
simFilesMask = os.path.join(iDir, iFileName[0:30] + '*' + mask + '.nc')
simFiles = glob.glob(simFilesMask)
simFiles.sort()
metaDict = []
self.bandVRTs = {'mask': [], 'lonlat': []}
mask = None
for simFile in simFiles:
print 'sim: ', simFile
f = netcdf_file(simFile)
# get iBinned, index for converting from binned into GLOBCOLOR-grid
colBinned = f.variables['col'][:]
rowBinned = f.variables['row'][:]
iBinned = (colBinned.astype('uint32') +
(rowBinned.astype('uint32') - 1) * GLOBCOLOR_COLS)
colBinned = None
rowBinned = None
# get iRawPro, index for converting
# from GLOBCOLOR-grid to latlonGrid
yRawPro = np.rint(1 + (GLOBCOLOR_ROWS - 1) *
(latlonGrid[0] + 90) / 180)
lon_step_Mat = 1 / np.cos(np.pi * latlonGrid[0] / 180.) / 24.
xRawPro = np.rint(1 + (latlonGrid[1] + 180) / lon_step_Mat)
iRawPro = xRawPro + (yRawPro - 1) * GLOBCOLOR_COLS
iRawPro[iRawPro < 0] = 0
iRawPro = np.rint(iRawPro).astype('uint32')
yRawPro = None
xRawPro = None
for varName in f.variables:
# find variable with _mean, eg CHL1_mean
if '_mean' in varName:
var = f.variables[varName]
break
# skip variable if no WKV is give in Globcolour
if varName not in self.varname2wkv:
continue
# get WKV
varWKV = self.varname2wkv[varName]
# read binned data
varBinned = var[:]
# convert to GLOBCOLOR grid
varRawPro = np.zeros([GLOBCOLOR_ROWS, GLOBCOLOR_COLS], 'float32')
varRawPro.flat[iBinned] = varBinned
# convert to latlonGrid
varPro = varRawPro.flat[iRawPro.flat[:]].reshape(iRawPro.shape)
#plt.imshow(varPro);plt.colorbar();plt.show()
# add mask band
if mask is None:
mask = np.zeros(varPro.shape, 'uint8')
mask[:] = 1
mask[varPro > 0] = 64
# add VRT with array with data from projected variable
self.bandVRTs['mask'].append(VRT(array=mask))
# add metadata to the dictionary
metaDict.append({
#.........这里部分代码省略.........
示例13: __init__
# 需要导入模块: from nansat.vrt import VRT [as 别名]
# 或者: from nansat.vrt.VRT import __init__ [as 别名]
def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs):
    ''' Create VRT

    Maps met.no high-resolution Svalbard sea-ice concentration served
    over Thredds/OPeNDAP. Accepts either a direct OPeNDAP URL or the
    keyword form "metno_hires_seaice:YYYYmmdd"; for the keyword form
    the nearest available file within +/- 3 days is used.
    '''
    ThreddsBase = 'http://thredds.met.no/thredds/dodsC/myocean/siw-tac/siw-metno-svalbard/'
    # First check if mapper is called with keyword syntax:
    # filename = metno_hires_seaice:YYYYmmdd
    keywordBase = 'metno_hires_seaice'
    foundDataset = False
    if fileName[0:len(keywordBase)] == keywordBase:
        keywordTime = fileName[len(keywordBase)+1:]
        requestedTime = datetime.strptime(keywordTime, '%Y%m%d')
        # Search for nearest available file, within the closest 3 days
        for deltaDay in [0, -1, 1, -2, 2, -3, 3]:
            validTime = (requestedTime + timedelta(days=deltaDay) +
                         timedelta(hours=15))
            fileName = (ThreddsBase +
                        validTime.strftime(
                            '%Y/%m/ice_conc_svalbard_%Y%m%d1500.nc'))
            try:
                urllib2.urlopen(fileName + '.dds')
                foundDataset = True
                # Data is found for this day
                break
            except Exception:
                # FIX: narrowed from a bare "except:" (no longer hides
                # KeyboardInterrupt). No data for this day -- keep trying.
                pass
        if not foundDataset:
            raise WrongMapperError
    # Then check if a valid OPeNDAP URL is given
    # (or has been constructed from keyword)
    if fileName[0:len(ThreddsBase)] != ThreddsBase:
        # BUGFIX: the original constructed this exception without
        # raising it, so non-Thredds URLs fell through and later hit a
        # NameError on the undefined validTime.
        raise AttributeError("Not Met.no Svalbard-ice Thredds URL")
    else:
        timestr = fileName[-15:-3]
        validTime = datetime.strptime(timestr, '%Y%m%d%H%M')
    fileName = fileName + '?ice_concentration[0][y][x]'
    srcProjection = osr.SpatialReference()
    # BUGFIX: "lon_0" was missing its "+" prefix in the proj string
    srcProjection.ImportFromProj4('+proj=stere +lon_0=0.0 +lat_0=90 +datum=WGS84 +ellps=WGS84 +units=km +no_defs')
    srcProjection = srcProjection.ExportToWkt()
    # From thredds web, with manual shift
    srcGeotransform = (-1243.008 - 1, 1, 0, -3190.026 - 7, 0, 1)
    # create empty VRT dataset with geolocation only
    VRT.__init__(self,
                 srcGeoTransform=srcGeotransform,
                 srcProjection=srcProjection,
                 srcRasterXSize=3812,
                 srcRasterYSize=2980)
    # NOTE(review): key is 'sourceBand' (lower-case s) here while other
    # mappers use 'SourceBand' -- confirm nansat accepts both.
    metaDict = [{'src': {'SourceFilename': fileName,
                         'sourceBand': 1},
                 'dst': {'name': 'sea_ice_area_fraction',
                         'wkv': 'sea_ice_area_fraction'}}]
    # Add band
    self._create_bands(metaDict)
    # Set time
    self.logger.info('Valid time: %s', str(validTime))
    self._set_time(validTime)
示例14: __init__
# 需要导入模块: from nansat.vrt import VRT [as 别名]
# 或者: from nansat.vrt.VRT import __init__ [as 别名]
def __init__(self, fileName, gdalDataset, gdalMetadata,
GCP_COUNT=10, **kwargs):
''' Create VRT
Parameters
----------
GCP_COUNT : int
number of GCPs along each dimention
'''
# extension must be .nc
if os.path.splitext(fileName)[1] != '.nc':
raise WrongMapperError
# file must contain navigation_data/longitude
try:
ds = gdal.Open('HDF5:"%s"://navigation_data/longitude' % fileName)
except RuntimeError:
raise WrongMapperError
else:
dsMetadata = ds.GetMetadata()
# title value must be known
if dsMetadata.get('title', '') not in self.titles:
raise WrongMapperError
# get geophysical data variables
subDatasets = gdal.Open(fileName).GetSubDatasets()
metaDict = []
for subDataset in subDatasets:
groupName = subDataset[0].split('/')[-2]
if groupName not in ['geophysical_data', 'navigation_data']:
continue
varName = subDataset[0].split('/')[-1]
subds = gdal.Open(subDataset[0])
b = subds.GetRasterBand(1)
bMetadata = b.GetMetadata()
# set SRC/DST parameters
metaEntry = {'src': {'SourceFilename': subDataset[0],
'sourceBand': 1,
'DataType': b.DataType},
'dst': {'name': varName}}
# replace datatype for l2_flags
if varName == 'l2_flags':
metaEntry['src']['DataType'] = 4
metaEntry['src']['SourceType'] = 'SimpleSource'
# set scale if exist
metaKey = '%s_%s_scale_factor' % (groupName, varName)
if metaKey in bMetadata:
metaEntry['src']['ScaleRatio'] = bMetadata[metaKey]
# set offset if exist
metaKey = '%s_%s_add_offset' % (groupName, varName)
if metaKey in bMetadata:
metaEntry['src']['ScaleOffset'] = bMetadata[metaKey]
# set standard_name if exists
metaKey = '%s_%s_standard_name' % (groupName, varName)
if metaKey in bMetadata:
metaEntry['dst']['wkv'] = bMetadata[metaKey]
# set other metadata
for metaKey in bMetadata:
newMetaKey = metaKey.replace('%s_%s_' % (groupName, varName), '')
if newMetaKey not in ['scale_factor', 'add_offset', 'DIMENSION_LIST', '_FillValue']:
metaEntry['dst'][newMetaKey] = bMetadata[metaKey]
metaDict.append(metaEntry)
# make GCPs
# get lat/lon grids
longitude = gdal.Open('HDF5:"%s"://navigation_data/longitude' % fileName).ReadAsArray()
latitude = gdal.Open('HDF5:"%s"://navigation_data/latitude' % fileName).ReadAsArray()
rasterYSize, rasterXSize = longitude.shape
step0 = max(1, int(float(latitude.shape[0]) / GCP_COUNT))
step1 = max(1, int(float(latitude.shape[1]) / GCP_COUNT))
gcps = []
k = 0
center_lon = 0
center_lat = 0
for i0 in range(0, latitude.shape[0], step0):
for i1 in range(0, latitude.shape[1], step1):
# create GCP with X,Y,pixel,line from lat/lon matrices
lon = float(longitude[i0, i1])
lat = float(latitude[i0, i1])
if (lon >= -180 and lon <= 180 and lat >= -90 and lat <= 90):
gcp = gdal.GCP(lon, lat, 0, i1+0.5, i0+0.5)
gcps.append(gcp)
center_lon += lon
center_lat += lat
k += 1
time_coverage_start = dsMetadata['time_coverage_start']
time_coverage_end = dsMetadata['time_coverage_end']
# create VRT
#.........这里部分代码省略.........
示例15: __init__
# 需要导入模块: from nansat.vrt import VRT [as 别名]
# 或者: from nansat.vrt.VRT import __init__ [as 别名]
def __init__(self, fileName, gdalDataset, gdalMetadata,
             GCP_COUNT0=5, GCP_COUNT1=20, pixelStep=1,
             lineStep=1, **kwargs):
    ''' Create VIIRS VRT

    Builds the VRT on the GMTCO geolocation file, adds one TOA
    radiance band per SVM?? band file found next to it, and attaches
    GCPs sampled from gaussian-smoothed longitude/latitude grids.

    Parameters
    ----------
    GCP_COUNT0, GCP_COUNT1 : int
        number of GCPs along rows and columns of the lat/lon grids
    pixelStep, lineStep : int
        scaling from lat/lon grid indices to image pixel/line
    '''
    if not 'GMTCO_npp_' in fileName:
        raise WrongMapperError
    ifiledir = os.path.split(fileName)[0]
    # BUGFIX: the directory and the glob pattern were concatenated
    # without a path separator ('<dir>SVM??...'), so band files were
    # never found when fileName contained a directory component.
    ifiles = glob.glob(os.path.join(ifiledir, 'SVM??_npp_d*_obpg_ops.h5'))
    ifiles.sort()
    # band number -> centre wavelength, nm (index 0 unused)
    viirsWavelengths = [None, 412, 445, 488, 555, 672, 746, 865, 1240,
                        1378, 1610, 2250, 3700, 4050, 8550, 10736, 12013]
    # create empty VRT dataset with geolocation only
    xDatasetSource = ('HDF5:"%s"://All_Data/VIIRS-MOD-GEO-TC_All/Longitude'
                      % fileName)
    xDatasetBand = 1
    xDataset = gdal.Open(xDatasetSource)
    VRT.__init__(self, xDataset)
    metaDict = []
    for ifile in ifiles:
        ifilename = os.path.split(ifile)[1]
        # FIX: print statements made py2/py3-compatible function calls
        print(ifilename)
        bNumber = int(ifilename[3:5])
        print(bNumber)
        bWavelength = viirsWavelengths[bNumber]
        print(bWavelength)
        SourceFilename = ('HDF5:"%s"://All_Data/VIIRS-M%d-SDR_All/Radiance'
                          % (ifile, bNumber))
        print(SourceFilename)
        metaEntry = {'src': {'SourceFilename': SourceFilename,
                             'SourceBand': 1},
                     'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                             'wavelength': str(bWavelength),
                             'suffix': str(bWavelength)}
                     }
        metaDict.append(metaEntry)
    # add bands with metadata and corresponding values to the empty VRT
    self._create_bands(metaDict)
    # smooth the lon/lat grids to suppress geolocation noise before
    # sampling GCPs from them
    xVRTArray = xDataset.ReadAsArray()
    xVRTArray = gaussian_filter(xVRTArray, 5).astype('float32')
    xVRT = VRT(array=xVRTArray)
    yDatasetSource = ('HDF5:"%s"://All_Data/VIIRS-MOD-GEO-TC_All/Latitude'
                      % fileName)
    yDatasetBand = 1
    yDataset = gdal.Open(yDatasetSource)
    yVRTArray = yDataset.ReadAsArray()
    yVRTArray = gaussian_filter(yVRTArray, 5).astype('float32')
    yVRT = VRT(array=yVRTArray)
    # estimate pixel/line step
    self.logger.debug('pixel/lineStep %f %f' % (pixelStep, lineStep))
    # ==== ADD GCPs and Projection ====
    # get lat/lon matrices
    longitude = xVRT.dataset.GetRasterBand(1).ReadAsArray()
    latitude = yVRT.dataset.GetRasterBand(1).ReadAsArray()
    # estimate step of GCPs
    step0 = max(1, int(float(latitude.shape[0]) / GCP_COUNT0))
    step1 = max(1, int(float(latitude.shape[1]) / GCP_COUNT1))
    self.logger.debug('gcpCount: %d %d %d %d, %d %d',
                      latitude.shape[0], latitude.shape[1],
                      GCP_COUNT0, GCP_COUNT1, step0, step1)
    # generate list of GCPs, skipping out-of-range (fill) coordinates
    gcps = []
    k = 0
    for i0 in range(0, latitude.shape[0], step0):
        for i1 in range(0, latitude.shape[1], step1):
            # create GCP with X,Y,pixel,line from lat/lon matrices
            lon = float(longitude[i0, i1])
            lat = float(latitude[i0, i1])
            if (lon >= -180 and lon <= 180 and lat >= -90 and lat <= 90):
                gcp = gdal.GCP(lon, lat, 0,
                               i1 * pixelStep, i0 * lineStep)
                self.logger.debug('%d %d %d %f %f',
                                  k, gcp.GCPPixel, gcp.GCPLine,
                                  gcp.GCPX, gcp.GCPY)
                gcps.append(gcp)
                k += 1
    # append GCPs and lat/lon projection to the vsiDataset
    self.dataset.SetGCPs(gcps, NSR().wkt)
    # remove geolocation array
    self.remove_geolocationArray()