if bandName[-1:].isdigit():
    bandName = bandName[:-1]
if bandName[-1:].isdigit():
    bandName = bandName[:-1]
dst['name'] = bandName

# remove unnecessary metadata from dst
for rmMetadata in rmMetadatas:
    if rmMetadata in dst:
        dst.pop(rmMetadata)

# append band with src and dst dictionaries
metaDict.append({'src': src, 'dst': dst})

# create empty VRT dataset with geolocation only
VRT.__init__(self, firstSubDataset, srcMetadata=gdalMetadata)

# add bands with metadata and corresponding values to the empty VRT
self._create_bands(metaDict)
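The two calls above are the recurring mapper pattern in these snippets: initialize an empty VRT from a GDAL dataset, then describe each band as a src/dst pair and hand the list to _create_bands. A minimal sketch of that pattern, assuming an older nansat-style API (VRT, _create_bands) and a hypothetical input file 'example.nc':

# Sketch of the metaDict / _create_bands pattern (assumptions: nansat's VRT is
# importable as below; 'example.nc' and the band metadata are hypothetical).
try:
    from osgeo import gdal
except ImportError:
    import gdal
from nansat.vrt import VRT

file_name = 'example.nc'
gdal_dataset = gdal.Open(file_name)
vrt = VRT(gdal_dataset)                       # empty VRT with geolocation only

meta_dict = [{'src': {'SourceFilename': file_name, 'SourceBand': 1},
              'dst': {'name': 'band1', 'wkv': 'toa_outgoing_spectral_radiance'}}]
vrt._create_bands(meta_dict)                  # add the band described above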
# Create complex data bands from 'xxx_real' and 'xxx_imag' bands
# using pixelfunctions
rmBands = []
for iBandNo in range(self.dataset.RasterCount):
    iBand = self.dataset.GetRasterBand(iBandNo + 1)
    iBandName = iBand.GetMetadataItem('name')
    # find real data band
    if iBandName.find("_real") != -1:
        realBandNo = iBandNo
        realBand = self.dataset.GetRasterBand(realBandNo + 1)
        realDtype = realBand.GetMetadataItem('DataType')
        bandName = iBandName.replace(iBandName.split('_')[-1],
for bandFileName, bandSize, bandDataset in zip(bandFileNames,
                                               bandSizes,
                                               bandDatasets):
    if bandSize == bandXSise:
        # let last part of file name be suffix
        bandSuffix = os.path.splitext(bandFileName)[0].split('_')[-1]
        metaDict.append({
            'src': {'SourceFilename': bandFileName,
                    'SourceBand': 1,
                    'ScaleRatio': 0.1},
            'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                    'suffix': bandSuffix}})
        gdalDataset4Use = bandDataset

# create empty VRT dataset with geolocation only
VRT.__init__(self, gdalDataset4Use)

# add bands with metadata and corresponding values to the empty VRT
self._create_bands(metaDict)
if len(mtlFileName) > 0:
    mtlFileName = os.path.join(os.path.split(bandFileNames[0])[0],
                               mtlFileName)
    mtlFileLines = [line.strip() for line in
                    self.read_xml(mtlFileName).split('\n')]
    dateString = [line.split('=')[1].strip()
                  for line in mtlFileLines
                  if ('DATE_ACQUIRED' in line or
                      'ACQUISITION_DATE' in line)][0]
    timeStr = [line.split('=')[1].strip()
               for line in mtlFileLines
               if ('SCENE_CENTER_TIME' in line or
if dSourceFile is None:
    raise WrongMapperError
dSubDataset = gdal.Open(dSourceFile)
dMetadata = dSubDataset.GetMetadata()

try:
    scale_factor = dMetadata['DEPTH#scale_factor']
    add_offset = dMetadata['DEPTH#add_offset']
except KeyError:
    raise WrongMapperError

geoTransform = [mbWestLongitude, Element_x_size, 0,
                mbNorthLatitude, 0, -Element_y_size]

# create empty VRT dataset with geolocation only
VRT.__init__(self, srcGeoTransform=geoTransform,
             srcMetadata=gdalMetadata,
             srcProjection=NSR(mbProj4String).wkt,
             srcRasterXSize=Number_columns,
             srcRasterYSize=Number_lines)

metaDict = [{'src': {'SourceFilename': dSourceFile,
                     'SourceBand': 1,
                     'ScaleRatio': scale_factor,
                     'ScaleOffset': add_offset},
             'dst': {'wkv': 'depth'}}]

# add bands with metadata and corresponding values to the empty VRT
self._create_bands(metaDict)
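This snippet builds the VRT from an explicit geotransform, projection, and raster size instead of an existing dataset. A sketch of that construction, assuming nansat's VRT and NSR are importable as shown; the bounds, pixel sizes, and raster dimensions are hypothetical:

# Sketch of creating a VRT from an explicit geotransform (assumptions: the
# corner coordinates, pixel sizes and raster size below are made up).
from nansat.vrt import VRT
from nansat.nsr import NSR

west, north = -5.0, 60.0            # hypothetical upper-left corner (lon, lat)
dx, dy = 0.01, 0.01                 # hypothetical pixel sizes in degrees
n_cols, n_rows = 500, 400           # hypothetical raster size

# GDAL geotransform: [x_origin, dx, 0, y_origin, 0, -dy]
geo_transform = [west, dx, 0, north, 0, -dy]
vrt = VRT(srcGeoTransform=geo_transform,
          srcProjection=NSR('+proj=longlat +datum=WGS84 +no_defs').wkt,
          srcRasterXSize=n_cols,
          srcRasterYSize=n_rows)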
# Get file names from subdatasets
subDatasets = gdalDataset.GetSubDatasets()
filenames = [f[0] for f in subDatasets]

for ii, fn in enumerate(filenames):
    if 'lon' in fn:
        break
lon = gdal.Open(filenames.pop(ii)).ReadAsArray()
for ii, fn in enumerate(filenames):
    if 'lat' in fn:
        break
lat = gdal.Open(filenames.pop(ii)).ReadAsArray()

# create empty VRT dataset with geolocation only
VRT.__init__(self, lon=lon, lat=lat)

# Add list of calibration files to global metadata
# self.dataset.SetMetadataItem(
#     'Orbit based range bias calibration files',
#     [filenames.pop(ii) for ii, fn in enumerate(filenames) if
#      'calibration_file_orbit' in fn][0])
remove_calfile_info = [filenames.pop(ii)
                       for ii, fn in enumerate(filenames)
                       if 'calibration_file_orbit' in fn][0]
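The commented-out block and the comprehension above both pull the single matching calibration file name out of the subdataset list while popping it. A small alternative sketch that selects and removes the match without mutating the list during iteration; the subdataset names are made up:

# Sketch of picking one matching subdataset name (the names below are hypothetical).
filenames = ['HDF5:"f.h5"://lon', 'HDF5:"f.h5"://lat',
             'HDF5:"f.h5"://calibration_file_orbit']
cal_file = next(fn for fn in filenames if 'calibration_file_orbit' in fn)
filenames = [fn for fn in filenames if fn != cal_file]   # drop it from the list
print(cal_file)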
name2wkv_dict = {'azimuth': 'platform_azimuth_angle',
                 'incidence_angles': 'angle_of_incidence',
                 'sat2target_elevation': 'sensor_view_angle',
                 'slant_range_time': '',
                 'dop_coef_observed': '',
                 'dop_coef_predicted': '',
'pass': sphPass,
'dataType': 6}})
# add GeolocationArray
xyVRTs = self.get_ads_vrts(gdalDataset, self.lonlatNames, 'ASA_',
                           step)
GeoloArray = GeolocationArray(xVRT=xyVRTs[0],
                              yVRT=xyVRTs[1],
                              xBand=1, yBand=1,
                              srs=gdalDataset.GetGCPProjection(),
                              lineOffset=0,
                              lineStep=1,
                              pixelOffset=0,
                              pixelStep=1)

# create empty VRT dataset using GeolocationArray only
VRT.__init__(self,
             srcRasterXSize=dopWidth,
             srcRasterYSize=dopHeight,
             geolocationArray=GeoloArray,
             srcProjection=GeoloArray.d['SRS'])

# add bands with metadata and corresponding values to the empty VRT
self._create_bands(metaDict)

# set time
self._set_envisat_time(gdalMetadata)
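Here the geolocation comes from a GeolocationArray built on two helper VRTs rather than from a geotransform or GCPs. A sketch of that construction with synthetic lon/lat grids, assuming GeolocationArray is importable from nansat.vrt as in older nansat releases:

# Sketch of a GeolocationArray-based VRT (assumptions: GeolocationArray lives in
# nansat.vrt; the lon/lat grids are synthetic).
import numpy as np
from nansat.vrt import VRT, GeolocationArray

lon, lat = np.meshgrid(np.linspace(0., 5., 200), np.linspace(60., 55., 100))
lon_vrt = VRT(array=lon.astype(np.float32))   # band 1 holds longitudes
lat_vrt = VRT(array=lat.astype(np.float32))   # band 1 holds latitudes

geoloc = GeolocationArray(xVRT=lon_vrt, yVRT=lat_vrt,
                          xBand=1, yBand=1,
                          lineOffset=0, pixelOffset=0,
                          lineStep=1, pixelStep=1)
vrt = VRT(srcRasterXSize=lon.shape[1],
          srcRasterYSize=lon.shape[0],
          geolocationArray=geoloc)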
# add geolocation arrays
# get VRTs with lon and lat
if geolocation:
    xyVRTs = self.get_ads_vrts(gdalDataset, self.lonlatNames, 'ASA_',
                               step)
                     'SourceBand': 1,
                     'ScaleRatio': 0.00001,
                     'ScaleOffset': 0},
             'dst': {}}]
self.GeolocVRT._create_bands(GeolocMetaDict)

GeolocObject = GeolocationArray(xVRT=self.GeolocVRT,
                                yVRT=self.GeolocVRT,
                                # x = lon, y = lat
                                xBand=1, yBand=2,
                                lineOffset=0, pixelOffset=0,
                                lineStep=1, pixelStep=1)

# create empty VRT dataset with geolocation only
VRT.__init__(self,
             srcRasterXSize=subDataset.RasterXSize,
             srcRasterYSize=subDataset.RasterYSize,
             gdalDataset=subDataset,
             geolocationArray=GeolocObject,
             srcProjection=GeolocObject.d['SRS'])
# Scale and NODATA should ideally be taken directly from raw file
metaDict = [{'src': {'SourceFilename': ('NETCDF:"' + fileName +
                                        '":wind_speed'),
                     'ScaleRatio': 0.01,
                     'NODATA': -32767},
             'dst': {'name': 'windspeed',
                     'wkv': 'wind_speed'}
             },
            {'src': {'SourceFilename': ('NETCDF:"' + fileName +
                                        '":wind_dir'),
if len(channels_info) == 2 and channels_info[0].product[0:2] == 'AP':
    channels_data.append(rvl.getdata(channel=1))
    np.testing.assert_array_equal(channels_data[0]['LONGITUDE'],
                                  channels_data[1]['LONGITUDE'])
    np.testing.assert_array_equal(channels_data[0]['LATITUDE'],
                                  channels_data[1]['LATITUDE'])
    np.testing.assert_array_equal(channels_data[0]['INCANGLE'],
                                  channels_data[1]['INCANGLE'])
    np.testing.assert_array_equal(channels_data[0]['HEADING'],
                                  channels_data[1]['HEADING'])
    np.testing.assert_array_equal(channels_data[0]['PREDDOPFREQ'],
                                  channels_data[1]['PREDDOPFREQ'])

longitude = channels_data[0]['LONGITUDE']
latitude = channels_data[0]['LATITUDE']
VRT.__init__(self, lon=longitude, lat=latitude)

incVRT = VRT(array=channels_data[0]['INCANGLE'],
             lon=longitude, lat=latitude)
azVRT = VRT(array=np.mod(channels_data[0]['HEADING'] + 90., 360.),
            lon=longitude, lat=latitude)
dcpVRT = VRT(array=channels_data[0]['PREDDOPFREQ'],
             lon=longitude, lat=latitude)

metaDict = []
self.bandVRTs['incVRT'] = incVRT
self.bandVRTs['azVRT'] = azVRT
self.bandVRTs['dcpVRT'] = dcpVRT
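The incVRT/azVRT/dcpVRT objects hold derived arrays in their own in-memory VRTs, and the main VRT then references them by fileName; keeping them in self.bandVRTs prevents them from being garbage-collected. A sketch of that pattern with synthetic data, assuming the nansat VRT attributes (array=, lon=, lat=, fileName) seen in these snippets:

# Sketch of the bandVRTs pattern (assumptions: nansat's VRT is importable as
# below; the arrays and the 'incidence_angle' band name are synthetic).
import numpy as np
from nansat.vrt import VRT

lon, lat = np.meshgrid(np.linspace(0., 1., 50), np.linspace(50., 49., 40))
inc = np.full(lon.shape, 30.0, dtype=np.float32)      # synthetic incidence angles

main_vrt = VRT(lon=lon, lat=lat)                      # geolocation-only VRT
inc_vrt = VRT(array=inc, lon=lon, lat=lat)            # array stored in its own VRT
main_vrt.bandVRTs = {'incVRT': inc_vrt}               # keep a reference so it persists

meta_dict = [{'src': {'SourceFilename': inc_vrt.fileName, 'SourceBand': 1},
              'dst': {'name': 'incidence_angle'}}]
main_vrt._create_bands(meta_dict)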
metaDict.append({
    'src': {
        'SourceFilename': self.bandVRTs['incVRT'].fileName,
        'SourceBand': 1
                              'zonal winds',
                              'meridional winds'],
                             zoomSize=zoomSize,
                             step=step)}

# add bands from the ADS VRTs
for adsVRT in self.bandVRTs['adsVRTs']:
    metaDict.append({'src': {'SourceFilename': adsVRT.fileName,
                             'SourceBand': 1},
                     'dst': {'name': (adsVRT.dataset.GetRasterBand(1).
                                      GetMetadataItem('name')),
                             'units': (adsVRT.dataset.GetRasterBand(1).
                                       GetMetadataItem('units'))}
                     })

# create empty VRT dataset with geolocation only
VRT.__init__(self, gdalDataset)

# add bands with metadata and corresponding values to the empty VRT
self._create_bands(metaDict)
# set time
self._set_envisat_time(gdalMetadata)

# set SADCAT specific metadata
self.dataset.SetMetadataItem('start_date',
                             (parse(
                                 gdalMetadata['SPH_FIRST_LINE_TIME']).
                              isoformat()
                              + '+00:00'))
self.dataset.SetMetadataItem('stop_date',
                             (parse(
                                 gdalMetadata['SPH_LAST_LINE_TIME']).
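start_date and stop_date are written as ISO 8601 strings parsed from the product header. A small sketch of stamping such metadata onto an in-memory GDAL dataset; the header key format and timestamp value are made up:

# Sketch of setting ISO 8601 date metadata (assumptions: the timestamp string is
# hypothetical; GDAL's in-memory 'MEM' driver keeps the example self-contained).
from osgeo import gdal
from dateutil.parser import parse

ds = gdal.GetDriverByName('MEM').Create('', 10, 10, 1, gdal.GDT_Byte)
first_line_time = '06-AUG-2011 07:23:03.871606'       # hypothetical header value
ds.SetMetadataItem('start_date', parse(first_line_time).isoformat() + '+00:00')
print(ds.GetMetadataItem('start_date'))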
if 'standard_name' in attrs:
    metaEntry['dst']['wkv'] = metaEntry['dst']['standard_name']

# add dim metadata (location within each dimension)
for dimKey in dimMetadata:
    metaEntry['dst'][str(dimKey)] = dimMetadata[dimKey]
metaDict.append(metaEntry)

# read global metadata
srcMetadata = {}
for attr in f.ncattrs():
    srcMetadata[str(attr)] = str(f.getncattr(attr))

# create VRT with bands
VRT.__init__(self, srcGeoTransform=srcGeoTransform,
             srcProjection=srcProjection,
             srcRasterXSize=srcRasterXSize,
             srcRasterYSize=srcRasterYSize,
             srcMetadata=srcMetadata)
self.create_bands(metaDict)
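The global-attribute loop above copies the netCDF file's attributes into a plain dict for srcMetadata. A minimal sketch of the same step using netCDF4 directly, with a hypothetical file name:

# Sketch of reading global netCDF attributes into a dict (assumption:
# 'example.nc' is a made-up, netCDF4-readable file).
from netCDF4 import Dataset

f = Dataset('example.nc')
src_metadata = {str(attr): str(f.getncattr(attr)) for attr in f.ncattrs()}
f.close()
print(src_metadata)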