Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# Fragment of the Svalbard ice-concentration mapper __init__ (the enclosing
# `def` is outside this view).  Rejects filenames that lack the expected
# keyword prefix, then searches +/- 3 days around the requested date for a
# local ice-concentration netCDF file and initialises the mapper from it.
if filename[0:len(keyword_base)] != keyword_base:
    raise WrongMapperError
# The date string follows the base keyword plus one separator character.
keyword_time = filename[len(keyword_base)+1:]
requested_time = datetime.strptime(keyword_time, '%Y%m%d')
# Search for nearest available file, within the closest 3 days;
# the exact day is preferred, then earlier offsets before later ones.
found_dataset = False
for delta_day in [0, -1, 1, -2, 2, -3, 3]:
    # Product files are stamped 15:00 on each day.
    valid_time = (requested_time + timedelta(days=delta_day) +
                  timedelta(hours=15))
    filename = (ice_folder_name + 'ice_conc_svalbard_' +
                valid_time.strftime('%Y%m%d1500.nc'))
    if os.path.exists(filename):
        print('Found file:')
        print(filename)
        gdal_dataset = gdal.Open(filename)
        # BUGFIX: was `gdalDataset.GetMetadata()` — `gdalDataset` is not
        # defined here; the dataset just opened is `gdal_dataset`.
        gdal_metadata = gdal_dataset.GetMetadata()
        mg.Mapper.__init__(self, filename, gdal_dataset, gdal_metadata)
        found_dataset = True
        # Modify GeoTransform from netCDF file
        # - otherwise a shift is seen!
        self.dataset.SetGeoTransform(
            (-1243508 - 1000, 1000, 0, -210526 - 7000, 0, -1000))
        break  # Data is found for this day
if found_dataset is False:
    # BUGFIX: the exception object was created but never raised, so the
    # message was silently discarded and execution fell through to a bare
    # sys.exit().  Raise it instead so callers can handle the failure;
    # killing the whole process from inside a mapper is not recoverable.
    raise AttributeError("No local Svalbard-ice files available")
def __init__(self, filename, gdalDataset, gdalMetadata,
GCP_COUNT=10, **kwargs):
''' Create VRT.

Parameters
----------
GCP_COUNT : int
    number of GCPs along each dimension
'''
# NOTE(review): indentation is lost in this chunk and the body appears to
# fuse code from two different mappers (an OBPG L2 reader and a VIIRS SDR
# reader); the trailing metaEntry literal is truncated mid-dict.
# Documented as-is — confirm against the original files.
# extension must be .nc
if os.path.splitext(filename)[1] != '.nc':
raise WrongMapperError
# file must contain navigation_data/longitude
try:
ds = gdal.Open('HDF5:"%s"://navigation_data/longitude' % filename)
except RuntimeError:
raise WrongMapperError
else:
dsMetadata = ds.GetMetadata()
# title value must be known
if dsMetadata.get('title', '') not in self.titles:
raise WrongMapperError
# get geophysical data variables
subDatasets = gdal.Open(filename).GetSubDatasets()
metaDict = []
for subDataset in subDatasets:
# group name is the second-to-last path component of the subdataset name
groupName = subDataset[0].split('/')[-2]
if groupName not in ['geophysical_data', 'navigation_data']:
continue
# Collect sibling VIIRS SDR granule files from the same directory.
# NOTE(review): no path separator is inserted between the directory and
# the glob pattern — presumably ifiledir ends with '/'; verify at caller.
ifiledir = os.path.split(filename)[0]
ifiles = glob.glob(ifiledir + 'SVM??_npp_d*_obpg_ops.h5')
ifiles.sort()
# scipy is required for reading VIIRS data
if not IMPORT_SCIPY:
raise NansatReadError(' VIIRS data cannot be read because scipy is not installed! '
' Please do: conda -c conda-forge install scipy ')
# Central wavelength (nm) of VIIRS bands M1..M16; index 0 is unused so
# the band number can index the list directly.
viirsWavelengths = [None, 412, 445, 488, 555, 672, 746, 865, 1240,
1378, 1610, 2250, 3700, 4050, 8550, 10736, 12013]
# create empty VRT dataset with geolocation only
xDatasetSource = ('HDF5:"%s"://All_Data/VIIRS-MOD-GEO-TC_All/Longitude'
% filename)
xDatasetBand = 1
xDataset = gdal.Open(xDatasetSource)
self._init_from_gdal_dataset(xDataset)
metaDict = []
for ifile in ifiles:
ifilename = os.path.split(ifile)[1]
# NOTE(review): Python 2 print statements below — this chunk will not
# parse under Python 3; presumably leftover debug output.
print ifilename
# band number is encoded in characters 3:5 of the granule name (SVMnn_...)
print bNumber
bNumber = int(ifilename[3:5])
bWavelength = viirsWavelengths[bNumber]
print bWavelength
# band radiance lives in the per-band SDR group of the HDF5 file
SourceFilename = ('HDF5:"%s"://All_Data/VIIRS-M%d-SDR_All/Radiance'
% (ifile, bNumber))
print SourceFilename
metaEntry = {'src': {'SourceFilename': SourceFilename,
'SourceBand': 1},
'dst': {'wkv': 'toa_outgoing_spectral_radiance',
def _get_dataset_metadata(self):
# open GDAL dataset. It will be parsed to all mappers for testing
gdal_dataset, metadata = None, dict()
if not self.filename.startswith('http'):
try:
gdal_dataset = gdal.Open(self.filename)
except RuntimeError:
self.logger.error('GDAL could not open %s, trying to read with Nansat mappers...'
% self.filename)
if gdal_dataset is not None:
# get metadata from the GDAL dataset
metadata = gdal_dataset.GetMetadata()
return gdal_dataset, metadata
# Fragment of a Domain-style __init__ (enclosing def outside this view).
# choose between input options:
#    ds
#    ds and srs
#    srs and ext
# if only a dataset is given:
#     copy geo-reference from the dataset
if ds is not None and srs is None:
self.vrt = VRT.from_gdal_dataset(ds)
# If dataset and srs are given (but not ext):
# use AutoCreateWarpedVRT to determine bounds and resolution
elif ds is not None and srs is not None:
srs = NSR(srs)
tmp_vrt = gdal.AutoCreateWarpedVRT(ds, None, srs.wkt)
if tmp_vrt is None:
# AutoCreateWarpedVRT returns None (rather than raising) on failure
raise NansatProjectionError('Could not warp the given dataset to the given SRS.')
else:
self.vrt = VRT.from_gdal_dataset(tmp_vrt)
# If SpatialRef and extent string are given (but not dataset)
elif srs is not None and ext is not None:
srs = NSR(srs)
# create full dictionary of parameters
extent_dict = Domain._create_extent_dict(ext)
# convert -lle (lon/lat extent) to -te (target extent) when present
if 'lle' in extent_dict.keys():
extent_dict = self._convert_extentDic(srs, extent_dict)
# get size/extent from the created extent dictionary
# Fragment: per-band loop body of a Radarsat-2 mapper (the loop header and
# enclosing definition are outside this view).
# Polarization of the current band; reused as the band-name suffix.
polString = iBand.GetMetadata()['POLARIMETRIC_INTERP']
suffix = polString
# The nansat data will be complex
# if the SAR data is of type 10
# NOTE(review): 10 is presumably gdal.GDT_CFloat32 — confirm against the
# GDAL data-type constants.
dtype = iBand.DataType
if dtype == 10:
# add intensity band (derived from the complex band via pixel function;
# destination dataType 6 is presumably GDT_Float32)
metaDict.append(
{'src': {'SourceFilename':
('RADARSAT_2_CALIB:SIGMA0:'
+ fileName + '/product.xml'),
'SourceBand': i,
'DataType': dtype},
'dst': {'wkv': 'surface_backwards_scattering_coefficient_of_radar_wave',
'PixelFunctionType': 'intensity',
'SourceTransferType': gdal.GetDataTypeName(dtype),
'suffix': suffix,
'polarization': polString,
'dataType': 6}})
# modify suffix for adding the complex band below
suffix = polString+'_complex'
pol.append(polString)
# add the calibrated sigma0 band itself (complex when dtype == 10)
metaDict.append(
{'src': {'SourceFilename': ('RADARSAT_2_CALIB:SIGMA0:'
+ fileName
+ '/product.xml'),
'SourceBand': i,
'DataType': dtype},
'dst': {'wkv': 'surface_backwards_scattering_coefficient_of_radar_wave',
'suffix': suffix,
'polarization': polString}})
# Fragment: tail of an export routine (enclosing def outside this view).
# Normalize `options` to a list of GDAL creation-option strings.
if options is None:
options = []
# NOTE(review): isinstance(options, str) would be the idiomatic check here.
if type(options) == str:
options = [options]
# set bottomup option
# NOTE(review): the mapping looks inverted (bottomup -> WRITE_BOTTOMUP=NO);
# presumably this matches the netCDF driver's row-order convention — confirm
# against the GDAL netCDF driver docs before changing.
if bottomup:
options += ['WRITE_BOTTOMUP=NO']
else:
options += ['WRITE_BOTTOMUP=YES']
# Create an output file using GDAL
self.logger.debug('Exporting to %s using %s and %s...' % (fileName,
driver,
options))
# CreateCopy writes exportVRT's bands to fileName with the chosen driver;
# the returned dataset handle is not used further within this view.
dataset = gdal.GetDriverByName(driver).CreateCopy(fileName,
exportVRT.dataset,
options=options)
self.logger.debug('Export - OK!')
def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs):
''' Create MODIS_L1 VRT.

Raises WrongMapperError when the input is not a MODIS Level-1 product.
(Definition continues beyond this chunk.)
'''
# get 1st subdataset and parse to VRT.__init__() for retrieving geo-metadata
try:
gdalSubDataset = gdal.Open(gdalDataset.GetSubDatasets()[0][0])
except (AttributeError, IndexError):
# gdalDataset is None (AttributeError) or has no subdatasets (IndexError)
warnings.warn(__file__+' may need a better test for data ' \
'fitness')
raise WrongMapperError(__file__, "Wrong mapper")
# list of available modis names:resolutions (metres per pixel)
modisResolutions = {'MYD02QKM': 250, 'MOD02QKM': 250,
'MYD02HKM': 500, 'MOD02HKM': 500,
'MYD021KM': 1000, 'MOD021KM': 1000}
# should raise error in case of not MODIS_L1
try:
mResolution = modisResolutions[gdalMetadata["SHORTNAME"]]
except KeyError:
raise WrongMapperError(__file__, "Wrong mapper")