# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# --- Interior of a constructor that builds self.vrt from combinations of a
# --- GDAL dataset (ds), a spatial reference (srs) and an extent string (ext).
self.vrt = VRT(gdalDataset=ds)
# If dataset and srs are given (but not ext):
# use AutoCreateWarpedVRT to determine bounds and resolution
elif ds is not None and srs is not None:
srs = NSR(srs)
tmpVRT = gdal.AutoCreateWarpedVRT(ds, None, srs.wkt)
if tmpVRT is None:
# NOTE(review): implicit string concatenation yields
# "...datasetto the given SRS." — a trailing space is missing.
raise ProjectionError('Could not warp the given dataset'
'to the given SRS.')
else:
self.vrt = VRT(gdalDataset=tmpVRT)
# If SpatialRef and extent string are given (but not dataset)
elif srs is not None and ext is not None:
srs = NSR(srs)
# create full dictionary of parameters
extentDic = self._create_extentDic(ext)
# convert -lle to -te
if 'lle' in extentDic.keys():
extentDic = self._convert_extentDic(srs, extentDic)
# get size/extent from the created extent dictionary
[geoTransform,
rasterXSize, rasterYSize] = self._get_geotransform(extentDic)
# create VRT object with given geo-reference parameters
self.vrt = VRT(srcGeoTransform=geoTransform,
srcProjection=srs.wkt,
srcRasterXSize=rasterXSize,
srcRasterYSize=rasterYSize)
# keep the parsed extent dictionary for later use
self.extentDic = extentDic
# --- Interior of a transect method: gather per-band values into flat arrays
# --- and, in this branch, pack them into a Nansatshape (OGR) object.
for iBand in transectDict.keys():
# lazily create an empty accumulator array for each band
if not (iBand in transect.keys()):
transect[iBand] = np.array([])
transect[iBand] = np.append(transect[iBand],
transectDict[iBand][iShape])
# structured array with one record per sampled pixel/line position
fieldValues = np.zeros(len(line), dtype={'names': names,
'formats': formats})
# Set values into the structured numpy array
fieldValues['X (pixel)'] = pixel
fieldValues['Y (line)'] = line
for iBand in transect.keys():
fieldValues['transect_'+iBand] = transect[iBand]
# Create Nansatshape object
NansatOGR = Nansatshape(srs=NSR(self.vrt.get_projection()))
# Set features and geometries into the Nansatshape
NansatOGR.add_features(coordinates=np.array([lonVector,
latVector]),
values=fieldValues)
# Return Nansatshape object
return NansatOGR
else:
# otherwise return the raw per-band dictionaries unchanged
return transectDict, vectorsDict, pixlinCoordDic
def _create_empty_from_projection_variable(self, gdal_dataset, gdal_metadata,
                                           projection_variable='projection_lambert'):
    """Create an empty dataset sized like the first sub-dataset, with the
    projection taken from a netCDF projection variable.

    Parameters
    ----------
    gdal_dataset : gdal.Dataset
        Dataset whose first sub-dataset provides raster size and geotransform.
    gdal_metadata : dict
        Metadata passed on to the created dataset.
    projection_variable : str, optional
        Name of the netCDF variable carrying the proj4 string
        (default: 'projection_lambert').
    """
    # Size and geotransform come from the first sub-dataset.
    subdataset = gdal.Open(self._get_sub_filenames(gdal_dataset)[0])
    # Open the netCDF file only long enough to read the proj4 string;
    # the original left this handle open (resource leak).
    with Dataset(self.input_filename) as ds:
        projection = NSR(ds.variables[projection_variable].proj4).wkt
    self._init_from_dataset_params(
        x_size=subdataset.RasterXSize,
        y_size=subdataset.RasterYSize,
        geo_transform=subdataset.GetGeoTransform(),
        projection=projection,
        metadata=gdal_metadata)
import json
from netCDF4 import Dataset
class Mapper(Opendap):
    """Mapper for UKMO OSTIA L4 GHRSST SST products served over OPeNDAP."""

    # OPeNDAP server roots this mapper accepts
    baseURLs = [
        'https://podaac-opendap.jpl.nasa.gov:443/opendap/allData/ghrsst/data/L4/GLOB/UKMO/OSTIA',
        'https://opendap.jpl.nasa.gov:443/opendap/OceanTemperature/ghrsst/data/L4/GLOB/UKMO/OSTIA'
    ]
    timeVarName = 'time'   # name of the time variable in the remote dataset
    xName = 'lon'          # x dimension name
    yName = 'lat'          # y dimension name
    timeCalendarStart = '1981-01-01'  # presumably the time-variable epoch — confirm in Opendap
    srcDSProjection = NSR().wkt       # default NSR projection as WKT

    def __init__(self, filename, gdal_dataset, gdal_metadata, date=None,
                 ds=None, bands=None, cachedir=None, *args, **kwargs):
        """Create the VRT for the remote dataset and set discovery metadata
        (title, data center, ISO topic category, GCMD location)."""
        self.test_mapper(filename)
        timestamp = date if date else self.get_date(filename)
        # NOTE(review): the ds argument is ignored — it is unconditionally
        # re-opened from filename here; confirm this is intended.
        ds = Dataset(filename)
        self.create_vrt(filename, gdal_dataset, gdal_metadata, timestamp, ds, bands, cachedir)
        self.dataset.SetMetadataItem('entry_title', str(ds.getncattr('title')))
        self.dataset.SetMetadataItem('data_center', json.dumps(pti.get_gcmd_provider('UK/MOD/MET')))
        self.dataset.SetMetadataItem('ISO_topic_category',
                                     pti.get_iso19115_topic_category('oceans')['iso_topic_category'])
        self.dataset.SetMetadataItem('gcmd_location', json.dumps(pti.get_gcmd_location('sea surface')))
        #mm = pti.get_gcmd_instrument('amsr-e')
        #ee = pti.get_gcmd_platform('aqua')
from nansat.nsr import NSR
from nansat.tools import initial_bearing
class Mapper(Opendap, Sentinel1):
    """Mapper for Sentinel-1 data from the met.no NBS THREDDS/OPeNDAP service."""

    # OPeNDAP server roots (S1A and S1B archives)
    baseURLs = [
        'http://nbstds.met.no/thredds/dodsC/NBS/S1A',
        'http://nbstds.met.no/thredds/dodsC/NBS/S1B',
    ]
    timeVarName = 'time'   # name of the time variable in the remote dataset
    xName = 'x'            # x dimension name
    yName = 'y'            # y dimension name
    timeCalendarStart = '1981-01-01'  # presumably the time-variable epoch — confirm in Opendap
    srcDSProjection = NSR().wkt       # default NSR projection as WKT

    def __init__(self, filename, gdal_dataset, gdal_metadata, date=None,
                 ds=None, bands=None, cachedir=None, *args, **kwargs):
        """Create the VRT and add calibrated NRCS bands.

        Raises NansatReadError when scipy is unavailable, since the
        Sentinel-1 reading path requires it.
        """
        self.test_mapper(filename)
        if not IMPORT_SCIPY:
            raise NansatReadError('Sentinel-1 data cannot be read because scipy is not installed')
        timestamp = date if date else self.get_date(filename)
        self.create_vrt(filename, gdal_dataset, gdal_metadata, timestamp, ds, bands, cachedir)
        # initialise the Sentinel1 side of the mapper, then derive NRCS bands
        Sentinel1.__init__(self, filename)
        self.add_calibrated_nrcs(filename)
        self.add_nrcs_VV_from_HH(filename)
# --- Interior of a mapper constructor: build a quality-mask band and set the
# --- fixed geo-reference and start-time metadata.
# Binarise quality: below minQual -> 1, at/above minQual -> 128
qualArray[qualArray < minQual] = 1
qualArray[qualArray >= minQual] = 128
self.bandVRTs = {'maskVRT': vrt.VRT(array=qualArray.astype('int8'))}
# expose the mask VRT as a 'mask' band through a SimpleSource entry
metaDict.append({'src': {'SourceFilename': (self.
bandVRTs['maskVRT'].
fileName),
'SourceBand': 1,
'SourceType': 'SimpleSource',
'DataType': 1},
'dst': {'name': 'mask'}})
# add bands with metadata and corresponding values to the empty VRT
self._create_bands(metaDict)
# append fixed projection and geotransform
self.dataset.SetProjection(NSR().wkt)
# global grid: 0.0417-degree pixels starting at lon=-180, lat=90
self.dataset.SetGeoTransform((-180, 0.0417, 0, 90, 0, -0.0417))
# set TIMEstart_time — the metadata key differs between HDF5-style and
# NC_GLOBAL-style files
if h5Style:
startTimeKey = 'start_time'
else:
startTimeKey = 'NC_GLOBAL#start_time'
self.dataset.SetMetadataItem('time_coverage_start', subGDALDataset.GetMetadataItem(startTimeKey))
# --- Interior of a GCP-reprojection helper; the first line below is the tail
# --- of a transformer-options call begun outside this view.
'DST_SRS=' + NSR(dst_srs).wkt])
# create 'fake' GCPs
fake_gcps = []
# take every skip_gcps-th destination GCP to limit the GCP count
for g in dst_gcps[::skip_gcps]:
# transform DST lat/lon to SRC pixel/line
succ, point = src_transformer.TransformPoint(1, g.GCPX, g.GCPY)
srcPixel = point[0]
srcLine = point[1]
# swap coordinates in GCPs:
# pix1/line1 -> lat/lon =>=> pix2/line2 -> pix1/line1
fake_gcps.append(gdal.GCP(g.GCPPixel, g.GCPLine,
0, srcPixel, srcLine))
# attach the swapped GCPs with a stereographic SRS
self.dataset.SetGCPs(fake_gcps, NSR('+proj=stere').wkt)
return None
DstToSrc : 0 or 1
- 0 - forward transform (pix/line => lon/lat)
- 1 - inverse transformation
dst_srs : NSR
destination spatial reference (default: NSR())
Returns
--------
X, Y : lists
X and Y coordinates in lon/lat or pixel/line coordinate system
"""
# fall back to the default spatial reference when none is given
if dst_srs is None:
dst_srs = NSR()
# delegate the actual coordinate transformation to the underlying VRT
return self.vrt.transform_points(colVector, rowVector, dst2src=DstToSrc, dst_srs=dst_srs)
from the geolocation bands.
Parameters
-----------
stepSize : int
Reduction factor if output is desired on a reduced grid size
dst_srs : NSR, optional
spatial reference of the output coordinates (default: NSR())
Returns
--------
longitude : numpy array
grid with longitudes
latitude : numpy array
grid with latitudes
"""
if dst_srs is None:
dst_srs = NSR()
step_size = stepSize
# sample pixel/line indices on a regular (possibly reduced) grid
x_vec = list(range(0, self.vrt.dataset.RasterXSize, step_size))
y_vec = list(range(0, self.vrt.dataset.RasterYSize, step_size))
x_grid, y_grid = np.meshgrid(x_vec, y_vec)
if self.vrt.geolocation is not None and len(self.vrt.geolocation.data) > 0:
# if the vrt dataset has geolocationArray
# read lon,lat grids from geolocationArray
lon_grid, lat_grid = self.vrt.geolocation.get_geolocation_grids()
lon_arr, lat_arr = lon_grid[y_grid, x_grid], lat_grid[y_grid, x_grid]
else:
# generate lon,lat grids using GDAL Transformer
lon_vec, lat_vec = self.transform_points(x_grid.flatten(), y_grid.flatten(),
dst_srs=dst_srs)
# reshape the flat vectors back into the sampled grid shape
lon_arr = lon_vec.reshape(x_grid.shape)
lat_arr = lat_vec.reshape(x_grid.shape)
def set_gcps(self, lon, lat, gdal_dataset):
    """Attach GCPs derived from the given lon/lat grids and register the
    corresponding geolocation bands."""
    # Keep a VRT of the longitudes so the geolocation x-band stays resolvable
    lon_vrt = VRT.from_array(lon)
    self.band_vrts['new_lon_VRT'] = lon_vrt
    # Thin the grids to at most 400 GCPs and attach them with the default SRS
    gcps = VRT._lonlat2gcps(lon, lat, n_gcps=400)
    self.dataset.SetGCPs(gcps, NSR().wkt)
    # Add geolocation from correct longitudes and latitudes
    lat_band = self._latitude_band_number(gdal_dataset)
    geolocation = Geolocation(lon_vrt, self, x_band=1, y_band=lat_band)
    self._add_geolocation(geolocation)