# Validate a cloud-optimized GeoTIFF assembled in memory (terracotta test
# suite; BASE_PROFILE is a module-level GTiff base profile).
import numpy as np
from rasterio.enums import Resampling
from rasterio.io import MemoryFile
from rasterio.shutil import copy

def test_validate_optimized(tmpdir):
    from terracotta import cog
    outfile = str(tmpdir / 'raster.tif')
    # Scale before casting: casting rand() floats in [0, 1) to uint16 first
    # would zero out the data.
    raster_data = (1000 * np.random.rand(512, 512)).astype(np.uint16)
    profile = BASE_PROFILE.copy()
    profile.update(
        height=raster_data.shape[0],
        width=raster_data.shape[1],
        tiled=True,
        blockxsize=256,
        blockysize=256
    )
    with MemoryFile() as mf, mf.open(**profile) as dst:
        dst.write(raster_data, 1)
        overviews = [2 ** j for j in range(1, 4)]
        dst.build_overviews(overviews, Resampling.nearest)
        copy(dst, outfile, copy_src_overviews=True, **profile)
    assert cog.validate(outfile)
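The same MemoryFile → build_overviews → rasterio.shutil.copy sequence works outside a test as a minimal cloud-optimized GeoTIFF writer. A sketch, assuming an existing input raster (paths hypothetical):

import rasterio
from rasterio.enums import Resampling
from rasterio.io import MemoryFile
from rasterio.shutil import copy

src_path, dst_path = 'input.tif', 'output_cog.tif'  # hypothetical paths
with rasterio.open(src_path) as src, MemoryFile() as mem:
    profile = src.profile.copy()
    profile.update(tiled=True, blockxsize=256, blockysize=256)
    with mem.open(**profile) as tmp:
        tmp.write(src.read())
        # internal overviews are what make the copied file cloud-optimized
        tmp.build_overviews([2, 4, 8], Resampling.nearest)
        copy(tmp, dst_path, copy_src_overviews=True, **profile)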
### 1. Download a file from Allas to local filesystem
# `conn` is an authenticated swiftclient.Connection to Allas.
obj = ''
container = ''
file_output = ''
headers, raster = conn.get_object(container, obj)
with open(file_output, 'bw') as f:
    f.write(raster)
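If the raster is only needed in memory, the downloaded bytes can be opened directly with rasterio's MemoryFile instead of touching the local filesystem; a sketch reusing the same `conn`, `container`, and `obj`:

from rasterio.io import MemoryFile

headers, raster_bytes = conn.get_object(container, obj)
with MemoryFile(raster_bytes) as memfile:
    with memfile.open() as src:
        data = src.read()  # numpy array; no temporary file on disk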
### 2. Writing a raster file to Allas using the Swift library
import os
import rasterio
from rasterio.io import MemoryFile

fp = ""
bucket_name = ''
raster = rasterio.open(fp)
input_data = raster.read()
# The file is written to memory first and then uploaded to Allas
with MemoryFile() as mem_file:
    with mem_file.open(**raster.profile) as dataset:
        dataset.write(input_data)
    conn.put_object(bucket_name, os.path.basename(fp), contents=mem_file)
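swiftclient's put_object accepts either raw bytes or a file-like object with a read() method, and MemoryFile is such an object; passing the bytes explicitly is equivalent:

mem_file.seek(0)  # rewind before reading the serialized GeoTIFF back out
conn.put_object(bucket_name, os.path.basename(fp), contents=mem_file.read())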
### 3. Writing a vector file to Allas using the Swift library
import os
import tempfile
import geopandas as gpd

fp = ""
bucket_name = ''
vector = gpd.read_file(fp)
# The file is written to a named temporary file first and then uploaded to Allas
tmp = tempfile.NamedTemporaryFile()
vector.to_file(tmp.name, layer='test', driver="GPKG")  # to_file expects a path, so use .name
tmp.seek(0)  # Move the pointer to the beginning of the temp file.
conn.put_object(bucket_name, os.path.basename(fp), contents=tmp)
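For the read direction, geopandas accepts file-like objects, so a vector object fetched from Allas can be parsed straight from memory; a sketch (object name hypothetical):

from io import BytesIO

headers, vector_bytes = conn.get_object(bucket_name, 'example.gpkg')  # hypothetical name
gdf = gpd.read_file(BytesIO(vector_bytes))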
# Build a glacier mask by clipping an in-memory DEM with an outline polygon
# (OGGM; `ds`, `gdir`, `profile` and InvalidDEMError come from the
# surrounding module).
import numpy as np
import rasterio
import shapely.geometry as shpg
from rasterio.mask import mask as riomask

# don't even bother reading the actual DEM, just mimic it
data = np.zeros((ds.height, ds.width))
# Read RGI outlines
geometry = gdir.read_shapefile('outlines').geometry[0]
# simple trick to correct invalid polys:
# http://stackoverflow.com/questions/20833344/
# fix-invalid-polygon-python-shapely
geometry = geometry.buffer(0)
if not geometry.is_valid:
    raise InvalidDEMError('This glacier geometry is not valid.')
# Compute the glacier mask using rasterio
# Small detour as mask only accepts DataReader objects
with rasterio.io.MemoryFile() as memfile:
    with memfile.open(**profile) as dataset:
        dataset.write(data.astype(profile['dtype'])[np.newaxis, ...])
    dem_data = rasterio.open(memfile.name)
    masked_dem, _ = riomask(dem_data, [shpg.mapping(geometry)],
                            filled=False)
glacier_mask = ~masked_dem[0, ...].mask
# parameters for the new tif
nodata = -32767
dtype = rasterio.int16
# let's use integer
out = glacier_mask.astype(dtype)
# and check for sanity
if not np.all(np.unique(out) == np.array([0, 1])):
    # body truncated in the source; raising matches the sanity-check intent
    raise InvalidDEMError('Glacier mask should contain only 0 and 1.')
# Fragment from rasterio's io module: the tail of MemoryFile.open(), which
# constructs a writable in-memory dataset, followed by the context-manager
# methods and the ZipMemoryFile subclass.
        return writer(vsi_path, 'w+', driver=driver, width=width,
                      height=height, count=count, crs=crs,
                      transform=transform, dtype=dtype,
                      nodata=nodata, sharing=sharing, **kwargs)

    def __enter__(self):
        self._env = env_ctx_if_needed()
        self._env.__enter__()
        return self

    def __exit__(self, *args, **kwargs):
        self._env.__exit__()
        self.close()


class ZipMemoryFile(MemoryFile):
    """A read-only BytesIO-like object backed by an in-memory zip file.

    This allows a zip file containing formatted files to be read
    without I/O.
    """

    def __init__(self, file_or_bytes=None):
        super(ZipMemoryFile, self).__init__(file_or_bytes, ext='zip')

    @ensure_env
    def open(self, path, driver=None, sharing=False, **kwargs):
        """Open a dataset within the zipped stream.

        Parameters
        ----------
        path : str
            Path to a dataset in the zip file, relative to the root of the
            archive.
        """
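A minimal usage sketch for ZipMemoryFile, assuming an archive that contains 'scene.tif' (names hypothetical):

from rasterio.io import ZipMemoryFile

with open('archive.zip', 'rb') as f:  # hypothetical archive
    zip_bytes = f.read()
with ZipMemoryFile(zip_bytes) as zmemfile:
    with zmemfile.open('scene.tif') as src:  # path is relative to the archive root
        print(src.profile)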
# Convert Landsat-8 bands to top-of-atmosphere reflectance and assemble an
# RGB GeoTIFF in memory (landsat-tiler worker; `utils` is the project's helper
# module, `scene` and `bands` come from the enclosing function, and
# `reflectance` is assumed to be rio_toa's reflectance module).
import boto3
import numpy as np
import rasterio as rio
from rasterio.io import MemoryFile
from rio_toa import reflectance

scene_params = utils.landsat_parse_scene_id(scene)
meta_data = utils.landsat_get_mtl(scene)
landsat_address = f's3://landsat-pds/{scene_params["key"]}'
bqa = f'{landsat_address}_BQA.TIF'
with rio.open(bqa) as src:
    meta = src.meta
    wind = [w for ij, w in src.block_windows(1)]
meta.update(nodata=0, count=3, interleave='pixel',
            PHOTOMETRIC='RGB', tiled=False, compress=None)
E = float(utils.landsat_mtl_extract(meta_data, 'SUN_ELEVATION'))
with MemoryFile() as memfile:
    with memfile.open(**meta) as dataset:
        for b in range(len(bands)):
            band_address = f'{landsat_address}_B{bands[b]}.TIF'
            MR = float(utils.landsat_mtl_extract(
                meta_data, f'REFLECTANCE_MULT_BAND_{bands[b]}'))
            AR = float(utils.landsat_mtl_extract(
                meta_data, f'REFLECTANCE_ADD_BAND_{bands[b]}'))
            with rio.open(band_address) as src:
                for window in wind:
                    matrix = src.read(window=window, boundless=True, indexes=1)
                    result = reflectance.reflectance(
                        matrix, MR, AR, E, src_nodata=0) * 10000
                    dataset.write(result.astype(np.uint16),
                                  window=window, indexes=b + 1)
    client = boto3.client('s3')
    str_band = ''.join(map(str, bands))
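The snippet is cut off here; presumably the in-memory GeoTIFF is then uploaded with the freshly created client. A hedged sketch of that continuation, with bucket and key names purely illustrative:

memfile.seek(0)
client.put_object(Bucket='my-output-bucket',                # hypothetical bucket
                  Key=f'{scene}_RGB{str_band}.tif',         # hypothetical key pattern
                  Body=memfile.read())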
# Method of a tar-archive reader; `self.tarpath` points at the .tar file.
# Requires: from io import BytesIO; from rasterio.io import MemoryFile
def _read_yield(self, seekers):
    '''
    Seek to each (offset, length) span in the tar archive and yield the
    raster stored there as a numpy array.
    '''
    with open(self.tarpath, 'rb') as tar:
        for a, b in seekers:
            tar.seek(a)
            with BytesIO(tar.read(b)) as data:
                with MemoryFile(data) as memfile:
                    with memfile.open() as src:
                        yield src.read()
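The (offset, length) pairs can come straight from the tar index; a sketch using the standard library (archive name and member filter hypothetical):

import tarfile

with tarfile.open('rasters.tar') as tf:  # hypothetical archive
    seekers = [(m.offset_data, m.size)
               for m in tf.getmembers() if m.name.endswith('.tif')]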
# Choose a GTiff compression codec supported by the runtime GDAL (terracotta).
import warnings
from rasterio.env import GDALVersion
from rasterio.errors import NotGeoreferencedWarning
from rasterio.io import MemoryFile

def _prefered_compression_method() -> str:
    if not GDALVersion.runtime().at_least('2.3'):
        return 'DEFLATE'
    # check if we can use ZSTD (fails silently for GDAL < 2.3)
    dummy_profile = dict(driver='GTiff', height=1, width=1, count=1, dtype='uint8')
    try:
        with warnings.catch_warnings():
            warnings.simplefilter('ignore', NotGeoreferencedWarning)
            with MemoryFile() as memfile, memfile.open(compress='ZSTD', **dummy_profile):
                pass
    except Exception as exc:
        if 'missing codec' not in str(exc):
            raise
    else:
        return 'ZSTD'
    return 'DEFLATE'
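A typical call site would fold the probed codec into a write profile, for example:

profile = dict(driver='GTiff', tiled=True, blockxsize=256, blockysize=256)
profile['compress'] = _prefered_compression_method()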
def _rasterize(self, geodataframe, value):
    """
    Converts buffered points into a rasterized array.
    :param geodataframe: frame whose first column holds the geometries to burn
    :param value: pixel value burned into each geometry
    :return: the rasterized numpy array
    """
    import numpy as np
    from rasterio.io import MemoryFile
    from rasterio import features
    transform = self.chm._affine
    # TODO may be re-usable for other features. Consider moving to gisexport
    # FIXME check for cell sizes that are not 1
    with MemoryFile() as memfile:
        with memfile.open(driver='GTiff',
                          width=self.chm.array.shape[1],
                          height=self.chm.array.shape[0],
                          count=1,  # band count; the source passed cell_size here
                          dtype=np.uint8,
                          nodata=0,
                          transform=transform) as out:
            # rasterize fills a plain array; the `out` dataset is left unused
            shapes = ((geom, value) for geom in geodataframe[0])
            burned = features.rasterize(shapes=shapes, fill=0,
                                        out_shape=(self.chm.array.shape[0],
                                                   self.chm.array.shape[1]),
                                        transform=transform)
    return burned
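Since features.rasterize fills a plain numpy array, the MemoryFile detour above is not strictly needed; a minimal standalone sketch (grid and geometry hypothetical):

from rasterio import features
from rasterio.transform import from_origin
from shapely.geometry import box

transform = from_origin(0, 100, 1, 1)  # hypothetical 1-unit grid
geoms = [box(10, 10, 40, 40)]          # hypothetical polygon
burned = features.rasterize(((g, 1) for g in geoms), out_shape=(100, 100),
                            fill=0, transform=transform)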
# Merge downloaded Landsat scenes into a JPEG-compressed RGB mosaic and
# upload it to S3 (`get_scene` downloads a scene and returns a local path).
import os
from concurrent import futures
import boto3
import numpy as np
import rasterio as rio
from rasterio.io import MemoryFile
from rasterio.merge import merge
from rasterio.warp import transform_bounds

def create(scenes, uuid, bucket, bands=[4, 3, 2]):
    """Create a mosaic of the given scenes and upload it to S3."""
    args = ((scene, bands) for scene in scenes)
    with futures.ThreadPoolExecutor(max_workers=10) as executor:
        # materialize the map result: it is iterated twice below
        allScenes = list(executor.map(get_scene, args))
    sources = [rio.open(x) for x in allScenes if x]
    dest, output_transform = merge(sources, nodata=0)
    for tmp in allScenes:
        if tmp:
            os.remove(tmp)
    with MemoryFile() as memfile:
        with memfile.open(driver='GTiff',
                          count=3, dtype=np.uint8, nodata=0,
                          height=dest.shape[1], width=dest.shape[2],
                          compress='JPEG',
                          crs='epsg:3857', transform=output_transform) as dataset:
            dataset.write(dest)
            wgs_bounds = transform_bounds(
                *[dataset.crs, 'epsg:4326'] +
                list(dataset.bounds), densify_pts=21)
        client = boto3.client('s3')
        response = client.put_object(
            ACL='public-read',
            Bucket=os.environ.get('OUTPUT_BUCKET'),
            Key=f'data/mosaic/{uuid}_mosaic.tif',
            Body=memfile)