Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def check_dataset_metadata_in_storage_unit(nco, dataset_dir):
    """Verify the dataset document embedded in a NetCDF storage unit.

    Reads the single 'dataset' metadata record out of *nco*, parses it as
    YAML, and checks its sole lineage source matches the on-disk
    ``agdc-metadata.yaml`` in *dataset_dir*.
    """
    assert len(nco.variables['dataset']) == 1  # exactly 1 time slice expected
    raw = nco.variables['dataset'][0]
    if not isinstance(raw, str):
        # Older NetCDF layouts store the document as a char array.
        raw = netCDF4.chartostring(raw)
        raw = str(np.char.decode(raw))
    stored = yaml.safe_load(raw)

    [(_, original)] = read_documents(dataset_dir / 'agdc-metadata.yaml')

    assert len(stored['lineage']['source_datasets']) == 1
    assert next(iter(stored['lineage']['source_datasets'].values())) == original
# NOTE(review): fragment of a `datacube dataset search`/`dataset info` CLI test.
# The enclosing function header (which binds clirunner, ds, ds_bad1, index,
# gen_dataset_test_dag, SimpleDocNav) is outside this chunk — do not assume
# their exact types.
r = clirunner(['dataset', 'search'], expect_success=True)
# Indexed dataset and its (transitive) sources appear in search output;
# the bad dataset does not.
assert ds.id in r.output
assert ds_bad1.id not in r.output
assert ds.sources['ab'].id in r.output
assert ds.sources['ac'].sources['cd'].id in r.output
r = clirunner(['dataset', 'info', '-f', 'csv', ds.id])
assert ds.id in r.output
# --show-sources lists upstream datasets; --show-derived lists downstream ones.
r = clirunner(['dataset', 'info', '-f', 'yaml', '--show-sources', ds.id])
assert ds.sources['ae'].id in r.output
r = clirunner(['dataset', 'info', '-f', 'yaml', '--show-derived', ds.sources['ae'].id])
assert ds.id in r.output
# Regenerating the test DAG with force_tree gives the same root id.
ds_ = SimpleDocNav(gen_dataset_test_dag(1, force_tree=True))
assert ds_.id == ds.id
# Fetching with include_sources reconstructs the same lineage ids.
x = index.datasets.get(ds.id, include_sources=True)
assert str(x.sources['ab'].id) == ds.sources['ab'].id
assert str(x.sources['ac'].sources['cd'].id) == ds.sources['ac'].sources['cd'].id
# NOTE(review): fragment driving a battery of `dataset add` edge-case checks;
# the check_* helpers and the bindings for clirunner/index/p are defined
# elsewhere in the file.
check_skip_lineage_test(clirunner, index)
check_no_product_match(clirunner, index)
check_with_existing_lineage(clirunner, index)
check_inconsistent_lineage(clirunner, index)
check_missing_metadata_doc(clirunner)
check_missing_lineage(clirunner, index)
check_no_confirm(clirunner, p.datasets)
check_bad_yaml(clirunner, index)
# TODO: check --product=nosuchproduct
def reproject_point(pos):
    """Reproject a ``{'lon', 'lat'}`` point (EPSG:4326) into the target CRS.

    Returns a ``{'x', 'y'}`` dict of projected coordinates.
    NOTE(review): ``crs`` is a free variable from the enclosing scope —
    this chunk does not show where it is bound.
    """
    src = point(pos['lon'], pos['lat'], CRS('EPSG:4326'))
    x, y = src.to_crs(crs).coords[0]
    return {'x': x, 'y': y}
def check_open_with_api(index, time_slices):
    """Load ls5_nbar_albers through the Datacube API, eagerly and via dask.

    Groups observations by time, loads a fixed 200x200 EPSG:28355 GeoBox,
    and asserts the dask-chunked load matches the eager load exactly.
    """
    with rasterio.Env():
        from datacube import Datacube
        dc = Datacube(index=index)

        input_type_name = 'ls5_nbar_albers'
        input_type = dc.index.products.get_by_name(input_type_name)

        geobox = geometry.GeoBox(
            200, 200,
            Affine(25, 0.0, 638000, 0.0, -25, 6276000),
            geometry.CRS('EPSG:28355'))
        observations = dc.find_datasets(product='ls5_nbar_albers',
                                        geopolygon=geobox.extent)
        sources = dc.group_datasets(observations, query_group_by('time'))

        measurements = input_type.measurements.values()
        data = dc.load_data(sources, geobox, measurements)
        assert data.blue.shape == (time_slices, 200, 200)

        # Same request, lazily through dask chunks: shape and values must agree.
        chunk_profile = {'time': 1, 'x': 100, 'y': 100}
        lazy_data = dc.load_data(sources, geobox, measurements,
                                 dask_chunks=chunk_profile)
        assert lazy_data.blue.shape == (time_slices, 200, 200)
        assert (lazy_data.blue.load() == data.blue).all()
def sample_geometry():
    """Return the GeoJSON dict for a fixed 40x40 EPSG:3577 GeoBox extent."""
    gb = geometry.GeoBox(40, 40,
                         Affine(2500, 0.0, 1200000.0, 0.0, -2500, -4300000.0),
                         geometry.CRS('EPSG:3577'))
    # Fixed: the original bound the result to a local named `json`, shadowing
    # the stdlib module name; return directly instead.
    return gb.extent.json
AUTHORITY["EPSG","9122"]],
AUTHORITY["EPSG","4283"]],
UNIT["metre",1,
AUTHORITY["EPSG","9001"]],
PROJECTION["Albers_Conic_Equal_Area"],
PARAMETER["standard_parallel_1",-18],
PARAMETER["standard_parallel_2",-36],
PARAMETER["latitude_of_center",0],
PARAMETER["longitude_of_center",132],
PARAMETER["false_easting",0],
PARAMETER["false_northing",0],
AUTHORITY["EPSG","3577"],
AXIS["Easting",EAST],
AXIS["Northing",NORTH]]""")
# Sinusoidal projection on a user-defined sphere (radius 6371007.181 m),
# built from a WKT string. The WKT literal is runtime data — keep byte-exact.
SINIS_PROJ = geometry.CRS("""PROJCS["Sinusoidal",
GEOGCS["GCS_Undefined",
DATUM["Undefined",
SPHEROID["User_Defined_Spheroid",6371007.181,0.0]],
PRIMEM["Greenwich",0.0],
UNIT["Degree",0.0174532925199433]],
PROJECTION["Sinusoidal"],
PARAMETER["False_Easting",0.0],
PARAMETER["False_Northing",0.0],
PARAMETER["Central_Meridian",0.0],
UNIT["Meter",1.0]]""")
LCC2_PROJ = geometry.CRS("""PROJCS["unnamed",
GEOGCS["WGS 84",
DATUM["unknown",
SPHEROID["WGS84",6378137,6556752.3141]],
PRIMEM["Greenwich",0],
def test_australian_albers_comparison(self):
    # A GDA94 Australian Albers CRS built from an ESRI-style WKT string must
    # compare equal to the module-level `epsg3577` CRS and unequal to
    # `epsg4326` (both bound elsewhere in the file).
    # NOTE(review): the WKT literal is runtime data — byte-exact.
    a = geometry.CRS("""PROJCS["GDA94_Australian_Albers",GEOGCS["GCS_GDA_1994",
DATUM["Geocentric_Datum_of_Australia_1994",SPHEROID["GRS_1980",6378137,298.257222101]],
PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]],
PROJECTION["Albers_Conic_Equal_Area"],
PARAMETER["standard_parallel_1",-18],
PARAMETER["standard_parallel_2",-36],
PARAMETER["latitude_of_center",0],
PARAMETER["longitude_of_center",132],
PARAMETER["false_easting",0],
PARAMETER["false_northing",0],
UNIT["Meter",1]]""")
    b = epsg3577
    assert a == b
    assert a != epsg4326
def test_parse_date_ranges():
    """A bare date/month expression expands to an inclusive UTC time Range."""
    # A single day expands to [00:00:00, 23:59:59.999999].
    eighth_march_2014 = {
        'time': Range(datetime(2014, 3, 8, tzinfo=tzutc()), datetime(2014, 3, 8, 23, 59, 59, 999999, tzinfo=tzutc()))
    }
    assert parse_expressions('time in 2014-03-08') == eighth_march_2014
    # Non-zero-padded day is accepted too.
    assert parse_expressions('time in 2014-03-8') == eighth_march_2014
    # A month expands to its full span of days.
    march_2014 = {
        'time': Range(datetime(2014, 3, 1, tzinfo=tzutc()), datetime(2014, 3, 31, 23, 59, 59, 999999, tzinfo=tzutc()))
    }
    assert parse_expressions('time in 2014-03') == march_2014
    assert parse_expressions('time in 2014-3') == march_2014
    # 2014 is NOT a leap year, so February has 28 days.
    feb_2014 = {
        'time': Range(datetime(2014, 2, 1, tzinfo=tzutc()), datetime(2014, 2, 28, 23, 59, 59, 999999, tzinfo=tzutc()))
    }
    assert parse_expressions('time in 2014-02') == feb_2014
    assert parse_expressions('time in 2014-2') == feb_2014
def test_search_dataset_ranges(index: Index, pseudo_ls8_dataset: Dataset) -> None:
    """Range search matches only when BOTH lat and time ranges overlap the dataset."""
    # In the lat bounds.
    datasets = index.datasets.search_eager(
        lat=Range(-30.5, -29.5),
        time=Range(
            datetime.datetime(2014, 7, 26, 23, 0, 0),
            datetime.datetime(2014, 7, 26, 23, 59, 0)
        )
    )
    assert len(datasets) == 1
    assert datasets[0].id == pseudo_ls8_dataset.id
    # Out of the lat bounds.
    datasets = index.datasets.search_eager(
        lat=Range(28, 32),
        time=Range(
            datetime.datetime(2014, 7, 26, 23, 48, 0),
            datetime.datetime(2014, 7, 26, 23, 50, 0)
        )
    )
    assert len(datasets) == 0
    # Out of the time bounds
    datasets = index.datasets.search_eager(
        lat=Range(-30.5, -29.5),
        time=Range(
            datetime.datetime(2014, 7, 26, 21, 48, 0),
            datetime.datetime(2014, 7, 26, 21, 50, 0)
        )
    )
    assert len(datasets) == 0
# NOTE(review): fragment of a product-search test; the enclosing function
# header (binding index and ls5_telem_type) is outside this chunk, and the
# "Or expression" section after the last comment is cut off.
# No arguments, return all.
res = list(index.products.search())
assert res == [ls5_telem_type]
# Matching fields
res = list(index.products.search(
    product_type='satellite_telemetry_data',
    product='ls5_telem_test'
))
assert res == [ls5_telem_type]
# Matching fields and non-available fields
res = list(index.products.search(
    product_type='satellite_telemetry_data',
    product='ls5_telem_test',
    lat=Range(142.015625, 142.015625),
    lon=Range(-12.046875, -12.046875)
))
assert res == []
# Matching fields and available fields; search_robust also reports which
# query fields were actually usable for the matched product.
[(res, q)] = list(index.products.search_robust(
    product_type='satellite_telemetry_data',
    product='ls5_telem_test',
    sat_path=Range(142.015625, 142.015625),
    sat_row=Range(-12.046875, -12.046875)
))
assert res == ls5_telem_type
assert 'sat_path' in q
assert 'sat_row' in q
# Or expression test