def read(self,
         path: str,
         client_kwargs: Optional[Dict[str, Any]] = None,
         **kwargs) -> xr.Dataset:
    path_or_store = path
    consolidated = False
    if isinstance(path, str):
        # Resolve the path into either a local path or an object-store
        # mapping; get_path_or_obs_store() is defined elsewhere in the module.
        path_or_store, consolidated = get_path_or_obs_store(path_or_store,
                                                            client_kwargs,
                                                            mode='r')
        if 'max_cache_size' in kwargs:
            # Optionally wrap the store in an in-memory LRU cache.
            max_cache_size = kwargs.pop('max_cache_size')
            if max_cache_size > 0:
                path_or_store = zarr.LRUStoreCache(path_or_store,
                                                   max_size=max_cache_size)
    return xr.open_zarr(path_or_store, consolidated=consolidated, **kwargs)
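
# A minimal stand-alone sketch of what read() does for a remote path, using
# only public s3fs/zarr/xarray APIs. The bucket/key name and the anonymous
# access flag are assumptions for illustration, not part of the original.
import s3fs
import xarray as xr
import zarr

fs = s3fs.S3FileSystem(anon=True)                       # client_kwargs analogue
store = s3fs.S3Map(root='my-bucket/cube.zarr', s3=fs,   # path -> object store
                   check=False)
cached = zarr.LRUStoreCache(store, max_size=2 ** 28)    # max_cache_size analogue
ds = xr.open_zarr(cached, consolidated=True)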

# Second snippet; the original starts mid-docstring, so the method name and
# signature below are reconstructed from the docstring (the name is an
# assumption).
def _get_dataset_lazily(self, index: int, **zarr_kwargs) -> xr.Dataset:
    """Open the dataset for the pyramid level at the given *index*.

    :param index: the level index
    :param zarr_kwargs: kwargs passed to xr.open_zarr()
    :return: the dataset for the level at *index*.
    """
    ext, level_path = self._level_paths[index]
    if ext == ".link":
        # A ".link" file holds the (possibly relative) path of the actual
        # level store; read it in text mode.
        with self._obs_file_system.open(level_path, "r") as fp:
            level_path = fp.read()
        # If level_path is relative, resolve it against the parent
        # directory of the levels directory.
        if not os.path.isabs(level_path):
            base_dir = os.path.dirname(self._dir_path)
            level_path = os.path.join(base_dir, level_path)
    store = s3fs.S3Map(root=level_path, s3=self._obs_file_system, check=False)
    # Cache fetched chunks in memory, up to 256 MiB.
    cached_store = zarr.LRUStoreCache(store, max_size=2 ** 28)
    with measure_time(tag=f"opened remote dataset {level_path} for level {index}"):
        # Consolidated metadata is present iff ".zmetadata" exists.
        consolidated = self._obs_file_system.exists(f'{level_path}/.zmetadata')
        return assert_cube(xr.open_zarr(cached_store,
                                        consolidated=consolidated,
                                        **zarr_kwargs),
                           name=level_path)
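
# The "consolidated" probe above relies on zarr's consolidated-metadata
# convention: zarr.consolidate_metadata() writes a single ".zmetadata"
# document at the store root, so readers can fetch all array metadata in one
# request instead of many small object-store reads. A small sketch; the
# bucket and level-path names are assumptions for illustration.
import s3fs
import xarray as xr
import zarr

fs = s3fs.S3FileSystem()
root = 'my-bucket/cube.levels/0.zarr'                   # hypothetical level path
store = s3fs.S3Map(root=root, s3=fs, check=False)

zarr.consolidate_metadata(store)                        # writer side, run once

consolidated = fs.exists(f'{root}/.zmetadata')          # reader-side probe
ds = xr.open_zarr(store, consolidated=consolidated)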