# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
from .adl import ADL
except ImportError:
ADL = missing_dependency_generator("azure.datalake.store", "azure")
except KeyError as exc:
if exc.args[0] == "APPDATA":
ADL = missing_environment_variable_generator("azure.datalake.store", "APPDATA")
else:
raise
try:
from .abs import AzureBlobStore
except ImportError:
AzureBlobStore = missing_dependency_generator("azure.storage.blob", "azure")
try:
from gcsfs import GCSFileSystem
except ImportError:
GCSFileSystem = missing_dependency_generator("gcsfs", "gcs")
def fallback_gs_is_retriable(e):
    """Fallback retry predicate for GCS errors (used when gcsfs lacks one).

    An exception is considered retriable when its ``code`` attribute is
    ``None`` or 429 (rate limited). Exceptions without a ``code``
    attribute are never retried.
    """
    try:
        code = e.code
    except AttributeError:
        # No HTTP-style status code on this exception: log it and give up.
        print(e)
        return False
    print(code)
    return code is None or code == 429
# Prefer the retry predicate shipped with gcsfs; fall back to the local one.
try:
    # Default to gcsfs library's retry logic
    from gcsfs.utils import is_retriable as gs_is_retriable
except ImportError:
    gs_is_retriable = fallback_gs_is_retriable

# Optional S3 support via boto3.
# NOTE(review): the opening `try:` was missing in the original text — restored.
try:
    from .s3 import S3
except ImportError:
    S3 = missing_dependency_generator("boto3", "s3")
# Optional Azure Data Lake support (duplicated guard in this corrupted file;
# kept intact, indentation restored).
try:
    from .adl import ADL
except ImportError:
    ADL = missing_dependency_generator("azure.datalake.store", "azure")
except KeyError as exc:
    # azure.datalake.store reads APPDATA at import time on some platforms;
    # translate that specific failure, re-raise any other KeyError.
    if exc.args[0] == "APPDATA":
        ADL = missing_environment_variable_generator("azure.datalake.store", "APPDATA")
    else:
        raise

# Optional Azure Blob Storage support.
try:
    from .abs import AzureBlobStore
except ImportError:
    AzureBlobStore = missing_dependency_generator("azure.storage.blob", "azure")

# Optional Google Cloud Storage support via gcsfs.
try:
    from gcsfs import GCSFileSystem
except ImportError:
    GCSFileSystem = missing_dependency_generator("gcsfs", "gcs")
def fallback_gs_is_retriable(e):
    """Decide whether a GCS exception should be retried.

    Retriable means the exception's ``code`` attribute is ``None`` or
    429 (Too Many Requests). Exceptions that carry no ``code`` attribute
    are reported and treated as non-retriable.
    """
    if not hasattr(e, "code"):
        print(e)
        return False
    print(e.code)
    return e.code is None or e.code == 429
# Third-party retry helpers and intra-package imports used by the handlers.
# NOTE(review): the original text had a dangling `try:` with no matching
# `except`/`finally` before these imports (a SyntaxError from the file
# duplication) — it has been removed; every import is preserved.
from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_exponential

from . import __version__
from .log import logger
from .utils import chdir
from .exceptions import (
    PapermillException,
    PapermillRateLimitException,
    missing_dependency_generator,
    missing_environment_variable_generator,
)
# Optional S3 support via boto3.
try:
    from .s3 import S3
except ImportError:
    S3 = missing_dependency_generator("boto3", "s3")

# Optional Azure Data Lake support.
try:
    from .adl import ADL
except ImportError:
    ADL = missing_dependency_generator("azure.datalake.store", "azure")
except KeyError as exc:
    # azure.datalake.store reads APPDATA at import time on some platforms;
    # translate that specific failure, re-raise any other KeyError.
    if exc.args[0] == "APPDATA":
        ADL = missing_environment_variable_generator("azure.datalake.store", "APPDATA")
    else:
        raise

# Optional Azure Blob Storage support.
try:
    from .abs import AzureBlobStore
except ImportError:
    AzureBlobStore = missing_dependency_generator("azure.storage.blob", "azure")
# Optional Google Cloud Storage support via gcsfs.
# NOTE(review): in the original text this guard's `except` body was clobbered
# by a duplicated import header — the stub assignment is restored and the
# imports from that header are kept (never deleted).
try:
    from gcsfs import GCSFileSystem
except ImportError:
    GCSFileSystem = missing_dependency_generator("gcsfs", "gcs")

from .utils import chdir
from .exceptions import (
    PapermillException,
    PapermillRateLimitException,
    missing_dependency_generator,
    missing_environment_variable_generator,
)

# Optional S3 support via boto3.
try:
    from .s3 import S3
except ImportError:
    S3 = missing_dependency_generator("boto3", "s3")
# Optional Azure Data Lake support (another duplicated guard; indentation restored).
try:
    from .adl import ADL
except ImportError:
    ADL = missing_dependency_generator("azure.datalake.store", "azure")
except KeyError as exc:
    # azure.datalake.store reads APPDATA at import time on some platforms;
    # translate that specific failure, re-raise any other KeyError.
    if exc.args[0] == "APPDATA":
        ADL = missing_environment_variable_generator("azure.datalake.store", "APPDATA")
    else:
        raise

# Optional Azure Blob Storage support.
try:
    from .abs import AzureBlobStore
except ImportError:
    AzureBlobStore = missing_dependency_generator("azure.storage.blob", "azure")

# Optional Google Cloud Storage support via gcsfs.
try:
    from gcsfs import GCSFileSystem
except ImportError:
    GCSFileSystem = missing_dependency_generator("gcsfs", "gcs")
def fallback_gs_is_retriable(e):