Secure your code as it's written. Use Snyk Code to scan source code in minutes — no build needed — and fix issues immediately.
def _import_fastai_vision():
    """Lazily import and return the ``fastai.vision`` module.

    Raises:
        MissingDependencyException: if the fastai package is not installed.
    """
    try:
        from fastai import vision as fastai_vision
    except ImportError:
        raise MissingDependencyException(
            "fastai.vision package is required to use FastaiImageHandler"
        )
    return fastai_vision
def _load_tf_saved_model(path):
    """Load a TensorFlow SavedModel from ``path``.

    Uses the native TF2 loader when TensorFlow 2.x is installed; otherwise
    falls back to the ``tf.compat.v2`` loader and warns when the imported
    TF 1.x object is not directly callable.

    Args:
        path: filesystem path of the SavedModel directory.

    Returns:
        The loaded SavedModel object.

    Raises:
        MissingDependencyException: if tensorflow is not installed.
    """
    try:
        import tensorflow as tf
        from tensorflow.python.training.tracking.tracking import AutoTrackable

        TF2 = tf.__version__.startswith('2')
    except ImportError:
        raise MissingDependencyException(
            "Tensorflow package is required to use TfSavedModelArtifact"
        )
    if TF2:
        return tf.saved_model.load(path)
    else:
        loaded = tf.compat.v2.saved_model.load(path)
        if isinstance(loaded, AutoTrackable) and not hasattr(loaded, "__call__"):
            # BUG FIX: the original warning string was never closed (truncated
            # triple-quoted literal => syntax error). Restored as a closed,
            # well-formed message; exact original wording past the visible
            # prefix could not be recovered from this source.
            logger.warning(
                '''Importing SavedModels from TensorFlow 1.x.
`outputs = imported(inputs)` is not supported in bento service due to
tensorflow API.

Recommended usage:

```python
from tensorflow.python.saved_model import signature_constants

imported = tf.saved_model.load(path_to_v1_saved_model)
wrapped_function = imported.signatures[
    signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY]
wrapped_function(tf.ones([]))
```
'''
            )
        # BUG FIX: the TF1 branch previously fell off the end and returned
        # None; callers expect the loaded model object.
        return loaded
def _import_imageio_imread():
    """Return ``imageio.imread``, guarding against a missing imageio install.

    Raises:
        MissingDependencyException: if the imageio package is not installed.
    """
    try:
        from imageio import imread as imageio_imread
    except ImportError:
        raise MissingDependencyException(
            "imageio package is required to use FastaiImageHandler"
        )
    return imageio_imread
def __init__(self, spec, model):
    """Wrap a trained LightGBM Booster.

    Args:
        spec: artifact spec forwarded to the base wrapper.
        model: a ``lightgbm.Booster`` instance to wrap.

    Raises:
        MissingDependencyException: if lightgbm is not installed.
        InvalidArgument: if ``model`` is not a ``lightgbm.Booster``.
    """
    super(_LightGBMModelArtifactWrapper, self).__init__(spec)
    try:
        import lightgbm as lgb
    except ImportError:
        raise MissingDependencyException(
            "lightgbm package is required to use LightGBMModelArtifact"
        )
    if isinstance(model, lgb.Booster):
        self._model = model
    else:
        raise InvalidArgument(
            "Expect `model` argument to be a `lightgbm.Booster` instance"
        )
def load(self, path):
    """Deserialize a pickled PyTorch model from ``path`` and pack it.

    Args:
        path: base directory of the stored artifact.

    Returns:
        The result of ``self.pack(model)``.

    Raises:
        MissingDependencyException: if torch is not installed.
        InvalidArgument: if the unpickled object is not a ``torch.nn.Module``.
    """
    try:
        import torch
    except ImportError:
        raise MissingDependencyException(
            "torch package is required to use PytorchModelArtifact"
        )
    # BUG FIX: the original leaked the file handle
    # (`cloudpickle.load(open(...))`); close it deterministically.
    with open(self._file_path(path), 'rb') as f:
        model = cloudpickle.load(f)
    if not isinstance(model, torch.nn.Module):
        raise InvalidArgument(
            "Expecting PytorchModelArtifact loaded object type to be "
            "'torch.nn.Module' but actually it is {}".format(type(model))
        )
    return self.pack(model)
def _import_joblib_module():
    """Return a usable joblib module.

    Prefers the standalone ``joblib`` package; falls back to the copy
    vendored inside older scikit-learn releases.

    Raises:
        MissingDependencyException: if neither source of joblib is available.
    """
    joblib = None
    try:
        import joblib
    except ImportError:
        try:
            from sklearn.externals import joblib
        except ImportError:
            pass
    if joblib is None:
        raise MissingDependencyException(
            "sklearn module is required to use SklearnModelArtifact"
        )
    return joblib
def check_nodejs_compatible_version():
    """Verify npm and a sufficiently recent NodeJS (>= v8.10.0) are installed.

    Raises:
        MissingDependencyException: if npm or node is not found on PATH.
        ValueError: if the installed node version is older than v8.10.0.
    """
    from shutil import which

    if which('npm') is None:
        raise MissingDependencyException(
            'NPM is not installed. Please visit www.nodejs.org for instructions'
        )
    if which("node") is None:
        raise MissingDependencyException(
            "NodeJs is not installed, please visit www.nodejs.org for install "
            "instructions."
        )
    # `node -v` prints e.g. "v12.18.3\n"; packaging's parser accepts the
    # leading "v" prefix.
    raw_version = subprocess.check_output(["node", "-v"]).decode("utf-8").strip()
    if version.parse(raw_version) < version.parse('v8.10.0'):
        raise ValueError(
            "Incompatible Nodejs version, please install version v8.10.0 " "or greater"
        )
def bind_keras_backend_session(self):
    """Capture the Keras backend TF session and its graph on this artifact.

    Stores the session as ``self.sess`` and its graph as ``self.graph``.

    Raises:
        MissingDependencyException: if tensorflow is not installed.
    """
    try:
        import tensorflow as tf
    except ImportError:
        raise MissingDependencyException(
            "Tensorflow package is required to use KerasModelArtifact. BentoML "
            "currently only support using Keras with Tensorflow backend."
        )
    session = tf.compat.v1.keras.backend.get_session()
    self.sess = session
    self.graph = session.graph
def hook_loaded_model(cls, loaded_model):
    """Wrap every ``tf.function`` attribute of ``loaded_model`` with ``cls``.

    Each ``def_function.Function`` attribute found on the loaded model is
    replaced in place by ``cls(fn, fullargspec)``.

    Raises:
        MissingDependencyException: if tensorflow is not installed.
    """
    try:
        from tensorflow.python.util import tf_inspect
        from tensorflow.python.eager import def_function
    except ImportError:
        raise MissingDependencyException(
            "Tensorflow package is required to use TfSavedModelArtifact"
        )
    for attr_name in dir(loaded_model):
        attr = getattr(loaded_model, attr_name, None)
        if not isinstance(attr, def_function.Function):
            continue
        argspec = tf_inspect.getfullargspec(attr)
        setattr(loaded_model, attr_name, cls(attr, argspec))