def test_mleap_module_model_save_with_absolute_path_and_valid_sample_input_produces_mleap_flavor(
        spark_model_iris, model_path):
    model_path = os.path.abspath(model_path)
    mlflow_model = Model()
    mleap.save_model(spark_model=spark_model_iris.model,
                     path=model_path,
                     sample_input=spark_model_iris.spark_df,
                     mlflow_model=mlflow_model)
    assert mleap.FLAVOR_NAME in mlflow_model.flavors

    config_path = os.path.join(model_path, "MLmodel")
    assert os.path.exists(config_path)
    config = Model.load(config_path)
    assert mleap.FLAVOR_NAME in config.flavors
def test_model_save_load(self):
    with TempDir() as tmp:
        model_path = tmp.path("knn.pkl")
        with open(model_path, "wb") as f:
            pickle.dump(self._knn, f)
        path = tmp.path("knn")
        m = Model(run_id="test", artifact_path="testtest")
        pyfunc.save_model(dst_path=path,
                          data_path=model_path,
                          loader_module=os.path.basename(__file__)[:-3],
                          code_path=[__file__],
                          model=m)
        m2 = Model.load(os.path.join(path, "MLmodel"))
        print("m1", m.__dict__)
        print("m2", m2.__dict__)
        assert m.__dict__ == m2.__dict__
        assert pyfunc.FLAVOR_NAME in m2.flavors
        assert pyfunc.PY_VERSION in m2.flavors[pyfunc.FLAVOR_NAME]
        x = pyfunc.load_pyfunc(path)
        xpred = x.predict(self._X)
        np.testing.assert_array_equal(self._knn_predict, xpred)
def test_add_to_model_adds_specified_kwargs_to_mlmodel_configuration():
    custom_kwargs = {
        "key1": "value1",
        "key2": 20,
        "key3": range(10),
    }
    model_config = Model()
    mlflow.pyfunc.add_to_model(model=model_config,
                               loader_module=os.path.basename(__file__)[:-3],
                               data="data",
                               code="code",
                               env=None,
                               **custom_kwargs)
    assert mlflow.pyfunc.FLAVOR_NAME in model_config.flavors
    assert all([item in model_config.flavors[mlflow.pyfunc.FLAVOR_NAME] for item in custom_kwargs])
def test_load_model_with_differing_cloudpickle_version_at_micro_granularity_logs_warning(
        model_path):
    class TestModel(mlflow.pyfunc.PythonModel):
        def predict(self, context, model_input):
            return model_input

    mlflow.pyfunc.save_model(path=model_path, python_model=TestModel())
    saver_cloudpickle_version = "0.5.8"
    model_config_path = os.path.join(model_path, "MLmodel")
    model_config = Model.load(model_config_path)
    model_config.flavors[mlflow.pyfunc.FLAVOR_NAME][
        mlflow.pyfunc.model.CONFIG_KEY_CLOUDPICKLE_VERSION] = saver_cloudpickle_version
    model_config.save(model_config_path)

    log_messages = []

    def custom_warn(message_text, *args, **kwargs):
        log_messages.append(message_text % args % kwargs)

    loader_cloudpickle_version = "0.5.7"
    with mock.patch("mlflow.pyfunc._logger.warning") as warn_mock, \
            mock.patch("cloudpickle.__version__") as cloudpickle_version_mock:
        cloudpickle_version_mock.__str__ = lambda *args, **kwargs: loader_cloudpickle_version
        warn_mock.side_effect = custom_warn
        mlflow.pyfunc.load_pyfunc(model_uri=model_path)
def test_spark_module_model_save_with_relative_path_and_valid_sample_input_produces_mleap_flavor(
        spark_model_iris):
    with TempDir(chdr=True) as tmp:
        model_path = os.path.basename(tmp.path("model"))
        mlflow_model = Model()
        sparkm.save_model(spark_model=spark_model_iris.model,
                          path=model_path,
                          sample_input=spark_model_iris.spark_df,
                          mlflow_model=mlflow_model)
        assert mleap.FLAVOR_NAME in mlflow_model.flavors

        config_path = os.path.join(model_path, "MLmodel")
        assert os.path.exists(config_path)
        config = Model.load(config_path)
        assert mleap.FLAVOR_NAME in config.flavors
def save_model(xgb_model, path, conda_env=None, mlflow_model=Model()):
    """
    Save an XGBoost model to a path on the local file system.

    :param xgb_model: XGBoost model (an instance of `xgboost.Booster`_) to be saved.
                      Note that models that implement the `scikit-learn API`_ are not supported.
    :param path: Local path where the model is to be saved.
    :param conda_env: Either a dictionary representation of a Conda environment or the path to a
                      Conda environment yaml file. If provided, this describes the environment
                      this model should be run in. At minimum, it should specify the dependencies
                      contained in :func:`get_default_conda_env()`. If ``None``, the default
                      :func:`get_default_conda_env()` environment is added to the model.
                      The following is an *example* dictionary representation of a Conda
                      environment::

                        {
                            'name': 'mlflow-env',
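The snippet above is cut off inside the example Conda environment. A minimal usage sketch follows; the data, paths, and Conda environment contents are illustrative assumptions, and it assumes the function is exposed as mlflow.xgboost.save_model.

# Hypothetical usage of the XGBoost save_model shown above; values are illustrative.
import numpy as np
import xgboost as xgb
import mlflow.xgboost

# Tiny synthetic dataset, purely for demonstration.
X = np.random.rand(20, 3)
y = np.random.rand(20)
booster = xgb.train({"objective": "reg:squarederror"},
                    xgb.DMatrix(X, label=y), num_boost_round=5)

# Example Conda environment in the dictionary form the docstring describes.
conda_env = {
    "name": "mlflow-env",
    "channels": ["defaults"],
    "dependencies": ["python=3.7", "xgboost", {"pip": ["mlflow"]}],
}

mlflow.xgboost.save_model(xgb_model=booster, path="my_xgb_model", conda_env=conda_env)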
                      - ``models://``

                      For more information about supported URI schemes, see
                      `Referencing Artifacts `_.
    :param port: Local port.
    :param image: Name of the Docker image to be used.
    :param flavor: The name of the flavor of the model to use for local serving. If ``None``,
                   a flavor is automatically selected from the model's available flavors. If the
                   specified flavor is not present or not supported for deployment, an exception
                   is thrown.
    """
    model_path = _download_artifact_from_uri(model_uri)
    model_config_path = os.path.join(model_path, "MLmodel")
    model_config = Model.load(model_config_path)
    if flavor is None:
        flavor = _get_preferred_deployment_flavor(model_config)
    else:
        _validate_deployment_flavor(model_config, flavor)
    print("Using the {selected_flavor} flavor for local serving!".format(selected_flavor=flavor))
    deployment_config = _get_deployment_config(flavor_name=flavor)
    _logger.info("launching docker image with path %s", model_path)
    cmd = ["docker", "run", "-v", "{}:/opt/ml/model/".format(model_path), "-p", "%d:8080" % port]
    for key, value in deployment_config.items():
        cmd += ["-e", "{key}={value}".format(key=key, value=value)]
    cmd += ["--rm", image, "serve"]
    _logger.info('executing: %s', ' '.join(cmd))
    proc = Popen(cmd, stdout=sys.stdout, stderr=sys.stderr, universal_newlines=True)
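For reference, here is what the assembled ``cmd`` list above resolves to once the placeholders are filled in. The concrete port, image name, environment variable, and model path are made-up example values, not output from the real function.

# Illustrative expansion of the docker command built above.
cmd = [
    "docker", "run",
    "-v", "/tmp/downloaded-model:/opt/ml/model/",  # mount the downloaded model into the container
    "-p", "5001:8080",                             # map the chosen local port to the container's 8080
    "-e", "SOME_FLAVOR_SETTING=value",             # one "-e KEY=VALUE" pair per deployment_config item
    "--rm", "mlflow-serving-image", "serve",
]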
def save_model(onnx_model, path, conda_env=None, mlflow_model=Model()):
    """
    Save an ONNX model to a path on the local file system.

    :param onnx_model: ONNX model to be saved.
    :param path: Local path where the model is to be saved.
    :param conda_env: Either a dictionary representation of a Conda environment or the path to a
                      Conda environment yaml file. If provided, this describes the environment
                      this model should be run in. At minimum, it should specify the dependencies
                      contained in :func:`get_default_conda_env()`. If ``None``, the default
                      :func:`get_default_conda_env()` environment is added to the model.
                      The following is an *example* dictionary representation of a Conda
                      environment::

                        {
                            'name': 'mlflow-env',
                            'channels': ['defaults'],
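This snippet is also truncated inside its example Conda environment. A minimal usage sketch for the ONNX variant; the model file name and output path are illustrative, and it assumes the function is exposed as mlflow.onnx.save_model.

# Hypothetical usage of the ONNX save_model shown above; paths are illustrative.
import onnx
import mlflow.onnx

onnx_model = onnx.load("model.onnx")  # an existing ONNX model file (example path)
mlflow.onnx.save_model(onnx_model=onnx_model, path="my_onnx_model")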
def save_model(sk_model, path, conda_env=None, mlflow_model=Model(),
               serialization_format=SERIALIZATION_FORMAT_CLOUDPICKLE):
    """
    Save a scikit-learn model to a path on the local file system.

    :param sk_model: scikit-learn model to be saved.
    :param path: Local path where the model is to be saved.
    :param conda_env: Either a dictionary representation of a Conda environment or the path to a
                      Conda environment yaml file. If provided, this describes the environment
                      this model should be run in. At minimum, it should specify the dependencies
                      contained in :func:`get_default_conda_env()`. If ``None``, the default
                      :func:`get_default_conda_env()` environment is added to the model.
                      The following is an *example* dictionary representation of a Conda
                      environment::

                        {
                            'name': 'mlflow-env',
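A minimal usage sketch for the scikit-learn variant, including the serialization-format keyword shown in the signature; the model, data, and output path are illustrative, and it assumes the function and the SERIALIZATION_FORMAT_CLOUDPICKLE constant are exposed through mlflow.sklearn.

# Hypothetical usage of the scikit-learn save_model shown above; values are illustrative.
from sklearn.datasets import load_iris
from sklearn.neighbors import KNeighborsClassifier
import mlflow.sklearn

X, y = load_iris(return_X_y=True)
knn = KNeighborsClassifier().fit(X, y)

mlflow.sklearn.save_model(
    sk_model=knn,
    path="my_knn_model",
    serialization_format=mlflow.sklearn.SERIALIZATION_FORMAT_CLOUDPICKLE,
)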