How to use the bentoml.exceptions.InvalidArgument exception in bentoml

To help you get started, we’ve selected a few bentoml examples based on popular ways it is used in public projects.

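InvalidArgument is the exception BentoML raises when a caller passes a value that fails validation, for example a handler class, version string, or model object of the wrong type. A minimal sketch of raising and catching it yourself (the validate_batch_size helper below is hypothetical, not part of BentoML):

from bentoml.exceptions import InvalidArgument


def validate_batch_size(batch_size):
    # hypothetical helper, mirroring how BentoML rejects invalid inputs
    if not isinstance(batch_size, int) or batch_size <= 0:
        raise InvalidArgument(
            "batch_size must be a positive integer, got {!r}".format(batch_size)
        )
    return batch_size


try:
    validate_batch_size(0)
except InvalidArgument as e:
    print("rejected:", e)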

github bentoml / BentoML / tests / test_service.py
def test_invalid_api_handler():
    with pytest.raises(InvalidArgument) as e:

        class TestBentoService(bentoml.BentoService):  # pylint: disable=unused-variable
            @bentoml.api("Not A BentoHandler")
            def test(self):
                pass

    assert "must be class derived from bentoml.handlers.BentoHandler" in str(e.value)
github bentoml / BentoML / bentoml / yatai / deployment_utils.py
        # (snippet begins inside the AWS Lambda branch of the operator check)
        lambda_conf = spec_dict.get('aws_lambda_operator_config', {})
        for field in ['region', 'api_name', 'memory_size', 'timeout']:
            if lambda_conf.get(field):
                deployment_pb.spec.aws_lambda_operator_config.__setattr__(
                    field, lambda_conf.get(field)
                )
    elif deployment_pb.spec.operator == DeploymentSpec.KUBERNETES:
        k8s_config = spec_dict.get('kubernetes_operator_config', {})

        for field in ['kube_namespace', 'replicas', 'service_name', 'service_type']:
            if k8s_config.get(field):
                deployment_pb.spec.kubernetes_operator_config.__setattr__(
                    field, k8s_config.get(field)
                )
    else:
        raise InvalidArgument(
            'Platform "{}" is not supported in the current version of '
            'BentoML'.format(platform)
        )

    return deployment_pb
github bentoml / BentoML / bentoml / service.py
def _validate_version_str(version_str):
    """
    Validate that version str format is either a simple version string that:
        * Consist of only ALPHA / DIGIT / "-" / "." / "_"
        * Length between 1-128
    Or a valid semantic version https://github.com/semver/semver/blob/master/semver.md
    """
    regex = r"[A-Za-z0-9_.-]{1,128}\Z"
    semver_regex = r"^(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)(?:-(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$"  # noqa: E501
    if (
        re.match(regex, version_str) is None
        and re.match(semver_regex, version_str) is None
    ):
        raise InvalidArgument(
            'Invalid BentoService version: "{}", it can only consist of'
            ' ALPHA / DIGIT / "-" / "." / "_", and must be no more than'
            " 128 characters".format(version_str)
        )
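
A quick illustration of what the simple-version pattern accepts; the semver branch follows the specification linked in the docstring:

import re

simple_version_regex = r"[A-Za-z0-9_.-]{1,128}\Z"

print(re.match(simple_version_regex, "20191104_model-v2") is not None)  # True
print(re.match(simple_version_regex, "v 1.0") is not None)  # False: space not allowed
# "1.0.0-alpha+001" fails this pattern (because of the "+") but matches the semver
# pattern, so _validate_version_str accepts it without raising InvalidArgument
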
github bentoml / BentoML / bentoml / service.py
    >>>     @api(JsonHandler)
    >>>     def fraud_detect(self, parsed_json):
    >>>         # do something
    >>>
    >>>     @api(DataframeHandler, input_json_orient='records')
    >>>     def identity(self, df):
    >>>         # do something

    """

    DEFAULT_API_DOC = "BentoService API"

    from bentoml.handlers.base_handlers import BentoHandler

    if not (inspect.isclass(handler_cls) and issubclass(handler_cls, BentoHandler)):
        raise InvalidArgument(
            "BentoService @api decorator first parameter must "
            "be class derived from bentoml.handlers.BentoHandler"
        )

    def decorator(func):
        api_name = kwargs.pop("api_name", func.__name__)
        api_doc = kwargs.pop("api_doc", func.__doc__ or DEFAULT_API_DOC).strip()

        handler = handler_cls(
            *args, **kwargs
        )  # create handler instance and attach to api method

        setattr(func, "_is_api", True)
        setattr(func, "_handler", handler)
        if not isidentifier(api_name):
            raise InvalidArgument(
github bentoml / BentoML / bentoml / artifact / keras_model_artifact.py
            )  # snippet begins partway through KerasModelArtifact#pack
        else:
            model = data
            custom_objects = self.custom_objects

        if not isinstance(model, tf.keras.models.Model):
            error_msg = (
                "KerasModelArtifact#pack expects model argument to be type: "
                "keras.engine.network.Network, tf.keras.models.Model, or their "
                "aliases, instead got type: {}".format(type(model))
            )
            try:
                import keras

                if not isinstance(model, keras.engine.network.Network):
                    raise InvalidArgument(error_msg)
                else:
                    self._keras_module_name = keras.__name__
            except ImportError:
                raise InvalidArgument(error_msg)

        self.bind_keras_backend_session()
        model._make_predict_function()
        return _KerasModelArtifactWrapper(self, model, custom_objects)
github bentoml / BentoML / bentoml / artifact / pytorch_model_artifact.py
    def __init__(self, spec, model):
        super(_PytorchModelArtifactWrapper, self).__init__(spec)

        try:
            import torch
        except ImportError:
            raise MissingDependencyException(
                "torch package is required to use PytorchModelArtifact"
            )

        if not isinstance(model, torch.nn.Module):
            raise InvalidArgument(
                "PytorchModelArtifact can only pack type 'torch.nn.Module'"
            )

        self._model = model
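
A sketch of a call that satisfies this check; the public class name and import path are assumptions based on the pre-1.0 artifact API shown in these snippets:

import torch.nn as nn
from bentoml.artifact import PytorchModelArtifact  # path assumed for BentoML 0.x


class TinyNet(nn.Module):
    def forward(self, x):
        return x * 2


artifact = PytorchModelArtifact("model")
artifact.pack(TinyNet())        # a torch.nn.Module: accepted
# artifact.pack("not a model")  # anything else raises InvalidArgument
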
github bentoml / BentoML / bentoml / yatai / yatai_service_impl.py
    def ApplyDeployment(self, request, context=None):
        try:
            # apply default namespace if not set
            request.deployment.namespace = (
                request.deployment.namespace or self.default_namespace
            )

            validation_errors = validate_deployment_pb_schema(request.deployment)
            if validation_errors:
                raise InvalidArgument(
                    'Failed to validate deployment. {errors}'.format(
                        errors=validation_errors
                    )
                )

            previous_deployment = self.deployment_store.get(
                request.deployment.name, request.deployment.namespace
            )
            if previous_deployment:
                # check deployment platform
                raise YataiServiceRpcAborted(
                    'Deployment update is not supported in current version of BentoML'
                )
            else:
                request.deployment.created_at.GetCurrentTime()
github bentoml / BentoML / bentoml / handlers / base_handlers.py
def get_output_str(result, output_format, output_orient="records"):
    if output_format == "str":
        return str(result)
    elif output_format == "json":
        if isinstance(result, pd.DataFrame):
            return result.to_json(orient=output_orient)
        elif isinstance(result, np.ndarray):
            return json.dumps(result.tolist())
        else:
            try:
                return json.dumps(result)
            except (TypeError, OverflowError):
                # when result is not JSON serializable
                return json.dumps(str(result))
    else:
        raise InvalidArgument("Output format {} is not supported".format(output_format))
github bentoml / BentoML / bentoml / artifact / fastai_model_artifact.py
    def pack(self, model):  # pylint:disable=arguments-differ
        fastai_module = _import_fastai_module()

        if not isinstance(model, fastai_module.basic_train.Learner):
            raise InvalidArgument(
                "Expect `model` argument to be `fastai.basic_train.Learner` instance"
            )

        return _FastaiModelArtifactWrapper(self, model)