try:
py_major, py_minor, _ = bento_service_metadata.env.python_version.split('.')
if py_major != '3':
raise BentoMLException(
'Python 2 is not supported for Lambda Deployment'
)
python_runtime = 'python{}.{}'.format(py_major, py_minor)
artifact_types = [
item.artifact_type for item in bento_service_metadata.artifacts
]
if any(
i in ['TensorflowSavedModelArtifact', 'KerasModelArtifact']
for i in artifact_types
) and (py_major, py_minor) != ('3', '6'):
raise BentoMLException(
'For TensorFlow and Keras models, only python3.6 is '
'supported for AWS Lambda deployment'
)
api_names = (
[lambda_deployment_config.api_name]
if lambda_deployment_config.api_name
else [api.name for api in bento_service_metadata.apis]
)
raise_if_api_names_not_found_in_bento_service_metadata(
bento_service_metadata, api_names
)
create_s3_bucket_if_not_exists(
lambda_s3_bucket, lambda_deployment_config.region
)
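# A minimal sketch (not the BentoML implementation) of what a helper like
# create_s3_bucket_if_not_exists could look like using boto3; the logic and
# error handling below are assumptions for illustration only.
import boto3
from botocore.exceptions import ClientError


def create_s3_bucket_if_not_exists_sketch(bucket_name, region):
    s3_client = boto3.client('s3', region_name=region)
    try:
        # head_bucket raises ClientError when the bucket is missing or inaccessible
        s3_client.head_bucket(Bucket=bucket_name)
    except ClientError:
        if region == 'us-east-1':
            # us-east-1 rejects an explicit LocationConstraint
            s3_client.create_bucket(Bucket=bucket_name)
        else:
            s3_client.create_bucket(
                Bucket=bucket_name,
                CreateBucketConfiguration={'LocationConstraint': region},
            )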
serverless_outputs = serverless_response.strip().split("\n")
# Parse the serverless response crudely. The current serverless
# response format is:
#   ServerlessError|Error -----{dashes padded to a 56-character line}
#   <empty line>
#   <error message>
#   <empty line>
# Find the index of the "Serverless Error"/"Error -----" marker line and
# raise an exception based on the message two lines below it.
error_message = ''
for index, message in enumerate(serverless_outputs):
if 'Serverless Error' in message or 'Error -----' in message:
error_message += serverless_outputs[index + 2]
if error_message:
raise BentoMLException(error_message)
return serverless_outputs
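# Hedged illustration of the parsing above, using a made-up serverless CLI
# response shaped like the documented format; the error text sits two lines
# below the "Serverless Error"/"Error -----" marker.
sample_response = (
    "  Serverless Error ----------------------------------------\n"
    "\n"
    "  The specified bucket does not exist\n"
)
sample_lines = sample_response.strip().split("\n")
for idx, line in enumerate(sample_lines):
    if 'Serverless Error' in line or 'Error -----' in line:
        print(sample_lines[idx + 2].strip())  # The specified bucket does not exist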
def install_serverless_package():
""" Install serverless npm package to BentoML home directory
We are using serverless framework for deployment, instead of using user's own
serverless framework, we will install a specific one just for BentoML.
It will be installed in BentoML home directory.
"""
check_nodejs_compatible_version()
install_command = ['npm', 'install', 'serverless@{}'.format(SERVERLESS_VERSION)]
try:
subprocess.check_call(
install_command, cwd=BENTOML_HOME, stdout=PIPE, stderr=PIPE
)
except subprocess.CalledProcessError as error:
raise BentoMLException(error.output)
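# Assumed follow-up sketch: a local npm install like the one above typically
# places the CLI under <BENTOML_HOME>/node_modules/.bin. How BentoML actually
# invokes it is not shown in this snippet; this helper is illustrative only.
import os
import subprocess


def call_local_serverless(args, bentoml_home):
    serverless_bin = os.path.join(bentoml_home, 'node_modules', '.bin', 'serverless')
    return subprocess.check_output([serverless_bin] + list(args), cwd=bentoml_home)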
>> 'a-bc' # trim based on max_length of each part
"""
trimmed_items = [
item[0][: item[1]] if isinstance(item, tuple) else item for item in items
]
items = [item[0] if isinstance(item, tuple) else item for item in items]
for i in range(len(trimmed_items)):
if len('-'.join(items)) <= max_length:
break
else:
items[i] = trimmed_items[i]
name = '-'.join(items)
if len(name) > max_length:
raise BentoMLException(
'AWS resource name {} exceeds maximum length of {}'.format(name, max_length)
)
invalid_chars = re.compile("[^a-zA-Z0-9-]|_")
name = re.sub(invalid_chars, "-", name)
return name
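# Hedged walk-through of the trimming loop above with concrete values (the
# helper's public name is not visible in this snippet, so the same steps are
# re-run inline purely for illustration).
demo_items = [('abcd', 1), ('bc', 2), 'xyz']
demo_max_length = 8
demo_trimmed = [i[0][: i[1]] if isinstance(i, tuple) else i for i in demo_items]
demo_items = [i[0] if isinstance(i, tuple) else i for i in demo_items]
for idx in range(len(demo_trimmed)):
    if len('-'.join(demo_items)) <= demo_max_length:
        break
    demo_items[idx] = demo_trimmed[idx]
print('-'.join(demo_items))  # 'a-bc-xyz' -> 8 characters, fits demo_max_length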
def api_func_wrapper():
image_paths = []
if not config('logging').getboolean('disable_logging_image'):
image_paths = self.log_image(request, request_id)
# _request_to_json parses the request as JSON; if parsing fails, it raises
# a 400 exception. (4xx errors take precedence over 5xx.)
request_for_log = _request_to_json(request)
# handle_request may raise 4xx or 5xx exception.
try:
response = api.handle_request(request)
except BentoMLException as e:
self.log_exception(sys.exc_info())
if 400 <= e.status_code < 500 and e.status_code not in (401, 403):
response = make_response(
jsonify(
message="BentoService error handling API request: %s"
% str(e)
),
e.status_code,
)
else:
response = make_response('', e.status_code)
except Exception:
# For all unexpected errors, return 500 by default. For example,
# if the user's model raises a division-by-zero error.
self.log_exception(sys.exc_info())
def apply(self, deployment_pb, yatai_service, prev_deployment=None):
try:
ensure_docker_available_or_raise()
deployment_spec = deployment_pb.spec
bento_pb = yatai_service.GetBento(
GetBentoRequest(
bento_name=deployment_spec.bento_name,
bento_version=deployment_spec.bento_version,
)
)
if bento_pb.bento.uri.type not in (BentoUri.LOCAL, BentoUri.S3):
raise BentoMLException(
'BentoML currently does not support {} repository'.format(
bento_pb.bento.uri.type
)
)
return self._apply(
deployment_pb, bento_pb, yatai_service, bento_pb.bento.uri.uri
)
except BentoMLException as error:
return ApplyDeploymentResponse(status=error.status_proto)
def ensure_docker_available_or_raise():
try:
subprocess.check_output(['docker', 'info'])
except subprocess.CalledProcessError as error:
raise BentoMLException(
'Error executing docker command: {}'.format(error.output.decode())
)
except FileNotFoundError:
raise MissingDependencyException(
'Docker is required for this deployment. Please visit '
'www.docker.com for instructions'
)
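# Hedged usage sketch: callers would run the check above as a pre-flight step
# so a missing Docker install fails fast with a clear BentoML error.
if __name__ == '__main__':
    ensure_docker_available_or_raise()
    print('Docker is available; proceeding with deployment image build.')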
validation_errors = validate_deployment_pb_schema(deployment_pb)
if validation_errors:
return ApplyDeploymentResponse(
status=Status.INVALID_ARGUMENT(
'Failed to validate deployment: {errors}'.format(
errors=validation_errors
)
)
)
return yatai_service.ApplyDeployment(
ApplyDeploymentRequest(deployment=deployment_pb)
)
except BentoMLException as error:
return ApplyDeploymentResponse(status=Status.INTERNAL(str(error)))
path (str): Destination directory where the BentoService will be saved
"""
track_save(bento_service)
from bentoml.service import BentoService
if not isinstance(bento_service, BentoService):
raise BentoMLException(
"save_to_dir only work with instance of custom BentoService class"
)
if version is not None:
bento_service.set_version(version)
if not os.path.exists(path):
raise BentoMLException("Directory '{}' not found".format(path))
for artifact in bento_service._artifacts:
if artifact.name not in bento_service._packed_artifacts:
logger.warning(
"Missing declared artifact '%s' for BentoService '%s'",
artifact.name,
bento_service.name,
)
module_base_path = os.path.join(path, bento_service.name)
os.mkdir(module_base_path)
# write README.md with the custom BentoService's docstring, if present
saved_bundle_readme = DEFAULT_SAVED_BUNDLE_README.format(
bento_service.name, bento_service.version
)