# Validator for the CoreML one-hot-encoder converter: the node must carry
# a oneHotEncoder payload.
def validate(cm_node):
    try:
        utils._check_has_attr(cm_node, 'oneHotEncoder')
    except AttributeError as e:
        raise RuntimeError('Missing type from CoreML node: ' + str(e))
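
# All of the validators in this excerpt funnel through utils._check_has_attr.
# Its body is not part of the excerpt; a minimal sketch consistent with how it
# is called here (hypothetical -- the real helper may differ) would be:
def _check_has_attr(node, attribute_name):
    # Raise AttributeError so the callers above can re-raise a RuntimeError.
    if not hasattr(node, attribute_name):
        raise AttributeError('Missing attribute {0!r}'.format(attribute_name))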

# Validator for a scikit-learn classifier node: a fitted estimator exposes its
# class labels through classes_.
def validate(sk_node):
    try:
        utils._check_has_attr(sk_node, 'classes_')
    except AttributeError as e:
        raise RuntimeError('Missing type from sklearn node: ' + str(e))

# Validator for the CoreML support-vector-classifier converter: beyond the
# top-level payload, every field the converter reads must be present.
def validate(cm_node):
    try:
        utils._check_has_attr(cm_node, 'supportVectorClassifier')
        utils._check_has_attr(cm_node.supportVectorClassifier, 'kernel')
        utils._check_has_attr(cm_node.supportVectorClassifier,
                              'numberOfSupportVectorsPerClass')
        utils._check_has_attr(cm_node.supportVectorClassifier, 'coefficients')
        utils._check_has_attr(cm_node.supportVectorClassifier.coefficients[0], 'alpha')
        utils._check_has_attr(cm_node.supportVectorClassifier, 'rho')
    except AttributeError as e:
        raise RuntimeError('Missing type from CoreML node: ' + str(e))

# Validator for the CoreML padding layer: failures here report the layer's
# name rather than the missing attribute.
def validate(cm_node):
    try:
        utils._check_has_attr(cm_node, 'padding')
        utils._check_has_attr(cm_node, 'input')
        utils._check_has_attr(cm_node, 'output')
    except AttributeError:
        raise RuntimeError('Missing attribute in neural network layer: {0}'.format(cm_node.name))

# Converter for a scikit-learn SVC: builds an SVMClassifier node, attaches the
# optional Platt-scaling coefficients, and wires up the label and probability
# outputs.
def convert(context, sk_node, inputs):
    classes = sk_node.classes_
    nb = SVMConverter.convert(context, sk_node, inputs, 'SVMClassifier', len(classes))
    if len(sk_node.probA_) > 0:
        nb.add_attribute('prob_a', sk_node.probA_)
    if len(sk_node.probB_) > 0:
        nb.add_attribute('prob_b', sk_node.probB_)
    nb.add_attribute('vectors_per_class', sk_node.n_support_)
    if utils.is_numeric_type(classes):
        class_labels = utils.cast_list(int, classes)
        nb.add_attribute('classlabels_ints', class_labels)
        output_type = onnx_proto.TensorProto.INT64
    elif utils.is_string_type(classes):
        class_labels = utils.cast_list(str, classes)
        nb.add_attribute('classlabels_strings', class_labels)
        output_type = onnx_proto.TensorProto.STRING
    else:
        # str() is required: concatenating a numpy dtype object raises TypeError.
        raise RuntimeError('Invalid class type: ' + str(classes.dtype))
    nb.add_attribute('post_transform', 'NONE')
    output_y = model_util.make_tensor_value_info(nb.name, output_type, [1, 1])
    nb.add_output(output_y)
    context.add_output(output_y)
    # Add a ZipMap to turn the raw scores into a label->probability map output.
    prob_input = context.get_unique_name('classProbability')
    nb.add_output(prob_input)
    appended_node = add_zipmap(prob_input, output_type, class_labels, context)
    # The excerpt ends here; presumably the framework receives both nodes, e.g.:
    return [nb.make_node(), appended_node]
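
# For orientation, the graph fragment the convert above assembles corresponds
# roughly to this pair of nodes built with the stock onnx.helper API (the
# names, labels, and attribute values are illustrative only, not what
# NodeBuilder generates):
from onnx import helper
svm = helper.make_node(
    'SVMClassifier', inputs=['features'], outputs=['label', 'scores'],
    domain='ai.onnx.ml', post_transform='NONE', classlabels_ints=[0, 1])
zipmap = helper.make_node(
    'ZipMap', inputs=['scores'], outputs=['classProbability'],
    domain='ai.onnx.ml', classlabels_int64s=[0, 1])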

# Validator for the CoreML imputer converter.
def validate(cm_node):
    try:
        utils._check_has_attr(cm_node, 'imputer')
    except AttributeError as e:
        raise RuntimeError('Missing type from CoreML node: ' + str(e))

# Validator for a scikit-learn Normalizer node: norm identifies which norm
# the scaler applies.
def validate(sk_node):
    try:
        utils._check_has_attr(sk_node, 'norm')
    except AttributeError as e:
        raise RuntimeError('Missing type from sklearn node: ' + str(e))

# Converter for a scikit-learn gradient-boosting regressor: every fitted tree
# is folded into one TreeEnsembleRegressor node, scaled by the learning rate.
def convert(context, sk_node, inputs):
    attr_pairs = _get_default_tree_regressor_attribute_pairs()
    attr_pairs['n_targets'] = 1
    attr_pairs['base_values'] = [utils.convert_to_python_value(sk_node.init_.mean)]
    tree_weight = sk_node.learning_rate
    for i in range(sk_node.n_estimators):
        tree = sk_node.estimators_[i][0].tree_
        _add_tree_to_attribute_pairs(attr_pairs, False, tree, i, tree_weight, 0, False)
    nb = NodeBuilder(context, 'TreeEnsembleRegressor', op_domain='ai.onnx.ml')
    for k, v in attr_pairs.items():
        nb.add_attribute(k, v)
    nb.extend_inputs(inputs)
    output_dim = [1, 1]
    nb.add_output(model_util.make_tensor_value_info(nb.name, onnx_proto.TensorProto.FLOAT, output_dim))
    # The excerpt ends here; presumably the framework receives the node, e.g.:
    return nb.make_node()
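
# Illustrative model that satisfies this converter (synthetic data). Note that
# sk_node.init_.mean assumes an older scikit-learn whose default init_
# estimator exposed .mean; recent releases use a DummyRegressor instead.
from sklearn.datasets import make_regression
from sklearn.ensemble import GradientBoostingRegressor
X, y = make_regression(n_samples=50, n_features=4, random_state=0)
model = GradientBoostingRegressor(n_estimators=3, random_state=0).fit(X, y)
print(model.learning_rate)            # the uniform tree_weight used above
print(model.estimators_[0][0].tree_)  # the fitted tree structure the loop walks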

# Validator for a scikit-learn SVC node: extends the generic SVM checks with
# the classifier-specific fields the convert above reads.
def validate(sk_node):
    SVMConverter.validate(sk_node)
    try:
        utils._check_has_attr(sk_node, 'classes_')
        utils._check_has_attr(sk_node, 'n_support_')
        utils._check_has_attr(sk_node, 'probA_')
        utils._check_has_attr(sk_node, 'probB_')
    except AttributeError as e:
        raise RuntimeError('Missing type from sklearn node: ' + str(e))

# Validator for the CoreML split layer.
def validate(cm_node):
    try:
        utils._check_has_attr(cm_node, 'split')
        utils._check_has_attr(cm_node, 'input')
        utils._check_has_attr(cm_node, 'output')
    except AttributeError:
        raise RuntimeError('Missing attribute in neural network layer: {0}'.format(cm_node.name))
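
# Because these validators only probe attribute presence, they can be
# exercised without a real CoreML protobuf. A SimpleNamespace stand-in (purely
# illustrative, and assuming _check_has_attr behaves as sketched earlier)
# triggers the same code paths:
from types import SimpleNamespace
layer = SimpleNamespace(split=object(), input=['x'], output=['a', 'b'], name='split_1')
validate(layer)  # passes silently
try:
    validate(SimpleNamespace(name='broken'))  # no 'split' field
except RuntimeError as err:
    print(err)  # Missing attribute in neural network layer: broken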