# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# NOTE(review): fragment — the enclosing handler function (and the `if`
# that should guard the raise below) is not visible in this chunk.
# Same principle should work but we need to implement our own eye.
raise ValueError("onehot op: only rank1 is supported")
# Look up the logits input plus its dtype/shape from the converter context.
logit_name = node.input[0]
logit_dtype = ctx.get_dtype(logit_name)
logit_shape = ctx.get_shape(logit_name)
utils.make_sure(logit_dtype, "Dtype of {} is None".format(logit_name))
# The GatherND built below indexes with int64, so cast the indices if needed.
indices_dtype = ctx.get_dtype(indices_name)
if indices_dtype != TensorProto.INT64:
    indices_cast = ctx.make_node("Cast", [indices_name], attr={"to": TensorProto.INT64})
    indices_name = indices_cast.output[0]
indices_size = ctx.make_node("Size", [indices_name])
indices_unsqueeze = ctx.make_node("Unsqueeze", [indices_name], attr={"axes": [1]})
# Build row ids 0..N-1 with a Range node so every class index can be paired
# with its row number.
zero_const = ctx.make_const(utils.make_name("zero"), np.array(0, dtype=np.int64))
one_const = ctx.make_const(utils.make_name("one"), np.array(1, dtype=np.int64))
id_name = utils.make_name("sparse_softmax_id")
id_output = utils.port_name(id_name)
controlflow.make_range(ctx, zero_const.output[0], indices_size.output[0], one_const.output[0],
                       id_output, id_name, shape=[-1], dtype=TensorProto.INT64)
id_unsqueeze = ctx.make_node("Unsqueeze", [id_output], attr={"axes": [1]})
# Concat to [row_id, class_index] pairs, the index layout GatherND expects.
indices_with_id = ctx.make_node("Concat",
                                [id_unsqueeze.output[0], indices_unsqueeze.output[0]],
                                attr={"axis": 1})
# Pick log_softmax(logits)[row, class] per row and negate it — presumably
# the sparse-softmax cross-entropy loss (TODO confirm against the caller).
log_softmax = ctx.make_node(op_type="LogSoftmax",
                            inputs=[logit_name], dtypes=[logit_dtype], shapes=[logit_shape])
gathernd_name = utils.make_name("sparse_softmax_gathernd")
gathernd_output = utils.port_name(gathernd_name)
tensor.make_gathernd(ctx, log_softmax.output[0], indices_with_id.output[0], gathernd_output,
                     gathernd_name, logit_dtype, [logit_shape], [logit_dtype])
const_name = utils.make_name("const_negative_one")
const_negative_one = ctx.make_const(const_name, np.array(-1).astype(utils.map_onnx_to_numpy_type(logit_dtype)))
mul2 = ctx.make_node(op_type="Mul", inputs=[const_negative_one.output[0], gathernd_output])
shapes = node.output_shapes
# NOTE(review): the chunk jumps here into an unrelated loop-builder fragment;
# `loop_1` is not defined anywhere above in this view.
output_id = loop_1.output[1]
# NOTE(review): interleaved/duplicated fragments of a Loop body-graph builder;
# the `if rank >= 1:` matching this `elif` is not visible at this point.
elif rank == 0:
    # Base case: a rank-0 element is selected with an If node rather than a
    # nested Loop.
    _, if_node_output_id = create_if_op(g, input_ids_for_current_iter, output_data_type, output_shape[1:])
    output_id = if_node_output_id
# Expose the per-iteration value through an Identity node.
output_identity_name = utils.make_name("loop_output")
loop_output_id = utils.port_name(output_identity_name)
g.make_node(
    'Identity',
    [output_id],
    outputs=[loop_output_id],
    name=output_identity_name
)
# Pass the loop condition through unchanged.
cond_identity_name = utils.make_name("cond_output")
cond_output_id = utils.port_name(cond_identity_name)
g.make_node(
    'Identity',
    [cond_name],
    outputs=[cond_output_id],
    name=cond_identity_name
)
# Carry a dummy loop-carried variable through the body graph.
fake_var_identity_name = utils.make_name("fake_var_output")
fake_var_output_id = utils.port_name(fake_var_identity_name)
g.make_node(
    'Identity',
    [fake_var_name],
    outputs=[fake_var_output_id],
    name=fake_var_identity_name
)
# Gather this iteration's slice of the "false" input, then recurse: one Loop
# per remaining rank, an If node at rank 0.
false_input_id_for_current_iter = get_inputs_for_current_iteration(g, false_input_id, iter_name)
input_ids_for_current_iter = [cond_input_id_for_current_iter, true_input_id_for_current_iter,
                              false_input_id_for_current_iter]
output_id = None
rank -= 1
if rank >= 1:
    loop_1 = create_loop_op(g, input_ids_for_current_iter, output_data_type, output_shape[1:],
                            trip_count_input_ids, rank)
    output_id = loop_1.output[1]
elif rank == 0:
    _, if_node_output_id = create_if_op(g, input_ids_for_current_iter, output_data_type, output_shape[1:])
    output_id = if_node_output_id
output_identity_name = utils.make_name("loop_output")
loop_output_id = utils.port_name(output_identity_name)
g.make_node(
    'Identity',
    [output_id],
    outputs=[loop_output_id],
    name=output_identity_name
)
cond_identity_name = utils.make_name("cond_output")
cond_output_id = utils.port_name(cond_identity_name)
g.make_node(
    'Identity',
    [cond_name],
    outputs=[cond_output_id],
    name=cond_identity_name
)
# NOTE(review): the next four lines are orphaned arguments of a g.make_node
# call whose opening lines were lost in this chunk — syntactically invalid
# as-is; the intact form appears twice above.
[output_id],
outputs=[loop_output_id],
name=output_identity_name
)
cond_identity_name = utils.make_name("cond_output")
cond_output_id = utils.port_name(cond_identity_name)
g.make_node(
    'Identity',
    [cond_name],
    outputs=[cond_output_id],
    name=cond_identity_name
)
fake_var_identity_name = utils.make_name("fake_var_output")
fake_var_output_id = utils.port_name(fake_var_identity_name)
g.make_node(
    'Identity',
    [fake_var_name],
    outputs=[fake_var_output_id],
    name=fake_var_identity_name
)
# Register the body graph's required outputs: condition, dummy variable,
# and the scanned loop output.
g.add_graph_output(cond_output_id, TensorProto.BOOL, ())
g.add_graph_output(fake_var_output_id, TensorProto.FLOAT, ())
# use None for all dims, just keep original rank. Because it is observed, dims might be changed in loop.
g.add_graph_output(loop_output_id, output_data_type, utils.create_vague_shape_like(output_shape[1:]))
return g
"""Create and insert a new node into the graph.
Args:
op_type: type for new operation
output_name: the names of the outputs above us
name: the name of the new op
kwargs: attributes of the new node
Returns:
node that was inserted
"""
utils.make_sure(isinstance(output_name, six.text_type), "output_name's type is not expected: %s",
type(output_name))
utils.make_sure(isinstance(op_type, six.text_type), "op_type's type is not expected: %s",
type(op_type))
new_output = port_name(name)
new_node = self.make_node(op_type, [output_name], attr=kwargs, outputs=[new_output], name=name, domain=domain)
to_replace = [n for n in self.get_nodes() if n != new_node]
self.replace_all_inputs(to_replace, output_name, new_output)
return new_node
def create_if_op(g, input_ids, output_data_type, output_shape):
    """Build an ONNX If node that selects between two candidate inputs.

    Args:
        g: graph to insert the node into
        input_ids: [condition_id, true_value_id, false_value_id]
        output_data_type: ONNX dtype of the selected value
        output_shape: shape of the selected value

    Returns:
        (if_node, output_port_name); the node outputs a scalar.
    """
    if_name = utils.make_name("If")
    then_body = create_body_graph_for_if_branch(g, output_data_type, output_shape, input_ids[1], if_name)
    else_body = create_body_graph_for_if_branch(g, output_data_type, output_shape, input_ids[2], if_name)
    result_port = utils.port_name(if_name)
    # The If node consumes only the boolean condition; each branch graph
    # yields the corresponding candidate value. Output is a scalar.
    node = g.make_node("If", [input_ids[0]], outputs=[result_port], name=if_name, skip_conversion=False)
    node.set_body_graph_as_attr("then_branch", then_body)
    node.set_body_graph_as_attr("else_branch", else_body)
    return node, result_port
def create_if_op(g, input_ids, output_data_type, output_shape):
    """Create an ONNX If node selecting between two candidate inputs.

    input_ids is [condition, true_value, false_value]; returns
    (if_node, output_port_name).

    NOTE(review): byte-identical duplicate of the `create_if_op` defined just
    above in this chunk; when both live in one module this later definition
    shadows the earlier one — consider deduplicating.
    """
    op_name = utils.make_name("If")
    true_graph = create_body_graph_for_if_branch(g, output_data_type, output_shape, input_ids[1], op_name)
    false_graph = create_body_graph_for_if_branch(g, output_data_type, output_shape, input_ids[2], op_name)
    out_name = utils.port_name(op_name)
    # output a scalar
    if_node = g.make_node("If", [input_ids[0]], outputs=[out_name], name=op_name, skip_conversion=False)
    if_node.set_body_graph_as_attr("then_branch", true_graph)
    if_node.set_body_graph_as_attr("else_branch", false_graph)
    return if_node, out_name
# NOTE(review): fragment of a loop body-graph builder (the enclosing `def` is
# not visible); repeats the recursion pattern seen earlier in this chunk.
false_input_id_for_current_iter = get_inputs_for_current_iteration(g, false_input_id, iter_name)
input_ids_for_current_iter = [cond_input_id_for_current_iter, true_input_id_for_current_iter,
                              false_input_id_for_current_iter]
output_id = None
# Peel one dimension per nesting level: recurse with a Loop while rank >= 1,
# terminate with an If node at rank 0.
rank = rank - 1
if rank >= 1:
    loop_1 = create_loop_op(g, input_ids_for_current_iter, output_data_type, output_shape[1:],
                            trip_count_input_ids, rank)
    output_id = loop_1.output[1]
elif rank == 0:
    _, if_node_output_id = create_if_op(g, input_ids_for_current_iter, output_data_type, output_shape[1:])
    output_id = if_node_output_id
# Expose the selected value and the pass-through condition as body outputs.
output_identity_name = utils.make_name("loop_output")
loop_output_id = utils.port_name(output_identity_name)
g.make_node(
    'Identity',
    [output_id],
    outputs=[loop_output_id],
    name=output_identity_name
)
cond_identity_name = utils.make_name("cond_output")
cond_output_id = utils.port_name(cond_identity_name)
g.make_node(
    'Identity',
    [cond_name],
    outputs=[cond_output_id],
    name=cond_identity_name
)
"""Create and insert a new node into the graph.
Args:
op_type: type for new operation
output_name: the names of the outputs above us
name: the name of the new op
kwargs: attributes of the new node
Returns:
node that was inserted
"""
utils.make_sure(isinstance(output_name, six.text_type), "output_name's type is not expected: %s",
type(output_name))
utils.make_sure(isinstance(op_type, six.text_type), "op_type's type is not expected: %s",
type(op_type))
new_output = port_name(name)
new_node = self.make_node(op_type, [output_name], attr=kwargs, outputs=[new_output], name=name, domain=domain)
to_replace = [n for n in self.get_nodes() if n != new_node]
self.replace_all_inputs(to_replace, output_name, new_output)
return new_node