def _create_alpha_node(neg_alpha_op, updated_alpha):
  # Rewrite the negative-alpha Const node in place to hold the positive alpha
  # expected by the fused PRelu op, and record that it has been updated.
  if neg_alpha_op.name not in updated_alpha:
    alpha_value = -graph_rewrite_util.values_from_const(neg_alpha_op)
    neg_alpha_op.attr['value'].CopyFrom(
        attr_value_pb2.AttrValue(tensor=tensor_util.make_tensor_proto(
            alpha_value, alpha_value.dtype.type, alpha_value.shape)))
    updated_alpha.append(neg_alpha_op.name)
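
# A standalone sketch (not converter code) of the identity behind the sign
# flip above, assuming the pattern being fused into PRelu is
# add(relu(x), mul(neg_alpha, relu(neg(x)))): negating the stored neg_alpha
# constant yields the conventional PRelu slope.
import numpy as np

x = np.array([-2.0, -0.5, 0.0, 1.5])
neg_alpha = -0.25                    # constant as stored in the graph
alpha = -neg_alpha                   # what _create_alpha_node writes back

relu = lambda v: np.maximum(v, 0.0)
pattern = relu(x) + neg_alpha * relu(-x)   # graph pattern before fusion
prelu = np.where(x > 0, x, alpha * x)      # PRelu with the positive alpha
assert np.allclose(pattern, prelu)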

# Fragment from the per-node loop of fold_batch_norms: `node` is the batch
# normalization op being folded, `conv_op` the convolution feeding it, and
# `bias` the optional Const behind an Add/BiasAdd between the two.
if bias and bias.op != 'Const':
  tf_logging.warning("The bias %s after the conv %s was not a constant. "
                     "Maybe because freeze_graph wasn't "
                     "run first?" % (bias.name, conv_op.name))
  continue
if conv_op.op not in ["Conv2D", "DepthwiseConv2dNative"]:
  tf_logging.warning("Didn't find expected Conv2D or DepthwiseConv2dNative"
                     " input to '%s'" % node.name)
  continue
weights_op = graph_rewrite_util.node_from_map(
    input_node_map, conv_op.input[1])
if weights_op.op != "Const":
  tf_logging.warning("Didn't find expected conv Constant input to '%s',"
                     " found %s instead. Maybe because freeze_graph wasn't"
                     " run first?" % (conv_op.name, weights_op))
  continue
weights = graph_rewrite_util.values_from_const(weights_op)
if conv_op.op == "Conv2D":
  channel_count = weights.shape[3]
elif conv_op.op == "DepthwiseConv2dNative":
  channel_count = weights.shape[2] * weights.shape[3]
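
# Illustrative shapes only (standard TensorFlow filter layouts, not converter
# code). Conv2D filters are [height, width, in_channels, out_channels], so the
# per-channel batch-norm parameters have length shape[3]; DepthwiseConv2dNative
# filters are [height, width, in_channels, channel_multiplier], and each input
# channel produces channel_multiplier outputs, hence shape[2] * shape[3].
import numpy as np

conv2d_weights = np.zeros((3, 3, 16, 32))     # 32 output channels
assert conv2d_weights.shape[3] == 32

depthwise_weights = np.zeros((3, 3, 16, 2))   # 16 * 2 = 32 output channels
assert depthwise_weights.shape[2] * depthwise_weights.shape[3] == 32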
mean_op = graph_rewrite_util.node_from_map(
    input_node_map,
    node.input[INPUT_ORDER[node.op].index("mean_op")])
if mean_op.op != "Const":
  tf_logging.warning("Didn't find expected mean Constant input to '%s',"
                     " found %s instead. Maybe because freeze_graph wasn't"
                     " run first?" % (node.name, mean_op))
  continue
mean_value = graph_rewrite_util.values_from_const(mean_op)
if bias is not None:
  # Adjust the mean of the batchnorm based on the add op in-between the conv
  # and the batchnorm.
  mean_value = mean_value - graph_rewrite_util.values_from_const(bias)
if mean_value.shape != (channel_count,):
  tf_logging.warning("Incorrect shape for mean, found %s, expected %s,"
                     " for node %s" % (str(mean_value.shape), str(
                         (channel_count,)), node.name))
  continue
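
# Why subtracting the bias from the mean is sound (a sketch, not converter
# code): batch norm computes (x - mean) * scale + beta, so feeding it x + bias
# is equivalent to feeding x to a batch norm whose mean is (mean - bias).
import numpy as np

x, bias_v, mean, scale, beta = 1.7, 0.3, 0.9, 2.0, 0.1
assert np.isclose(((x + bias_v) - mean) * scale + beta,
                  (x - (mean - bias_v)) * scale + beta)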
var_op = graph_rewrite_util.node_from_map(
    input_node_map,
    node.input[INPUT_ORDER[node.op].index("var_op")])
if var_op.op != "Const":
  tf_logging.warning("Didn't find expected var Constant input to '%s',"
                     " found %s instead. Maybe because freeze_graph wasn't"
                     " run first?" % (node.name, var_op))
  continue
var_value = graph_rewrite_util.values_from_const(var_op)
if var_value.shape != (channel_count,):
  tf_logging.warning("Incorrect shape for var, found %s, expected %s,"
                     " for node %s" % (str(var_value.shape), str(
                         (channel_count,)), node.name))
  continue
beta_op = graph_rewrite_util.node_from_map(
    input_node_map,
    node.input[INPUT_ORDER[node.op].index("beta_op")])
if beta_op.op != "Const":
  tf_logging.warning("Didn't find expected beta Constant input to '%s',"
                     " found %s instead. Maybe because freeze_graph wasn't"
                     " run first?" % (node.name, beta_op))
  continue
beta_value = graph_rewrite_util.values_from_const(beta_op)
if beta_value.shape != (channel_count,):
  tf_logging.warning("Incorrect shape for beta, found %s, expected %s,"
                     " for node %s" % (str(beta_value.shape), str(
                         (channel_count,)), node.name))
  continue
gamma_op = graph_rewrite_util.node_from_map(
    input_node_map,
    node.input[INPUT_ORDER[node.op].index("gamma_op")])
if gamma_op.op != "Const":
  tf_logging.warning("Didn't find expected gamma Constant input to '%s',"
                     " found %s instead. Maybe because freeze_graph wasn't"
                     " run first?" % (node.name, gamma_op))
  continue
gamma_value = graph_rewrite_util.values_from_const(gamma_op)
if gamma_value.shape != (channel_count,):
  tf_logging.warning("Incorrect shape for gamma, found %s, expected %s,"
                     " for node %s" % (str(gamma_value.shape), str(
                         (channel_count,)), node.name))
  continue
variance_epsilon_value = node.attr[EPSILON_ATTR[node.op]].f
nodes_to_skip[node.name] = True
nodes_to_skip[weights_op.name] = True
nodes_to_skip[conv_op.name] = True
if bias is not None:
  nodes_to_skip[add_op.name] = True
if scale_after_normalization(node):
  scale_value = (
      (1.0 / np.vectorize(math.sqrt)(var_value + variance_epsilon_value)) *
      gamma_value)
else:
  scale_value = (
      1.0 / np.vectorize(math.sqrt)(var_value + variance_epsilon_value))
offset_value = (-mean_value * scale_value) + beta_value