      x_baseline: Baseline value used in integration. Defaults to 0.
      x_steps: Number of integrated steps between baseline and x.
    """
    if x_baseline is None:
      x_baseline = np.zeros_like(x_value)

    assert x_baseline.shape == x_value.shape

    x_diff = x_value - x_baseline

    total_gradients = np.zeros_like(x_value)

    for alpha in np.linspace(0, 1, x_steps):
      x_step = x_baseline + alpha * x_diff
      total_gradients += super(IntegratedGradients, self).GetMask(
          x_step, feed_dict)

    return total_gradients * x_diff / x_steps
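
# A minimal usage sketch (not part of the original source): it assumes a
# TF1-style `graph` and `session` plus output tensor `y` and input tensor `x`,
# the same arguments the XRAI constructor below receives, and a hypothetical
# numpy array `input_image` shaped like `x`. It also assumes GetMask accepts
# feed_dict, x_baseline, and x_steps as keyword arguments, as the method body
# above suggests.
#
#   ig = IntegratedGradients(graph, session, y, x)
#   mask = ig.GetMask(input_image, feed_dict={}, x_baseline=None, x_steps=25)
#
# The loop in GetMask approximates the integrated-gradients path integral
#   (x - x_baseline) * integral over alpha in [0, 1] of
#       dF(x_baseline + alpha * (x - x_baseline)) / dx
# with a finite sum: plain gradient masks are taken at x_steps evenly spaced
# points on the straight line from the baseline to the input, summed, scaled
# by (x - x_baseline), and divided by x_steps.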
  def __init__(self, graph, session, y, x):
    super(XRAI, self).__init__(graph, session, y, x)

    # Initialize integrated gradients.
    self._integrated_gradients = IntegratedGradients(graph, session, y, x)
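
# Construction sketch (also not part of the original source), assuming the
# same `graph`, `session`, `y`, and `x` as above. Per the __init__ shown here,
# the XRAI object keeps an IntegratedGradients instance, so it can reuse
# integrated-gradients masks when computing its attributions.
#
#   xrai = XRAI(graph, session, y, x)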