# Conductance attribution tests. The imports below are what this snippet needs
# to run; the test-helper import paths are assumptions and may need to be
# adjusted to match the layout of the Captum test suite being used.
import unittest

import torch

from captum.attr import LayerConductance, NeuronConductance

# Test-only helpers from Captum's test suite (exact paths vary by version).
from tests.helpers.basic import assertArraysAlmostEqual
from tests.helpers.basic_models import BasicModel_ConvNet


class Test(unittest.TestCase):
    def _conductance_input_sum_test_assert(
        self, model, target_layer, test_input, test_baseline=None
    ):
        # Completeness check: for each neuron in the target layer, the neuron
        # conductance summed over all input elements should match the layer
        # conductance attributed to that neuron.
        layer_cond = LayerConductance(model, target_layer)
        attributions = layer_cond.attribute(
            test_input,
            baselines=test_baseline,
            target=0,
            n_steps=500,
            method="gausslegendre",
        )
        neuron_cond = NeuronConductance(model, target_layer)
        for i in range(attributions.shape[1]):
            for j in range(attributions.shape[2]):
                for k in range(attributions.shape[3]):
                    neuron_vals = neuron_cond.attribute(
                        test_input,
                        (i, j, k),
                        baselines=test_baseline,
                        target=0,
                        n_steps=500,
                    )
                    for n in range(attributions.shape[0]):
                        self.assertAlmostEqual(
                            torch.sum(neuron_vals[n]).item(),
                            attributions[n, i, j, k].item(),
                            delta=0.005,
                        )
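
    # Example usage (a minimal sketch, not part of the original snippet): the
    # completeness check above can be exercised on a small ConvNet by summing
    # neuron conductances for every neuron of its first convolutional layer.
    # The model class comes from the test helpers imported above; the layer
    # name conv1 and the input shape are assumptions.
    def test_conv1_conductance_sums_to_layer_conductance(self):
        net = BasicModel_ConvNet()
        inp = 100 * torch.randn(2, 1, 10, 10)
        self._conductance_input_sum_test_assert(net, net.conv1, inp, 0)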

    def _conductance_input_test_assert(
        self,
        model,
        target_layer,
        test_input,
        test_neuron,
        expected_input_conductance,
        additional_input=None,
    ):
        # The attribution should not depend on how the integration steps are
        # batched internally, so the same expected values are checked for
        # several internal batch sizes.
        for internal_batch_size in (None, 1, 20):
            cond = NeuronConductance(model, target_layer)
            attributions = cond.attribute(
                test_input,
                test_neuron,
                target=0,
                n_steps=500,
                method="gausslegendre",
                additional_forward_args=additional_input,
                internal_batch_size=internal_batch_size,
            )
            if isinstance(expected_input_conductance, tuple):
                # Multiple input tensors: compare each input's attributions
                # example by example.
                for i in range(len(expected_input_conductance)):
                    for j in range(len(expected_input_conductance[i])):
                        assertArraysAlmostEqual(
                            attributions[i][j : j + 1].squeeze(0).tolist(),
                            expected_input_conductance[i][j],
                            delta=0.1,
                        )
            else:
                # Single input tensor: compare example by example. (The
                # original snippet was cut off at this point; this branch
                # mirrors the tuple case above.)
                for i in range(len(expected_input_conductance)):
                    assertArraysAlmostEqual(
                        attributions[i : i + 1].squeeze(0).tolist(),
                        expected_input_conductance[i],
                        delta=0.1,
                    )

    def test_multi_dim_neuron_conductance(self):
        # Requires a CUDA device: the model and inputs are moved to the GPU so
        # the attribution can be exercised under torch.nn.DataParallel.
        net = BasicModel_ConvNet().cuda()
        inp = 100 * torch.randn(4, 1, 10, 10).cuda()
        # _data_parallel_test_assert is a helper defined elsewhere in the test
        # suite (not shown here); it compares attributions from the plain
        # model against those from a DataParallel-wrapped copy.
        self._data_parallel_test_assert(
            NeuronConductance,
            net,
            net.conv2,
            alt_device_ids=True,
            inputs=inp,
            target=1,
            neuron_index=(0, 1, 0),
        )
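
    # A minimal sketch (an assumption, not the helper actually used above) of
    # what a _data_parallel_test_assert-style check can look like: attributions
    # computed on the plain model should closely match attributions computed
    # when the forward pass runs under torch.nn.DataParallel. The real helper's
    # alt_device_ids handling is omitted to keep the sketch short.
    def _data_parallel_test_assert_sketch(
        self, algorithm, model, target_layer, inputs, neuron_index, **attr_kwargs
    ):
        dp_model = torch.nn.DataParallel(model)
        single_attr = algorithm(model, target_layer)
        dp_attr = algorithm(dp_model, target_layer)
        single_vals = single_attr.attribute(inputs, neuron_index, **attr_kwargs)
        dp_vals = dp_attr.attribute(inputs, neuron_index, **attr_kwargs)
        self.assertTrue(torch.allclose(single_vals, dp_vals, atol=1e-4))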