if len(op_out.owner.outputs) > 1:
    raise NotImplementedError("can't autotest gradient of an op with multiple outputs")
    # We could loop over the outputs, making a random projection R for each,
    # but that wouldn't handle the case where not all the outputs are
    # differentiable... so I leave this as a TODO for now. -jsb
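# Reduce the (possibly multi-dimensional) output to a scalar by projecting it
# onto a random direction R: y = sum(R * op_out). Checking dy/d(arg) then
# exercises the gradient of every output element at once.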
R = numpy.random.rand(*op_out.shape)
y = sum(mul_elemwise(R, op_out))
g = gradient.grad(y)
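# Relative error between two gradient estimates; eps keeps the denominator
# away from zero when both values are close to zero.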
def abs_rel_err(a, b):
    return abs((a - b) / (a + b + eps))
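# For each differentiable argument, compare the symbolic gradient g(args[idx])
# against a finite-difference estimate, element by element, up to rank 2.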
for idx in range(len(args)):
    g_i = g(args[idx])
    if g_i is gradient.Undefined:
        # no gradient is defined with respect to this argument; skip it
        continue
    if args[idx].shape == ():
        # 0-d (scalar) argument: a single finite-difference check
        fd_grad = _finite_diff1(_scalar_f(op_cls, args, R, idx),
                                args[idx].data, eps, y.data)
        err = abs_rel_err(fd_grad, g_i.data)
        self.failUnless(err < tol, (err, op_cls, idx))
    elif len(args[idx].shape) == 1:
        # 1-d argument: check each element independently
        for i in xrange(args[idx].shape[0]):
            fd_grad = _finite_diff1(_scalar_f(op_cls, args, R, idx, (i,)),
                                    args[idx].data[i], eps, y.data)
            err = abs_rel_err(fd_grad, g_i.data[i])
            self.failUnless(abs(err) < tol, (err, op_cls, idx, i))
    elif len(args[idx].shape) == 2:
        # 2-d argument: check each element independently
        for i in xrange(args[idx].shape[0]):
            for j in xrange(args[idx].shape[1]):
                fd_grad = _finite_diff1(_scalar_f(op_cls, args, R, idx, (i, j)),