How to use the gradient.grad function from the gradient module

To help you get started, we’ve selected a few examples of gradient usage, based on popular ways the module is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

Example source: Theano / Theano / _test_tensor_ops.py (view on GitHub)
m.__setitem__(coord, x)
                    y = float(sum(mul_elemwise(R, op_cls(*args))).data)
                    m.__setitem__(coord, old_x)
                    return y
                return rval

        self.failUnless(hasattr(op_cls, 'update_gradient'), op_cls)
        op_out = op_cls(*args)
        if len(op_out.owner.outputs) > 1:
            raise NotImplementedError('cant autotest gradient of op with multiple outputs')
            # we could make loop over outputs making random projections R for each,
            # but this doesn't handle the case where not all the outputs are
            # differentiable... so I leave this as TODO for now -jsb.
        R = numpy.random.rand(*op_out.shape)
        y = sum(mul_elemwise(R, op_out))
        g = gradient.grad(y)

        def abs_rel_err(a,b):
            return abs( (a-b) / (a+b+eps))

        for idx in range(len(args)):
            #print 'aaaaaaa', op_cls, [i.shape for i in args]
            g_i = g(args[idx])
            if g_i is gradient.Undefined:
                continue
            if args[idx].shape == ():
                fd_grad = _finite_diff1(_scalar_f(op_cls, args, R, idx),
                        args[idx].data, eps, y.data)
                err = abs_rel_err(fd_grad,g_i.data)
                self.failUnless( err < tol, (err, op_cls, idx))
            elif len(args[idx].shape) == 1:
                for i in xrange(args[idx].shape[0]):