import chaospy as cp
import numpy as np

def test_orth_norms():
    dist = cp.Normal(0, 1)
    orth = cp.orth_ttr(5, dist, normed=True)
    norms = cp.E(orth**2, dist)
    assert np.allclose(norms, 1)
def test_integration():
    dim, order, size = 2, 3, 5  # illustrative values; defined at module level in the original test file
    dist = cp.Iid(cp.Normal(), dim)
    orth, norms = cp.orth_ttr(order, dist, retall=1)
    gq = cp.generate_quadrature
    nodes, weights = gq(order, dist, rule="C")
    vals = np.zeros((len(weights), size))
    cp.fit_quadrature(orth, nodes, weights, vals, norms=norms)
def test_orth_ttr():
    dist = cp.Normal(0, 1)
    orth = cp.orth_ttr(5, dist)
    outer = cp.outer(orth, orth)
    Cov1 = cp.E(outer, dist)
    Diatoric = Cov1 - np.diag(np.diag(Cov1))
    assert np.allclose(Diatoric, 0)
    Cov2 = cp.Cov(orth[1:], dist)
    assert np.allclose(Cov1[1:, 1:], Cov2)
finds the expansion coefficients through numerical integration. The
integration uses a quadrature scheme with weights and nodes. We use Leja
quadrature with Smolyak sparse grids to reduce the number of nodes
required. The model is evaluated and the features are calculated at each
node, and the polynomial approximation is created from these results.
See also
--------
uncertainpy.Data
uncertainpy.Parameters
"""
uncertain_parameters = self.convert_uncertain_parameters(uncertain_parameters)
distribution = self.create_distribution(uncertain_parameters=uncertain_parameters)

P = cp.orth_ttr(polynomial_order, distribution)

if quadrature_order is None:
    quadrature_order = polynomial_order + 2

nodes, weights = cp.generate_quadrature(quadrature_order,
                                        distribution,
                                        rule="J",
                                        sparse=True)

# Run the model at each quadrature node
data = self.runmodel.run(nodes, uncertain_parameters)
data.method = ("polynomial chaos expansion with the pseudo-spectral method. "
               "polynomial_order={}, quadrature_order={}".format(
                   polynomial_order, quadrature_order))
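The docstring above describes the same pseudo-spectral pipeline the method body implements. A minimal self-contained sketch of it outside the class context, with a made-up toy `model` and distribution standing in for `self.runmodel.run` and `self.create_distribution`:

import chaospy as cp
import numpy as np

def model(node):
    # toy stand-in for self.runmodel.run
    return np.exp(-node[0]) * node[1]

distribution = cp.J(cp.Uniform(0, 1), cp.Normal(0, 1))
polynomial_order = 3
quadrature_order = polynomial_order + 2

P = cp.orth_ttr(polynomial_order, distribution)
# Leja rule ("J") on a Smolyak sparse grid, as in the docstring
nodes, weights = cp.generate_quadrature(
    quadrature_order, distribution, rule="J", sparse=True)

evals = [model(node) for node in nodes.T]
approx = cp.fit_quadrature(P, nodes, weights, evals)
mean, std = cp.E(approx, distribution), cp.Std(approx, distribution)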
data = get_data(nrun, cdir)

# Rescale the oxygen flux: flip the sign and convert to per-day units
# (86400 seconds per day)
data[:, 0, :] = -data[:, 0, :] * 86400.

uq = profit.UQ(yaml='uq.yaml')
distribution = cp.J(*uq.params.values())
sparse = uq.backend.sparse
if sparse:
    order = 2 * 3
else:
    order = 3 + 1

# Start the postprocessing: quadrature rule, orthogonal basis, and PCE fits
nodes, weights = cp.generate_quadrature(order, distribution, rule='G', sparse=sparse)
expansion, norms = cp.orth_ttr(3, distribution, retall=True)
approx_denit = cp.fit_quadrature(expansion, nodes, weights, np.mean(data[:, 1, :], axis=1))
approx_oxy = cp.fit_quadrature(expansion, nodes, weights, np.mean(data[:, 0, :], axis=1))
annual_oxy = cp.fit_quadrature(expansion, nodes, weights, data[:, 0, :])
annual_denit = cp.fit_quadrature(expansion, nodes, weights, data[:, 1, :])

# First-order Sobol sensitivity indices
s_denit = cp.descriptives.sensitivity.Sens_m(annual_denit, distribution)
s_oxy = cp.descriptives.sensitivity.Sens_m(annual_oxy, distribution)

# Standard deviation and mean of the annual fluxes
df_oxy = cp.Std(annual_oxy, distribution)
df_denit = cp.Std(annual_denit, distribution)
f0_oxy = cp.E(annual_oxy, distribution)
f0_denit = cp.E(annual_denit, distribution)
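`Sens_m` returns the first-order Sobol indices with one entry per input parameter along the leading axis, so each parameter's variance contribution can be inspected directly. A hedged usage sketch, assuming `uq.params` behaves like a dict (its use with `.values()` above suggests it does):

# s_oxy[i] holds the first-order Sobol index of parameter i (per time point here)
for name, s in zip(uq.params.keys(), s_oxy):
    print(name, np.mean(s))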
# Monte Carlo: draw 1000 quasi-random samples (rule "H" is Halton)
samples = distribution.sample(1000, "H")
evals = [foo(coord, sample) for sample in samples.T]
expected = np.mean(evals, 0)
deviation = np.std(evals, 0)
plt.fill_between(
coord, expected-deviation, expected+deviation, color="k", alpha=0.5)
plt.plot(coord, expected, "r")
plt.savefig("intro_montecarlo.png")
plt.clf()
polynomial_expansion = cp.orth_ttr(8, distribution)
foo_approx = cp.fit_regression(polynomial_expansion, samples, evals)
expected = cp.E(foo_approx, distribution)
deviation = cp.Std(foo_approx, distribution)
plt.fill_between(
coord, expected-deviation, expected+deviation, color="k", alpha=0.5)
plt.plot(coord, expected, "r")
plt.savefig("intro_collocation.png")
plt.clf()
abscissas, weights = cp.generate_quadrature(8, distribution, "C")
evals = [foo(coord, val) for val in abscissas.T]
foo_approx = cp.fit_quadrature(
    polynomial_expansion, abscissas, weights, evals)
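The snippet is cut off after the fit. A plausible continuation, mirroring the Monte Carlo and collocation blocks above (the output filename is made up):

expected = cp.E(foo_approx, distribution)
deviation = cp.Std(foo_approx, distribution)
plt.fill_between(
    coord, expected-deviation, expected+deviation, color="k", alpha=0.5)
plt.plot(coord, expected, "r")
plt.savefig("intro_quadrature.png")  # hypothetical filename
plt.clf()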
# all multi-indices of the PCE expansion: k <= l
k_norm = list(product(*[np.arange(1, l[n] + 1) for n in range(self.N)]))
if self.comb_coef[tuple(l)] != 0:
    print('Computing PCE coefficients of refinement %d / %d' % (count_l, self.l_norm.shape[0]))
else:
    print('Skipping PCE coefficients of refinement %d / %d' % (count_l, self.l_norm.shape[0]))
for k in k_norm:
    if self.comb_coef[tuple(l)] != 0:
        # product of the PCE basis function of order k - 1 and all
        # Lagrange basis functions in a_1d, per dimension:
        # [[phi_k[0]*a_1d[0]], ..., [phi_k[N-1]*a_1d[N-1]]]
        # orthogonal polynomial generated by chaospy
        phi_k = [cp.orth_ttr(k[n] - 1,
                             dist=self.sampler.params_distribution[n],
                             normed=True)[-1] for n in range(self.N)]
        # the polynomial order of each integrand phi_k*a_j is
        # (k - 1) + (number of collocation points - 1)
        orders = [(k[n] - 1) + (self.xi_1d[n][l[n]].size - 1) for n in range(self.N)]
        # will hold the products of the PCE basis functions phi_k and the
        # Lagrange interpolation polynomials a_1d
        cross_prod = []
        for n in range(self.N):
            # Gauss quadrature with n points integrates polynomials up to
            # order 2n - 1 exactly; solve for the required number of points
            # given the integrand order
            n_quad_points = int(np.ceil((orders[n] + 1) / 2))
            # generate the Gauss quadrature rule
            if isinstance(self.sampler.params_distribution[n], cp.DiscreteUniform):
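The comment above relies on the exactness of Gauss quadrature: an n-point rule integrates polynomials up to degree 2n - 1 exactly. A quick standalone check of that rule with chaospy (not part of the class above):

import chaospy as cp
import numpy as np

dist = cp.Normal(0, 1)
nodes, weights = cp.generate_quadrature(2, dist, rule="G")  # 3 nodes
# degree 4 <= 2*3 - 1, so the rule is exact: E[x**4] = 3 for a standard normal
assert np.isclose(np.sum(weights * nodes[0]**4), 3.0)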
expected = numpy.mean(evals, 0)
deviation = numpy.std(evals, 0)
plt.fill_between(
coord, expected-deviation, expected+deviation,
color="k", alpha=0.3
)
plt.plot(coord, expected, "k--", lw=3)
plt.xlabel(r"\verb;coord;")
plt.ylabel(r"function evaluations \verb;foo;")
plt.title("Results using Monte Carlo simulation")
plt.savefig("results_montecarlo.png")
plt.clf()
polynomial_expansion = cp.orth_ttr(8, distribution)
foo_approx = cp.fit_regression(polynomial_expansion, samples, evals)
expected = cp.E(foo_approx, distribution)
deviation = cp.Std(foo_approx, distribution)
plt.fill_between(
coord, expected-deviation, expected+deviation,
color="k", alpha=0.3
)
plt.plot(coord, expected, "k--", lw=3)
plt.xlabel(r"\verb;coord;")
plt.ylabel(r"function evaluations \verb;foo;")
plt.title("Results using point collocation method")
plt.savefig("results_collocation.png")
plt.clf()
I = cp.Uniform(8, 10)
dist = cp.J(a, I)  # `a` is a distribution defined earlier (not shown in this snippet)
## Monte Carlo integration
samples = dist.sample(10**5)
u_mc = [u(x, *s) for s in samples.T]
mean = np.mean(u_mc, 0)  # average over the samples (axis 0), matching cp.E below
var = np.var(u_mc, 0)
## Polynomial chaos expansion
## using Pseudo-spectral method and Gaussian Quadrature
order = 5
P, norms = cp.orth_ttr(order, dist, retall=True)
nodes, weights = cp.generate_quadrature(order+1, dist, rule="G")
solves = [u(x, s[0], s[1]) for s in nodes.T]
U_hat = cp.fit_quadrature(P, nodes, weights, solves, norms=norms)
mean = cp.E(U_hat, dist)
var = cp.Var(U_hat, dist)
## Polynomial chaos expansion
## using Point collocation method and quasi-random samples
order = 5
P = cp.orth_ttr(order, dist)
nodes = dist.sample(2*len(P), "M")  # rule "M": Hammersley quasi-random samples
solves = [u(x, s[0], s[1]) for s in nodes.T]
U_hat = cp.fit_regression(P, nodes, solves, rule="T")  # rule "T": Tikhonov regularization
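The regression-based model can be post-processed exactly like the quadrature-based one above, for example:

mean = cp.E(U_hat, dist)
var = cp.Var(U_hat, dist)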