            # Poisson-constrained parameters: scale the parameters by the
            # parameter-set factors before evaluating the constraint term.
            # pars, parslice, and parset come from the enclosing loop over
            # self.config.auxdata_order, which this snippet truncates.
            paralphas = tensorlib.product(
                tensorlib.stack(
                    [pars[parslice], tensorlib.astensor(parset.factors)]
                ),
                axis=0,
            )
            constraint_term = tensorlib.poisson_logpdf(thisauxdata, paralphas)
        # collect the per-constraint log-pdf contributions
        summands = (
            constraint_term
            if summands is None
            else tensorlib.concatenate([summands, constraint_term])
        )
    return tensorlib.sum(summands) if summands is not None else 0


def fast(self, auxdata, pars):
    # the fast path delegates to the model's vectorized constraint term
    return self.constraint_logpdf(auxdata, pars)
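

# Sketch: one way to build a model `m` on which the two paths above can be
# compared; the spec used in the original test is not shown here. This assumes
# pyhf.simplemodels.uncorrelated_background (named hepdata_like in older pyhf
# releases). Its shapesys modifier is Poisson-constrained, so it exercises the
# poisson branch of slow().
m = pyhf.simplemodels.uncorrelated_background(
    signal=[5.0], bkg=[10.0], bkg_uncertainty=[3.0]
)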
auxd = pyhf.tensorlib.astensor(m.config.auxdata)
pars = pyhf.tensorlib.astensor(m.config.suggested_init())
slow_result = pyhf.tensorlib.tolist(slow(m, auxd, pars))
fast_result = pyhf.tensorlib.tolist(fast(m, auxd, pars))
assert pytest.approx(slow_result) == fast_result
assert pdf.spec['channels'][0]['samples'][1]['modifiers'][0]['type'] == 'lumi'
assert pdf.spec['channels'][0]['samples'][2]['modifiers'][0]['type'] == 'lumi'
assert pdf.spec['channels'][0]['samples'][2]['modifiers'][1]['type'] == 'staterror'
assert pdf.spec['channels'][0]['samples'][2]['modifiers'][1]['data'] == [0, 10.0]
assert pdf.spec['channels'][0]['samples'][1]['modifiers'][1]['type'] == 'staterror'
assert all(
np.isclose(
pdf.spec['channels'][0]['samples'][1]['modifiers'][1]['data'], [5.0, 0.0]
)
)
assert pdf.expected_actualdata(
pyhf.tensorlib.astensor(pdf.config.suggested_init())
).tolist() == [120.0, 110.0]
assert pdf.config.auxdata_order == sorted(
['lumi', 'syst1', 'staterror_channel1', 'syst2', 'syst3']
)
assert data == [122.0, 112.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0]
pars = pdf.config.suggested_init()
pars[pdf.config.par_slice('SigXsecOverSM')] = [2.0]
assert pdf.expected_data(pars, include_auxdata=False).tolist() == [140, 120]
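
# Sketch: how the expectation APIs used above fit together, assuming the `pdf`
# model built earlier in this test: expected_data(pars) is the concatenation of
# the expected main-channel counts and the expected auxiliary data, while
# expected_actualdata(pars) returns only the former.
check_pars = pyhf.tensorlib.astensor(pdf.config.suggested_init())
main = pyhf.tensorlib.tolist(pdf.expected_actualdata(check_pars))
aux = pyhf.tensorlib.tolist(pdf.expected_auxdata(check_pars))
full = pyhf.tensorlib.tolist(pdf.expected_data(check_pars))
assert pytest.approx(full) == main + aux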
def test_optim_uncerts(backend, source, spec, mu):
    pdf = pyhf.Model(spec)
    data = source['bindata']['data'] + pdf.config.auxdata

    init_pars = pdf.config.suggested_init()
    par_bounds = pdf.config.suggested_bounds()

    optim = pyhf.optimizer

    result = optim.minimize(pyhf.infer.mle.twice_nll, data, pdf, init_pars, par_bounds)
    assert pyhf.tensorlib.tolist(result)

    result = optim.minimize(
        pyhf.infer.mle.twice_nll,
        data,
        pdf,
        init_pars,
        par_bounds,
        [(pdf.config.poi_index, mu)],
        return_uncertainties=True,
    )
    assert result.shape[1] == 2
    assert pyhf.tensorlib.tolist(result)
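
    # Sketch: the same kind of fit through the higher-level MLE API. This
    # assumes pyhf >= 0.6 (where pyhf.infer.mle.fit forwards keyword arguments
    # to the optimizer) and that the minuit optimizer is in use, since the
    # uncertainties come from the minimizer; each row of the result is
    # (best-fit value, uncertainty) for one parameter.
    bestfit = pyhf.infer.mle.fit(
        pyhf.tensorlib.astensor(data), pdf, return_uncertainties=True
    )
    assert pyhf.tensorlib.shape(bestfit) == (pdf.config.npars, 2)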
def test_paramviewer_simple_nonbatched(backend):
    pars = pyhf.tensorlib.astensor([1, 2, 3, 4, 5, 6, 7])
    parshape = pyhf.tensorlib.shape(pars)

    view = ParamViewer(
        parshape,
        {'hello': {'slice': slice(0, 2)}, 'world': {'slice': slice(5, 7)}},
        ['hello', 'world'],
    )
    par_slice = view.get(pars)
    assert pyhf.tensorlib.tolist(par_slice[view.slices[0]]) == [1, 2]
    assert pyhf.tensorlib.tolist(par_slice[view.slices[1]]) == [6, 7]
    assert pyhf.tensorlib.tolist(par_slice) == [1, 2, 6, 7]
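
    # Sketch: the view is built from the parameter-tensor shape, so it can be
    # reused on any tensor of that shape to pull out the selected ('hello',
    # 'world') entries as one concatenated slice.
    other = pyhf.tensorlib.astensor([10, 20, 30, 40, 50, 60, 70])
    assert pyhf.tensorlib.tolist(view.get(other)) == [10, 20, 60, 70]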
def test_custom_backend_name_notsupported():
    class custom_backend(object):
        def __init__(self, **kwargs):
            self.name = "notsupported"
            # recent pyhf releases also inspect backend.precision when a
            # backend is swapped in, so a minimal custom backend provides it
            self.precision = "64b"

    backend = custom_backend()
    assert pyhf.tensorlib.name != backend.name
    pyhf.set_backend(backend)
    assert pyhf.tensorlib.name == backend.name
            # closing portion of the mega_mods mapping consumed by
            # histosys_combined below; the other entries are not shown here
            'name': 'world',
            'data': {
                'hi_data': [10, 10, 10],
                'lo_data': [5, 6, 7],
                'nom_data': [10, 10, 10],
                'mask': [True, True, True],
            },
        },
    },
}
hsc = histosys_combined(
[('hello', 'histosys'), ('world', 'histosys')], mc, mega_mods
)
mod = hsc.apply(pyhf.tensorlib.astensor([0.5, -1.0]))
shape = pyhf.tensorlib.shape(mod)
assert shape == (2, 2, 1, 3)
mod = np.asarray(pyhf.tensorlib.tolist(mod))
assert np.allclose(mod[0, 0, 0], [0.5, 1.0, 1.5])
hsc = histosys_combined(
[('hello', 'histosys'), ('world', 'histosys')], mc, mega_mods, batch_size=4
)
mod = hsc.apply(
pyhf.tensorlib.astensor([[-1.0, -1.0], [1.0, 1.0], [-1.0, 1.0], [1.0, 1.0]])
)
shape = pyhf.tensorlib.shape(mod)
assert shape == (2, 2, 4, 3)
mod = np.asarray(pyhf.tensorlib.tolist(mod))
assert np.allclose(mod[0, 0, 0], [-1.0, -2.0, -3.0])
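
# Sketch: at alpha = 0 the additive histosys interpolation applies no shift, so
# the batched combiner returns an all-zero delta tensor (assuming the default
# interpolation code used by histosys_combined).
mod_zero = hsc.apply(pyhf.tensorlib.astensor([[0.0, 0.0]] * 4))
assert np.allclose(np.asarray(pyhf.tensorlib.tolist(mod_zero)), 0.0)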
def test_shape(backend):
    tb = pyhf.tensorlib
    assert tb.shape(tb.ones((1, 2, 3, 4, 5))) == (1, 2, 3, 4, 5)
    assert tb.shape(tb.ones((0, 0))) == (0, 0)
    assert tb.shape(tb.astensor([])) == (0,)
    assert tb.shape(tb.astensor([1.0])) == (1,)
    assert tb.shape(tb.astensor(1.0)) == tb.shape(tb.astensor([1.0]))
    assert tb.shape(tb.astensor(0.0)) == tb.shape(tb.astensor([0.0]))
    assert tb.shape(tb.astensor((1.0, 1.0))) == tb.shape(tb.astensor([1.0, 1.0]))
    assert tb.shape(tb.astensor((0.0, 0.0))) == tb.shape(tb.astensor([0.0, 0.0]))
    with pytest.raises((ValueError, RuntimeError)):
        _ = tb.astensor([1, 2]) + tb.astensor([3, 4, 5])
    with pytest.raises((ValueError, RuntimeError)):
        _ = tb.astensor([1, 2]) - tb.astensor([3, 4, 5])
    with pytest.raises((ValueError, RuntimeError)):
        _ = tb.astensor([1, 2]) < tb.astensor([3, 4, 5])
    with pytest.raises((ValueError, RuntimeError)):
        _ = tb.astensor([1, 2]) > tb.astensor([3, 4, 5])
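
    # Sketch: with matching shapes the same elementwise operations succeed on
    # every backend, complementing the mismatched-shape failures above.
    a = tb.astensor([1.0, 2.0])
    b = tb.astensor([3.0, 4.0])
    assert tb.tolist(a + b) == [4.0, 6.0]
    assert tb.tolist(a * b) == [3.0, 8.0]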