
Commit 1961df6

kratsg authored and matthewfeickert committed
add in the notebook from lukas
1 parent 74441ab · commit 1961df6

File tree

4 files changed: +239 -0 lines changed


pyproject.toml

Lines changed: 2 additions & 0 deletions
@@ -243,6 +243,7 @@ warn_unreachable = true
 module = [
     'jax.*',
     'matplotlib.*',
+    'numexpr.*',
     'scipy.*',
     'tensorflow.*',
     'tensorflow_probability.*',
@@ -261,6 +262,7 @@ module = [
     'pyhf.cli.*',
     'pyhf.modifiers.*',
     'pyhf.exceptions.*',
+    'pyhf.experimental.*',
     'pyhf.parameters.*',
     'pyhf.schema.*',
     'pyhf.writexml',

src/pyhf/experimental/__init__.py

Whitespace-only changes.

src/pyhf/experimental/modifiers.py

Lines changed: 160 additions & 0 deletions
@@ -0,0 +1,160 @@
from __future__ import annotations
import pyhf
from pyhf.parameters import ParamViewer
from pyhf import get_backend
from pyhf import events

from typing import Sequence, Callable, Any


class BaseApplier:
    ...


class BaseBuilder:
    ...


def _allocate_new_param(
    p: dict[str, Sequence[float]]
) -> dict[str, str | bool | int | Sequence[float]]:
    return {
        'paramset_type': 'unconstrained',
        'n_parameters': 1,
        'is_shared': True,
        'inits': p['inits'],
        'bounds': p['bounds'],
        'is_scalar': True,
        'fixed': False,
    }


def make_func(expression: str, deps: list[str]) -> Callable[[Sequence[float]], Any]:
    def func(d: Sequence[float]) -> Any:
        import numexpr as ne

        return ne.evaluate(expression, local_dict=dict(zip(deps, d)))

    return func


def make_builder(
    funcname: str, deps: list[str], newparams: dict[str, dict[str, Sequence[float]]]
) -> BaseBuilder:
    class _builder(BaseBuilder):
        def __init__(self, config):
            self.builder_data = {'funcs': {}}
            self.config = config

        def collect(self, thismod, nom):
            maskval = True if thismod else False
            mask = [maskval] * len(nom)
            return {'mask': mask}

        def append(self, key, channel, sample, thismod, defined_samp):
            self.builder_data.setdefault(key, {}).setdefault(sample, {}).setdefault(
                'data', {'mask': []}
            )
            nom = (
                defined_samp['data']
                if defined_samp
                else [0.0] * self.config.channel_nbins[channel]
            )
            moddata = self.collect(thismod, nom)
            self.builder_data[key][sample]['data']['mask'] += moddata['mask']
            if thismod:
                if thismod['name'] != funcname:
                    print(thismod)
                self.builder_data['funcs'].setdefault(
                    thismod['name'], thismod['data']['expr']
                )
                self.required_parsets = {
                    k: [_allocate_new_param(v)] for k, v in newparams.items()
                }

        def finalize(self):
            return self.builder_data

    return _builder


def make_applier(
    funcname: str, deps: list[str], newparams: dict[str, dict[str, Sequence[float]]]
) -> BaseApplier:
    class _applier(BaseApplier):
        name = funcname
        op_code = 'multiplication'

        def __init__(self, modifiers, pdfconfig, builder_data, batch_size=None):
            self.funcs = [make_func(v, deps) for v in builder_data['funcs'].values()]

            self.batch_size = batch_size
            pars_for_applier = deps
            _modnames = [f'{mtype}/{m}' for m, mtype in modifiers]

            parfield_shape = (
                (self.batch_size, pdfconfig.npars)
                if self.batch_size
                else (pdfconfig.npars,)
            )
            self.param_viewer = ParamViewer(
                parfield_shape, pdfconfig.par_map, pars_for_applier
            )
            self._custommod_mask = [
                [[builder_data[modname][s]['data']['mask']] for s in pdfconfig.samples]
                for modname in _modnames
            ]
            self._precompute()
            events.subscribe('tensorlib_changed')(self._precompute)

        def _precompute(self):
            tensorlib, _ = get_backend()
            if not self.param_viewer.index_selection:
                return
            self.custommod_mask = tensorlib.tile(
                tensorlib.astensor(self._custommod_mask),
                (1, 1, self.batch_size or 1, 1),
            )
            self.custommod_mask_bool = tensorlib.astensor(
                self.custommod_mask, dtype="bool"
            )
            self.custommod_default = tensorlib.ones(self.custommod_mask.shape)

        def apply(self, pars):
            """
            Returns:
                modification tensor: Shape (n_modifiers, n_global_samples, n_alphas, n_global_bin)
            """
            if not self.param_viewer.index_selection:
                return
            tensorlib, _ = get_backend()
            if self.batch_size is None:
                deps = self.param_viewer.get(pars)
                print('deps', deps.shape)
                results = tensorlib.astensor([f(deps) for f in self.funcs])
                results = tensorlib.einsum('msab,m->msab', self.custommod_mask, results)
            else:
                deps = self.param_viewer.get(pars)
                print('deps', deps.shape)
                results = tensorlib.astensor([f(deps) for f in self.funcs])
                results = tensorlib.einsum(
                    'msab,ma->msab', self.custommod_mask, results
                )
            results = tensorlib.where(
                self.custommod_mask_bool, results, self.custommod_default
            )
            return results

    return _applier


def add_custom_modifier(
    funcname: str, deps: list[str], newparams: dict[str, dict[str, Sequence[float]]]
) -> dict[str, tuple[BaseBuilder, BaseApplier]]:

    _builder = make_builder(funcname, deps, newparams)
    _applier = make_applier(funcname, deps, newparams)

    modifier_set = {_applier.name: (_builder, _applier)}
    modifier_set.update(**pyhf.modifiers.histfactory_set)
    return modifier_set
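
For orientation: make_func compiles a numexpr expression string into a callable over the listed dependent parameters, and add_custom_modifier registers a builder/applier pair for that expression type alongside the standard HistFactory modifiers. Below is a minimal sketch of exercising make_func on its own; the parameter values are illustrative, the snippet assumes numexpr is installed, and it is not part of the commit.

import pyhf.experimental.modifiers as em

# Compile 'm1 + (m2**2)' into a callable that maps a sequence of
# [m1, m2] values to the evaluated expression via numexpr.
func = em.make_func('m1 + (m2**2)', ['m1', 'm2'])
print(func([1.0, 2.0]))  # numexpr evaluates with m1=1.0, m2=2.0 -> 5.0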

tests/test_experimental.py

Lines changed: 77 additions & 0 deletions
@@ -0,0 +1,77 @@
import pyhf
import pyhf.experimental.modifiers


def test_add_custom_modifier(backend):
    tensorlib, _ = backend

    new_params = {
        'm1': {'inits': (1.0,), 'bounds': ((-5.0, 5.0),)},
        'm2': {'inits': (1.0,), 'bounds': ((-5.0, 5.0),)},
    }

    expanded_pyhf = pyhf.experimental.modifiers.add_custom_modifier(
        'customfunc', ['m1', 'm2'], new_params
    )
    model = pyhf.Model(
        {
            'channels': [
                {
                    'name': 'singlechannel',
                    'samples': [
                        {
                            'name': 'signal',
                            'data': [10] * 20,
                            'modifiers': [
                                {
                                    'name': 'f2',
                                    'type': 'customfunc',
                                    'data': {'expr': 'm1'},
                                },
                            ],
                        },
                        {
                            'name': 'background',
                            'data': [100] * 20,
                            'modifiers': [
                                {
                                    'name': 'f1',
                                    'type': 'customfunc',
                                    'data': {'expr': 'm1+(m2**2)'},
                                },
                            ],
                        },
                    ],
                }
            ]
        },
        modifier_set=expanded_pyhf,
        poi_name='m1',
        validate=False,
        batch_size=1,
    )

    assert tensorlib.tolist(model.expected_actualdata([[1.0, 2.0]])) == [
        [
            510.0,
            510.0,
            510.0,
            510.0,
            510.0,
            510.0,
            510.0,
            510.0,
            510.0,
            510.0,
            510.0,
            510.0,
            510.0,
            510.0,
            510.0,
            510.0,
            510.0,
            510.0,
            510.0,
            510.0,
        ]
    ]
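
A quick arithmetic check of the asserted numbers (a sketch, not part of the test): at the parameter point [m1, m2] = [1.0, 2.0], the 'f2' modifier multiplies each signal bin of 10 by m1 and the 'f1' modifier multiplies each background bin of 100 by m1 + m2**2, so every bin expects 10 * 1.0 + 100 * (1.0 + 4.0) = 510.0.

m1, m2 = 1.0, 2.0
signal = 10 * m1                 # 'f2' scales the signal sample by m1 -> 10.0
background = 100 * (m1 + m2**2)  # 'f1' scales the background sample by m1 + m2**2 -> 500.0
assert signal + background == 510.0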
