Skip to content

Commit f5b5522

Browse files
committed
personal use case of custom modifier
1 parent a1a31f1 commit f5b5522

File tree

1 file changed

+167
-0
lines changed

1 file changed

+167
-0
lines changed

src/pyhf/experimental/modifiers.py

Lines changed: 167 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,167 @@
1+
from __future__ import annotations
2+
3+
import logging
4+
from typing import Any, Callable, Sequence
5+
6+
import pyhf
7+
from pyhf import events, get_backend
8+
from pyhf.parameters import ParamViewer
9+
10+
import numpy as np
11+
12+
# Module-level logger, named after this module per the standard logging convention.
log = logging.getLogger(__name__)

# Public names reported by ``__dir__``.
# NOTE(review): the function defined below is named ``add`` — confirm that the
# exported name ``add_custom_modifier`` actually exists in this module.
__all__ = ["add_custom_modifier"]
15+
16+
17+
def __dir__():
    """Restrict ``dir()`` on this module to the declared public API."""
    return __all__
19+
20+
21+
# numexpr ships only with the ``experimental`` extra; importing it eagerly here
# makes the module fail fast with install instructions rather than raising a
# bare ImportError later, deep inside expression evaluation.
try:
    import numexpr as ne
except ModuleNotFoundError:
    # Log with traceback (exc_info=True) before re-raising so the remedy is
    # visible even if the exception is swallowed upstream.
    log.error(
        "\nInstallation of the experimental extra is required to use pyhf.experimental.modifiers"
        + "\nPlease install with: python -m pip install 'pyhf[experimental]'\n",
        exc_info=True,
    )
    raise
30+
31+
32+
class BaseApplier:
    """Empty placeholder base class for modifier appliers; defines no behavior."""
34+
35+
36+
class BaseBuilder:
    """Empty placeholder base class for modifier builders; defines no behavior."""
38+
39+
40+
def add(funcname, par_names, newparams, input_set=None, namespace=None):
    """Create a modifier set extended with one custom, expression-based modifier.

    Args:
        funcname: Name under which the custom modifier is registered.
        par_names: Ordered names of the parameters the expressions depend on.
        newparams: Mapping of parameter name to its spec. Each spec must
            provide ``inits`` and ``bounds`` and may provide
            ``paramset_type`` (default ``'unconstrained'``) and
            ``auxdata`` (default ``(0.0,)``).
        input_set: Base modifier set to extend. Defaults to
            ``pyhf.modifiers.histfactory_set``.
        namespace: Optional mapping of extra names usable inside expressions:
            either additional symbols passed to ``numexpr`` or, when a key
            matches the full expression string, a factory called with the
            parameter-value dict whose result is invoked with no arguments.

    Returns:
        dict: ``{name: (builder, applier)}`` modifier set containing the new
        custom modifier plus all entries of ``input_set``.
    """
    namespace = namespace or {}

    def make_func(
        expression: str, namespace=namespace
    ) -> Callable[[Sequence[float]], Any]:
        """Compile ``expression`` into a callable of the dependent parameter values."""

        def func(deps: Sequence[float]) -> Any:
            # A namespace entry named exactly like the expression wins: it is
            # treated as a factory taking {par_name: value} and returning a
            # zero-argument callable.
            if expression in namespace:
                parvals = dict(zip(par_names, deps))
                return namespace[expression](parvals)()
            # Otherwise evaluate with numexpr, exposing both the parameter
            # values and any extra namespace symbols.
            return ne.evaluate(
                expression, local_dict=dict(zip(par_names, deps), **namespace)
            )

        return func

    def _allocate_new_param(p):
        """Build the parameter-set spec dict for one new parameter spec ``p``."""
        return {
            'paramset_type': p.get('paramset_type', 'unconstrained'),
            'n_parameters': 1,
            'is_shared': True,
            'inits': p['inits'],
            'bounds': p['bounds'],
            'is_scalar': True,
            'fixed': False,
            'auxdata': p.get('auxdata', (0.0,)),
        }

    class _builder:
        """Collects per-sample masks and expressions for the custom modifier."""

        is_shared = True

        def __init__(self, config):
            self.builder_data = {'funcs': {}}
            self.config = config
            self.required_parsets = {}

        def collect(self, thismod, nom):
            """Return a per-bin mask: all True when the modifier applies, else all False."""
            mask = [bool(thismod)] * len(nom)
            return {'mask': mask}

        def append(self, key, channel, sample, thismod, defined_samp):
            """Accumulate mask data (and, when present, the expression) for one sample."""
            self.builder_data.setdefault(key, {}).setdefault(sample, {}).setdefault(
                'data', {'mask': []}
            )
            # Fall back to a zero template when the sample has no defined data.
            nom = (
                defined_samp['data']
                if defined_samp
                else [0.0] * self.config.channel_nbins[channel]
            )
            moddata = self.collect(thismod, nom)
            self.builder_data[key][sample]['data']['mask'] += moddata['mask']
            if thismod:
                # Record the expression only for modifiers other than the one
                # named ``funcname`` itself.
                if thismod['name'] != funcname:
                    self.builder_data['funcs'].setdefault(
                        thismod['name'], thismod['data']['expr']
                    )
                self.required_parsets = {
                    k: [_allocate_new_param(v)] for k, v in newparams.items()
                }

        def finalize(self):
            """Return the accumulated builder data."""
            return self.builder_data

    class _applier:
        """Applies the compiled expressions additively to the masked bins."""

        name = funcname
        op_code = 'addition'

        def __init__(self, modifiers, pdfconfig, builder_data, batch_size=None):
            # One compiled callable per collected expression.
            self.funcs = [make_func(f) for f in builder_data['funcs'].values()]

            self.batch_size = batch_size
            pars_for_applier = par_names
            _modnames = [f'{mtype}/{m}' for m, mtype in modifiers]

            parfield_shape = (
                (self.batch_size, pdfconfig.npars)
                if self.batch_size
                else (pdfconfig.npars,)
            )
            self.param_viewer = ParamViewer(
                parfield_shape, pdfconfig.par_map, pars_for_applier
            )
            self._custommod_mask = [
                [[builder_data[modname][s]['data']['mask']] for s in pdfconfig.samples]
                for modname in _modnames
            ]
            self._precompute()
            # Re-run precomputation whenever the tensor backend is swapped.
            events.subscribe('tensorlib_changed')(self._precompute)

        def _precompute(self):
            """Cache backend tensors (mask, bool mask, zero default) for ``apply``."""
            tensorlib, _ = get_backend()
            if not self.param_viewer.index_selection:
                return
            self.custommod_mask = tensorlib.tile(
                tensorlib.astensor(self._custommod_mask),
                (1, 1, self.batch_size or 1, 1),
            )
            self.custommod_mask_bool = tensorlib.astensor(
                self.custommod_mask, dtype="bool"
            )
            self.custommod_default = tensorlib.zeros(self.custommod_mask.shape)

        def apply(self, pars):
            """
            Returns:
                modification tensor: Shape (n_modifiers, n_global_samples, n_alphas, n_global_bin)
            """
            if not self.param_viewer.index_selection:
                return
            tensorlib, _ = get_backend()
            deps = self.param_viewer.get(pars)
            out = tensorlib.astensor([f(deps) for f in self.funcs])
            # NOTE(review): np.zeros_like/np.place operate on the raw numpy
            # representation — presumably this assumes the numpy backend;
            # confirm behavior with non-numpy tensor backends.
            results = np.zeros_like(self.custommod_mask)
            np.place(results, self.custommod_mask, out)
            results = tensorlib.where(
                self.custommod_mask_bool, results, self.custommod_default
            )
            return results

    modifier_set = {_applier.name: (_builder, _applier)}
    # Plain ``update`` (not ``update(**d)``) also works when keys are not
    # valid Python identifiers; equivalent for all current key names.
    modifier_set.update(
        input_set if input_set is not None else pyhf.modifiers.histfactory_set
    )
    return modifier_set


# Backward-compatible alias matching the name exported in ``__all__``.
add_custom_modifier = add

0 commit comments

Comments
 (0)