from __future__ import annotations

import pyhf
from pyhf.parameters import ParamViewer
from pyhf import get_backend
from pyhf import events

from typing import Sequence, Callable, Any


class BaseApplier:
    """Base class for the dynamically generated modifier appliers."""


class BaseBuilder:
    """Base class for the dynamically generated modifier builders."""


def _allocate_new_param(
    p: dict[str, Sequence[float]]
) -> dict[str, str | bool | int | Sequence[float]]:
    """Build the parameter-set specification for one new unconstrained scalar parameter."""
    return {
        'paramset_type': 'unconstrained',
        'n_parameters': 1,
        'is_shared': True,
        'inits': p['inits'],
        'bounds': p['bounds'],
        'is_scalar': True,
        'fixed': False,
    }


def make_func(expression: str, deps: list[str]) -> Callable[[Sequence[float]], Any]:
    """Compile a numexpr expression into a callable of the dependent parameter values."""

    def func(d: Sequence[float]) -> Any:
        # Import lazily so numexpr is only required when the modifier is evaluated.
        import numexpr as ne

        return ne.evaluate(expression, local_dict=dict(zip(deps, d)))

    return func


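# For example (a sketch, not part of the module; the expression string and the
# parameter name "m1" are hypothetical), make_func pairs each dependent
# parameter name with the corresponding entry of the input sequence and lets
# numexpr evaluate the expression:
#
#     f = make_func("2 * m1 + 1", ["m1"])
#     f([3.0])  # evaluates "2 * m1 + 1" with m1 = 3.0 -> array(7.0)

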
def make_builder(
    funcname: str, deps: list[str], newparams: dict[str, dict[str, Sequence[float]]]
) -> type[BaseBuilder]:
    """Create a builder class that collects per-sample masks and expressions for the custom modifier."""

    class _builder(BaseBuilder):
        def __init__(self, config):
            self.builder_data = {'funcs': {}}
            self.config = config

        def collect(self, thismod, nom):
            # Mark every bin of this sample as affected when the modifier is present.
            maskval = bool(thismod)
            mask = [maskval] * len(nom)
            return {'mask': mask}

        def append(self, key, channel, sample, thismod, defined_samp):
            self.builder_data.setdefault(key, {}).setdefault(sample, {}).setdefault(
                'data', {'mask': []}
            )
            nom = (
                defined_samp['data']
                if defined_samp
                else [0.0] * self.config.channel_nbins[channel]
            )
            moddata = self.collect(thismod, nom)
            self.builder_data[key][sample]['data']['mask'] += moddata['mask']
            if thismod:
                # Record the expression once per modifier name and register the
                # new parameters it depends on.
                self.builder_data['funcs'].setdefault(
                    thismod['name'], thismod['data']['expr']
                )
                self.required_parsets = {
                    k: [_allocate_new_param(v)] for k, v in newparams.items()
                }

        def finalize(self):
            return self.builder_data

    return _builder


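# In a model spec, a sample would then carry a modifier entry whose 'data' holds
# the numexpr expression the builder reads above (the names here are
# hypothetical):
#
#     {"name": "customfunc", "type": "customfunc", "data": {"expr": "2 * m1"}}
#
# Roughly, pyhf instantiates the builder while parsing the spec, calls append
# once per (channel, sample) combination, and calls finalize at the end, so
# builder_data ends up holding one mask per sample plus the collected
# expressions.

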
def make_applier(
    funcname: str, deps: list[str], newparams: dict[str, dict[str, Sequence[float]]]
) -> type[BaseApplier]:
    """Create an applier class that evaluates the custom expressions as multiplicative modifiers."""

    class _applier(BaseApplier):
        name = funcname
        op_code = 'multiplication'

        def __init__(self, modifiers, pdfconfig, builder_data, batch_size=None):
            self.funcs = [make_func(v, deps) for v in builder_data['funcs'].values()]

            self.batch_size = batch_size
            pars_for_applier = deps
            _modnames = [f'{mtype}/{m}' for m, mtype in modifiers]

            parfield_shape = (
                (self.batch_size, pdfconfig.npars)
                if self.batch_size
                else (pdfconfig.npars,)
            )
            self.param_viewer = ParamViewer(
                parfield_shape, pdfconfig.par_map, pars_for_applier
            )
            self._custommod_mask = [
                [[builder_data[modname][s]['data']['mask']] for s in pdfconfig.samples]
                for modname in _modnames
            ]
            self._precompute()
            events.subscribe('tensorlib_changed')(self._precompute)

        def _precompute(self):
            tensorlib, _ = get_backend()
            if not self.param_viewer.index_selection:
                return
            self.custommod_mask = tensorlib.tile(
                tensorlib.astensor(self._custommod_mask),
                (1, 1, self.batch_size or 1, 1),
            )
            self.custommod_mask_bool = tensorlib.astensor(
                self.custommod_mask, dtype="bool"
            )
            self.custommod_default = tensorlib.ones(self.custommod_mask.shape)

        def apply(self, pars):
            """
            Returns:
                modification tensor: Shape (n_modifiers, n_global_samples, n_alphas, n_global_bin)
            """
            if not self.param_viewer.index_selection:
                return
            tensorlib, _ = get_backend()
            if self.batch_size is None:
                deps = self.param_viewer.get(pars)
                results = tensorlib.astensor([f(deps) for f in self.funcs])
                results = tensorlib.einsum('msab,m->msab', self.custommod_mask, results)
            else:
                deps = self.param_viewer.get(pars)
                results = tensorlib.astensor([f(deps) for f in self.funcs])
                results = tensorlib.einsum(
                    'msab,ma->msab', self.custommod_mask, results
                )
            results = tensorlib.where(
                self.custommod_mask_bool, results, self.custommod_default
            )
            return results

    return _applier


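# The applier's apply method returns per-bin multiplicative factors: bins
# selected by the builder's mask carry the evaluated expression, all other bins
# default to 1, and pyhf folds these factors into the expected sample yields
# (hence op_code 'multiplication' above).

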
def add_custom_modifier(
    funcname: str, deps: list[str], newparams: dict[str, dict[str, Sequence[float]]]
) -> dict[str, tuple[type[BaseBuilder], type[BaseApplier]]]:
    """Return the standard HistFactory modifier set extended with the custom modifier."""
    _builder = make_builder(funcname, deps, newparams)
    _applier = make_applier(funcname, deps, newparams)

    modifier_set = {_applier.name: (_builder, _applier)}
    modifier_set.update(**pyhf.modifiers.histfactory_set)
    return modifier_set
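

# Example usage (a sketch, not part of the module): with pyhf's experimental
# modifier-set support (pyhf >= 0.7, pyhf.Model(..., modifier_set=...)), a
# hypothetical parameter "m1" could be wired in roughly like this:
#
#     new_params = {'m1': {'inits': (1.0,), 'bounds': ((-5.0, 5.0),)}}
#     expanded_set = add_custom_modifier('customfunc', ['m1'], new_params)
#     model = pyhf.Model(spec, modifier_set=expanded_set, validate=False)
#
# validate=False is needed because the JSON schema does not know about the
# custom modifier type.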