
Commit a97adea

format
1 parent 7aabb23
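All 43 changed lines replace single-quoted string literals with double-quoted ones; the change is purely stylistic and leaves behavior untouched.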

2 files changed: +43 -43 lines changed

src/pyhf/experimental/modifiers.py (+24 -24)
@@ -19,13 +19,13 @@ def _allocate_new_param(
     p: dict[str, Sequence[float]]
 ) -> dict[str, str | bool | int | Sequence[float]]:
     return {
-        'paramset_type': 'unconstrained',
-        'n_parameters': 1,
-        'is_shared': True,
-        'inits': p['inits'],
-        'bounds': p['bounds'],
-        'is_scalar': True,
-        'fixed': False,
+        "paramset_type": "unconstrained",
+        "n_parameters": 1,
+        "is_shared": True,
+        "inits": p["inits"],
+        "bounds": p["bounds"],
+        "is_scalar": True,
+        "fixed": False,
     }
 
 
@@ -45,30 +45,30 @@ class _builder(BaseBuilder):
         is_shared = False
 
         def __init__(self, config):
-            self.builder_data = {'funcs': {}}
+            self.builder_data = {"funcs": {}}
             self.config = config
 
         def collect(self, thismod, nom):
             maskval = True if thismod else False
             mask = [maskval] * len(nom)
-            return {'mask': mask}
+            return {"mask": mask}
 
         def append(self, key, channel, sample, thismod, defined_samp):
             self.builder_data.setdefault(key, {}).setdefault(sample, {}).setdefault(
-                'data', {'mask': []}
+                "data", {"mask": []}
             )
             nom = (
-                defined_samp['data']
+                defined_samp["data"]
                 if defined_samp
                 else [0.0] * self.config.channel_nbins[channel]
             )
             moddata = self.collect(thismod, nom)
-            self.builder_data[key][sample]['data']['mask'] += moddata['mask']
+            self.builder_data[key][sample]["data"]["mask"] += moddata["mask"]
             if thismod:
-                if thismod['name'] != funcname:
+                if thismod["name"] != funcname:
                     print(thismod)
-                self.builder_data['funcs'].setdefault(
-                    thismod['name'], thismod['data']['expr']
+                self.builder_data["funcs"].setdefault(
+                    thismod["name"], thismod["data"]["expr"]
                 )
             self.required_parsets = {
                 k: [_allocate_new_param(v)] for k, v in newparams.items()
@@ -85,14 +85,14 @@ def make_applier(
 ) -> BaseApplier:
     class _applier(BaseApplier):
         name = funcname
-        op_code = 'multiplication'
+        op_code = "multiplication"
 
         def __init__(self, modifiers, pdfconfig, builder_data, batch_size=None):
-            self.funcs = [make_func(v, deps) for v in builder_data['funcs'].values()]
+            self.funcs = [make_func(v, deps) for v in builder_data["funcs"].values()]
 
             self.batch_size = batch_size
             pars_for_applier = deps
-            _modnames = [f'{mtype}/{m}' for m, mtype in modifiers]
+            _modnames = [f"{mtype}/{m}" for m, mtype in modifiers]
 
             parfield_shape = (
                 (self.batch_size, pdfconfig.npars)
@@ -103,11 +103,11 @@ def __init__(self, modifiers, pdfconfig, builder_data, batch_size=None):
                 parfield_shape, pdfconfig.par_map, pars_for_applier
             )
             self._custommod_mask = [
-                [[builder_data[modname][s]['data']['mask']] for s in pdfconfig.samples]
+                [[builder_data[modname][s]["data"]["mask"]] for s in pdfconfig.samples]
                 for modname in _modnames
             ]
             self._precompute()
-            events.subscribe('tensorlib_changed')(self._precompute)
+            events.subscribe("tensorlib_changed")(self._precompute)
 
         def _precompute(self):
             tensorlib, _ = get_backend()
@@ -132,15 +132,15 @@ def apply(self, pars):
             tensorlib, _ = get_backend()
             if self.batch_size is None:
                 deps = self.param_viewer.get(pars)
-                print('deps', deps.shape)
+                print("deps", deps.shape)
                 results = tensorlib.astensor([f(deps) for f in self.funcs])
-                results = tensorlib.einsum('msab,m->msab', self.custommod_mask, results)
+                results = tensorlib.einsum("msab,m->msab", self.custommod_mask, results)
             else:
                 deps = self.param_viewer.get(pars)
-                print('deps', deps.shape)
+                print("deps", deps.shape)
                 results = tensorlib.astensor([f(deps) for f in self.funcs])
                 results = tensorlib.einsum(
-                    'msab,ma->msab', self.custommod_mask, results
+                    "msab,ma->msab", self.custommod_mask, results
                 )
             results = tensorlib.where(
                 self.custommod_mask_bool, results, self.custommod_default
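For context on the hunk above: the `"msab,m->msab"` contraction in `_applier.apply` scales each custom modifier's mask tensor by that modifier's scalar function value. A minimal NumPy sketch of just that broadcasting step (the shapes, sizes, and variable names below are illustrative assumptions, not taken from pyhf):

```python
# Illustrative sketch of the "msab,m->msab" einsum used in _applier.apply
# (the batch_size is None branch). Axis labels follow the subscripts:
# m = modifiers, s = samples, a = alphas, b = bins.
import numpy as np

n_mod, n_samp, n_alpha, n_bin = 2, 3, 1, 4
custommod_mask = np.ones((n_mod, n_samp, n_alpha, n_bin))  # stand-in for self.custommod_mask
results = np.array([1.5, 0.5])  # one scalar per custom-modifier function

# Each modifier's mask block is multiplied by that modifier's own scalar result.
scaled = np.einsum("msab,m->msab", custommod_mask, results)
assert scaled.shape == (n_mod, n_samp, n_alpha, n_bin)
assert np.allclose(scaled[0], 1.5) and np.allclose(scaled[1], 0.5)
```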

tests/test_experimental.py (+19 -19)
@@ -6,38 +6,38 @@ def test_add_custom_modifier(backend):
     tensorlib, _ = backend
 
     new_params = {
-        'm1': {'inits': (1.0,), 'bounds': ((-5.0, 5.0),)},
-        'm2': {'inits': (1.0,), 'bounds': ((-5.0, 5.0),)},
+        "m1": {"inits": (1.0,), "bounds": ((-5.0, 5.0),)},
+        "m2": {"inits": (1.0,), "bounds": ((-5.0, 5.0),)},
     }
 
     expanded_pyhf = pyhf.experimental.modifiers.add_custom_modifier(
-        'customfunc', ['m1', 'm2'], new_params
+        "customfunc", ["m1", "m2"], new_params
     )
     model = pyhf.Model(
         {
-            'channels': [
+            "channels": [
                 {
-                    'name': 'singlechannel',
-                    'samples': [
+                    "name": "singlechannel",
+                    "samples": [
                         {
-                            'name': 'signal',
-                            'data': [10] * 20,
-                            'modifiers': [
+                            "name": "signal",
+                            "data": [10] * 20,
+                            "modifiers": [
                                 {
-                                    'name': 'f2',
-                                    'type': 'customfunc',
-                                    'data': {'expr': 'm1'},
+                                    "name": "f2",
+                                    "type": "customfunc",
+                                    "data": {"expr": "m1"},
                                 },
                             ],
                         },
                         {
-                            'name': 'background',
-                            'data': [100] * 20,
-                            'modifiers': [
+                            "name": "background",
+                            "data": [100] * 20,
+                            "modifiers": [
                                 {
-                                    'name': 'f1',
-                                    'type': 'customfunc',
-                                    'data': {'expr': 'm1+(m2**2)'},
+                                    "name": "f1",
+                                    "type": "customfunc",
+                                    "data": {"expr": "m1+(m2**2)"},
                                 },
                             ],
                         },
@@ -46,7 +46,7 @@ def test_add_custom_modifier(backend):
             ]
         },
         modifier_set=expanded_pyhf,
-        poi_name='m1',
+        poi_name="m1",
         validate=False,
         batch_size=1,
     )
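As a reading aid, the test's `new_params` entries are exactly what `_allocate_new_param` (changed in the first file) consumes. A small sketch reproducing, from the diff above, the parameter-set dict it would build for `"m1"` (the values are written out literally here rather than calling pyhf):

```python
# What _allocate_new_param (see the modifiers.py hunk above) produces for the
# test's "m1" entry: an unconstrained, scalar, shared parameter set.
p = {"inits": (1.0,), "bounds": ((-5.0, 5.0),)}
paramset = {
    "paramset_type": "unconstrained",
    "n_parameters": 1,
    "is_shared": True,
    "inits": p["inits"],
    "bounds": p["bounds"],
    "is_scalar": True,
    "fixed": False,
}
assert paramset["inits"] == (1.0,)
assert paramset["bounds"] == ((-5.0, 5.0),)
```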
