Skip to content

Commit 2c870b6

Browse files
committed
Deleted redundant files; corrected HepData tables; restructured and streamlined the filter script
1 parent f40ed74 commit 2c870b6

16 files changed

+488
-18736
lines changed

nnpdf_data/nnpdf_data/commondata/ATLAS_WCHARM_13TEV/filter.py

Lines changed: 172 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,177 @@
11
"""
2-
When running `python filter.py` the relevant data yaml
3-
file will be created in the `nnpdf_data/commondata/ATLAS_WPWM_7TEV_46FB` directory.
2+
43
"""
54

65
import yaml
6+
import numpy as np
7+
from nnpdf_data.filter_utils.utils import covmat_to_artunc
8+
9+
def get_tables(observable=None):
10+
"""
11+
Get the Hepdata tables, given the tables and version specified in metadata
12+
"""
13+
prefix = "rawdata/HEPData-ins2628732"
14+
with open("metadata.yaml", "r") as file:
15+
metadata = yaml.safe_load(file)
16+
17+
version = metadata["hepdata"]["version"]
18+
19+
if observable == "WMWP-D":
20+
tables = metadata["implemented_observables"][0]["tables"]
21+
elif observable == "WMWP-Dstar":
22+
tables = metadata["implemented_observables"][1]["tables"]
23+
else:
24+
print("Observable not implemented.")
25+
print("Choose one of the following observables:")
26+
print("- WMWP-D")
27+
print("- WMWP-Dstar")
28+
29+
hepdata_tables = []
30+
31+
for table in tables:
32+
hepdata_tables.append(f"{prefix}-v{version}-Table_{table}.yaml")
33+
34+
return hepdata_tables
35+
36+
def get_all(observable=None):
37+
"""
38+
Returns data, kinematics and uncertainties for dumping in the .yaml files
39+
"""
40+
data_central = []
41+
kinematics = []
42+
uncertainties = []
43+
44+
hepdata_tables = get_tables(observable)
45+
data_tables = hepdata_tables[:-1]
46+
for table in data_tables:
47+
with open(table, 'r') as f:
48+
input = yaml.safe_load(f)
49+
# Central values
50+
data_values = input["dependent_variables"][0]["values"]
51+
for data_value in data_values:
52+
data_central.append(data_value["value"])
53+
# Kinematic bins
54+
kin_values = input["independent_variables"][0]["values"]
55+
for kin_value in kin_values:
56+
kin = {
57+
'abs_eta': {'min': kin_value['low'],
58+
'mid': 0.5 * (kin_value['low'] + kin_value['high']),
59+
'max': kin_value['high']},
60+
'm_W2': {'min': None, 'mid': 6.46174823e+03, 'max': None},}
61+
kinematics.append(kin)
62+
63+
ndata = len(data_central)
64+
65+
# Uncertainties
66+
# Construct luminosity covariance matrix
67+
lumi_unc = 0.83 #%
68+
lumi_uncs = []
69+
lumi_cov = []
70+
tot_cov = []
71+
for data in data_central:
72+
lumi_uncs.append(data * lumi_unc / 100.)
73+
for lumi_i in lumi_uncs:
74+
for lumi_j in lumi_uncs:
75+
lumi_cov.append(lumi_i * lumi_j)
76+
77+
# Read total covariance matrix
78+
with open(hepdata_tables[2], 'r') as f:
79+
input = yaml.safe_load(f)
80+
cov_values = input["dependent_variables"][0]["values"]
81+
for cov_value in cov_values:
82+
tot_cov.append(cov_value["value"])
83+
84+
# Compute covariance matrix without luminosity uncertainty
85+
partial_cov = np.subtract(tot_cov,lumi_cov)
86+
87+
# Generate artifical systematic uncertainties form partial_cov
88+
art_unc = covmat_to_artunc(ndata, partial_cov, 0)
89+
90+
for i in range(len(art_unc)):
91+
errors = art_unc[i]
92+
uncertainty = {}
93+
for j in range(len(errors)):
94+
unc = {"art. sys. " + f"{j+1}" : errors[j]}
95+
uncertainty.update(unc)
96+
97+
lumi_unc = {"luminosity": lumi_uncs[i] }
98+
uncertainty.update(lumi_unc)
99+
uncertainties.append(uncertainty)
100+
101+
return (data_central, kinematics, uncertainties)
102+
103+
def filter_ATLAS_WCHARM_13TEV(observable=None):
104+
"""
105+
Dumps data, kinematics, and uncertainties on .yaml files
106+
"""
107+
central_values, kinematics, uncertainties = get_all(observable)
108+
# Central values
109+
data_central_yaml = {"data_central": central_values}
110+
# Kinematics
111+
kinematics_yaml = {"bins": kinematics}
112+
# Uncertainties
113+
treatment = {"Data stat.": "ADD",
114+
"Unfolding stat.": "ADD",
115+
"Unfolding model": "ADD",
116+
"Int. luminosity": "MULT",
117+
"Lepton energy": "ADD",
118+
"Efficiency": "ADD",
119+
"Backgrounds": "MULT",
120+
"Jet energy": "MULT",
121+
"Others": "MULT"}
122+
correlation = {"Data stat.": "UNCORR",
123+
"Unfolding stat.": "UNCORR",
124+
"Unfolding model": "UNCORR",
125+
"Int. luminosity": "CMSLUMI16",
126+
"Lepton energy": "UNCORR",
127+
"Efficiency": "UNCORR",
128+
"Backgrounds": "CORR",
129+
"Jet energy": "CORR",
130+
"Others": "CORR"}
131+
definitions = {}
132+
for key,value in uncertainties[0].items():
133+
if key == "luminosity":
134+
definition = {key :
135+
{"description": key + " unc. from HepData",
136+
"treatment": "MULT",
137+
"type": "ATLASLUMI16"}}
138+
else:
139+
definition = {key :
140+
{"description": key + " unc. from HepData",
141+
"treatment": "ADD",
142+
"type": "CORR"}}
143+
definitions.update(definition)
144+
uncertainties_yaml = {"definitions": definitions,"bins": uncertainties}
145+
146+
with open("data_" + observable + ".yaml", "w") as file:
147+
yaml.dump(data_central_yaml, file, sort_keys=False)
148+
with open("kinematics_" + observable + ".yaml", "w") as file:
149+
yaml.dump(kinematics_yaml, file, sort_keys=False)
150+
with open("uncertainties_" + observable + ".yaml", "w") as file:
151+
yaml.dump(uncertainties_yaml, file, sort_keys=False)
152+
153+
if __name__ == "__main__":
154+
filter_ATLAS_WCHARM_13TEV("WMWP-D")
155+
filter_ATLAS_WCHARM_13TEV("WMWP-Dstar")
156+
157+
158+
159+
160+
161+
162+
163+
164+
165+
166+
167+
168+
169+
170+
171+
172+
173+
174+
"""
7175
from filter_utils import (
8176
get_data_values,
9177
get_kinematics,
@@ -16,9 +184,7 @@
16184
17185
18186
def filter_ATLAS_WCHARM_13TEV_data_kinematic():
19-
"""
20-
This function writes the systematics to yaml files.
21-
"""
187+
22188
23189
central_values = get_data_values()
24190
@@ -132,3 +298,4 @@ def filter_get_systematics():
132298
filter_ATLAS_WCHARM_13TEV_data_kinematic()
133299
filter_get_artificial_uncertainties()
134300
filter_get_systematics()
301+
"""

0 commit comments

Comments
 (0)