Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
27 commits
Select commit Hold shift + click to select a range
3925642
Implementation of the CMS 13 TeV Z pT data set
enocera Aug 20, 2025
7725ed7
Added rawdata files
enocera Aug 20, 2025
51e39e7
Corrected inconsistency in variable name
enocera Aug 20, 2025
67af04c
Corrected mass range; renamed data set; removed sqrts
enocera Sep 30, 2025
d9f4050
Added rule to remove region potentially sensitive to resummation effe…
enocera Sep 30, 2025
7784748
Implementation of the CMS 13 TeV Z pT data set
enocera Aug 20, 2025
be71306
Added rawdata files
enocera Aug 20, 2025
eb0f946
Corrected inconsistency in variable name
enocera Aug 20, 2025
485bbde
Corrected mass range; renamed data set; removed sqrts
enocera Sep 30, 2025
d775d15
Added rule to remove region potentially sensitive to resummation effe…
enocera Sep 30, 2025
adf38e8
Temporary return to the square of the mass
enocera Oct 2, 2025
14ef816
Fixed cnflicts - returned to square of the mass
enocera Oct 2, 2025
5d7873a
Implementation of the CMS 13 TeV Z pT data set
enocera Aug 20, 2025
4bc8cf9
Added rawdata files
enocera Aug 20, 2025
75c81a4
Corrected inconsistency in variable name
enocera Aug 20, 2025
8d0991d
Corrected mass range; renamed data set; removed sqrts
enocera Sep 30, 2025
5dd4bbc
Added rule to remove region potentially sensitive to resummation effe…
enocera Sep 30, 2025
2870a1b
Temporary return to the square of the mass
enocera Oct 2, 2025
cf784a0
Implementation of the CMS 13 TeV Z pT data set
enocera Aug 20, 2025
ff25e94
Added rawdata files
enocera Aug 20, 2025
329dd29
Corrected inconsistency in variable name
enocera Aug 20, 2025
32d854e
Corrected mass range; renamed data set; removed sqrts
enocera Sep 30, 2025
30872fb
fixed process variables
scarlehoff Oct 22, 2025
fa407ba
Rebased
enocera Dec 8, 2025
2f06831
Updated LHC ZpT data 13 TeV. CMS: removed useless small pT bins; upda…
enocera Dec 10, 2025
0ba8b04
Corrected kinematic values ATLAS_Z0J_13TEV
enocera Dec 10, 2025
926dcf3
Merge branch 'master' into CMS_Z0J_13TEV
enocera Dec 10, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions nnpdf_data/nnpdf_data/commondata/ATLAS_Z0J_13TEV_PT/data.yaml
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@
data_central:
- 0.0473835
- 0.0405684
- 0.0343165
- 0.0291566
- 0.0248037
Expand Down Expand Up @@ -37,3 +39,4 @@ data_central:
- 5.5219e-07
- 2.01646e-07
- 5.11526e-08
- 2.51767e-06
163 changes: 78 additions & 85 deletions nnpdf_data/nnpdf_data/commondata/ATLAS_Z0J_13TEV_PT/filter.py
Original file line number Diff line number Diff line change
@@ -1,101 +1,94 @@
"""

This file contains the piece of code needed to implement the ATLAS ZpT
measurement at 13 TeV. We consider the combined electron-muon measurement, for
which a total uncorrelated and a total correlated uncertainty are given. The
measurement is normalised to the fiducial cross section, therefore there is no
luminosity uncertainty. The first three bins in pT are cropped because of
the impossibility of producing theoretical predictions.
"""

from filter_utils import get_data_values, get_kinematics, get_systematics
import yaml

UNCORRELATED_SYS = ["Stat (Data)", "Stat (MC)", "Efficiencies (Uncorellated)"]


def get_tables():
    """
    Return the list of HepData table files to be filtered.

    Returns
    -------
    list[str]
        Paths (relative to this directory) of the rawdata HepData YAML
        tables for the combined electron-muon measurement.
    """
    # Table 4a of HEPData record ins1768911 (v3): combined l+l- channel.
    hepdata_tables = ["rawdata/HEPData-ins1768911-v3-Table_4a.yaml"]
    return hepdata_tables

def get_all():
    """
    Collect data, kinematics and uncertainties from the HepData tables.

    Reads every table returned by ``get_tables`` and builds, bin by bin,
    the central values, the kinematic dictionaries and the (absolute)
    uncertainties, then crops the first three pT bins, for which no
    theoretical predictions can be produced.

    Returns
    -------
    tuple(list, list, list)
        ``(data_central, kinematics, uncertainties)`` with the first
        three bins removed from each list.
    """
    data_central = []
    kinematics = []
    uncertainties = []

    for table in get_tables():
        # NOTE: was `input = yaml.safe_load(f)` — renamed to avoid
        # shadowing the builtin.
        with open(table, 'r') as stream:
            table_data = yaml.safe_load(stream)

        # Central values.
        data_values = table_data["dependent_variables"][0]["values"]
        for data_value in data_values:
            data_central.append(data_value["value"])

        # Kinematic bins: pT edges from the table; fixed m_Z^2 (91.2 GeV
        # squared) and sqrt(s) = 13 TeV.
        for kin_value in table_data["independent_variables"][0]["values"]:
            kinematics.append(
                {'pT': {'min': kin_value['low'],
                        'mid': 0.5 * (kin_value['low'] + kin_value['high']),
                        'max': kin_value['high']},
                 'm_Z2': {'min': None, 'mid': 8317.44, 'max': None},
                 'sqrts': {'min': None, 'mid': 13000, 'max': None}})

        # Uncertainties: HepData gives relative (percent) symmetric errors;
        # convert to absolute values against the bin's own central value.
        # (Using data_value["value"] directly — rather than a counter into
        # the accumulated data_central list — stays correct if more than
        # one table is ever listed.)
        for data_value in data_values:
            uncertainty = {}
            for error in data_value["errors"]:
                uncertainty[error["label"]] = (
                    float(error["symerror"].replace('%', ''))
                    * data_value["value"] / 100.0)
            uncertainties.append(uncertainty)

    # Crop the first three pT bins (no theory predictions available).
    n_cropped = 3
    return (data_central[n_cropped:],
            kinematics[n_cropped:],
            uncertainties[n_cropped:])

def filter_ATLAS_Z0J_13TEV_PT():
    """
    Dump data, kinematics, and uncertainties to their .yaml files.

    Writes ``data.yaml`` (central values), ``kinematics.yaml`` (bins) and
    ``uncertainties.yaml`` (definitions plus per-bin values) in the
    current directory, from the content returned by ``get_all``.
    """
    central_values, kinematics, uncertainties = get_all()

    # Central values.
    data_central_yaml = {"data_central": central_values}
    # Kinematics.
    kinematics_yaml = {"bins": kinematics}

    # Uncertainty definitions: both HepData uncertainties are additive;
    # only the "correlated uncertainty" is treated as bin-by-bin correlated.
    treatment = {"correlated uncertainty": "ADD",
                 "uncorrelated uncertainty": "ADD"}
    correlation = {"correlated uncertainty": "CORR",
                   "uncorrelated uncertainty": "UNCORR"}
    definitions = {}
    # Guard against an empty bin list (would raise IndexError on [0]).
    first_bin = uncertainties[0] if uncertainties else {}
    for key in first_bin:
        definitions[key] = {"description": key,
                            "treatment": treatment[key],
                            "type": correlation[key]}
    uncertainties_yaml = {"definitions": definitions, "bins": uncertainties}

    # Write everything out.
    with open("data.yaml", "w") as file:
        yaml.dump(data_central_yaml, file, sort_keys=False)

    with open("kinematics.yaml", "w") as file:
        yaml.dump(kinematics_yaml, file, sort_keys=False)

    with open("uncertainties.yaml", "w") as file:
        yaml.dump(uncertainties_yaml, file, sort_keys=False)

if __name__ == "__main__":
    # The old filter_ATLAS_Z_13TEV_PT_data_kinetic() and
    # filter_ATLAS_Z_13TEV_PT_uncertainties() entry points were removed
    # together with filter_utils.py; this is the only filter now.
    filter_ATLAS_Z0J_13TEV_PT()
105 changes: 0 additions & 105 deletions nnpdf_data/nnpdf_data/commondata/ATLAS_Z0J_13TEV_PT/filter_utils.py

This file was deleted.

Loading
Loading