Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
64 changes: 64 additions & 0 deletions nnpdf_data/nnpdf_data/commondata/CMS_Z0_13TEV/data_AFB.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,64 @@
data_central:
- -0.0086482
- -0.033992
- -0.021625
- -0.012986
- -0.0011154
- 0.0017664
- 0.01436
- 0.024078
- 0.020788
- 0.046908
- 0.04152
- -0.064782
- -0.098499
- -0.064554
- -0.035726
- -0.0093437
- 0.015155
- 0.030693
- 0.067548
- 0.07893
- 0.12633
- 0.17564
- -0.13178
- -0.18003
- -0.11602
- -0.064338
- -0.018044
- 0.026687
- 0.05825
- 0.10674
- 0.14654
- 0.21216
- 0.30394
- -0.21858
- -0.26521
- -0.13269
- 0.039435
- 0.18622
- 0.32014
- 0.43738
- -0.27888
- -0.37527
- -0.20469
- 0.057624
- 0.27664
- 0.45724
- 0.63255
- -0.3762
- -0.52712
- -0.2774
- 0.081106
- 0.35895
- 0.62059
- 0.80039
- -0.45736
- 0.10689
- 0.49997
- -0.59639
- 0.1251
- 0.65723
- -0.67574
- 0.15638
- 0.75026
128 changes: 128 additions & 0 deletions nnpdf_data/nnpdf_data/commondata/CMS_Z0_13TEV/filter.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,128 @@
"""
This file contains the piece of code needed to implement the CMS AFB
measurement at 13 TeV. Uncertainties are obtained by combining the correlation
matrix and the total uncertainty provided on Hepdata, after which a covariance
matrix is constructed, which is finally decomposed into Ndat artificial
uncertainties
"""

import yaml
from nnpdf_data.filter_utils.utils import cormat_to_covmat,covmat_to_artunc

def get_tables():
    """
    Build the list of rawdata Hepdata file paths for this observable.

    The Hepdata version and the table identifiers are read from
    metadata.yaml and combined with the fixed rawdata prefix into one
    path per table.
    """
    prefix = "rawdata/HEPData-ins2818125"
    with open("metadata.yaml", "r") as stream:
        meta = yaml.safe_load(stream)

    version = meta["hepdata"]["version"]
    table_ids = meta["implemented_observables"][0]["tables"]

    # One Hepdata yaml file per table listed in the metadata
    return [f"{prefix}-v{version}-{table}.yaml" for table in table_ids]

def get_all():
    """
    Extract central values, kinematics and artificial uncertainties.

    The first Hepdata table provides the AFB central values, the kinematic
    bin edges (y_ll and m_ll) and the total symmetric uncertainty per bin.
    The second table provides the correlation coefficients, which are
    combined with the uncertainties into a covariance matrix that is then
    decomposed into Ndat artificial uncertainties.

    Returns
    -------
    tuple(list, list, list)
        (data_central, kinematics, art_uncert), ready to be dumped into
        the data, kinematics and uncertainties .yaml files.
    """
    hepdata_tables = get_tables()

    with open(hepdata_tables[0], 'r') as f:
        table_data = yaml.safe_load(f)

    # Central values: dependent variable 4 holds the AFB measurement
    data_values = table_data["dependent_variables"][4]["values"]
    data_central = [entry["value"] for entry in data_values]

    # Kinematic bins: dependent variables 0-3 are the y_ll / m_ll edges
    yll_min = table_data["dependent_variables"][0]["values"]
    yll_max = table_data["dependent_variables"][1]["values"]
    mll_min = table_data["dependent_variables"][2]["values"]
    mll_max = table_data["dependent_variables"][3]["values"]

    kinematics = []
    for ymin, ymax, mmin, mmax in zip(yll_min, yll_max, mll_min, mll_max):
        kinematics.append(
            {
                'y': {
                    'min': ymin["value"],
                    'mid': 0.5 * (ymin["value"] + ymax["value"]),
                    'max': ymax["value"],
                },
                'mll': {
                    'min': mmin["value"],
                    'mid': 0.5 * (mmin["value"] + mmax["value"]),
                    'max': mmax["value"],
                },
            }
        )

    # Total symmetric uncertainty per data point.
    # NOTE(review): assumes Hepdata lists exactly one error source per
    # point — if there were several, this list would outgrow Ndat and
    # break the covariance construction below; verify against rawdata.
    total_uncertainties = [
        error["symerror"] for entry in data_values for error in entry["errors"]
    ]

    # Correlation coefficients from the second table
    with open(hepdata_tables[1], 'r') as f:
        corr_data = yaml.safe_load(f)
    correlations = [
        coeff["value"] for coeff in corr_data["dependent_variables"][0]["values"]
    ]

    # Build the covariance matrix and decompose into artificial uncertainties
    covmat = cormat_to_covmat(total_uncertainties, correlations)
    art_uncertainties = covmat_to_artunc(len(total_uncertainties), covmat, 1)

    # Remap each row into {"artificial uncertainty 1": ..., ...} for the
    # uncertainties yaml file (keys are 1-indexed per convention)
    art_uncert = [
        {
            f"artificial uncertainty {j}": value
            for j, value in enumerate(row, start=1)
        }
        for row in art_uncertainties
    ]

    return data_central, kinematics, art_uncert

def filter_CMS_Z0_13TEV_PT():
    """
    Dump data, kinematics and uncertainties into their .yaml files.

    Writes data_AFB.yaml, kinematics_AFB.yaml and uncertainties_AFB.yaml
    in the current directory, using the central values, kinematic bins and
    artificial uncertainties constructed by get_all(). Every artificial
    uncertainty is treated as additive ("ADD") and correlated ("CORR").

    NOTE(review): the _PT suffix in the name is misleading for an AFB
    observable, but is kept to preserve the public interface.
    """
    central_values, kinematics, uncertainties = get_all()

    # Central values
    data_central_yaml = {"data_central": central_values}
    # Kinematics
    kinematics_yaml = {"bins": kinematics}

    # One definition per artificial uncertainty; the keys are taken from
    # the first bin since every bin carries the same set of uncertainties.
    definitions = {
        key: {"description": key, "treatment": "ADD", "type": "CORR"}
        for key in uncertainties[0]
    }
    uncertainties_yaml = {"definitions": definitions, "bins": uncertainties}

    with open("data_AFB.yaml", "w") as file:
        yaml.dump(data_central_yaml, file, sort_keys=False)
    with open("kinematics_AFB.yaml", "w") as file:
        yaml.dump(kinematics_yaml, file, sort_keys=False)
    with open("uncertainties_AFB.yaml", "w") as file:
        yaml.dump(uncertainties_yaml, file, sort_keys=False)

# Run the filter as a standalone script from the commondata directory
if __name__ == "__main__":
    filter_CMS_Z0_13TEV_PT()
Loading
Loading