@@ -2,7 +2,7 @@ dataset: wiki_hop
 subset: masked
 templates:
   08f2d1cf-c026-4b65-96d0-a28ff91affb5: !Template
-    answer_choices: null
+    answer_choices: '{{candidates | join("|||")}}'
     id: 08f2d1cf-c026-4b65-96d0-a28ff91affb5
     jinja: '{% set question_split = question.split('' '') %}

@@ -52,7 +52,8 @@ templates:
       {% endif %}'
     metadata: !TemplateMetadata
       choices_in_prompt: null
-      metrics: []
+      metrics:
+      - Accuracy
       original_task: false
     name: Indirect Question about Birthplace / Citizenship / Place of Death
     reference: Ask about place of birth, citizenship, or place of death for the subject
@@ -75,13 +76,14 @@ templates:
       {{ question_split[0] | replace("_", " ") }}'
     metadata: !TemplateMetadata
       choices_in_prompt: null
-      metrics: []
+      metrics:
+      - Other
       original_task: false
     name: Explain Relation
     reference: Given information, explain the relation between the subject entity
       and the object entity in a fact triple.
   3181f711-a376-4d6e-9fca-a34e1d048585: !Template
-    answer_choices: null
+    answer_choices: '{{candidates | join("|||")}}'
     id: 3181f711-a376-4d6e-9fca-a34e1d048585
     jinja: 'Information:

@@ -99,20 +101,22 @@ templates:
       {{answer}}'
     metadata: !TemplateMetadata
       choices_in_prompt: null
-      metrics: []
+      metrics:
+      - Accuracy
       original_task: false
     name: Generate Object Answer
     reference: Given information, generate the best object entity for the fact triple.
   639fa83f-14fd-457a-886e-a65334cb7e66: !Template
-    answer_choices: null
+    answer_choices: '{{candidates | join("|||")}}'
     id: 639fa83f-14fd-457a-886e-a65334cb7e66
     jinja: "Information:\n- {{ supports | join(\"\\n- \") }}\n\n{% set question_split\
       \ = question.split(' ') %}\nQuestion: ({{ question_split[1:] | join(\" \")}},\
       \ {{ question_split[0] | replace(\"_\", \" \") }}, ?)\n\nCandidate Answers:\
       \ \n- {{ candidates | join(\"\\n- \") }}\n|||\n{{answer}}"
     metadata: !TemplateMetadata
       choices_in_prompt: null
-      metrics: []
+      metrics:
+      - Accuracy
       original_task: true
     name: Choose Best Object Candidate
     reference: Given information and possible object candidates, choose the best object
@@ -136,7 +140,8 @@ templates:
       ") }}, {{answer}})'
     metadata: !TemplateMetadata
       choices_in_prompt: null
-      metrics: []
+      metrics:
+      - Other
       original_task: false
     name: Generate Fact Triple
     reference: Given information, generate a fact triple.
@@ -158,7 +163,8 @@ templates:
       {{ question_split[1:] | join(" ")}}'
     metadata: !TemplateMetadata
       choices_in_prompt: null
-      metrics: []
+      metrics:
+      - Other
       original_task: false
     name: Generate Subject Answer
     reference: Given information, generate the best subject entity for the fact triple.
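
For reference, a minimal sketch (not part of this commit) of how a template such as "Choose Best Object Candidate" can be rendered directly with jinja2: the "|||" marker splits the rendered string into prompt and target, and the new answer_choices expression yields the candidates joined the same way. Only the field names (question, supports, candidates, answer) come from the diff above; the example record and all names below are invented for illustration.

    # Hypothetical illustration of rendering the template against a made-up
    # wiki_hop-style record. Field names match the diff; values are invented.
    from jinja2 import Template

    JINJA_SRC = (
        "Information:\n- {{ supports | join(\"\\n- \") }}\n\n"
        "{% set question_split = question.split(' ') %}"
        "Question: ({{ question_split[1:] | join(\" \") }}, "
        "{{ question_split[0] | replace(\"_\", \" \") }}, ?)\n\n"
        "Candidate Answers: \n- {{ candidates | join(\"\\n- \") }}\n"
        "|||\n{{answer}}"
    )
    ANSWER_CHOICES_SRC = '{{candidates | join("|||")}}'

    record = {  # invented example record
        "question": "country_of_citizenship marie curie",
        "supports": ["Marie Curie was born in Warsaw.",
                     "She later became a naturalised French citizen."],
        "candidates": ["poland", "france", "germany"],
        "answer": "france",
    }

    rendered = Template(JINJA_SRC).render(**record)
    # Everything before ||| is the prompt, everything after is the target.
    prompt, target = [part.strip() for part in rendered.split("|||")]
    # answer_choices joins the candidates with ||| so they can be split back out.
    choices = Template(ANSWER_CHOICES_SRC).render(**record).split("|||")

    print(prompt)   # filled-in prompt: supports, question triple, candidate list
    print(target)   # france
    print(choices)  # ['poland', 'france', 'germany']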