@@ -1,16 +1,31 @@
 dataset: asnq
 templates:
+  0e06d340-6d2c-44f7-b977-604925773f0b: !Template
+    answer_choices: No ||| Yes
+    id: 0e06d340-6d2c-44f7-b977-604925773f0b
+    jinja: "Question: {{question}}\nSentence: {{sentence}}\nAre the question and\
+      \ the sentence a positive pair, where a positive pair means that the sentence\
+      \ answers the question? ||| {{answer_choices[label]}}"
+    metadata: !TemplateMetadata
+      choices_in_prompt: false
+      metrics:
+      - Accuracy
+      original_task: true
+    name: positive_pairs
+    reference: ' '
   55f386ba-9a86-405e-a805-152e254a4205: !Template
     answer_choices: null
     id: 55f386ba-9a86-405e-a805-152e254a4205
     jinja: "{% if label == 1 %}\n\nWhat is a question that someone might ask that\
       \ the following sentence can answer?\n\n{{sentence}}\n\n|||\n\n{{question}}\n\
       {% endif %}\n"
     metadata: !TemplateMetadata
-      choices_in_prompt: null
-      metrics: []
+      choices_in_prompt: false
+      metrics:
+      - BLEU
+      - ROUGE
       original_task: false
-    name: Sentence question generation 2
+    name: question_from_sentence
     reference: ' '
   5b6abb0a-1b4f-4338-aab6-430465669164: !Template
     answer_choices: null
@@ -30,10 +45,36 @@ templates:
 
     '
     metadata: !TemplateMetadata
-      choices_in_prompt: null
-      metrics: []
-      original_task: null
-    name: sentence question generation
+      choices_in_prompt: false
+      metrics:
+      - BLEU
+      - ROUGE
+      original_task: false
+    name: write_question
+    reference: ' '
+  684aea91-34c4-47de-a61f-7cc9a182b657: !Template
+    answer_choices: No ||| Yes
+    id: 684aea91-34c4-47de-a61f-7cc9a182b657
+    jinja: Can the answer "{{sentence}}" be inferred from the question "{{question}}"?
+      ||| {{answer_choices[label]}}
+    metadata: !TemplateMetadata
+      choices_in_prompt: false
+      metrics:
+      - Accuracy
+      original_task: true
+    name: answer_infer_question
+    reference: ' '
+  719306b9-5dc8-46c7-b693-9b2edc2e09f2: !Template
+    answer_choices: No ||| Yes
+    id: 719306b9-5dc8-46c7-b693-9b2edc2e09f2
+    jinja: Does this sentence "{{sentence}}" answer this question "{{question}}"?
+      ||| {{answer_choices[label]}}
+    metadata: !TemplateMetadata
+      choices_in_prompt: false
+      metrics:
+      - Accuracy
+      original_task: true
+    name: Does_sentence_answer_question
+    reference: ' '
   859ec580-957b-42da-be1b-c3ccb8b52d24: !Template
     answer_choices: null
@@ -53,18 +94,21 @@ templates:
 
     '
     metadata: !TemplateMetadata
-      choices_in_prompt: null
-      metrics: []
+      choices_in_prompt: false
+      metrics:
+      - BLEU
+      - ROUGE
       original_task: false
-    name: answer question with a sentence 3
+    name: answer question with a sentence
     reference: ' '
   85da6666-9e50-4122-84c8-d00b90967475: !Template
     answer_choices: null
     id: 85da6666-9e50-4122-84c8-d00b90967475
     jinja: '{% if label == 1 %}
 
 
-      I was wondering, {{question}}? Can you give me a full sentence answer?
+      Given the following question: {{question}}? Can you give me a full sentence
+      answer?
 
 
       |||
@@ -76,43 +120,70 @@ templates:
 
     '
     metadata: !TemplateMetadata
-      choices_in_prompt: null
-      metrics: []
+      choices_in_prompt: false
+      metrics:
+      - BLEU
+      - ROUGE
       original_task: false
-    name: answer question with a sentence 2
+    name: give me a full sentence answer
     reference: ' '
   85fe8aaa-83c5-41ec-ada5-0e6d60bab1f9: !Template
     answer_choices: null
     id: 85fe8aaa-83c5-41ec-ada5-0e6d60bab1f9
     jinja: '{% if label == 1 %}
 
-
       Answer this question as a full sentence: {{question}}?
 
-
       |||
 
-
       {{sentence}}
 
       {% endif %}
 
     '
     metadata: !TemplateMetadata
-      choices_in_prompt: null
-      metrics: []
-      original_task: null
+      choices_in_prompt: false
+      metrics:
+      - BLEU
+      - ROUGE
+      original_task: false
     name: answer question as a sentence
     reference: ' '
+  95e39e1d-a830-4b6c-bd2a-10fe51552427: !Template
+    answer_choices: No ||| Yes
+    id: 95e39e1d-a830-4b6c-bd2a-10fe51552427
+    jinja: 'Can this question: "{{question}}" be answered as follows: "{{sentence}}"?
+      Please answer yes or no. ||| {{answer_choices[label]}}'
+    metadata: !TemplateMetadata
+      choices_in_prompt: true
+      metrics:
+      - Accuracy
+      original_task: true
+    name: yes_vs_no
+    reference: ' '
   a36d6152-72c4-4278-8266-d27b28667f61: !Template
     answer_choices: null
     id: a36d6152-72c4-4278-8266-d27b28667f61
     jinja: "{% if label == 1 %}\n\nHere is a sentence:\n\n{{sentence}}\n\nWrite a\
-      \ question that this sentence is an answer to .\n\n|||\n\n{{question}}\n{% endif\
+      \ question to which this sentence is an answer.\n\n|||\n\n{{question}}\n{% endif\
       \ %}\n"
     metadata: !TemplateMetadata
-      choices_in_prompt: null
-      metrics: []
+      choices_in_prompt: false
+      metrics:
+      - BLEU
+      - ROUGE
       original_task: false
-    name: Sentence question generation 3
+    name: write_a_question
+    reference: ' '
+  a7927e90-1a9b-49e2-a2f8-5ac9e6d286cb: !Template
+    answer_choices: No ||| Yes
+    id: a7927e90-1a9b-49e2-a2f8-5ac9e6d286cb
+    jinja: 'Does the following sentence "{{sentence}}" seem like a right answer to
+      the following question: {{question}}? ||| {{answer_choices[label]}}'
+    metadata: !TemplateMetadata
+      choices_in_prompt: false
+      metrics:
+      - Accuracy
+      original_task: true
+    name: right_answer
     reference: ' '
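
As a rough illustration of how templates like the ones added above are consumed, the sketch below renders one of the new yes/no templates with plain jinja2. It assumes the usual promptsource conventions: `answer_choices` is split on `|||`, the `jinja` source is rendered against one dataset record, and the rendered string is split on `|||` into a prompt and a target. The example record is hypothetical, not taken from ASNQ.

# Sketch only: plain jinja2 rather than promptsource's Template class;
# the example record below is made up for illustration.
from jinja2 import Template

jinja_src = (
    'Does this sentence "{{sentence}}" answer this question "{{question}}"? '
    "||| {{answer_choices[label]}}"
)

example = {
    "question": "when was the eiffel tower built",  # hypothetical record
    "sentence": "The Eiffel Tower was built between 1887 and 1889.",
    "label": 1,  # 1 = positive pair (the sentence answers the question)
}

# "No ||| Yes" maps label 0 -> "No" and label 1 -> "Yes"
answer_choices = [c.strip() for c in "No ||| Yes".split("|||")]

rendered = Template(jinja_src).render(answer_choices=answer_choices, **example)
prompt, target = (part.strip() for part in rendered.split("|||"))

print(prompt)  # Does this sentence "..." answer this question "..."?
print(target)  # Yes

In the promptsource repo itself, the equivalent entry point is `DatasetTemplates("asnq")` plus `template.apply(example)`, which also handles the `{% if label == 1 %}` guards that make the generation templates produce no output for negative pairs.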