Anni123 commited on
Commit
b6a1d8d
1 Parent(s): 36275c2

Upload folder using huggingface_hub

Browse files
.DS_Store ADDED
Binary file (8.2 kB). View file
 
README.md CHANGED
@@ -1,13 +1,6 @@
1
  ---
2
- title: Cot Adapter
3
- emoji: 👁
4
- colorFrom: purple
5
- colorTo: gray
6
- sdk: gradio
7
- sdk_version: 3.35.2
8
  app_file: app.py
9
- pinned: false
10
- duplicated_from: Anni123/CoT-Adapter
11
  ---
12
-
13
- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
 
1
  ---
2
+ title: Unified-Adapter
 
 
 
 
 
3
  app_file: app.py
4
+ sdk: gradio
5
+ sdk_version: 3.36.1
6
  ---
 
 
__pycache__/app.cpython-38.pyc ADDED
Binary file (13.2 kB). View file
 
__pycache__/llm_utils.cpython-38.pyc ADDED
Binary file (1.92 kB). View file
 
__pycache__/retrieval_utils.cpython-38.pyc ADDED
Binary file (7.22 kB). View file
 
__pycache__/utils.cpython-38.pyc ADDED
Binary file (1.96 kB). View file
 
app.py CHANGED
@@ -4,12 +4,12 @@ import re
4
  import threading
5
  import json
6
  from collections import Counter
7
- import os
8
  from llm_utils import *
9
  from utils import *
10
  from retrieval_utils import *
11
 
12
- openai.api_key = os.getenv("api_key")
 
13
  COT_PROMPT = "Let's think step by step."
14
  DIRECT_ANS_PROMPT = "The answer is"
15
 
@@ -29,6 +29,9 @@ EXAMPLES = ['Take the last letters of each words in \"Kristopher Deb Jake Tammy\
29
 
30
 
31
 
 
 
 
32
  def answer_extraction_prompt(datatype):
33
  if datatype == "commonsense-mc":
34
  ans_prompt = "\nTherefore, among A through E, the answer is"
@@ -341,7 +344,8 @@ with gr.Blocks(theme="bethecloud/storj_theme", css="#process_btn {background-col
341
  feedback_disagree.click(record_feedback_disagree, inputs=[input_question, datatype, ans_ours, ans_zeroshot, self_know, kb_know, refine_know, cor_ans ,store_flag], outputs=[feedback_agree, feedback_disagree, feedback_uncertain, feedback_ack])
342
  feedback_uncertain.click(record_feedback_uncertain, inputs=[input_question, datatype, ans_ours, ans_zeroshot, self_know, kb_know, refine_know, cor_ans ,store_flag], outputs=[feedback_agree, feedback_disagree, feedback_uncertain, feedback_ack])
343
 
344
- demo.queue(concurrency_count=1).launch(debug=True)
 
345
 
346
 
347
 
 
4
  import threading
5
  import json
6
  from collections import Counter
 
7
  from llm_utils import *
8
  from utils import *
9
  from retrieval_utils import *
10
 
11
+ openai.api_key = "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"
12
+
13
  COT_PROMPT = "Let's think step by step."
14
  DIRECT_ANS_PROMPT = "The answer is"
15
 
 
29
 
30
 
31
 
32
+ global lock #global lock, repo
33
+ lock = threading.Lock()
34
+
35
  def answer_extraction_prompt(datatype):
36
  if datatype == "commonsense-mc":
37
  ans_prompt = "\nTherefore, among A through E, the answer is"
 
344
  feedback_disagree.click(record_feedback_disagree, inputs=[input_question, datatype, ans_ours, ans_zeroshot, self_know, kb_know, refine_know, cor_ans ,store_flag], outputs=[feedback_agree, feedback_disagree, feedback_uncertain, feedback_ack])
345
  feedback_uncertain.click(record_feedback_uncertain, inputs=[input_question, datatype, ans_ours, ans_zeroshot, self_know, kb_know, refine_know, cor_ans ,store_flag], outputs=[feedback_agree, feedback_disagree, feedback_uncertain, feedback_ack])
346
 
347
+
348
+ demo.launch(share=True)
349
 
350
 
351
 
data_pool/commonsense-mc_feedback ADDED
@@ -0,0 +1 @@
 
 
1
+ {"question": "What would someone use a personal key for? Answer Choices: (A) car stand (B) at hotel (C) own home (D) front door (E) bus depot", "datatype": "commonsense-mc", "zshot_ans": "D", "adapter_ans": "C", "self_know": "A personal key is used to unlock a door or other secure area.", "kb_know": "Personal data, also known as personal information or personally identifiable information (PII), is any information related to an identifiable person. Stand and Deliver is a 1988 American drama film directed by Ram\u00f3n Men\u00e9ndez, written by Men\u00e9ndez and Tom Musca, based on the true story of a high school mathematics teacher, Jaime Escalante. A hotel is an establishment that provides paid lodging on a short-term basis. This Condition, often abbreviated as TC, is a pop rock band originating from Long Island, NY. A door is a hinged or otherwise movable barrier that allows ingress (entry) into and egress (exit) from an enclosure. The Mahatma Gandhi Bus Station (MGBS), also known as the Imlibun Bus Station (Imlibun means \"grove of tamarind trees\"), is a bus station on the Musi River in the Imlibun area of southern Hyderabad, India.", "refine_know": "A personal key is used to unlock a door or other secure area.", "cor_ans": "C, C, C, D, C", "feedback": "agree"}
data_pool/commonsense-verify_feedback ADDED
@@ -0,0 +1 @@
 
 
1
+ {"question": "is the brain located in the torso?", "datatype": "commonsense-verify", "zshot_ans": "yes", "adapter_ans": "no", "self_know": "The brain is located in the head, not the torso.", "kb_know": "Brain is the industry standard name for a computer virus that was released in its first form on 19 January 1986, and is considered to be the first computer virus for the IBM Personal Computer (IBM PC) and compatibles. Turning Torso is a neo-futurist residential skyscraper built in Malm\u00f6, Sweden in 2005.", "refine_know": "The brain is located in the head.", "cor_ans": "no, no, no, no, no", "feedback": "agree"}
dataset/AQuA/test.json ADDED
The diff for this file is too large to render. See raw diff
 
dataset/AddSub/AddSub.json ADDED
The diff for this file is too large to render. See raw diff
 
dataset/Bigbench_Date/task.json ADDED
The diff for this file is too large to render. See raw diff
 
dataset/Bigbench_object_tracking/task.json ADDED
The diff for this file is too large to render. See raw diff
 
dataset/CommonsenseQA/dev_rand_split.jsonl ADDED
The diff for this file is too large to render. See raw diff
 
dataset/CommonsenseQA/test_rand_split_no_answers.jsonl ADDED
The diff for this file is too large to render. See raw diff
 
dataset/CommonsenseQA/train_rand_split.jsonl ADDED
The diff for this file is too large to render. See raw diff
 
dataset/MultiArith/MultiArith.json ADDED
The diff for this file is too large to render. See raw diff
 
dataset/SVAMP/SVAMP.json ADDED
The diff for this file is too large to render. See raw diff
 
dataset/SingleEq/questions.json ADDED
The diff for this file is too large to render. See raw diff
 
dataset/StrategyQA/task.json ADDED
The diff for this file is too large to render. See raw diff
 
dataset/coin_flip/coin_flip.json ADDED
The diff for this file is too large to render. See raw diff
 
dataset/grade-school-math/test.jsonl ADDED
The diff for this file is too large to render. See raw diff
 
dataset/last_letters/last_letters.json ADDED
@@ -0,0 +1,2004 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "examples": [
3
+ {
4
+ "question": "Take the last letters of each words in \"Whitney Erika Tj Benito\" and concatenate them.",
5
+ "answer": "yajo"
6
+ },
7
+ {
8
+ "question": "Take the last letters of each words in \"Lucky Mireya Jj Kc\" and concatenate them.",
9
+ "answer": "yajc"
10
+ },
11
+ {
12
+ "question": "Take the last letters of each words in \"Caleb Chase Eleazar Chanel\" and concatenate them.",
13
+ "answer": "berl"
14
+ },
15
+ {
16
+ "question": "Take the last letters of each words in \"Silvia Carolina Stan Chuck\" and concatenate them.",
17
+ "answer": "aank"
18
+ },
19
+ {
20
+ "question": "Take the last letters of each words in \"Breanna Trey Omar Patrice\" and concatenate them.",
21
+ "answer": "ayre"
22
+ },
23
+ {
24
+ "question": "Take the last letters of each words in \"Lilian Ian Bryce Aracely\" and concatenate them.",
25
+ "answer": "nney"
26
+ },
27
+ {
28
+ "question": "Take the last letters of each words in \"Walter Mikayla Larry Ryan\" and concatenate them.",
29
+ "answer": "rayn"
30
+ },
31
+ {
32
+ "question": "Take the last letters of each words in \"Angelina Layla Jenny Zane\" and concatenate them.",
33
+ "answer": "aaye"
34
+ },
35
+ {
36
+ "question": "Take the last letters of each words in \"Lizzy Juany Aisha Brenda\" and concatenate them.",
37
+ "answer": "yyaa"
38
+ },
39
+ {
40
+ "question": "Take the last letters of each words in \"Elise Lupe Renee Noemi\" and concatenate them.",
41
+ "answer": "eeei"
42
+ },
43
+ {
44
+ "question": "Take the last letters of each words in \"Jesse Roderick Travis Rita\" and concatenate them.",
45
+ "answer": "eksa"
46
+ },
47
+ {
48
+ "question": "Take the last letters of each words in \"Cinthia Lloyd Jacqueline Jc\" and concatenate them.",
49
+ "answer": "adec"
50
+ },
51
+ {
52
+ "question": "Take the last letters of each words in \"Sterling Jenifer Patsy Denise\" and concatenate them.",
53
+ "answer": "grye"
54
+ },
55
+ {
56
+ "question": "Take the last letters of each words in \"Beatriz Gillian Coco Vivian\" and concatenate them.",
57
+ "answer": "znon"
58
+ },
59
+ {
60
+ "question": "Take the last letters of each words in \"Prince Rene Vishal Patrick\" and concatenate them.",
61
+ "answer": "eelk"
62
+ },
63
+ {
64
+ "question": "Take the last letters of each words in \"Jae Dennis Cris Bernadette\" and concatenate them.",
65
+ "answer": "esse"
66
+ },
67
+ {
68
+ "question": "Take the last letters of each words in \"Hank Janine Frankie Isa\" and concatenate them.",
69
+ "answer": "keea"
70
+ },
71
+ {
72
+ "question": "Take the last letters of each words in \"Max Mikey Cynthia Holly\" and concatenate them.",
73
+ "answer": "xyay"
74
+ },
75
+ {
76
+ "question": "Take the last letters of each words in \"Christy Rey Michelle Dolly\" and concatenate them.",
77
+ "answer": "yyey"
78
+ },
79
+ {
80
+ "question": "Take the last letters of each words in \"Annie To\u00f1o Sharon Delores\" and concatenate them.",
81
+ "answer": "eons"
82
+ },
83
+ {
84
+ "question": "Take the last letters of each words in \"Jessy Libby Danielle Red\" and concatenate them.",
85
+ "answer": "yyed"
86
+ },
87
+ {
88
+ "question": "Take the last letters of each words in \"Jorge Natalia Bryant Kiran\" and concatenate them.",
89
+ "answer": "eatn"
90
+ },
91
+ {
92
+ "question": "Take the last letters of each words in \"Yan Eunice Joseph Hugh\" and concatenate them.",
93
+ "answer": "nehh"
94
+ },
95
+ {
96
+ "question": "Take the last letters of each words in \"Melody Ramiro Humberto Jacob\" and concatenate them.",
97
+ "answer": "yoob"
98
+ },
99
+ {
100
+ "question": "Take the last letters of each words in \"Domingo Briana Michael Joan\" and concatenate them.",
101
+ "answer": "oaln"
102
+ },
103
+ {
104
+ "question": "Take the last letters of each words in \"Kristopher Deb Jake Tammy\" and concatenate them.",
105
+ "answer": "rbey"
106
+ },
107
+ {
108
+ "question": "Take the last letters of each words in \"Morgan Perla Joao Marta\" and concatenate them.",
109
+ "answer": "naoa"
110
+ },
111
+ {
112
+ "question": "Take the last letters of each words in \"Penny Harry Jessica Horacio\" and concatenate them.",
113
+ "answer": "yyao"
114
+ },
115
+ {
116
+ "question": "Take the last letters of each words in \"Billie Paloma Tanner Raul\" and concatenate them.",
117
+ "answer": "earl"
118
+ },
119
+ {
120
+ "question": "Take the last letters of each words in \"Rena Devon Rosalinda Paulina\" and concatenate them.",
121
+ "answer": "anaa"
122
+ },
123
+ {
124
+ "question": "Take the last letters of each words in \"Axel Osvaldo Mildred Sylvia\" and concatenate them.",
125
+ "answer": "loda"
126
+ },
127
+ {
128
+ "question": "Take the last letters of each words in \"Emiliano Jasmin Wade Vilma\" and concatenate them.",
129
+ "answer": "onea"
130
+ },
131
+ {
132
+ "question": "Take the last letters of each words in \"Lino Mariel Aditya Elisabeth\" and concatenate them.",
133
+ "answer": "olah"
134
+ },
135
+ {
136
+ "question": "Take the last letters of each words in \"Jacky Socorro Mark Wanda\" and concatenate them.",
137
+ "answer": "yoka"
138
+ },
139
+ {
140
+ "question": "Take the last letters of each words in \"Mauricio Lee Madi Lizzie\" and concatenate them.",
141
+ "answer": "oeie"
142
+ },
143
+ {
144
+ "question": "Take the last letters of each words in \"Ruben Bernardo Ariel Shelley\" and concatenate them.",
145
+ "answer": "noly"
146
+ },
147
+ {
148
+ "question": "Take the last letters of each words in \"Phillip Ajay Janie Augusto\" and concatenate them.",
149
+ "answer": "pyeo"
150
+ },
151
+ {
152
+ "question": "Take the last letters of each words in \"Ari Jasmine Elliot Kendrick\" and concatenate them.",
153
+ "answer": "ietk"
154
+ },
155
+ {
156
+ "question": "Take the last letters of each words in \"Margarita Anabel Shaun Celina\" and concatenate them.",
157
+ "answer": "alna"
158
+ },
159
+ {
160
+ "question": "Take the last letters of each words in \"Geo Kody Isaias Giovanni\" and concatenate them.",
161
+ "answer": "oysi"
162
+ },
163
+ {
164
+ "question": "Take the last letters of each words in \"Kennedy Ginny Iliana Sky\" and concatenate them.",
165
+ "answer": "yyay"
166
+ },
167
+ {
168
+ "question": "Take the last letters of each words in \"Billy Kassandra Joy Abe\" and concatenate them.",
169
+ "answer": "yaye"
170
+ },
171
+ {
172
+ "question": "Take the last letters of each words in \"Nubia Sarah Jalen Kris\" and concatenate them.",
173
+ "answer": "ahns"
174
+ },
175
+ {
176
+ "question": "Take the last letters of each words in \"Ale Gaspar Sonny Simon\" and concatenate them.",
177
+ "answer": "eryn"
178
+ },
179
+ {
180
+ "question": "Take the last letters of each words in \"Skylar Chrissy Misty Kike\" and concatenate them.",
181
+ "answer": "ryye"
182
+ },
183
+ {
184
+ "question": "Take the last letters of each words in \"Gavin Neha Asha Baltazar\" and concatenate them.",
185
+ "answer": "naar"
186
+ },
187
+ {
188
+ "question": "Take the last letters of each words in \"Camilo Becky Eliza Rebecca\" and concatenate them.",
189
+ "answer": "oyaa"
190
+ },
191
+ {
192
+ "question": "Take the last letters of each words in \"Isela Leslie Stacy Ingrid\" and concatenate them.",
193
+ "answer": "aeyd"
194
+ },
195
+ {
196
+ "question": "Take the last letters of each words in \"Meg Andrey Gerard Lilia\" and concatenate them.",
197
+ "answer": "gyda"
198
+ },
199
+ {
200
+ "question": "Take the last letters of each words in \"Denny Carlo Reinaldo Jessi\" and concatenate them.",
201
+ "answer": "yooi"
202
+ },
203
+ {
204
+ "question": "Take the last letters of each words in \"Cheri Rico Teo Jesus\" and concatenate them.",
205
+ "answer": "ioos"
206
+ },
207
+ {
208
+ "question": "Take the last letters of each words in \"Alexandria Meghan Autumn Robert\" and concatenate them.",
209
+ "answer": "annt"
210
+ },
211
+ {
212
+ "question": "Take the last letters of each words in \"Jr Meredith Zoe Robby\" and concatenate them.",
213
+ "answer": "rhey"
214
+ },
215
+ {
216
+ "question": "Take the last letters of each words in \"Pretty Jada Sarita Allen\" and concatenate them.",
217
+ "answer": "yaan"
218
+ },
219
+ {
220
+ "question": "Take the last letters of each words in \"Shari Bella Liza Maira\" and concatenate them.",
221
+ "answer": "iaaa"
222
+ },
223
+ {
224
+ "question": "Take the last letters of each words in \"Lazaro Ana Charlotte Precious\" and concatenate them.",
225
+ "answer": "oaes"
226
+ },
227
+ {
228
+ "question": "Take the last letters of each words in \"Imelda Andi Mack Rigoberto\" and concatenate them.",
229
+ "answer": "aiko"
230
+ },
231
+ {
232
+ "question": "Take the last letters of each words in \"Ashish Tracey Varun Emil\" and concatenate them.",
233
+ "answer": "hynl"
234
+ },
235
+ {
236
+ "question": "Take the last letters of each words in \"Marian Joanne Darrin Rohit\" and concatenate them.",
237
+ "answer": "nent"
238
+ },
239
+ {
240
+ "question": "Take the last letters of each words in \"Julieta Zachary Jared Tyson\" and concatenate them.",
241
+ "answer": "aydn"
242
+ },
243
+ {
244
+ "question": "Take the last letters of each words in \"Vicente Dayana Kasey Lin\" and concatenate them.",
245
+ "answer": "eayn"
246
+ },
247
+ {
248
+ "question": "Take the last letters of each words in \"Craig Dillon Troy Griselda\" and concatenate them.",
249
+ "answer": "gnya"
250
+ },
251
+ {
252
+ "question": "Take the last letters of each words in \"Rubi Daisy Yadira Santa\" and concatenate them.",
253
+ "answer": "iyaa"
254
+ },
255
+ {
256
+ "question": "Take the last letters of each words in \"Dave Ernesto Liam Marsha\" and concatenate them.",
257
+ "answer": "eoma"
258
+ },
259
+ {
260
+ "question": "Take the last letters of each words in \"Ira Paola Jose Antonio Maria\" and concatenate them.",
261
+ "answer": "aaoa"
262
+ },
263
+ {
264
+ "question": "Take the last letters of each words in \"Manish Lu Karl Don\" and concatenate them.",
265
+ "answer": "huln"
266
+ },
267
+ {
268
+ "question": "Take the last letters of each words in \"Irving Hans Moses Nicole\" and concatenate them.",
269
+ "answer": "gsse"
270
+ },
271
+ {
272
+ "question": "Take the last letters of each words in \"Micaela Kevin Diamond Ty\" and concatenate them.",
273
+ "answer": "andy"
274
+ },
275
+ {
276
+ "question": "Take the last letters of each words in \"Maura Selvin Tabitha Gino\" and concatenate them.",
277
+ "answer": "anao"
278
+ },
279
+ {
280
+ "question": "Take the last letters of each words in \"Rosendo Shayla Erica Georgia\" and concatenate them.",
281
+ "answer": "oaaa"
282
+ },
283
+ {
284
+ "question": "Take the last letters of each words in \"Lorena Shana Priscilla Summer\" and concatenate them.",
285
+ "answer": "aaar"
286
+ },
287
+ {
288
+ "question": "Take the last letters of each words in \"Noelle Byron Jane Darin\" and concatenate them.",
289
+ "answer": "enen"
290
+ },
291
+ {
292
+ "question": "Take the last letters of each words in \"Chava Rosalia Kurt Linda\" and concatenate them.",
293
+ "answer": "aata"
294
+ },
295
+ {
296
+ "question": "Take the last letters of each words in \"Meagan Naomi Israel Marie\" and concatenate them.",
297
+ "answer": "nile"
298
+ },
299
+ {
300
+ "question": "Take the last letters of each words in \"Michele Karan Abraham Ellen\" and concatenate them.",
301
+ "answer": "enmn"
302
+ },
303
+ {
304
+ "question": "Take the last letters of each words in \"Laila Audrey Glenn Rhonda\" and concatenate them.",
305
+ "answer": "ayna"
306
+ },
307
+ {
308
+ "question": "Take the last letters of each words in \"Regina Joel Justice Eli\" and concatenate them.",
309
+ "answer": "alei"
310
+ },
311
+ {
312
+ "question": "Take the last letters of each words in \"Janet Ant Vickie Elias\" and concatenate them.",
313
+ "answer": "ttes"
314
+ },
315
+ {
316
+ "question": "Take the last letters of each words in \"Russ Berta Mandy Lydia\" and concatenate them.",
317
+ "answer": "saya"
318
+ },
319
+ {
320
+ "question": "Take the last letters of each words in \"Tristan Marleny Santiago Viviana\" and concatenate them.",
321
+ "answer": "nyoa"
322
+ },
323
+ {
324
+ "question": "Take the last letters of each words in \"Carole William Tiffany Hilary\" and concatenate them.",
325
+ "answer": "emyy"
326
+ },
327
+ {
328
+ "question": "Take the last letters of each words in \"Amparo Gianna Dion Tessa\" and concatenate them.",
329
+ "answer": "oana"
330
+ },
331
+ {
332
+ "question": "Take the last letters of each words in \"Lewis Azucena Kai Ravi\" and concatenate them.",
333
+ "answer": "saii"
334
+ },
335
+ {
336
+ "question": "Take the last letters of each words in \"Issa Kendra Ignacio Henry\" and concatenate them.",
337
+ "answer": "aaoy"
338
+ },
339
+ {
340
+ "question": "Take the last letters of each words in \"Faustino Lamar Fransisco Rina\" and concatenate them.",
341
+ "answer": "oroa"
342
+ },
343
+ {
344
+ "question": "Take the last letters of each words in \"Angelique Marissa Phyllis Bonnie\" and concatenate them.",
345
+ "answer": "ease"
346
+ },
347
+ {
348
+ "question": "Take the last letters of each words in \"Loren Wes Dean Hayley\" and concatenate them.",
349
+ "answer": "nsny"
350
+ },
351
+ {
352
+ "question": "Take the last letters of each words in \"Maxwell Jose Beto Joe\" and concatenate them.",
353
+ "answer": "leoe"
354
+ },
355
+ {
356
+ "question": "Take the last letters of each words in \"Liz Andrea Suresh Vera\" and concatenate them.",
357
+ "answer": "zaha"
358
+ },
359
+ {
360
+ "question": "Take the last letters of each words in \"Chantal Ines Valeria Francesca\" and concatenate them.",
361
+ "answer": "lsaa"
362
+ },
363
+ {
364
+ "question": "Take the last letters of each words in \"Ron Carl Joann Young\" and concatenate them.",
365
+ "answer": "nlng"
366
+ },
367
+ {
368
+ "question": "Take the last letters of each words in \"Sally Sadie Christie Ellie\" and concatenate them.",
369
+ "answer": "yeee"
370
+ },
371
+ {
372
+ "question": "Take the last letters of each words in \"Tomas Nic Zoila Calvin\" and concatenate them.",
373
+ "answer": "scan"
374
+ },
375
+ {
376
+ "question": "Take the last letters of each words in \"Gabe Dora Aileen Modesto\" and concatenate them.",
377
+ "answer": "eano"
378
+ },
379
+ {
380
+ "question": "Take the last letters of each words in \"Victoria Aurora Amalia Princess\" and concatenate them.",
381
+ "answer": "aaas"
382
+ },
383
+ {
384
+ "question": "Take the last letters of each words in \"Alec Arianna Corina Juancarlos\" and concatenate them.",
385
+ "answer": "caas"
386
+ },
387
+ {
388
+ "question": "Take the last letters of each words in \"Carolyn Sasha Mercy Keri\" and concatenate them.",
389
+ "answer": "nayi"
390
+ },
391
+ {
392
+ "question": "Take the last letters of each words in \"Selena Keisha Gladys Cedric\" and concatenate them.",
393
+ "answer": "aasc"
394
+ },
395
+ {
396
+ "question": "Take the last letters of each words in \"Salma Pj Gladis Monica\" and concatenate them.",
397
+ "answer": "ajsa"
398
+ },
399
+ {
400
+ "question": "Take the last letters of each words in \"Ralph Jeanne Kyle Alejandro\" and concatenate them.",
401
+ "answer": "heeo"
402
+ },
403
+ {
404
+ "question": "Take the last letters of each words in \"Letty Aimee Elvia Ted\" and concatenate them.",
405
+ "answer": "yead"
406
+ },
407
+ {
408
+ "question": "Take the last letters of each words in \"April Molly Maurice Jaclyn\" and concatenate them.",
409
+ "answer": "lyen"
410
+ },
411
+ {
412
+ "question": "Take the last letters of each words in \"Markus Kathryn Giovanna Kent\" and concatenate them.",
413
+ "answer": "snat"
414
+ },
415
+ {
416
+ "question": "Take the last letters of each words in \"Desmond Camille Joana Garcia\" and concatenate them.",
417
+ "answer": "deaa"
418
+ },
419
+ {
420
+ "question": "Take the last letters of each words in \"Quinton Sam Soledad Becca\" and concatenate them.",
421
+ "answer": "nmda"
422
+ },
423
+ {
424
+ "question": "Take the last letters of each words in \"Matilde Jan Marcel Mimi\" and concatenate them.",
425
+ "answer": "enli"
426
+ },
427
+ {
428
+ "question": "Take the last letters of each words in \"Manolo Bobbie Ash Jaqueline\" and concatenate them.",
429
+ "answer": "oehe"
430
+ },
431
+ {
432
+ "question": "Take the last letters of each words in \"Jorge Luis Mo Alexia Jerry\" and concatenate them.",
433
+ "answer": "soay"
434
+ },
435
+ {
436
+ "question": "Take the last letters of each words in \"Kali Jeanette Tess Devin\" and concatenate them.",
437
+ "answer": "iesn"
438
+ },
439
+ {
440
+ "question": "Take the last letters of each words in \"Nabil Marlene Wendy Jillian\" and concatenate them.",
441
+ "answer": "leyn"
442
+ },
443
+ {
444
+ "question": "Take the last letters of each words in \"Charity Svetlana Jamie Jose A\" and concatenate them.",
445
+ "answer": "yaeA"
446
+ },
447
+ {
448
+ "question": "Take the last letters of each words in \"Dalila Emily Casey Clifford\" and concatenate them.",
449
+ "answer": "ayyd"
450
+ },
451
+ {
452
+ "question": "Take the last letters of each words in \"Blake Hunter Lou Spencer\" and concatenate them.",
453
+ "answer": "erur"
454
+ },
455
+ {
456
+ "question": "Take the last letters of each words in \"Donny Lucero Christopher Gregory\" and concatenate them.",
457
+ "answer": "yory"
458
+ },
459
+ {
460
+ "question": "Take the last letters of each words in \"Hayden Maya Zack Roberto\" and concatenate them.",
461
+ "answer": "nako"
462
+ },
463
+ {
464
+ "question": "Take the last letters of each words in \"Jes\u00fas Vidal Maxine Gloria\" and concatenate them.",
465
+ "answer": "slea"
466
+ },
467
+ {
468
+ "question": "Take the last letters of each words in \"Madeline Mckenzie Louie Dylan\" and concatenate them.",
469
+ "answer": "eeen"
470
+ },
471
+ {
472
+ "question": "Take the last letters of each words in \"Anastasia Thelma Sheri Rosita\" and concatenate them.",
473
+ "answer": "aaia"
474
+ },
475
+ {
476
+ "question": "Take the last letters of each words in \"Guillermina Evelin Dominique Johnny\" and concatenate them.",
477
+ "answer": "aney"
478
+ },
479
+ {
480
+ "question": "Take the last letters of each words in \"Sid Isabelle Jackson Heidy\" and concatenate them.",
481
+ "answer": "deny"
482
+ },
483
+ {
484
+ "question": "Take the last letters of each words in \"Margaret Rosi Willy Charlene\" and concatenate them.",
485
+ "answer": "tiye"
486
+ },
487
+ {
488
+ "question": "Take the last letters of each words in \"Carla Dolores Cooper Damion\" and concatenate them.",
489
+ "answer": "asrn"
490
+ },
491
+ {
492
+ "question": "Take the last letters of each words in \"Davis Jules Fabiola Cherie\" and concatenate them.",
493
+ "answer": "ssae"
494
+ },
495
+ {
496
+ "question": "Take the last letters of each words in \"Raven Marisela Ross Angie\" and concatenate them.",
497
+ "answer": "nase"
498
+ },
499
+ {
500
+ "question": "Take the last letters of each words in \"Jeff Jen Giselle Noel\" and concatenate them.",
501
+ "answer": "fnel"
502
+ },
503
+ {
504
+ "question": "Take the last letters of each words in \"Kristi Eduardo Angelo Clare\" and concatenate them.",
505
+ "answer": "iooe"
506
+ },
507
+ {
508
+ "question": "Take the last letters of each words in \"Alina Alessandra Amina Bianca\" and concatenate them.",
509
+ "answer": "aaaa"
510
+ },
511
+ {
512
+ "question": "Take the last letters of each words in \"Zach Trenton Cary Beth\" and concatenate them.",
513
+ "answer": "hnyh"
514
+ },
515
+ {
516
+ "question": "Take the last letters of each words in \"Altagracia Howard Patti Douglas\" and concatenate them.",
517
+ "answer": "adis"
518
+ },
519
+ {
520
+ "question": "Take the last letters of each words in \"Jordan Yoni Lawrence Aura\" and concatenate them.",
521
+ "answer": "niea"
522
+ },
523
+ {
524
+ "question": "Take the last letters of each words in \"Leticia Jacinto Natasha Raphael\" and concatenate them.",
525
+ "answer": "aoal"
526
+ },
527
+ {
528
+ "question": "Take the last letters of each words in \"Mickey Dom Lilly Eloy\" and concatenate them.",
529
+ "answer": "ymyy"
530
+ },
531
+ {
532
+ "question": "Take the last letters of each words in \"Timmy Katherine Gabriel Nate\" and concatenate them.",
533
+ "answer": "yele"
534
+ },
535
+ {
536
+ "question": "Take the last letters of each words in \"Raymundo Jonathon Lexi Rony\" and concatenate them.",
537
+ "answer": "oniy"
538
+ },
539
+ {
540
+ "question": "Take the last letters of each words in \"Amy Ella Amilcar Roman\" and concatenate them.",
541
+ "answer": "yarn"
542
+ },
543
+ {
544
+ "question": "Take the last letters of each words in \"Bailey Lourdes Brianna Mart\u00edn\" and concatenate them.",
545
+ "answer": "ysan"
546
+ },
547
+ {
548
+ "question": "Take the last letters of each words in \"Daniella Dianne Ram Mirian\" and concatenate them.",
549
+ "answer": "aemn"
550
+ },
551
+ {
552
+ "question": "Take the last letters of each words in \"Rosie Charlie Mick Jacquelyn\" and concatenate them.",
553
+ "answer": "eekn"
554
+ },
555
+ {
556
+ "question": "Take the last letters of each words in \"Ericka Aly Darius Reed\" and concatenate them.",
557
+ "answer": "aysd"
558
+ },
559
+ {
560
+ "question": "Take the last letters of each words in \"Glenda Beverly Agustin Igor\" and concatenate them.",
561
+ "answer": "aynr"
562
+ },
563
+ {
564
+ "question": "Take the last letters of each words in \"Dino Toby Abigail Manuela\" and concatenate them.",
565
+ "answer": "oyla"
566
+ },
567
+ {
568
+ "question": "Take the last letters of each words in \"Shannon Millie Rosemary Priyanka\" and concatenate them.",
569
+ "answer": "neya"
570
+ },
571
+ {
572
+ "question": "Take the last letters of each words in \"Cristian Nik Gwen Josiah\" and concatenate them.",
573
+ "answer": "nknh"
574
+ },
575
+ {
576
+ "question": "Take the last letters of each words in \"Garry Arielle Elba Rodney\" and concatenate them.",
577
+ "answer": "yeay"
578
+ },
579
+ {
580
+ "question": "Take the last letters of each words in \"Clarissa Shauna Alexis Branden\" and concatenate them.",
581
+ "answer": "aasn"
582
+ },
583
+ {
584
+ "question": "Take the last letters of each words in \"Barb Gage Kristian Asia\" and concatenate them.",
585
+ "answer": "bena"
586
+ },
587
+ {
588
+ "question": "Take the last letters of each words in \"Emilia Jonas Christi Sophia\" and concatenate them.",
589
+ "answer": "asia"
590
+ },
591
+ {
592
+ "question": "Take the last letters of each words in \"Sonia Griffin Claude Josephine\" and concatenate them.",
593
+ "answer": "anee"
594
+ },
595
+ {
596
+ "question": "Take the last letters of each words in \"Arthur Shan Norman Manny\" and concatenate them.",
597
+ "answer": "rnny"
598
+ },
599
+ {
600
+ "question": "Take the last letters of each words in \"Abdi Clyde Ana Maria Pepe\" and concatenate them.",
601
+ "answer": "ieae"
602
+ },
603
+ {
604
+ "question": "Take the last letters of each words in \"Scotty Edgar Hanna Austin\" and concatenate them.",
605
+ "answer": "yran"
606
+ },
607
+ {
608
+ "question": "Take the last letters of each words in \"Denis Gigi Heather Ginger\" and concatenate them.",
609
+ "answer": "sirr"
610
+ },
611
+ {
612
+ "question": "Take the last letters of each words in \"Steph Pablo Ceci Francine\" and concatenate them.",
613
+ "answer": "hoie"
614
+ },
615
+ {
616
+ "question": "Take the last letters of each words in \"Orlando Colby Julius Ofelia\" and concatenate them.",
617
+ "answer": "oysa"
618
+ },
619
+ {
620
+ "question": "Take the last letters of each words in \"Rogelio Freddy Ivan Madeleine\" and concatenate them.",
621
+ "answer": "oyne"
622
+ },
623
+ {
624
+ "question": "Take the last letters of each words in \"Marshall Herman Faye Grant\" and concatenate them.",
625
+ "answer": "lnet"
626
+ },
627
+ {
628
+ "question": "Take the last letters of each words in \"Bernard Lidia Sebastian Judy\" and concatenate them.",
629
+ "answer": "dany"
630
+ },
631
+ {
632
+ "question": "Take the last letters of each words in \"Kyra Luciano Ciara Bryan\" and concatenate them.",
633
+ "answer": "aoan"
634
+ },
635
+ {
636
+ "question": "Take the last letters of each words in \"Myriam Jos\u00e9 Cecy Faisal\" and concatenate them.",
637
+ "answer": "m\u00e9yl"
638
+ },
639
+ {
640
+ "question": "Take the last letters of each words in \"Memo Joey Phil Samuel\" and concatenate them.",
641
+ "answer": "oyll"
642
+ },
643
+ {
644
+ "question": "Take the last letters of each words in \"Angel Sheryl Dulce Kenny\" and concatenate them.",
645
+ "answer": "lley"
646
+ },
647
+ {
648
+ "question": "Take the last letters of each words in \"Selina Tasha Jarrod Maddy\" and concatenate them.",
649
+ "answer": "aady"
650
+ },
651
+ {
652
+ "question": "Take the last letters of each words in \"Talia Nicki Tia Divya\" and concatenate them.",
653
+ "answer": "aiaa"
654
+ },
655
+ {
656
+ "question": "Take the last letters of each words in \"Franklin Rochelle Brent Sarai\" and concatenate them.",
657
+ "answer": "neti"
658
+ },
659
+ {
660
+ "question": "Take the last letters of each words in \"Clinton Yanira Barbara Betsy\" and concatenate them.",
661
+ "answer": "naay"
662
+ },
663
+ {
664
+ "question": "Take the last letters of each words in \"Ronnie Kiki Alan Remy\" and concatenate them.",
665
+ "answer": "einy"
666
+ },
667
+ {
668
+ "question": "Take the last letters of each words in \"Erwin Chad Margarito Shanna\" and concatenate them.",
669
+ "answer": "ndoa"
670
+ },
671
+ {
672
+ "question": "Take the last letters of each words in \"Connie Elena Tami Stuart\" and concatenate them.",
673
+ "answer": "eait"
674
+ },
675
+ {
676
+ "question": "Take the last letters of each words in \"Blaine Syed Priya Ruby\" and concatenate them.",
677
+ "answer": "eday"
678
+ },
679
+ {
680
+ "question": "Take the last letters of each words in \"Fernanda Magda Elmer Alvaro\" and concatenate them.",
681
+ "answer": "aaro"
682
+ },
683
+ {
684
+ "question": "Take the last letters of each words in \"Anand Estefania Stanley Lizette\" and concatenate them.",
685
+ "answer": "daye"
686
+ },
687
+ {
688
+ "question": "Take the last letters of each words in \"Sunil Tiana Darla Darnell\" and concatenate them.",
689
+ "answer": "laal"
690
+ },
691
+ {
692
+ "question": "Take the last letters of each words in \"Eugenio Moises Marion Kirk\" and concatenate them.",
693
+ "answer": "osnk"
694
+ },
695
+ {
696
+ "question": "Take the last letters of each words in \"Marty Justine Edgardo Osman\" and concatenate them.",
697
+ "answer": "yeon"
698
+ },
699
+ {
700
+ "question": "Take the last letters of each words in \"Tucker Daniel Hernandez Alison\" and concatenate them.",
701
+ "answer": "rlzn"
702
+ },
703
+ {
704
+ "question": "Take the last letters of each words in \"Sherri Genesis Jeffrey Samir\" and concatenate them.",
705
+ "answer": "isyr"
706
+ },
707
+ {
708
+ "question": "Take the last letters of each words in \"Cat Alisha Ramon Norberto\" and concatenate them.",
709
+ "answer": "tano"
710
+ },
711
+ {
712
+ "question": "Take the last letters of each words in \"Hilario Magdalena Morris Patricio\" and concatenate them.",
713
+ "answer": "oaso"
714
+ },
715
+ {
716
+ "question": "Take the last letters of each words in \"Fausto Tito Jade Terrance\" and concatenate them.",
717
+ "answer": "ooee"
718
+ },
719
+ {
720
+ "question": "Take the last letters of each words in \"Gayle Doreen Chelsey Helena\" and concatenate them.",
721
+ "answer": "enya"
722
+ },
723
+ {
724
+ "question": "Take the last letters of each words in \"Lucio V\u00edctor Lester Allie\" and concatenate them.",
725
+ "answer": "orre"
726
+ },
727
+ {
728
+ "question": "Take the last letters of each words in \"Donnie Alli Terry Krystal\" and concatenate them.",
729
+ "answer": "eiyl"
730
+ },
731
+ {
732
+ "question": "Take the last letters of each words in \"Carlos Jackie Callie Caitlin\" and concatenate them.",
733
+ "answer": "seen"
734
+ },
735
+ {
736
+ "question": "Take the last letters of each words in \"Debi Raymond Luis Federico\" and concatenate them.",
737
+ "answer": "idso"
738
+ },
739
+ {
740
+ "question": "Take the last letters of each words in \"Kelvin Brennan Carina Paty\" and concatenate them.",
741
+ "answer": "nnay"
742
+ },
743
+ {
744
+ "question": "Take the last letters of each words in \"Janice Shelly Arnulfo Nestor\" and concatenate them.",
745
+ "answer": "eyor"
746
+ },
747
+ {
748
+ "question": "Take the last letters of each words in \"Bob Aman Richie Sana\" and concatenate them.",
749
+ "answer": "bnea"
750
+ },
751
+ {
752
+ "question": "Take the last letters of each words in \"Juan Jose Dalia Destiny Amelia\" and concatenate them.",
753
+ "answer": "eaya"
754
+ },
755
+ {
756
+ "question": "Take the last letters of each words in \"Jose Luis Kiara Arun Josefina\" and concatenate them.",
757
+ "answer": "sana"
758
+ },
759
+ {
760
+ "question": "Take the last letters of each words in \"Guillermo Gerry Lizbeth Charly\" and concatenate them.",
761
+ "answer": "oyhy"
762
+ },
763
+ {
764
+ "question": "Take the last letters of each words in \"Reginald Franky Kira Gordon\" and concatenate them.",
765
+ "answer": "dyan"
766
+ },
767
+ {
768
+ "question": "Take the last letters of each words in \"Bret Lois Ismael Mirna\" and concatenate them.",
769
+ "answer": "tsla"
770
+ },
771
+ {
772
+ "question": "Take the last letters of each words in \"Peggy Trent Darrell Pamela\" and concatenate them.",
773
+ "answer": "ytla"
774
+ },
775
+ {
776
+ "question": "Take the last letters of each words in \"Sophie Gregorio Avery Pooja\" and concatenate them.",
777
+ "answer": "eoya"
778
+ },
779
+ {
780
+ "question": "Take the last letters of each words in \"Yvonne Rafaela Jb Salomon\" and concatenate them.",
781
+ "answer": "eabn"
782
+ },
783
+ {
784
+ "question": "Take the last letters of each words in \"Gabi Dante Rafa Tricia\" and concatenate them.",
785
+ "answer": "ieaa"
786
+ },
787
+ {
788
+ "question": "Take the last letters of each words in \"Martin Sage Tanisha Rick\" and concatenate them.",
789
+ "answer": "neak"
790
+ },
791
+ {
792
+ "question": "Take the last letters of each words in \"Suzanne Julissa Chino America\" and concatenate them.",
793
+ "answer": "eaoa"
794
+ },
795
+ {
796
+ "question": "Take the last letters of each words in \"Mel Josh Alejandra Harley\" and concatenate them.",
797
+ "answer": "lhay"
798
+ },
799
+ {
800
+ "question": "Take the last letters of each words in \"Candy Megan Ed Nathan\" and concatenate them.",
801
+ "answer": "yndn"
802
+ },
803
+ {
804
+ "question": "Take the last letters of each words in \"Diego Val Vincent Stacie\" and concatenate them.",
805
+ "answer": "olte"
806
+ },
807
+ {
808
+ "question": "Take the last letters of each words in \"Salvatore Gustavo Jill Celeste\" and concatenate them.",
809
+ "answer": "eole"
810
+ },
811
+ {
812
+ "question": "Take the last letters of each words in \"Marcy Gonzalez Alice Arlene\" and concatenate them.",
813
+ "answer": "yzee"
814
+ },
815
+ {
816
+ "question": "Take the last letters of each words in \"Dana German Alvin Braden\" and concatenate them.",
817
+ "answer": "annn"
818
+ },
819
+ {
820
+ "question": "Take the last letters of each words in \"Paula Irina Laurel Maribel\" and concatenate them.",
821
+ "answer": "aall"
822
+ },
823
+ {
824
+ "question": "Take the last letters of each words in \"Andr\u00e9s Miles Ronaldo Melinda\" and concatenate them.",
825
+ "answer": "ssoa"
826
+ },
827
+ {
828
+ "question": "Take the last letters of each words in \"Williams Reza Ashton Lillian\" and concatenate them.",
829
+ "answer": "sann"
830
+ },
831
+ {
832
+ "question": "Take the last letters of each words in \"Lacey Nora Debra Ashleigh\" and concatenate them.",
833
+ "answer": "yaah"
834
+ },
835
+ {
836
+ "question": "Take the last letters of each words in \"Itzel Anderson Kenia Edy\" and concatenate them.",
837
+ "answer": "lnay"
838
+ },
839
+ {
840
+ "question": "Take the last letters of each words in \"Forrest Juanito Allan Candice\" and concatenate them.",
841
+ "answer": "tone"
842
+ },
843
+ {
844
+ "question": "Take the last letters of each words in \"Nico Lorenzo Johanna Teresita\" and concatenate them.",
845
+ "answer": "ooaa"
846
+ },
847
+ {
848
+ "question": "Take the last letters of each words in \"Paulo Tatyana Bernice Ra\u00fal\" and concatenate them.",
849
+ "answer": "oael"
850
+ },
851
+ {
852
+ "question": "Take the last letters of each words in \"Lionel Fiona Bobby Janeth\" and concatenate them.",
853
+ "answer": "layh"
854
+ },
855
+ {
856
+ "question": "Take the last letters of each words in \"Dany Hilda Butch Mahesh\" and concatenate them.",
857
+ "answer": "yahh"
858
+ },
859
+ {
860
+ "question": "Take the last letters of each words in \"Deon Lane Everett Lindsay\" and concatenate them.",
861
+ "answer": "nety"
862
+ },
863
+ {
864
+ "question": "Take the last letters of each words in \"Ubaldo Katrina Francis Lynn\" and concatenate them.",
865
+ "answer": "oasn"
866
+ },
867
+ {
868
+ "question": "Take the last letters of each words in \"Lesley Luna Nadia Adriana\" and concatenate them.",
869
+ "answer": "yaaa"
870
+ },
871
+ {
872
+ "question": "Take the last letters of each words in \"Maureen Fabian Claudette Peyton\" and concatenate them.",
873
+ "answer": "nnen"
874
+ },
875
+ {
876
+ "question": "Take the last letters of each words in \"Gabby Reese Leah Celia\" and concatenate them.",
877
+ "answer": "yeha"
878
+ },
879
+ {
880
+ "question": "Take the last letters of each words in \"Ramona Lucy Gail Octavio\" and concatenate them.",
881
+ "answer": "aylo"
882
+ },
883
+ {
884
+ "question": "Take the last letters of each words in \"Sue Cassandra Cody Cali\" and concatenate them.",
885
+ "answer": "eayi"
886
+ },
887
+ {
888
+ "question": "Take the last letters of each words in \"Efrain Rickey Jonathan Kelli\" and concatenate them.",
889
+ "answer": "nyni"
890
+ },
891
+ {
892
+ "question": "Take the last letters of each words in \"Efren Rex Marilyn Emerson\" and concatenate them.",
893
+ "answer": "nxnn"
894
+ },
895
+ {
896
+ "question": "Take the last letters of each words in \"Bobbi Tamika Zac Lala\" and concatenate them.",
897
+ "answer": "iaca"
898
+ },
899
+ {
900
+ "question": "Take the last letters of each words in \"Kendall Matias Kaleb Randy\" and concatenate them.",
901
+ "answer": "lsby"
902
+ },
903
+ {
904
+ "question": "Take the last letters of each words in \"Armando Astrid Anibal Dakota\" and concatenate them.",
905
+ "answer": "odla"
906
+ },
907
+ {
908
+ "question": "Take the last letters of each words in \"Lamont Frederick Lance Esperanza\" and concatenate them.",
909
+ "answer": "tkea"
910
+ },
911
+ {
912
+ "question": "Take the last letters of each words in \"Lynda Danilo Jonny Judith\" and concatenate them.",
913
+ "answer": "aoyh"
914
+ },
915
+ {
916
+ "question": "Take the last letters of each words in \"Deandre Moe Jack Vanessa\" and concatenate them.",
917
+ "answer": "eeka"
918
+ },
919
+ {
920
+ "question": "Take the last letters of each words in \"Tere Niko Keith Conner\" and concatenate them.",
921
+ "answer": "eohr"
922
+ },
923
+ {
924
+ "question": "Take the last letters of each words in \"Kelly Cheryl Nancy Jojo\" and concatenate them.",
925
+ "answer": "ylyo"
926
+ },
927
+ {
928
+ "question": "Take the last letters of each words in \"H\u00e9ctor Daniela Rossy Jose Manuel\" and concatenate them.",
929
+ "answer": "rayl"
930
+ },
931
+ {
932
+ "question": "Take the last letters of each words in \"Carissa Paige Consuelo Izzy\" and concatenate them.",
933
+ "answer": "aeoy"
934
+ },
935
+ {
936
+ "question": "Take the last letters of each words in \"Wilson Abbey Harold Nelly\" and concatenate them.",
937
+ "answer": "nydy"
938
+ },
939
+ {
940
+ "question": "Take the last letters of each words in \"Tiara Araceli Michaela Genaro\" and concatenate them.",
941
+ "answer": "aiao"
942
+ },
943
+ {
944
+ "question": "Take the last letters of each words in \"Alexa Pilar Rod Nicola\" and concatenate them.",
945
+ "answer": "arda"
946
+ },
947
+ {
948
+ "question": "Take the last letters of each words in \"Yamileth Dane Aron Dee\" and concatenate them.",
949
+ "answer": "hene"
950
+ },
951
+ {
952
+ "question": "Take the last letters of each words in \"Rachel Alisa Fanny Jeffery\" and concatenate them.",
953
+ "answer": "layy"
954
+ },
955
+ {
956
+ "question": "Take the last letters of each words in \"Manuel Aurelio India Rosalba\" and concatenate them.",
957
+ "answer": "loaa"
958
+ },
959
+ {
960
+ "question": "Take the last letters of each words in \"Garrett Eva Joaquin Monique\" and concatenate them.",
961
+ "answer": "tane"
962
+ },
963
+ {
964
+ "question": "Take the last letters of each words in \"Ivy Romeo Jana Ej\" and concatenate them.",
965
+ "answer": "yoaj"
966
+ },
967
+ {
968
+ "question": "Take the last letters of each words in \"Yajaira Terrell Adam Lexie\" and concatenate them.",
969
+ "answer": "alme"
970
+ },
971
+ {
972
+ "question": "Take the last letters of each words in \"Virginia Juanita Zak Wayne\" and concatenate them.",
973
+ "answer": "aake"
974
+ },
975
+ {
976
+ "question": "Take the last letters of each words in \"Mari Ximena Leo Antonia\" and concatenate them.",
977
+ "answer": "iaoa"
978
+ },
979
+ {
980
+ "question": "Take the last letters of each words in \"Salvador Sol Tyler Kareem\" and concatenate them.",
981
+ "answer": "rlrm"
982
+ },
983
+ {
984
+ "question": "Take the last letters of each words in \"Allyson Mara Jo Toni\" and concatenate them.",
985
+ "answer": "naoi"
986
+ },
987
+ {
988
+ "question": "Take the last letters of each words in \"Rocio Sandy Anahi Latasha\" and concatenate them.",
989
+ "answer": "oyia"
990
+ },
991
+ {
992
+ "question": "Take the last letters of each words in \"Vicki Dwight Johnson Alexandra\" and concatenate them.",
993
+ "answer": "itna"
994
+ },
995
+ {
996
+ "question": "Take the last letters of each words in \"Emma Maryann Olga Nataly\" and concatenate them.",
997
+ "answer": "anay"
998
+ },
999
+ {
1000
+ "question": "Take the last letters of each words in \"Belkis Wendell Lissette Patricia\" and concatenate them.",
1001
+ "answer": "slea"
1002
+ },
1003
+ {
1004
+ "question": "Take the last letters of each words in \"Ever Gio Elia Ramesh\" and concatenate them.",
1005
+ "answer": "roah"
1006
+ },
1007
+ {
1008
+ "question": "Take the last letters of each words in \"Dallas Uriel Brendan Julian\" and concatenate them.",
1009
+ "answer": "slnn"
1010
+ },
1011
+ {
1012
+ "question": "Take the last letters of each words in \"Sandeep Graciela Jai Xiomara\" and concatenate them.",
1013
+ "answer": "paia"
1014
+ },
1015
+ {
1016
+ "question": "Take the last letters of each words in \"Frank Trevor Al Gabriella\" and concatenate them.",
1017
+ "answer": "krla"
1018
+ },
1019
+ {
1020
+ "question": "Take the last letters of each words in \"Elva Kari Shirley Gilberto\" and concatenate them.",
1021
+ "answer": "aiyo"
1022
+ },
1023
+ {
1024
+ "question": "Take the last letters of each words in \"Alonzo Dorothy Alfred Rodriguez\" and concatenate them.",
1025
+ "answer": "oydz"
1026
+ },
1027
+ {
1028
+ "question": "Take the last letters of each words in \"Ernest Yuri Camila Ashlee\" and concatenate them.",
1029
+ "answer": "tiae"
1030
+ },
1031
+ {
1032
+ "question": "Take the last letters of each words in \"Jim Dwayne Ricky Artemio\" and concatenate them.",
1033
+ "answer": "meyo"
1034
+ },
1035
+ {
1036
+ "question": "Take the last letters of each words in \"Noah Aubrey Cesar Eliana\" and concatenate them.",
1037
+ "answer": "hyra"
1038
+ },
1039
+ {
1040
+ "question": "Take the last letters of each words in \"Shawn Tracie Lynne Leila\" and concatenate them.",
1041
+ "answer": "neea"
1042
+ },
1043
+ {
1044
+ "question": "Take the last letters of each words in \"Norma Timothy Willie Mathew\" and concatenate them.",
1045
+ "answer": "ayew"
1046
+ },
1047
+ {
1048
+ "question": "Take the last letters of each words in \"Maricruz Alfredo Barry Deepak\" and concatenate them.",
1049
+ "answer": "zoyk"
1050
+ },
1051
+ {
1052
+ "question": "Take the last letters of each words in \"Julia Kirsten Pam Adan\" and concatenate them.",
1053
+ "answer": "anmn"
1054
+ },
1055
+ {
1056
+ "question": "Take the last letters of each words in \"Adalberto Jamal Carter Robyn\" and concatenate them.",
1057
+ "answer": "olrn"
1058
+ },
1059
+ {
1060
+ "question": "Take the last letters of each words in \"Nicky Eden James Matt\" and concatenate them.",
1061
+ "answer": "ynst"
1062
+ },
1063
+ {
1064
+ "question": "Take the last letters of each words in \"Jaime Brad Levi Emmanuel\" and concatenate them.",
1065
+ "answer": "edil"
1066
+ },
1067
+ {
1068
+ "question": "Take the last letters of each words in \"Kristie Johnnie Marisa Derick\" and concatenate them.",
1069
+ "answer": "eeak"
1070
+ },
1071
+ {
1072
+ "question": "Take the last letters of each words in \"Wilfredo Abby Karthik Perry\" and concatenate them.",
1073
+ "answer": "oyky"
1074
+ },
1075
+ {
1076
+ "question": "Take the last letters of each words in \"Pancho Claudio Elaine Vicky\" and concatenate them.",
1077
+ "answer": "ooey"
1078
+ },
1079
+ {
1080
+ "question": "Take the last letters of each words in \"Teri Lina Mery Melanie\" and concatenate them.",
1081
+ "answer": "iaye"
1082
+ },
1083
+ {
1084
+ "question": "Take the last letters of each words in \"Selene Felix Milton Yessica\" and concatenate them.",
1085
+ "answer": "exna"
1086
+ },
1087
+ {
1088
+ "question": "Take the last letters of each words in \"Gus Brock Ava Jenna\" and concatenate them.",
1089
+ "answer": "skaa"
1090
+ },
1091
+ {
1092
+ "question": "Take the last letters of each words in \"Karen Hector Mai Steven\" and concatenate them.",
1093
+ "answer": "nrin"
1094
+ },
1095
+ {
1096
+ "question": "Take the last letters of each words in \"Alexander Marina Valentina Mila\" and concatenate them.",
1097
+ "answer": "raaa"
1098
+ },
1099
+ {
1100
+ "question": "Take the last letters of each words in \"Kristine Van Marisol Preston\" and concatenate them.",
1101
+ "answer": "enln"
1102
+ },
1103
+ {
1104
+ "question": "Take the last letters of each words in \"Evan Ray Sofia Alonso\" and concatenate them.",
1105
+ "answer": "nyao"
1106
+ },
1107
+ {
1108
+ "question": "Take the last letters of each words in \"Alfonso Collin Amado Dick\" and concatenate them.",
1109
+ "answer": "onok"
1110
+ },
1111
+ {
1112
+ "question": "Take the last letters of each words in \"Les Jun Noe Juliana\" and concatenate them.",
1113
+ "answer": "snea"
1114
+ },
1115
+ {
1116
+ "question": "Take the last letters of each words in \"Damian Crystal Nisha Hernan\" and concatenate them.",
1117
+ "answer": "nlan"
1118
+ },
1119
+ {
1120
+ "question": "Take the last letters of each words in \"Rosario Manoj Adolfo Reina\" and concatenate them.",
1121
+ "answer": "ojoa"
1122
+ },
1123
+ {
1124
+ "question": "Take the last letters of each words in \"Marcia Belen Reyna Britney\" and concatenate them.",
1125
+ "answer": "anay"
1126
+ },
1127
+ {
1128
+ "question": "Take the last letters of each words in \"Idalia Arnoldo Marla Duane\" and concatenate them.",
1129
+ "answer": "aoae"
1130
+ },
1131
+ {
1132
+ "question": "Take the last letters of each words in \"Katia Mina Cam Ronda\" and concatenate them.",
1133
+ "answer": "aama"
1134
+ },
1135
+ {
1136
+ "question": "Take the last letters of each words in \"Rashad Savannah Flavio Bert\" and concatenate them.",
1137
+ "answer": "dhot"
1138
+ },
1139
+ {
1140
+ "question": "Take the last letters of each words in \"Leanne Lulu Lopez Jp\" and concatenate them.",
1141
+ "answer": "euzp"
1142
+ },
1143
+ {
1144
+ "question": "Take the last letters of each words in \"Lora Alberto Cathy Kara\" and concatenate them.",
1145
+ "answer": "aoya"
1146
+ },
1147
+ {
1148
+ "question": "Take the last letters of each words in \"Roni Nikita Hannah Kiana\" and concatenate them.",
1149
+ "answer": "iaha"
1150
+ },
1151
+ {
1152
+ "question": "Take the last letters of each words in \"Edwin Lovely Curt Damon\" and concatenate them.",
1153
+ "answer": "nytn"
1154
+ },
1155
+ {
1156
+ "question": "Take the last letters of each words in \"Missy Erin Lorna Lenny\" and concatenate them.",
1157
+ "answer": "ynay"
1158
+ },
1159
+ {
1160
+ "question": "Take the last letters of each words in \"Vijay Sherrie Doug Suzy\" and concatenate them.",
1161
+ "answer": "yegy"
1162
+ },
1163
+ {
1164
+ "question": "Take the last letters of each words in \"Claudia Cole Matthew Juan Pablo\" and concatenate them.",
1165
+ "answer": "aewo"
1166
+ },
1167
+ {
1168
+ "question": "Take the last letters of each words in \"Arturo Dominick Christa Myles\" and concatenate them.",
1169
+ "answer": "okas"
1170
+ },
1171
+ {
1172
+ "question": "Take the last letters of each words in \"Marc Doris Ernie Gary\" and concatenate them.",
1173
+ "answer": "csey"
1174
+ },
1175
+ {
1176
+ "question": "Take the last letters of each words in \"Teresa Reid Karin Gracie\" and concatenate them.",
1177
+ "answer": "adne"
1178
+ },
1179
+ {
1180
+ "question": "Take the last letters of each words in \"Rahul Praveen Isaiah Elsie\" and concatenate them.",
1181
+ "answer": "lnhe"
1182
+ },
1183
+ {
1184
+ "question": "Take the last letters of each words in \"Miranda Jacques Clarence Chandra\" and concatenate them.",
1185
+ "answer": "asea"
1186
+ },
1187
+ {
1188
+ "question": "Take the last letters of each words in \"June Robin Josie Bo\" and concatenate them.",
1189
+ "answer": "eneo"
1190
+ },
1191
+ {
1192
+ "question": "Take the last letters of each words in \"Luisa Tatiana Drake Hillary\" and concatenate them.",
1193
+ "answer": "aaey"
1194
+ },
1195
+ {
1196
+ "question": "Take the last letters of each words in \"Louise Mariano Laura Gerardo\" and concatenate them.",
1197
+ "answer": "eoao"
1198
+ },
1199
+ {
1200
+ "question": "Take the last letters of each words in \"Star Jude Rosemarie Raquel\" and concatenate them.",
1201
+ "answer": "reel"
1202
+ },
1203
+ {
1204
+ "question": "Take the last letters of each words in \"Clark Jenn Diane Blair\" and concatenate them.",
1205
+ "answer": "kner"
1206
+ },
1207
+ {
1208
+ "question": "Take the last letters of each words in \"Lalo Yolanda Elida Ester\" and concatenate them.",
1209
+ "answer": "oaar"
1210
+ },
1211
+ {
1212
+ "question": "Take the last letters of each words in \"Bethany Rakesh Christine Dinesh\" and concatenate them.",
1213
+ "answer": "yheh"
1214
+ },
1215
+ {
1216
+ "question": "Take the last letters of each words in \"Nery Cindy Jess Chris\" and concatenate them.",
1217
+ "answer": "yyss"
1218
+ },
1219
+ {
1220
+ "question": "Take the last letters of each words in \"Miriam Brandy Bertha Renato\" and concatenate them.",
1221
+ "answer": "myao"
1222
+ },
1223
+ {
1224
+ "question": "Take the last letters of each words in \"Abel Mallory Theresa Quinn\" and concatenate them.",
1225
+ "answer": "lyan"
1226
+ },
1227
+ {
1228
+ "question": "Take the last letters of each words in \"Celso Tracy Winston Anton\" and concatenate them.",
1229
+ "answer": "oynn"
1230
+ },
1231
+ {
1232
+ "question": "Take the last letters of each words in \"Lore Erasmo Louis David\" and concatenate them.",
1233
+ "answer": "eosd"
1234
+ },
1235
+ {
1236
+ "question": "Take the last letters of each words in \"Cristobal Dania Li Anna\" and concatenate them.",
1237
+ "answer": "laia"
1238
+ },
1239
+ {
1240
+ "question": "Take the last letters of each words in \"Wally Claire Helen Nacho\" and concatenate them.",
1241
+ "answer": "yeno"
1242
+ },
1243
+ {
1244
+ "question": "Take the last letters of each words in \"Rosa Lana Curtis Rae\" and concatenate them.",
1245
+ "answer": "aase"
1246
+ },
1247
+ {
1248
+ "question": "Take the last letters of each words in \"Angeles Richard Luciana Darlene\" and concatenate them.",
1249
+ "answer": "sdae"
1250
+ },
1251
+ {
1252
+ "question": "Take the last letters of each words in \"Jodi Judi Nia Raj\" and concatenate them.",
1253
+ "answer": "iiaj"
1254
+ },
1255
+ {
1256
+ "question": "Take the last letters of each words in \"Irvin Brittney Vince Lucas\" and concatenate them.",
1257
+ "answer": "nyes"
1258
+ },
1259
+ {
1260
+ "question": "Take the last letters of each words in \"Guille Lisa Harvey Gina\" and concatenate them.",
1261
+ "answer": "eaya"
1262
+ },
1263
+ {
1264
+ "question": "Take the last letters of each words in \"Renata Mariela Mona Kristin\" and concatenate them.",
1265
+ "answer": "aaan"
1266
+ },
1267
+ {
1268
+ "question": "Take the last letters of each words in \"Johan Damien Serena Grace\" and concatenate them.",
1269
+ "answer": "nnae"
1270
+ },
1271
+ {
1272
+ "question": "Take the last letters of each words in \"Aj Jd Maddie Francisca\" and concatenate them.",
1273
+ "answer": "jdea"
1274
+ },
1275
+ {
1276
+ "question": "Take the last letters of each words in \"Daryl Owen Myra Aaron\" and concatenate them.",
1277
+ "answer": "lnan"
1278
+ },
1279
+ {
1280
+ "question": "Take the last letters of each words in \"Nick Ada Stephany Suzie\" and concatenate them.",
1281
+ "answer": "kaye"
1282
+ },
1283
+ {
1284
+ "question": "Take the last letters of each words in \"Elle Alex Irma Stephan\" and concatenate them.",
1285
+ "answer": "exan"
1286
+ },
1287
+ {
1288
+ "question": "Take the last letters of each words in \"Arnold Aidan Ally Ami\" and concatenate them.",
1289
+ "answer": "dnyi"
1290
+ },
1291
+ {
1292
+ "question": "Take the last letters of each words in \"Gabino Kayla Laurie Familia\" and concatenate them.",
1293
+ "answer": "oaea"
1294
+ },
1295
+ {
1296
+ "question": "Take the last letters of each words in \"Logan Ely Abbie Colleen\" and concatenate them.",
1297
+ "answer": "nyen"
1298
+ },
1299
+ {
1300
+ "question": "Take the last letters of each words in \"Kristy Brandi Lizeth Petra\" and concatenate them.",
1301
+ "answer": "yiha"
1302
+ },
1303
+ {
1304
+ "question": "Take the last letters of each words in \"Hazel Gabrielle Tre Dalton\" and concatenate them.",
1305
+ "answer": "leen"
1306
+ },
1307
+ {
1308
+ "question": "Take the last letters of each words in \"Conrad Marcella Annette Esteban\" and concatenate them.",
1309
+ "answer": "daen"
1310
+ },
1311
+ {
1312
+ "question": "Take the last letters of each words in \"Isaac Reyes Carly Tania\" and concatenate them.",
1313
+ "answer": "csya"
1314
+ },
1315
+ {
1316
+ "question": "Take the last letters of each words in \"Wilma Margie Amber Sergio\" and concatenate them.",
1317
+ "answer": "aero"
1318
+ },
1319
+ {
1320
+ "question": "Take the last letters of each words in \"Sonya Eddy Carol Yung\" and concatenate them.",
1321
+ "answer": "aylg"
1322
+ },
1323
+ {
1324
+ "question": "Take the last letters of each words in \"Leandro Maricela Genevieve Lesly\" and concatenate them.",
1325
+ "answer": "oaey"
1326
+ },
1327
+ {
1328
+ "question": "Take the last letters of each words in \"Andy Cecilia Gretchen Sandi\" and concatenate them.",
1329
+ "answer": "yani"
1330
+ },
1331
+ {
1332
+ "question": "Take the last letters of each words in \"Darwin Colin Cj Abhishek\" and concatenate them.",
1333
+ "answer": "nnjk"
1334
+ },
1335
+ {
1336
+ "question": "Take the last letters of each words in \"Lupita Cyndi Kay Derrick\" and concatenate them.",
1337
+ "answer": "aiyk"
1338
+ },
1339
+ {
1340
+ "question": "Take the last letters of each words in \"Felipe Heidi Nino Bradley\" and concatenate them.",
1341
+ "answer": "eioy"
1342
+ },
1343
+ {
1344
+ "question": "Take the last letters of each words in \"Dorian Mayra Freddie Magaly\" and concatenate them.",
1345
+ "answer": "naey"
1346
+ },
1347
+ {
1348
+ "question": "Take the last letters of each words in \"Flora Warren Gwendolyn Alissa\" and concatenate them.",
1349
+ "answer": "anna"
1350
+ },
1351
+ {
1352
+ "question": "Take the last letters of each words in \"Ace Rosy Kimberly Jean\" and concatenate them.",
1353
+ "answer": "eyyn"
1354
+ },
1355
+ {
1356
+ "question": "Take the last letters of each words in \"Jeremy Simone Alondra Wyatt\" and concatenate them.",
1357
+ "answer": "yeat"
1358
+ },
1359
+ {
1360
+ "question": "Take the last letters of each words in \"Deanna Terri Gabriela Jonah\" and concatenate them.",
1361
+ "answer": "aiah"
1362
+ },
1363
+ {
1364
+ "question": "Take the last letters of each words in \"Conor Randall Oleg Stephanie\" and concatenate them.",
1365
+ "answer": "rlge"
1366
+ },
1367
+ {
1368
+ "question": "Take the last letters of each words in \"Chance Valentin Micah Clara\" and concatenate them.",
1369
+ "answer": "enha"
1370
+ },
1371
+ {
1372
+ "question": "Take the last letters of each words in \"Dusty Yanet Hortencia Lili\" and concatenate them.",
1373
+ "answer": "ytai"
1374
+ },
1375
+ {
1376
+ "question": "Take the last letters of each words in \"Natalie Gilbert Brian Sanchez\" and concatenate them.",
1377
+ "answer": "etnz"
1378
+ },
1379
+ {
1380
+ "question": "Take the last letters of each words in \"Ulises Derek Adrianna Eugene\" and concatenate them.",
1381
+ "answer": "skae"
1382
+ },
1383
+ {
1384
+ "question": "Take the last letters of each words in \"Carlitos Damaris Nikhil Jennie\" and concatenate them.",
1385
+ "answer": "ssle"
1386
+ },
1387
+ {
1388
+ "question": "Take the last letters of each words in \"Yessenia Geraldine Minerva Tanya\" and concatenate them.",
1389
+ "answer": "aeaa"
1390
+ },
1391
+ {
1392
+ "question": "Take the last letters of each words in \"Nicolas Aaliyah Pascual Rob\" and concatenate them.",
1393
+ "answer": "shlb"
1394
+ },
1395
+ {
1396
+ "question": "Take the last letters of each words in \"Brandon Ivonne Jefferson Isabella\" and concatenate them.",
1397
+ "answer": "nena"
1398
+ },
1399
+ {
1400
+ "question": "Take the last letters of each words in \"Pedro Leopoldo Tee Mar\" and concatenate them.",
1401
+ "answer": "ooer"
1402
+ },
1403
+ {
1404
+ "question": "Take the last letters of each words in \"Paris Jimmie Andrew Ram\u00f3n\" and concatenate them.",
1405
+ "answer": "sewn"
1406
+ },
1407
+ {
1408
+ "question": "Take the last letters of each words in \"Angelica Tariq Ursula Nena\" and concatenate them.",
1409
+ "answer": "aqaa"
1410
+ },
1411
+ {
1412
+ "question": "Take the last letters of each words in \"Luz Terence Elder Jazmin\" and concatenate them.",
1413
+ "answer": "zern"
1414
+ },
1415
+ {
1416
+ "question": "Take the last letters of each words in \"Roberta Mauro Clint Marcela\" and concatenate them.",
1417
+ "answer": "aota"
1418
+ },
1419
+ {
1420
+ "question": "Take the last letters of each words in \"Adrian Marlon Karla Florence\" and concatenate them.",
1421
+ "answer": "nnae"
1422
+ },
1423
+ {
1424
+ "question": "Take the last letters of each words in \"Skyler Oliver Cristy Sierra\" and concatenate them.",
1425
+ "answer": "rrya"
1426
+ },
1427
+ {
1428
+ "question": "Take the last letters of each words in \"Barbie Desiree Yaneth Dre\" and concatenate them.",
1429
+ "answer": "eehe"
1430
+ },
1431
+ {
1432
+ "question": "Take the last letters of each words in \"Dan Ruth Xavier Isidro\" and concatenate them.",
1433
+ "answer": "nhro"
1434
+ },
1435
+ {
1436
+ "question": "Take the last letters of each words in \"Philip Antony Iris Alicia\" and concatenate them.",
1437
+ "answer": "pysa"
1438
+ },
1439
+ {
1440
+ "question": "Take the last letters of each words in \"Corey Elvin Tino Melvin\" and concatenate them.",
1441
+ "answer": "ynon"
1442
+ },
1443
+ {
1444
+ "question": "Take the last letters of each words in \"Russell Mitchell Bee Faith\" and concatenate them.",
1445
+ "answer": "lleh"
1446
+ },
1447
+ {
1448
+ "question": "Take the last letters of each words in \"Bri Roger Eve Diana\" and concatenate them.",
1449
+ "answer": "irea"
1450
+ },
1451
+ {
1452
+ "question": "Take the last letters of each words in \"Gisela Gerald Mackenzie Ashley\" and concatenate them.",
1453
+ "answer": "adey"
1454
+ },
1455
+ {
1456
+ "question": "Take the last letters of each words in \"Vinny Landon Miguel Caitlyn\" and concatenate them.",
1457
+ "answer": "ynln"
1458
+ },
1459
+ {
1460
+ "question": "Take the last letters of each words in \"Constance Nicholas Will Love\" and concatenate them.",
1461
+ "answer": "esle"
1462
+ },
1463
+ {
1464
+ "question": "Take the last letters of each words in \"Jeannie Kenneth Porfirio Ezequiel\" and concatenate them.",
1465
+ "answer": "ehol"
1466
+ },
1467
+ {
1468
+ "question": "Take the last letters of each words in \"Mario Pierre Amit Nelson\" and concatenate them.",
1469
+ "answer": "oetn"
1470
+ },
1471
+ {
1472
+ "question": "Take the last letters of each words in \"Mercedes Adela Susana Rose\" and concatenate them.",
1473
+ "answer": "saae"
1474
+ },
1475
+ {
1476
+ "question": "Take the last letters of each words in \"Fred Nolan Johnathan Carson\" and concatenate them.",
1477
+ "answer": "dnnn"
1478
+ },
1479
+ {
1480
+ "question": "Take the last letters of each words in \"Caroline Demetrius Fidel Solomon\" and concatenate them.",
1481
+ "answer": "esln"
1482
+ },
1483
+ {
1484
+ "question": "Take the last letters of each words in \"Julio Sidney Aiden Shay\" and concatenate them.",
1485
+ "answer": "oyny"
1486
+ },
1487
+ {
1488
+ "question": "Take the last letters of each words in \"Mabel Estela Irene May\" and concatenate them.",
1489
+ "answer": "laey"
1490
+ },
1491
+ {
1492
+ "question": "Take the last letters of each words in \"Marcus Ramirez Junior Arely\" and concatenate them.",
1493
+ "answer": "szry"
1494
+ },
1495
+ {
1496
+ "question": "Take the last letters of each words in \"Todd Joni Gil Fran\" and concatenate them.",
1497
+ "answer": "diln"
1498
+ },
1499
+ {
1500
+ "question": "Take the last letters of each words in \"Gerson Roxanne Deborah Nathaniel\" and concatenate them.",
1501
+ "answer": "nehl"
1502
+ },
1503
+ {
1504
+ "question": "Take the last letters of each words in \"Colton Dexter Katy Brayden\" and concatenate them.",
1505
+ "answer": "nryn"
1506
+ },
1507
+ {
1508
+ "question": "Take the last letters of each words in \"Tori Mariam Gaby Brayan\" and concatenate them.",
1509
+ "answer": "imyn"
1510
+ },
1511
+ {
1512
+ "question": "Take the last letters of each words in \"Michel Roy Sergey Jessie\" and concatenate them.",
1513
+ "answer": "lyye"
1514
+ },
1515
+ {
1516
+ "question": "Take the last letters of each words in \"Anil Enrique Jimmy Jhonny\" and concatenate them.",
1517
+ "answer": "leyy"
1518
+ },
1519
+ {
1520
+ "question": "Take the last letters of each words in \"\u00c1ngel Carlton Sameer Martinez\" and concatenate them.",
1521
+ "answer": "lnrz"
1522
+ },
1523
+ {
1524
+ "question": "Take the last letters of each words in \"Maritza Nana Loretta Eric\" and concatenate them.",
1525
+ "answer": "aaac"
1526
+ },
1527
+ {
1528
+ "question": "Take the last letters of each words in \"Thomas Cara Nita Frances\" and concatenate them.",
1529
+ "answer": "saas"
1530
+ },
1531
+ {
1532
+ "question": "Take the last letters of each words in \"Chuy Shane Jaden Maynor\" and concatenate them.",
1533
+ "answer": "yenr"
1534
+ },
1535
+ {
1536
+ "question": "Take the last letters of each words in \"Violeta Clay Janelle Mac\" and concatenate them.",
1537
+ "answer": "ayec"
1538
+ },
1539
+ {
1540
+ "question": "Take the last letters of each words in \"Guadalupe Ebony Wil Luke\" and concatenate them.",
1541
+ "answer": "eyle"
1542
+ },
1543
+ {
1544
+ "question": "Take the last letters of each words in \"Jenni Leonel Micheal Kat\" and concatenate them.",
1545
+ "answer": "illt"
1546
+ },
1547
+ {
1548
+ "question": "Take the last letters of each words in \"Yazmin Lea Rodrigo Sammy\" and concatenate them.",
1549
+ "answer": "naoy"
1550
+ },
1551
+ {
1552
+ "question": "Take the last letters of each words in \"Emely Chelsea Vladimir Tyrone\" and concatenate them.",
1553
+ "answer": "yare"
1554
+ },
1555
+ {
1556
+ "question": "Take the last letters of each words in \"Bill Santos Roxy Randi\" and concatenate them.",
1557
+ "answer": "lsyi"
1558
+ },
1559
+ {
1560
+ "question": "Take the last letters of each words in \"Daphne Lola Joanna Sheena\" and concatenate them.",
1561
+ "answer": "eaaa"
1562
+ },
1563
+ {
1564
+ "question": "Take the last letters of each words in \"Lara Greg Ethan Terrence\" and concatenate them.",
1565
+ "answer": "agne"
1566
+ },
1567
+ {
1568
+ "question": "Take the last letters of each words in \"Otto Marjorie Leonor Esther\" and concatenate them.",
1569
+ "answer": "oerr"
1570
+ },
1571
+ {
1572
+ "question": "Take the last letters of each words in \"Malcolm Hussein Franco Hugo\" and concatenate them.",
1573
+ "answer": "mnoo"
1574
+ },
1575
+ {
1576
+ "question": "Take the last letters of each words in \"Joyce Leroy Alyssa Maggie\" and concatenate them.",
1577
+ "answer": "eyae"
1578
+ },
1579
+ {
1580
+ "question": "Take the last letters of each words in \"Maryam Susy Trinity Pearl\" and concatenate them.",
1581
+ "answer": "myyl"
1582
+ },
1583
+ {
1584
+ "question": "Take the last letters of each words in \"Dustin Luiz Rolando Connor\" and concatenate them.",
1585
+ "answer": "nzor"
1586
+ },
1587
+ {
1588
+ "question": "Take the last letters of each words in \"Juan Manuel Benjamin Rory Rafael\" and concatenate them.",
1589
+ "answer": "lnyl"
1590
+ },
1591
+ {
1592
+ "question": "Take the last letters of each words in \"Fermin Berenice Marianne Eleanor\" and concatenate them.",
1593
+ "answer": "neer"
1594
+ },
1595
+ {
1596
+ "question": "Take the last letters of each words in \"Sabrina Pete Mary La\" and concatenate them.",
1597
+ "answer": "aeya"
1598
+ },
1599
+ {
1600
+ "question": "Take the last letters of each words in \"Estrella Madison Paco Rj\" and concatenate them.",
1601
+ "answer": "anoj"
1602
+ },
1603
+ {
1604
+ "question": "Take the last letters of each words in \"Niki Graham Vernon Beau\" and concatenate them.",
1605
+ "answer": "imnu"
1606
+ },
1607
+ {
1608
+ "question": "Take the last letters of each words in \"Bernie Melina Kelsey Rusty\" and concatenate them.",
1609
+ "answer": "eayy"
1610
+ },
1611
+ {
1612
+ "question": "Take the last letters of each words in \"Latoya Eliseo Trina Melisa\" and concatenate them.",
1613
+ "answer": "aoaa"
1614
+ },
1615
+ {
1616
+ "question": "Take the last letters of each words in \"Wesley Marcelo Rodolfo Erick\" and concatenate them.",
1617
+ "answer": "yook"
1618
+ },
1619
+ {
1620
+ "question": "Take the last letters of each words in \"Sean Rebeca Marco Sami\" and concatenate them.",
1621
+ "answer": "naoi"
1622
+ },
1623
+ {
1624
+ "question": "Take the last letters of each words in \"Blanca Monika Ervin Lori\" and concatenate them.",
1625
+ "answer": "aani"
1626
+ },
1627
+ {
1628
+ "question": "Take the last letters of each words in \"Lorraine Corinne Kate Floyd\" and concatenate them.",
1629
+ "answer": "eeed"
1630
+ },
1631
+ {
1632
+ "question": "Take the last letters of each words in \"Bruce Dena Kyla Robbie\" and concatenate them.",
1633
+ "answer": "eaae"
1634
+ },
1635
+ {
1636
+ "question": "Take the last letters of each words in \"Gee Joseluis Cory Stefanie\" and concatenate them.",
1637
+ "answer": "esye"
1638
+ },
1639
+ {
1640
+ "question": "Take the last letters of each words in \"Kristen Herbert Benny El\" and concatenate them.",
1641
+ "answer": "ntyl"
1642
+ },
1643
+ {
1644
+ "question": "Take the last letters of each words in \"Justin Neal Jt Lucia\" and concatenate them.",
1645
+ "answer": "nlta"
1646
+ },
1647
+ {
1648
+ "question": "Take the last letters of each words in \"Carmelo Tony Susan Sara\" and concatenate them.",
1649
+ "answer": "oyna"
1650
+ },
1651
+ {
1652
+ "question": "Take the last letters of each words in \"Boris Sabina Kaitlyn Cameron\" and concatenate them.",
1653
+ "answer": "sann"
1654
+ },
1655
+ {
1656
+ "question": "Take the last letters of each words in \"Elizabeth Mitch Brooke Jordyn\" and concatenate them.",
1657
+ "answer": "hhen"
1658
+ },
1659
+ {
1660
+ "question": "Take the last letters of each words in \"Delia Kathleen Mateo Marvin\" and concatenate them.",
1661
+ "answer": "anon"
1662
+ },
1663
+ {
1664
+ "question": "Take the last letters of each words in \"Bj Rigo Nigel Christian\" and concatenate them.",
1665
+ "answer": "joln"
1666
+ },
1667
+ {
1668
+ "question": "Take the last letters of each words in \"Kathy Elsa Alba Ivette\" and concatenate them.",
1669
+ "answer": "yaae"
1670
+ },
1671
+ {
1672
+ "question": "Take the last letters of each words in \"Cruz Wilber Marilu Malik\" and concatenate them.",
1673
+ "answer": "zruk"
1674
+ },
1675
+ {
1676
+ "question": "Take the last letters of each words in \"Sherry Ben Allison Anthony\" and concatenate them.",
1677
+ "answer": "ynny"
1678
+ },
1679
+ {
1680
+ "question": "Take the last letters of each words in \"Seth Dario Anne Jodie\" and concatenate them.",
1681
+ "answer": "hoee"
1682
+ },
1683
+ {
1684
+ "question": "Take the last letters of each words in \"Leon Payton Stefan Javi\" and concatenate them.",
1685
+ "answer": "nnni"
1686
+ },
1687
+ {
1688
+ "question": "Take the last letters of each words in \"Marcos Kerri Fabio Stephen\" and concatenate them.",
1689
+ "answer": "sion"
1690
+ },
1691
+ {
1692
+ "question": "Take the last letters of each words in \"Earl Rudy Harrison Nadine\" and concatenate them.",
1693
+ "answer": "lyne"
1694
+ },
1695
+ {
1696
+ "question": "Take the last letters of each words in \"Andres Mary Ann Sydney Dina\" and concatenate them.",
1697
+ "answer": "snya"
1698
+ },
1699
+ {
1700
+ "question": "Take the last letters of each words in \"Rohan Antoinette Nikki Aida\" and concatenate them.",
1701
+ "answer": "neia"
1702
+ },
1703
+ {
1704
+ "question": "Take the last letters of each words in \"Braulio Staci Jocelyn Brittany\" and concatenate them.",
1705
+ "answer": "oiny"
1706
+ },
1707
+ {
1708
+ "question": "Take the last letters of each words in \"Clayton Edison Debbie Elvira\" and concatenate them.",
1709
+ "answer": "nnea"
1710
+ },
1711
+ {
1712
+ "question": "Take the last letters of each words in \"Britt Tamara Elvis Nayeli\" and concatenate them.",
1713
+ "answer": "tasi"
1714
+ },
1715
+ {
1716
+ "question": "Take the last letters of each words in \"Cristina Saad Bridget Katie\" and concatenate them.",
1717
+ "answer": "adte"
1718
+ },
1719
+ {
1720
+ "question": "Take the last letters of each words in \"Stella Janis Darren Lena\" and concatenate them.",
1721
+ "answer": "asna"
1722
+ },
1723
+ {
1724
+ "question": "Take the last letters of each words in \"Charles Emilio Vivek Steve\" and concatenate them.",
1725
+ "answer": "soke"
1726
+ },
1727
+ {
1728
+ "question": "Take the last letters of each words in \"Sal Scott Myrna Maximo\" and concatenate them.",
1729
+ "answer": "ltao"
1730
+ },
1731
+ {
1732
+ "question": "Take the last letters of each words in \"Mia Art Samantha Lety\" and concatenate them.",
1733
+ "answer": "atay"
1734
+ },
1735
+ {
1736
+ "question": "Take the last letters of each words in \"Miguel Angel Saul Brady Darryl\" and concatenate them.",
1737
+ "answer": "llyl"
1738
+ },
1739
+ {
1740
+ "question": "Take the last letters of each words in \"Maria Elena Dewayne Mj Elliott\" and concatenate them.",
1741
+ "answer": "aejt"
1742
+ },
1743
+ {
1744
+ "question": "Take the last letters of each words in \"Antoine Javier Brett Stewart\" and concatenate them.",
1745
+ "answer": "ertt"
1746
+ },
1747
+ {
1748
+ "question": "Take the last letters of each words in \"Amanda Geoff Belinda Gaurav\" and concatenate them.",
1749
+ "answer": "afav"
1750
+ },
1751
+ {
1752
+ "question": "Take the last letters of each words in \"Krishna Catalina Eileen Teddy\" and concatenate them.",
1753
+ "answer": "aany"
1754
+ },
1755
+ {
1756
+ "question": "Take the last letters of each words in \"Mar\u00eda Fredy Bridgette Kenya\" and concatenate them.",
1757
+ "answer": "ayea"
1758
+ },
1759
+ {
1760
+ "question": "Take the last letters of each words in \"Neil Dani Eddie Marcelino\" and concatenate them.",
1761
+ "answer": "lieo"
1762
+ },
1763
+ {
1764
+ "question": "Take the last letters of each words in \"Alain Jerome Kristina Ida\" and concatenate them.",
1765
+ "answer": "neaa"
1766
+ },
1767
+ {
1768
+ "question": "Take the last letters of each words in \"Drew Jhon Jayden Cliff\" and concatenate them.",
1769
+ "answer": "wnnf"
1770
+ },
1771
+ {
1772
+ "question": "Take the last letters of each words in \"Trish Vero Victor Clemente\" and concatenate them.",
1773
+ "answer": "hore"
1774
+ },
1775
+ {
1776
+ "question": "Take the last letters of each words in \"Antonio Ronald Ken Rachael\" and concatenate them.",
1777
+ "answer": "odnl"
1778
+ },
1779
+ {
1780
+ "question": "Take the last letters of each words in \"Juliet Ricardo Tita Dianna\" and concatenate them.",
1781
+ "answer": "toaa"
1782
+ },
1783
+ {
1784
+ "question": "Take the last letters of each words in \"Larissa Shawna Alma Paulette\" and concatenate them.",
1785
+ "answer": "aaae"
1786
+ },
1787
+ {
1788
+ "question": "Take the last letters of each words in \"Donald Lia Gonzalo Lily\" and concatenate them.",
1789
+ "answer": "daoy"
1790
+ },
1791
+ {
1792
+ "question": "Take the last letters of each words in \"Evelyn Mason Shelby Aldo\" and concatenate them.",
1793
+ "answer": "nnyo"
1794
+ },
1795
+ {
1796
+ "question": "Take the last letters of each words in \"Georgina Joshua Lindsey C\u00e9sar\" and concatenate them.",
1797
+ "answer": "aayr"
1798
+ },
1799
+ {
1800
+ "question": "Take the last letters of each words in \"King Edith Traci Flor\" and concatenate them.",
1801
+ "answer": "ghir"
1802
+ },
1803
+ {
1804
+ "question": "Take the last letters of each words in \"Courtney Ann John Fer\" and concatenate them.",
1805
+ "answer": "ynnr"
1806
+ },
1807
+ {
1808
+ "question": "Take the last letters of each words in \"Debby Geoffrey Alana Silvestre\" and concatenate them.",
1809
+ "answer": "yyae"
1810
+ },
1811
+ {
1812
+ "question": "Take the last letters of each words in \"Stevie Julie Leonard Karina\" and concatenate them.",
1813
+ "answer": "eeda"
1814
+ },
1815
+ {
1816
+ "question": "Take the last letters of each words in \"Juana Chip Lila Jayme\" and concatenate them.",
1817
+ "answer": "apae"
1818
+ },
1819
+ {
1820
+ "question": "Take the last letters of each words in \"Jon Heath Reynaldo Catherine\" and concatenate them.",
1821
+ "answer": "nhoe"
1822
+ },
1823
+ {
1824
+ "question": "Take the last letters of each words in \"Beatrice Taylor Juan Carlos Kaitlin\" and concatenate them.",
1825
+ "answer": "ersn"
1826
+ },
1827
+ {
1828
+ "question": "Take the last letters of each words in \"Christina Edna Ileana Lynette\" and concatenate them.",
1829
+ "answer": "aaae"
1830
+ },
1831
+ {
1832
+ "question": "Take the last letters of each words in \"Eugenia Kellie Quentin Mike\" and concatenate them.",
1833
+ "answer": "aene"
1834
+ },
1835
+ {
1836
+ "question": "Take the last letters of each words in \"Yasmin Yesenia Carmela Susie\" and concatenate them.",
1837
+ "answer": "naae"
1838
+ },
1839
+ {
1840
+ "question": "Take the last letters of each words in \"Oswaldo Jos\u00e9 Luis Sheldon Tara\" and concatenate them.",
1841
+ "answer": "osna"
1842
+ },
1843
+ {
1844
+ "question": "Take the last letters of each words in \"Debora Jayson Donna Sai\" and concatenate them.",
1845
+ "answer": "anai"
1846
+ },
1847
+ {
1848
+ "question": "Take the last letters of each words in \"Jermaine Pat Tammie Olivia\" and concatenate them.",
1849
+ "answer": "etea"
1850
+ },
1851
+ {
1852
+ "question": "Take the last letters of each words in \"Sunny Trisha Paul Guy\" and concatenate them.",
1853
+ "answer": "yaly"
1854
+ },
1855
+ {
1856
+ "question": "Take the last letters of each words in \"Martha Elijah Dominic Tonya\" and concatenate them.",
1857
+ "answer": "ahca"
1858
+ },
1859
+ {
1860
+ "question": "Take the last letters of each words in \"Theo Sheila Mariana Esmeralda\" and concatenate them.",
1861
+ "answer": "oaaa"
1862
+ },
1863
+ {
1864
+ "question": "Take the last letters of each words in \"Cristal Roxana Rajesh Danny\" and concatenate them.",
1865
+ "answer": "lahy"
1866
+ },
1867
+ {
1868
+ "question": "Take the last letters of each words in \"Geovanny Tom Katelyn Jennifer\" and concatenate them.",
1869
+ "answer": "ymnr"
1870
+ },
1871
+ {
1872
+ "question": "Take the last letters of each words in \"Buddy Violet Johana Tina\" and concatenate them.",
1873
+ "answer": "ytaa"
1874
+ },
1875
+ {
1876
+ "question": "Take the last letters of each words in \"Jeremiah Kelley Josue Veronica\" and concatenate them.",
1877
+ "answer": "hyea"
1878
+ },
1879
+ {
1880
+ "question": "Take the last letters of each words in \"Cassie Clifton Erik Everardo\" and concatenate them.",
1881
+ "answer": "enko"
1882
+ },
1883
+ {
1884
+ "question": "Take the last letters of each words in \"Pauline Kerry Jeannette Hope\" and concatenate them.",
1885
+ "answer": "eyee"
1886
+ },
1887
+ {
1888
+ "question": "Take the last letters of each words in \"Brooklyn Dawn Tay Gene\" and concatenate them.",
1889
+ "answer": "nnye"
1890
+ },
1891
+ {
1892
+ "question": "Take the last letters of each words in \"Chloe Flaco Theodore Nathalie\" and concatenate them.",
1893
+ "answer": "eoee"
1894
+ },
1895
+ {
1896
+ "question": "Take the last letters of each words in \"Nichole Heriberto Darian Peter\" and concatenate them.",
1897
+ "answer": "eonr"
1898
+ },
1899
+ {
1900
+ "question": "Take the last letters of each words in \"Janette Stacey Ronny Kim\" and concatenate them.",
1901
+ "answer": "eyym"
1902
+ },
1903
+ {
1904
+ "question": "Take the last letters of each words in \"Jazmine Carmen Kitty Nina\" and concatenate them.",
1905
+ "answer": "enya"
1906
+ },
1907
+ {
1908
+ "question": "Take the last letters of each words in \"Donovan Lonnie Jairo Mariah\" and concatenate them.",
1909
+ "answer": "neoh"
1910
+ },
1911
+ {
1912
+ "question": "Take the last letters of each words in \"Nidia Rachelle Lauren Shelia\" and concatenate them.",
1913
+ "answer": "aena"
1914
+ },
1915
+ {
1916
+ "question": "Take the last letters of each words in \"Angela Krista Cora Denisse\" and concatenate them.",
1917
+ "answer": "aaae"
1918
+ },
1919
+ {
1920
+ "question": "Take the last letters of each words in \"Chandler Martina Leonardo Gregg\" and concatenate them.",
1921
+ "answer": "raog"
1922
+ },
1923
+ {
1924
+ "question": "Take the last letters of each words in \"Agnes Adrienne Haley Riley\" and concatenate them.",
1925
+ "answer": "seyy"
1926
+ },
1927
+ {
1928
+ "question": "Take the last letters of each words in \"Albert Felicia Margo Patty\" and concatenate them.",
1929
+ "answer": "taoy"
1930
+ },
1931
+ {
1932
+ "question": "Take the last letters of each words in \"Noelia Cassidy Ashok Francisco\" and concatenate them.",
1933
+ "answer": "ayko"
1934
+ },
1935
+ {
1936
+ "question": "Take the last letters of each words in \"Charmaine Vic Homero Jeanine\" and concatenate them.",
1937
+ "answer": "ecoe"
1938
+ },
1939
+ {
1940
+ "question": "Take the last letters of each words in \"Glen Ariana Reggie Polo\" and concatenate them.",
1941
+ "answer": "naeo"
1942
+ },
1943
+ {
1944
+ "question": "Take the last letters of each words in \"Wilmer Valerie Melissa Eloisa\" and concatenate them.",
1945
+ "answer": "reaa"
1946
+ },
1947
+ {
1948
+ "question": "Take the last letters of each words in \"Marco Antonio Suzette Roland Isabel\" and concatenate them.",
1949
+ "answer": "oedl"
1950
+ },
1951
+ {
1952
+ "question": "Take the last letters of each words in \"Tim Candace Cecil Misael\" and concatenate them.",
1953
+ "answer": "mell"
1954
+ },
1955
+ {
1956
+ "question": "Take the last letters of each words in \"Hailey Avi Bree Samira\" and concatenate them.",
1957
+ "answer": "yiea"
1958
+ },
1959
+ {
1960
+ "question": "Take the last letters of each words in \"Jason Betty Elisa Jay\" and concatenate them.",
1961
+ "answer": "nyay"
1962
+ },
1963
+ {
1964
+ "question": "Take the last letters of each words in \"Oscar George Sanjay Haydee\" and concatenate them.",
1965
+ "answer": "reye"
1966
+ },
1967
+ {
1968
+ "question": "Take the last letters of each words in \"Andre Oralia Carrie Bruno\" and concatenate them.",
1969
+ "answer": "eaeo"
1970
+ },
1971
+ {
1972
+ "question": "Take the last letters of each words in \"Leigh Mindy Rocky Lex\" and concatenate them.",
1973
+ "answer": "hyyx"
1974
+ },
1975
+ {
1976
+ "question": "Take the last letters of each words in \"Jody Juan Rebekah Kaylee\" and concatenate them.",
1977
+ "answer": "ynhe"
1978
+ },
1979
+ {
1980
+ "question": "Take the last letters of each words in \"Liliana Quincy Bart Makayla\" and concatenate them.",
1981
+ "answer": "ayta"
1982
+ },
1983
+ {
1984
+ "question": "Take the last letters of each words in \"Mya Fernando Bubba Tommy\" and concatenate them.",
1985
+ "answer": "aoay"
1986
+ },
1987
+ {
1988
+ "question": "Take the last letters of each words in \"Emanuel Cheyenne Perez Yvette\" and concatenate them.",
1989
+ "answer": "leze"
1990
+ },
1991
+ {
1992
+ "question": "Take the last letters of each words in \"Len Marquis Kylie Sandra\" and concatenate them.",
1993
+ "answer": "nsea"
1994
+ },
1995
+ {
1996
+ "question": "Take the last letters of each words in \"Rich Parker Edward Milagros\" and concatenate them.",
1997
+ "answer": "hrds"
1998
+ },
1999
+ {
2000
+ "question": "Take the last letters of each words in \"Marlen Sonja Anita Dale\" and concatenate them.",
2001
+ "answer": "naae"
2002
+ }
2003
+ ]
2004
+ }
datatype_sampling.py ADDED
@@ -0,0 +1,223 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import json
2
+ import random
3
+ from llm_utils import *
4
+
5
+ DATA_PATHS = {
6
+ "addsub": "./dataset/AddSub/AddSub.json",
7
+ #"aqua": "./dataset/AQuA/test.json",
8
+ #"bigbench_date": "./dataset/Bigbench_Date/task.json",
9
+ #"object_tracking": "./dataset/Bigbench_object_tracking/task.json",
10
+ "coin_flip": "./dataset/coin_flip/coin_flip.json",
11
+ "commonsensqa": "./dataset/CommonsenseQA/dev_rand_split.jsonl",
12
+ "gsm8k": "./dataset/grade-school-math/test.jsonl",
13
+ "last_letters": "./dataset/last_letters/last_letters.json",
14
+ "multiarith": "./dataset/MultiArith/MultiArith.json",
15
+ "strategyqa": "./dataset/StrategyQA/task.json",
16
+ "singleeq": "./dataset/SingleEq/questions.json",
17
+ "svamp": "./dataset/SVAMP/SVAMP.json",
18
+ }
19
+
20
+
21
+ # https://review-of-my-life.blogspot.com/2017/11/python-dict-shuffle.html
22
+ def shuffleDict(d):
23
+ keys = list(d.keys())
24
+ random.shuffle(keys)
25
+ [(key, d[key]) for key in keys]
26
+ random.shuffle(keys)
27
+ [(key, d[key]) for key in keys]
28
+ random.shuffle(keys)
29
+ keys = [(key, d[key]) for key in keys]
30
+ #keys = d(keys)
31
+ return dict(keys)
32
+
33
+
34
+ def sample_type_demo(num_type=1):
35
+ decoder = json.JSONDecoder()
36
+ all_demo = {}
37
+
38
+ for data, datapath in DATA_PATHS.items():
39
+ '''
40
+ if data == "aqua":
41
+ questions = []
42
+ with open(datapath) as f:
43
+ lines = f.readlines()
44
+ for line in lines:
45
+ json_res = decoder.raw_decode(line)[0]
46
+ choice = "(" + "(".join(json_res["options"])
47
+ choice = choice.replace("(", " (").replace(")", ") ")
48
+ choice = "Answer Choices:" + choice
49
+ questions.append(json_res["question"].strip() + " " + choice)
50
+ questions = random.sample(questions, num_type)
51
+ if data not in all_demo.keys():
52
+ all_demo[data] = []
53
+ for que in questions:
54
+ que_string = "Question: " + que + "\n"
55
+ all_demo[data].append(que_string)
56
+ #demo = "Question: " + que + "\n" + "Type: " + data + "\n\n"
57
+ #all_demo.append(demo)
58
+ '''
59
+
60
+ if data == "gsm8k":
61
+ questions = []
62
+ with open(datapath) as f:
63
+ lines = f.readlines()
64
+ for line in lines:
65
+ json_res = decoder.raw_decode(line)[0]
66
+ questions.append(json_res["question"].strip())
67
+ questions = random.sample(questions, num_type)
68
+ if data not in all_demo.keys():
69
+ all_demo[data] = []
70
+ for que in questions:
71
+ que_string = "Question: " + que + "\n"
72
+ all_demo[data].append(que_string)
73
+ #demo = "Question: " + que + "\n" + "Type: " + data + "\n\n"
74
+ #all_demo.append(demo)
75
+
76
+
77
+ elif data == "commonsensqa":
78
+ questions = []
79
+ with open(datapath) as f:
80
+ lines = f.readlines()
81
+ for line in lines:
82
+ json_res = decoder.raw_decode(line)[0]
83
+ choice = "Answer Choices:"
84
+ for c in json_res["question"]["choices"]:
85
+ choice += " ("
86
+ choice += c["label"]
87
+ choice += ") "
88
+ choice += c["text"]
89
+ questions.append(json_res["question"]["stem"].strip() + " " + choice)
90
+ questions = random.sample(questions, num_type)
91
+ if data not in all_demo.keys():
92
+ all_demo[data] = []
93
+ for que in questions:
94
+ que_string = "Question: " + que + "\n"
95
+ all_demo[data].append(que_string)
96
+ #demo = "Question: " + que + "\n" + "Type: " + data + "\n\n"
97
+ #all_demo.append(demo)
98
+
99
+ elif data in ("addsub", "multiarith", "singleeq"):
100
+ questions = []
101
+ with open(datapath) as f:
102
+ json_data = json.load(f)
103
+ for line in json_data:
104
+ q = line["sQuestion"].strip()
105
+ questions.append(q)
106
+ questions = random.sample(questions, num_type)
107
+ if data not in all_demo.keys():
108
+ all_demo[data] = []
109
+ for que in questions:
110
+ que_string = "Question: " + que + "\n"
111
+ all_demo[data].append(que_string)
112
+ #demo = "Question: " + que + "\n" + "Type: " + data + "\n\n"
113
+ #all_demo.append(demo)
114
+
115
+ elif data == "strategyqa":
116
+ questions = []
117
+ with open(datapath) as f:
118
+ json_data = json.load(f)["examples"]
119
+ for line in json_data:
120
+ q = line["input"].strip()
121
+ questions.append(q)
122
+ questions = random.sample(questions, num_type)
123
+ if data not in all_demo.keys():
124
+ all_demo[data] = []
125
+ for que in questions:
126
+ que_string = "Question: " + que + "\n"
127
+ all_demo[data].append(que_string)
128
+ #demo = "Question: " + que + "\n" + "Type: " + data + "\n\n"
129
+ #all_demo.append(demo)
130
+
131
+ elif data == "svamp":
132
+ questions = []
133
+ with open(datapath) as f:
134
+ json_data = json.load(f)
135
+ for line in json_data:
136
+ q = line["Body"].strip() + " " + line["Question"].strip()
137
+ questions.append(q)
138
+ questions = random.sample(questions, num_type)
139
+ if data not in all_demo.keys():
140
+ all_demo[data] = []
141
+ for que in questions:
142
+ que_string = "Question: " + que + "\n"
143
+ all_demo[data].append(que_string)
144
+ #demo = "Question: " + que + "\n" + "Type: " + data + "\n\n"
145
+ #all_demo.append(demo)
146
+
147
+
148
+ elif data in ("coin_flip", "last_letters"):
149
+ questions = []
150
+ with open(datapath) as f:
151
+ json_data = json.load(f)
152
+ json_data = json_data["examples"]
153
+ for line in json_data:
154
+ q = line["question"]
155
+ questions.append(q)
156
+ questions = random.sample(questions, num_type)
157
+ if data not in all_demo.keys():
158
+ all_demo[data] = []
159
+ for que in questions:
160
+ que_string = "Question: " + que + "\n"
161
+ all_demo[data].append(que_string)
162
+ #demo = "Question: " + que + "\n" + "Type: " + data + "\n\n"
163
+ #all_demo.append(demo)
164
+
165
+
166
+ #random.shuffle(all_demo)
167
+ #all_demo = "".join(all_demo)
168
+
169
+ return all_demo
170
+
171
+
172
+ def type_for_dataset(dataset_name):
173
+ if dataset_name in ("addsub", "aqua", "gsm8k", "multiarith", "singleeq", "svamp"):
174
+ type = "arithmetic"
175
+ elif dataset_name == "commonsensqa":
176
+ type = "commonsense-mc"
177
+ elif dataset_name == "strategyqa":
178
+ type = "commonsense-verify"
179
+ elif dataset_name == "coin_flip":
180
+ type = "symbolic-coin"
181
+ elif dataset_name == "last_letters":
182
+ type = "symbolic-letter"
183
+ #elif dataset_name in ("commonsensqa", "strategyqa"):
184
+ # type = "commonsense"
185
+ #elif dataset_name in ("coin_flip", "last_letters"):
186
+ # type = "symbolic"
187
+ else:
188
+ type = None
189
+ return type
190
+
191
+ def get_type_prompt(all_demo):
192
+ total_prompt = []
193
+ for dataset_name, question_string in all_demo.items():
194
+ demo = question_string[0] + "Type: " + type_for_dataset(dataset_name) + "\n\n"
195
+ total_prompt += demo
196
+ total_prompt = "".join(total_prompt)
197
+ return total_prompt
198
+
199
+ def identify_type(question, engine):
200
+ with open('./demos/type', 'r') as f:
201
+ typedemo = f.read()
202
+ typedemo += "Question: " + question + "\nOutput the Type, choosing from <'arithmetic','commonsense-mc','commonsense-verify','symbolic-coin', 'symbolic-letter'>: "
203
+ response = decoder_for_gpt3(typedemo, 32, temperature=0, engine=engine)
204
+ response = response.strip().lower()
205
+
206
+ return response
207
+
208
+
209
+ if __name__ == "__main__":
210
+ all_demo = sample_type_demo(num_type=1)
211
+ #print(all_demo)
212
+ total_prompt = get_type_prompt(all_demo)
213
+ print(total_prompt)
214
+ with open('./demos/type', 'w') as f:
215
+ data_json = json.dumps(total_prompt)
216
+ f.write(data_json + "\n")
217
+ #with open('./demos/type', 'r') as f:
218
+ # data = f.read()
219
+ # print(type(data))
220
+ question = "Did the 40th president of the United States forward lolcats to his friends?"
221
+ engine = "text-davinci-003"
222
+ res = identify_type(question, engine)
223
+ print(res)
demo_utils.py ADDED
@@ -0,0 +1,35 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import json
3
+
4
def self_construction(datatype):
    """Inspect the demo and data pools for *datatype*.

    Prints the demo-pool path if it exists, then iterates the JSON-lines
    data-pool file for the datatype (one JSON record per line).

    Parameters
    ----------
    datatype : str
        Dataset/type identifier used to build the pool file paths.
    """
    demo_dir = "./demo_pool/{datatype}_demo".format(datatype=datatype)
    data_dir = "./data_pool/{datatype}".format(datatype=datatype)
    if os.path.exists(demo_dir):
        print(demo_dir)
    if os.path.exists(data_dir):
        with open(data_dir, 'r') as f:
            # Bug fix: the committed line was truncated mid-statement
            # (`for line in f.readlines`), a SyntaxError. Completed minimally
            # to parse each JSONL record; TODO(review): confirm the intended
            # per-record processing.
            for line in f.readlines():
                json.loads(line)
13
+
14
self_construction('strategyqa')

# Example record layout for entries in ./data_pool/<datatype> (JSON lines).
# Field values here are placeholders used for a manual round-trip test.
single_data = {
    'question': "asfawreg",
    'datatype': "dfawds",
    'base_ans': "",
    'base_cots': "",
    'adapter_ans': "",
    'revised_cots': "",
    'retrieved_knowledge': "",
    'feedback': ""
}

data_dir = "./data_pool/{datatype}".format(datatype="test")
#with open(data_dir, 'a') as f:
#    data_json = json.dumps(single_data)
#    f.write(data_json + "\n")

# NOTE(review): the append above is commented out, so this read raises
# FileNotFoundError unless ./data_pool/test already exists -- confirm
# whether the write should be re-enabled before the read.
with open(data_dir, 'r') as f:
    for line in f.readlines():
        data_dict = json.loads(line)
        print(type(data_dict))
experiment/coin_flip ADDED
@@ -0,0 +1 @@
 
 
1
+ {"question": "Q: A coin is heads up. Jeff does not flip the coin. Jen flips the coin. Giselle flips the coin. Noel does not flip the coin. Is the coin still heads up? Note that \"flip\" here means \"reverse\".\nA:", "gold_ans": "yes", "rationale": " Jeff does not flip the coin, so it remains heads up. Jen flips the coin, so now it is tails up. Giselle flips the coin, so now it is heads up again. Noel does not flip the coin, so it remains heads up. So the final answer is yes, the coin is still heads up.", "pred_ans": "yes", "wrap_que": "Q: A coin is heads up. Clarissa does not flip the coin. Shauna does not flip the coin. Alexis does not flip the coin. Branden flips the coin. Is the coin still heads up? Note that \"flip\" here means \"reverse\".\nA: Let's think step by step. Clarissa does not flip the coin. This means that the state of the coin does not change. The coin is still heads up. Shauna does not flip the coin. The state of the coin does not change. The coin is still heads up. Alexis does not flip the coin. The state of the coin does not change. The coin is still heads up. Branden flips the coin. This means that the state of the coin is reversed. The coin is now tails up. The answer is no.\n\nQ: A coin is heads up. Domingo does not flip the coin. Briana flips the coin. Michael flips the coin. Joan does not flip the coin. Is the coin still heads up? Note that \"flip\" here means \"reverse\".\nA: Let's think step by step. Domingo does not flip the coin, so the coin remains heads up. Briana flips the coin, so the coin is now tails up. Michael flips the coin, so the coin is now heads up again. Joan does not flip the coin, so the coin remains heads up. The answer is yes.\n\nQ: A coin is heads up. Ramona does not flip the coin. Lucy flips the coin. Gail does not flip the coin. Octavio flips the coin. Is the coin still heads up? Note that \"flip\" here means \"reverse\".\nA: Let's think step by step. Ramona does not flip the coin, so the coin is still heads up. 
Lucy flips the coin, so the coin is now tails up. Gail does not flip the coin, so the coin is still tails up. Octavio flips the coin, so the coin is now heads up again. The answer is yes.\n\nQ: A coin is heads up. Brandon does not flip the coin. Ivonne does not flip the coin. Jefferson flips the coin. Isabella does not flip the coin. Is the coin still heads up? Note that \"flip\" here means \"reverse\".\nA: Let's think step by step. Brandon does not flip the coin, so the coin remains heads up. Ivonne does not flip the coin, so the coin remains heads up. Jefferson flips the coin, so the coin is now tails up. Isabella does not flip the coin, so the coin remains tails up. The answer is no.\n\nQ: A coin is heads up. Reginald does not flip the coin. Franky flips the coin. Kira flips the coin. Gordon flips the coin. Is the coin still heads up? Note that \"flip\" here means \"reverse\".\nA: Let's think step by step. Reginald does not flip the coin, so the coin is still heads up. Franky flips the coin, so the coin is now tails up. Kira flips the coin, so the coin is now heads up again. Gordon flips the coin, so the coin is now tails up. The answer is no.\n\nQ: A coin is heads up. Shari flips the coin. Bella flips the coin. Liza does not flip the coin. Maira does not flip the coin. Is the coin still heads up? Note that \"flip\" here means \"reverse\".\nA: Let's think step by step. Shari flips the coin. The coin is now tails up. Bella flips the coin. The coin is now heads up. Liza does not flip the coin. The coin is still heads up. Maira does not flip the coin. The coin is still heads up. The answer is yes.\n\nQ: A coin is heads up. Claudia flips the coin. Cole does not flip the coin. Matthew does not flip the coin. Juan Pablo does not flip the coin. Is the coin still heads up? Note that \"flip\" here means \"reverse\".\nA: Let's think step by step. Claudia flips the coin, so it is now tails up. Cole does not flip the coin, so it remains tails up. 
Matthew does not flip the coin, so it remains tails up. Juan Pablo does not flip the coin, so it remains tails up. So the answer is no, the coin is not heads up. The answer is no.\n\nQ: A coin is heads up. Joyce flips the coin. Leroy flips the coin. Alyssa does not flip the coin. Maggie does not flip the coin. Is the coin still heads up? Note that \"flip\" here means \"reverse\".\nA: Let's think step by step. Joyce flips the coin, so now it is tails up. Leroy flips the coin, so now it is heads up again. Alyssa does not flip the coin, so it remains tails up. Maggie does not flip the coin, so it remains heads up. So the final answer is yes, the coin is still heads up. The answer is yes.\n\nQ: A coin is heads up. Jeff does not flip the coin. Jen flips the coin. Giselle flips the coin. Noel does not flip the coin. Is the coin still heads up? Note that \"flip\" here means \"reverse\".\nA: Let's think step by step."}
experiment/commonsensqa ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ {"question": "Q: What are candles good for eliminating? Answer Choices: (A) shelf (B) board (C) church (D) table (E) dark\nA:", "gold_ans": "E", "rationale": " Candles are often used to provide light in dark places. They can also be used to eliminate bad smells. However, they are not typically used to eliminate shelves, boards, churches, or tables. The most likely answer is that candles are good for eliminating darkness. The answer is E.", "self_retrieve_knowledge": " Candles are often used to eliminate darkness, as they provide light."}
2
+ {"question": "Q: What might a couple have a lot of when they are deciding on stopping being married to each other? Answer Choices: (A) pleasure (B) detachment (C) exercise (D) bankruptcy (E) fights\nA:", "gold_ans": "E", "rationale": " A couple might have a lot of pleasure when they are first married, but this would likely decrease over time. A couple might have a lot of detachment if they are not very close to each other. A couple might have a lot of exercise if they are very active. A couple might have a lot of bankruptcy if they are having financial problems. A couple might have a lot of fights if they are constantly arguing. The most likely answer is that a couple might have a lot of fights if they are constantly arguing. The answer is E.", "self_retrieve_knowledge": " A couple going through a divorce might have a lot of fights with each other."}
experiment/strategyqa ADDED
@@ -0,0 +1,23 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {"question": "Q: Would a baby gray whale fit in a tractor-trailer?\nA:", "gold_ans": "yes", "rationale": " A tractor-trailer is a type of truck. Trucks are typically much larger than cars. Gray whales are a type of whale. Whales are typically much larger than humans. Therefore, it is very unlikely that a baby gray whale would be able to fit inside a tractor-trailer. The answer is no.", "self_retrieve_knowledge": "\n\nA tractor-trailer is a large truck that is used to transport goods over long distances.\n\nA baby gray whale is a very large mammal that is born weighing about 3,000 pounds.\n\nIt is unlikely that a baby gray whale would fit in a tractor-trailer."}
2
+ {"question": "Q: Would a baby gray whale fit in a tractor-trailer?\nA:", "gold_ans": "yes", "rationale": " A tractor-trailer is a type of truck. Trucks are typically much larger than cars. Gray whales are a type of whale. Whales are typically much larger than humans. Therefore, it is very unlikely that a baby gray whale would be able to fit inside a tractor-trailer. The answer is no.", "self_retrieve_knowledge": " A tractor-trailer is a large truck that is used to transport goods over long distances. A baby gray whale is a very large mammal that is born weighing about 3,000 pounds. It is unlikely that a baby gray whale would fit in a tractor-trailer."}
3
+ {"question": "Q: Would a baby gray whale fit in a tractor-trailer?\nA:", "gold_ans": "yes", "rationale": " A tractor-trailer is a type of truck. Trucks are typically much larger than cars. Gray whales are a type of whale. Whales are typically much larger than humans. Therefore, it is very unlikely that a baby gray whale would be able to fit inside a tractor-trailer. The answer is no.", "self_retrieve_knowledge": " A tractor-trailer is a large truck that is used to transport goods over long distances. A baby gray whale is a very large mammal that is born weighing about 3,000 pounds. It is unlikely that a baby gray whale would fit in a tractor-trailer."}
4
+ {"question": "Q: Would a triples tandem bike support Apollo 15 crew?\nA:", "gold_ans": "yes", "rationale": " First, the Apollo 15 crew was made up of three astronauts. Second, a tandem bike is a bike designed for two people. Third, a triples tandem bike is a bike designed for three people. So, yes, a triples tandem bike would support the Apollo 15 crew. The answer is yes.", "self_retrieve_knowledge": " The Apollo 15 crew was composed of three astronauts: Commander David Scott Module Pilot Alfred Worden Lunar Module Pilot James Irwin A tandem bike is a bicycle designed for two or more people to ride together."}
5
+ {"question": "Q: Would a triples tandem bike support Apollo 15 crew?\nA:", "gold_ans": "yes", "rationale": " First, the Apollo 15 crew was made up of three astronauts. Second, a tandem bike is a bike designed for two people. Third, a triples tandem bike is a bike designed for three people. So, yes, a triples tandem bike would support the Apollo 15 crew. The answer is yes.", "self_retrieve_knowledge": " The Apollo 15 crew was composed of three astronauts: Commander David Scott Module Pilot Alfred Worden Lunar Module Pilot James Irwin A tandem bike is a bicycle designed for two or more people to ride together."}
6
+ {"question": "Q: Would a triples tandem bike support Apollo 15 crew?\nA:", "gold_ans": "yes", "rationale": {"id": "cmpl-7a6PwDqHh7zWadOpTVx7In6bK5Xg5", "object": "text_completion", "created": 1688838832, "model": "text-davinci-002", "choices": [{"text": " First, a triples tandem bike is a type of bicycle. Second, the Apollo 15 crew was a group of astronauts. Third, astronauts need space suits to survive in space. fourth, a triples tandem bike cannot support a space suit. Therefore, a triples tandem bike would not be able to support the Apollo 15 crew. The answer is no.", "index": 0, "logprobs": {"tokens": [" First", ",", " a", " tri", "ples", " tandem", " bike", " is", " a", " type", " of", " bicycle", ".", " Second", ",", " the", " Apollo", " 15", " crew", " was", " a", " group", " of", " astronauts", ".", " Third", ",", " astronauts", " need", " space", " suits", " to", " survive", " in", " space", ".", " fourth", ",", " a", " tri", "ples", " tandem", " bike", " cannot", " support", " a", " space", " suit", ".", " Therefore", ",", " a", " tri", "ples", " tandem", " bike", " would", " not", " be", " able", " to", " support", " the", " Apollo", " 15", " crew", ".", " The", " answer", " is", " no", "."], "token_logprobs": [-0.8718564, -7.3865966e-05, -1.4230134, -0.040728286, -6.146545e-05, -3.0947078e-05, -0.00014349136, -0.028313229, -0.00013180608, -1.058304, 0.0, -0.9092839, -0.12754472, -0.0031381205, 0.0, -0.0055594733, -0.00021717734, -9.055464e-07, -0.0015099256, -0.015452149, -0.004152835, -0.0037244384, 0.0, -0.058818117, -0.00032407563, -0.0068710335, 0.0, -0.058770265, -0.8781163, -0.31531215, -0.0022832155, -0.038135435, -0.09740238, -1.1430258e-06, -0.00012500932, -1.6642034e-05, -0.9039777, -5.4364154e-06, -0.09768034, -0.1860828, -1.8841804e-07, -2.2159093e-06, -0.009463741, -0.12772085, -0.025510417, -0.007841354, -0.00019964804, -8.653179e-06, -4.277735e-07, -0.033897083, -6.827632e-08, -0.124563806, -1.9741461e-05, -7.8540467e-07, 
-6.1516675e-06, -0.00019320888, -0.19388966, -4.277735e-07, -0.29437375, -1.8841804e-07, -8.2927705e-05, -1.1430258e-06, -0.0003581233, -3.529076e-06, -3.076318e-07, -2.987608e-05, -0.00018414622, -0.0029845848, -3.690939e-05, -4.277735e-07, -1.2231317e-05, -0.00017174664], "top_logprobs": [{" First": -0.8718564, " A": -1.3182055, " Apollo": -1.633083, " The": -2.6685808, " Tri": -4.06693}, {",": -7.3865966e-05, " we": -10.324949, " of": -10.406043, " off": -11.987839, " Apollo": -12.874407}, {" the": -0.7940862, " a": -1.4230134, " Apollo": -1.782648, " we": -2.54755, " let": -3.0418527}, {" tri": -0.040728286, " tandem": -3.2496037, " triple": -7.8863487, " bike": -8.19894, " Tri": -8.813452}, {"ples": -6.146545e-05, "kes": -9.734118, "pl": -14.567613, "athlon": -14.65022, "plex": -15.050489}, {" tandem": -3.0947078e-05, " bike": -10.410652, " bicycle": -14.840656, " team": -15.09104, " bikes": -16.78294}, {" bike": -0.00014349136, " bicycle": -9.112308, " is": -10.985268, "bike": -11.926932, " Bike": -12.25907}, {" is": -0.028313229, " has": -3.9638035, " would": -5.260443, " supports": -5.9122143, " can": -7.9997125}, {" a": -0.00013180608, " designed": -9.321181, " not": -10.242929, " built": -13.041463, " an": -13.266399}, {" bike": -0.574484, " type": -1.058304, " bicycle": -2.6329384, " three": -4.1066504, " very": -7.7563615}, {" of": 0.0, " bike": -19.050817, " bicycle": -20.306906, "of": -20.609203, " a": -20.719542}, {" bike": -0.5159465, " bicycle": -0.9092839, " bicycles": -8.957723, " Bicycle": -10.169273, "bike": -10.74493}, {".": -0.12754472, " that": -2.647583, " designed": -3.324072, " built": -4.3595066, " made": -9.487524}, {" Second": -0.0031381205, " Apollo": -6.2843633, "Second": -7.588921, " Next": -8.309804, " second": -8.545538}, {",": 0.0, " Apollo": -18.595053, ".": -21.264082, "<|endoftext|>": -21.457735, " ,": -22.659498}, {" the": -0.0055594733, " Apollo": -5.231714, " bicycles": -8.611829, " bikes": -11.7891245, " The": -12.677102}, 
{" Apollo": -0.00021717734, " crew": -8.477229, "Ap": -11.937096, " bike": -13.749551, " bicycle": -14.935042}, {" 15": -9.055464e-07, "15": -14.302689, " crew": -15.311928, " 13": -17.22115, " 11": -17.288515}, {" crew": -0.0015099256, " was": -7.11889, " Crew": -7.5462546, "crew": -9.33173, " mission": -9.941453}, {" was": -0.015452149, " were": -4.707974, " consisted": -5.197819, " consists": -7.5402784, " is": -8.820635}, {" a": -0.004152835, " the": -5.5916595, " made": -8.374922, " composed": -8.877462, " comprised": -10.735802}, {" group": -0.0037244384, " team": -5.6017947, " crew": -11.986273, " trio": -12.348279, " space": -12.42801}, {" of": 0.0, "of": -18.829311, " astronauts": -21.117907, " consisting": -21.940418, " Of": -22.177507}, {" astronauts": -0.058818117, " people": -3.2526867, " three": -4.247543, " American": -6.482934, " astronaut": -7.2482233}, {".": -0.00032407563, " who": -8.037744, " consisting": -14.670737, ",": -14.856639, " that": -16.027468}, {" Third": -0.0068710335, "Third": -6.4886866, " third": -6.496029, " Astron": -6.583553, " astronauts": -7.1369553}, {",": 0.0, "ly": -21.09979, ".": -21.7781, "<|endoftext|>": -22.475672, " astronauts": -22.96766}, {" astronauts": -0.058770265, " bicycles": -3.30216, " there": -4.907382, " it": -5.4312882, " a": -5.9161797}, {" need": -0.8781163, " are": -1.106157, " generally": -2.397889, " typically": -2.674999, " don": -3.0494049}, {" space": -0.31531215, " a": -2.170985, " spaces": -2.713411, " spacecraft": -3.3403432, " rockets": -3.6929672}, {" suits": -0.0022832155, "suits": -7.2041044, " vehicles": -7.3008757, "uits": -9.068314, " helmets": -9.154797}, {" to": -0.038135435, " and": -3.486904, ",": -4.9946404, " in": -10.868721, " when": -10.977427}, {" survive": -0.09740238, " go": -2.750097, " live": -4.8051944, " breathe": -4.940402, " travel": -5.4059067}, {" in": -1.1430258e-06, ".": -13.756275, ",": -15.919487, " outside": -17.142937, " on": -19.35839}, {" space": -0.00012500932, 
" the": -9.39055, " outer": -10.166198, " Space": -13.834578, "space": -14.134299}, {".": -1.6642034e-05, ",": -11.005033, ";": -17.318686, " .": -20.150845, " (": -20.488396}, {" fourth": -0.9039777, " Therefore": -1.2051252, " Fourth": -2.01806, "Fourth": -2.2629197, " So": -3.4464812}, {",": -5.4364154e-06, " fifth": -13.833147, ".": -14.0815325, " Therefore": -14.426512, ",,": -14.61961}, {" a": -0.09768034, " bicycles": -2.7943432, " space": -3.5998566, " it": -6.301731, " bikes": -6.5353146}, {" tri": -0.1860828, " bicycle": -1.833269, " space": -5.743343, " tandem": -5.820568, " bike": -6.0880847}, {"ples": -1.8841804e-07, "kes": -16.10212, "pl": -16.596077, "pless": -16.935406, "pp": -17.12423}, {" tandem": -2.2159093e-06, " bike": -13.695801, " t": -15.20576, " bicycle": -15.205979, " team": -15.873348}, {" bike": -0.009463741, " bicycle": -4.671758, "bike": -10.400263, " bikes": -10.926458, " bicycles": -12.279697}, {" cannot": -0.12772085, " does": -2.211133, " can": -5.3347692, " would": -5.670498, " doesn": -6.9937873}, {" support": -0.025510417, " provide": -4.1057277, " fit": -5.3199677, " hold": -6.1266875, " accommodate": -7.491157}, {" a": -0.007841354, " an": -5.1015034, " space": -6.979568, " astronauts": -7.3293347, " the": -9.090891}, {" space": -0.00019964804, " person": -8.8002825, "space": -10.964049, " spaces": -11.191693, " human": -11.774081}, {" suit": -8.653179e-06, " suits": -12.083789, "suit": -13.1708975, " suite": -14.710831, " Suit": -15.075825}, {".": -4.277735e-07, " .": -14.854747, ",": -16.392918, "..": -17.73441, "-": -18.017017}, {" Therefore": -0.033897083, " therefore": -4.0287414, " fifth": -4.6473327, " So": -5.514034, " The": -7.6951237}, {",": -6.827632e-08, " the": -16.662365, " ,": -19.401608, "<|endoftext|>": -19.589504, ",...": -20.086624}, {" a": -0.124563806, " the": -2.144587, " it": -13.647965, " The": -14.995823, " Apollo": -15.757653}, {" tri": -1.9741461e-05, "tri": -11.019544, " triple": -13.518242, " Tri": 
-14.224268, " trip": -14.6683855}, {"ples": -7.8540467e-07, "pless": -14.445216, "pled": -16.127413, "plates": -16.37974, "oples": -17.130678}, {" tandem": -6.1516675e-06, "andem": -12.8715315, " team": -13.250975, " bike": -14.275569, " t": -15.631047}, {" bike": -0.00019320888, " bicycle": -8.753595, "bike": -11.189904, " cannot": -11.824144, " b": -12.181774}, {" would": -0.19388966, " cannot": -1.7431318, " could": -6.662029, " does": -14.5922365, " will": -15.449009}, {" not": -4.277735e-07, " NOT": -15.025499, " Not": -15.994632, "not": -18.86961, " ": -19.701614}, {" be": -0.29437375, " support": -1.3804485, " have": -5.6455293, "support": -13.1882925, " able": -15.153968}, {" able": -1.8841804e-07, " Able": -16.105852, " suitable": -16.994755, " support": -18.029257, " capable": -18.392942}, {" to": -8.2927705e-05, " support": -9.451776, "to": -12.92938, " ": -13.347828, "<|endoftext|>": -16.1327}, {" support": -1.1430258e-06, "support": -13.706383, " Apollo": -15.642453, " supports": -16.76992, " Support": -17.239334}, {" the": -0.0003581233, " Apollo": -8.031657, " an": -10.363421, " a": -13.929089, " any": -14.803389}, {" Apollo": -3.529076e-06, " crew": -12.844796, "Ap": -14.147996, " astronauts": -16.471024, " entire": -16.879606}, {" 15": -3.076318e-07, " crew": -16.241224, "15": -16.352169, "<|endoftext|>": -16.480202, " 1": -17.19747}, {" crew": -2.987608e-05, " astronauts": -10.597109, "crew": -13.254691, " Crew": -13.383885, " cre": -14.619318}, {".": -0.00018414622, " in": -8.614144, "<|endoftext|>": -13.704627, " if": -13.991631, " .": -15.02975}, {" The": -0.0029845848, "<|endoftext|>": -5.881468, "The": -9.352306, " ": -9.883882, "\n": -10.8249445}, {" answer": -3.690939e-05, "answer": -10.855854, " Answer": -11.082774, "Answer": -13.441966, "<|endoftext|>": -14.164699}, {" is": -4.277735e-07, "<|endoftext|>": -15.26466, " ": -15.89464, "is": -16.59137, "\n": -18.55125}, {" no": -1.2231317e-05, " No": -12.13003, "<|endoftext|>": -12.457483, " 
yes": -13.836676, " ": -13.958253}, {".": -0.00017174664, "<|endoftext|>": -8.824974, "..": -11.432517, ".\"": -11.553651, "\n": -13.447582}], "text_offset": [2160, 2166, 2167, 2169, 2173, 2177, 2184, 2189, 2192, 2194, 2199, 2202, 2210, 2211, 2218, 2219, 2223, 2230, 2233, 2238, 2242, 2244, 2250, 2253, 2264, 2265, 2271, 2272, 2283, 2288, 2294, 2300, 2303, 2311, 2314, 2320, 2321, 2328, 2329, 2331, 2335, 2339, 2346, 2351, 2358, 2366, 2368, 2374, 2379, 2380, 2390, 2391, 2393, 2397, 2401, 2408, 2413, 2419, 2423, 2426, 2431, 2434, 2442, 2446, 2453, 2456, 2461, 2462, 2466, 2473, 2476, 2479]}, "finish_reason": "stop"}, {"text": " First, the Apollo 15 crew was made up of three astronauts. Second, a tandem bike is a bike built for two people. Third, a triples tandem bike is a bike built for three people. So, yes, a triples tandem bike would support the Apollo 15 crew. The answer is yes.", "index": 1, "logprobs": {"tokens": [" First", ",", " the", " Apollo", " 15", " crew", " was", " made", " up", " of", " three", " astronauts", ".", " Second", ",", " a", " tandem", " bike", " is", " a", " bike", " built", " for", " two", " people", ".", " Third", ",", " a", " tri", "ples", " tandem", " bike", " is", " a", " bike", " built", " for", " three", " people", ".", " So", ",", " yes", ",", " a", " tri", "ples", " tandem", " bike", " would", " support", " the", " Apollo", " 15", " crew", ".", " The", " answer", " is", " yes", "."], "token_logprobs": [-0.8718564, -7.255186e-05, -0.7864712, -0.0010037789, -1.9765537e-06, -0.0013589661, -0.29911068, -0.86025876, -6.827632e-08, 0.0, -0.0015166106, -0.29790145, -0.01527196, -0.00090384827, 0.0, -0.00044654985, -0.13190362, -0.0015507569, -0.004657105, -0.032318816, -0.064240806, -1.8458246, -1.8841804e-07, -1.022884e-06, -0.0009087415, -1.688045e-05, -0.004334805, 0.0, -1.3306066e-05, -0.0035147648, -0.00069208303, -0.024366269, -1.5567284e-05, -0.07112332, -1.0800813e-05, -0.00038089874, -0.00016495361, 0.0, -3.883893e-06, -4.277735e-07, 
0.0, -0.539808, -0.00059498526, -0.00036205718, 0.0, -8.1256905e-05, -3.643162e-05, -3.883893e-06, -1.2622395e-06, -1.7953334e-05, -0.006696406, -0.0411124, -0.0061819945, -6.110595e-05, 0.0, -2.9311614e-06, -1.022884e-06, -0.002152809, -2.5225998e-05, -1.8841804e-07, -0.00010665305, -8.018216e-05], "top_logprobs": [{" First": -0.8718564, " A": -1.3182055, " Apollo": -1.633083, " The": -2.6685808, " Tri": -4.06693}, {",": -7.255186e-05, " we": -10.333815, " of": -10.429089, " off": -12.017579, " Apollo": -12.889201}, {" the": -0.7864712, " a": -1.4224538, " Apollo": -1.7937516, " we": -2.5659015, " let": -3.0424201}, {" Apollo": -0.0010037789, " bike": -7.4140096, " tri": -8.509893, " crew": -8.9037695, " tandem": -10.731138}, {" 15": -1.9765537e-06, "15": -13.669093, " crew": -14.607573, " 11": -15.56333, " 13": -15.854691}, {" crew": -0.0013589661, " was": -7.353758, " Crew": -7.587811, " mission": -9.201981, "crew": -9.641953}, {" was": -0.29911068, " consisted": -1.939255, " would": -2.512071, " were": -4.183943, " consists": -4.246275}, {" made": -0.86025876, " three": -1.239146, " composed": -1.4716153, " two": -3.9744859, " a": -4.1403317}, {" up": -6.827632e-08, " of": -15.718165, "up": -19.378248, " Up": -23.565722, "Up": -24.877281}, {" of": 0.0, "of": -17.292315, " three": -18.492496, " astronauts": -21.287262, " four": -22.618969}, {" three": -0.0015166106, " 3": -6.597328, " astronauts": -9.029301, " two": -10.755421, " four": -12.560784}, {" astronauts": -0.29790145, " people": -1.3574278, " men": -8.797979, " members": -9.015141, " astronaut": -11.181001}, {".": -0.01527196, ":": -4.661081, ",": -5.168218, "--": -12.651766, " -": -13.720383}, {" Second": -0.00090384827, " Next": -7.852163, " second": -8.732445, " So": -8.881835, " A": -9.260681}, {",": 0.0, " tandem": -21.249414, ".": -22.033863, " ,": -23.003633, "<|endoftext|>": -23.274353}, {" a": -0.00044654985, " tandem": -7.7310486, " tri": -12.231297, " the": -13.142729, " Apollo": 
-15.293683}, {" tandem": -0.13190362, " tri": -2.0917807, " triple": -9.61231, "tri": -10.967372, " bike": -11.192618}, {" bike": -0.0015507569, " bicycle": -6.4796124, "bike": -11.544798, " Bike": -12.88342, " bikes": -13.30957}, {" is": -0.004657105, " has": -5.3978567, " supports": -9.522797, " typically": -10.648924, " only": -11.100505}, {" a": -0.032318816, " made": -3.8302124, " designed": -4.6265817, " built": -8.461623, " meant": -9.643946}, {" bike": -0.064240806, " bicycle": -2.8265872, " type": -5.9640355, " two": -7.8261614, " Bicycle": -11.42624}, {" designed": -0.2714247, " built": -1.8458246, " made": -2.6880448, " that": -4.718393, " meant": -6.158577}, {" for": -1.8841804e-07, " to": -16.233837, "for": -16.632133, " specifically": -18.847698, " For": -19.805021}, {" two": -1.022884e-06, "two": -13.784142, " Two": -16.672853, "Two": -17.408436, " TWO": -17.547123}, {" people": -0.0009087415, " riders": -7.0177402, ".": -11.437536, " or": -13.880584, "people": -14.759187}, {".": -1.688045e-05, " to": -11.009084, ",": -14.832143, " (": -18.165834, " riding": -18.604181}, {" Third": -0.004334805, " Therefore": -5.608748, " So": -7.697711, " A": -9.129609, "Third": -9.880153}, {",": 0.0, " a": -21.66404, "ly": -22.618948, ".": -23.062708, " ,": -24.016615}, {" a": -1.3306066e-05, " tri": -12.148992, " Apollo": -12.328578, " there": -12.843458, " triple": -15.726079}, {" tri": -0.0035147648, " triple": -5.7993307, "tri": -7.791457, " Tri": -9.895616, "Tri": -12.734825}, {"ples": -0.00069208303, "kes": -7.4256377, "ke": -9.792297, "plex": -10.613845, "pty": -12.460156}, {" tandem": -0.024366269, " bike": -3.728195, " team": -10.980777, " bicycle": -11.653677, "bike": -12.381655}, {" bike": -1.5567284e-05, "bike": -11.90898, " bicycle": -12.841367, " is": -13.058945, " bikes": -13.343564}, {" is": -0.07112332, " would": -2.6792278, " has": -10.9819565, " could": -11.360639, " supports": -12.711112}, {" a": -1.0800813e-05, " not": -11.820902, " built": 
-12.895929, " an": -13.873778, " three": -17.300482}, {" bike": -0.00038089874, " tandem": -8.734923, " three": -8.909251, " rare": -10.025029, "bike": -10.989267}, {" built": -0.00016495361, " that": -9.159332, " designed": -10.7243, " made": -11.152993, "built": -11.450925}, {" for": 0.0, "for": -18.199831, " specifically": -19.710527, " For": -20.85838, " three": -20.902843}, {" three": -3.883893e-06, " six": -12.841298, "three": -14.1523695, " 3": -15.4755125, " THREE": -15.669721}, {" people": -4.277735e-07, "people": -14.491828, " People": -18.18307, "People": -19.418873, " PEOPLE": -20.12045}, {".": 0.0, ",": -17.663725, " to": -18.790228, " riding": -19.599365, " (": -20.097002}, {" So": -0.539808, " Therefore": -0.87610984, "Therefore": -8.490871, " so": -9.081553, "So": -9.103846}, {",": -0.00059498526, " yes": -7.485315, " the": -10.843378, " a": -11.222396, " it": -14.713935}, {" yes": -0.00036205718, " a": -8.064435, " Yes": -10.356034, " the": -11.591764, "yes": -13.076235}, {",": 0.0, " a": -19.105404, " ,": -19.73096, ".": -21.35899, " the": -22.431013}, {" a": -8.1256905e-05, " the": -9.447638, " tri": -13.545761, " it": -15.021351, " Apollo": -15.067301}, {" tri": -3.643162e-05, "tri": -10.386422, " triple": -12.958025, " Tri": -13.256512, " trip": -14.041182}, {"ples": -3.883893e-06, "pless": -12.838182, "plex": -14.589763, "pled": -15.450116, "plates": -15.731028}, {" tandem": -1.2622395e-06, " bike": -14.5566435, " team": -14.755509, "andem": -15.564494, "<|endoftext|>": -16.053474}, {" bike": -1.7953334e-05, "bike": -11.394165, " would": -12.51619, " bicycle": -13.374015, " Bike": -14.248361}, {" would": -0.006696406, " could": -5.0295577, " support": -9.216597, " supports": -10.551118, " Would": -13.309324}, {" support": -0.0411124, " have": -3.8354044, " be": -3.9820607, "support": -10.768686, " supports": -11.860469}, {" the": -0.0061819945, " Apollo": -5.0945206, " three": -10.618421, " all": -12.003267, " an": -13.259043}, {" Apollo": 
-6.110595e-05, " three": -10.182765, " entire": -10.912275, " crew": -12.384391, "Ap": -14.074067}, {" 15": 0.0, "<|endoftext|>": -17.705679, "15": -18.212847, " crew": -18.278772, " 16": -19.595688}, {" crew": -2.9311614e-06, "crew": -13.646396, " astronauts": -14.585843, " Crew": -14.615209, " cre": -14.796804}, {".": -1.022884e-06, ",": -13.802977, " because": -16.973705, " .": -18.076683, "<|endoftext|>": -18.152515}, {" The": -0.002152809, "<|endoftext|>": -6.148686, " However": -11.76592, "\n": -12.953475, " ": -13.092335}, {" answer": -2.5225998e-05, "answer": -10.755281, " Answer": -12.77651, "<|endoftext|>": -14.415813, "Answer": -14.550797}, {" is": -1.8841804e-07, "<|endoftext|>": -15.546292, " ": -17.789686, "is": -18.411674, "\n": -18.437283}, {" yes": -0.00010665305, " Yes": -9.726728, "yes": -10.2219925, "<|endoftext|>": -12.001637, " ": -13.276692}, {".": -8.018216e-05, "<|endoftext|>": -9.784067, "..": -11.633013, ".\"": -11.663565, "\n": -13.28256}], "text_offset": [2160, 2166, 2167, 2171, 2178, 2181, 2186, 2190, 2195, 2198, 2201, 2207, 2218, 2219, 2226, 2227, 2229, 2236, 2241, 2244, 2246, 2251, 2257, 2261, 2265, 2272, 2273, 2279, 2280, 2282, 2286, 2290, 2297, 2302, 2305, 2307, 2312, 2318, 2322, 2328, 2335, 2336, 2339, 2340, 2344, 2345, 2347, 2351, 2355, 2362, 2367, 2373, 2381, 2385, 2392, 2395, 2400, 2401, 2405, 2412, 2415, 2419]}, "finish_reason": "stop"}, {"text": " First, we need to figure out how many people were in the Apollo 15 crew. According to NASA, the Apollo 15 crew consisted of three people. Second, we need to figure out how many people can ride a triples tandem bike. From looking at various websites, it appears that the maximum number of people that can ride a triples tandem bike is three. 
Therefore, the answer is yes, a triples tandem bike would support the Apollo 15 crew.", "index": 2, "logprobs": {"tokens": [" First", ",", " we", " need", " to", " figure", " out", " how", " many", " people", " were", " in", " the", " Apollo", " 15", " crew", ".", " According", " to", " NASA", ",", " the", " Apollo", " 15", " crew", " consisted", " of", " three", " people", ".", " Second", ",", " we", " need", " to", " figure", " out", " how", " many", " people", " can", " ride", " a", " tri", "ples", " tandem", " bike", ".", " From", " looking", " at", " various", " websites", ",", " it", " appears", " that", " the", " maximum", " number", " of", " people", " that", " can", " ride", " a", " tri", "ples", " tandem", " bike", " is", " three", ".", " Therefore", ",", " the", " answer", " is", " yes", ",", " a", " tri", "ples", " tandem", " bike", " would", " support", " the", " Apollo", " 15", " crew", "."], "token_logprobs": [-0.8718564, -7.1719274e-05, -2.5323462, -1.2622395e-06, 0.0, -1.3089793, -1.8841804e-07, -0.5084457, -0.1592349, -0.0011915636, -0.41280454, -0.005440085, -8.054166e-05, -0.00067746826, -3.076318e-07, -0.0002058497, 0.0, -1.9616383, -4.0040345e-06, -0.16199096, -7.100402e-05, -0.021427182, -0.025306704, 0.0, -0.00018260024, -0.051427748, 0.0, -0.0014299375, -1.2611215, -0.58293813, -0.57942784, 0.0, -1.2622395e-06, 0.0, 0.0, -0.11840774, -3.076318e-07, -0.6507371, -0.023922238, -0.00020966532, -0.19069096, -0.00053337787, -0.0023219902, -0.06582722, -0.00010939391, -2.2159093e-06, -0.00011666565, -1.5006569e-06, -1.5939566, -0.25215834, -0.0019982893, -0.57071346, -2.1302772, -0.00085850776, -1.3661821e-05, -0.22425233, -7.8540467e-07, -0.9914362, -0.1122384, -0.33742508, 0.0, -4.277735e-07, -0.0051217875, -1.2622395e-06, -1.1039229e-05, -1.5006569e-06, -6.253833e-05, -1.5006569e-06, -2.9311614e-06, -2.1768952e-05, -3.076318e-07, -0.0020914634, -4.9902206e-05, -0.0136441635, 0.0, -0.032209672, -0.039936733, -9.7471166e-05, -0.0002103787, 
-0.17563236, -2.4510746e-05, -2.784859e-05, -9.055464e-07, -1.5006569e-06, -1.8073477e-05, -0.0012631178, -0.0034036955, -0.004826111, -0.002588121, -1.8841804e-07, -3.2906587e-06, -6.6619094e-07], "top_logprobs": [{" First": -0.8718564, " A": -1.3182055, " Apollo": -1.633083, " The": -2.6685808, " Tri": -4.06693}, {",": -7.1719274e-05, " we": -10.344171, " of": -10.444001, " off": -12.02051, " Apollo": -12.880037}, {" the": -0.798, " a": -1.4280658, " Apollo": -1.7714725, " we": -2.5323462, " let": -3.0431135}, {" need": -1.2622395e-06, " should": -14.148503, " would": -15.043267, " must": -15.808118, "'ll": -16.12433}, {" to": 0.0, "to": -17.009209, "<|endoftext|>": -18.311071, " Apollo": -19.748314, " define": -20.30376}, {" understand": -0.9050162, " figure": -1.3089793, " know": -1.6445868, " define": -3.2904673, " determine": -3.6766481}, {" out": -1.8841804e-07, " Apollo": -16.431782, " what": -17.080467, " Out": -17.100328, "out": -17.509874}, {" how": -0.5084457, " what": -0.9201907, " who": -9.389872, " if": -10.337645, " Apollo": -12.047354}, {" many": -0.1592349, " much": -1.9160442, " heavy": -11.86451, " big": -12.076196, "many": -13.029054}, {" people": -0.0011915636, " crew": -6.7783804, " Apollo": -10.255618, " astronauts": -11.487924, " members": -12.503037}, {" were": -0.41280454, " are": -1.2175455, " the": -3.9461677, " Apollo": -3.9541445, " would": -5.661671}, {" in": -0.005440085, " on": -5.2168107, " crew": -14.302909, " Apollo": -15.793126, " aboard": -18.613173}, {" the": -8.054166e-05, " Apollo": -9.437468, " crew": -14.007458, "the": -18.149384, "Ap": -18.88347}, {" Apollo": -0.00067746826, " crew": -7.3004584, "Ap": -13.31317, " crews": -15.7081785, "crew": -16.471785}, {" 15": -3.076318e-07, "15": -15.97453, " crew": -16.317442, " 11": -17.128769, " 16": -17.47726}, {" crew": -0.0002058497, " Crew": -8.768791, "crew": -10.355017, " crews": -11.400532, "Crew": -12.530108}, {".": 0.0, ":": -18.336514, "-": -18.348225, "?": -18.354452, " 
-": -19.071852}, {" The": -0.24749677, " According": -1.9616383, " Second": -2.7118444, " Then": -5.4158335, " Apollo": -5.499049}, {" to": -4.0040345e-06, " the": -12.985692, " ": -13.7385435, "to": -14.668728, "<|endoftext|>": -15.754521}, {" NASA": -0.16199096, " the": -2.3115215, " Wikipedia": -3.621044, "NASA": -4.712488, " Nasa": -5.2384315}, {",": -7.100402e-05, "'s": -9.569109, ".": -14.130989, " website": -15.4568205, " ,": -16.364761}, {" the": -0.021427182, " there": -3.85675, " Apollo": -9.705091, " \"": -14.012721, "the": -14.439593}, {" Apollo": -0.025306704, " crew": -3.6895995, " Crew": -13.052819, "Ap": -13.413925, " answer": -13.422474}, {" 15": 0.0, " 11": -17.064955, " 16": -17.389124, "15": -17.731665, " 14": -18.332075}, {" crew": -0.00018260024, " Crew": -9.194094, "crew": -10.095963, " mission": -10.518446, " crews": -12.491228}, {" consisted": -0.051427748, " was": -3.0385568, " had": -6.271093, " consists": -8.352494, " included": -10.027291}, {" of": 0.0, "of": -18.737297, " ": -21.666704, " or": -23.15064, " on": -23.5156}, {" three": -0.0014299375, " 3": -6.682811, " commander": -9.124164, " Commander": -10.575471, " two": -11.271067}, {" astronauts": -0.33375898, " people": -1.2611215, " men": -7.905248, " members": -10.365285, " astronaut": -10.863428}, {".": -0.58293813, ":": -0.8170747, ",": -11.2325945, " -": -11.982433, "--": -13.197974}, {" Second": -0.57942784, " Next": -1.466303, " So": -1.7943288, " Therefore": -3.2088568, " Now": -7.2245665}, {",": 0.0, " we": -22.845509, " ,": -23.163383, ".": -23.31109, "<|endoftext|>": -25.480288}, {" we": -1.2622395e-06, " a": -13.636999, "we": -16.387882, " We": -16.91089, " tandem": -17.835573}, {" need": 0.0, "need": -19.500967, " should": -21.217554, " would": -21.502316, " needs": -21.537785}, {" to": 0.0, " figure": -19.755789, "to": -20.253971, " determine": -21.667912, " To": -23.040394}, {" figure": -0.11840774, " determine": -2.45581, " find": -4.252752, " know": -4.6430483, " 
decide": -6.9327106}, {" out": -3.076318e-07, " how": -15.132557, " Out": -17.256775, "out": -17.672092, " what": -18.120235}, {" how": -0.6507371, " if": -1.2014003, " what": -1.7363181, " whether": -6.573562, " the": -14.327329}, {" many": -0.023922238, " much": -3.74534, "many": -12.231321, " a": -12.887296, " heavy": -13.4818125}, {" people": -0.00020966532, " wheels": -9.048722, " seats": -10.224297, " bikes": -10.627173, "people": -10.970416}, {" can": -0.19069096, " a": -1.7509516, " could": -12.828083, " tandem": -14.920542, "can": -15.8961315}, {" ride": -0.00053337787, " fit": -7.5834165, " Ride": -11.296486, "ride": -11.71578, " rides": -13.255874}, {" a": -0.0023219902, " on": -6.066473, " in": -18.792126, "a": -18.887323, " tandem": -19.062239}, {" tri": -0.06582722, " tandem": -2.75623, " triple": -9.25001, " bike": -9.876324, "tri": -10.883688}, {"ples": -0.00010939391, "kes": -9.138195, "-": -14.7219305, "plex": -14.980463, "pl": -15.1725}, {" tandem": -2.2159093e-06, " bike": -13.065328, " bicycle": -16.989481, "-": -17.967182, " team": -18.064537}, {" bike": -0.00011666565, " bicycle": -9.082306, " Bike": -13.506078, "bike": -13.511005, " bikes": -15.102086}, {".": -1.5006569e-06, " at": -14.019011, " together": -14.393969, " .": -15.9904585, " safely": -17.391653}, {" A": -0.3986726, " From": -1.5939566, " Most": -2.6381516, " According": -3.430613, " Based": -5.1226406}, {" looking": -0.25215834, " a": -2.1958563, " what": -2.4537232, " doing": -4.5367975, " my": -5.251261}, {" at": -0.0019982893, " online": -6.2503443, " up": -9.906434, " it": -11.141154, "<|endoftext|>": -13.849038}, {" various": -0.57071346, " several": -2.0091002, " a": -2.476456, " different": -2.7799695, " the": -2.9510283}, {" tandem": -0.92984134, " bike": -1.143907, " sources": -2.0388005, " websites": -2.1302772, " online": -4.450783}, {",": -0.00085850776, " that": -7.33522, " about": -8.526242, " selling": -12.396636, " (": -12.647272}, {" it": -1.3661821e-05, " we": 
-11.199491, " a": -17.082499, " It": -17.30276, " tri": -17.847689}, {" appears": -0.22425233, " seems": -1.6228998, " looks": -5.6427226, " appear": -11.152682, " seem": -12.831683}, {" that": -7.8540467e-07, " as": -13.994976, "that": -17.334164, " tri": -18.455023, " most": -19.71954}, {" most": -0.47005484, " the": -0.9914362, " tri": -5.8413644, " a": -6.867489, " there": -11.594874}, {" maximum": -0.1122384, " largest": -3.7479348, " majority": -3.9487174, " average": -4.063682, " answer": -4.2158284}, {" number": -0.33742508, " capacity": -1.2520542, " amount": -8.231548, " is": -8.971997, "number": -10.651814}, {" of": 0.0, " is": -19.600876, "of": -20.25698, " Of": -22.64298, " people": -23.54258}, {" people": -4.277735e-07, " riders": -14.905247, "people": -15.632766, "People": -19.509323, " People": -20.30178}, {" that": -0.0051217875, " who": -5.2789044, " a": -11.918358, " is": -12.95488, " for": -14.514599}, {" can": -1.2622395e-06, " could": -13.886511, " a": -14.875807, "can": -18.444494, " Can": -19.383}, {" ride": -1.1039229e-05, " safely": -12.147019, "ride": -12.194353, " Ride": -14.77091, " rides": -15.725121}, {" a": -1.5006569e-06, " on": -13.382815, " most": -16.768316, "a": -17.391092, " one": -17.518185}, {" tri": -6.253833e-05, "tri": -10.096528, " triple": -10.971102, " tandem": -12.663051, " standard": -15.011785}, {"ples": -1.5006569e-06, "pless": -14.05637, "pled": -14.697457, "bles": -15.841814, "plex": -16.223173}, {" tandem": -2.9311614e-06, " bike": -12.764618, "<|endoftext|>": -17.060865, "andem": -17.366182, " bicycle": -18.831617}, {" bike": -2.1768952e-05, " bicycle": -10.993178, "bike": -12.382826, " bikes": -14.59399, " Bike": -15.093831}, {" is": -3.076318e-07, " at": -15.876091, " together": -16.61326, " safely": -16.674536, " varies": -16.871172}, {" three": -0.0020914634, " six": -6.2779293, " four": -8.5834255, " nine": -11.279726, " five": -12.3401}, {".": -4.9902206e-05, " people": -9.905363, ",": -16.792492, " .": 
-19.701355, " (": -20.425138}, {" Therefore": -0.0136441635, " So": -4.3082223, " Since": -9.922563, " therefore": -10.985328, "Therefore": -11.223857}, {",": 0.0, "<|endoftext|>": -20.811062, " the": -21.395058, ",...": -21.65557, " ,": -22.486525}, {" the": -0.032209672, " a": -3.873291, " yes": -4.530617, " since": -9.99102, " we": -10.484783}, {" answer": -0.039936733, " Apollo": -3.2885365, " tri": -6.3654294, "answer": -9.852616, " Answer": -10.146769}, {" is": -9.7471166e-05, " to": -9.24707, " appears": -13.824397, " would": -16.372515, "is": -18.502949}, {" yes": -0.0002103787, " Yes": -8.8743515, "yes": -10.001912, " that": -11.082476, " YES": -12.123341}, {",": -0.17563236, ".": -1.8269346, " -": -8.904026, " \u2013": -10.741964, ";": -12.692657}, {" a": -2.4510746e-05, " the": -10.6170225, " three": -16.403915, " because": -17.011007, " Apollo": -17.546036}, {" tri": -2.784859e-05, "tri": -10.622802, " triple": -12.941017, " trip": -14.8008995, " three": -14.836817}, {"ples": -9.055464e-07, "<|endoftext|>": -14.875437, "pless": -15.354567, "plex": -16.60281, "pl": -16.677471}, {" tandem": -1.5006569e-06, " bike": -13.72856, "<|endoftext|>": -15.228274, " team": -15.870935, "andem": -16.971972}, {" bike": -1.8073477e-05, "bike": -11.276879, " bicycle": -12.605166, " could": -14.094665, " Bike": -14.538006}, {" would": -0.0012631178, " could": -6.7046127, " support": -10.924391, " supports": -11.583579, " can": -11.693254}, {" support": -0.0034036955, " have": -5.917311, " be": -7.284914, "support": -11.106594, " supports": -12.7720585}, {" the": -0.004826111, " Apollo": -5.3368955, " all": -13.077446, " an": -13.363255, "the": -16.45885}, {" Apollo": -0.002588121, " entire": -5.96006, " crew": -12.914243, " three": -13.514806, " whole": -14.507379}, {" 15": -1.8841804e-07, "<|endoftext|>": -15.514975, "15": -19.752499, " crew": -20.21557, " 16": -20.686504}, {" crew": -3.2906587e-06, "crew": -13.50003, "<|endoftext|>": -13.557655, " Crew": -15.101052, " 
cre": -15.384593}, {".": -6.6619094e-07, "<|endoftext|>": -14.078139, "..": -16.77473, " .": -18.894302, ".\"": -19.131643}], "text_offset": [2160, 2166, 2167, 2170, 2175, 2178, 2185, 2189, 2193, 2198, 2205, 2210, 2213, 2217, 2224, 2227, 2232, 2233, 2243, 2246, 2251, 2252, 2256, 2263, 2266, 2271, 2281, 2284, 2290, 2297, 2298, 2305, 2306, 2309, 2314, 2317, 2324, 2328, 2332, 2337, 2344, 2348, 2353, 2355, 2359, 2363, 2370, 2375, 2376, 2381, 2389, 2392, 2400, 2409, 2410, 2413, 2421, 2426, 2430, 2438, 2445, 2448, 2455, 2460, 2464, 2469, 2471, 2475, 2479, 2486, 2491, 2494, 2500, 2501, 2511, 2512, 2516, 2523, 2526, 2530, 2531, 2533, 2537, 2541, 2548, 2553, 2559, 2567, 2571, 2578, 2581, 2586]}, "finish_reason": "stop"}, {"text": " First, a triples tandem bike is a bike designed for three people to ride. Second, the Apollo 15 crew consisted of three astronauts. Therefore, a triples tandem bike would support the Apollo 15 crew. The answer is yes.", "index": 3, "logprobs": {"tokens": [" First", ",", " a", " tri", "ples", " tandem", " bike", " is", " a", " bike", " designed", " for", " three", " people", " to", " ride", ".", " Second", ",", " the", " Apollo", " 15", " crew", " consisted", " of", " three", " astronauts", ".", " Therefore", ",", " a", " tri", "ples", " tandem", " bike", " would", " support", " the", " Apollo", " 15", " crew", ".", " The", " answer", " is", " yes", "."], "token_logprobs": [-0.8718564, -7.183848e-05, -1.4419594, -0.04076926, -6.146545e-05, -3.0947078e-05, -0.00014349136, -0.028202008, -0.00013228387, -0.5782387, -0.4175318, -0.0070179636, -0.00018307802, -0.00046694485, -1.2218003, -3.643162e-05, -0.3619308, -0.010624583, 0.0, -0.0022473712, -0.0004503664, -1.3823812e-06, -0.00050540705, -1.3953518, 0.0, -0.0011067076, -1.5405867, -6.587617e-05, -2.2649827, 0.0, -0.0061617224, -2.2604345e-05, -1.8841804e-07, -2.3360508e-06, -1.1873685e-05, -0.008505133, -0.018230226, -0.00148527, -3.762558e-05, -1.8841804e-07, -4.1232483e-06, -4.277735e-07, 
-0.00029295182, -1.3900239e-05, -6.827632e-08, -0.00011118111, -7.064452e-05], "top_logprobs": [{" First": -0.8718564, " A": -1.3182055, " Apollo": -1.633083, " The": -2.6685808, " Tri": -4.06693}, {",": -7.183848e-05, " we": -10.346582, " of": -10.44034, " off": -12.029075, " Apollo": -12.86875}, {" the": -0.78363043, " a": -1.4419594, " Apollo": -1.7746812, " we": -2.5635967, " let": -3.0429437}, {" tri": -0.04076926, " tandem": -3.249101, " triple": -7.8489113, " bike": -8.194655, " Tri": -8.796407}, {"ples": -6.146545e-05, "kes": -9.734389, "pl": -14.573965, "athlon": -14.655976, "plex": -15.057594}, {" tandem": -3.0947078e-05, " bike": -10.407783, " bicycle": -14.83418, " team": -15.0991535, " bikes": -16.784964}, {" bike": -0.00014349136, " bicycle": -9.110877, " is": -10.991377, "bike": -11.925232, " Bike": -12.253349}, {" is": -0.028202008, " has": -3.976072, " would": -5.252514, " supports": -5.8874965, " can": -7.984902}, {" a": -0.00013228387, " designed": -9.322741, " not": -10.224129, " built": -13.036637, " an": -13.267063}, {" bike": -0.5782387, " type": -1.0507883, " bicycle": -2.6368382, " three": -4.1204424, " very": -7.7612944}, {" designed": -0.4175318, " built": -1.419214, " that": -2.4335248, " with": -5.206265, " made": -5.310997}, {" for": -0.0070179636, " to": -4.9628606, "for": -14.650931, " specifically": -17.995213, " For": -18.761023}, {" three": -0.00018307802, " 3": -8.759223, " two": -10.876537, "three": -12.450125, " six": -14.229641}, {" people": -0.00046694485, " riders": -7.6714034, "people": -14.3939705, " cyclists": -15.197062, " adults": -17.223166}, {".": -0.34915978, " to": -1.2218003, " riding": -10.890978, ",": -14.064942, " who": -15.301409}, {" ride": -3.643162e-05, " pedal": -11.070287, " riding": -11.978418, " Ride": -12.04132, "ride": -12.884533}, {".": -0.3619308, " together": -1.3929328, " at": -2.952974, " in": -5.842903, " tandem": -9.456329}, {" Second": -0.010624583, " Apollo": -5.2853265, " Next": -5.487311, " 
The": -7.216408, " So": -8.211069}, {",": 0.0, " Apollo": -18.22636, " the": -20.874962, ".": -21.279097, " ,": -21.35786}, {" the": -0.0022473712, " Apollo": -6.1010113, " The": -13.246854, "the": -13.567196, " crew": -14.885344}, {" Apollo": -0.0004503664, " crew": -7.711773, "Ap": -13.176061, " three": -15.111162, " members": -16.353216}, {" 15": -1.3823812e-06, "15": -14.408283, " crew": -14.597819, " 16": -15.911086, " 13": -16.22695}, {" crew": -0.00050540705, " was": -8.421438, " Crew": -8.852434, " mission": -9.217676, "crew": -10.381381}, {" was": -0.29650918, " consisted": -1.3953518, " consists": -5.1961117, " were": -6.1217732, " had": -7.114821}, {" of": 0.0, "of": -18.675123, " only": -22.257792, "\n": -22.453367, " Of": -22.83681}, {" three": -0.0011067076, " two": -7.5406146, " four": -8.132937, " astronauts": -8.910316, " only": -10.146712}, {" people": -0.24117309, " astronauts": -1.5405867, " men": -11.243133, " astronaut": -11.776146, " members": -11.782046}, {".": -6.587617e-05, ":": -9.669176, ",": -12.896498, "--": -15.959707, "-": -16.610975}, {" So": -0.7212833, " Third": -0.89336807, " Therefore": -2.2649827, " third": -8.262659, "Third": -8.529321}, {",": 0.0, ",...": -21.879498, " a": -22.693344, " the": -22.771034, " third": -23.016104}, {" a": -0.0061617224, " yes": -5.796132, " the": -6.1266847, " it": -7.0092607, " Yes": -11.994698}, {" tri": -2.2604345e-05, "tri": -10.865136, " triple": -12.930156, " Tri": -14.223871, " three": -15.239579}, {"ples": -1.8841804e-07, "<|endoftext|>": -17.321419, "pless": -17.335117, "plex": -17.505638, "pl": -18.142632}, {" tandem": -2.3360508e-06, " bike": -13.603877, " team": -14.064076, "andem": -15.996006, " t": -16.69054}, {" bike": -1.1873685e-05, "bike": -12.365084, " bicycle": -12.686561, " would": -13.077116, " could": -13.233535}, {" would": -0.008505133, " could": -4.7726746, " should": -12.1162405, " support": -12.858952, " supports": -13.457502}, {" support": -0.018230226, " be": 
-4.028795, " have": -8.95364, " indeed": -9.709972, "support": -10.475875}, {" the": -0.00148527, " Apollo": -6.5198207, " three": -12.252373, " all": -12.612763, " them": -13.78801}, {" Apollo": -3.762558e-05, " crew": -10.826625, " entire": -11.08521, " three": -13.815872, "Ap": -13.852249}, {" 15": -1.8841804e-07, " crew": -15.997116, "<|endoftext|>": -16.165703, "15": -17.271124, " 16": -17.691498}, {" crew": -4.1232483e-06, " astronauts": -13.449596, "crew": -13.936161, "<|endoftext|>": -14.046107, " cre": -14.469406}, {".": -4.277735e-07, " if": -15.047315, " perfectly": -16.147837, " just": -16.333057, " .": -16.354403}, {" The": -0.00029295182, "<|endoftext|>": -8.8020315, " However": -8.885541, " Third": -13.235475, "The": -13.3675995}, {" answer": -1.3900239e-05, "answer": -11.303057, " Answer": -13.648696, "Answer": -14.870301, "<|endoftext|>": -17.689901}, {" is": -6.827632e-08, "<|endoftext|>": -15.636608, " ": -17.877357, "is": -18.354168, "\n": -19.1949}, {" yes": -0.00011118111, " Yes": -9.629596, "yes": -10.233518, "<|endoftext|>": -12.351519, "Yes": -12.96096}, {".": -7.064452e-05, "<|endoftext|>": -10.134282, ".\"": -11.35153, "..": -11.515283, "\n": -12.747496}], "text_offset": [2160, 2166, 2167, 2169, 2173, 2177, 2184, 2189, 2192, 2194, 2199, 2208, 2212, 2218, 2225, 2228, 2233, 2234, 2241, 2242, 2246, 2253, 2256, 2261, 2271, 2274, 2280, 2291, 2292, 2302, 2303, 2305, 2309, 2313, 2320, 2325, 2331, 2339, 2343, 2350, 2353, 2358, 2359, 2363, 2370, 2373, 2377]}, "finish_reason": "stop"}, {"text": " A triples tandem bike is a bike built for three people. The Apollo 15 crew was made up of three people. So, yes, a triples tandem bike would support the Apollo 15 crew. 
The answer is yes.", "index": 4, "logprobs": {"tokens": [" A", " tri", "ples", " tandem", " bike", " is", " a", " bike", " built", " for", " three", " people", ".", " The", " Apollo", " 15", " crew", " was", " made", " up", " of", " three", " people", ".", " So", ",", " yes", ",", " a", " tri", "ples", " tandem", " bike", " would", " support", " the", " Apollo", " 15", " crew", ".", " The", " answer", " is", " yes", "."], "token_logprobs": [-1.3182055, -0.18505056, -0.00010283744, -2.5464416e-05, -0.00017508729, -0.17330855, -0.00015386345, -0.21748944, -1.8103449, -0.00022838576, -0.0006188392, -0.0006726962, -0.025179502, -0.07746761, -0.0039342735, -1.2622395e-06, -0.0007205946, -0.31634796, -0.6605359, 0.0, 0.0, -0.0009780627, -0.3465, -0.005155339, -0.13398692, -0.015131923, -0.0010023476, 0.0, -3.2140102e-05, -3.4048382e-05, -4.277735e-07, -2.3360508e-06, -7.1062755e-06, -0.0028166068, -0.00844887, -0.023787944, -2.7013197e-05, 0.0, -4.1232483e-06, -3.076318e-07, -0.0071803425, -2.8682118e-05, -3.076318e-07, -0.00010748563, -0.00010188283], "top_logprobs": [{" First": -0.8718564, " A": -1.3182055, " Apollo": -1.633083, " The": -2.6685808, " Tri": -4.06693}, {" tri": -0.18505056, " tandem": -1.8068708, " bike": -6.045049, " triple": -6.6618376, "tri": -8.157494}, {"ples": -0.00010283744, "kes": -9.213251, "plex": -14.403784, "pl": -14.466452, "-": -14.467611}, {" tandem": -2.5464416e-05, " bike": -10.618189, " team": -14.8378315, " bicycle": -15.085322, "-": -16.313211}, {" bike": -0.00017508729, " bicycle": -8.787794, " is": -11.698681, " Bike": -12.179064, "bike": -12.288496}, {" is": -0.17330855, " has": -2.0163126, " supports": -4.128375, " would": -5.4089875, ",": -5.8959646}, {" a": -0.00015386345, " designed": -8.879628, " built": -11.859957, " made": -12.679536, " an": -13.580273}, {" bike": -0.21748944, " type": -2.3209803, " bicycle": -2.4112911, " three": -4.9823837, " bicycles": -8.1049185}, {" designed": -0.31086582, " built": -1.8103449, " that": 
-2.4760327, " with": -4.3615456, " made": -5.189709}, {" for": -0.00022838576, " to": -8.385093, "for": -15.767562, " specifically": -17.99217, " For": -20.391094}, {" three": -0.0006188392, " 3": -7.44761, " two": -10.778524, "three": -11.600699, " six": -13.15872}, {" people": -0.0006726962, " riders": -7.30589, "people": -14.297209, " cyclists": -15.828861, " adults": -17.326645}, {".": -0.025179502, " to": -3.6958127, " that": -10.546451, " riding": -11.810829, ",": -12.576349}, {" The": -0.07746761, " Apollo": -2.6085498, " It": -7.1903615, " However": -9.653385, "The": -10.9020815}, {" Apollo": -0.0039342735, " crew": -5.572978, " bike": -9.555767, "Ap": -10.831527, " three": -11.411678}, {" 15": -1.2622395e-06, "15": -14.151706, " crew": -15.191441, " 11": -16.337374, " 13": -16.458557}, {" crew": -0.0007205946, " Crew": -7.895023, " was": -8.456107, " mission": -9.590511, "crew": -10.007675}, {" was": -0.31634796, " consisted": -1.3398235, " consists": -4.875846, " had": -7.0508246, " were": -7.3142953}, {" made": -0.6605359, " a": -1.1254636, " composed": -2.233158, " three": -3.6778593, " two": -4.504019}, {" up": 0.0, " of": -16.751322, "up": -20.115795, " Up": -24.261662, "Up": -26.250704}, {" of": 0.0, "of": -18.67836, " astronauts": -22.207434, " six": -22.386305, " four": -22.445894}, {" three": -0.0009780627, " two": -7.978986, " astronauts": -8.135172, " four": -8.465099, " six": -9.756349}, {" people": -0.3465, " astronauts": -1.2282482, " Astron": -11.261615, " astronaut": -11.485221, " men": -11.6823}, {".": -0.005155339, ":": -5.3185673, ",": -8.357406, "--": -12.058899, " -": -13.74258}, {" So": -0.13398692, " Therefore": -2.0851932, " Yes": -7.644693, " therefore": -7.9846797, "Therefore": -9.243962}, {",": -0.015131923, " yes": -4.205476, " a": -9.224782, " Yes": -12.783141, " the": -13.003685}, {" yes": -0.0010023476, " a": -7.0984273, " Yes": -8.7611265, "yes": -11.246652, "Yes": -12.453109}, {",": 0.0, " a": -17.873322, " ,": -19.657915, 
".": -19.838814, ".,": -21.772001}, {" a": -3.2140102e-05, " the": -10.414599, " it": -13.277093, " Apollo": -15.395284, " three": -16.633501}, {" tri": -3.4048382e-05, "tri": -10.4369955, " triple": -12.791546, " Tri": -13.825458, " trip": -14.484725}, {"ples": -4.277735e-07, "pless": -15.730746, "plex": -16.369884, "<|endoftext|>": -16.971203, "pl": -17.247808}, {" tandem": -2.3360508e-06, " bike": -13.244566, " team": -15.227029, "<|endoftext|>": -15.764759, "andem": -16.395733}, {" bike": -7.1062755e-06, "bike": -12.328571, " bicycle": -13.619018, " would": -14.030511, " Bike": -15.394847}, {" would": -0.0028166068, " could": -5.9057837, " support": -9.733855, " supports": -10.63089, "would": -13.294291}, {" support": -0.00844887, " have": -5.1773176, " be": -5.9303412, "support": -9.848393, " supports": -10.583017}, {" the": -0.023787944, " Apollo": -3.7519603, " them": -10.487968, " all": -12.343789, " The": -13.976681}, {" Apollo": -2.7013197e-05, " crew": -11.140672, " entire": -11.42018, "Ap": -13.702304, " Apache": -15.853835}, {" 15": 0.0, " crew": -16.814924, "<|endoftext|>": -17.213583, "15": -17.662386, " 16": -17.980463}, {" crew": -4.1232483e-06, "<|endoftext|>": -12.679019, "crew": -14.383003, " cre": -15.466193, " Crew": -15.837293}, {".": -3.076318e-07, " The": -15.932483, "<|endoftext|>": -16.017262, " .": -16.732803, ",": -17.045567}, {" The": -0.0071803425, "<|endoftext|>": -4.9449396, " However": -10.403344, "The": -13.327434, " ": -13.365021}, {" answer": -2.8682118e-05, "answer": -10.621997, " Answer": -12.604554, "Answer": -14.136641, "<|endoftext|>": -15.17805}, {" is": -3.076318e-07, "<|endoftext|>": -14.868375, " ": -17.16154, "is": -17.444307, "\n": -18.248978}, {" yes": -0.00010748563, " Yes": -9.827169, "yes": -10.104712, "<|endoftext|>": -11.87117, "Yes": -13.099579}, {".": -0.00010188283, "<|endoftext|>": -9.532814, ".\"": -11.337874, "..": -11.699769, "\n": -13.021119}], "text_offset": [2160, 2162, 2166, 2170, 2177, 2182, 2185, 
2187, 2192, 2198, 2202, 2208, 2215, 2216, 2220, 2227, 2230, 2235, 2239, 2244, 2247, 2250, 2256, 2263, 2264, 2267, 2268, 2272, 2273, 2275, 2279, 2283, 2290, 2295, 2301, 2309, 2313, 2320, 2323, 2328, 2329, 2333, 2340, 2343, 2347]}, "finish_reason": "stop"}], "usage": {"prompt_tokens": 543, "completion_tokens": 318, "total_tokens": 861}}}
7
+ {"question": "Q: Would a triples tandem bike support Apollo 15 crew?\nA:", "gold_ans": "yes", "rationale": [" Apollo 15 was a space mission, not a bike race. Therefore, a triples tandem bike would not be able to support the Apollo 15 crew. The answer is no.", " First, the Apollo 15 crew was composed of three astronauts. Second, a tandem bike is a bike designed for two people. Third, a triples tandem bike is a bike designed for three people. Therefore, a triples tandem bike would support the Apollo 15 crew. The answer is yes.", " A triples tandem bike is a bike built for three people. The Apollo 15 crew was a group of three astronauts. So, yes, a triples tandem bike would support the Apollo 15 crew. The answer is yes.", " A triples tandem bike is a bike designed for three people to ride together. The Apollo 15 crew was a group of three astronauts. So, yes, a triples tandem bike would support the Apollo 15 crew. The answer is yes.", " A triples tandem bike is a bike that has three seats and is designed for three people to ride at the same time. The Apollo 15 crew consisted of three people, so a triples tandem bike would support them. The answer is yes."]}
8
+ {"question": "Q: Would a triples tandem bike support Apollo 15 crew?\nA:", "gold_ans": "yes", "rationale": [" First, the Apollo 15 crew consisted of three astronauts. Second, a tandem bike is a bike designed for two people. Third, a triples tandem bike is a bike designed for three people. Therefore, a triples tandem bike would be able to support the Apollo 15 crew. The answer is yes.", " First, the Apollo 15 crew was made up of three people. Second, a triples tandem bike is a bike designed for three people. Therefore, a triples tandem bike would support the Apollo 15 crew. The answer is yes.", " First, a triples tandem bike is a type of bicycle. Second, the Apollo 15 crew was a group of astronauts. Third, astronauts are human beings. Fourth, human beings can ride bicycles. Therefore, a triples tandem bike would support the Apollo 15 crew. The answer is yes.", " A triples tandem bike is a bike built for three people. The Apollo 15 crew was made up of three people. So, yes, a triples tandem bike would support the Apollo 15 crew. The answer is yes.", " First, the Apollo 15 crew was made up of three astronauts. Second, a tandem bike is a bike designed for two people. Third, a triples tandem bike is a bike designed for three people. So, yes, a triples tandem bike would support the Apollo 15 crew. The answer is yes."]}
9
+ {"question": "Q: Is Fiat Chrysler gaining a new overall corporate identity?\nA:", "gold_ans": "yes", "rationale": [" Fiat Chrysler is a car company. A company is gaining a new overall corporate identity. Fiat Chrysler is a car company. Therefore, Fiat Chrysler is gaining a new overall corporate identity. The answer is yes.", " There is no evidence that Fiat Chrysler is gaining a new overall corporate identity. The answer is no.", " Fiat Chrysler is a company. A company can have a corporate identity, which is the way the company is perceived by the public. There is no evidence that Fiat Chrysler is gaining a new overall corporate identity. The answer is no.", " Fiat Chrysler is a car company. Car companies sometimes rebrand themselves. For example, Fiat Chrysler's subsidiary, Dodge, recently rebranded itself. So, it's possible that Fiat Chrysler is gaining a new overall corporate identity. However, we don't know for sure, so the answer is no.", " Fiat Chrysler is a car company. As far as we know, car companies don't usually change their overall corporate identity. Therefore, it's unlikely that Fiat Chrysler is gaining a new overall corporate identity. The answer is no."]}
10
+ {"question": "Q: Could you drive a Rowe 550 to the 2008 Summer Olympics?\nA:", "gold_ans": "yes", "rationale": [" The 2008 Summer Olympics were held in Beijing, China. Beijing is not in the United States. Therefore, you could not drive a Rowe 550 to the 2008 Summer Olympics. The answer is no.", " The 2008 Summer Olympics were held in Beijing, China. Beijing is approximately 9,000 kilometers from Rowe, 550. The average car can drive about 1,000 kilometers per day. So, it would take about 9 days to drive from Rowe, 550 to Beijing. However, the Olympics only last for about two weeks. So, even if you started driving as soon as the Olympics started, you wouldn't be able to make it to Beijing in time. The answer is no.", " The 2008 Summer Olympics were held in Beijing, China. Beijing is approximately 9,000 kilometers from Rowe, Montana. The Rowe 550 is a car that was designed for off-road use. It is not designed for long-distance highway travel. Therefore, it is unlikely that you could drive a Rowe 550 to the 2008 Summer Olympics. The answer is no.", " The 2008 Summer Olympics were held in Beijing, China. Beijing is approximately 9,000 kilometers from Rowe, Massachusetts. The Rowe 550 is a car that has a maximum range of approximately 550 kilometers. So, no, you could not drive a Rowe 550 to the 2008 Summer Olympics. The answer is no.", " First, the person would need to have a Rowe 550. Second, the person would need to be able to drive. Third, the person would need to be able to get to Beijing, China. Once they arrived in Beijing, they could take a taxi or other form of transportation to the 2008 Summer Olympics. So, yes, it is possible to drive a Rowe 550 to the 2008 Summer Olympics, but it would be a long and difficult journey. The answer is yes."]}
11
+ {"question": "Q: Could you drive a Rowe 550 to the 2008 Summer Olympics?\nA:", "gold_ans": "yes", "rationale": [" First, the 2008 Summer Olympics were held in Beijing, China. Second, the Rowe 550 is a car. Third, you can't drive a car to Beijing, because Beijing is not in the same country as the United States. The answer is no.", " The 2008 Summer Olympics were held in Beijing, China. Beijing is approximately 5,000 miles from Rowe, Massachusetts. The Rowe 550 is a car that was designed for driving in snow. It is unlikely that the Rowe 550 could make it all the way to Beijing without breaking down. The answer is no.", " The 2008 Summer Olympics were held in Beijing, China. Beijing is approximately 6,700 miles from Rowe, Massachusetts. The Rowe 550 is a car. Cars can be driven long distances, but they need to stop for gas and to rest. It is possible to drive a car to Beijing, but it would be a very long and difficult journey. The answer is yes.", " The 2008 Summer Olympics were held in Beijing, China. Beijing is approximately 9,000 kilometers from Rowe, Montana. The Rowe 550 is a car that has a maximum range of approximately 500 kilometers. Therefore, it is not possible to drive a Rowe 550 to the 2008 Summer Olympics. The answer is no.", " The 2008 Summer Olympics were held in Beijing, China. Beijing is not in the United States. The United States does not have a Rowe 550. Therefore, you could not drive a Rowe 550 to the 2008 Summer Olympics. The answer is no."]}
12
+ {"question": "Q: Could you drive a Rowe 550 to the 2008 Summer Olympics?\nA:", "gold_ans": "yes", "rationale": [" The 2008 Summer Olympics were held in Beijing, China. Beijing is about 5,000 miles from Rowe, Montana. The Rowe 550 is a car that can travel about 500 miles on a full tank of gas. So, in order to drive a Rowe 550 to the 2008 Summer Olympics, someone would need to stop to refuel at least 10 times. The answer is yes.", " The 2008 Summer Olympics were held in Beijing, China. Beijing is located in Asia. The Rowe 550 is a car that was only sold in North America. So, no, you could not drive a Rowe 550 to the 2008 Summer Olympics. The answer is no.", " The 2008 Summer Olympics were held in Beijing, China. Beijing is approximately 6,700 miles from Rowe, Massachusetts. The Rowe 550 is a car that has a maximum range of 400 miles. Therefore, it is not possible to drive a Rowe 550 to the 2008 Summer Olympics. The answer is no.", " The 2008 Summer Olympics were held in Beijing, China. Beijing is approximately 6,000 miles from Rowe, 550. The average car can drive about 500 miles per day. So, it would take approximately 12 days to drive from Rowe, 550 to Beijing. The answer is yes.", " The 2008 Summer Olympics were held in Beijing, China. Beijing is not in the United States, so a Rowe 550 cannot be driven there. The answer is no."], "self_retrieve_knowledge": " The 2008 Summer Olympics, officially known as the Games of the XXIX Olympiad, were held in Beijing, China, from 8 August to 24 August 2008. The Rowe 550 is a Chinese-made electric car."}
13
+ {"question": "Q: Could you drive a Rowe 550 to the 2008 Summer Olympics?\nA:", "gold_ans": "yes", "rationale": [" The 2008 Summer Olympics were held in Beijing, China. Beijing is approximately 6,000 miles from Rowe, 550. The average car can drive 500 miles per day. So, it would take approximately 12 days to drive a Rowe 550 to the 2008 Summer Olympics. The answer is yes.", " The 2008 Summer Olympics were held in Beijing, China. Beijing is approximately 6,700 miles from Rowe, Massachusetts. The Rowe 550 is a car that has a range of approximately 500 miles. Therefore, it is not possible to drive a Rowe 550 to the 2008 Summer Olympics. The answer is no.", " The 2008 Summer Olympics were held in Beijing, China. Beijing is approximately 6,000 miles from Rowe, 550 miles. The average car can drive about 500 miles on a full tank of gas. So, it is possible to drive a Rowe 550 to the 2008 Summer Olympics, but it would require multiple stops for gas. The answer is yes.", " The 2008 Summer Olympics were held in Beijing, China. Beijing is approximately 9,000 kilometers from Rowe, Massachusetts. The Rowe 550 is a car that was designed for driving in snow. It is unlikely that the Rowe 550 could make it all the way to Beijing without breaking down. The answer is no.", " First, the 2008 Summer Olympics were held in Beijing, China. Second, Beijing is approximately 5,000 miles from the closest Rowe dealership, which is in New Jersey, USA. Third, the Rowe 550 is a very small car and it is not designed for long-distance travel. Therefore, it is unlikely that you could drive a Rowe 550 to the 2008 Summer Olympics. The answer is no."], "self_retrieve_knowledge": " The 2008 Summer Olympics, officially known as the Games of the XXIX Olympiad, were held in Beijing, China, from 8 August to 24 August 2008. 
The Rowe 550 is a Chinese-made electric car.", "kb_retrieve_knowledge": "The 2024 Summer Olympics (French: Jeux olympiques d'\u00e9t\u00e9 de 2024), officially the Games of the XXXIII Olympiad (French: Jeux de la XXXIIIe Olympiade) and commonly known as Paris 2024, is an upcoming international multi-sport event that is scheduled to take place from 26 July to 11 August 2024 with Paris as its main host city and 16 cities spread across Metropolitan France and one in Tahiti\u2014an island within the French overseas country and overseas collectivity of French Polynesia\u2014as a subsite.Paris was awarded the Games at the 131st IOC Session in Lima, Peru, on 13 September 2017. Due to multiple withdrawals that left only Paris and Los Angeles in contention, the International Olympic Committee (IOC) approved a process to concurrently award the 2024 and 2028 Summer Olympics to the two cities. Having previously hosted in 1900 and 1924, Paris will become the second city (after London) to host the Summer Olympics three times. Paris 2024 will mark the centenary of Paris 1924, be the sixth Olympic games hosted by France (three in summer and three in winter), and the first Olympic Games in France since the 1992 Winter Olympics in Albertville. The Games would be the first of three successive Summer Olympics to use only two official languages (English and French).", "final_knowledge": "\n\nThe Rowe 550 is a Chinese-made electric car that could be driven to the 2008 Summer Olympics in Beijing, China.", "corrected_rationales": ["\n\nThe Rowe 550 is a Chinese-made electric car that could be driven to the 2008 Summer Olympics in Beijing, China. The average car can drive 500 miles per day, and Beijing is approximately 6,000 miles from Rowe. 
So, it would take approximately 12 days to drive a Rowe 550 to the 2008 Summer Olympics.", "\n\nThe Rowe 550 is a Chinese-made electric car that could not be driven to the 2008 Summer Olympics in Beijing, China, as the distance between Beijing and Rowe, Massachusetts is approximately 6,700 miles, and the Rowe 550 has a range of only 500 miles.", "\n\nThe 2008 Summer Olympics were held in Beijing, China, which is approximately 550 miles from Rowe, the manufacturer of the electric car model 550. The average car can drive about 500 miles on a full tank of gas, so it is possible to drive the Rowe 550 to the Olympics, but it would require multiple stops for gas.", "\n\nThe Rowe 550 is an electric car made in China. It is unlikely that the car could make the 9,000 kilometer journey from Rowe, Massachusetts to Beijing, China for the 2008 Summer Olympics.", "\n\nThe Rowe 550 is an electric car that was manufactured in China. It is not designed for long-distance travel, and therefore it is unlikely that you could have driven it to the 2008 Summer Olympics, which were held in Beijing, China."]}
14
+ {"question": "Q: Could you drive a Rowe 550 to the 2008 Summer Olympics?\nA:", "gold_ans": "yes", "rationale": [" The 2008 Summer Olympics were held in Beijing, China. Beijing is approximately 6,700 miles from Rowe, Massachusetts. The average car can travel about 500 miles per day. So, it would take approximately 14 days to drive a Rowe 550 to the 2008 Summer Olympics. The answer is yes.", " The 2008 Summer Olympics were held in Beijing, China. Beijing is about 9,000 kilometers from Rowe, 550. The average car can drive about 1,000 kilometers per day. So, it would take 9 days to drive from Rowe to Beijing. The answer is yes.", " The 2008 Summer Olympics were held in Beijing, China. Beijing is approximately 6,000 miles from Rowe, Maine. The Rowe 550 is a car that has a maximum range of approximately 500 miles. So, no, you could not drive a Rowe 550 to the 2008 Summer Olympics. The answer is no.", " The 2008 Summer Olympics were held in Beijing, China. Beijing is not in the United States. The Rowe 550 is a car that is only available in the United States. Therefore, you could not drive a Rowe 550 to the 2008 Summer Olympics. The answer is no.", " The 2008 Summer Olympics were held in Beijing, China. Beijing is approximately 5,000 miles from Rowe, Massachusetts. The Rowe 550 is a race car that was designed for use on tracks, not for long-distance driving. Therefore, it is unlikely that you could drive a Rowe 550 to the 2008 Summer Olympics. The answer is no."], "self_retrieve_knowledge": " The 2008 Summer Olympics, officially known as the Games of the XXIX Olympiad, were held in Beijing, China, from 8 August to 24 August 2008. 
The Rowe 550 is a Chinese-made electric car.", "kb_retrieve_knowledge": "The 2024 Summer Olympics (French: Jeux olympiques d'\u00e9t\u00e9 de 2024), officially the Games of the XXXIII Olympiad (French: Jeux de la XXXIIIe Olympiade) and commonly known as Paris 2024, is an upcoming international multi-sport event that is scheduled to take place from 26 July to 11 August 2024 with Paris as its main host city and 16 cities spread across Metropolitan France and one in Tahiti\u2014an island within the French overseas country and overseas collectivity of French Polynesia\u2014as a subsite.Paris was awarded the Games at the 131st IOC Session in Lima, Peru, on 13 September 2017. Due to multiple withdrawals that left only Paris and Los Angeles in contention, the International Olympic Committee (IOC) approved a process to concurrently award the 2024 and 2028 Summer Olympics to the two cities.", "final_knowledge": "\n\nThe Rowe 550 is a Chinese-made electric car that cannot be driven to the 2008 Summer Olympics.", "corrected_rationales": ["\n\nThe 2008 Summer Olympics were held in Beijing, China. The Rowe 550 is a Chinese-made electric car. It would take approximately 14 days to drive the Rowe 550 to the 2008 Summer Olympics.", "\n\nThe Rowe 550 is a Chinese-made electric car that cannot be driven to the 2008 Summer Olympics because it would take 9 days to drive from Rowe to Beijing, and the average car can only drive 1,000 kilometers per day.", "\n\nThe 2008 Summer Olympics were held in Beijing, China. Beijing is approximately 6,000 miles from Rowe, Maine. The Rowe 550 is a car that has a maximum range of approximately 500 miles. So, no, you could not drive a Rowe 550 to the 2008 Summer Olympics.", "\n\nThe 2008 Summer Olympics were held in Beijing, China. The Rowe 550 is a car that is only available in the United States. 
Therefore, it would not have been possible to drive a Rowe 550 to the 2008 Summer Olympics.", "\n\nThe 2008 Summer Olympics were held in Beijing, China, which is approximately 5,000 miles from Rowe, Massachusetts. The Rowe 550 is a race car that was designed for use on tracks, not for long-distance driving. Therefore, it is unlikely that you could have driven a Rowe 550 to the 2008 Summer Olympics."]}
15
+ {"question": "Q: Would Woodrow Wilson support Plessy v. Ferguson decision?\nA:", "gold_ans": "yes", "rationale": [" Plessy v. Ferguson was a court case that established the \"separate but equal\" doctrine. This doctrine allowed for racial segregation. Woodrow Wilson was a supporter of segregation. Therefore, it is likely that he would have supported the Plessy v. Ferguson decision. The answer is yes.", " Plessy v. Ferguson was a Supreme Court case that upheld the legality of racial segregation. Woodrow Wilson was a Democrat who served as the 28th president of the United States. He was in office from 1913 to 1921. During his time in office, Wilson was a strong supporter of racial segregation. In fact, he once said that the Supreme Court's decision in Plessy v. Ferguson was \"right.\" Therefore, it is likely that Wilson would have supported the Plessy v. Ferguson decision. The answer is yes.", " Woodrow Wilson was the 28th president of the United States. Plessy v. Ferguson was a Supreme Court case that was decided in 1896. Woodrow Wilson was not president in 1896. Therefore, we don't know if he would have supported the Plessy v. Ferguson decision. The answer is we don't know.", " Woodrow Wilson was the 28th president of the United States. Plessy v. Ferguson was a Supreme Court case that was decided in 1896. Woodrow Wilson was not president in 1896. Therefore, we don't know if he would have supported the Plessy v. Ferguson decision. The answer is we don't know.", " Plessy v. Ferguson was a court case that upheld the legality of racial segregation. Woodrow Wilson was a racist who supported racial segregation. Therefore, it is likely that Woodrow Wilson would have supported the Plessy v. Ferguson decision. The answer is yes."], "self_retrieve_knowledge": " Woodrow Wilson was the 28th president of the United States, serving from 1913 to 1921. Plessy v. 
Ferguson was a landmark United States Supreme Court decision in the jurisprudence of the United States, upholding the constitutionality of state laws requiring racial segregation in public facilities under the doctrine of \"separate but equal\". Woodrow Wilson would not have supported the Plessy v. Ferguson decision because he was a strong advocate for civil rights.", "kb_retrieve_knowledge": "Woodrow Wilson's tenure as the 28th president of the United States lasted from 4 March 1913 until 4 March 1921. He was largely incapacitated the last year and a half. Brown v. Board of Education of Topeka, 347 U.S. 483 (1954), was a landmark decision by the U.S. Supreme Court which ruled that U.S. state laws establishing racial segregation in public schools are unconstitutional, even if the segregated schools are otherwise equal in quality. The decision partially overruled the Court's 1896 decision Plessy v. Ferguson, which had held that racial segregation laws did not violate the U.S. Constitution as long as the facilities for each race were equal in quality, a doctrine that had come to be known as \"separate but equal\". Sarah, Duchess of York (born Sarah Margaret Ferguson; 15 October 1959), also known by the nickname Fergie, is a British author, television personality, and member of the British royal family. She is the former wife of Prince Andrew, Duke of York, a younger brother of King Charles III.", "final_knowledge": "\n\nWoodrow Wilson would not have supported the Plessy v. Ferguson decision because he was a strong advocate for civil rights.", "corrected_rationales": ["\n\nWoodrow Wilson would not have supported the Plessy v. Ferguson decision because he was a strong advocate for civil rights. As president, he worked to pass the Civil Rights Act of 1964, which outlawed segregation.", "\n\nWoodrow Wilson would have supported the Plessy v. Ferguson decision because he was a strong advocate for racial segregation. 
As he once said, the Supreme Court's decision in Plessy v. Ferguson was \"right.\" Therefore, it is likely that Wilson would have supported the Plessy v. Ferguson decision.", "\n\nWoodrow Wilson would not have supported the Plessy v. Ferguson decision because he was a strong advocate for civil rights. This is evident from his actions as president, such as signing the Civil Rights Act of 1964 into law.", "\n\nWoodrow Wilson would not have supported the Plessy v. Ferguson decision because he was a strong advocate for civil rights. This is evident from his actions as president, such as signing the Civil Rights Act of 1964 into law.", "\n\nWoodrow Wilson would not have supported the Plessy v. Ferguson decision because he was a strong advocate for civil rights. As President of the United States, Wilson worked to pass legislation that would ensure equality for all citizens, regardless of race. He would have seen the Plessy v. Ferguson decision as a step backwards for civil rights in America."]}
16
+ {"question": "Q: Would Woodrow Wilson support Plessy v. Ferguson decision?\nA:", "gold_ans": "yes", "rationale": [" Plessy v. Ferguson was a Supreme Court case that upheld the legality of racial segregation. Woodrow Wilson was a racist who believed in the superiority of the white race. He also believed that blacks were not ready for social equality. Based on these facts, it is highly unlikely that Woodrow Wilson would have supported the Plessy v. Ferguson decision. The answer is no.", " Plessy v. Ferguson was a Supreme Court case that upheld the legality of racial segregation. Woodrow Wilson was a racist who supported racial segregation. Therefore, it is likely that Woodrow Wilson would have supported the Plessy v. Ferguson decision. The answer is yes.", " Woodrow Wilson was the 28th president of the United States. Plessy v. Ferguson was a Supreme Court case that was decided in 1896. Woodrow Wilson was not president in 1896. Therefore, we don't know if he would have supported the Plessy v. Ferguson decision. The answer is we don't know.", " Woodrow Wilson was the 28th president of the United States. Plessy v. Ferguson was a Supreme Court case that upheld the legality of separate but equal facilities for blacks and whites. Wilson was a segregationist and would later implement segregation in the federal government, so it's likely that he would have supported the Plessy v. Ferguson decision. The answer is yes.", " Plessy v. Ferguson was a court case that established the \"separate but equal\" doctrine. This doctrine allowed for racial segregation. Woodrow Wilson was a racist who supported racial segregation. Therefore, it is likely that he would have supported the Plessy v. Ferguson decision. The answer is yes."], "kb_retrieve_knowledge": "Woodrow Wilson's tenure as the 28th president of the United States lasted from 4 March 1913 until 4 March 1921. He was largely incapacitated the last year and a half. Brown v. Board of Education of Topeka, 347 U.S. 
483 (1954), was a landmark decision by the U.S. Supreme Court which ruled that U.S. state laws establishing racial segregation in public schools are unconstitutional, even if the segregated schools are otherwise equal in quality. The decision partially overruled the Court's 1896 decision Plessy v. Ferguson, which had held that racial segregation laws did not violate the U.S. Constitution as long as the facilities for each race were equal in quality, a doctrine that had come to be known as \"separate but equal\". Sarah, Duchess of York (born Sarah Margaret Ferguson; 15 October 1959), also known by the nickname Fergie, is a British author, television personality, and member of the British royal family. She is the former wife of Prince Andrew, Duke of York, a younger brother of King Charles III.", "final_knowledge": "Woodrow Wilson's tenure as the 28th president of the United States lasted from 4 March 1913 until 4 March 1921. He was largely incapacitated the last year and a half. Brown v. Board of Education of Topeka, 347 U.S. 483 (1954), was a landmark decision by the U.S. Supreme Court which ruled that U.S. state laws establishing racial segregation in public schools are unconstitutional, even if the segregated schools are otherwise equal in quality. The decision partially overruled the Court's 1896 decision Plessy v. Ferguson, which had held that racial segregation laws did not violate the U.S. Constitution as long as the facilities for each race were equal in quality, a doctrine that had come to be known as \"separate but equal\". Sarah, Duchess of York (born Sarah Margaret Ferguson; 15 October 1959), also known by the nickname Fergie, is a British author, television personality, and member of the British royal family. She is the former wife of Prince Andrew, Duke of York, a younger brother of King Charles III.", "corrected_rationales": ["\n\nWoodrow Wilson's tenure as the 28th president of the United States lasted from 4 March 1913 until 4 March 1921. 
He was largely incapacitated the last year and a half. Brown v. Board of Education of Topeka, 347 U.S. 483 (1954), was a landmark decision by the U.S. Supreme Court which ruled that U.S. state laws establishing racial segregation in public schools are unconstitutional, even if the segregated schools are otherwise equal in quality. The decision partially overruled the Court's 1896 decision Plessy v. Ferguson, which had held that racial segregation laws did not violate the U.S. Constitution as long as the facilities for each race were equal in quality, a doctrine that had come to be known as \"separate but equal\". Sarah, Duchess of York (born Sarah Margaret Ferguson; 15 October 1959), also known by the nickname Fergie, is a British author, television personality, and member of the British royal family. She is the former wife of Prince Andrew, Duke of York, a younger brother of King Charles III.\n\nWoodrow Wilson was a racist who believed in the superiority of the white race. He also believed that blacks were not ready for social equality. Based on these facts", "\n\nWoodrow Wilson's tenure as the 28th president of the United States lasted from 4 March 1913 until 4 March 1921. He was largely incapacitated the last year and a half. Brown v. Board of Education of Topeka, 347 U.S. 483 (1954), was a landmark decision by the U.S. Supreme Court which ruled that U.S. state laws establishing racial segregation in public schools are unconstitutional, even if the segregated schools are otherwise equal in quality. The decision partially overruled the Court's 1896 decision Plessy v. Ferguson, which had held that racial segregation laws did not violate the U.S. Constitution as long as the facilities for each race were equal in quality, a doctrine that had come to be known as \"separate but equal\". 
Sarah, Duchess of York (born Sarah Margaret Ferguson; 15 October 1959), also known by the nickname Fergie, is a British author, television personality, and member of the British royal family. She is the former wife of Prince Andrew, Duke of York, a younger brother of King Charles III.\n\nGiven that Woodrow Wilson was a racist who supported racial segregation, it is likely that he would have supported the Plessy v. Ferguson decision.", "\n\nWoodrow Wilson's tenure as the 28th president of the United States lasted from 4 March 1913 until 4 March 1921. He was largely incapacitated the last year and a half. Brown v. Board of Education of Topeka, 347 U.S. 483 (1954), was a landmark decision by the U.S. Supreme Court which ruled that U.S. state laws establishing racial segregation in public schools are unconstitutional, even if the segregated schools are otherwise equal in quality. The decision partially overruled the Court's 1896 decision Plessy v. Ferguson, which had held that racial segregation laws did not violate the U.S. Constitution as long as the facilities for each race were equal in quality, a doctrine that had come to be known as \"separate but equal\". Sarah, Duchess of York (born Sarah Margaret Ferguson; 15 October 1959), also known by the nickname Fergie, is a British author, television personality, and member of the British royal family. She is the former wife of Prince Andrew, Duke of York, a younger brother of King Charles III.\n\nWe don't know if Woodrow Wilson would have supported the Plessy v. Ferguson decision because he was not president when the case was decided.", "\n\nWoodrow Wilson was the 28th president of the United States. His tenure saw the implementation of segregation in the federal government. It is likely that he would have supported the Plessy v. 
Ferguson decision, which upheld the legality of separate but equal facilities for blacks and whites.", "\n\nWoodrow Wilson's tenure as the 28th president of the United States lasted from 4 March 1913 until 4 March 1921. He was largely incapacitated the last year and a half. Brown v. Board of Education of Topeka, 347 U.S. 483 (1954), was a landmark decision by the U.S. Supreme Court which ruled that U.S. state laws establishing racial segregation in public schools are unconstitutional, even if the segregated schools are otherwise equal in quality. The decision partially overruled the Court's 1896 decision Plessy v. Ferguson, which had held that racial segregation laws did not violate the U.S. Constitution as long as the facilities for each race were equal in quality, a doctrine that had come to be known as \"separate but equal\". Sarah, Duchess of York (born Sarah Margaret Ferguson; 15 October 1959), also known by the nickname Fergie, is a British author, television personality, and member of the British royal family. She is the former wife of Prince Andrew, Duke of York, a younger brother of King Charles III.\n\nGiven that Woodrow Wilson was a racist who supported racial segregation, it is likely that he would have supported the Plessy v. Ferguson decision."]}
17
+ {"question": "Q: Would Woodrow Wilson support Plessy v. Ferguson decision?\nA:", "gold_ans": "yes", "rationale": [" Plessy v. Ferguson was a Supreme Court case that upheld the constitutionality of racial segregation. Woodrow Wilson was a racist who believed in white supremacy. Therefore, it is likely that he would have supported the Plessy v. Ferguson decision. The answer is yes.", " Plessy v. Ferguson was a Supreme Court case that upheld the doctrine of \"separate but equal.\" Woodrow Wilson was the 28th president of the United States. He was a Democrat. The Democratic Party at the time supported Plessy v. Ferguson. Therefore, it is likely that Woodrow Wilson supported the Plessy v. Ferguson decision. The answer is yes.", " Woodrow Wilson was the 28th president of the United States. Plessy v. Ferguson was a court case that was decided in 1896. Wilson was not president in 1896. Therefore, we cannot say for certain whether or not he would have supported the decision. The answer is we don't know.", " Woodrow Wilson was the 28th president of the United States. Plessy v. Ferguson was a Supreme Court case that was decided in 1896. Woodrow Wilson was not president in 1896. Therefore, we don't know whether or not he would have supported the Plessy v. Ferguson decision. The answer is we don't know.", " Woodrow Wilson was the 28th president of the United States. Plessy v. Ferguson was a court case that was decided in 1896. Wilson was not president in 1896. Therefore, we don't know whether or not he would have supported the Plessy v. Ferguson decision. The answer is we don't know."], "kb_retrieve_knowledge": "Woodrow Wilson's tenure as the 28th president of the United States lasted from 4 March 1913 until 4 March 1921. He was largely incapacitated the last year and a half. Brown v. Board of Education of Topeka, 347 U.S. 483 (1954), was a landmark decision by the U.S. Supreme Court which ruled that U.S. 
state laws establishing racial segregation in public schools are unconstitutional, even if the segregated schools are otherwise equal in quality. The decision partially overruled the Court's 1896 decision Plessy v. Ferguson, which had held that racial segregation laws did not violate the U.S. Constitution as long as the facilities for each race were equal in quality, a doctrine that had come to be known as \"separate but equal\". Sarah, Duchess of York (born Sarah Margaret Ferguson; 15 October 1959), also known by the nickname Fergie, is a British author, television personality, and member of the British royal family. She is the former wife of Prince Andrew, Duke of York, a younger brother of King Charles III.", "final_knowledge": "\n\nWoodrow Wilson was the 28th president of the United States from 1913 to 1921. He was largely incapacitated the last year and a half of his tenure. The U.S. Supreme Court's 1954 decision in Brown v. Board of Education of Topeka ruled that state laws establishing racial segregation in public schools are unconstitutional. This partially overruled the Court's 1896 decision in Plessy v. Ferguson, which had held that racial segregation laws did not violate the Constitution as long as the facilities for each race were equal in quality.", "corrected_rationales": ["\n\nWoodrow Wilson was the 28th president of the United States from 1913 to 1921. He was largely incapacitated the last year and a half of his tenure. The U.S. Supreme Court's 1954 decision in Brown v. Board of Education of Topeka ruled that state laws establishing racial segregation in public schools are unconstitutional. This partially overruled the Court's 1896 decision in Plessy v. Ferguson, which had held that racial segregation laws did not violate the Constitution as long as the facilities for each race were equal in quality.\n\nGiven that Woodrow Wilson was a racist who believed in white supremacy, it is likely that he would have supported the Plessy v. 
Ferguson decision.", "\n\nWoodrow Wilson, the 28th president of the United States, was a Democrat who likely supported the Plessy v. Ferguson decision because the Democratic Party at the time supported the decision. The Plessy v. Ferguson decision upheld the doctrine of \"separate but equal.\" However, the Supreme Court's 1954 decision in Brown v. Board of Education of Topeka ruled that state laws establishing racial segregation in public schools are unconstitutional, partially overruling the Plessy v. Ferguson decision.", "\n\nWoodrow Wilson was the 28th president of the United States. He was in office from 1913 to 1921. The Plessy v. Ferguson court case was decided in 1896, before Wilson was president. Therefore, we cannot say for certain whether or not he would have supported the decision. The answer is we don't know.", "\n\nWoodrow Wilson was the 28th president of the United States. The U.S. Supreme Court's 1896 decision in Plessy v. Ferguson held that state laws establishing racial segregation in public schools were constitutional. However, the Court's 1954 decision in Brown v. Board of Education of Topeka ruled that such laws are unconstitutional. Therefore, it is unknown whether or not Wilson would have supported the Plessy v. Ferguson decision.", "\n\nWoodrow Wilson was the 28th president of the United States. He was in office from 1913 to 1921. The Plessy v. Ferguson court case was decided in 1896, before Wilson was president. We don't know whether or not he would have supported the Plessy v. Ferguson decision."]}
18
+ {"question": "Q: Is \"A Tale of Two Cities\" a parody of the Bible?\nA:", "gold_ans": "no", "rationale": [" \"A Tale of Two Cities\" is a novel by Charles Dickens. The Bible is a religious text. \"A Tale of Two Cities\" is not a religious text. Therefore, \"A Tale of Two Cities\" is not a parody of the Bible. The answer is no.", " \"A Tale of Two Cities\" is a novel by Charles Dickens. The Bible is a collection of religious texts. \"A Tale of Two Cities\" is not a parody of the Bible. The answer is no.", " \"A Tale of Two Cities\" is a novel by Charles Dickens. The Bible is a religious text. \"A Tale of Two Cities\" is not a religious text. Therefore, \"A Tale of Two Cities\" is not a parody of the Bible. The answer is no.", " \"A Tale of Two Cities\" is a novel by Charles Dickens. The Bible is a religious text. \"A Tale of Two Cities\" is not a religious text. Therefore, \"A Tale of Two Cities\" is not a parody of the Bible. The answer is no.", " \"A Tale of Two Cities\" is a novel by Charles Dickens. The Bible is a religious text. \"A Tale of Two Cities\" is not a religious text. Therefore, \"A Tale of Two Cities\" is not a parody of the Bible. The answer is no."], "kb_retrieve_knowledge": "\"A Tale of Two Cities\" is the third season premiere, and 50th episode overall, of the American Broadcasting Company (ABC)'s serial drama television series Lost. The episode was written by co-creators/executive producers J. J. Abrams and Damon Lindelof, based on a story by Lindelof and directed by executive producer Jack Bender. The Bible (from Koine Greek \u03c4\u1f70 \u03b2\u03b9\u03b2\u03bb\u03af\u03b1, t\u00e0 bibl\u00eda, 'the books') is a collection of religious texts or scriptures, some, all of which, or a variant of which, are held to be sacred in Christianity, Judaism, Samaritanism, Islam, and many other religions. 
The Bible is an anthology, a compilation of texts of a variety of forms, originally written in Hebrew, Aramaic, and Koine Greek.", "final_knowledge": "\n\n\"A Tale of Two Cities\" is not a parody of the Bible.", "corrected_rationales": ["\"A Tale of Two Cities\" is not a parody of the Bible because it is not a religious text.", "\"A Tale of Two Cities\" is not a parody of the Bible. The Bible is a collection of religious texts, while \"A Tale of Two Cities\" is a novel by Charles Dickens. There is no evidence that Dickens intended to parody the Bible in his novel.", "\"A Tale of Two Cities\" is not a parody of the Bible because it is not a religious text.", "\"A Tale of Two Cities\" is not a parody of the Bible because it is not a religious text.", "\"A Tale of Two Cities\" is not a parody of the Bible because it is not a religious text."]}
19
+ {"question": "Q: Do manta rays live in water above the safe temperature for cold food storage?\nA:", "gold_ans": "yes", "rationale": [" Manta rays are a type of animal. Animals need to live in a habitat that meets their needs. Manta rays live in tropical and subtropical waters. The water temperature in these areas is above the safe temperature for cold food storage. Therefore, manta rays live in water above the safe temperature for cold food storage. The answer is yes.", " Manta rays live in warm ocean waters. The safe temperature for cold food storage is below 40 degrees Fahrenheit. Therefore, manta rays do not live in water above the safe temperature for cold food storage. The answer is no.", " Manta rays are a type of animal. They live in water. The safe temperature for cold food storage is 40 degrees Fahrenheit. The average temperature of the ocean is around 72 degrees Fahrenheit. Therefore, manta rays do live in water above the safe temperature for cold food storage. The answer is yes.", " Manta rays are a type of fish. All fish live in water. The safe temperature for cold food storage is 40 degrees Fahrenheit. Therefore, manta rays live in water above the safe temperature for cold food storage. The answer is yes.", " Manta rays are a type of animal. Animals need water to live. The safe temperature for cold food storage is 40 degrees Fahrenheit. Therefore, manta rays do not live in water above the safe temperature for cold food storage. The answer is no."], "final_knowledge": ""}
20
+ {"question": "Q: Do manta rays live in water above the safe temperature for cold food storage?\nA:", "gold_ans": "yes", "rationale": [" Manta rays are a type of fish. All fish live in water. The safe temperature for cold food storage is 40 degrees Fahrenheit. Therefore, manta rays live in water above the safe temperature for cold food storage. The answer is yes.", " Manta rays are a type of animal. They live in the ocean. The ocean is a source of water. The safe temperature for cold food storage is 40 degrees Fahrenheit. Therefore, manta rays live in water above the safe temperature for cold food storage. The answer is yes.", " Manta rays are a type of fish. All fish live in water. Water has a temperature. The safe temperature for cold food storage is below 40 degrees Fahrenheit. Therefore, manta rays live in water above the safe temperature for cold food storage. The answer is yes.", " Manta rays are a type of animal. They live in water. The safe temperature for cold food storage is 40 degrees Fahrenheit. Therefore, manta rays live in water above the safe temperature for cold food storage. The answer is yes.", " Manta rays are a type of animal. Animals need to live in an environment that meets their needs. Manta rays live in warm ocean waters. The safe temperature for cold food storage is below 40 degrees Fahrenheit. Therefore, manta rays do not live in water above the safe temperature for cold food storage. The answer is no."], "final_knowledge": ""}
21
+ {"question": "Q: Are thetan levels found in the New Testament?\nA:", "gold_ans": "no", "rationale": [" Thetan levels are a measure of a person's spiritual state, according to the Church of Scientology. The New Testament is a Christian holy book. The Church of Scientology is not a Christian organization. Therefore, it is unlikely that thetan levels would be mentioned in the New Testament. The answer is no.", " Thetan levels are a measure of a person's spiritual state, according to the Scientology religion. The New Testament is a Christian religious text. Scientology is not a Christian religion. Therefore, it is unlikely that thetan levels would be mentioned in the New Testament. The answer is no.", " Thetan levels are a measure of a person's spiritual state, according to the Church of Scientology. The New Testament is a collection of religious texts. There is no mention of thetan levels in the New Testament. Therefore, we can conclude that thetan levels are not found in the New Testament. The answer is no.", " Thetan levels are a concept from Scientology. The New Testament is a Christian religious text. Scientology is not a Christian religion. Therefore, it is unlikely that thetan levels would be mentioned in the New Testament. The answer is no.", " Thetan levels are a Scientology concept. The New Testament is a religious text. Scientology is not a religion. Therefore, thetan levels are not found in the New Testament. The answer is no."], "kb_retrieve_knowledge": "The New Testament (NT) is the second division of the Christian biblical canon. It discusses the teachings and person of Jesus, as well as events in first-century Christianity.", "final_knowledge": "\n\nNo, thetan levels are not found in the New Testament.", "corrected_rationales": ["The Church of Scientology does not believe in the existence of thetan levels. 
Therefore, it is unlikely that thetan levels would be mentioned in the New Testament.", "Thetan levels are not mentioned in the New Testament because Scientology is not a Christian religion.", "Thetan levels are a measure of a person's spiritual state, according to the Church of Scientology. The New Testament is a collection of religious texts. There is no mention of thetan levels in the New Testament. Therefore, we can conclude that thetan levels are not found in the New Testament.", "Thetan levels are a concept from Scientology. The New Testament is a Christian religious text. Scientology is not a Christian religion. Therefore, it is unlikely that thetan levels would be mentioned in the New Testament.", "No, thetan levels are not found in the New Testament because Scientology is not a religion."]}
22
+ {"question": "Q: Are thetan levels found in the New Testament?\nA:", "gold_ans": "no", "rationale": [" Thetan levels are a measure of a person's spiritual state, according to the Church of Scientology. The New Testament is a religious text that is central to Christianity. The answer is no.", " Thetan levels are a measure of a person's spiritual state, and they are used by the Church of Scientology. The New Testament is a book of the Christian Bible. The answer is no.", " Thetan levels are a measure of a person's spiritual state, according to the Church of Scientology. The New Testament is a religious text that tells the story of Jesus Christ. The answer is no.", " Thetan levels are a measure of a person's spiritual state, according to the Church of Scientology. The New Testament is a book of the Christian Bible. The answer is no.", " Thetan levels are a measure of a person's spiritual state, according to the Church of Scientology. The New Testament is a Christian religious text. The answer is no."], "kb_retrieve_knowledge": "The New Testament (NT) is the second division of the Christian biblical canon. It discusses the teachings and person of Jesus, as well as events in first-century Christianity.", "final_knowledge": "\n\nNo, thetan levels are not found in the New Testament.", "corrected_rationales": ["No, thetan levels are not found in the New Testament because thetan levels are a measure of a person's spiritual state according to the Church of Scientology, and the New Testament is a religious text that is central to Christianity.", "No, thetan levels are not found in the New Testament because thetan levels are a measure of a person's spiritual state, and they are used by the Church of Scientology. The New Testament is a book of the Christian Bible.", "No, thetan levels are not mentioned in the New Testament. Thetan levels are a measure of a person's spiritual state, according to the Church of Scientology. 
The New Testament is a religious text that tells the story of Jesus Christ. Therefore, the answer is no.", "No, thetan levels are not found in the New Testament because thetan levels are a measure of a person's spiritual state, according to the Church of Scientology, and the New Testament is a book of the Christian Bible.", "No, thetan levels are not found in the New Testament because thetan levels are a measure of a person's spiritual state according to the Church of Scientology, and the New Testament is a Christian religious text."]}
23
+ {"question": "Q: Are thetan levels found in the New Testament?\nA:", "gold_ans": "no", "rationale": [" Thetan levels are a measure of spiritual energy used by the Church of Scientology. The New Testament is a Christian religious text. The Church of Scientology is not a Christian organization. Therefore, it is unlikely that thetan levels would be found in the New Testament. The answer is no.", " Thetan levels are a Scientology concept. The New Testament is a book of the Christian Bible. Scientology and Christianity are two different religions. Therefore, it is unlikely that thetan levels would be found in the New Testament. The answer is no.", " Thetan levels are a measure of spiritual energy. The New Testament is a religious text. There is no mention of thetan levels in the New Testament. Therefore, it is unlikely that thetan levels are found in the New Testament. The answer is no.", " Thetan levels are a measure of a person's spiritual state. The New Testament is a book of the Christian Bible. The Christian Bible does not mention thetan levels. Therefore, the answer is no.", " Thetan levels are a measure of a person's spiritual state, according to the Church of Scientology. The New Testament is a book of the Christian Bible. The Church of Scientology is not a Christian organization. Therefore, it is unlikely that thetan levels would be mentioned in the New Testament. The answer is no."], "kb_retrieve_knowledge": "The canon of the New Testament is the set of books many modern Christians regard as divinely inspired and constituting the New Testament of the Christian Bible. For historical Christians, canonization was based on whether the material was from authors socially approximate to the apostles and not based solely on divine inspiration.", "final_knowledge": "\n\nNo, thetan levels are not found in the New Testament.", "corrected_rationales": ["The Church of Scientology does not use thetan levels, which are a measure of spiritual energy, in the New Testament. 
The New Testament is a Christian religious text, and the Church of Scientology is not a Christian organization. Therefore, it is unlikely that thetan levels would be found in the New Testament.", "Thetan levels are a Scientology concept. The New Testament is a book of the Christian Bible. Scientology and Christianity are two different religions. Therefore, it is unlikely that thetan levels would be found in the New Testament. The answer is no.", "Thetan levels are a measure of spiritual energy. The New Testament is a religious text. There is no mention of thetan levels in the New Testament. Therefore, it is unlikely that thetan levels are found in the New Testament.", "No, thetan levels are not mentioned in the New Testament because the Christian Bible does not focus on a person's spiritual state.", "The Church of Scientology does not believe in the Bible, and therefore it is unlikely that thetan levels would be mentioned in the New Testament."]}
requirements.txt CHANGED
@@ -1,10 +1,4 @@
1
- openai==0.27.4
2
- torch==2.0.0
3
- wikipedia==1.4.0
4
- Wikipedia-API==0.5.4
5
- en-core-web-sm @ https://github.com/explosion/spacy-models/releases/download/en_core_web_sm-3.6.0/en_core_web_sm-3.6.0-py3-none-any.whl
6
- ngram==4.0.3
7
- scikit-learn==1.2.1
8
- transformers==4.23.1
9
- sentence-transformers==2.2.2
10
- nltk==3.8.1
 
1
+ openai==0.27.0
2
+ torch==1.13.1
3
+ pandas==1.5.2
4
+ torchtext==0.14.1
 
 
 
 
 
 
run_correction.sh ADDED
@@ -0,0 +1,16 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
#!/usr/bin/env bash
# Run the self-correction experiment for one dataset.
#
# SECURITY: the original script committed a live OpenAI API key to version
# control. That key is compromised and must be revoked/rotated immediately.
# The key is now taken from the caller's environment instead of being
# hard-coded here.
set -euo pipefail

# Fail fast with a clear message if the caller has not provided a key.
: "${OPENAI_API_KEY:?Set OPENAI_API_KEY in your environment before running}"
export OPENAI_API_KEY

DATASET="strategyqa"                  # which benchmark to run
LIMIT_DATASET_SIZE="1"                # number of examples to process
DEMO_PATH="./demos/${DATASET}"        # demonstrations built by self_construction.py
OUTPUT_DIR="./experiment/${DATASET}"  # where results are written
RETR_METHOD="kb_only"                 # knowledge-retrieval strategy

# Quote every expansion so paths with spaces cannot break the command line.
python self_correction.py \
    --dataset "${DATASET}" \
    --limit_dataset_size "${LIMIT_DATASET_SIZE}" \
    --demo_path "${DEMO_PATH}" \
    --output_dir "${OUTPUT_DIR}" \
    --retrieve_method "${RETR_METHOD}" \
    --resume_id 45
self_construction.py ADDED
@@ -0,0 +1,171 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import random
2
+ from sentence_transformers import SentenceTransformer
3
+ from sklearn.cluster import KMeans
4
+ from sklearn.decomposition import PCA
5
+ import numpy as np
6
+ import json
7
+ import matplotlib.pyplot as plt
8
+ import argparse
9
+ from utils import fix_seed
10
+
11
+
12
+ def parse_arguments():
13
+ parser = argparse.ArgumentParser(description="Zero-shot-CoT")
14
+ parser.add_argument(
15
+ "--task", type=str, default="strategyqa",
16
+ choices=["aqua", "gsm8k", "commonsensqa", "addsub", "multiarith", "strategyqa", "svamp", "singleeq", "coin_flip", "last_letters"], help="dataset used for experiment"
17
+ )
18
+ parser.add_argument(
19
+ "--max_ra_len", type=int, default=5, help="maximum number of reasoning chains"
20
+ )
21
+ parser.add_argument(
22
+ "--pred_file", type=str, default="log/multiarith_zero_shot_cot.log",
23
+ help="use the reasoning chains generated by zero-shot-cot."
24
+ )
25
+ parser.add_argument(
26
+ "--demo_save_dir", type=str, default="demos/multiarith", help="where to save the contructed demonstrations"
27
+ )
28
+ parser.add_argument("--random_seed", type=int, default=192, help="random seed")
29
+ parser.add_argument(
30
+ "--encoder", type=str, default="all-MiniLM-L6-v2", help="which sentence-transformer encoder for clustering"
31
+ )
32
+ parser.add_argument(
33
+ "--sampling", type=str, default="center", help="whether to sample the cluster center first"
34
+ )
35
+ parser.add_argument(
36
+ "--debug", type=bool, default=True, help="debug mode"
37
+ )
38
+ args = parser.parse_args()
39
+ return args
40
+
41
+ def main():
42
+ args = parse_arguments()
43
+ fix_seed(args.random_seed)
44
+ encoder = SentenceTransformer(args.encoder)
45
+
46
+ task = args.task
47
+ pred_file = args.pred_file
48
+ save_file = args.demo_save_dir
49
+ max_ra_len = args.max_ra_len
50
+ if task == "last_letters":
51
+ max_ra_len = 7
52
+ if task == "aqua" or task == "last_letters":
53
+ num_clusters = 4
54
+ elif task == "commonsensqa":
55
+ num_clusters = 7
56
+ elif task == "strategyqa":
57
+ num_clusters = 6
58
+ else:
59
+ num_clusters = 8
60
+
61
+ corpus = []
62
+ question = []
63
+ rationale = []
64
+ gold_ans = []
65
+ pred_ans = []
66
+
67
+ with open(pred_file, "r", encoding="utf-8") as fp:
68
+ answer_seg = ""
69
+ for line in fp:
70
+ if "Q: " in line:
71
+ c_question = line.strip()
72
+ if "A: " in line:
73
+ answer_seg = line
74
+ elif "Therefore" in line and "the answer" in line:
75
+ c_rationale = answer_seg
76
+
77
+ elif answer_seg != "":
78
+ answer_seg += line
79
+ if "pred_mode" in line:
80
+ c_pred_ans = line.split(":")[1].strip()
81
+ if "GT :" in line:
82
+ c_gold_ans = line.split(":")[1].strip()
83
+
84
+ c_rationale = c_rationale.replace("A: Let's think step by step.", "Let's think step by step.")
85
+ c_question = c_question + "\nA:"
86
+
87
+ corpus.append(c_question)
88
+ question.append(c_question)
89
+ rationale.append(c_rationale)
90
+ pred_ans.append(c_pred_ans)
91
+ if args.debug:
92
+ gold_ans.append(c_gold_ans)
93
+ answer_seg = ""
94
+
95
+ corpus_embeddings = encoder.encode(corpus)
96
+
97
+ # Perform kmean clustering
98
+ clustering_model = KMeans(n_clusters=num_clusters, random_state=args.random_seed)
99
+ clustering_model.fit(corpus_embeddings)
100
+ cluster_assignment = clustering_model.labels_
101
+
102
+ clustered_sentences = [[] for i in range(num_clusters)]
103
+
104
+ dist = clustering_model.transform(corpus_embeddings)
105
+ clustered_dists = [[] for i in range(num_clusters)]
106
+ clustered_idx = [[] for i in range(num_clusters)]
107
+ for sentence_id, cluster_id in enumerate(cluster_assignment):
108
+ clustered_sentences[cluster_id].append(corpus[sentence_id])
109
+ clustered_dists[cluster_id].append(dist[sentence_id][cluster_id])
110
+ clustered_idx[cluster_id].append(sentence_id)
111
+
112
+ demos = []
113
+
114
+ for i in range(len(clustered_dists)):
115
+ print("Cluster ", i+1)
116
+ tmp = list(map(list, zip(range(len(clustered_dists[i])), clustered_dists[i])))
117
+ top_min_dist = sorted(tmp, key=lambda x: x[1], reverse=False)
118
+ if not args.sampling == "center":
119
+ random.shuffle(top_min_dist)
120
+ for element in top_min_dist:
121
+ min_idx = element[0]
122
+ c_rationale = rationale[clustered_idx[i][min_idx]].strip()
123
+ c_pred_ans = pred_ans[clustered_idx[i][min_idx]].strip()
124
+
125
+ if len(question[clustered_idx[i][min_idx]].strip().split()) <= 60 \
126
+ and len(c_rationale.replace("\n\n", "\n").split("\n")) <= max_ra_len and c_rationale[-1] == "." and c_pred_ans != "":
127
+ if args.task in ["gsm8k", "multiarith", "singleeq", "addsub", "svamp"]:
128
+ if not (c_pred_ans.strip() in c_rationale.split(".")[-2] or c_pred_ans.strip() in c_rationale.split()[-10:]):
129
+ continue
130
+ c_question = question[clustered_idx[i][min_idx]]
131
+ c_rationale = c_rationale.replace("\n\n", "\n").replace("\n", " ").strip()
132
+ c_rationale = " ".join(c_rationale.split())
133
+ if args.debug:
134
+ c_gold_ans = gold_ans[clustered_idx[i][min_idx]]
135
+ else:
136
+ c_gold_ans = None
137
+ demo_element = {
138
+ "question": c_question,
139
+ "rationale": c_rationale,
140
+ "pred_ans": c_pred_ans,
141
+ "gold_ans": c_gold_ans,
142
+ }
143
+ demos.append(demo_element)
144
+ print(c_question)
145
+ print(c_rationale)
146
+ print(c_pred_ans)
147
+ print(c_gold_ans)
148
+ print("")
149
+ break
150
+
151
+ demos = {"demo": demos}
152
+
153
+ with open(args.demo_save_dir, 'w', encoding="utf-8") as write_f:
154
+ json.dump(demos, write_f, indent=4, ensure_ascii=False)
155
+
156
+ y_km = clustering_model.fit_predict(corpus_embeddings)
157
+ pca_model = PCA(n_components=2, random_state=args.random_seed)
158
+ transformed = pca_model.fit_transform(corpus_embeddings)
159
+ centers = pca_model.transform(clustering_model.cluster_centers_)
160
+
161
+ plt.scatter(x=transformed[:, 0], y=transformed[:, 1], c=y_km, s=50, cmap=plt.cm.Paired, alpha=0.4)
162
+ plt.scatter(centers[:, 0],centers[:, 1],
163
+ s=250, marker='*', label='centroids',
164
+ edgecolor='black',
165
+ c=np.arange(0,num_clusters),cmap=plt.cm.Paired,)
166
+ plt.xticks([])
167
+ plt.yticks([])
168
+ plt.savefig(save_file+".png", dpi=600)
169
+
170
+ if __name__ == "__main__":
171
+ main()
z.ipynb ADDED
@@ -0,0 +1,101 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": 14,
6
+ "metadata": {},
7
+ "outputs": [],
8
+ "source": [
9
+ "from collections import Counter\n",
10
+ "\n",
11
+ "def consistency(arr):\n",
12
+ " len_ans = len(arr)\n",
13
+ " arr_acounts = Counter(arr)\n",
14
+ " ans_freq_tuple = arr_acounts.most_common(len_ans)\n",
15
+ " most_frequent_item, _ = ans_freq_tuple[0]\n",
16
+ " ans_dict = {}\n",
17
+ " for ans_freq in ans_freq_tuple:\n",
18
+ " ans, times = ans_freq\n",
19
+ " ans_dict[ans] = times/len_ans\n",
20
+ " return most_frequent_item, ans_dict"
21
+ ]
22
+ },
23
+ {
24
+ "cell_type": "code",
25
+ "execution_count": 15,
26
+ "metadata": {},
27
+ "outputs": [
28
+ {
29
+ "data": {
30
+ "text/plain": [
31
+ "('A', {'A': 0.6, 'B': 0.2, 'C': 0.2})"
32
+ ]
33
+ },
34
+ "execution_count": 15,
35
+ "metadata": {},
36
+ "output_type": "execute_result"
37
+ }
38
+ ],
39
+ "source": [
40
+ "ans = [\"A\",\"A\",\"A\",\"B\",\"C\"]\n",
41
+ "\n",
42
+ "consistency(ans)"
43
+ ]
44
+ },
45
+ {
46
+ "cell_type": "code",
47
+ "execution_count": 10,
48
+ "metadata": {},
49
+ "outputs": [
50
+ {
51
+ "name": "stdout",
52
+ "output_type": "stream",
53
+ "text": [
54
+ "[('A', 3), ('B', 1), ('C', 1)]\n"
55
+ ]
56
+ }
57
+ ],
58
+ "source": [
59
+ "print(arr_acounts.most_common(len(ans)))"
60
+ ]
61
+ },
62
+ {
63
+ "cell_type": "code",
64
+ "execution_count": null,
65
+ "metadata": {},
66
+ "outputs": [],
67
+ "source": [
68
+ "ans_dict = {}\n",
69
+ "ans_freq_tuple = arr_acounts.most_common(len(ans))\n",
70
+ "for ans_freq in ans_freq_tuple:\n",
71
+ " ans, times = ans_freq\n",
72
+ " ans_dict[ans] = times/len(arr)"
73
+ ]
74
+ }
75
+ ],
76
+ "metadata": {
77
+ "interpreter": {
78
+ "hash": "1e4907852c31268ef23280febd46201e788a9f549728e852c50dbb06a5d70124"
79
+ },
80
+ "kernelspec": {
81
+ "display_name": "Python 3.8.17 ('qual')",
82
+ "language": "python",
83
+ "name": "python3"
84
+ },
85
+ "language_info": {
86
+ "codemirror_mode": {
87
+ "name": "ipython",
88
+ "version": 3
89
+ },
90
+ "file_extension": ".py",
91
+ "mimetype": "text/x-python",
92
+ "name": "python",
93
+ "nbconvert_exporter": "python",
94
+ "pygments_lexer": "ipython3",
95
+ "version": "3.8.17"
96
+ },
97
+ "orig_nbformat": 4
98
+ },
99
+ "nbformat": 4,
100
+ "nbformat_minor": 2
101
+ }