import os
import json
import hashlib

# Convert dev.json (presumably HotpotQA-format: _id/question/answer plus
# context as (title, [sentences]) pairs — TODO confirm against the dataset)
# into BEIR-style queries.jsonl and corpus.jsonl in the current directory.
#
# NOTE(review): the original leaked the file handle via open(...).read() and
# relied on the locale's default encoding; both fixed here.
with open("dev.json", encoding="utf-8") as src:
    data = json.load(src)

# One query record per question; the gold answer is stored in metadata.
with open("queries.jsonl", "w", encoding="utf-8") as f:
    for d in data:
        q = {"_id": d["_id"], "text": d["question"], "metadata": {"answer": [d["answer"]]}}
        f.write(json.dumps(q) + "\n")

# One corpus record per context entry, keyed "<question_id>_<index>";
# the text is the title joined with its sentences.
with open("corpus.jsonl", "w", encoding="utf-8") as f:
    for d in data:
        for idx, ci in enumerate(d["context"]):
            c = {"_id": f"{d['_id']}_{idx}", "text": ci[0] + ": " + " ".join(ci[1])}
            f.write(json.dumps(c) + "\n")
# Directory containing the jsonl files written above.
# BUG FIX: the original used os.path.dirname(os.path.abspath(".")), which is
# the *parent* of the cwd — but corpus.jsonl is written into the cwd itself,
# so prepare_json_dataset() would have looked in the wrong directory.
here = os.path.abspath(".")
def prepare_json_dataset(base_dir=None):
    """Write a deduplicated plain-text corpus.txt from corpus.jsonl.

    Reads ``corpus.jsonl`` (one JSON object per line with a ``"text"`` field)
    from *base_dir* and writes each distinct text, one per line, to
    ``corpus.txt`` in the same directory. Duplicates are detected by the MD5
    digest of the UTF-8 encoded text, so only the first occurrence is kept
    and input order is preserved.

    Args:
        base_dir: Directory containing corpus.jsonl. Defaults to the
            module-level ``here`` (kept for backward compatibility).

    Raises:
        FileNotFoundError: if corpus.jsonl does not exist in *base_dir*.
        KeyError / json.JSONDecodeError: on malformed input lines.
    """
    root = here if base_dir is None else base_dir
    seen = set()
    # Open input first so a missing corpus.jsonl does not leave behind an
    # empty corpus.txt; explicit encodings avoid locale-dependent behavior.
    with open(os.path.join(root, "corpus.jsonl"), "r", encoding="utf-8") as fin, \
         open(os.path.join(root, "corpus.txt"), "w", encoding="utf-8") as fout:
        for raw in fin:
            record = json.loads(raw)
            # MD5 is used only as a dedup fingerprint, not for security.
            digest = hashlib.md5(record["text"].encode("utf-8")).hexdigest()
            if digest not in seen:
                fout.write(record["text"] + "\n")
                seen.add(digest)
# Side effect on import/run: builds the deduplicated corpus.txt from the
# corpus.jsonl generated above.
prepare_json_dataset()