added QA code/dataset
This view is limited to 50 files because it contains too many changes.
- .env.example +81 -2
- .gitattributes +0 -0
- app.py +2 -22
- data/datasets/WebQSP.test.wikidata.json +0 -0
- ms_macro.json → data/datasets/ms_macro.json +0 -0
- data/results/Llama-2-13b-chat-hf(Non-RAG)_mm.csv +3 -0
- data/results/Llama-2-13b-chat-hf(Non-RAG)_mm_rpp_1.00.csv +3 -0
- data/results/Llama-2-13b-chat-hf(Non-RAG)_mm_rpp_1.02.csv +3 -0
- data/results/Llama-2-13b-chat-hf(Non-RAG)_mm_rpp_1.04.csv +3 -0
- data/results/Llama-2-13b-chat-hf(Non-RAG)_mm_rpp_1.06.csv +3 -0
- data/results/Llama-2-13b-chat-hf(Non-RAG)_mm_rpp_1.08.csv +3 -0
- data/results/Llama-2-13b-chat-hf(Non-RAG)_mm_rpp_1.10.csv +3 -0
- data/results/Llama-2-13b-chat-hf(Non-RAG)_mm_rpp_1.12.csv +3 -0
- data/results/Llama-2-13b-chat-hf(Non-RAG)_mm_rpp_1.14.csv +3 -0
- data/results/Llama-2-13b-chat-hf(Non-RAG)_mm_rpp_1.16.csv +3 -0
- data/results/Llama-2-13b-chat-hf(Non-RAG)_mm_rpp_1.18.csv +3 -0
- data/results/Llama-2-13b-chat-hf(Non-RAG)_mm_rpp_1.20.csv +3 -0
- data/results/Llama-2-13b-chat-hf(Non-RAG)_mm_rpp_1.22.csv +3 -0
- data/results/Llama-2-13b-chat-hf(Non-RAG)_mm_rpp_1.24.csv +3 -0
- data/results/Llama-2-13b-chat-hf(Non-RAG)_mm_rpp_1.26.csv +3 -0
- data/results/Llama-2-13b-chat-hf(Non-RAG)_mm_rpp_1.28.csv +3 -0
- data/results/Llama-2-13b-chat-hf(Non-RAG)_mm_rpp_1.30.csv +3 -0
- data/results/Llama-2-13b-chat-hf(Non-RAG)_wd.csv +3 -0
- data/results/Llama-2-13b-chat-hf(Non-RAG)_wd_rpp_1.00.csv +3 -0
- data/results/Llama-2-13b-chat-hf(Non-RAG)_wd_rpp_1.02.csv +3 -0
- data/results/Llama-2-13b-chat-hf(Non-RAG)_wd_rpp_1.04.csv +3 -0
- data/results/Llama-2-13b-chat-hf(Non-RAG)_wd_rpp_1.06.csv +3 -0
- data/results/Llama-2-13b-chat-hf(Non-RAG)_wd_rpp_1.08.csv +3 -0
- data/results/Llama-2-13b-chat-hf(Non-RAG)_wd_rpp_1.10.csv +3 -0
- data/results/Llama-2-13b-chat-hf(Non-RAG)_wd_rpp_1.12.csv +3 -0
- data/results/Llama-2-13b-chat-hf(Non-RAG)_wd_rpp_1.14.csv +3 -0
- data/results/Llama-2-13b-chat-hf(Non-RAG)_wd_rpp_1.16.csv +3 -0
- data/results/Llama-2-13b-chat-hf(Non-RAG)_wd_rpp_1.18.csv +3 -0
- data/results/Llama-2-13b-chat-hf(Non-RAG)_wd_rpp_1.20.csv +3 -0
- data/results/Llama-2-13b-chat-hf(Non-RAG)_wd_rpp_1.22.csv +3 -0
- data/results/Llama-2-13b-chat-hf(Non-RAG)_wd_rpp_1.24.csv +3 -0
- data/results/Llama-2-13b-chat-hf(Non-RAG)_wd_rpp_1.26.csv +3 -0
- data/results/Llama-2-13b-chat-hf(Non-RAG)_wd_rpp_1.28.csv +3 -0
- data/results/Llama-2-13b-chat-hf(Non-RAG)_wd_rpp_1.30.csv +3 -0
- data/results/Llama-2-13b-chat-hf(RAG - Chat Template)_mm.csv +3 -0
- data/results/Llama-2-13b-chat-hf(RAG - Chat Template)_mm_rpp_1.00.csv +3 -0
- data/results/Llama-2-13b-chat-hf(RAG - Chat Template)_mm_rpp_1.02.csv +3 -0
- data/results/Llama-2-13b-chat-hf(RAG - Chat Template)_mm_rpp_1.04.csv +3 -0
- data/results/Llama-2-13b-chat-hf(RAG - Chat Template)_mm_rpp_1.06.csv +3 -0
- data/results/Llama-2-13b-chat-hf(RAG - Chat Template)_mm_rpp_1.08.csv +3 -0
- data/results/Llama-2-13b-chat-hf(RAG - Chat Template)_mm_rpp_1.10.csv +3 -0
- data/results/Llama-2-13b-chat-hf(RAG - Chat Template)_mm_rpp_1.12.csv +3 -0
- data/results/Llama-2-13b-chat-hf(RAG - Chat Template)_mm_rpp_1.14.csv +3 -0
- data/results/Llama-2-13b-chat-hf(RAG - Chat Template)_mm_rpp_1.16.csv +3 -0
- data/results/Llama-2-13b-chat-hf(RAG - Chat Template)_mm_rpp_1.18.csv +3 -0
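The results filenames follow a consistent scheme: `<model>(<mode>)_<dataset>.csv` for a summary file and `<model>(<mode>)_<dataset>_rpp_<value>.csv` for individual runs, where `mm` and `wd` appear to correspond to the MS MARCO and WebQSP/Wikidata datasets under data/datasets, and `rpp` is presumably the repetition penalty, swept from 1.00 to 1.30 in steps of 0.02. That reading is inferred from the listing rather than documented in the commit; the sketch below just splits a filename into those apparent parts.

```python
import re

# Hypothetical helper: split a results filename like
# "Llama-2-13b-chat-hf(Non-RAG)_mm_rpp_1.04.csv" into its apparent parts.
PATTERN = re.compile(
    r"^(?P<model>.+?)\((?P<mode>[^)]+)\)_(?P<dataset>mm|wd)(?:_rpp_(?P<rpp>\d+\.\d+))?\.csv$"
)

def parse_result_name(filename: str) -> dict:
    match = PATTERN.match(filename)
    if match is None:
        raise ValueError(f"unrecognized results filename: {filename}")
    parts = match.groupdict()
    parts["rpp"] = float(parts["rpp"]) if parts["rpp"] else None
    return parts

print(parse_result_name("Llama-2-13b-chat-hf(Non-RAG)_mm_rpp_1.04.csv"))
# {'model': 'Llama-2-13b-chat-hf', 'mode': 'Non-RAG', 'dataset': 'mm', 'rpp': 1.04}
```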
.env.example
CHANGED
@@ -1,2 +1,81 @@
-
-
+LLM_MODEL_TYPE=huggingface
+# LLM_MODEL_TYPE=openai
+# LLM_MODEL_TYPE=hftgi
+# LLM_MODEL_TYPE=ollama
+# LLM_MODEL_TYPE=google
+# LLM_MODEL_TYPE=vllm
+
+HUGGINGFACE_AUTH_TOKEN=
+
+HFTGI_SERVER_URL=
+
+OPENAI_API_KEY=
+
+GOOGLE_API_KEY=
+
+# if unset, default to "gpt-3.5-turbo"
+OPENAI_MODEL_NAME=
+
+# GEMINI_MODEL_NAME=gemini-1.5-pro-latest
+
+# OLLAMA_MODEL_NAME=orca2:7b
+# OLLAMA_MODEL_NAME=mistral:7b
+# OLLAMA_MODEL_NAME=gemma:7b
+# OLLAMA_MODEL_NAME=llama2:7b
+OLLAMA_MODEL_NAME=llama3:8b
+
+OLLAMA_RP=1.15
+HF_RP=1.15
+
+LANGCHAIN_DEBUG=false
+BATCH_SIZE=1
+APPLY_CHAT_TEMPLATE_FOR_RAG=true
+
+# cpu, mps or cuda:0 - if unset, use whatever detected
+HF_EMBEDDINGS_DEVICE_TYPE=
+HF_PIPELINE_DEVICE_TYPE=
+
+# uncomment one of the below to load corresponding quantized model
+# LOAD_QUANTIZED_MODEL=4bit
+# LOAD_QUANTIZED_MODEL=8bit
+
+QA_WITH_RAG=true
+# QA_WITH_RAG=false
+
+RETRIEVER_TYPE=questions_file
+# RETRIEVER_TYPE=vectorstore
+
+QUESTIONS_FILE_PATH="./data/datasets/ms_macro.json"
+
+DISABLE_MODEL_PRELOADING=true
+CHAT_HISTORY_ENABLED=false
+SHOW_PARAM_SETTINGS=false
+SHARE_GRADIO_APP=false
+
+# if unset, default to "hkunlp/instructor-xl"
+HF_EMBEDDINGS_MODEL_NAME="hkunlp/instructor-large"
+
+# number of cpu cores - used to set n_threads for GPT4ALL & LlamaCpp models
+NUMBER_OF_CPU_CORES=
+
+USING_TORCH_BFLOAT16=true
+
+# HUGGINGFACE_MODEL_NAME_OR_PATH="databricks/dolly-v2-3b"
+# HUGGINGFACE_MODEL_NAME_OR_PATH="databricks/dolly-v2-7b"
+# HUGGINGFACE_MODEL_NAME_OR_PATH="databricks/dolly-v2-12b"
+# HUGGINGFACE_MODEL_NAME_OR_PATH="TheBloke/wizardLM-7B-HF"
+# HUGGINGFACE_MODEL_NAME_OR_PATH="TheBloke/vicuna-7B-1.1-HF"
+# HUGGINGFACE_MODEL_NAME_OR_PATH="nomic-ai/gpt4all-j"
+# HUGGINGFACE_MODEL_NAME_OR_PATH="nomic-ai/gpt4all-falcon"
+# HUGGINGFACE_MODEL_NAME_OR_PATH="lmsys/fastchat-t5-3b-v1.0"
+# HUGGINGFACE_MODEL_NAME_OR_PATH="meta-llama/Llama-2-7b-chat-hf"
+# HUGGINGFACE_MODEL_NAME_OR_PATH="meta-llama/Llama-2-13b-chat-hf"
+# HUGGINGFACE_MODEL_NAME_OR_PATH="meta-llama/Llama-2-70b-chat-hf"
+# HUGGINGFACE_MODEL_NAME_OR_PATH="meta-llama/Meta-Llama-3-8B-Instruct"
+# HUGGINGFACE_MODEL_NAME_OR_PATH="meta-llama/Meta-Llama-3-70B-Instruct"
+# HUGGINGFACE_MODEL_NAME_OR_PATH="microsoft/Orca-2-7b"
+# HUGGINGFACE_MODEL_NAME_OR_PATH="microsoft/Orca-2-13b"
+HUGGINGFACE_MODEL_NAME_OR_PATH="google/gemma-1.1-2b-it"
+# HUGGINGFACE_MODEL_NAME_OR_PATH="google/gemma-1.1-7b-it"
+# HUGGINGFACE_MODEL_NAME_OR_PATH="microsoft/Phi-3-mini-128k-instruct"
+# HUGGINGFACE_MODEL_NAME_OR_PATH="mistralai/Mistral-7B-Instruct-v0.2"
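These settings are read at runtime with python-dotenv and os.getenv, the same pattern app.py uses below. A minimal sketch of how the toggles above are typically consumed; the accessor code itself is illustrative and not part of this commit:

```python
import os
from dotenv import load_dotenv  # python-dotenv, already used by app.py below

load_dotenv()  # pulls the .env keys shown above into the process environment

# Illustrative accessors; the variable names and defaults mirror .env.example,
# but this helper is a sketch, not code that ships in the repo.
llm_model_type = os.getenv("LLM_MODEL_TYPE", "huggingface")
qa_with_rag = os.getenv("QA_WITH_RAG", "true").lower() == "true"
retriever_type = os.getenv("RETRIEVER_TYPE", "questions_file")
questions_file_path = os.getenv("QUESTIONS_FILE_PATH") or "./data/datasets/ms_macro.json"
repetition_penalty = float(os.getenv("HF_RP", "1.15"))

print(llm_model_type, qa_with_rag, retriever_type, questions_file_path, repetition_penalty)
```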
.gitattributes
CHANGED
The diff for this file is too large to render.
app.py
CHANGED
@@ -13,7 +13,7 @@ if len(found_dotenv) == 0:
 print(f"loading env vars from: {found_dotenv}")
 load_dotenv(found_dotenv, override=False)
 
-path = os.path.dirname(found_dotenv)
+path = os.path.dirname(found_dotenv) + "/src"
 print(f"Adding {path} to sys.path")
 sys.path.append(path)
 
@@ -24,7 +24,7 @@ hf_token = os.getenv("HF_TOKEN")
 
 login(token=hf_token, add_to_git_credential=True)
 
-questions_file_path = os.getenv("QUESTIONS_FILE_PATH") or "./ms_macro.json"
+questions_file_path = os.getenv("QUESTIONS_FILE_PATH") or "./data/datasets/ms_macro.json"
 
 questions = json.loads(open(questions_file_path).read())
 examples = [[question["question"].strip()] for question in questions]
@@ -46,8 +46,6 @@ def chat(
     history: list[tuple[str, str]],
     system_message,
     temperature=0,
-    frequency_penalty=0,
-    presence_penalty=0,
     max_tokens=256,
     top_p=0.95,
 ):
@@ -76,8 +74,6 @@ def chat(
         max_tokens=max_tokens,
         stream=True,
         temperature=temperature,
-        frequency_penalty=None,  # frequency_penalty,
-        presence_penalty=None,  # presence_penalty,
         top_p=top_p,
         seed=42,
     ):
@@ -135,22 +131,6 @@ demo = gr.ChatInterface(
         gr.Slider(
             minimum=0, maximum=2, step=0.1, value=0, label="Temperature", render=False
         ),
-        gr.Slider(
-            minimum=-2,
-            maximum=2,
-            step=0.1,
-            value=0,
-            label="Frequency Penalty",
-            render=False,
-        ),
-        gr.Slider(
-            minimum=-2,
-            maximum=2,
-            step=0.1,
-            value=0,
-            label="Presence Penalty",
-            render=False,
-        ),
         gr.Slider(
             minimum=128,
             maximum=4096,
data/datasets/WebQSP.test.wikidata.json
ADDED
The diff for this file is too large to render.
ms_macro.json → data/datasets/ms_macro.json
RENAMED
File without changes
data/results/Llama-2-13b-chat-hf(Non-RAG)_mm.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:aa33b726196f9c503102e158c7333a50f86635a0bc5a8b7b1cfdb8e435d65255
+size 4143
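This file, like every results CSV in the commit, is tracked with Git LFS, so the diff shows only the three-line pointer stub (spec version, sha256 oid, byte size) rather than the CSV contents; running `git lfs pull` in a checkout replaces the stubs with the real files. A small illustrative helper, not part of the repo, for inspecting a pointer stub before the LFS objects have been pulled:

```python
from pathlib import Path

def read_lfs_pointer(path: str) -> dict:
    """Parse a Git LFS pointer stub into {'version': ..., 'oid': ..., 'size': ...}."""
    fields = {}
    for line in Path(path).read_text().splitlines():
        if line.strip():
            key, _, value = line.partition(" ")
            fields[key] = value
    fields["size"] = int(fields.get("size", 0))
    return fields

# Example against the file above (before `git lfs pull`):
# read_lfs_pointer("data/results/Llama-2-13b-chat-hf(Non-RAG)_mm.csv")
# -> {'version': 'https://git-lfs.github.com/spec/v1', 'oid': 'sha256:aa33...', 'size': 4143}
```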
data/results/Llama-2-13b-chat-hf(Non-RAG)_mm_rpp_1.00.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e74664d615b6f9ff601acad90ec8baad890aed8736629451f6f8685001e85ffb
+size 517847
data/results/Llama-2-13b-chat-hf(Non-RAG)_mm_rpp_1.02.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:557848e5f0f54011eaeedd9c61dc836d2e4d313c7128a552a552a46a9ae2c906
+size 512769
data/results/Llama-2-13b-chat-hf(Non-RAG)_mm_rpp_1.04.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:17ecc5d4cd300069c22b7f9a3e1b4ae10155268b9d7d4baee0a85c8a936cb139
+size 512204
data/results/Llama-2-13b-chat-hf(Non-RAG)_mm_rpp_1.06.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2e0f75e74d37cce10fe5be81f722e99f94582fe69298d7bbe8407343c9d87b89
+size 515412
data/results/Llama-2-13b-chat-hf(Non-RAG)_mm_rpp_1.08.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:89952f9332501697e061e008b5b5e4193bb2f1ec620a9d9fa6c5dc2195e4f359
+size 525035
data/results/Llama-2-13b-chat-hf(Non-RAG)_mm_rpp_1.10.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:141cb33fc2cd887bda0cdad3870792921e06c08edb81b53819d8df8f990c5979
+size 519175
data/results/Llama-2-13b-chat-hf(Non-RAG)_mm_rpp_1.12.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e38ad96f0f641102dc5053deb01fe044ae7738c8bb7d728804cc94004f6620e2
+size 519424
data/results/Llama-2-13b-chat-hf(Non-RAG)_mm_rpp_1.14.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3b0f5fd8b04c198a3e4cd5c8deac367092438ecc20f533f9fab9d357b1d3edd1
+size 524587
data/results/Llama-2-13b-chat-hf(Non-RAG)_mm_rpp_1.16.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:567ed6c25205bb8a3a5468b618edba4fac3620e6621b24fa4e949749d31daf7b
+size 524120
data/results/Llama-2-13b-chat-hf(Non-RAG)_mm_rpp_1.18.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3af977ebea2d2a202ab481b8604edc8034fe1d34f1e2c1443a06b9b18bd240a9
+size 527908
data/results/Llama-2-13b-chat-hf(Non-RAG)_mm_rpp_1.20.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a1ed7a7addff35dcaf5522fa9df06eba02acbc2194d306745224d0a47cc35a21
+size 526303
data/results/Llama-2-13b-chat-hf(Non-RAG)_mm_rpp_1.22.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:34376c053817713b90bf6cd87c38043527c597090c9cd2688316a2e57453764b
+size 530616
data/results/Llama-2-13b-chat-hf(Non-RAG)_mm_rpp_1.24.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5fa075c2c32a4b1ebc1d44b85ab0c3a3409eb6b13030c54ab83d10303ed856e9
+size 533097
data/results/Llama-2-13b-chat-hf(Non-RAG)_mm_rpp_1.26.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4f15060426386580723b98461f9b13524c06549ef4e5934b8a5ad9b527c75c67
+size 527490
data/results/Llama-2-13b-chat-hf(Non-RAG)_mm_rpp_1.28.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dd95f711c790ca57ff9cebf715bd83979d9695563277839225736ced49aa3d7c
+size 541483
data/results/Llama-2-13b-chat-hf(Non-RAG)_mm_rpp_1.30.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:49d17bc145cfb3461e95193db4664a6fa0ac8a89837aedde95f20748b84999ed
+size 537067
data/results/Llama-2-13b-chat-hf(Non-RAG)_wd.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:52f034c39872e94a5daa4fa0edbdfb9eac6cea77abbfc205a6fc68de14c89f05
+size 4719
data/results/Llama-2-13b-chat-hf(Non-RAG)_wd_rpp_1.00.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d781812ba2430d6fd9d66994fe2684db95681d4a32717095e6379f5cb7010a61
+size 912531
data/results/Llama-2-13b-chat-hf(Non-RAG)_wd_rpp_1.02.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c99bfb50ae2788fe86620c8b11a208b561d1db9c041baa80aa877215b18060b5
+size 911612
data/results/Llama-2-13b-chat-hf(Non-RAG)_wd_rpp_1.04.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:69f074123658fabb2c23c00bc17a3c899f78f807ff61d225fdd3b64b8b62f56c
+size 905187
data/results/Llama-2-13b-chat-hf(Non-RAG)_wd_rpp_1.06.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1eea92d5985d45e9ebfcdae66103fe6230980b941499e954b524b8f9ae16d9bd
+size 910859
data/results/Llama-2-13b-chat-hf(Non-RAG)_wd_rpp_1.08.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a929daf31182f5d359bcef12df91e51fd0101e09831383dd4e784113a175379a
+size 910911
data/results/Llama-2-13b-chat-hf(Non-RAG)_wd_rpp_1.10.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f25047130eeb8575c581b33b372bfe48e365a95638d96a10107ad3d68456e8f0
+size 906530
data/results/Llama-2-13b-chat-hf(Non-RAG)_wd_rpp_1.12.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1abc3770c959ae2414c4c45540b156893f2aa55a5d01f85647f619052ca88a64
+size 895938
data/results/Llama-2-13b-chat-hf(Non-RAG)_wd_rpp_1.14.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:05f8fa1d15915894634921d0e6f50e3007de964ab736dc6d86106635ce264493
+size 899020
data/results/Llama-2-13b-chat-hf(Non-RAG)_wd_rpp_1.16.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b13dfdaa7e8aa4eeea2563565f900161b4dfc79fb32e780358601826d646025c
+size 903763
data/results/Llama-2-13b-chat-hf(Non-RAG)_wd_rpp_1.18.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:778cfb3c73c2f4ef07cb354f95315152f92812f6f2a5cd545ef942916cdd8695
+size 902295
data/results/Llama-2-13b-chat-hf(Non-RAG)_wd_rpp_1.20.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:daffa3297c39e7099c2f4fed1a3128bbf0890405fbb2a288474eae2a57dee0c6
+size 902764
data/results/Llama-2-13b-chat-hf(Non-RAG)_wd_rpp_1.22.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3e2be7b610927f1e4e5a68f3000fed30b9d8345344b63d84162af654b397b767
+size 907375
data/results/Llama-2-13b-chat-hf(Non-RAG)_wd_rpp_1.24.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:30372004528146170f8413af65be3c88da94d3e4df0e580da1c329f48229d703
+size 911356
data/results/Llama-2-13b-chat-hf(Non-RAG)_wd_rpp_1.26.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bd3b9acf88c9c94b17ddb753122345e18defaed5d0e0eced30eab36b6767142c
+size 913735
data/results/Llama-2-13b-chat-hf(Non-RAG)_wd_rpp_1.28.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a30a9742a8790c5f76fecf78ebd6543be33c63e2015a925bac649e7fde856277
+size 913015
data/results/Llama-2-13b-chat-hf(Non-RAG)_wd_rpp_1.30.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:30fe001a0fb83fc1d36f0eeca3512492745de5d1815af69238a109f1ab624547
+size 929776
data/results/Llama-2-13b-chat-hf(RAG - Chat Template)_mm.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e28af86ab18a1f43ddf76c90aab009803241d602dec46bab30c697c2ff42847a
+size 4187
data/results/Llama-2-13b-chat-hf(RAG - Chat Template)_mm_rpp_1.00.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1a0d5338273f84ef7a28e16e6b86b90d92794acfaa72dc0469d82e0ba51ac8b7
+size 399758
data/results/Llama-2-13b-chat-hf(RAG - Chat Template)_mm_rpp_1.02.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e7c3ab09f1b691ff6dc29c985bc5086fde4e4c208ac4365fda7699e6a25b15bc
+size 398942
data/results/Llama-2-13b-chat-hf(RAG - Chat Template)_mm_rpp_1.04.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:68b16d30ddcdc6fd74f8dae37c7d1ed939f6127b14bf20a03bf62f7b2e7cd28b
+size 401963
data/results/Llama-2-13b-chat-hf(RAG - Chat Template)_mm_rpp_1.06.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:480442e76bc506a0b59d82d97821b46c4d826fd0e06d69b8452b8f3729acd387
+size 398890
data/results/Llama-2-13b-chat-hf(RAG - Chat Template)_mm_rpp_1.08.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:57bd6f4c295d161f4fa3d28e9ca314a6839a6685bf604cc255f17cdbbabe675b
+size 404554
data/results/Llama-2-13b-chat-hf(RAG - Chat Template)_mm_rpp_1.10.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4df87cd1556292fb63fc018290aad1c2c2db379a3d17c073373c6b3622aa114f
+size 398221
data/results/Llama-2-13b-chat-hf(RAG - Chat Template)_mm_rpp_1.12.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:22ce81e92dc0ca4d1959c811730b04ccfc64e7fe10446b3ed6855b47c283c065
+size 398798
data/results/Llama-2-13b-chat-hf(RAG - Chat Template)_mm_rpp_1.14.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:516afe0b4689104160a09e1d0c1b75718bd3c1a6b62781baecfe8a64bea9b69c
+size 404891
data/results/Llama-2-13b-chat-hf(RAG - Chat Template)_mm_rpp_1.16.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9b475278a5b6a252f7192d54801b67b1a6fa44998ab85866a334dd8d26eaac20
+size 399407
data/results/Llama-2-13b-chat-hf(RAG - Chat Template)_mm_rpp_1.18.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:41bad8662eca9dcbc8c0b898aec8b6a85c484c7a7141b43557bcca369519b4fc
+size 398046