Nathan Habib committed
Commit: fb82d4b
1 Parent(s): ff236ec

pending

Files changed:
- LLM360/K2-Chat_eval_request_False_float32_Original.json +1 -1
- NousResearch/Yarn-Llama-2-70b-32k_eval_request_False_bfloat16_Original.json +1 -1
- Qwen/Qwen1.5-32B_eval_request_False_bfloat16_Original.json +1 -1
- Qwen/Qwen2-57B-A14B-Instruct_eval_request_False_bfloat16_Original.json +1 -1
- Qwen/Qwen2-57B-A14B_eval_request_False_bfloat16_Original.json +1 -1
- WizardLMTeam/WizardLM-13B-V1.0_eval_request_False_float32_Original.json +1 -1
- stabilityai/StableBeluga2_eval_request_False_float32_Original.json +1 -1
LLM360/K2-Chat_eval_request_False_float32_Original.json
CHANGED
@@ -6,7 +6,7 @@
 "params": 65.286,
 "architectures": "LlamaForCausalLM",
 "weight_type": "Original",
-"status": "
+"status": "PENDING",
 "submitted_time": "2024-06-12T12:07:30Z",
 "model_type": "chat",
 "job_id": "5757327",
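The six remaining files below receive the same one-line edit: the "status" field is reset to "PENDING" and every other field is left untouched. A minimal Python sketch of how such a bulk reset could be applied locally is shown here; the file paths and the PENDING value are taken from this commit, but the script itself is an illustrative assumption, not the tooling that actually produced it.

import json
from pathlib import Path

# Eval request files touched by this commit (paths taken from the file list above).
REQUEST_FILES = [
    "LLM360/K2-Chat_eval_request_False_float32_Original.json",
    "NousResearch/Yarn-Llama-2-70b-32k_eval_request_False_bfloat16_Original.json",
    "Qwen/Qwen1.5-32B_eval_request_False_bfloat16_Original.json",
    "Qwen/Qwen2-57B-A14B-Instruct_eval_request_False_bfloat16_Original.json",
    "Qwen/Qwen2-57B-A14B_eval_request_False_bfloat16_Original.json",
    "WizardLMTeam/WizardLM-13B-V1.0_eval_request_False_float32_Original.json",
    "stabilityai/StableBeluga2_eval_request_False_float32_Original.json",
]

def reset_to_pending(repo_root: str = ".") -> None:
    """Set "status" to "PENDING" in each listed eval request file."""
    for rel_path in REQUEST_FILES:
        path = Path(repo_root) / rel_path
        data = json.loads(path.read_text())
        data["status"] = "PENDING"
        # indent=4 is an assumption about the repo's JSON formatting.
        path.write_text(json.dumps(data, indent=4) + "\n")

if __name__ == "__main__":
    reset_to_pending()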
NousResearch/Yarn-Llama-2-70b-32k_eval_request_False_bfloat16_Original.json
CHANGED
@@ -6,7 +6,7 @@
 "params": 70.0,
 "architectures": "LlamaForCausalLM",
 "weight_type": "Original",
-"status": "
+"status": "PENDING",
 "submitted_time": "2024-06-12T13:30:29Z",
 "model_type": "pretrained",
 "job_id": "5886461",
Qwen/Qwen1.5-32B_eval_request_False_bfloat16_Original.json
CHANGED
@@ -6,7 +6,7 @@
 "params": 32.512,
 "architectures": "Qwen2ForCausalLM",
 "weight_type": "Original",
-"status": "
+"status": "PENDING",
 "submitted_time": "2024-06-13T17:56:14Z",
 "model_type": "pretrained",
 "job_id": "5887081",
Qwen/Qwen2-57B-A14B-Instruct_eval_request_False_bfloat16_Original.json
CHANGED
@@ -6,7 +6,7 @@
 "params": 57.409,
 "architectures": "Qwen2MoeForCausalLM",
 "weight_type": "Original",
-"status": "
+"status": "PENDING",
 "submitted_time": "2024-06-12T09:22:01Z",
 "model_type": "chat",
 "job_id": "5887423",
Qwen/Qwen2-57B-A14B_eval_request_False_bfloat16_Original.json
CHANGED
@@ -6,7 +6,7 @@
 "params": 57.409,
 "architectures": "Qwen2MoeForCausalLM",
 "weight_type": "Original",
-"status": "
+"status": "PENDING",
 "submitted_time": "2024-06-13T17:56:28Z",
 "model_type": "pretrained",
 "job_id": "5890737",
WizardLMTeam/WizardLM-13B-V1.0_eval_request_False_float32_Original.json
CHANGED
@@ -6,7 +6,7 @@
 "params": 13.0,
 "architectures": "LlamaForCausalLM",
 "weight_type": "Original",
-"status": "
+"status": "PENDING",
 "submitted_time": "2024-06-13T18:10:32Z",
 "model_type": "finetuned",
 "job_id": "5710809",
stabilityai/StableBeluga2_eval_request_False_float32_Original.json
CHANGED
@@ -6,7 +6,7 @@
 "params": 0,
 "architectures": "LlamaForCausalLM",
 "weight_type": "Original",
-"status": "
+"status": "PENDING",
 "submitted_time": "2024-06-13T18:10:18Z",
 "model_type": "finetuned",
 "job_id": "5705308",
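A read-only counterpart that scans the repo for eval request files and reports each file's current status could look like the sketch below; the one-directory-deep layout and the *_eval_request_*.json naming pattern are assumptions inferred from the file names in this commit.

import json
from pathlib import Path

def list_statuses(repo_root: str = ".") -> dict[str, str]:
    """Map each eval request file (org/file.json) to its "status" field."""
    statuses = {}
    # Assumed layout: one organization directory per model owner, request JSONs inside.
    for path in sorted(Path(repo_root).glob("*/*_eval_request_*.json")):
        statuses[str(path)] = json.loads(path.read_text()).get("status", "<missing>")
    return statuses

if __name__ == "__main__":
    for name, status in list_statuses().items():
        print(f"{status:10s} {name}")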