SaylorTwift (HF staff) committed on
Commit 00862e6 • 1 Parent(s): a4b02b5

change to pending

This view is limited to 50 files because it contains too many changes. See raw diff
Files changed (50)
  1. 0-hero/Matter-0.2-7B-DPO_eval_request_False_bfloat16_Original.json +15 -1
  2. 01-ai/Yi-1.5-34B-Chat-16K_eval_request_False_bfloat16_Original.json +2 -2
  3. 152334H/miqu-1-70b-sf_eval_request_False_float16_Original.json +3 -3
  4. AI-Sweden-Models/gpt-sw3-40b_eval_request_False_float16_Original.json +15 -1
  5. AiMavenAi/Athena-70B-L3_eval_request_False_float16_Original.json +15 -1
  6. BEE-spoke-data/smol_llama-220M-GQA-fineweb_edu_eval_request_False_bfloat16_Original.json +3 -3
  7. BEE-spoke-data/smol_llama-220M-GQA_eval_request_False_bfloat16_Original.json +3 -3
  8. BEE-spoke-data/smol_llama-220M-openhermes_eval_request_False_bfloat16_Original.json +3 -3
  9. BEE-spoke-data/zephyr-220m-dpo-full_eval_request_False_bfloat16_Original.json +3 -3
  10. Ba2han/Llama-Phi-3_DoRA_eval_request_False_bfloat16_Original.json +15 -1
  11. BarraHome/Mistroll-7B-v2.2_eval_request_False_bfloat16_Original.json +15 -1
  12. CausalLM/34b-beta_eval_request_False_bfloat16_Original.json +15 -1
  13. CausalLM/34b-beta_eval_request_False_float16_Original.json +15 -1
  14. CausalLM/35b-beta-long_eval_request_False_bfloat16_Original.json +15 -1
  15. ClaudioItaly/TopEvolutionWiz_eval_request_False_bfloat16_Original.json +15 -1
  16. CortexLM/btlm-7b-base-v0.1_eval_request_False_bfloat16_Original.json +15 -1
  17. CortexLM/btlm-7b-base-v0.2_eval_request_False_bfloat16_Original.json +15 -1
  18. DevQuasar/coma-7B-v0.1_eval_request_False_float16_Original.json +15 -1
  19. Enno-Ai/EnnoAi-Pro-Llama-3-8B-v0.1_eval_request_False_float16_Original.json +15 -1
  20. Enno-Ai/EnnoAi-Pro-Llama-3-8B-v0.3_eval_request_False_bfloat16_Original.json +15 -1
  21. FallenMerick/Chewy-Lemon-Cookie-11B_eval_request_False_bfloat16_Original.json +15 -1
  22. HiroseKoichi/Llama-Salad-4x8B-V3_eval_request_False_bfloat16_Original.json +15 -1
  23. Josephgflowers/Cinder-Phi-2-V1-F16-gguf_eval_request_False_float16_Original.json +15 -1
  24. Josephgflowers/TinyLlama-Cinder-Agent-v1_eval_request_False_float16_Original.json +15 -1
  25. LLM360/K2-Chat_eval_request_False_float32_Original.json +2 -2
  26. LLM360/K2_eval_request_False_float16_Original.json +15 -1
  27. Locutusque/Llama-3-NeuralHercules-5.0-8B_eval_request_False_bfloat16_Original.json +15 -1
  28. Locutusque/Llama-3-Yggdrasil-2.0-8B_eval_request_False_bfloat16_Original.json +15 -1
  29. MaziyarPanahi/Goku-8x22B-v0.1_eval_request_False_bfloat16_Original.json +15 -1
  30. MaziyarPanahi/Goku-8x22B-v0.2_eval_request_False_bfloat16_Original.json +15 -1
  31. MaziyarPanahi/Llama-3-70B-Instruct-DPO-v0.1_eval_request_False_bfloat16_Original.json +15 -1
  32. MaziyarPanahi/Llama-3-70B-Instruct-DPO-v0.2_eval_request_False_bfloat16_Original.json +15 -1
  33. MaziyarPanahi/Llama-3-70B-Instruct-DPO-v0.3_eval_request_False_bfloat16_Original.json +15 -1
  34. MaziyarPanahi/Llama-3-70B-Instruct-DPO-v0.4_eval_request_False_bfloat16_Original.json +15 -1
  35. MaziyarPanahi/Llama-3-70B-Instruct-v0.1_eval_request_False_bfloat16_Original.json +15 -1
  36. MaziyarPanahi/Llama-3-8B-Instruct-v0.10_eval_request_False_bfloat16_Original.json +15 -1
  37. MaziyarPanahi/Llama-3-8B-Instruct-v0.8_eval_request_False_bfloat16_Original.json +15 -1
  38. MaziyarPanahi/Llama-3-8B-Instruct-v0.9_eval_request_False_bfloat16_Original.json +15 -1
  39. MaziyarPanahi/Phi-3-mini-4k-instruct-v0.1_eval_request_False_bfloat16_Original.json +15 -1
  40. MaziyarPanahi/Phi-3-mini-4k-instruct-v0.2_eval_request_False_bfloat16_Original.json +15 -1
  41. MaziyarPanahi/Phi-3-mini-4k-instruct-v0.3_eval_request_False_bfloat16_Original.json +15 -1
  42. MaziyarPanahi/Qwen2-72B-Instruct-v0.1_eval_request_False_bfloat16_Original.json +15 -1
  43. Naveenpoliasetty/llama3-8B-V2_eval_request_False_float16_Original.json +15 -1
  44. NeverSleep/CausalLM-RP-34B_eval_request_False_float16_Original.json +15 -1
  45. Nitral-AI/Hathor_Stable-v0.2-L3-8B_eval_request_False_bfloat16_Original.json +15 -1
  46. Nitral-AI/Poppy_Porpoise-0.72-L3-8B_eval_request_False_bfloat16_Original.json +15 -1
  47. NousResearch/Hermes-2-Theta-Llama-3-70B_eval_request_False_bfloat16_Original.json +3 -3
  48. NousResearch/Yarn-Llama-2-13b-128k_eval_request_False_bfloat16_Original.json +3 -3
  49. NucleusAI/nucleus-22B-token-500B_eval_request_False_bfloat16_Original.json +15 -1
  50. OpenBuddy/openbuddy-deepseek-67b-v18.1-4k_eval_request_False_bfloat16_Original.json +15 -1
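
Every change in this commit follows the same pattern: the status field of each eval request JSON is reset to "PENDING", and files that were stored as a single line are rewritten as indented multi-line JSON with the model_type emoji kept unescaped. Below is a minimal Python sketch of such a pass; the directory layout, indent width, and file handling are assumptions for illustration, not necessarily the script used for this commit.

import json
from pathlib import Path

REQUESTS_DIR = Path(".")  # assumption: run from the root of the requests dataset

for path in sorted(REQUESTS_DIR.glob("*/*_eval_request_*.json")):
    with path.open("r", encoding="utf-8") as f:
        request = json.load(f)

    # FAILED / FINISHED / RUNNING -> PENDING, matching the per-file diffs below
    request["status"] = "PENDING"

    with path.open("w", encoding="utf-8") as f:
        # indent=4 gives the multi-line form seen in the new file versions;
        # ensure_ascii=False keeps emoji literal instead of \uXXXX escapes
        json.dump(request, f, indent=4, ensure_ascii=False)
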
0-hero/Matter-0.2-7B-DPO_eval_request_False_bfloat16_Original.json CHANGED
@@ -1 +1,15 @@
1
- {"model": "0-hero/Matter-0.2-7B-DPO", "base_model": "", "revision": "26a66f0d862e2024ce4ad0a09c37052ac36e8af6", "precision": "bfloat16", "params": 7.242, "architectures": "MistralForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-26T19:58:40Z", "model_type": "\ud83d\udcac : \ud83d\udcac chat models (RLHF, DPO, IFT, ...)", "job_id": -1, "job_start_time": null, "use_chat_template": true}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "model": "0-hero/Matter-0.2-7B-DPO",
3
+ "base_model": "",
4
+ "revision": "26a66f0d862e2024ce4ad0a09c37052ac36e8af6",
5
+ "precision": "bfloat16",
6
+ "params": 7.242,
7
+ "architectures": "MistralForCausalLM",
8
+ "weight_type": "Original",
9
+ "status": "PENDING",
10
+ "submitted_time": "2024-06-26T19:58:40Z",
11
+ "model_type": "💬 : 💬 chat models (RLHF, DPO, IFT, ...)",
12
+ "job_id": -1,
13
+ "job_start_time": null,
14
+ "use_chat_template": true
15
+ }
01-ai/Yi-1.5-34B-Chat-16K_eval_request_False_bfloat16_Original.json CHANGED
@@ -6,10 +6,10 @@
6
  "params": 34.389,
7
  "architectures": "LlamaForCausalLM",
8
  "weight_type": "Original",
9
- "status": "FAILED",
10
  "submitted_time": "2024-06-12T12:06:53Z",
11
  "model_type": "chat",
12
  "job_id": "6658010",
13
  "job_start_time": "2024-06-22T18:39:45.456325",
14
  "use_chat_template": true
15
- }
 
6
  "params": 34.389,
7
  "architectures": "LlamaForCausalLM",
8
  "weight_type": "Original",
9
+ "status": "PENDING",
10
  "submitted_time": "2024-06-12T12:06:53Z",
11
  "model_type": "chat",
12
  "job_id": "6658010",
13
  "job_start_time": "2024-06-22T18:39:45.456325",
14
  "use_chat_template": true
15
+ }
152334H/miqu-1-70b-sf_eval_request_False_float16_Original.json CHANGED
@@ -6,10 +6,10 @@
6
  "params": 68.977,
7
  "architectures": "LlamaForCausalLM",
8
  "weight_type": "Original",
9
- "status": "RUNNING",
10
  "submitted_time": "2024-06-26T15:32:34Z",
11
- "model_type": "\ud83d\udd36 : \ud83d\udd36 fine-tuned on domain-specific datasets",
12
  "job_id": "7208639",
13
  "job_start_time": "2024-06-26T19:24:39.425080",
14
  "use_chat_template": false
15
- }
 
6
  "params": 68.977,
7
  "architectures": "LlamaForCausalLM",
8
  "weight_type": "Original",
9
+ "status": "PENDING",
10
  "submitted_time": "2024-06-26T15:32:34Z",
11
+ "model_type": "🔶 : 🔶 fine-tuned on domain-specific datasets",
12
  "job_id": "7208639",
13
  "job_start_time": "2024-06-26T19:24:39.425080",
14
  "use_chat_template": false
15
+ }
AI-Sweden-Models/gpt-sw3-40b_eval_request_False_float16_Original.json CHANGED
@@ -1 +1,15 @@
1
- {"model": "AI-Sweden-Models/gpt-sw3-40b", "base_model": "", "revision": "1af27994df1287a7fac1b10d60e40ca43a22a385", "precision": "float16", "params": 39.927, "architectures": "GPT2LMHeadModel", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-26T18:13:04Z", "model_type": "\ud83d\udfe2 : \ud83d\udfe2 pretrained", "job_id": -1, "job_start_time": null, "use_chat_template": false}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "model": "AI-Sweden-Models/gpt-sw3-40b",
3
+ "base_model": "",
4
+ "revision": "1af27994df1287a7fac1b10d60e40ca43a22a385",
5
+ "precision": "float16",
6
+ "params": 39.927,
7
+ "architectures": "GPT2LMHeadModel",
8
+ "weight_type": "Original",
9
+ "status": "PENDING",
10
+ "submitted_time": "2024-06-26T18:13:04Z",
11
+ "model_type": "🟢 : 🟢 pretrained",
12
+ "job_id": -1,
13
+ "job_start_time": null,
14
+ "use_chat_template": false
15
+ }
AiMavenAi/Athena-70B-L3_eval_request_False_float16_Original.json CHANGED
@@ -1 +1,15 @@
1
- {"model": "AiMavenAi/Athena-70B-L3", "base_model": "AiMavenAi/Athena-70B-L3", "revision": "ed35274b035d52760c4bf00ecd28d04045a25f97", "precision": "float16", "params": 70.554, "architectures": "LlamaForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-26T16:55:48Z", "model_type": "\ud83d\udd36 : \ud83d\udd36 fine-tuned on domain-specific datasets", "job_id": -1, "job_start_time": null, "use_chat_template": true}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "model": "AiMavenAi/Athena-70B-L3",
3
+ "base_model": "AiMavenAi/Athena-70B-L3",
4
+ "revision": "ed35274b035d52760c4bf00ecd28d04045a25f97",
5
+ "precision": "float16",
6
+ "params": 70.554,
7
+ "architectures": "LlamaForCausalLM",
8
+ "weight_type": "Original",
9
+ "status": "PENDING",
10
+ "submitted_time": "2024-06-26T16:55:48Z",
11
+ "model_type": "🔶 : 🔶 fine-tuned on domain-specific datasets",
12
+ "job_id": -1,
13
+ "job_start_time": null,
14
+ "use_chat_template": true
15
+ }
BEE-spoke-data/smol_llama-220M-GQA-fineweb_edu_eval_request_False_bfloat16_Original.json CHANGED
@@ -6,10 +6,10 @@
6
  "params": 0.218,
7
  "architectures": "LlamaForCausalLM",
8
  "weight_type": "Original",
9
- "status": "FAILED",
10
  "submitted_time": "2024-06-26T14:34:07Z",
11
- "model_type": "\ud83d\udfe9 : \ud83d\udfe9 continuously pretrained",
12
  "job_id": "7205446",
13
  "job_start_time": "2024-06-26T14:59:08.621087",
14
  "use_chat_template": false
15
- }
 
6
  "params": 0.218,
7
  "architectures": "LlamaForCausalLM",
8
  "weight_type": "Original",
9
+ "status": "PENDING",
10
  "submitted_time": "2024-06-26T14:34:07Z",
11
+ "model_type": "🟩 : 🟩 continuously pretrained",
12
  "job_id": "7205446",
13
  "job_start_time": "2024-06-26T14:59:08.621087",
14
  "use_chat_template": false
15
+ }
BEE-spoke-data/smol_llama-220M-GQA_eval_request_False_bfloat16_Original.json CHANGED
@@ -6,10 +6,10 @@
6
  "params": 0.218,
7
  "architectures": "LlamaForCausalLM",
8
  "weight_type": "Original",
9
- "status": "FINISHED",
10
  "submitted_time": "2024-06-26T14:33:46Z",
11
- "model_type": "\ud83d\udfe2 : \ud83d\udfe2 pretrained",
12
  "job_id": "7205417",
13
  "job_start_time": "2024-06-26T14:48:57.752173",
14
  "use_chat_template": false
15
- }
 
6
  "params": 0.218,
7
  "architectures": "LlamaForCausalLM",
8
  "weight_type": "Original",
9
+ "status": "PENDING",
10
  "submitted_time": "2024-06-26T14:33:46Z",
11
+ "model_type": "🟢 : 🟢 pretrained",
12
  "job_id": "7205417",
13
  "job_start_time": "2024-06-26T14:48:57.752173",
14
  "use_chat_template": false
15
+ }
BEE-spoke-data/smol_llama-220M-openhermes_eval_request_False_bfloat16_Original.json CHANGED
@@ -6,10 +6,10 @@
6
  "params": 0.218,
7
  "architectures": "LlamaForCausalLM",
8
  "weight_type": "Original",
9
- "status": "FAILED",
10
  "submitted_time": "2024-06-26T14:33:23Z",
11
- "model_type": "\ud83d\udd36 : \ud83d\udd36 fine-tuned on domain-specific datasets",
12
  "job_id": "7205418",
13
  "job_start_time": "2024-06-26T14:49:07.666028",
14
  "use_chat_template": false
15
- }
 
6
  "params": 0.218,
7
  "architectures": "LlamaForCausalLM",
8
  "weight_type": "Original",
9
+ "status": "PENDING",
10
  "submitted_time": "2024-06-26T14:33:23Z",
11
+ "model_type": "🔶 : 🔶 fine-tuned on domain-specific datasets",
12
  "job_id": "7205418",
13
  "job_start_time": "2024-06-26T14:49:07.666028",
14
  "use_chat_template": false
15
+ }
BEE-spoke-data/zephyr-220m-dpo-full_eval_request_False_bfloat16_Original.json CHANGED
@@ -6,10 +6,10 @@
6
  "params": 0.218,
7
  "architectures": "MistralForCausalLM",
8
  "weight_type": "Original",
9
- "status": "FAILED",
10
  "submitted_time": "2024-06-26T14:34:45Z",
11
- "model_type": "\ud83d\udcac : \ud83d\udcac chat models (RLHF, DPO, IFT, ...)",
12
  "job_id": "7205414",
13
  "job_start_time": "2024-06-26T14:46:28.859979",
14
  "use_chat_template": true
15
- }
 
6
  "params": 0.218,
7
  "architectures": "MistralForCausalLM",
8
  "weight_type": "Original",
9
+ "status": "PENDING",
10
  "submitted_time": "2024-06-26T14:34:45Z",
11
+ "model_type": "💬 : 💬 chat models (RLHF, DPO, IFT, ...)",
12
  "job_id": "7205414",
13
  "job_start_time": "2024-06-26T14:46:28.859979",
14
  "use_chat_template": true
15
+ }
Ba2han/Llama-Phi-3_DoRA_eval_request_False_bfloat16_Original.json CHANGED
@@ -1 +1,15 @@
1
- {"model": "Ba2han/Llama-Phi-3_DoRA", "base_model": "", "revision": "36f99064a7be8ba475c2ee5c5424e95c263ccb87", "precision": "bfloat16", "params": 3.821, "architectures": "MistralForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-26T20:34:56Z", "model_type": "\ud83d\udd36 : \ud83d\udd36 fine-tuned on domain-specific datasets", "job_id": -1, "job_start_time": null, "use_chat_template": true}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "model": "Ba2han/Llama-Phi-3_DoRA",
3
+ "base_model": "",
4
+ "revision": "36f99064a7be8ba475c2ee5c5424e95c263ccb87",
5
+ "precision": "bfloat16",
6
+ "params": 3.821,
7
+ "architectures": "MistralForCausalLM",
8
+ "weight_type": "Original",
9
+ "status": "PENDING",
10
+ "submitted_time": "2024-06-26T20:34:56Z",
11
+ "model_type": "🔶 : 🔶 fine-tuned on domain-specific datasets",
12
+ "job_id": -1,
13
+ "job_start_time": null,
14
+ "use_chat_template": true
15
+ }
BarraHome/Mistroll-7B-v2.2_eval_request_False_bfloat16_Original.json CHANGED
@@ -1 +1,15 @@
1
- {"model": "BarraHome/Mistroll-7B-v2.2", "base_model": "yam-peleg/Experiment26-7B", "revision": "755df0d9ed26d10744ec1f9dbad8cab88882ce73", "precision": "bfloat16", "params": 7.242, "architectures": "MistralForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-26T17:40:40Z", "model_type": "\ud83d\udd36 : \ud83d\udd36 fine-tuned on domain-specific datasets", "job_id": -1, "job_start_time": null, "use_chat_template": true}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "model": "BarraHome/Mistroll-7B-v2.2",
3
+ "base_model": "yam-peleg/Experiment26-7B",
4
+ "revision": "755df0d9ed26d10744ec1f9dbad8cab88882ce73",
5
+ "precision": "bfloat16",
6
+ "params": 7.242,
7
+ "architectures": "MistralForCausalLM",
8
+ "weight_type": "Original",
9
+ "status": "PENDING",
10
+ "submitted_time": "2024-06-26T17:40:40Z",
11
+ "model_type": "🔶 : 🔶 fine-tuned on domain-specific datasets",
12
+ "job_id": -1,
13
+ "job_start_time": null,
14
+ "use_chat_template": true
15
+ }
CausalLM/34b-beta_eval_request_False_bfloat16_Original.json CHANGED
@@ -1 +1,15 @@
1
- {"model": "CausalLM/34b-beta", "base_model": "", "revision": "0429951eb30ccdfff3515e711aaa7649a8a7364c", "precision": "bfloat16", "params": 34.389, "architectures": "LlamaForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-26T18:02:34Z", "model_type": "\ud83d\udd36 : \ud83d\udd36 fine-tuned on domain-specific datasets", "job_id": -1, "job_start_time": null, "use_chat_template": true}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "model": "CausalLM/34b-beta",
3
+ "base_model": "",
4
+ "revision": "0429951eb30ccdfff3515e711aaa7649a8a7364c",
5
+ "precision": "bfloat16",
6
+ "params": 34.389,
7
+ "architectures": "LlamaForCausalLM",
8
+ "weight_type": "Original",
9
+ "status": "PENDING",
10
+ "submitted_time": "2024-06-26T18:02:34Z",
11
+ "model_type": "🔶 : 🔶 fine-tuned on domain-specific datasets",
12
+ "job_id": -1,
13
+ "job_start_time": null,
14
+ "use_chat_template": true
15
+ }
CausalLM/34b-beta_eval_request_False_float16_Original.json CHANGED
@@ -1 +1,15 @@
1
- {"model": "CausalLM/34b-beta", "base_model": "", "revision": "0429951eb30ccdfff3515e711aaa7649a8a7364c", "precision": "float16", "params": 34.389, "architectures": "LlamaForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-26T18:02:48Z", "model_type": "\ud83d\udd36 : \ud83d\udd36 fine-tuned on domain-specific datasets", "job_id": -1, "job_start_time": null, "use_chat_template": false}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "model": "CausalLM/34b-beta",
3
+ "base_model": "",
4
+ "revision": "0429951eb30ccdfff3515e711aaa7649a8a7364c",
5
+ "precision": "float16",
6
+ "params": 34.389,
7
+ "architectures": "LlamaForCausalLM",
8
+ "weight_type": "Original",
9
+ "status": "PENDING",
10
+ "submitted_time": "2024-06-26T18:02:48Z",
11
+ "model_type": "🔶 : 🔶 fine-tuned on domain-specific datasets",
12
+ "job_id": -1,
13
+ "job_start_time": null,
14
+ "use_chat_template": false
15
+ }
CausalLM/35b-beta-long_eval_request_False_bfloat16_Original.json CHANGED
@@ -1 +1,15 @@
1
- {"model": "CausalLM/35b-beta-long", "base_model": "", "revision": "36fa24a5e2288e2d81092523f14adb7d2b027a3b", "precision": "bfloat16", "params": 34.981, "architectures": "CohereForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-26T17:29:57Z", "model_type": "\ud83d\udcac : \ud83d\udcac chat models (RLHF, DPO, IFT, ...)", "job_id": -1, "job_start_time": null, "use_chat_template": true}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "model": "CausalLM/35b-beta-long",
3
+ "base_model": "",
4
+ "revision": "36fa24a5e2288e2d81092523f14adb7d2b027a3b",
5
+ "precision": "bfloat16",
6
+ "params": 34.981,
7
+ "architectures": "CohereForCausalLM",
8
+ "weight_type": "Original",
9
+ "status": "PENDING",
10
+ "submitted_time": "2024-06-26T17:29:57Z",
11
+ "model_type": "💬 : 💬 chat models (RLHF, DPO, IFT, ...)",
12
+ "job_id": -1,
13
+ "job_start_time": null,
14
+ "use_chat_template": true
15
+ }
ClaudioItaly/TopEvolutionWiz_eval_request_False_bfloat16_Original.json CHANGED
@@ -1 +1,15 @@
1
- {"model": "ClaudioItaly/TopEvolutionWiz", "base_model": "", "revision": "a207b871d09091847f806a51c2ec879ce91c6040", "precision": "bfloat16", "params": 7.242, "architectures": "MistralForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-26T20:32:08Z", "model_type": "\ud83e\udd1d : \ud83e\udd1d base merges and moerges", "job_id": -1, "job_start_time": null, "use_chat_template": false}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "model": "ClaudioItaly/TopEvolutionWiz",
3
+ "base_model": "",
4
+ "revision": "a207b871d09091847f806a51c2ec879ce91c6040",
5
+ "precision": "bfloat16",
6
+ "params": 7.242,
7
+ "architectures": "MistralForCausalLM",
8
+ "weight_type": "Original",
9
+ "status": "PENDING",
10
+ "submitted_time": "2024-06-26T20:32:08Z",
11
+ "model_type": "🤝 : 🤝 base merges and moerges",
12
+ "job_id": -1,
13
+ "job_start_time": null,
14
+ "use_chat_template": false
15
+ }
CortexLM/btlm-7b-base-v0.1_eval_request_False_bfloat16_Original.json CHANGED
@@ -1 +1,15 @@
1
- {"model": "CortexLM/btlm-7b-base-v0.1", "base_model": "", "revision": "876db86b226fe6eec6f1b02e6c082a6dfe0317e0", "precision": "bfloat16", "params": 6.893, "architectures": "LlamaForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-26T21:28:41Z", "model_type": "\ud83d\udfe2 : \ud83d\udfe2 pretrained", "job_id": -1, "job_start_time": null, "use_chat_template": false}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "model": "CortexLM/btlm-7b-base-v0.1",
3
+ "base_model": "",
4
+ "revision": "876db86b226fe6eec6f1b02e6c082a6dfe0317e0",
5
+ "precision": "bfloat16",
6
+ "params": 6.893,
7
+ "architectures": "LlamaForCausalLM",
8
+ "weight_type": "Original",
9
+ "status": "PENDING",
10
+ "submitted_time": "2024-06-26T21:28:41Z",
11
+ "model_type": "🟢 : 🟢 pretrained",
12
+ "job_id": -1,
13
+ "job_start_time": null,
14
+ "use_chat_template": false
15
+ }
CortexLM/btlm-7b-base-v0.2_eval_request_False_bfloat16_Original.json CHANGED
@@ -1 +1,15 @@
1
- {"model": "CortexLM/btlm-7b-base-v0.2", "base_model": "", "revision": "eda8b4298365a26c8981316e09427c237b11217f", "precision": "bfloat16", "params": 6.885, "architectures": "LlamaForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-26T21:28:25Z", "model_type": "\ud83d\udfe2 : \ud83d\udfe2 pretrained", "job_id": -1, "job_start_time": null, "use_chat_template": false}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "model": "CortexLM/btlm-7b-base-v0.2",
3
+ "base_model": "",
4
+ "revision": "eda8b4298365a26c8981316e09427c237b11217f",
5
+ "precision": "bfloat16",
6
+ "params": 6.885,
7
+ "architectures": "LlamaForCausalLM",
8
+ "weight_type": "Original",
9
+ "status": "PENDING",
10
+ "submitted_time": "2024-06-26T21:28:25Z",
11
+ "model_type": "🟢 : 🟢 pretrained",
12
+ "job_id": -1,
13
+ "job_start_time": null,
14
+ "use_chat_template": false
15
+ }
DevQuasar/coma-7B-v0.1_eval_request_False_float16_Original.json CHANGED
@@ -1 +1,15 @@
1
- {"model": "DevQuasar/coma-7B-v0.1", "base_model": "meta-llama/Llama-2-7b-chat-hf", "revision": "8358359ac3152fba1d284b2dcd00a4efc205cc63", "precision": "float16", "params": 6.738, "architectures": "LlamaForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-26T14:45:12Z", "model_type": "\ud83d\udd36 : \ud83d\udd36 fine-tuned on domain-specific datasets", "job_id": -1, "job_start_time": null, "use_chat_template": true}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "model": "DevQuasar/coma-7B-v0.1",
3
+ "base_model": "meta-llama/Llama-2-7b-chat-hf",
4
+ "revision": "8358359ac3152fba1d284b2dcd00a4efc205cc63",
5
+ "precision": "float16",
6
+ "params": 6.738,
7
+ "architectures": "LlamaForCausalLM",
8
+ "weight_type": "Original",
9
+ "status": "PENDING",
10
+ "submitted_time": "2024-06-26T14:45:12Z",
11
+ "model_type": "🔶 : 🔶 fine-tuned on domain-specific datasets",
12
+ "job_id": -1,
13
+ "job_start_time": null,
14
+ "use_chat_template": true
15
+ }
Enno-Ai/EnnoAi-Pro-Llama-3-8B-v0.1_eval_request_False_float16_Original.json CHANGED
@@ -1 +1,15 @@
1
- {"model": "Enno-Ai/EnnoAi-Pro-Llama-3-8B-v0.1", "base_model": "", "revision": "47239ced888d8be13ff0423bb5148693de2add5a", "precision": "float16", "params": 8.031, "architectures": "LlamaForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-26T14:14:23Z", "model_type": "\ud83d\udd36 : \ud83d\udd36 fine-tuned on domain-specific datasets", "job_id": -1, "job_start_time": null, "use_chat_template": true}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "model": "Enno-Ai/EnnoAi-Pro-Llama-3-8B-v0.1",
3
+ "base_model": "",
4
+ "revision": "47239ced888d8be13ff0423bb5148693de2add5a",
5
+ "precision": "float16",
6
+ "params": 8.031,
7
+ "architectures": "LlamaForCausalLM",
8
+ "weight_type": "Original",
9
+ "status": "PENDING",
10
+ "submitted_time": "2024-06-26T14:14:23Z",
11
+ "model_type": "🔶 : 🔶 fine-tuned on domain-specific datasets",
12
+ "job_id": -1,
13
+ "job_start_time": null,
14
+ "use_chat_template": true
15
+ }
Enno-Ai/EnnoAi-Pro-Llama-3-8B-v0.3_eval_request_False_bfloat16_Original.json CHANGED
@@ -1 +1,15 @@
1
- {"model": "Enno-Ai/EnnoAi-Pro-Llama-3-8B-v0.3", "base_model": "", "revision": "cf29b8b484a909132e3a1f85ce891d28347c0d13", "precision": "bfloat16", "params": 8.03, "architectures": "LlamaForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-26T19:57:58Z", "model_type": "\ud83d\udd36 : \ud83d\udd36 fine-tuned on domain-specific datasets", "job_id": -1, "job_start_time": null, "use_chat_template": true}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "model": "Enno-Ai/EnnoAi-Pro-Llama-3-8B-v0.3",
3
+ "base_model": "",
4
+ "revision": "cf29b8b484a909132e3a1f85ce891d28347c0d13",
5
+ "precision": "bfloat16",
6
+ "params": 8.03,
7
+ "architectures": "LlamaForCausalLM",
8
+ "weight_type": "Original",
9
+ "status": "PENDING",
10
+ "submitted_time": "2024-06-26T19:57:58Z",
11
+ "model_type": "🔶 : 🔶 fine-tuned on domain-specific datasets",
12
+ "job_id": -1,
13
+ "job_start_time": null,
14
+ "use_chat_template": true
15
+ }
FallenMerick/Chewy-Lemon-Cookie-11B_eval_request_False_bfloat16_Original.json CHANGED
@@ -1 +1,15 @@
1
- {"model": "FallenMerick/Chewy-Lemon-Cookie-11B", "base_model": "", "revision": "0f5d0d6d218b3ef034f58eba32d6fe7ac4c237ae", "precision": "bfloat16", "params": 10.732, "architectures": "MistralForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-26T23:15:20Z", "model_type": "\ud83e\udd1d : \ud83e\udd1d base merges and moerges", "job_id": -1, "job_start_time": null, "use_chat_template": false}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "model": "FallenMerick/Chewy-Lemon-Cookie-11B",
3
+ "base_model": "",
4
+ "revision": "0f5d0d6d218b3ef034f58eba32d6fe7ac4c237ae",
5
+ "precision": "bfloat16",
6
+ "params": 10.732,
7
+ "architectures": "MistralForCausalLM",
8
+ "weight_type": "Original",
9
+ "status": "PENDING",
10
+ "submitted_time": "2024-06-26T23:15:20Z",
11
+ "model_type": "🤝 : 🤝 base merges and moerges",
12
+ "job_id": -1,
13
+ "job_start_time": null,
14
+ "use_chat_template": false
15
+ }
HiroseKoichi/Llama-Salad-4x8B-V3_eval_request_False_bfloat16_Original.json CHANGED
@@ -1 +1,15 @@
1
- {"model": "HiroseKoichi/Llama-Salad-4x8B-V3", "base_model": "", "revision": "a343915429779efbd1478f01ba1f7fd9d8d226c0", "precision": "bfloat16", "params": 24.942, "architectures": "MixtralForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-26T19:36:42Z", "model_type": "\ud83e\udd1d : \ud83e\udd1d base merges and moerges", "job_id": -1, "job_start_time": null, "use_chat_template": true}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "model": "HiroseKoichi/Llama-Salad-4x8B-V3",
3
+ "base_model": "",
4
+ "revision": "a343915429779efbd1478f01ba1f7fd9d8d226c0",
5
+ "precision": "bfloat16",
6
+ "params": 24.942,
7
+ "architectures": "MixtralForCausalLM",
8
+ "weight_type": "Original",
9
+ "status": "PENDING",
10
+ "submitted_time": "2024-06-26T19:36:42Z",
11
+ "model_type": "🤝 : 🤝 base merges and moerges",
12
+ "job_id": -1,
13
+ "job_start_time": null,
14
+ "use_chat_template": true
15
+ }
Josephgflowers/Cinder-Phi-2-V1-F16-gguf_eval_request_False_float16_Original.json CHANGED
@@ -1 +1,15 @@
1
- {"model": "Josephgflowers/Cinder-Phi-2-V1-F16-gguf", "base_model": "", "revision": "85629ec9b18efee31d07630664e7a3815121badf", "precision": "float16", "params": 2.78, "architectures": "PhiForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-26T21:45:09Z", "model_type": "\ud83d\udd36 : \ud83d\udd36 fine-tuned on domain-specific datasets", "job_id": -1, "job_start_time": null, "use_chat_template": true}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "model": "Josephgflowers/Cinder-Phi-2-V1-F16-gguf",
3
+ "base_model": "",
4
+ "revision": "85629ec9b18efee31d07630664e7a3815121badf",
5
+ "precision": "float16",
6
+ "params": 2.78,
7
+ "architectures": "PhiForCausalLM",
8
+ "weight_type": "Original",
9
+ "status": "PENDING",
10
+ "submitted_time": "2024-06-26T21:45:09Z",
11
+ "model_type": "🔶 : 🔶 fine-tuned on domain-specific datasets",
12
+ "job_id": -1,
13
+ "job_start_time": null,
14
+ "use_chat_template": true
15
+ }
Josephgflowers/TinyLlama-Cinder-Agent-v1_eval_request_False_float16_Original.json CHANGED
@@ -1 +1,15 @@
1
- {"model": "Josephgflowers/TinyLlama-Cinder-Agent-v1", "base_model": "", "revision": "a9cd8b48bfe30f29bb1f819213da9a4c41eee67f", "precision": "float16", "params": 1.1, "architectures": "LlamaForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-26T21:40:30Z", "model_type": "\ud83d\udd36 : \ud83d\udd36 fine-tuned on domain-specific datasets", "job_id": -1, "job_start_time": null, "use_chat_template": true}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "model": "Josephgflowers/TinyLlama-Cinder-Agent-v1",
3
+ "base_model": "",
4
+ "revision": "a9cd8b48bfe30f29bb1f819213da9a4c41eee67f",
5
+ "precision": "float16",
6
+ "params": 1.1,
7
+ "architectures": "LlamaForCausalLM",
8
+ "weight_type": "Original",
9
+ "status": "PENDING",
10
+ "submitted_time": "2024-06-26T21:40:30Z",
11
+ "model_type": "🔶 : 🔶 fine-tuned on domain-specific datasets",
12
+ "job_id": -1,
13
+ "job_start_time": null,
14
+ "use_chat_template": true
15
+ }
LLM360/K2-Chat_eval_request_False_float32_Original.json CHANGED
@@ -6,10 +6,10 @@
6
  "params": 65.286,
7
  "architectures": "LlamaForCausalLM",
8
  "weight_type": "Original",
9
- "status": "FINISHED",
10
  "submitted_time": "2024-06-12T12:07:30Z",
11
  "model_type": "chat",
12
  "job_id": "5757327",
13
  "job_start_time": "2024-06-15T12:56:23.549769",
14
  "use_chat_template": true
15
- }
 
6
  "params": 65.286,
7
  "architectures": "LlamaForCausalLM",
8
  "weight_type": "Original",
9
+ "status": "PENDING",
10
  "submitted_time": "2024-06-12T12:07:30Z",
11
  "model_type": "chat",
12
  "job_id": "5757327",
13
  "job_start_time": "2024-06-15T12:56:23.549769",
14
  "use_chat_template": true
15
+ }
LLM360/K2_eval_request_False_float16_Original.json CHANGED
@@ -1 +1,15 @@
1
- {"model": "LLM360/K2", "base_model": "", "revision": "49d159b6f2b64d562e745f0ff06e65b9a4c28ead", "precision": "float16", "params": 65.286, "architectures": "LlamaForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-26T16:18:19Z", "model_type": "\ud83d\udfe2 : \ud83d\udfe2 pretrained", "job_id": -1, "job_start_time": null, "use_chat_template": false}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "model": "LLM360/K2",
3
+ "base_model": "",
4
+ "revision": "49d159b6f2b64d562e745f0ff06e65b9a4c28ead",
5
+ "precision": "float16",
6
+ "params": 65.286,
7
+ "architectures": "LlamaForCausalLM",
8
+ "weight_type": "Original",
9
+ "status": "PENDING",
10
+ "submitted_time": "2024-06-26T16:18:19Z",
11
+ "model_type": "🟢 : 🟢 pretrained",
12
+ "job_id": -1,
13
+ "job_start_time": null,
14
+ "use_chat_template": false
15
+ }
Locutusque/Llama-3-NeuralHercules-5.0-8B_eval_request_False_bfloat16_Original.json CHANGED
@@ -1 +1,15 @@
1
- {"model": "Locutusque/Llama-3-NeuralHercules-5.0-8B", "base_model": "", "revision": "2bbb675e592a1772f2389fe2d58a5b610d479d94", "precision": "bfloat16", "params": 8.03, "architectures": "LlamaForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-26T18:43:56Z", "model_type": "\ud83d\udcac : \ud83d\udcac chat models (RLHF, DPO, IFT, ...)", "job_id": -1, "job_start_time": null, "use_chat_template": true}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "model": "Locutusque/Llama-3-NeuralHercules-5.0-8B",
3
+ "base_model": "",
4
+ "revision": "2bbb675e592a1772f2389fe2d58a5b610d479d94",
5
+ "precision": "bfloat16",
6
+ "params": 8.03,
7
+ "architectures": "LlamaForCausalLM",
8
+ "weight_type": "Original",
9
+ "status": "PENDING",
10
+ "submitted_time": "2024-06-26T18:43:56Z",
11
+ "model_type": "💬 : 💬 chat models (RLHF, DPO, IFT, ...)",
12
+ "job_id": -1,
13
+ "job_start_time": null,
14
+ "use_chat_template": true
15
+ }
Locutusque/Llama-3-Yggdrasil-2.0-8B_eval_request_False_bfloat16_Original.json CHANGED
@@ -1 +1,15 @@
1
- {"model": "Locutusque/Llama-3-Yggdrasil-2.0-8B", "base_model": "", "revision": "ec2329946ccc81a7c1ae36210728f717bc4f01d8", "precision": "bfloat16", "params": 8.03, "architectures": "LlamaForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-26T18:47:48Z", "model_type": "\ud83e\udd1d : \ud83e\udd1d base merges and moerges", "job_id": -1, "job_start_time": null, "use_chat_template": true}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "model": "Locutusque/Llama-3-Yggdrasil-2.0-8B",
3
+ "base_model": "",
4
+ "revision": "ec2329946ccc81a7c1ae36210728f717bc4f01d8",
5
+ "precision": "bfloat16",
6
+ "params": 8.03,
7
+ "architectures": "LlamaForCausalLM",
8
+ "weight_type": "Original",
9
+ "status": "PENDING",
10
+ "submitted_time": "2024-06-26T18:47:48Z",
11
+ "model_type": "🤝 : 🤝 base merges and moerges",
12
+ "job_id": -1,
13
+ "job_start_time": null,
14
+ "use_chat_template": true
15
+ }
MaziyarPanahi/Goku-8x22B-v0.1_eval_request_False_bfloat16_Original.json CHANGED
@@ -1 +1,15 @@
1
- {"model": "MaziyarPanahi/Goku-8x22B-v0.1", "base_model": "", "revision": "f75ebf5ceb71046184aaf969cb85daaac320e4a7", "precision": "bfloat16", "params": 140.621, "architectures": "MixtralForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-26T14:28:47Z", "model_type": "\ud83d\udd36 : \ud83d\udd36 fine-tuned on domain-specific datasets", "job_id": -1, "job_start_time": null, "use_chat_template": true}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "model": "MaziyarPanahi/Goku-8x22B-v0.1",
3
+ "base_model": "",
4
+ "revision": "f75ebf5ceb71046184aaf969cb85daaac320e4a7",
5
+ "precision": "bfloat16",
6
+ "params": 140.621,
7
+ "architectures": "MixtralForCausalLM",
8
+ "weight_type": "Original",
9
+ "status": "PENDING",
10
+ "submitted_time": "2024-06-26T14:28:47Z",
11
+ "model_type": "🔶 : 🔶 fine-tuned on domain-specific datasets",
12
+ "job_id": -1,
13
+ "job_start_time": null,
14
+ "use_chat_template": true
15
+ }
MaziyarPanahi/Goku-8x22B-v0.2_eval_request_False_bfloat16_Original.json CHANGED
@@ -1 +1,15 @@
1
- {"model": "MaziyarPanahi/Goku-8x22B-v0.2", "base_model": "", "revision": "215a4d7c13ac4b69540300d37a231f61968acaa6", "precision": "bfloat16", "params": 140.621, "architectures": "MixtralForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-26T14:29:06Z", "model_type": "\ud83d\udd36 : \ud83d\udd36 fine-tuned on domain-specific datasets", "job_id": -1, "job_start_time": null, "use_chat_template": true}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "model": "MaziyarPanahi/Goku-8x22B-v0.2",
3
+ "base_model": "",
4
+ "revision": "215a4d7c13ac4b69540300d37a231f61968acaa6",
5
+ "precision": "bfloat16",
6
+ "params": 140.621,
7
+ "architectures": "MixtralForCausalLM",
8
+ "weight_type": "Original",
9
+ "status": "PENDING",
10
+ "submitted_time": "2024-06-26T14:29:06Z",
11
+ "model_type": "🔶 : 🔶 fine-tuned on domain-specific datasets",
12
+ "job_id": -1,
13
+ "job_start_time": null,
14
+ "use_chat_template": true
15
+ }
MaziyarPanahi/Llama-3-70B-Instruct-DPO-v0.1_eval_request_False_bfloat16_Original.json CHANGED
@@ -1 +1,15 @@
1
- {"model": "MaziyarPanahi/Llama-3-70B-Instruct-DPO-v0.1", "base_model": "", "revision": "a584e3f14a5fdddced8def6e4bbe93da83e4b971", "precision": "bfloat16", "params": 70.554, "architectures": "LlamaForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-26T14:22:17Z", "model_type": "\ud83d\udcac : \ud83d\udcac chat models (RLHF, DPO, IFT, ...)", "job_id": -1, "job_start_time": null, "use_chat_template": true}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "model": "MaziyarPanahi/Llama-3-70B-Instruct-DPO-v0.1",
3
+ "base_model": "",
4
+ "revision": "a584e3f14a5fdddced8def6e4bbe93da83e4b971",
5
+ "precision": "bfloat16",
6
+ "params": 70.554,
7
+ "architectures": "LlamaForCausalLM",
8
+ "weight_type": "Original",
9
+ "status": "PENDING",
10
+ "submitted_time": "2024-06-26T14:22:17Z",
11
+ "model_type": "💬 : 💬 chat models (RLHF, DPO, IFT, ...)",
12
+ "job_id": -1,
13
+ "job_start_time": null,
14
+ "use_chat_template": true
15
+ }
MaziyarPanahi/Llama-3-70B-Instruct-DPO-v0.2_eval_request_False_bfloat16_Original.json CHANGED
@@ -1 +1,15 @@
1
- {"model": "MaziyarPanahi/Llama-3-70B-Instruct-DPO-v0.2", "base_model": "", "revision": "95366b974baedee4d95c1e841bc3d15e94753804", "precision": "bfloat16", "params": 70.554, "architectures": "LlamaForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-26T14:21:18Z", "model_type": "\ud83d\udcac : \ud83d\udcac chat models (RLHF, DPO, IFT, ...)", "job_id": -1, "job_start_time": null, "use_chat_template": true}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "model": "MaziyarPanahi/Llama-3-70B-Instruct-DPO-v0.2",
3
+ "base_model": "",
4
+ "revision": "95366b974baedee4d95c1e841bc3d15e94753804",
5
+ "precision": "bfloat16",
6
+ "params": 70.554,
7
+ "architectures": "LlamaForCausalLM",
8
+ "weight_type": "Original",
9
+ "status": "PENDING",
10
+ "submitted_time": "2024-06-26T14:21:18Z",
11
+ "model_type": "💬 : 💬 chat models (RLHF, DPO, IFT, ...)",
12
+ "job_id": -1,
13
+ "job_start_time": null,
14
+ "use_chat_template": true
15
+ }
MaziyarPanahi/Llama-3-70B-Instruct-DPO-v0.3_eval_request_False_bfloat16_Original.json CHANGED
@@ -1 +1,15 @@
1
- {"model": "MaziyarPanahi/Llama-3-70B-Instruct-DPO-v0.3", "base_model": "", "revision": "a1b2e5df828ab055d02cb2cbfb357557ce37881d", "precision": "bfloat16", "params": 70.554, "architectures": "LlamaForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-26T14:21:50Z", "model_type": "\ud83d\udcac : \ud83d\udcac chat models (RLHF, DPO, IFT, ...)", "job_id": -1, "job_start_time": null, "use_chat_template": true}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "model": "MaziyarPanahi/Llama-3-70B-Instruct-DPO-v0.3",
3
+ "base_model": "",
4
+ "revision": "a1b2e5df828ab055d02cb2cbfb357557ce37881d",
5
+ "precision": "bfloat16",
6
+ "params": 70.554,
7
+ "architectures": "LlamaForCausalLM",
8
+ "weight_type": "Original",
9
+ "status": "PENDING",
10
+ "submitted_time": "2024-06-26T14:21:50Z",
11
+ "model_type": "💬 : 💬 chat models (RLHF, DPO, IFT, ...)",
12
+ "job_id": -1,
13
+ "job_start_time": null,
14
+ "use_chat_template": true
15
+ }
MaziyarPanahi/Llama-3-70B-Instruct-DPO-v0.4_eval_request_False_bfloat16_Original.json CHANGED
@@ -1 +1,15 @@
1
- {"model": "MaziyarPanahi/Llama-3-70B-Instruct-DPO-v0.4", "base_model": "", "revision": "cb03e4d810b82d86e7cb01ab146bade09a5d06d1", "precision": "bfloat16", "params": 70.554, "architectures": "LlamaForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-26T14:20:48Z", "model_type": "\ud83d\udcac : \ud83d\udcac chat models (RLHF, DPO, IFT, ...)", "job_id": -1, "job_start_time": null, "use_chat_template": true}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "model": "MaziyarPanahi/Llama-3-70B-Instruct-DPO-v0.4",
3
+ "base_model": "",
4
+ "revision": "cb03e4d810b82d86e7cb01ab146bade09a5d06d1",
5
+ "precision": "bfloat16",
6
+ "params": 70.554,
7
+ "architectures": "LlamaForCausalLM",
8
+ "weight_type": "Original",
9
+ "status": "PENDING",
10
+ "submitted_time": "2024-06-26T14:20:48Z",
11
+ "model_type": "💬 : 💬 chat models (RLHF, DPO, IFT, ...)",
12
+ "job_id": -1,
13
+ "job_start_time": null,
14
+ "use_chat_template": true
15
+ }
MaziyarPanahi/Llama-3-70B-Instruct-v0.1_eval_request_False_bfloat16_Original.json CHANGED
@@ -1 +1,15 @@
1
- {"model": "MaziyarPanahi/Llama-3-70B-Instruct-v0.1", "base_model": "", "revision": "6db1cb4256525fc5429734ddc0eb941d08d0be30", "precision": "bfloat16", "params": 70.554, "architectures": "LlamaForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-26T14:20:08Z", "model_type": "\ud83d\udcac : \ud83d\udcac chat models (RLHF, DPO, IFT, ...)", "job_id": -1, "job_start_time": null, "use_chat_template": true}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "model": "MaziyarPanahi/Llama-3-70B-Instruct-v0.1",
3
+ "base_model": "",
4
+ "revision": "6db1cb4256525fc5429734ddc0eb941d08d0be30",
5
+ "precision": "bfloat16",
6
+ "params": 70.554,
7
+ "architectures": "LlamaForCausalLM",
8
+ "weight_type": "Original",
9
+ "status": "PENDING",
10
+ "submitted_time": "2024-06-26T14:20:08Z",
11
+ "model_type": "💬 : 💬 chat models (RLHF, DPO, IFT, ...)",
12
+ "job_id": -1,
13
+ "job_start_time": null,
14
+ "use_chat_template": true
15
+ }
MaziyarPanahi/Llama-3-8B-Instruct-v0.10_eval_request_False_bfloat16_Original.json CHANGED
@@ -1 +1,15 @@
1
- {"model": "MaziyarPanahi/Llama-3-8B-Instruct-v0.10", "base_model": "", "revision": "4411eb9f6f5e4c462a6bdbc64c26dcc123100b66", "precision": "bfloat16", "params": 8.03, "architectures": "LlamaForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-26T14:41:58Z", "model_type": "\ud83d\udd36 : \ud83d\udd36 fine-tuned on domain-specific datasets", "job_id": -1, "job_start_time": null, "use_chat_template": true}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "model": "MaziyarPanahi/Llama-3-8B-Instruct-v0.10",
3
+ "base_model": "",
4
+ "revision": "4411eb9f6f5e4c462a6bdbc64c26dcc123100b66",
5
+ "precision": "bfloat16",
6
+ "params": 8.03,
7
+ "architectures": "LlamaForCausalLM",
8
+ "weight_type": "Original",
9
+ "status": "PENDING",
10
+ "submitted_time": "2024-06-26T14:41:58Z",
11
+ "model_type": "🔶 : 🔶 fine-tuned on domain-specific datasets",
12
+ "job_id": -1,
13
+ "job_start_time": null,
14
+ "use_chat_template": true
15
+ }
MaziyarPanahi/Llama-3-8B-Instruct-v0.8_eval_request_False_bfloat16_Original.json CHANGED
@@ -1 +1,15 @@
1
- {"model": "MaziyarPanahi/Llama-3-8B-Instruct-v0.8", "base_model": "", "revision": "94d222b8447b600b9836da4036df9490b59fe966", "precision": "bfloat16", "params": 8.03, "architectures": "LlamaForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-26T14:41:21Z", "model_type": "\ud83d\udd36 : \ud83d\udd36 fine-tuned on domain-specific datasets", "job_id": -1, "job_start_time": null, "use_chat_template": true}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "model": "MaziyarPanahi/Llama-3-8B-Instruct-v0.8",
3
+ "base_model": "",
4
+ "revision": "94d222b8447b600b9836da4036df9490b59fe966",
5
+ "precision": "bfloat16",
6
+ "params": 8.03,
7
+ "architectures": "LlamaForCausalLM",
8
+ "weight_type": "Original",
9
+ "status": "PENDING",
10
+ "submitted_time": "2024-06-26T14:41:21Z",
11
+ "model_type": "🔶 : 🔶 fine-tuned on domain-specific datasets",
12
+ "job_id": -1,
13
+ "job_start_time": null,
14
+ "use_chat_template": true
15
+ }
MaziyarPanahi/Llama-3-8B-Instruct-v0.9_eval_request_False_bfloat16_Original.json CHANGED
@@ -1 +1,15 @@
1
- {"model": "MaziyarPanahi/Llama-3-8B-Instruct-v0.9", "base_model": "", "revision": "ddf91fdc0a3ab5e5d76864f1c4cf44e5adacd565", "precision": "bfloat16", "params": 8.03, "architectures": "LlamaForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-26T14:41:38Z", "model_type": "\ud83d\udd36 : \ud83d\udd36 fine-tuned on domain-specific datasets", "job_id": -1, "job_start_time": null, "use_chat_template": true}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "model": "MaziyarPanahi/Llama-3-8B-Instruct-v0.9",
3
+ "base_model": "",
4
+ "revision": "ddf91fdc0a3ab5e5d76864f1c4cf44e5adacd565",
5
+ "precision": "bfloat16",
6
+ "params": 8.03,
7
+ "architectures": "LlamaForCausalLM",
8
+ "weight_type": "Original",
9
+ "status": "PENDING",
10
+ "submitted_time": "2024-06-26T14:41:38Z",
11
+ "model_type": "🔶 : 🔶 fine-tuned on domain-specific datasets",
12
+ "job_id": -1,
13
+ "job_start_time": null,
14
+ "use_chat_template": true
15
+ }
MaziyarPanahi/Phi-3-mini-4k-instruct-v0.1_eval_request_False_bfloat16_Original.json CHANGED
@@ -1 +1,15 @@
1
- {"model": "MaziyarPanahi/Phi-3-mini-4k-instruct-v0.1", "base_model": "", "revision": "6764c79badacba5fa3584d2d2593d762caa1d17d", "precision": "bfloat16", "params": 3.821, "architectures": "Phi3ForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-26T14:30:49Z", "model_type": "\ud83d\udcac : \ud83d\udcac chat models (RLHF, DPO, IFT, ...)", "job_id": -1, "job_start_time": null, "use_chat_template": true}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "model": "MaziyarPanahi/Phi-3-mini-4k-instruct-v0.1",
3
+ "base_model": "",
4
+ "revision": "6764c79badacba5fa3584d2d2593d762caa1d17d",
5
+ "precision": "bfloat16",
6
+ "params": 3.821,
7
+ "architectures": "Phi3ForCausalLM",
8
+ "weight_type": "Original",
9
+ "status": "PENDING",
10
+ "submitted_time": "2024-06-26T14:30:49Z",
11
+ "model_type": "💬 : 💬 chat models (RLHF, DPO, IFT, ...)",
12
+ "job_id": -1,
13
+ "job_start_time": null,
14
+ "use_chat_template": true
15
+ }
MaziyarPanahi/Phi-3-mini-4k-instruct-v0.2_eval_request_False_bfloat16_Original.json CHANGED
@@ -1 +1,15 @@
1
- {"model": "MaziyarPanahi/Phi-3-mini-4k-instruct-v0.2", "base_model": "", "revision": "c0a366a4c01d7e724ceba7e2f2c19251983423fe", "precision": "bfloat16", "params": 3.821, "architectures": "Phi3ForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-26T14:31:11Z", "model_type": "\ud83d\udcac : \ud83d\udcac chat models (RLHF, DPO, IFT, ...)", "job_id": -1, "job_start_time": null, "use_chat_template": true}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "model": "MaziyarPanahi/Phi-3-mini-4k-instruct-v0.2",
3
+ "base_model": "",
4
+ "revision": "c0a366a4c01d7e724ceba7e2f2c19251983423fe",
5
+ "precision": "bfloat16",
6
+ "params": 3.821,
7
+ "architectures": "Phi3ForCausalLM",
8
+ "weight_type": "Original",
9
+ "status": "PENDING",
10
+ "submitted_time": "2024-06-26T14:31:11Z",
11
+ "model_type": "💬 : 💬 chat models (RLHF, DPO, IFT, ...)",
12
+ "job_id": -1,
13
+ "job_start_time": null,
14
+ "use_chat_template": true
15
+ }
MaziyarPanahi/Phi-3-mini-4k-instruct-v0.3_eval_request_False_bfloat16_Original.json CHANGED
@@ -1 +1,15 @@
1
- {"model": "MaziyarPanahi/Phi-3-mini-4k-instruct-v0.3", "base_model": "", "revision": "e1f70c3724c728aadd1c7c1bb279487494f7059e", "precision": "bfloat16", "params": 3.821, "architectures": "Phi3ForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-26T14:32:08Z", "model_type": "\ud83d\udcac : \ud83d\udcac chat models (RLHF, DPO, IFT, ...)", "job_id": -1, "job_start_time": null, "use_chat_template": true}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "model": "MaziyarPanahi/Phi-3-mini-4k-instruct-v0.3",
3
+ "base_model": "",
4
+ "revision": "e1f70c3724c728aadd1c7c1bb279487494f7059e",
5
+ "precision": "bfloat16",
6
+ "params": 3.821,
7
+ "architectures": "Phi3ForCausalLM",
8
+ "weight_type": "Original",
9
+ "status": "PENDING",
10
+ "submitted_time": "2024-06-26T14:32:08Z",
11
+ "model_type": "💬 : 💬 chat models (RLHF, DPO, IFT, ...)",
12
+ "job_id": -1,
13
+ "job_start_time": null,
14
+ "use_chat_template": true
15
+ }
MaziyarPanahi/Qwen2-72B-Instruct-v0.1_eval_request_False_bfloat16_Original.json CHANGED
@@ -1 +1,15 @@
1
- {"model": "MaziyarPanahi/Qwen2-72B-Instruct-v0.1", "base_model": "", "revision": "0369c39770f45f2464587918f2dbdb8449ea3a0d", "precision": "bfloat16", "params": 72.699, "architectures": "Qwen2ForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-26T14:23:39Z", "model_type": "\ud83d\udcac : \ud83d\udcac chat models (RLHF, DPO, IFT, ...)", "job_id": -1, "job_start_time": null, "use_chat_template": true}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "model": "MaziyarPanahi/Qwen2-72B-Instruct-v0.1",
3
+ "base_model": "",
4
+ "revision": "0369c39770f45f2464587918f2dbdb8449ea3a0d",
5
+ "precision": "bfloat16",
6
+ "params": 72.699,
7
+ "architectures": "Qwen2ForCausalLM",
8
+ "weight_type": "Original",
9
+ "status": "PENDING",
10
+ "submitted_time": "2024-06-26T14:23:39Z",
11
+ "model_type": "💬 : 💬 chat models (RLHF, DPO, IFT, ...)",
12
+ "job_id": -1,
13
+ "job_start_time": null,
14
+ "use_chat_template": true
15
+ }
Naveenpoliasetty/llama3-8B-V2_eval_request_False_float16_Original.json CHANGED
@@ -1 +1,15 @@
1
- {"model": "Naveenpoliasetty/llama3-8B-V2", "base_model": "", "revision": "e0458381d02bc411b9e576796d185f23dcc11f71", "precision": "float16", "params": 8.03, "architectures": "LlamaForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-26T18:54:47Z", "model_type": "\ud83d\udd36 : \ud83d\udd36 fine-tuned on domain-specific datasets", "job_id": -1, "job_start_time": null, "use_chat_template": false}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "model": "Naveenpoliasetty/llama3-8B-V2",
3
+ "base_model": "",
4
+ "revision": "e0458381d02bc411b9e576796d185f23dcc11f71",
5
+ "precision": "float16",
6
+ "params": 8.03,
7
+ "architectures": "LlamaForCausalLM",
8
+ "weight_type": "Original",
9
+ "status": "PENDING",
10
+ "submitted_time": "2024-06-26T18:54:47Z",
11
+ "model_type": "🔶 : 🔶 fine-tuned on domain-specific datasets",
12
+ "job_id": -1,
13
+ "job_start_time": null,
14
+ "use_chat_template": false
15
+ }
NeverSleep/CausalLM-RP-34B_eval_request_False_float16_Original.json CHANGED
@@ -1 +1,15 @@
1
- {"model": "NeverSleep/CausalLM-RP-34B", "base_model": "", "revision": "e2a033646231bd947a3948d3aac198d34d04ea38", "precision": "float16", "params": 34.0, "architectures": "LlamaForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-26T19:57:33Z", "model_type": "\ud83d\udd36 : \ud83d\udd36 fine-tuned on domain-specific datasets", "job_id": -1, "job_start_time": null, "use_chat_template": false}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "model": "NeverSleep/CausalLM-RP-34B",
3
+ "base_model": "",
4
+ "revision": "e2a033646231bd947a3948d3aac198d34d04ea38",
5
+ "precision": "float16",
6
+ "params": 34,
7
+ "architectures": "LlamaForCausalLM",
8
+ "weight_type": "Original",
9
+ "status": "PENDING",
10
+ "submitted_time": "2024-06-26T19:57:33Z",
11
+ "model_type": "🔶 : 🔶 fine-tuned on domain-specific datasets",
12
+ "job_id": -1,
13
+ "job_start_time": null,
14
+ "use_chat_template": false
15
+ }
Nitral-AI/Hathor_Stable-v0.2-L3-8B_eval_request_False_bfloat16_Original.json CHANGED
@@ -1 +1,15 @@
1
- {"model": "Nitral-AI/Hathor_Stable-v0.2-L3-8B", "base_model": "", "revision": "6109d7624cadc4ddba5f23ed0abe99f6b29c9139", "precision": "bfloat16", "params": 8.03, "architectures": "LlamaForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-26T14:59:23Z", "model_type": "\ud83d\udd36 : \ud83d\udd36 fine-tuned on domain-specific datasets", "job_id": -1, "job_start_time": null, "use_chat_template": true}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "model": "Nitral-AI/Hathor_Stable-v0.2-L3-8B",
3
+ "base_model": "",
4
+ "revision": "6109d7624cadc4ddba5f23ed0abe99f6b29c9139",
5
+ "precision": "bfloat16",
6
+ "params": 8.03,
7
+ "architectures": "LlamaForCausalLM",
8
+ "weight_type": "Original",
9
+ "status": "PENDING",
10
+ "submitted_time": "2024-06-26T14:59:23Z",
11
+ "model_type": "🔶 : 🔶 fine-tuned on domain-specific datasets",
12
+ "job_id": -1,
13
+ "job_start_time": null,
14
+ "use_chat_template": true
15
+ }
Nitral-AI/Poppy_Porpoise-0.72-L3-8B_eval_request_False_bfloat16_Original.json CHANGED
@@ -1 +1,15 @@
1
- {"model": "Nitral-AI/Poppy_Porpoise-0.72-L3-8B", "base_model": "", "revision": "022d166a2aa5d323cc6d683728e867694cefa513", "precision": "bfloat16", "params": 8.0, "architectures": "LlamaForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-26T15:05:27Z", "model_type": "\ud83d\udd36 : \ud83d\udd36 fine-tuned on domain-specific datasets", "job_id": -1, "job_start_time": null, "use_chat_template": true}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "model": "Nitral-AI/Poppy_Porpoise-0.72-L3-8B",
3
+ "base_model": "",
4
+ "revision": "022d166a2aa5d323cc6d683728e867694cefa513",
5
+ "precision": "bfloat16",
6
+ "params": 8,
7
+ "architectures": "LlamaForCausalLM",
8
+ "weight_type": "Original",
9
+ "status": "PENDING",
10
+ "submitted_time": "2024-06-26T15:05:27Z",
11
+ "model_type": "🔶 : 🔶 fine-tuned on domain-specific datasets",
12
+ "job_id": -1,
13
+ "job_start_time": null,
14
+ "use_chat_template": true
15
+ }
NousResearch/Hermes-2-Theta-Llama-3-70B_eval_request_False_bfloat16_Original.json CHANGED
@@ -6,10 +6,10 @@
6
  "params": 70.554,
7
  "architectures": "LlamaForCausalLM",
8
  "weight_type": "Original",
9
- "status": "FAILED",
10
  "submitted_time": "2024-06-26T15:14:27Z",
11
- "model_type": "\ud83d\udd36 : \ud83d\udd36 fine-tuned on domain-specific datasets",
12
  "job_id": "7206065",
13
  "job_start_time": "2024-06-26T16:41:39.162018",
14
  "use_chat_template": true
15
- }
 
6
  "params": 70.554,
7
  "architectures": "LlamaForCausalLM",
8
  "weight_type": "Original",
9
+ "status": "PENDING",
10
  "submitted_time": "2024-06-26T15:14:27Z",
11
+ "model_type": "🔶 : 🔶 fine-tuned on domain-specific datasets",
12
  "job_id": "7206065",
13
  "job_start_time": "2024-06-26T16:41:39.162018",
14
  "use_chat_template": true
15
+ }
NousResearch/Yarn-Llama-2-13b-128k_eval_request_False_bfloat16_Original.json CHANGED
@@ -3,13 +3,13 @@
3
  "base_model": null,
4
  "revision": "4e3e87a067f64f8814c83dd5e3bad92dcf8a2391",
5
  "precision": "bfloat16",
6
- "params": 13.0,
7
  "architectures": "LlamaForCausalLM",
8
  "weight_type": "Original",
9
- "status": "FAILED",
10
  "submitted_time": "2024-06-13T18:09:47Z",
11
  "model_type": "pretrained",
12
  "job_id": "6660890",
13
  "job_start_time": "2024-06-22T18:36:33.519447",
14
  "use_chat_template": false
15
- }
 
3
  "base_model": null,
4
  "revision": "4e3e87a067f64f8814c83dd5e3bad92dcf8a2391",
5
  "precision": "bfloat16",
6
+ "params": 13,
7
  "architectures": "LlamaForCausalLM",
8
  "weight_type": "Original",
9
+ "status": "PENDING",
10
  "submitted_time": "2024-06-13T18:09:47Z",
11
  "model_type": "pretrained",
12
  "job_id": "6660890",
13
  "job_start_time": "2024-06-22T18:36:33.519447",
14
  "use_chat_template": false
15
+ }
NucleusAI/nucleus-22B-token-500B_eval_request_False_bfloat16_Original.json CHANGED
@@ -1 +1,15 @@
1
- {"model": "NucleusAI/nucleus-22B-token-500B", "base_model": "", "revision": "49bb1a47c0d32b4bfa6630a4eff04a857adcd4ca", "precision": "bfloat16", "params": 21.828, "architectures": "LlamaForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-26T21:29:04Z", "model_type": "\ud83d\udfe2 : \ud83d\udfe2 pretrained", "job_id": -1, "job_start_time": null, "use_chat_template": false}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "model": "NucleusAI/nucleus-22B-token-500B",
3
+ "base_model": "",
4
+ "revision": "49bb1a47c0d32b4bfa6630a4eff04a857adcd4ca",
5
+ "precision": "bfloat16",
6
+ "params": 21.828,
7
+ "architectures": "LlamaForCausalLM",
8
+ "weight_type": "Original",
9
+ "status": "PENDING",
10
+ "submitted_time": "2024-06-26T21:29:04Z",
11
+ "model_type": "🟢 : 🟢 pretrained",
12
+ "job_id": -1,
13
+ "job_start_time": null,
14
+ "use_chat_template": false
15
+ }
OpenBuddy/openbuddy-deepseek-67b-v18.1-4k_eval_request_False_bfloat16_Original.json CHANGED
@@ -1 +1,15 @@
1
- {"model": "OpenBuddy/openbuddy-deepseek-67b-v18.1-4k", "base_model": "", "revision": "897fd7543d9b2abe04b9a5b92db79c090bc169ce", "precision": "bfloat16", "params": 67.425, "architectures": "LlamaForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-26T14:30:09Z", "model_type": "\ud83d\udcac : \ud83d\udcac chat models (RLHF, DPO, IFT, ...)", "job_id": -1, "job_start_time": null, "use_chat_template": true}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "model": "OpenBuddy/openbuddy-deepseek-67b-v18.1-4k",
3
+ "base_model": "",
4
+ "revision": "897fd7543d9b2abe04b9a5b92db79c090bc169ce",
5
+ "precision": "bfloat16",
6
+ "params": 67.425,
7
+ "architectures": "LlamaForCausalLM",
8
+ "weight_type": "Original",
9
+ "status": "PENDING",
10
+ "submitted_time": "2024-06-26T14:30:09Z",
11
+ "model_type": "💬 : 💬 chat models (RLHF, DPO, IFT, ...)",
12
+ "job_id": -1,
13
+ "job_start_time": null,
14
+ "use_chat_template": true
15
+ }