CatUkraine committed
Commit: bc7f4e6
Parent: ae4a8de

Update README.md

Files changed (1)
  1. README.md +15 -7
README.md CHANGED
@@ -22,7 +22,8 @@ model-index:
       value: 57.85
       name: normalized accuracy
     source:
-      url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard?query=Radu1999/Mistral-Instruct-Ukrainian-SFT
+      url: >-
+        https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard?query=Radu1999/Mistral-Instruct-Ukrainian-SFT
       name: Open LLM Leaderboard
   - task:
       type: text-generation
@@ -38,7 +39,8 @@ model-index:
       value: 83.12
       name: normalized accuracy
     source:
-      url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard?query=Radu1999/Mistral-Instruct-Ukrainian-SFT
+      url: >-
+        https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard?query=Radu1999/Mistral-Instruct-Ukrainian-SFT
       name: Open LLM Leaderboard
   - task:
       type: text-generation
@@ -55,7 +57,8 @@ model-index:
       value: 60.95
       name: accuracy
     source:
-      url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard?query=Radu1999/Mistral-Instruct-Ukrainian-SFT
+      url: >-
+        https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard?query=Radu1999/Mistral-Instruct-Ukrainian-SFT
       name: Open LLM Leaderboard
   - task:
       type: text-generation
@@ -71,7 +74,8 @@ model-index:
     - type: mc2
      value: 54.14
     source:
-      url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard?query=Radu1999/Mistral-Instruct-Ukrainian-SFT
+      url: >-
+        https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard?query=Radu1999/Mistral-Instruct-Ukrainian-SFT
       name: Open LLM Leaderboard
   - task:
       type: text-generation
@@ -88,7 +92,8 @@ model-index:
       value: 77.51
       name: accuracy
     source:
-      url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard?query=Radu1999/Mistral-Instruct-Ukrainian-SFT
+      url: >-
+        https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard?query=Radu1999/Mistral-Instruct-Ukrainian-SFT
       name: Open LLM Leaderboard
   - task:
       type: text-generation
@@ -105,8 +110,11 @@ model-index:
       value: 39.42
       name: accuracy
     source:
-      url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard?query=Radu1999/Mistral-Instruct-Ukrainian-SFT
+      url: >-
+        https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard?query=Radu1999/Mistral-Instruct-Ukrainian-SFT
       name: Open LLM Leaderboard
+language:
+- uk
 ---
 
 # CatUkraine/Mistral-Instruct-Ukrainian-SFT-Q2_K-GGUF
@@ -137,4 +145,4 @@ Note: You can also use this checkpoint directly through the [usage steps](https:
 
 ```
 git clone https://github.com/ggerganov/llama.cpp && cd llama.cpp && make && ./main -m mistral-instruct-ukrainian-sft.Q2_K.gguf -n 128
-```
+```
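The substantive change in each hunk is purely presentational: the long leaderboard URL moves from a plain one-line scalar to a YAML folded block scalar (`>-`), which keeps the front matter within reasonable line lengths without changing the parsed value. A minimal sketch of that equivalence is below; it assumes PyYAML is available and is illustrative only, not part of the repository.

```python
# Sketch: confirm that the folded-scalar form of the leaderboard URL parses
# to the same plain string as the old one-line form. Assumes PyYAML.
import yaml

old_form = """
source:
  url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard?query=Radu1999/Mistral-Instruct-Ukrainian-SFT
"""

new_form = """
source:
  url: >-
    https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard?query=Radu1999/Mistral-Instruct-Ukrainian-SFT
"""

# '>-' folds the indented continuation line back onto one line and strips the
# trailing newline, so both documents load to an identical URL string.
assert yaml.safe_load(old_form) == yaml.safe_load(new_form)
print(yaml.safe_load(new_form)["source"]["url"])
```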
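The `./main` command at the end of the diff expects the quantized GGUF file to be present locally. A minimal sketch of fetching it from the Hub, assuming the `huggingface_hub` Python client and that the filename below (taken from the README's `./main` call) matches the file actually stored in the repo:

```python
# Sketch: download the Q2_K GGUF so the llama.cpp command in the README has a
# local file to load. Assumes `huggingface_hub` is installed; the filename is
# taken from the README's ./main invocation.
from huggingface_hub import hf_hub_download

model_path = hf_hub_download(
    repo_id="CatUkraine/Mistral-Instruct-Ukrainian-SFT-Q2_K-GGUF",
    filename="mistral-instruct-ukrainian-sft.Q2_K.gguf",
)
print(model_path)  # pass this path to: ./main -m <model_path> -n 128
```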