update logo
Browse files · src/display/utils.py (+14, −14)
src/display/utils.py
CHANGED
@@ -148,9 +148,9 @@ class ModelDetails:
|
|
148 |
class ModelType(Enum):
|
149 |
PT = ModelDetails(name="GPTQ", symbol="π’")
|
150 |
CPT = ModelDetails(name="AWQ", symbol="🟩")
|
151 |
-
FT = ModelDetails(name="llama.cpp", symbol="
|
152 |
-
chat = ModelDetails(name="Bisandbytes", symbol="
|
153 |
-
merges = ModelDetails(name="AutoRound", symbol="
|
154 |
Unknown = ModelDetails(name="", symbol="?")
|
155 |
|
156 |
def to_str(self, separator=" "):
|
@@ -158,15 +158,15 @@ class ModelType(Enum):
|
|
158 |
|
159 |
@staticmethod
|
160 |
def from_str(type):
|
161 |
-
if "fine-tuned" in type or "
|
162 |
return ModelType.FT
|
163 |
if "continously pretrained" in type or "🟩" in type:
|
164 |
return ModelType.CPT
|
165 |
if "pretrained" in type or "π’" in type:
|
166 |
return ModelType.PT
|
167 |
-
if any([k in type for k in ["instruction-tuned", "RL-tuned", "chat", "π¦", "β", "
|
168 |
return ModelType.chat
|
169 |
-
if "merge" in type or "
|
170 |
return ModelType.merges
|
171 |
return ModelType.Unknown
|
172 |
"""
|
@@ -174,9 +174,9 @@ class ModelType(Enum):
|
|
174 |
class ModelType(Enum):
|
175 |
PT = ModelDetails(name="pretrained", symbol="π’")
|
176 |
CPT = ModelDetails(name="continuously pretrained", symbol="🟩")
|
177 |
-
FT = ModelDetails(name="fine-tuned on domain-specific datasets", symbol="
|
178 |
-
chat = ModelDetails(name="chat models (RLHF, DPO, IFT, ...)", symbol="
|
179 |
-
merges = ModelDetails(name="base merges and moerges", symbol="
|
180 |
Unknown = ModelDetails(name="", symbol="?")
|
181 |
|
182 |
def to_str(self, separator=" "):
|
@@ -184,15 +184,15 @@ class ModelType(Enum):
|
|
184 |
|
185 |
@staticmethod
|
186 |
def from_str(type):
|
187 |
-
if "fine-tuned" in type or "
|
188 |
return ModelType.FT
|
189 |
if "continously pretrained" in type or "🟩" in type:
|
190 |
return ModelType.CPT
|
191 |
if "pretrained" in type or "π’" in type or "quantization" in type:
|
192 |
return ModelType.PT
|
193 |
-
if any([k in type for k in ["instruction-tuned", "RL-tuned", "chat", "π¦", "β", "
|
194 |
return ModelType.chat
|
195 |
-
if "merge" in type or "
|
196 |
return ModelType.merges
|
197 |
return ModelType.Unknown
|
198 |
|
@@ -206,8 +206,8 @@ class QuantType(Enum):
|
|
206 |
gptq = ModelDetails(name="GPTQ", symbol="π’")
|
207 |
aqlm = ModelDetails(name="AQLM", symbol="⭕")
|
208 |
awq = ModelDetails(name="AWQ", symbol="🟩")
|
209 |
-
llama_cpp = ModelDetails(name="llama.cpp", symbol="
|
210 |
-
bnb = ModelDetails(name="bitsandbytes", symbol="
|
211 |
autoround = ModelDetails(name="AutoRound", symbol="π")
|
212 |
Unknown = ModelDetails(name="?", symbol="?")
|
213 |
|
|
|
148 |
class ModelType(Enum):
|
149 |
PT = ModelDetails(name="GPTQ", symbol="π’")
|
150 |
CPT = ModelDetails(name="AWQ", symbol="🟩")
|
151 |
+
FT = ModelDetails(name="llama.cpp", symbol="π·")
|
152 |
+
chat = ModelDetails(name="Bisandbytes", symbol="π΅")
|
153 |
+
merges = ModelDetails(name="AutoRound", symbol="π")
|
154 |
Unknown = ModelDetails(name="", symbol="?")
|
155 |
|
156 |
def to_str(self, separator=" "):
|
|
|
158 |
|
159 |
@staticmethod
|
160 |
def from_str(type):
|
161 |
+
if "fine-tuned" in type or "π·" in type:
|
162 |
return ModelType.FT
|
163 |
if "continously pretrained" in type or "🟩" in type:
|
164 |
return ModelType.CPT
|
165 |
if "pretrained" in type or "π’" in type:
|
166 |
return ModelType.PT
|
167 |
+
if any([k in type for k in ["instruction-tuned", "RL-tuned", "chat", "π¦", "⭕", "π΅"]]):
|
168 |
return ModelType.chat
|
169 |
+
if "merge" in type or "π" in type:
|
170 |
return ModelType.merges
|
171 |
return ModelType.Unknown
|
172 |
"""
|
|
|
174 |
class ModelType(Enum):
|
175 |
PT = ModelDetails(name="pretrained", symbol="π’")
|
176 |
CPT = ModelDetails(name="continuously pretrained", symbol="🟩")
|
177 |
+
FT = ModelDetails(name="fine-tuned on domain-specific datasets", symbol="π·")
|
178 |
+
chat = ModelDetails(name="chat models (RLHF, DPO, IFT, ...)", symbol="π΅")
|
179 |
+
merges = ModelDetails(name="base merges and moerges", symbol="π")
|
180 |
Unknown = ModelDetails(name="", symbol="?")
|
181 |
|
182 |
def to_str(self, separator=" "):
|
|
|
184 |
|
185 |
@staticmethod
|
186 |
def from_str(type):
|
187 |
+
if "fine-tuned" in type or "π·" in type:
|
188 |
return ModelType.FT
|
189 |
if "continously pretrained" in type or "🟩" in type:
|
190 |
return ModelType.CPT
|
191 |
if "pretrained" in type or "π’" in type or "quantization" in type:
|
192 |
return ModelType.PT
|
193 |
+
if any([k in type for k in ["instruction-tuned", "RL-tuned", "chat", "π¦", "⭕", "π΅"]]):
|
194 |
return ModelType.chat
|
195 |
+
if "merge" in type or "π" in type:
|
196 |
return ModelType.merges
|
197 |
return ModelType.Unknown
|
198 |
|
|
|
206 |
gptq = ModelDetails(name="GPTQ", symbol="π’")
|
207 |
aqlm = ModelDetails(name="AQLM", symbol="⭕")
|
208 |
awq = ModelDetails(name="AWQ", symbol="🟩")
|
209 |
+
llama_cpp = ModelDetails(name="llama.cpp", symbol="π·")
|
210 |
+
bnb = ModelDetails(name="bitsandbytes", symbol="π΅")
|
211 |
autoround = ModelDetails(name="AutoRound", symbol="π")
|
212 |
Unknown = ModelDetails(name="?", symbol="?")
|
213 |
|