Update app.py
app.py CHANGED
@@ -12,6 +12,14 @@ from huggingface_hub import snapshot_download
 from vptq.app_utils import get_chat_loop_generator
 
 models = [
+    {
+        "name": "VPTQ-community/Meta-Llama-3.1-8B-Instruct-v8-k65536-256-woft",
+        "bits": "3 bits"
+    },
+    {
+        "name": "VPTQ-community/Meta-Llama-3.1-8B-Instruct-v8-k65536-65536-woft",
+        "bits": "4 bits"
+    },
     {
         "name": "VPTQ-community/Meta-Llama-3.1-70B-Instruct-v16-k65536-65536-woft",
         "bits": "2 bits"
@@ -24,10 +32,6 @@ models = [
         "name": "VPTQ-community/Meta-Llama-3.1-70B-Instruct-v8-k65536-65536-woft",
         "bits": "4 bits"
     },
-    {
-        "name": "VPTQ-community/Meta-Llama-3.1-8B-Instruct-v8-k65536-65536-woft",
-        "bits": "4 bits"
-    },
     {
         "name": "VPTQ-community/Qwen2.5-72B-Instruct-v8-k65536-65536-woft",
         "bits": "4 bits"
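
For context, the models list drives the demo's model selector and get_chat_loop_generator builds the chat backend for the chosen checkpoint. Below is a minimal sketch of how app.py could consume the updated list; the exact signature of get_chat_loop_generator and the label/selection logic are assumptions, not part of this diff.

# Sketch only: mapping the updated models list to a model selector and a
# default chat generator. get_chat_loop_generator(model_name) is assumed
# to take a repo id string and return a streaming chat callable.
from vptq.app_utils import get_chat_loop_generator

models = [
    {"name": "VPTQ-community/Meta-Llama-3.1-8B-Instruct-v8-k65536-256-woft", "bits": "3 bits"},
    {"name": "VPTQ-community/Meta-Llama-3.1-8B-Instruct-v8-k65536-65536-woft", "bits": "4 bits"},
    # ... remaining 70B / 72B entries as in the diff above ...
]

# Human-readable labels (name + bit width) mapped back to repo ids.
model_choices = {f'{m["name"]} ({m["bits"]})': m["name"] for m in models}

# Load the chat generator for the default (first, lightest) model.
chat_completion = get_chat_loop_generator(models[0]["name"])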