Update app.py
app.py
CHANGED
@@ -30,7 +30,7 @@ DEFAULT_MAX_NEW_TOKENS = 1024
 MAX_INPUT_TOKEN_LENGTH = int(os.getenv("MAX_INPUT_TOKEN_LENGTH", "4096"))
 
 if torch.cuda.is_available():
-    model_id = "TIGER-Lab/MAmmoTH2-
+    model_id = "TIGER-Lab/MAmmoTH2-7B-Plus"
     model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype=torch.float16, device_map="auto")
     tokenizer = AutoTokenizer.from_pretrained(model_id)
 
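For context, a minimal sketch of how the model and tokenizer loaded in this hunk are typically used in a Space like this one: the prompt is tokenized, truncated to MAX_INPUT_TOKEN_LENGTH, and passed to generate. The generate helper below and its default of 1024 new tokens (matching DEFAULT_MAX_NEW_TOKENS in the hunk header) are illustrative assumptions, not part of this commit.

# Sketch only; assumes the standard transformers generation API.
import os

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

MAX_INPUT_TOKEN_LENGTH = int(os.getenv("MAX_INPUT_TOKEN_LENGTH", "4096"))

if torch.cuda.is_available():
    model_id = "TIGER-Lab/MAmmoTH2-7B-Plus"
    model = AutoModelForCausalLM.from_pretrained(
        model_id, torch_dtype=torch.float16, device_map="auto"
    )
    tokenizer = AutoTokenizer.from_pretrained(model_id)

    def generate(prompt: str, max_new_tokens: int = 1024) -> str:
        # Tokenize and keep only the last MAX_INPUT_TOKEN_LENGTH tokens.
        input_ids = tokenizer(prompt, return_tensors="pt").input_ids
        if input_ids.shape[-1] > MAX_INPUT_TOKEN_LENGTH:
            input_ids = input_ids[:, -MAX_INPUT_TOKEN_LENGTH:]
        input_ids = input_ids.to(model.device)
        output_ids = model.generate(input_ids, max_new_tokens=max_new_tokens)
        # Decode only the newly generated continuation, not the prompt.
        return tokenizer.decode(
            output_ids[0][input_ids.shape[-1]:], skip_special_tokens=True
        )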