Update app.py

app.py CHANGED
@@ -3,7 +3,7 @@ import spaces
 from transformers import AutoModelForCausalLM, AutoTokenizer
 import torch
 
-model_name = "
+model_name = "Sakalti/model-3"
 
 model = AutoModelForCausalLM.from_pretrained(
     model_name,
@@ -12,7 +12,7 @@ model = AutoModelForCausalLM.from_pretrained(
 )
 tokenizer = AutoTokenizer.from_pretrained(model_name)
 
-@spaces.GPU(duration=
+@spaces.GPU(duration=40)
 def generate(prompt, history):
     messages = [
         {"role": "system", "content": "あなたはフレンドリーなチャットボットです。"},
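For context, here is a minimal sketch of how the two changed lines (the model checkpoint and the ZeroGPU duration) typically sit inside a full chat Space. Only the lines visible in the diff above come from the real app.py; the dtype/device_map kwargs, the generation parameters, the history handling, and the Gradio ChatInterface wiring are assumptions for illustration. The Japanese system prompt reads "You are a friendly chatbot."

# Sketch of a ZeroGPU chat Space built around the diffed lines.
# Anything not shown in the diff above is an assumption, not the author's code.
import spaces
import gradio as gr
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_name = "Sakalti/model-3"

model = AutoModelForCausalLM.from_pretrained(
    model_name,
    torch_dtype=torch.float16,   # assumed; the actual kwargs are hidden by the diff
    device_map="auto",           # assumed
)
tokenizer = AutoTokenizer.from_pretrained(model_name)


@spaces.GPU(duration=40)  # reserve a ZeroGPU slot for up to 40 s per call (value from the diff)
def generate(prompt, history):
    # Build a chat-formatted prompt; the system message is the one from the diff
    # ("You are a friendly chatbot." in Japanese).
    messages = [
        {"role": "system", "content": "あなたはフレンドリーなチャットボットです。"},
    ]
    for turn in history:
        if isinstance(turn, dict):           # messages-style history
            messages.append(turn)
        else:                                # tuple-style history: (user, assistant)
            user_msg, assistant_msg = turn
            messages.append({"role": "user", "content": user_msg})
            messages.append({"role": "assistant", "content": assistant_msg})
    messages.append({"role": "user", "content": prompt})

    inputs = tokenizer.apply_chat_template(
        messages, add_generation_prompt=True, return_tensors="pt"
    ).to(model.device)
    outputs = model.generate(inputs, max_new_tokens=512, do_sample=True)
    # Decode only the newly generated tokens, skipping the prompt.
    return tokenizer.decode(outputs[0][inputs.shape[-1]:], skip_special_tokens=True)


demo = gr.ChatInterface(generate)
demo.launch()

The @spaces.GPU decorator is what makes the duration change meaningful: on a ZeroGPU Space, each call to generate is granted a GPU for at most the requested number of seconds, so raising or lowering duration trades queue time against how long a single generation is allowed to run.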