Sakalti committed on
Commit
832bf3c
·
verified ·
1 Parent(s): 5adec73

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +3 -3
app.py CHANGED
@@ -3,7 +3,7 @@ import spaces
3
  from transformers import AutoModelForCausalLM, AutoTokenizer
4
  import torch
5
 
6
- model_name = "Sakalti/SakalFusion-7B-Alpha"
7
 
8
  model = AutoModelForCausalLM.from_pretrained(
9
  model_name,
@@ -12,7 +12,7 @@ model = AutoModelForCausalLM.from_pretrained(
12
  )
13
  tokenizer = AutoTokenizer.from_pretrained(model_name)
14
 
15
- @spaces.GPU(duration=40)
16
  def generate(prompt, history):
17
  messages = [
18
  {"role": "system", "content": "あγͺγŸγ―γƒ•γƒ¬γƒ³γƒ‰γƒͺγƒΌγͺγƒγƒ£γƒƒγƒˆγƒœγƒƒγƒˆγ§γ™γ€‚"},
@@ -41,4 +41,4 @@ def generate(prompt, history):
41
  chat_interface = gr.ChatInterface(
42
  fn=generate,
43
  )
44
- chat_interface.launch(share=True)
 
3
  from transformers import AutoModelForCausalLM, AutoTokenizer
4
  import torch
5
 
6
+ model_name = "Qwen/Qwen2.5-7B-Instruct"
7
 
8
  model = AutoModelForCausalLM.from_pretrained(
9
  model_name,
 
12
  )
13
  tokenizer = AutoTokenizer.from_pretrained(model_name)
14
 
15
+ @spaces.GPU(duration=100)
16
  def generate(prompt, history):
17
  messages = [
18
  {"role": "system", "content": "あγͺγŸγ―γƒ•γƒ¬γƒ³γƒ‰γƒͺγƒΌγͺγƒγƒ£γƒƒγƒˆγƒœγƒƒγƒˆγ§γ™γ€‚"},
 
41
  chat_interface = gr.ChatInterface(
42
  fn=generate,
43
  )
44
+ chat_interface.launch(share=True)