0x7o committed on
Commit
79ba831
1 Parent(s): 5415a35

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +3 -3
app.py CHANGED
@@ -4,8 +4,8 @@ import gradio as gr
4
  import torch
5
 
6
  if torch.cuda.is_available():
7
- tokenizer = AutoTokenizer.from_pretrained("ai-forever/mGPT-13B")
8
- model = AutoModelForCausalLM.from_pretrained("ai-forever/mGPT-13B", load_in_8bit=True, device_map="auto")
9
  pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)
10
 
11
  @spaces.GPU
@@ -14,7 +14,7 @@ def predict(prompt, temperature, max_length):
14
 
15
  demo = gr.Interface(
16
  fn=predict,
17
- title="mGPT-13B Demo",
18
  inputs=["text", gr.Slider(minimum=0.01, maximum=1.0, value=0.7, label="temperature"), gr.Slider(minimum=1, maximum=1024, value=50, label="max_length")],
19
  outputs=["text"],
20
  )
 
4
  import torch
5
 
6
  if torch.cuda.is_available():
7
+ tokenizer = AutoTokenizer.from_pretrained("ai-forever/ruGPT-3.5-13B")
8
+ model = AutoModelForCausalLM.from_pretrained("ai-forever/ruGPT-3.5-13B", load_in_8bit=True, device_map="auto")
9
  pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)
10
 
11
  @spaces.GPU
 
14
 
15
  demo = gr.Interface(
16
  fn=predict,
17
+ title="ruGPT-3.5-13B Demo",
18
  inputs=["text", gr.Slider(minimum=0.01, maximum=1.0, value=0.7, label="temperature"), gr.Slider(minimum=1, maximum=1024, value=50, label="max_length")],
19
  outputs=["text"],
20
  )