fabiolamp committed
Commit 2fe2fff · verified · 1 Parent(s): 568c451

Update app.py

Files changed (1)
  1. app.py +12 -2
app.py CHANGED
@@ -2,14 +2,24 @@ from transformers import AutoModelForCausalLM, AutoTokenizer
  import gradio as gr
  import torch

+ # Use a pipeline as a high-level helper
+ from transformers import pipeline
+
+ pipe = pipeline("text-generation", model="mistralai/Mistral-7B-v0.1")
+ # Load model directly
+ from transformers import AutoTokenizer, AutoModelForCausalLM
+
+ tokenizer = AutoTokenizer.from_pretrained("mistralai/Mistral-7B-v0.1")
+ model = AutoModelForCausalLM.from_pretrained("mistralai/Mistral-7B-v0.1")
+

  title = "microsoft/DialoGPT"
  description = ""
  examples = [["How are you?"]]


- tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-large")
- model = AutoModelForCausalLM.from_pretrained("microsoft/DialoGPT-large")
+ #tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-large")
+ #model = AutoModelForCausalLM.from_pretrained("microsoft/DialoGPT-large")

  #tokenizer = AutoTokenizer.from_pretrained("mistralai/Mixtral-8x7B-Instruct-v0.1")
  #model = AutoModelForCausalLM.from_pretrained("mistralai/Mixtral-8x7B-Instruct-v0.1")
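
The updated app.py loads mistralai/Mistral-7B-v0.1 twice: once through the high-level "text-generation" pipeline and once directly as a tokenizer/model pair, while the earlier microsoft/DialoGPT-large loading is commented out. The diff does not show how the model is wired into the Gradio app implied by title, description, and examples, so the following is a minimal sketch of one way the directly loaded tokenizer and model could back a gr.Interface. The generate() helper, its sampling parameters, and the interface wiring are assumptions for illustration, not code from this commit.

import torch
import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM

# Same direct-loading path as in the updated app.py.
tokenizer = AutoTokenizer.from_pretrained("mistralai/Mistral-7B-v0.1")
model = AutoModelForCausalLM.from_pretrained("mistralai/Mistral-7B-v0.1")

def generate(prompt: str) -> str:
    # Hypothetical helper: tokenize the prompt and sample a continuation.
    inputs = tokenizer(prompt, return_tensors="pt")
    with torch.no_grad():
        output_ids = model.generate(
            **inputs,
            max_new_tokens=128,
            do_sample=True,
            temperature=0.7,
            pad_token_id=tokenizer.eos_token_id,  # Mistral has no pad token by default
        )
    # Decode only the newly generated tokens, not the echoed prompt.
    new_tokens = output_ids[0, inputs["input_ids"].shape[-1]:]
    return tokenizer.decode(new_tokens, skip_special_tokens=True)

# Wiring the helper to the metadata already defined in app.py.
demo = gr.Interface(
    fn=generate,
    inputs="text",
    outputs="text",
    title="microsoft/DialoGPT",
    description="",
    examples=[["How are you?"]],
)

if __name__ == "__main__":
    demo.launch()

As a design note, keeping both the pipeline object and the directly loaded model holds two copies of the 7B-parameter weights in memory; if pipe is not used elsewhere in app.py, dropping one of the two loading paths would roughly halve the memory footprint.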