fabiolamp committed
Commit f5a4f6b (verified)
Parent: eb8873a

change model to mistral

Files changed (1):
  app.py +6 -3
app.py CHANGED
@@ -3,13 +3,16 @@ import gradio as gr
 import torch
 
 
-title = "????AI ChatBot"
+title = "Fab ChatBot"
 description = "A State-of-the-Art Large-scale Pretrained Response generation model (DialoGPT)"
 examples = [["How are you?"]]
 
 
-tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-large")
-model = AutoModelForCausalLM.from_pretrained("microsoft/DialoGPT-large")
+#tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-large")
+#model = AutoModelForCausalLM.from_pretrained("microsoft/DialoGPT-large")
+
+tokenizer = AutoTokenizer.from_pretrained("mistralai/Mixtral-8x7B-Instruct-v0.1")
+model = AutoModelForCausalLM.from_pretrained("mistralai/Mixtral-8x7B-Instruct-v0.1")
 
 
 def predict(input, history=[]):
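
For context, below is a minimal sketch (not part of this commit) of how the app might drive the swapped-in model. Unlike DialoGPT, Mixtral-8x7B-Instruct expects chat-template-formatted prompts rather than eos-token concatenation, and it is large enough that half precision and device_map="auto" are assumed here; the predict rewrite, generation parameters, and dtype are all illustrative assumptions rather than the Space's actual code.

# Minimal sketch (assumption, not part of this commit): querying the new Mixtral
# model via its chat template instead of DialoGPT-style eos-token concatenation.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "mistralai/Mixtral-8x7B-Instruct-v0.1"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype=torch.float16,  # assumption: half precision to reduce memory
    device_map="auto",          # assumption: shard weights across available devices
)

def predict(user_input, history=None):
    # Mixtral-Instruct expects chat-formatted prompts, so build a message list
    # and let the tokenizer apply the model's chat template.
    # (The history argument is left unused in this sketch.)
    messages = [{"role": "user", "content": user_input}]
    input_ids = tokenizer.apply_chat_template(
        messages, add_generation_prompt=True, return_tensors="pt"
    ).to(model.device)
    output_ids = model.generate(input_ids, max_new_tokens=256, do_sample=True)
    # Decode only the newly generated tokens, skipping the prompt.
    return tokenizer.decode(output_ids[0][input_ids.shape[-1]:], skip_special_tokens=True)

The generation settings (max_new_tokens=256, sampling) are placeholders; the body of the Space's predict function is not shown in this hunk.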