Hamza1702 committed
Commit 796b89b
1 Parent(s): 9876178

Using "facebook/blenderbot-400M-distill" model
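
BlenderBot-400M-distill is an encoder-decoder (sequence-to-sequence) model, which is why the loading call in this commit moves from AutoModelForCausalLM to AutoModelForSeq2SeqLM. A minimal sketch of loading the model and generating a single reply, shown only for illustration and not part of the diff:

    from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

    tokenizer = AutoTokenizer.from_pretrained("facebook/blenderbot-400M-distill")
    model = AutoModelForSeq2SeqLM.from_pretrained("facebook/blenderbot-400M-distill")

    # Encode a single user turn and let the encoder-decoder model generate a reply
    inputs = tokenizer("How are you?", return_tensors="pt")
    reply_ids = model.generate(**inputs, max_new_tokens=60)
    print(tokenizer.decode(reply_ids[0], skip_special_tokens=True))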

Files changed (1)
  1. app.py +4 -4
app.py CHANGED
@@ -1,20 +1,20 @@
 
  from transformers import pipeline
- from transformers import AutoModelForCausalLM, AutoTokenizer
+ from transformers import AutoModelForCausalLM, AutoTokenizer, AutoModelForSeq2SeqLM
  import gradio as gr
  import torch
 
  # Use a pipeline as a high-level helper
 
- pipe = pipeline("conversational", model="PygmalionAI/pygmalion-6b")
+ pipe = pipeline("conversational", model="facebook/blenderbot-400M-distill")
 
  title = "🤖AI ChatBot"
  description = "Building open-domain chatbots is a challenging area for machine learning research."
  examples = [["How are you?"]]
 
 
- tokenizer = AutoTokenizer.from_pretrained("PygmalionAI/pygmalion-6b")
- model = AutoModelForCausalLM.from_pretrained("PygmalionAI/pygmalion-6b")
+ tokenizer = AutoTokenizer.from_pretrained("facebook/blenderbot-400M-distill")
+ model = AutoModelForSeq2SeqLM.from_pretrained("facebook/blenderbot-400M-distill")
 
 
  def predict(input, history=[]):
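
The diff cuts off at the predict(input, history=[]) signature, so the function body is not shown in this commit. A hedged sketch of how it might be completed for this seq2seq model, reusing the module-level tokenizer and model loaded above; the history handling and the Gradio return format are assumptions, not the author's actual code:

    def predict(input, history=[]):
        # history is assumed to hold alternating user/bot utterances as plain strings
        history.append(input)
        # Keep only the last few turns so the context fits BlenderBot's short input window
        context = "\n".join(history[-4:])
        inputs = tokenizer(context, return_tensors="pt", truncation=True)
        reply_ids = model.generate(**inputs, max_new_tokens=60)
        response = tokenizer.decode(reply_ids[0], skip_special_tokens=True)
        history.append(response)
        # Gradio's Chatbot component expects a list of (user, bot) message pairs
        pairs = [(history[i], history[i + 1]) for i in range(0, len(history) - 1, 2)]
        return pairs, history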