betajuned committed on
Commit b515959
1 Parent(s): a16670e

Update app.py

Files changed (1)
  1. app.py +4 -4
app.py CHANGED
@@ -2,9 +2,9 @@ from transformers import AutoModelForCausalLM, AutoTokenizer
 import gradio as gr
 import torch

-title = "AI ChatBot"
-description = "A State-of-the-Art Large-scale Pretrained Response generation model (DialoGPT)"
-examples = [["How are you?"]]
+title = "GPT-2 JTE Chatbot"
+description = "GPT-2 Menjadi Chatbot"
+examples = [["Bagaimana cara mengisi KRS?"]]

 tokenizer = AutoTokenizer.from_pretrained("betajuned/GPT-2_Kombinasi4")
 model = AutoModelForCausalLM.from_pretrained("betajuned/GPT-2_Kombinasi4")
@@ -17,7 +17,7 @@ def predict(input, history=[]):
     bot_input_ids = torch.cat([torch.tensor(history), new_user_input_ids], dim=-1) if history else new_user_input_ids

     # Generate a response
-    chat_history_ids = model.generate(bot_input_ids, max_length=1000, pad_token_id=tokenizer.eos_token_id)
+    chat_history_ids = model.generate(bot_input_ids, max_length=50, pad_token_id=tokenizer.eos_token_id)

     # Convert the tokens to text
     response = tokenizer.decode(chat_history_ids[:, bot_input_ids.shape[-1]:][0], skip_special_tokens=True)
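
For reference, the new description translates to "GPT-2 as a Chatbot" and the new example prompt asks "How do I fill in the KRS (course registration form)?".

The hunks above show only fragments of app.py. The sketch below is a minimal reconstruction of how those fragments plausibly fit together after this commit; the encoding line, the history bookkeeping, and the gr.Interface wiring do not appear in the diff and are assumptions, not part of the commit.

from transformers import AutoModelForCausalLM, AutoTokenizer
import gradio as gr
import torch

title = "GPT-2 JTE Chatbot"
description = "GPT-2 Menjadi Chatbot"
examples = [["Bagaimana cara mengisi KRS?"]]

tokenizer = AutoTokenizer.from_pretrained("betajuned/GPT-2_Kombinasi4")
model = AutoModelForCausalLM.from_pretrained("betajuned/GPT-2_Kombinasi4")


def predict(input, history=[]):
    # Encode the new user message plus the end-of-sequence token (assumed; not shown in the diff)
    new_user_input_ids = tokenizer.encode(input + tokenizer.eos_token, return_tensors="pt")

    # Prepend the running conversation history, if any
    bot_input_ids = torch.cat([torch.tensor(history), new_user_input_ids], dim=-1) if history else new_user_input_ids

    # Generate a response (max_length lowered from 1000 to 50 in this commit)
    chat_history_ids = model.generate(bot_input_ids, max_length=50, pad_token_id=tokenizer.eos_token_id)

    # Decode only the newly generated tokens (everything after the prompt)
    response = tokenizer.decode(chat_history_ids[:, bot_input_ids.shape[-1]:][0], skip_special_tokens=True)

    # Carry the full token history into the next turn (assumed)
    history = chat_history_ids.tolist()
    return response, history


# Gradio wiring (assumed; only title, description, and examples appear in the diff)
gr.Interface(
    fn=predict,
    inputs=["text", "state"],
    outputs=["text", "state"],
    title=title,
    description=description,
    examples=examples,
).launch()

One note on the max_length change: in transformers, max_length caps the total sequence length, prompt plus generated tokens, so dropping it from 1000 to 50 keeps replies short but also means generation stops almost immediately once the accumulated history approaches 50 tokens; max_new_tokens would cap only the length of the reply itself.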