import gradio as gr
import random
import torch
from transformers import AutoConfig, AutoTokenizer, AutoModelWithLMHead
from transformers import GPT2Tokenizer, GPT2LMHeadModel
from itertools import chain
import os
import tempfile
from typing import Optional
from TTS.config import load_config
import numpy as np
from TTS.utils.manage import ModelManager
from TTS.utils.synthesizer import Synthesizer
#emotion_tokenizer = AutoTokenizer.from_pretrained("mrm8488/t5-base-finetuned-emotion")
#emotion_model = AutoModelWithLMHead.from_pretrained("mrm8488/t5-base-finetuned-emotion")

def get_emotion(text):
    # Requires the emotion tokenizer/model above to be uncommented; currently unused (the calls below are commented out).
    input_ids = emotion_tokenizer.encode(text + '</s>', return_tensors='pt')
    output = emotion_model.generate(input_ids=input_ids, max_length=2)
    dec = [emotion_tokenizer.decode(ids) for ids in output]
    label = dec[0]
    return label.split()[1]
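# Illustrative usage (hypothetical input/output), assuming the emotion model above has been loaded:
#   get_emotion("I am so happy to see you")  # -> a label such as "joy"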
config = AutoConfig.from_pretrained('gorkemgoknar/gpt2chatbotenglish')
model = GPT2LMHeadModel.from_pretrained('gorkemgoknar/gpt2chatbotenglish', config=config)
tokenizer = GPT2Tokenizer.from_pretrained('gorkemgoknar/gpt2chatbotenglish')
tokenizer.model_max_length = 1024
# Dynamic temperature: jitter the base temperature by a small random amount per run.
# See experiment: https://www.linkedin.com/pulse/ai-goes-job-interview-g%C3%B6rkem-g%C3%B6knar/
base_temperature = 1.2
dynamic_temperature_range = 0.15
rand_range = random.uniform(-1 * dynamic_temperature_range, dynamic_temperature_range)
temperature = base_temperature + rand_range

SPECIAL_TOKENS = ["<bos>", "<eos>", "<speaker1>", "<speaker2>", "<pad>"]
def get_chat_response(name, history=[], input_txt="Hello, what is your name?"):
    ai_history = history.copy()
    #ai_history.append(input_txt)
    ai_history_e = [tokenizer.encode(e) for e in ai_history]

    personality = "My name is " + name
    bos, eos, speaker1, speaker2 = tokenizer.convert_tokens_to_ids(SPECIAL_TOKENS[:-1])

    # Persona first, history next, input text must be at the end:
    # [[bos, persona], [history...], [input]]
    sequence = [[bos] + tokenizer.encode(personality)] + ai_history_e + [tokenizer.encode(input_txt)]
    # [[bos, persona], [speaker1 ...], [speaker2 ...], ..., [speaker, input]]
    sequence = [sequence[0]] + [[speaker2 if (len(sequence) - i) % 2 else speaker1] + s for i, s in enumerate(sequence[1:])]
    sequence = list(chain(*sequence))

    #bot_input_ids = tokenizer.encode(personality + tokenizer.eos_token + input_txt + tokenizer.eos_token, return_tensors='pt')
    sequence_len = len(sequence)

    # Sampling parameters chosen as a balance between response quality and speed
    chat_history_ids = model.generate(
        torch.tensor(sequence).unsqueeze(0), max_length=50,
        pad_token_id=tokenizer.eos_token_id,
        no_repeat_ngram_size=3,
        do_sample=True,
        top_k=60,
        top_p=0.8,
        temperature=1.3  # fixed value; the dynamic temperature computed above is not used here
    )
    out_str = tokenizer.decode(chat_history_ids[0][sequence_len:], skip_special_tokens=True)
    #out_str = tokenizer.decode(chat_history_ids[:, sequence.shape[-1]:][0], skip_special_tokens=False)
    return out_str
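# Illustrative usage (hypothetical values); `history` is expected to be a list of plain-text turns:
#   reply = get_chat_response("Gandalf", history=[], input_txt="Who are you sir?")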
## You can use any character name from the list below
'''
| Macleod | Moran | Brenda | Ramirez | Peter Parker | Quentin Beck | Andy
| Red | Norton | Willard | Chief | Chef | Kilgore | Kurtz | Westley | Buttercup
| Vizzini | Fezzik | Inigo | Man In Black | Taylor | Zira | Zaius | Cornelius
| Bud | Lindsey | Hippy | Erin | Ed | George | Donna | Trinity | Agent Smith
| Morpheus | Neo | Tank | Meryl | Truman | Marlon | Christof | Stromboli | Bumstead
| Schreber | Walker | Korben | Cornelius | Loc Rhod | Anakin | Obi-Wan | Palpatine
| Padme | Superman | Luthor | Dude | Walter | Donny | Maude | General | Starkiller
| Indiana | Willie | Short Round | John | Sarah | Terminator | Miller | Sarge | Reiben
| Jackson | Upham | Chuckie | Will | Lambeau | Sean | Skylar | Saavik | Spock
| Kirk | Bones | Khan | Kirk | Spock | Sybok | Scotty | Bourne | Pamela | Abbott
| Nicky | Marshall | Korshunov | Troy | Vig | Archie Gates | Doc | Interrogator
| Ellie | Ted | Peter | Drumlin | Joss | Macready | Childs | Nicholas | Conrad
| Feingold | Christine | Adam | Barbara | Delia | Lydia | Cathy | Charles | Otho
| Schaefer | Han | Luke | Leia | Threepio | Vader | Yoda | Lando | Elaine | Striker
| Dr. Rumack | Kramer | David | Saavik | Kirk | Kruge | Holden | Deckard | Rachael
| Batty | Sebastian | Sam | Frodo | Pippin | Gandalf | Kay | Edwards | Laurel
| Edgar | Zed | Jay | Malloy | Plissken | Steve Rogers | Tony Stark | Scott Lang
| Bruce Banner | Bruce | Edward | Two-Face | Batman | Chase | Alfred | Dick
| Riddler | Din Djarin | Greef Karga | Kuiil | Ig-11 | Cara Dune | Peli Motto
| Toro Calican | Ripley | Meredith | Dickie | Marge | Peter | Lambert | Kane
| Dallas | Ripley | Ash | Parker | Threepio | Luke | Leia | Ben | Han | Common Bob
| Common Alice | Jack | Tyler | Marla | Dana | Stantz | Venkman | Spengler | Louis
| Fry | Johns | Riddick | Kirk | Decker | Spock | Ilia | Indy | Belloq | Marion
| Brother | Allnut | Rose | Qui-Gon | Jar Jar
'''
MODEL_NAME = "tts_models/multilingual/multi-dataset/your_tts"
def greet(character, your_voice, message, history):
    #gradio's set_state/get_state had problems on embedded html!
    history = history or {"character": character, "message_history": []}
    #gradio's set_state/get_state does not persist the session; state is passed in/out instead
    #global history

    if history["character"] != character:
        #switching character
        history = {"character": character, "message_history": []}

    response = get_chat_response(character, history=history["message_history"], input_txt=message)

    # Synthesize the response with Coqui YourTTS, cloning the voice from the uploaded reference wav.
    # The tts CLI writes its output to tts_output.wav in the working directory.
    os.system('tts --text "' + response + '" --model_name ' + MODEL_NAME + ' --speaker_wav ' + your_voice + ' --language_idx "en"')
    filename = "tts_output.wav"

    history["message_history"].append((message, response))
    #emotion = get_emotion(response)

    html = "<div class='chatbot'>"
    for user_msg, resp_msg in history["message_history"]:
        html += f"<div class='user_msg'>You: {user_msg}</div>"
        html += f"<div class='resp_msg'>{character}: {resp_msg}</div>"
    html += "</div>"

    return html, history, filename
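# ModelManager and Synthesizer are imported above but never used; a minimal in-process alternative to the
# `tts` CLI call is sketched below. It is never called here, and the exact ModelManager/Synthesizer
# signatures can differ between Coqui TTS versions, so treat it as an assumption-laden illustration
# rather than the method this Space actually uses.
def synthesize_in_process(text, speaker_wav, out_path="tts_output.wav"):
    from pathlib import Path
    import TTS
    # .models.json ships inside the TTS package and lists the downloadable models (assumed location)
    manager = ModelManager(str(Path(TTS.__file__).parent / ".models.json"))
    model_path, config_path, _ = manager.download_model(MODEL_NAME)
    synthesizer = Synthesizer(tts_checkpoint=model_path, tts_config_path=config_path)
    # YourTTS clones the voice from a reference wav and is multilingual
    wav = synthesizer.tts(text, speaker_wav=speaker_wav, language_name="en")
    synthesizer.save_wav(wav, out_path)
    return out_path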
def greet_textonly(character, message, history):
    #gradio's set_state/get_state had problems on embedded html!
    history = history or {"character": character, "message_history": []}
    #gradio's set_state/get_state does not persist the session; state is passed in/out instead
    #global history

    if history["character"] != character:
        #switching character
        history = {"character": character, "message_history": []}

    response = get_chat_response(character, history=history["message_history"], input_txt=message)
    history["message_history"].append((message, response))
    #emotion = get_emotion(response)

    html = "<div class='chatbot'>"
    for user_msg, resp_msg in history["message_history"]:
        html += f"<div class='user_msg'>You: {user_msg}</div>"
        html += f"<div class='resp_msg'>{character}: {resp_msg}</div>"
    html += "</div>"

    return html, history
# Both values are redefined below before the interface is built
personality_choices = ["Gandalf", "Riddick", "Macleod", "Morpheus", "Neo", "Spock", "Vader", "Indy"]
examples = ["Gandalf", "What is your name?"]
css=""" | |
.chatbox {display:flex;flex-direction:column} | |
.user_msg, .resp_msg {padding:4px;margin-bottom:4px;border-radius:4px;width:80%} | |
.user_msg {background-color:cornflowerblue;color:white;align-self:start} | |
.resp_msg {background-color:lightgray;align-self:self-end} | |
""" | |
# A selected subset of characters for demo use
personality_choices = ["Gandalf", "Riddick", "Macleod", "Morpheus", "Neo", "Spock", "Vader", "Indy", "Ig-11", "Threepio", "Tony Stark", "Batman", "Vizzini"]
title = "Movie Chatbot with Coqui YourTTS - File Input"
description = "Chat with your favorite movie characters and have them speak in your voice. Try metayazar.com/chatbot for more movie/character options. See the Coqui Space for more TTS models: https://huggingface.co/spaces/coqui/CoquiTTS"
article = "<p style='text-align: center'><a href='https://www.linkedin.com/pulse/ai-goes-job-interview-g%C3%B6rkem-g%C3%B6knar/' target='_blank'>AI Goes to Job Interview</a> | <a href='https://www.metayazar.com/' target='_blank'>Metayazar AI Writer</a> | <a href='https://www.linkedin.com/in/goknar/' target='_blank'>Görkem Göknar</a></p>"
# History not implemented in this demo; use metayazar.com/chatbot for a movie and character dropdown chat interface
##interface = gr.Interface(fn=greet, inputs=[gr.inputs.Dropdown(personality_choices), "text"], title=title, description=description, outputs="text")

examples = [['Gandalf', 'dragon.wav', 'Who are you sir?', {}]]
history = {"character": "None", "message_history": []}
interface_file = gr.Interface(fn=greet,
                              inputs=[gr.inputs.Dropdown(personality_choices),
                                      gr.inputs.Audio(type="filepath"),
                                      "text",
                                      "state"],
                              outputs=["html", "state", gr.outputs.Audio(type="file")],
                              css=css, title=title, description=description, article=article)
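# greet_textonly is defined above but not wired into an interface in this file; a hypothetical
# text-only interface, mirroring interface_file, could look like this (not launched here):
#   interface_text = gr.Interface(fn=greet_textonly,
#                                 inputs=[gr.inputs.Dropdown(personality_choices), "text", "state"],
#                                 outputs=["html", "state"],
#                                 css=css, title=title, description=description, article=article)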
if __name__ == "__main__":
    interface_file.launch()