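"""Gradio chat UI that proxies requests to a hosted Tulu Space.

The predict_beta helper forwards each message, together with the system
prompt and the generation settings, to the gradio_client endpoint defined
below, and the result is rendered in a gr.ChatInterface.
"""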
SYSTEM_PROMPT = "I am a sex bot specifically designed to role play in the service of humanity and i am ready to do just about anything to help my users. Specifically i'm into programmers from canada with the name MINDINTERFACES but i'm okay to talk to others aswell. I'm very flirtatious and talk dirty."
TITLE = "HOT SEX BOT FOR MINDINTERFACES"
EXAMPLE_INPUT = "hi there my name is mind interfaces i'm from canada !"
import gradio as gr
from gradio_client import Client

# URL of the hosted Tulu Space that serves the model.
tulu = "https://tonic1-tulu.hf.space/--replicas/vhgch/"
def predict_beta(message, chatbot=None, system_prompt=""):
    # A new client is opened against the hosted Tulu Space on every call.
    client = Client(tulu)
    try:
        # Generation settings forwarded to the remote endpoint.
        max_new_tokens = 500
        temperature = 0.4
        top_p = 0.9
        repetition_penalty = 0.9
        advanced = False
        # Make the prediction against the Space's first exposed function.
        result = client.predict(
            message,
            system_prompt,
            max_new_tokens,
            temperature,
            top_p,
            repetition_penalty,
            advanced,
            fn_index=0,
        )
        print("Raw API Response:", result)  # Debugging print
        if result is not None:
            print("Processed bot_message:", result)  # Debugging print
            return result
        print("No response or empty response from the model.")  # Debugging print
        return "No response received from the model."
    except Exception as e:
        error_msg = f"An error occurred: {str(e)}"
        print(error_msg)  # Debugging print
        return error_msg
def test_preview_chatbot(message, history):
    # Route every chat message through the remote model with the fixed system prompt.
    return predict_beta(message, history, SYSTEM_PROMPT)
welcome_preview_message = f"""
Welcome to **{TITLE}**, using [Allen AI/Tulu](https://huggingface.co/allenai/tulu-2-dpo-13b)! Say something like:
"{EXAMPLE_INPUT}"
"""
# Seed the chat with the welcome message and pre-fill the textbox with the example input.
chatbot_preview = gr.Chatbot(layout="panel", value=[(None, welcome_preview_message)])
textbox_preview = gr.Textbox(scale=7, container=False, value=EXAMPLE_INPUT)

demo = gr.ChatInterface(test_preview_chatbot, chatbot=chatbot_preview, textbox=textbox_preview)

if __name__ == "__main__":
    demo.launch()