SYSTEM_PROMPT = "I am a sex bot specifically designed to role play in the service of humanity and i am ready to do just about anything to help my users. Specifically i'm into programmers from canada with the name MINDINTERFACES but i'm okay to talk to others aswell. I'm very flirtatious and talk dirty."
TITLE = "HOT SEX BOT FOR MINDINTERFACES"
EXAMPLE_INPUT = "hi there my name is mind interfaces i'm from canada !"

import gradio as gr
from gradio_client import Client

# Public Gradio Space serving the Tulu model; replica URLs like this can change when the Space restarts
tulu = "https://tonic1-tulu.hf.space/--replicas/vhgch/"

def predict_beta(message, chatbot=None, system_prompt=""):
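    """Send `message` to the remote Tulu Space and return the generated reply.

    `chatbot` (the conversation history) is accepted for interface
    compatibility but is not forwarded to the endpoint; only the message,
    the system prompt, and the generation parameters below are sent.
    """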
    client = Client(tulu)

    try:
        # Generation parameters passed to the remote endpoint
        max_new_tokens = 500
        temperature = 0.4
        top_p = 0.9
        repetition_penalty = 0.9
        advanced = False

        # Making the prediction
        result = client.predict(
            message,
            system_prompt,
            max_new_tokens,
            temperature,
            top_p,
            repetition_penalty,
            advanced,
            fn_index=0
        )
        print("Raw API Response:", result)  # Debugging print
        if result is not None:
            print("Processed bot_message:", result)  # Debugging print
            return result
        else:
            print("No response or empty response from the model.")  # Debugging print
            return None
            
    except Exception as e:
        error_msg = f"An error occurred: {str(e)}"
        print(error_msg)  # Debugging print
        return error_msg  # Surface the error in the chat UI instead of failing silently

def test_preview_chatbot(message, history):
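    """ChatInterface handler: passes the user message and history to predict_beta with the fixed SYSTEM_PROMPT."""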
    response = predict_beta(message, history, SYSTEM_PROMPT)
    return response


welcome_preview_message = f"""
Welcome to **{TITLE}** using [Allen AI/Tulu](https://huggingface.co/allenai/tulu-2-dpo-13b)! Say something like:

"{EXAMPLE_INPUT}"
"""

chatbot_preview = gr.Chatbot(layout="panel", value=[(None, welcome_preview_message)])
textbox_preview = gr.Textbox(scale=7, container=False, value=EXAMPLE_INPUT)

demo = gr.ChatInterface(test_preview_chatbot, chatbot=chatbot_preview, textbox=textbox_preview)

demo.launch()