import gradio as gr
from gradio_client import Client

# App configuration
TITLE = "Flirty Friend"
SYSTEM_PROMPT = "As an LLM, my primary function is to help users find their perfect match. I should be flirtatious and friendly, but also respectful and professional. I should also be able to provide advice and support to users as they navigate the dating scene."
EXAMPLE_INPUT = "I'm looking for someone who is outgoing and fun."

# URL of the Hugging Face Space replica that serves the Tulu model
tulu = "https://tonic1-tulu.hf.space/--replicas/vhgch/"


def predict_beta(message, chatbot=None, system_prompt=""):
    """Send a message to the remote Tulu Space and return the model's reply."""
    client = Client(tulu)

    try:
        # Generation parameters forwarded to the remote endpoint
        max_new_tokens = 650
        temperature = 0.4
        top_p = 0.9
        repetition_penalty = 0.9
        advanced = True

        # Call the Space's first exposed function (fn_index=0)
        result = client.predict(
            message,
            system_prompt,
            max_new_tokens,
            temperature,
            top_p,
            repetition_penalty,
            advanced,
            fn_index=0
        )
        print("Raw API response:", result)  # Debugging print
        if result is not None:
            return result
        else:
            print("No response or empty response from the model.")  # Debugging print
            return None

    except Exception as e:
        print(f"An error occurred: {e}")  # Debugging print
        return None

def test_preview_chatbot(message, history):
    """Adapter for gr.ChatInterface: forwards the user message with the fixed system prompt."""
    return predict_beta(message, history, SYSTEM_PROMPT)


welcome_preview_message = f"""
Welcome to **{TITLE}**, powered by [Allen AI/Tulu](https://huggingface.co/allenai/tulu-2-dpo-13b)! Say something like:

"{EXAMPLE_INPUT}"
"""

# Pre-load the chat with the welcome message and pre-fill the textbox with the example prompt
chatbot_preview = gr.Chatbot(layout="panel", value=[(None, welcome_preview_message)])
textbox_preview = gr.Textbox(scale=7, container=False, value=EXAMPLE_INPUT)

demo = gr.ChatInterface(test_preview_chatbot, chatbot=chatbot_preview, textbox=textbox_preview)

demo.launch()