import streamlit as st
import requests

# Page title
st.title("Chatlytic")

# Initialize session state for model and messages if not already present
if "openai_model" not in st.session_state:
    st.session_state["openai_model"] = "mixtral-8x7b"

if "messages" not in st.session_state:
    st.session_state.messages = []

# Function to clear the chat
def clear_chat():
    st.session_state.messages = []

# Button to clear the chat
if st.button('Clear Chat'):
    clear_chat()

# Display previous messages
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])

# Input for new message
if prompt := st.chat_input("What is up?"):
    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.markdown(prompt)

    # Define the API endpoint
    api_endpoint = "https://ka1kuk-llm-api.hf.space/api/v1/chat/completions"

    # Prepare the data for the POST request
    data = {
        "model": st.session_state["openai_model"],
        "messages": st.session_state.messages,
        "temperature": 0.5,
        "top_p": 0.95,
        "max_tokens": -1,
        "use_cache": False,
        "stream": False
    }

    # Send the POST request to the custom API; the timeout keeps the app from hanging on a dead connection
    response = requests.post(api_endpoint, json=data, timeout=60)

    # Check if the request was successful
    if response.status_code == 200:
        # Extract the assistant's reply from the response body
        response_data = response.json()
        assistant_reply = response_data["choices"][0]["message"]["content"]
        # Append the assistant's response to the message history
        st.session_state.messages.append({"role": "assistant", "content": assistant_reply})
        # Display the assistant's response
        with st.chat_message("assistant"):
            st.markdown(assistant_reply)
    else:
        # Display an error message if the request failed
        st.error("Failed to get a response from the custom API.")
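        # Hedged sketch: surfacing the HTTP status code can help with debugging.
        # The shape of the error body is not documented here, so only the status
        # code (which requests always provides) is shown.
        st.caption(f"Status code: {response.status_code}")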