File size: 2,547 Bytes
64f9ede
7676537
64f9ede
7676537
 
64f9ede
 
 
7676537
 
 
 
 
 
 
 
 
 
 
 
64f9ede
 
7676537
64f9ede
7676537
 
 
 
 
 
 
 
64f9ede
7676537
 
 
64f9ede
 
 
7676537
 
 
 
 
 
 
 
 
 
64f9ede
 
 
e9e1fd3
64f9ede
 
 
 
7676537
 
 
 
64f9ede
7676537
 
 
 
64f9ede
 
7676537
64f9ede
 
7676537
 
 
 
 
 
 
 
 
64f9ede
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
from typing import Optional

import streamlit as st
from langchain_core.messages import AIMessage, HumanMessage
from st_copy_to_clipboard import st_copy_to_clipboard

from model import selector
from util import getYamlConfig

def display_messages():
    """Render the chat history.

    AI messages show the model name, the answer, and a copy-to-clipboard
    button; human messages are rendered plainly under the "Moi" avatar.
    """
    for i, message in enumerate(st.session_state.chat_history):
        if isinstance(message, AIMessage):
            with st.chat_message("AI"):
                # "kwargs" is an extra field set by launchQuery(); use getattr
                # so AI messages created elsewhere (e.g. restored history)
                # don't crash the render with an AttributeError.
                model = getattr(message, "kwargs", {}).get("model", "Unknown Model")
                st.write(f"**Model :** {model}")
                st.markdown(message.content)
                # Streamlit requires a unique key per repeated widget.
                st_copy_to_clipboard(message.content, key=f"message_{i}")

        elif isinstance(message, HumanMessage):
            with st.chat_message("Moi"):
                st.write(message.content)


def launchQuery(query: Optional[str] = None):
    """Stream the assistant's answer to *query* and record it.

    The streamed text is appended to the chat history as an AIMessage tagged
    with the model that produced it, then the app is rerun so
    display_messages() picks it up.
    """
    # st.write_stream displays tokens as they arrive and returns the full text.
    full_response = st.write_stream(
        st.session_state["assistant"].ask(
            query,
            prompt_system=st.session_state.prompt_system,
            # session_state is dict-like: .get avoids a KeyError on first run.
            messages=st.session_state.get("chat_history", []),
            variables=st.session_state["data_dict"]
        ))

    # Store the reply with the model name (read back by display_messages()).
    st.session_state["chat_history"].append(
        AIMessage(content=full_response,
                  kwargs={"model": st.session_state["assistant"].getReadableModel()}))
    st.rerun()


def show_prompts():
    """List the predefined prompts from the YAML config in an expander.

    Prompts are grouped by category; clicking one launches it as a query.
    """
    yaml_data = getYamlConfig()["prompts"]

    expander = st.expander("Prompts pré-définis")

    for category in yaml_data:
        expander.write(category.capitalize())

        for item in yaml_data[category]:
            # Scope the key by category: the same prompt text appearing in
            # two categories would otherwise raise a duplicate-key error.
            if expander.button(item, key=f"button_{category}_{item}"):
                launchQuery(item)


def page():
    """Main chat page: predefined prompts, model selector, history, input."""
    st.subheader("Choisissez ou posez une question")

    if "assistant" not in st.session_state:
        st.text("Assistant non initialisé")
        # Without an assistant any submitted query would crash inside
        # launchQuery(); halt the script instead of rendering a broken UI.
        st.stop()

    if "chat_history" not in st.session_state:
        st.session_state["chat_history"] = []

    # Shrink the iframes injected by the st_copy_to_clipboard widgets.
    st.markdown("<style>iframe{height:50px;}</style>", unsafe_allow_html=True)

    # Collapsible list of predefined prompts.
    show_prompts()

    # Model selector.
    selector.ModelSelector()

    # Conversation so far.
    display_messages()

    user_query = st.chat_input("")
    if user_query:  # rejects both None and the empty string
        st.session_state["chat_history"].append(HumanMessage(content=user_query))

        # Stream and display the response (reruns the app when done).
        launchQuery(user_query)


page()