"""Entry point to Streamlit UI. | |
Ref: https://docs.streamlit.io/get-started/tutorials/create-a-multipage-app | |
""" | |
from pathlib import Path
from typing import Dict

import streamlit as st


def welcome_message() -> Dict[str, str]:
    """Return the assistant's opening chat message."""
    return {
        "role": "assistant",
        "content": "Welcome to My Notion Companion.",
    }


def main():

    st.set_page_config(
        page_title="My Notion Companion",
        page_icon="🤗",
    )

    st.title("My Notion Companion 🤗")

    st.caption(
        "A conversational RAG that helps me chat with my (mostly Chinese-language) Notion databases."
    )
    st.caption(
        "Powered by: [🦜🔗](https://www.langchain.com/), [🤗](https://huggingface.co/), [LlamaCpp](https://github.com/ggerganov/llama.cpp), [Streamlit](https://streamlit.io/)."
    )

    # Initialize chat history
    if "messages" not in st.session_state:
        st.session_state.messages = [welcome_message()]

    # Display chat messages from history on app rerun
    for message in st.session_state.messages:
        with st.chat_message(message["role"]):
            st.markdown(message["content"])

    # Two sidebar buttons to control chat history and retrieval memory
    def start_over():
        st.session_state.messages = [
            {"role": "assistant", "content": "Okay, let's start over."}
        ]

    st.sidebar.button(
        "Start All Over Again", on_click=start_over, use_container_width=True
    )

    def clear_chat_history():
        st.session_state.messages = [
            {
                "role": "assistant",
                "content": "Retrieved documents are still in my memory. What else do you want to know?",
            }
        ]

    st.sidebar.button(
        "Keep Retrieved Docs but Clear Chat History",
        on_click=clear_chat_history,
        use_container_width=True,
    )

    # Accept user input
    if prompt := st.chat_input("Any questions?"):
        # Add user message to chat history
        st.session_state.messages.append({"role": "user", "content": prompt})
        # Display user message in chat message container
        with st.chat_message("user"):
            st.markdown(prompt)

        # Display assistant response in chat message container
        with st.chat_message("assistant"):
            # The real RAG chain call is disabled in this hosted mock:
            # response = st.session_state.t.invoke()
            response = """##### NOTES: \n\nThis is only a mock UI hosted on Hugging Face because of the limited computing resources available to a free-tier user.
            Please check the video demo (sidebar) to see how the companion works as a standalone offline web app.\n\nAlternatively,
            please visit the [GitHub page](https://github.com/fyang0507/my-notion-companion/tree/main) and follow the quickstart guide to build your own!
            """
            st.write(response)
            st.session_state.messages.append({"role": "assistant", "content": response})


if __name__ == "__main__":
    main()