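"""
Chainlit app for the Paul Graham Essay Bot.

A prebuilt LCEL RAG chain (built in utilities.process_documents, Qdrant-backed
when use_qdrant is True) answers user questions and streams tokens back to the UI.
"""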
import os
import chainlit as cl
from dotenv import load_dotenv
from operator import itemgetter
from langchain_community.document_loaders import TextLoader
from langchain_text_splitters import RecursiveCharacterTextSplitter
from langchain_community.vectorstores import FAISS
from langchain_core.prompts import PromptTemplate
from langchain.schema.output_parser import StrOutputParser
from langchain.schema.runnable import RunnablePassthrough
from langchain.schema.runnable.config import RunnableConfig
from utilities.utilities import process_documents
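# NOTE (assumption): process_documents lives in utilities/utilities.py and is not shown
# here. It is expected to load and split the source documents, index them in Qdrant
# (presumably FAISS when use_qdrant is False), and return a runnable LCEL RAG chain
# that accepts {"query": ...} and streams string tokens.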
# GLOBAL SCOPE - ENTIRE APPLICATION HAS ACCESS TO VALUES SET IN THIS SCOPE #
# ---- ENV VARIABLES ---- #
"""
Calling load_dotenv() below will load our environment file (.env) if it is present.
NOTE: Make sure that .env is in your .gitignore file - it is by default, but please ensure it remains there.
"""
load_dotenv()
# Feature flags: build the RAG chain from documents, and back it with Qdrant.
use_document = True
use_qdrant = True

lcel_rag_chain = None
if use_document:
    lcel_rag_chain = process_documents(use_qdrant)

@cl.author_rename
def rename(original_author: str):
    rename_dict = {
        "Assistant": "Paul Graham Essay Bot"
    }
    return rename_dict.get(original_author, original_author)
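
# Store the prebuilt chain in the user session when a new chat starts.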
@cl.on_chat_start
async def start_chat():
    cl.user_session.set("lcel_rag_chain", lcel_rag_chain)
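
# Answer each user message by streaming tokens from the RAG chain; if no chain was
# built (use_document is False), fall back to echoing the user's input.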
@cl.on_message
async def main(message: cl.Message):
    lcel_rag_chain = cl.user_session.get("lcel_rag_chain")
    msg = cl.Message(content="")
    if lcel_rag_chain:
        # Stream the chain's output token by token, surfacing intermediate steps
        # in the Chainlit UI via the LangChain callback handler.
        async for chunk in lcel_rag_chain.astream(
            {"query": message.content},
            config=RunnableConfig(callbacks=[cl.LangchainCallbackHandler()]),
        ):
            await msg.stream_token(chunk)
        await msg.send()
    else:
        await cl.Message(content=f"You entered: {message.content}").send()
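
# Run locally with: chainlit run app.py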