# Education-Tools / appDeepseekCoder.py
from fastapi import FastAPI, Request
from fastapi.responses import JSONResponse
from langchain_openai import ChatOpenAI
from langchain.chains import LLMChain
from prompts import maths_assistant_prompt_template
from langchain.memory.buffer import ConversationBufferMemory
from dotenv import load_dotenv
import os
import chainlit as cl
import uvicorn
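
# maths_assistant_prompt_template comes from a local prompts.py and is expected to
# use the "chat_history" and "question" variables referenced below.
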
# Load environment variables from .env file
load_dotenv()
api_key = os.getenv('OPENAI_API_KEY')
print(f"api key is {api_key}")
app = FastAPI()
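
# The app pairs a plain HTTP /query/ endpoint with Chainlit chat handlers;
# both run the maths assistant chain built from maths_assistant_prompt_template.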
@app.on_event("startup")
async def startup_event():
    print("Initializing llm...")
    llm = ChatOpenAI(model='gpt-4o-mini', temperature=0.5, api_key=api_key)
    print("llm initialized!")
    conversation_memory = ConversationBufferMemory(memory_key="chat_history", max_len=50, return_messages=True)
    llm_chain = LLMChain(llm=llm, prompt=maths_assistant_prompt_template, memory=conversation_memory)
    # cl.user_session is only available inside an active Chainlit session, so keep
    # the chain on the FastAPI app state for the /query/ endpoint instead.
    app.state.llm_chain = llm_chain


@app.post("/query/")
async def query_llm(request: Request):
    data = await request.json()
    message = data.get("message")
    llm_chain = app.state.llm_chain
    # Call the chain directly; the Chainlit callback handler only works inside a Chainlit session.
    response = await llm_chain.ainvoke({
        "chat_history": llm_chain.memory.load_memory_variables({})["chat_history"],
        "question": message
    })
    return JSONResponse(content={"response": response["text"]})
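

# The handlers below run inside the Chainlit chat UI; each session gets its own chain.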
@cl.on_chat_start
async def on_chat_start():
    # Give each chat session its own chain and memory so conversations do not mix.
    llm = ChatOpenAI(model='gpt-4o-mini', temperature=0.5, api_key=api_key)
    conversation_memory = ConversationBufferMemory(memory_key="chat_history", max_len=50, return_messages=True)
    llm_chain = LLMChain(llm=llm, prompt=maths_assistant_prompt_template, memory=conversation_memory)
    cl.user_session.set("llm_chain", llm_chain)
    actions = [
        cl.Action(name="Probability", value="Probability", description="Select Quiz Topic!"),
        cl.Action(name="Linear Algebra", value="Linear Algebra", description="Select Quiz Topic!"),
        cl.Action(name="Accounts", value="Accounts", description="Select Quiz Topic!"),
        cl.Action(name="Calculus", value="Calculus", description="Select Quiz Topic!")
    ]
    await cl.Message(content="**Pick a Topic and Let the Quiz Adventure Begin!** 🎉📚", actions=actions).send()
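

# A single action callback serves all four quiz topics.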
@cl.action_callback("Linear Algebra")
@cl.action_callback("Probability")
@cl.action_callback("Accounts")
@cl.action_callback("Calculus")
async def on_action(action: cl.Action):
    llm_chain = cl.user_session.get("llm_chain")
    response = await llm_chain.ainvoke({
        "chat_history": llm_chain.memory.load_memory_variables({})["chat_history"],
        "question": f"Quiz me on the topic {action.value}."
    }, callbacks=[cl.AsyncLangchainCallbackHandler()])
    await cl.Message(content=response["text"]).send()
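

# Hugging Face Spaces expects the server to listen on port 7860.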
if __name__ == "__main__":
    uvicorn.run(app, host="0.0.0.0", port=7860)