from langchain_openai import ChatOpenAI
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
from dotenv import load_dotenv
import os

# Load the OpenAI API key from a local .env file.
load_dotenv()
openai_key = os.getenv('OPENAI_API_KEY')

def get_response(user_query, chat_history, context):
    """Generate an answer to the user's question, given the chat history and background context."""
    template = """
    You are a helpful assistant. Answer the following question, taking the chat history and background information of the conversation into account:

    Chat History: {chat_history}

    Background Information: {context}

    User question: {user_question}
    """


    llm = ChatOpenAI(api_key=openai_key)
    try:
        prompt = ChatPromptTemplate.from_template(template)

        llm = ChatOpenAI(api_key=openai_key)
            
        chain = prompt | llm | StrOutputParser()
        
        # Stream the response and concatenate the chunks into a single string.
        # chain.stream() returns a generator, which is always truthy, so the output
        # must be consumed before we can tell whether the model returned anything.
        chunks = chain.stream({
            "chat_history": chat_history,
            "context": context,
            "user_question": user_query,
        })
        response = "".join(chunks)
        if response:
            return response
        else:
            return "No response received from model."
    except Exception as e:
        return f"Error in generating response: {str(e)}"