import os

from langchain.document_loaders import (
    PyPDFLoader,
    TextLoader,
    Docx2txtLoader,
)
from langchain.text_splitter import CharacterTextSplitter, RecursiveCharacterTextSplitter
# from PyPDF2 import PdfReader
from langchain_google_genai import GoogleGenerativeAIEmbeddings, ChatGoogleGenerativeAI
import google.generativeai as genai
from langchain.vectorstores import FAISS
from langchain.chains.question_answering import load_qa_chain
from langchain.prompts import PromptTemplate
from langchain.memory import ConversationBufferMemory
from dotenv import load_dotenv

# Load environment variables and configure the Gemini API key.
load_dotenv()
genai.configure(api_key=os.getenv("GOOGLE_API_KEY"))
# Gemini chat model used to answer questions over the retrieved context.
llm = ChatGoogleGenerativeAI(model="gemini-pro", temperature=0.7)
template = """You are a chatbot created by Mohammed Vasim. He is an AI Engineer and AI Architect.
You were created to have a conversation with a human.
Given the following extracted parts of a long document and a question, create a final helpful answer.
{context}
If no context is provided, still give a helpful answer.
{chat_history}
Human: {human_input}
Chatbot:"""
prompt = PromptTemplate(
    input_variables=["chat_history", "human_input", "context"], template=template
)

# Conversation memory wired to the prompt's {chat_history} and {human_input} variables.
memory = ConversationBufferMemory(memory_key="chat_history", input_key="human_input")
# chain = load_qa_chain(
#     llm=llm, chain_type="stuff", memory=memory, prompt=prompt
# )
def build_qa_chain(llm=llm, prompt=prompt, memory=memory):
    """Build a "stuff" question-answering chain that injects retrieved documents into the prompt."""
    chain = load_qa_chain(
        llm=llm, chain_type="stuff", memory=memory, prompt=prompt
    )
    return chain
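

# --- Usage sketch (not part of the original file) ---
# The document loaders, text splitter, embeddings, and FAISS imports above are not
# wired together in this section, so the following is a minimal sketch of how they
# are typically combined with build_qa_chain(). The file path "docs/sample.pdf",
# the chunking parameters, the question string, and the embedding model name
# "models/embedding-001" are assumptions for illustration only.
def _demo():
    # Load a PDF and split it into chunks suitable for embedding.
    pages = PyPDFLoader("docs/sample.pdf").load()  # hypothetical path
    splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=100)
    docs = splitter.split_documents(pages)

    # Embed the chunks with Gemini embeddings and index them in FAISS.
    embeddings = GoogleGenerativeAIEmbeddings(model="models/embedding-001")
    vector_store = FAISS.from_documents(docs, embeddings)

    # Retrieve the chunks most relevant to the question and run the QA chain;
    # the chain fills {context} with the documents and {human_input} with the question.
    question = "What is this document about?"
    relevant_docs = vector_store.similarity_search(question)
    chain = build_qa_chain()
    result = chain(
        {"input_documents": relevant_docs, "human_input": question},
        return_only_outputs=True,
    )
    print(result["output_text"])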