from langchain.chat_models import ChatOpenAI
from langchain.document_loaders import PyPDFLoader
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.text_splitter import CharacterTextSplitter
from langchain.vectorstores import Chroma
import gradio as gr
import os
from dotenv import load_dotenv
load_dotenv()
# Expose the Space secret under the env var name the OpenAI client looks for.
os.environ["OPENAI_API_KEY"] = os.environ['my_secret']

llm = ChatOpenAI(model_name="gpt-3.5-turbo-0613", temperature=0)
# Load the reference PDF and split it into 800-character chunks.
loader = PyPDFLoader("/home/user/app/docs.pdf")
documents = loader.load()

text_splitter = CharacterTextSplitter(chunk_size=800, chunk_overlap=0)
texts = text_splitter.split_documents(documents)
# Embed the chunks and index them in a Chroma vector store.
embeddings = OpenAIEmbeddings()
vector_store = Chroma.from_documents(texts, embeddings)

# Retrieve the 3 most similar chunks for each question.
retriever = vector_store.as_retriever(search_kwargs={"k": 3})
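# Note: no persist_directory is given, so the Chroma index lives in memory only and the PDF
# is re-embedded every time the app starts.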
from langchain.prompts.chat import (
    ChatPromptTemplate,
    SystemMessagePromptTemplate,
    HumanMessagePromptTemplate,
)

# Persona system prompt; {context} is filled with the retrieved passages.
system_template = """
You are Father Lee Tae-seok. You were ordained as a priest in 2001 and served as a missionary in Tonj, South Sudan, Africa from 2001 to 2008.
Think and act as the real Father Lee Tae-seok, not as an AI. Carry the conversation based on your convictions and values as a priest and your experience in Tonj, and use a friendly, humble tone.
Refer to the content below and answer the user's question sincerely.
Always answer in Korean.

{context}
"""

messages = [
    SystemMessagePromptTemplate.from_template(system_template),
    HumanMessagePromptTemplate.from_template("{question}"),
]
prompt = ChatPromptTemplate.from_messages(messages)
from langchain.chains import ConversationalRetrievalChain
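# ConversationalRetrievalChain first condenses the incoming question together with the chat
# history into a standalone question, then answers it from the chunks the retriever returns.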
chain = ConversationalRetrievalChain.from_llm(
    llm=llm,
    retriever=retriever,
    return_source_documents=False,
    verbose=True,
    # Feed the persona prompt to the document-combining (stuff) chain.
    combine_docs_chain_kwargs={"prompt": prompt},
)
# Quick smoke test at startup.
chat_history = []
query = "What is a happy life?"
result = chain({"question": query, "chat_history": chat_history})
def respond(message, chat_history):
    # ConversationalRetrievalChain expects the chat history as (human, ai) tuples.
    formatted_history = [(human_msg, ai_msg) for human_msg, ai_msg in chat_history]
    result = chain({"question": message, "chat_history": formatted_history})
    bot_message = result['answer']
    chat_history.append((message, bot_message))
    # Clear the textbox and return the updated conversation.
    return "", chat_history
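# Gradio chat UI: a Chatbot for the conversation, a textbox for questions, and a reset button.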
with gr.Blocks(theme='gstaff/sketch') as demo:
    gr.Markdown("# Hello. Have a conversation with Father Lee Tae-seok.\nGenerating an answer may take a moment.")
    chatbot = gr.Chatbot(label="Chat window")
    msg = gr.Textbox(label="Input")
    clear = gr.Button("Reset")

    msg.submit(respond, [msg, chatbot], [msg, chatbot])
    clear.click(lambda: None, None, chatbot, queue=False)

demo.launch(debug=True)