Spaces:
Runtime error
Runtime error
Update app.py
Browse files
app.py
CHANGED
@@ -48,49 +48,39 @@ messages = [
|
|
48 |
prompt = ChatPromptTemplate.from_messages(messages)
|
49 |
|
50 |
from langchain.chat_models import ChatOpenAI
|
51 |
-
from langchain.chains import
|
52 |
|
53 |
chain_type_kwargs = {"prompt": prompt}
|
54 |
|
55 |
-
chain =
|
56 |
llm=llm,
|
57 |
chain_type="stuff",
|
58 |
retriever=retriever,
|
59 |
-
return_source_documents=
|
60 |
chain_type_kwargs=chain_type_kwargs,
|
61 |
reduce_k_below_max_tokens=True,
|
62 |
verbose=True,
|
63 |
)
|
64 |
|
|
|
65 |
query = "행복한 인생이란?"
|
66 |
-
result = chain(query)
|
67 |
-
|
68 |
-
|
69 |
-
for doc in result['source_documents']:
|
70 |
-
print('내용 : ' + doc.page_content[0:100].replace('\n', ' '))
|
71 |
-
print('파일 : ' + doc.metadata['source'])
|
72 |
-
print('페이지 : ' + str(doc.metadata['page']))
|
73 |
-
|
74 |
-
|
75 |
-
def respond(message, chat_history): # 채팅봇의 응답을 처리하는 함수를 정의합니다.
|
76 |
-
|
77 |
-
result = chain(message)
|
78 |
|
|
|
|
|
|
|
79 |
bot_message = result['answer']
|
|
|
|
|
|
|
|
|
80 |
|
81 |
-
|
82 |
-
bot_message += '[' + str(i+1) + '] ' + doc.metadata['source'] + '(' + str(doc.metadata['page']) + ') '
|
83 |
-
|
84 |
-
chat_history.append((message, bot_message)) # 채팅 기록에 사용자의 메시지와 봇의 응답을 추가합니다.
|
85 |
-
|
86 |
-
return "", chat_history # 수정된 채팅 기록을 반환합니다.
|
87 |
-
|
88 |
-
with gr.Blocks(theme='gstaff/sketch') as demo: # gr.Blocks()를 사용하여 인터페이스를 생성합니다.
|
89 |
gr.Markdown("# 안녕하세요. 이태원 정부와 대화해보세요. \n 답변 생성에 조금 시간이 소요될 수 있습니다.")
|
90 |
-
chatbot = gr.Chatbot(label="채팅창")
|
91 |
-
msg = gr.Textbox(label="입력")
|
92 |
-
clear = gr.Button("초기화")
|
93 |
|
94 |
-
msg.submit(respond, [msg, chatbot], [msg, chatbot])
|
95 |
-
clear.click(lambda: None, None, chatbot, queue=False)
|
96 |
-
demo.launch(debug=True)
|
|
|
48 |
prompt = ChatPromptTemplate.from_messages(messages)
|
49 |
|
50 |
from langchain.chat_models import ChatOpenAI
|
51 |
+
from langchain.chains import ConversationalRetrievalChain
|
52 |
|
53 |
chain_type_kwargs = {"prompt": prompt}
|
54 |
|
55 |
+
chain = ConversationalRetrievalChain.from_llm(
|
56 |
llm=llm,
|
57 |
chain_type="stuff",
|
58 |
retriever=retriever,
|
59 |
+
return_source_documents=False,
|
60 |
chain_type_kwargs=chain_type_kwargs,
|
61 |
reduce_k_below_max_tokens=True,
|
62 |
verbose=True,
|
63 |
)
|
64 |
|
65 |
+
chat_history = []
|
66 |
query = "행복한 인생이란?"
|
67 |
+
result = chain({"question": query, "chat_history": chat_history})
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
68 |
|
69 |
+
def respond(message, chat_history):
|
70 |
+
result = chain({"question": message, "chat_history": chat_history})
|
71 |
+
|
72 |
bot_message = result['answer']
|
73 |
+
|
74 |
+
chat_history.append((message, bot_message))
|
75 |
+
|
76 |
+
return "", chat_history
|
77 |
|
78 |
+
with gr.Blocks(theme='gstaff/sketch') as demo:
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
79 |
gr.Markdown("# 안녕하세요. 이태원 정부와 대화해보세요. \n 답변 생성에 조금 시간이 소요될 수 있습니다.")
|
80 |
+
chatbot = gr.Chatbot(label="채팅창")
|
81 |
+
msg = gr.Textbox(label="입력")
|
82 |
+
clear = gr.Button("초기화")
|
83 |
|
84 |
+
msg.submit(respond, [msg, chatbot], [msg, chatbot])
|
85 |
+
clear.click(lambda: None, None, chatbot, queue=False)
|
86 |
+
demo.launch(debug=True)
|