anpigon committed on
Commit
b9af45a
1 Parent(s): d42d07f

chore(app.py): increase the number of search results from 5 to 10 for the retriever

Browse files

fix(app.py): revert the language model to "gpt-3.5-turbo-16k" to improve response quality
chore(app.py): add print statement to debug the result of the retrieval and generation process

Files changed (1) hide show
  1. app.py +4 -3
app.py CHANGED
@@ -15,10 +15,10 @@ from constants import persist_directory
15
  embedding = OpenAIEmbeddings()
16
  client = chromadb.PersistentClient(path=persist_directory)
17
  vector_store = Chroma(client=client, embedding_function=embedding)
18
- retriever = vector_store.as_retriever(search_kwargs={"k": 5})
19
 
20
- # llm = ChatOpenAI(model_name="gpt-3.5-turbo-16k", temperature=0)
21
- llm = ChatOpenAI(model_name="gpt-3.5-turbo", temperature=0)
22
 
23
  system_template = """Your name is Sayno. You act like a successful rich man. Never reveal that it is an AI.
24
  Given the following extracted parts of a long document and a question, create a final answer with references ("SOURCES").
@@ -72,6 +72,7 @@ chain = RetrievalQAWithSourcesChain.from_chain_type(
72
 
73
def respond(message, chat_history):
    """Answer *message* via the retrieval QA chain and update the chat log.

    The chain's answer is joined with its source references into a single
    HTML-formatted bot message. Returns an empty string (clears the input
    textbox) together with the extended history, as Gradio expects.
    """
    chain_output = chain(message)
    bot_message = f'{chain_output["answer"]}<br>- 출처: {chain_output["sources"]}'
    chat_history.append((message, bot_message))
    return "", chat_history
 
15
  embedding = OpenAIEmbeddings()
16
  client = chromadb.PersistentClient(path=persist_directory)
17
  vector_store = Chroma(client=client, embedding_function=embedding)
18
+ retriever = vector_store.as_retriever(search_kwargs={"k": 10})
19
 
20
+ llm = ChatOpenAI(model_name="gpt-3.5-turbo-16k", temperature=0)
21
+ # llm = ChatOpenAI(model_name="gpt-3.5-turbo", temperature=0)
22
 
23
  system_template = """Your name is Sayno. You act like a successful rich man. Never reveal that it is an AI.
24
  Given the following extracted parts of a long document and a question, create a final answer with references ("SOURCES").
 
72
 
73
def respond(message, chat_history):
    """Answer *message* via the retrieval QA chain and update the chat log.

    Dumps the raw chain result to stdout for debugging, then formats the
    answer plus its source references into one HTML bot message. Returns an
    empty string (clears the input textbox) and the extended history, as
    Gradio expects.
    """
    chain_output = chain(message)
    # Deliberate debug trace of the full retrieval/generation result.
    print(chain_output)
    bot_message = f'{chain_output["answer"]}<br>- 출처: {chain_output["sources"]}'
    chat_history.append((message, bot_message))
    return "", chat_history