Update app.py
app.py CHANGED
@@ -128,6 +128,9 @@ def wandb_trace(rag_option, prompt, completion, result, generation_info, llm_out
               } if (str(err_msg) == "") else {},
     inputs = {"rag_option": rag_option,
               "prompt": prompt,
+              "chain_prompt": (str(chain.prompt) if (rag_option == RAG_OFF) else
+                               str(chain.combine_documents_chain.llm_chain.prompt)),
+              "document_metadata": "" if (rag_option == RAG_OFF) else str([doc.metadata for doc in completion["source_documents"]]),
               } if (str(err_msg) == "") else {},
     outputs = {"result": result,
                "generation_info": str(generation_info),
@@ -140,8 +143,6 @@ def wandb_trace(rag_option, prompt, completion, result, generation_info, llm_out
                               str(chain.combine_documents_chain.llm_chain.llm.model_name)),
               "temperature": (str(chain.llm.temperature) if (rag_option == RAG_OFF) else
                               str(chain.combine_documents_chain.llm_chain.llm.temperature)),
-              "prompt": (str(chain.prompt) if (rag_option == RAG_OFF) else
-                         str(chain.combine_documents_chain.llm_chain.prompt)),
               "retriever": ("" if (rag_option == RAG_OFF) else str(chain.retriever)),
               } if (str(err_msg) == "") else {},
     start_time_ms = start_time_ms,
@@ -193,7 +194,7 @@ def invoke(openai_api_key, rag_option, prompt):
         if (completion.generations[0] != None and completion.generations[0][0] != None):
             result = completion.generations[0][0].text
             generation_info = completion.generations[0][0].generation_info
-
+
             llm_output = completion.llm_output
     except Exception as e:
         err_msg = e