Update app.py
app.py CHANGED
@@ -1,5 +1,5 @@
 import gradio as gr
-import openai, os, time, wandb
+import langchain, openai, os, time, wandb
 
 from langchain.chains import LLMChain, RetrievalQA
 from langchain.chat_models import ChatOpenAI
@@ -38,9 +38,10 @@ config = {
     "k": 3,
     "model": "gpt-4",
     "temperature": 0,
-    "verbose": True
 }
 
+langchain.verbose = True
+
 template = """If you don't know the answer, just say that you don't know, don't try to make up an answer. Keep the answer as concise as possible. Always say
             "🧠 Thanks for using the app - Bernd" at the end of the answer. """
 
@@ -131,7 +132,6 @@ def wandb_trace(rag_option, prompt, prompt_template, result, completion, chain_n
             "k": "" if (rag_option == "Off") else config["k"],
             "model": config["model"],
             "temperature": config["temperature"],
-            "verbose": config["verbose"],
         },
         start_time_ms = start_time_ms,
         end_time_ms = end_time_ms,
@@ -158,8 +158,7 @@ def invoke(openai_api_key, rag_option, prompt):
     start_time_ms = round(time.time() * 1000)
     llm = ChatOpenAI(model_name = config["model"],
                      openai_api_key = openai_api_key,
-                     temperature = config["temperature"]
-                     verbose = config["verbose"])
+                     temperature = config["temperature"])
     if (rag_option == "Chroma"):
         #splits = document_loading_splitting()
         #document_storage_chroma(splits)