Update app.py
app.py CHANGED
@@ -3,7 +3,7 @@ import os, time
 
 from dotenv import load_dotenv, find_dotenv
 
-from rag_langchain import
+from rag_langchain import LangChainRAG
 from rag_llamaindex import LlamaIndexRAG
 from trace import trace_wandb
 
@@ -33,7 +33,8 @@ def invoke(openai_api_key, prompt, rag_option):
 
     if (RAG_INGESTION):
         if (rag_option == RAG_LANGCHAIN):
-
+            rag = LangChainRAG()
+            rag.ingestion(config)
         elif (rag_option == RAG_LLAMAINDEX):
             rag = LlamaIndexRAG()
             rag.ingestion(config)
@@ -47,14 +48,16 @@ def invoke(openai_api_key, prompt, rag_option):
     start_time_ms = round(time.time() * 1000)
 
     if (rag_option == RAG_LANGCHAIN):
-
+        rag = LangChainRAG()
+        completion, chain, callback = rag.rag_chain(config, prompt)
 
         result = completion["result"]
     elif (rag_option == RAG_LLAMAINDEX):
         rag = LlamaIndexRAG()
         result = rag.retrieval(config, prompt)
     else:
-
+        rag = LangChainRAG()
+        completion, chain, callback = rag.llm_chain(config, prompt)
 
     if (completion.generations[0] != None and
         completion.generations[0][0] != None):
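For reference, a minimal sketch of the LangChainRAG interface that the call sites above assume. rag_langchain.py itself is not part of this diff, so the method bodies, return types, and the shape of config are assumptions inferred from how app.py uses the class.

# Hypothetical skeleton of rag_langchain.py, inferred from the call sites in app.py.
# Method names match the diff; everything else here is an assumption.

class LangChainRAG:
    def ingestion(self, config):
        # Assumed: load the source documents and build/persist the index
        # that rag_chain() later retrieves from.
        ...

    def rag_chain(self, config, prompt):
        # Assumed: run a retrieval-augmented chain. app.py reads
        # completion["result"], so the completion is expected to be a
        # dict-like object with a "result" key.
        # Returns (completion, chain, callback).
        ...

    def llm_chain(self, config, prompt):
        # Assumed: run a plain LLM chain without retrieval. app.py later
        # inspects completion.generations, so here the completion is expected
        # to be an LLMResult-style object. Returns (completion, chain, callback).
        ...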