Update app.py
app.py CHANGED
@@ -33,9 +33,7 @@ CHROMA_DIR = "/data/chroma"
 YOUTUBE_DIR = "/data/youtube"
 
 PDF_URL = "https://arxiv.org/pdf/2303.08774.pdf"
-WEB_URL_1 = "https://openai.com/research/gpt-4"
-WEB_URL_2 = "https://platform.openai.com/docs/introduction"
-WEB_URL_3 = "https://platform.openai.com/docs/api-reference"
+WEB_URL = "https://openai.com/research/gpt-4"
 YOUTUBE_URL_1 = "https://www.youtube.com/watch?v=--khbXchTeE"
 YOUTUBE_URL_2 = "https://www.youtube.com/watch?v=hdhZwyf24mE"
 YOUTUBE_URL_3 = "https://www.youtube.com/watch?v=vw-KWfKwvTQ"
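Net effect of this hunk: the three web sources collapse into a single WEB_URL constant pointing at the GPT-4 research page, and the two platform-docs URLs are dropped. If the (currently commented-out) web loader were re-enabled, it would only need the one constant. A minimal sketch, assuming LangChain's WebBaseLoader as referenced in the next hunk; only WEB_URL is taken from app.py, the rest is illustrative:

# Illustrative only: load the consolidated web source into LangChain Documents.
from langchain.document_loaders import WebBaseLoader

WEB_URL = "https://openai.com/research/gpt-4"

docs = []
loader = WebBaseLoader(WEB_URL)   # fetches the page over HTTP
docs.extend(loader.load())        # one Document per page, with source metadata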
@@ -55,10 +53,6 @@ def invoke(openai_api_key, use_rag, prompt):
     # Load Web
     #loader = WebBaseLoader(WEB_URL_1)
     #docs.extend(loader.load())
-    #loader = WebBaseLoader(WEB_URL_2)
-    #docs.extend(loader.load())
-    #loader = WebBaseLoader(WEB_URL_3)
-    #docs.extend(loader.load())
     # Load YouTube
     #loader = GenericLoader(YoutubeAudioLoader([YOUTUBE_URL_1,
     #                                           YOUTUBE_URL_2,
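This hunk only removes the loader calls for the deleted WEB_URL_2 and WEB_URL_3; the web and YouTube loaders themselves stay commented out. For context, GenericLoader plus YoutubeAudioLoader is normally paired with a Whisper parser that turns the downloaded audio into Documents. A sketch of that usual pattern, assuming OpenAIWhisperParser and the YOUTUBE_DIR constant from the config above; the Whisper pairing is an assumption, since the parser argument falls outside this diff:

# Assumed completion of the commented-out YouTube pipeline.
from langchain.document_loaders.generic import GenericLoader
from langchain.document_loaders.blob_loaders.youtube_audio import YoutubeAudioLoader
from langchain.document_loaders.parsers import OpenAIWhisperParser

YOUTUBE_DIR = "/data/youtube"
YOUTUBE_URL_1 = "https://www.youtube.com/watch?v=--khbXchTeE"

loader = GenericLoader(
    YoutubeAudioLoader([YOUTUBE_URL_1], YOUTUBE_DIR),  # downloads audio via yt_dlp
    OpenAIWhisperParser(),                             # transcribes it with OpenAI Whisper
)
docs = loader.load()  # one Document per transcribed audio chunk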
@@ -90,16 +84,16 @@ def invoke(openai_api_key, use_rag, prompt):
 description = """<strong>Overview:</strong> The app demonstrates how to use a <strong>Large Language Model (LLM)</strong> with <strong>Retrieval Augmented Generation (RAG)</strong>
 on <strong>external data</strong> (private/public & structured/unstructured).\n\n
 <strong>Instructions:</strong> Enter an OpenAI API key and perform LLM use cases (semantic search, summarization, translation, etc.) on
-<a href='""" + YOUTUBE_URL_1 + """'>YouTube</a>, <a href='""" + PDF_URL + """'>PDF</a>, and <a href='""" + WEB_URL_1 + """'>Web</a>
+<a href='""" + YOUTUBE_URL_1 + """'>YouTube</a>, <a href='""" + PDF_URL + """'>PDF</a>, and <a href='""" + WEB_URL + """'>Web</a>
 <strong>GPT-4 data</strong> (created after training cutoff).
 <ul style="list-style-type:square;">
 <li>Set "Retrieval Augmented Generation" to "<strong>False</strong>" and submit prompt "What is GPT-4?" The LLM <strong>without</strong> RAG does not know the answer.</li>
 <li>Set "Retrieval Augmented Generation" to "<strong>True</strong>" and submit prompt "What is GPT-4?" The LLM <strong>with</strong> RAG knows the answer.</li>
-<li>Experiment with prompts, e.g. "What are GPT-4's exam capabilities?" or "
-<li>Experiment more, for example "
+<li>Experiment with prompts, e.g. "What are GPT-4's exam capabilities?" or "Write a Python program calling the GPT-4 API."</li>
+<li>Experiment more, for example "What is the cost and rate limit of the GPT-4 API? Answer in English and German in JSON format."</li>
 </ul>\n\n
 <strong>Technology:</strong> <a href='https://www.gradio.app/'>Gradio</a> UI using <a href='https://openai.com/'>OpenAI</a> API via AI-first
-<a href='https://www.langchain.com/'>LangChain</a> toolkit with <a href='""" + WEB_URL_1 + """'>GPT-4</a> foundation model and AI-native
+<a href='https://www.langchain.com/'>LangChain</a> toolkit with <a href='""" + WEB_URL + """'>GPT-4</a> foundation model and AI-native
 <a href='https://www.trychroma.com/'>Chroma</a> embedding database."""
 
 gr.close_all()
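The description hunk fills in the two example prompts and switches the GPT-4 links to the renamed WEB_URL. The retrieval machinery the text advertises (GPT-4 via LangChain over a Chroma store) is not visible in this diff; what follows is a minimal sketch of what the use_rag branch of invoke() presumably does, assuming the standard LangChain RetrievalQA pattern over a Chroma collection persisted at CHROMA_DIR. The helper name rag_answer is hypothetical:

# Hypothetical sketch of the RAG path; names other than CHROMA_DIR are assumptions.
from langchain.chains import RetrievalQA
from langchain.chat_models import ChatOpenAI
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.vectorstores import Chroma

CHROMA_DIR = "/data/chroma"

def rag_answer(openai_api_key, prompt):
    # Reopen the persisted Chroma collection and expose it as a retriever.
    db = Chroma(
        persist_directory=CHROMA_DIR,
        embedding_function=OpenAIEmbeddings(openai_api_key=openai_api_key),
    )
    llm = ChatOpenAI(model_name="gpt-4", openai_api_key=openai_api_key, temperature=0)
    chain = RetrievalQA.from_chain_type(llm, retriever=db.as_retriever())
    return chain.run(prompt)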
|