Update app.py
app.py CHANGED
@@ -66,6 +66,11 @@ with st.sidebar:
         "Insert a number (top n rows to be selected):", value=3, step=1
     )
 
+    # Select FM
+    option = st.selectbox(
+        "Which foundational model would you like?",
+        ("GPT4", "LLAMA3"))
+
     # Clear button
     clear_button = st.sidebar.button("Clear Conversation", key="clear")
 
@@ -129,8 +134,18 @@ elif uploaded_files:
         refs_tab = refs_tab.head(math.ceil(top_n))
         result = refs_tab
 
-        # Call
-
+        # Call FM
+        content = ' '.join(list(result.sentences))
+        if option == "GPT4":
+            response = call_gpt(prompt, content)
+        else:
+            response = call_llama(
+                f"""
+                Answer the question: {prompt}
+
+                Use the following information: {content}
+                """
+            )
 
         # Display assistant response in chat message container
         with st.chat_message("assistant"):
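
Note for readers: the new branch relies on call_gpt(prompt, content) and call_llama(...) helpers that are defined elsewhere in app.py and are not shown in this commit. Below is a minimal sketch of what such helpers might look like, assuming the OpenAI Python SDK for GPT-4 and a locally hosted Llama 3 model served through Ollama; the actual implementations, model names, and endpoints used by this repo may differ.

# Hypothetical sketches only: the real call_gpt / call_llama live elsewhere in app.py
# and may use different clients, model names, or prompt formats.
from openai import OpenAI   # assumes the OpenAI Python SDK (v1.x)
import ollama               # assumes a local Ollama server with a pulled llama3 model

client = OpenAI()  # reads OPENAI_API_KEY from the environment


def call_gpt(prompt: str, content: str) -> str:
    """Answer the user's prompt grounded in the retrieved content (sketch)."""
    resp = client.chat.completions.create(
        model="gpt-4",
        messages=[
            {"role": "system", "content": "Answer using only the provided context."},
            {"role": "user", "content": f"Question: {prompt}\n\nContext: {content}"},
        ],
    )
    return resp.choices[0].message.content


def call_llama(prompt: str) -> str:
    """Send the already-formatted prompt to a local llama3 model (sketch)."""
    resp = ollama.chat(
        model="llama3",
        messages=[{"role": "user", "content": prompt}],
    )
    return resp["message"]["content"]

Two behaviours of the diff worth noting: st.selectbox defaults to its first option, so the app calls GPT4 unless the user switches the sidebar to LLAMA3, and content = ' '.join(list(result.sentences)) assumes the retrieved dataframe exposes a sentences column of strings.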