Spaces: Runtime error
Update app.py
app.py CHANGED
@@ -54,14 +54,6 @@ transhuman_glossary = {
 }
 
 
-# Function to search glossary and display results
-def search_glossary(query):
-    for category, terms in transhuman_glossary.items():
-        if query.lower() in (term.lower() for term in terms):
-            st.markdown(f"### {category}")
-            st.write(f"- {query}")
-
-
 
 # Display the glossary with Streamlit components, ensuring emojis are used
 def display_glossary(area):
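A note on the matching logic this hunk removes (the next hunk reinstates it further down the file): the membership test query.lower() in (term.lower() for term in terms) only fires on an exact, case-insensitive whole-term match, never on a substring. A minimal, standalone sketch of that behavior, using an invented two-category glossary and print() in place of the Streamlit calls so it can run outside the app:

# Illustrative data only -- the real transhuman_glossary is defined in app.py.
transhuman_glossary = {
    "Augmentation": ["Exoskeletons", "Neural Implants"],
    "AI": ["Machine Learning", "Neural Networks"],
}

def search_glossary(query):
    for category, terms in transhuman_glossary.items():
        # Membership against a generator of lowercased terms: only an exact,
        # case-insensitive whole-term match succeeds; substrings do not.
        if query.lower() in (term.lower() for term in terms):
            print(f"### {category}")
            print(f"- {query}")

search_glossary("neural networks")   # prints the AI category: exact term match
search_glossary("neural")            # prints nothing: substring, not a full term
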
@@ -868,6 +860,28 @@ def add_medical_exam_buttons2():
     if col7.button("Ramipril π"):
         StreamLLMChatResponse(descriptions["Ramipril π"])
 
+
+# Function to search glossary and display results
+def search_glossary(query):
+    for category, terms in transhuman_glossary.items():
+        if query.lower() in (term.lower() for term in terms):
+            st.markdown(f"### {category}")
+            st.write(f"- {query}")
+
+    st.write('## Processing query against GPT and Llama:')
+    # ------------------------------------------------------------------------------------------------
+    st.write('Reasoning with your inputs using GPT...')
+    response = chat_with_model(transcript)
+    st.write('Response:')
+    st.write(response)
+    filename = generate_filename(response, "txt")
+    create_file(filename, transcript, response, should_save)
+
+    st.write('Reasoning with your inputs using Llama...')
+    response = StreamLLMChatResponse(transcript)
+    filename_txt = generate_filename(transcript, "md")
+    create_file(filename_txt, transcript, response, should_save)
+    # ------------------------------------------------------------------------------------------------
 
 
 # 17. Main
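One thing to watch in the relocated function above: the GPT/Llama section inside search_glossary reads a variable named transcript, which is not a parameter of the function, so unless transcript exists at module scope that branch would raise a NameError. A hedged sketch of the same two-pass flow driven by the query argument instead (assumption: query is the intended input; chat_with_model, StreamLLMChatResponse, generate_filename, create_file, and should_save are used exactly as they appear elsewhere in this diff):

def search_glossary(query):
    # Glossary lookup, unchanged from the hunk above.
    for category, terms in transhuman_glossary.items():
        if query.lower() in (term.lower() for term in terms):
            st.markdown(f"### {category}")
            st.write(f"- {query}")

    st.write('## Processing query against GPT and Llama:')
    st.write('Reasoning with your inputs using GPT...')
    gpt_response = chat_with_model(query)            # GPT pass on the user's query
    st.write('Response:')
    st.write(gpt_response)
    create_file(generate_filename(gpt_response, "txt"), query, gpt_response, should_save)

    st.write('Reasoning with your inputs using Llama...')
    llama_response = StreamLLMChatResponse(query)    # Llama pass on the same query
    create_file(generate_filename(query, "md"), query, llama_response, should_save)
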
@@ -1085,36 +1099,35 @@ def main():
 
     # Feedback
     # Step: Give User a Way to Upvote or Downvote
-
-
-
-    if feedback == "👍 Upvote":
-        st.write("You upvoted 👍. Thank you for your feedback!")
-    else:
-        st.write("You downvoted 👎. Thank you for your feedback!")
-
-    load_dotenv()
-    st.write(css, unsafe_allow_html=True)
-    st.header("Chat with documents :books:")
-    user_question = st.text_input("Ask a question about your documents:")
-    if user_question:
-        process_user_input(user_question)
-    with st.sidebar:
-        st.subheader("Your documents")
-        docs = st.file_uploader("import documents", accept_multiple_files=True)
-        with st.spinner("Processing"):
-            raw = pdf2txt(docs)
-            if len(raw) > 0:
-                length = str(len(raw))
-                text_chunks = txt2chunks(raw)
-                vectorstore = vector_store(text_chunks)
-                st.session_state.conversation = get_chain(vectorstore)
-                st.markdown('# AI Search Index of Length:' + length + ' Created.') # add timing
-                filename = generate_filename(raw, 'txt')
-                create_file(filename, raw, '', should_save)
-
-
-
+    GiveFeedback=False
+    if GiveFeedback:
+        with st.expander("Give your feedback π", expanded=False):
+
+            feedback = st.radio("Step 8: Give your feedback", ("👍 Upvote", "👎 Downvote"))
+            if feedback == "👍 Upvote":
+                st.write("You upvoted 👍. Thank you for your feedback!")
+            else:
+                st.write("You downvoted 👎. Thank you for your feedback!")
+
+    load_dotenv()
+    st.write(css, unsafe_allow_html=True)
+    st.header("Chat with documents :books:")
+    user_question = st.text_input("Ask a question about your documents:")
+    if user_question:
+        process_user_input(user_question)
+    with st.sidebar:
+        st.subheader("Your documents")
+        docs = st.file_uploader("import documents", accept_multiple_files=True)
+        with st.spinner("Processing"):
+            raw = pdf2txt(docs)
+            if len(raw) > 0:
+                length = str(len(raw))
+                text_chunks = txt2chunks(raw)
+                vectorstore = vector_store(text_chunks)
+                st.session_state.conversation = get_chain(vectorstore)
+                st.markdown('# AI Search Index of Length:' + length + ' Created.') # add timing
+                filename = generate_filename(raw, 'txt')
+                create_file(filename, raw, '', should_save)
 
     # Relocated! Hope you like your new space - enjoy!
     # Display instructions and handle query parameters
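The sidebar block added above wires the uploaded documents through pdf2txt, txt2chunks, vector_store, and get_chain before storing the resulting chain in st.session_state.conversation. A small sketch of that pipeline factored into one helper (the helper name build_conversation_chain is invented here for illustration; the four functions it calls are the ones the diff already uses, with the same call order and signatures):

def build_conversation_chain(docs):
    # Mirrors the sidebar flow above: PDFs -> raw text -> chunks -> vectors -> chain.
    raw = pdf2txt(docs)                       # concatenate text from the uploaded files
    if len(raw) == 0:
        return None                           # nothing uploaded or nothing extractable
    text_chunks = txt2chunks(raw)             # split the text into chunks
    vectorstore = vector_store(text_chunks)   # embed the chunks into a vector index
    return get_chain(vectorstore)             # conversational retrieval chain

# Usage, mirroring main():
#     chain = build_conversation_chain(docs)
#     if chain is not None:
#         st.session_state.conversation = chain
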
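The "# add timing" note on the st.markdown line in the last hunk suggests the author intends to report how long index creation takes. One hedged way to do that with the standard library, keeping the rest of the block as it appears in the diff (time is the only new import; the elapsed-seconds wording is illustrative):

import time

start = time.time()                          # start the clock before indexing
text_chunks = txt2chunks(raw)
vectorstore = vector_store(text_chunks)
st.session_state.conversation = get_chain(vectorstore)
elapsed = time.time() - start                # seconds spent building the index
st.markdown(f'# AI Search Index of Length: {len(raw)} Created in {elapsed:.2f} seconds.')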