Update app.py
app.py CHANGED
@@ -58,6 +58,11 @@ def select_llm():
 
def select_embedding_model():
    st.header("Choose Embedding Model")
+    col1, col2 = st.columns([2,1])
+    with col2:
+        st.markdown("""
+        [Embedding Models Leaderboard](https://huggingface.co/spaces/mteb/leaderboard)
+        """)
    model_names = [
        "BAAI/bge-small-en-v1.5",
        "WhereIsAI/UAE-Large-V1",
@@ -83,6 +88,11 @@ def select_embedding_model():
 
def select_node_parser():
    st.header("Choose Node Parser")
+    col1, col2 = st.columns([4,1])
+    with col2:
+        st.markdown("""
+        [More Information](https://docs.llamaindex.ai/en/stable/module_guides/loading/node_parsers/root.html)
+        """)
    parser_types = ["SentenceSplitter", "CodeSplitter", "SemanticSplitterNodeParser",
                    "TokenTextSplitter", "HTMLNodeParser", "JSONNodeParser", "MarkdownNodeParser"]
    parser_type = st.selectbox("Select Node Parser", parser_types, on_change=reset_pipeline_generated)
@@ -140,6 +150,11 @@ def select_node_parser():
 
def select_response_synthesis_method():
    st.header("Choose Response Synthesis Method")
+    col1, col2 = st.columns([4,1])
+    with col2:
+        st.markdown("""
+        [More Information](https://docs.llamaindex.ai/en/stable/module_guides/querying/response_synthesizers/response_synthesizers.html)
+        """)
    response_modes = [
        "refine",
        "tree_summarize",