santuchal committed on
Commit
208a712
1 Parent(s): a811a23

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +7 -10
app.py CHANGED
@@ -25,12 +25,12 @@ import re
25
 
26
 
27
  # default_persist_directory = './chroma_HF/'
28
- list_llm = ["mistralai/Mistral-7B-Instruct-v0.2", "mistralai/Mixtral-8x7B-Instruct-v0.1", "mistralai/Mistral-7B-Instruct-v0.1", \
29
- "google/gemma-7b-it","google/gemma-2b-it", \
30
- "HuggingFaceH4/zephyr-7b-beta", "HuggingFaceH4/zephyr-7b-gemma-v0.1", \
31
- "meta-llama/Llama-2-7b-chat-hf", "microsoft/phi-2", \
32
- "TinyLlama/TinyLlama-1.1B-Chat-v1.0", "mosaicml/mpt-7b-instruct", "tiiuae/falcon-7b-instruct", \
33
- "google/flan-t5-xxl"
34
  ]
35
  list_llm_simple = [os.path.basename(llm) for llm in list_llm]
36
 
@@ -280,11 +280,8 @@ def demo():
280
  collection_name = gr.State()
281
 
282
  gr.Markdown(
283
- """<center><h2>PDF-based chatbot (powered by LangChain and open-source LLMs)</center></h2>
284
  <h3>Ask any questions about your PDF documents, along with follow-ups</h3>
285
- <b>Note:</b> This AI assistant performs retrieval-augmented generation from your PDF documents. \
286
- When generating answers, it takes past questions into account (via conversational memory), and includes document references for clarity purposes.</i>
287
- <br><b>Warning:</b> This space uses the free CPU Basic hardware from Hugging Face. Some steps and LLM models used below (free inference endpoints) can take some time to generate an output.<br>
288
  """)
289
  with gr.Tab("Step 1 - Document pre-processing"):
290
  with gr.Row():
 
25
 
26
 
27
  # default_persist_directory = './chroma_HF/'
28
+ # list_llm = ["mistralai/Mistral-7B-Instruct-v0.2"], "mistralai/Mixtral-8x7B-Instruct-v0.1", "mistralai/Mistral-7B-Instruct-v0.1", \
29
+ # "google/gemma-7b-it","google/gemma-2b-it", \
30
+ # "HuggingFaceH4/zephyr-7b-beta", "HuggingFaceH4/zephyr-7b-gemma-v0.1", \
31
+ # "meta-llama/Llama-2-7b-chat-hf", "microsoft/phi-2", \
32
+ # "TinyLlama/TinyLlama-1.1B-Chat-v1.0", "mosaicml/mpt-7b-instruct", "tiiuae/falcon-7b-instruct", \
33
+ # "google/flan-t5-xxl"
34
  ]
35
  list_llm_simple = [os.path.basename(llm) for llm in list_llm]
36
 
 
280
  collection_name = gr.State()
281
 
282
  gr.Markdown(
283
+ """PDF-based chatbot (by Dr. Aloke Upadhaya)</center></h2>
284
  <h3>Ask any questions about your PDF documents, along with follow-ups</h3>
 
 
 
285
  """)
286
  with gr.Tab("Step 1 - Document pre-processing"):
287
  with gr.Row():