awacke1 committed on
Commit
43d6863
•
1 Parent(s): ef9548a

Update app.py

Files changed (1)
  1. app.py +8 -79
app.py CHANGED
@@ -23,9 +23,7 @@ CONTAINER_NAME = os.environ.get("COSMOS_CONTAINER_NAME")
 Key = os.environ.get("Key") # 🔑 Don't forget your key!
 
 # 🏠 Your local app URL (Change this to your app's URL)
-#LOCAL_APP_URL = "http://localhost:8501"
-LOCAL_APP_URL = "https://huggingface.co/spaces/awacke1/AzureCosmosDBUI"
-
+LOCAL_APP_URL = "http://localhost:8501"
 
 # 🤖 OpenAI configuration
 #openai.api_key = os.environ.get("OPENAI_API_KEY")
@@ -259,57 +257,6 @@ def create_and_save_file(content, file_type="md", prompt=None, is_image=False, s
 
     return filename
 
-
-
-# 🔍 Search Glossary
-# @st.cache_resource
-def search_glossary(query):
-    all = ""
-    st.markdown(f"- {query}")
-
-    # 🔍 Run 1 - ArXiv RAG researcher expert ~-<>-~ Paper Summary & Ask LLM
-    client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")
-    response2 = client.predict(
-        query,  # str in 'parameter_13' Textbox component
-        #"mistralai/Mixtral-8x7B-Instruct-v0.1",  # Literal['mistralai/Mixtral-8x7B-Instruct-v0.1', 'mistralai/Mistral-7B-Instruct-v0.2', 'google/gemma-7b-it', 'None'] in 'LLM Model' Dropdown component
-        #"mistralai/Mistral-7B-Instruct-v0.2",  # Literal['mistralai/Mixtral-8x7B-Instruct-v0.1', 'mistralai/Mistral-7B-Instruct-v0.2', 'google/gemma-7b-it', 'None'] in 'LLM Model' Dropdown component
-        "google/gemma-7b-it",  # Literal['mistralai/Mixtral-8x7B-Instruct-v0.1', 'mistralai/Mistral-7B-Instruct-v0.2', 'google/gemma-7b-it', 'None'] in 'LLM Model' Dropdown component
-        True,  # bool in 'Stream output' Checkbox component
-        api_name="/ask_llm"
-    )
-    st.write('🔍 Run of Multi-Agent System Paper Summary Spec is Complete')
-    st.markdown(response2)
-
-    # ArXiv searcher ~-<>-~ Paper References - Update with RAG
-    client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")
-    response1 = client.predict(
-        query,
-        10,
-        "Semantic Search - up to 10 Mar 2024",  # Literal['Semantic Search - up to 10 Mar 2024', 'Arxiv Search - Latest - (EXPERIMENTAL)'] in 'Search Source' Dropdown component
-        "mistralai/Mixtral-8x7B-Instruct-v0.1",  # Literal['mistralai/Mixtral-8x7B-Instruct-v0.1', 'mistralai/Mistral-7B-Instruct-v0.2', 'google/gemma-7b-it', 'None'] in 'LLM Model' Dropdown component
-        api_name="/update_with_rag_md"
-    )
-    st.write('🔍 Run of Multi-Agent System Paper References is Complete')
-    responseall = response2 + response1[0] + response1[1]
-    st.markdown(responseall)
-    return responseall
-
-def display_content_or_image(query):
-    for category, terms in transhuman_glossary.items():
-        for term in terms:
-            if query.lower() in term.lower():
-                st.subheader(f"Found in {category}:")
-                st.write(term)
-                return True  # Return after finding and displaying the first match
-    image_dir = "images"  # Example directory where images are stored
-    image_path = f"{image_dir}/{query}.png"  # Construct image path with query
-    if os.path.exists(image_path):
-        st.image(image_path, caption=f"Image for {query}")
-        return True
-    st.warning("No matching content or image found.")
-    return False
-
-
 # 🎈 Let's modify the main app to be more fun!
 def main():
     st.title("🐙Git🌌Cosmos💫 - Azure Cosmos DB and Github Agent")
@@ -332,34 +279,16 @@ def main():
     if 'cloned_doc' not in st.session_state:
         st.session_state.cloned_doc = None
 
-
-    # ⚙️q= Run ArXiv search from query parameters
+    # ⚙️ q= Run ArXiv search from query parameters
     try:
         query_params = st.query_params
-        query = (query_params.get('q') or query_params.get('query') or [''])
-        if len(query) > 1:
-            #result = search_arxiv(query)
-            result2 = search_glossary(result)
-
-            filesearch = PromptPrefix + query
-            st.markdown(filesearch)
-            process_text(filesearch)
-    except:
+        query = (query_params.get('q') or query_params.get('query') or [''])[0]
+        if query:
+            # 🕵️‍♂️ We have a query! Let's process it!
+            process_text(query)
+            st.stop()  # Stop further execution
+    except Exception as e:
         st.markdown(' ')
-
-    if 'action' in st.query_params:
-        action = st.query_params()['action'][0]  # Get the first (or only) 'action' parameter
-        if action == 'show_message':
-            st.success("Showing a message because 'action=show_message' was found in the URL.")
-        elif action == 'clear':
-            clear_query_params()
-            #st.rerun()
-
-    if 'query' in st.query_params:
-        query = st.query_params['query'][0]  # Get the query parameter
-        # Display content or image based on the query
-        display_content_or_image(query)
-
 
     # 🔐 Automatic Login
     if Key:
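
A note on the query lookup added above: in Streamlit releases where st.query_params is a mapping of plain strings, .get('q') returns a str, so the trailing [0] would pick off only the first character of the parameter. Below is a minimal, version-tolerant sketch of the same idea; the get_query_param helper is hypothetical and not part of this commit, and process_text lives elsewhere in app.py.

import streamlit as st

def get_query_param(name: str) -> str:
    # Return the first value of ?name=... from the URL, or '' if absent.
    # Handles both a plain-string value (newer st.query_params) and a
    # list value (older query-param APIs), so the caller never indexes a str.
    value = st.query_params.get(name, '')
    if isinstance(value, (list, tuple)):
        return value[0] if value else ''
    return value

query = get_query_param('q') or get_query_param('query')
if query:
    st.write(f"Processing: {query}")  # app.py would call process_text(query) here
    st.stop()  # stop further execution once the query is handled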
 
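The removed search_glossary() relied on gradio_client calls into a public Space, and that lookup can still be reproduced outside app.py if needed. A minimal standalone sketch, assuming the awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern Space still exposes /ask_llm and /update_with_rag_md with the argument order shown in the deleted code:

from gradio_client import Client  # pip install gradio_client

def arxiv_lookup(query: str) -> str:
    # Mirrors the deleted search_glossary(): paper summary via /ask_llm,
    # then references via /update_with_rag_md on the same public Space.
    client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")
    summary = client.predict(
        query,
        "google/gemma-7b-it",   # LLM model choice, as in the removed code
        True,                   # stream output
        api_name="/ask_llm",
    )
    references = client.predict(
        query,
        10,                                       # number of results
        "Semantic Search - up to 10 Mar 2024",    # search source
        "mistralai/Mixtral-8x7B-Instruct-v0.1",   # LLM model
        api_name="/update_with_rag_md",
    )
    # Per the removed code, /update_with_rag_md returns a pair of markdown strings.
    return summary + references[0] + references[1]

if __name__ == "__main__":
    print(arxiv_lookup("mixture of experts"))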