Spaces:
Running
Running
# Standard library
import base64
import glob
import hashlib
import json
import os
import random
import re
import shutil
import time
import traceback
import uuid
import zipfile
from datetime import datetime
from urllib.parse import quote

# Third-party
import anthropic
import pandas as pd
import pytz
import streamlit as st
from azure.cosmos import CosmosClient, exceptions
from git import Repo
from github import Github
from gradio_client import Client
from PIL import Image
# π App Configuration - Because every app needs a good costume! | |
# Page metadata shown in the browser tab and the Streamlit hamburger menu.
Site_Name = 'πGitπCosmosπ« - Azure Cosmos DB and Github Agent'
title = "πGitπCosmosπ« - Azure Cosmos DB and Github Agent"
helpURL = 'https://huggingface.co/awacke1'
bugURL = 'https://huggingface.co/spaces/awacke1'
icons = 'πππ«'
# NOTE: st.set_page_config must be the first Streamlit call in the script.
st.set_page_config(
    page_title=title,
    page_icon=icons,
    layout="wide",
    initial_sidebar_state="auto",
    menu_items={
        'Get Help': helpURL,
        'Report a bug': bugURL,
        'About': title
    }
)
# π Cosmos DB configuration - Where data goes to party!
ENDPOINT = "https://acae-afd.documents.azure.com:443/"  # fixed Cosmos DB account endpoint
DATABASE_NAME = os.environ.get("COSMOS_DATABASE_NAME")  # may be None; DB is chosen via sidebar
CONTAINER_NAME = os.environ.get("COSMOS_CONTAINER_NAME")  # may be None; container chosen via sidebar
Key = os.environ.get("Key")  # Cosmos DB primary key; required for the automatic login in main()
# π Your local app URL - Home sweet home
LOCAL_APP_URL = "https://huggingface.co/spaces/awacke1/AzureCosmosDBUI"
# π€ Anthropic configuration - Teaching machines to be more human (and funnier)
# Module-level Claude client used by the chat section in main().
client = anthropic.Anthropic(api_key=os.environ.get("ANTHROPIC_API_KEY"))
# π§ Initialize session state - Because even apps need a good memory
if "chat_history" not in st.session_state:
    st.session_state.chat_history = []
# π οΈ Helper Functions - The unsung heroes of our code | |
# π Get a file download link - Making file sharing as easy as stealing candy from a baby | |
def get_download_link(file_path):
    """Build an HTML anchor that downloads *file_path* as a base64 data URI."""
    file_name = os.path.basename(file_path)
    with open(file_path, "rb") as handle:
        payload = base64.b64encode(handle.read()).decode()
    return f'<a href="data:file/txt;base64,{payload}" download="{file_name}">Download {file_name}π</a>'
# π² Generate a unique ID - Because being unique is important (just ask your mother) | |
def generate_unique_id():
    """Return a sortable unique id: '<UTC microsecond timestamp>-<uuid4>'.

    Fix: datetime.utcnow() is deprecated since Python 3.12; the aware
    datetime.now(timezone.utc) renders the exact same digits here.
    """
    from datetime import timezone  # local import: file level only imports datetime
    timestamp = datetime.now(timezone.utc).strftime('%Y%m%d%H%M%S%f')
    unique_uuid = str(uuid.uuid4())
    return f"{timestamp}-{unique_uuid}"
# π Generate a filename - Naming files like a pro (or a very confused librarian) | |
def generate_filename(prompt, file_type):
    """Return '<MMDD_HHMM><sanitized prompt, max 90 chars>.<file_type>'.

    Timestamp is rendered in US Central time. Improvement: the legacy
    third-party pytz lookup is replaced with the stdlib zoneinfo (3.9+)
    equivalent; the rendered timestamp is identical.
    """
    from zoneinfo import ZoneInfo  # stdlib replacement for pytz
    central = ZoneInfo('US/Central')
    safe_date_time = datetime.now(central).strftime("%m%d_%H%M")
    # Strip every non-word character so the prompt is filesystem-safe.
    safe_prompt = re.sub(r'\W+', '', prompt)[:90]
    return f"{safe_date_time}{safe_prompt}.{file_type}"
# πΎ Create and save a file - Because data hoarding is a legitimate hobby | |
def create_file(filename, prompt, response, should_save=True):
    """Write prompt + blank line + response to *filename* (UTF-8).

    No-op when should_save is falsy.
    """
    if not should_save:
        return
    content = prompt + "\n\n" + response
    with open(filename, 'w', encoding='utf-8') as handle:
        handle.write(content)
# π Load file content - Bringing words back from the digital grave | |
def load_file(file_name):
    """Return the full UTF-8 text content of *file_name*."""
    with open(file_name, "r", encoding='utf-8') as handle:
        return handle.read()
# π Display glossary entity - Making search fun again (as if it ever was) | |
def display_glossary_entity(k):
    """Render *k* followed by emoji links that search it on ArXiv, Wikipedia, Google and YouTube."""
    search_urls = {
        "ππArXiv": lambda k: f"/?q={quote(k)}",
        "π": lambda k: f"https://en.wikipedia.org/wiki/{quote(k)}",
        "π": lambda k: f"https://www.google.com/search?q={quote(k)}",
        "π₯": lambda k: f"https://www.youtube.com/results?search_query={quote(k)}",
    }
    anchors = []
    for emoji, build_url in search_urls.items():
        anchors.append(f"<a href='{build_url(k)}' target='_blank'>{emoji}</a>")
    st.markdown(f"{k} {' '.join(anchors)}", unsafe_allow_html=True)
# ποΈ Create zip of files - Squeezing files together like sardines in a can | |
def create_zip_of_files(files):
    """Bundle every path in *files* into ./all_files.zip; return the archive name."""
    zip_name = "all_files.zip"
    with zipfile.ZipFile(zip_name, 'w') as archive:
        for path in files:
            archive.write(path)
    return zip_name
# π¬ Get video HTML - Making videos play nice (or at least trying to) | |
def get_video_html(video_path, width="100%"):
    """Return an HTML5 <video> tag embedding *video_path* as a base64 data URI.

    Fix: the original read the file through a bare open() expression and
    leaked the file handle; the context manager guarantees it is closed.
    """
    with open(video_path, 'rb') as handle:
        encoded = base64.b64encode(handle.read()).decode()
    video_url = f"data:video/mp4;base64,{encoded}"
    return f'''
    <video width="{width}" controls autoplay loop>
        <source src="{video_url}" type="video/mp4">
        Your browser does not support the video tag.
    </video>
    '''
# π΅ Get audio HTML - Let the music play (and hope it's not Baby Shark) | |
def get_audio_html(audio_path, width="100%"):
    """Return an HTML5 <audio> tag embedding *audio_path* as a base64 data URI.

    Fix: the original read the file through a bare open() expression and
    leaked the file handle; the context manager guarantees it is closed.
    """
    with open(audio_path, 'rb') as handle:
        encoded = base64.b64encode(handle.read()).decode()
    audio_url = f"data:audio/mpeg;base64,{encoded}"
    return f'''
    <audio controls style="width:{width}">
        <source src="{audio_url}" type="audio/mpeg">
        Your browser does not support the audio element.
    </audio>
    '''
# π Cosmos DB functions - Where data goes to live its best life | |
# π Get databases - Collecting databases like Pokemon cards | |
def get_databases(client):
    """List the ids of all databases reachable through *client*."""
    ids = []
    for database in client.list_databases():
        ids.append(database['id'])
    return ids
# π¦ Get containers - Finding where all the good stuff is hidden | |
def get_containers(database):
    """List the ids of all containers inside *database*."""
    ids = []
    for entry in database.list_containers():
        ids.append(entry['id'])
    return ids
# π Get documents - Retrieving the sacred texts (or just some JSON) | |
def get_documents(container, limit=None):
    """Fetch documents newest-first (ordered by _ts); *limit* caps the page size."""
    newest_first = "SELECT * FROM c ORDER BY c._ts DESC"
    return list(
        container.query_items(
            query=newest_first,
            enable_cross_partition_query=True,
            max_item_count=limit,
        )
    )
# π₯ Insert record - Adding new data (and crossing fingers it doesn't break anything) | |
def insert_record(container, record):
    """Insert *record* into *container*; return (ok, message) instead of raising."""
    try:
        container.create_item(body=record)
        return True, "Record inserted successfully! π"
    except exceptions.CosmosHttpResponseError as e:
        # Service-level failure (auth, throttling, conflict, ...)
        return False, f"HTTP error occurred: {str(e)} π¨"
    except Exception as e:
        return False, f"An unexpected error occurred: {str(e)} π±"
# π Update record - Giving data a makeover | |
def update_record(container, updated_record):
    """Upsert *updated_record* into *container*; return (ok, message)."""
    try:
        container.upsert_item(body=updated_record)
        return True, f"Record with id {updated_record['id']} successfully updated. π οΈ"
    except exceptions.CosmosHttpResponseError as e:
        return False, f"HTTP error occurred: {str(e)} π¨"
    except Exception as e:
        # Full traceback in the message aids debugging from the UI.
        return False, f"An unexpected error occurred: {traceback.format_exc()} π±"
# ποΈ Delete record - Saying goodbye to data (it's not you, it's me) | |
def delete_record(container, name, id):
    """Delete the item whose id (and partition key) equal *id*; return (ok, message)."""
    try:
        container.delete_item(item=id, partition_key=id)
        return True, f"Successfully deleted record with name: {name} and id: {id} ποΈ"
    except exceptions.CosmosResourceNotFoundError:
        # Treat "already gone" as a soft failure with a friendly message.
        return False, f"Record with id {id} not found. It may have been already deleted. π΅οΈββοΈ"
    except exceptions.CosmosHttpResponseError as e:
        return False, f"HTTP error occurred: {str(e)} π¨"
    except Exception as e:
        return False, f"An unexpected error occurred: {traceback.format_exc()} π±"
# πΎ Save to Cosmos DB - Preserving data for future generations (or just until the next update) | |
def save_to_cosmos_db(container, query, response1, response2):
    """Persist a query/response pair as a new document and refresh the doc list.

    Reports success/failure through Streamlit widgets; returns nothing.
    """
    try:
        if not container:
            st.error("Cosmos DB container is not initialized.")
            return
        record = {
            "id": generate_unique_id(),
            "query": query,
            "response1": response1,
            "response2": response2,
            "timestamp": datetime.utcnow().isoformat(),
        }
        try:
            container.create_item(body=record)
            st.success(f"Record saved successfully with ID: {record['id']}")
            # Refresh the documents display
            st.session_state.documents = get_documents(container)
        except exceptions.CosmosHttpResponseError as e:
            st.error(f"Error saving record to Cosmos DB: {e}")
    except Exception as e:
        st.error(f"An unexpected error occurred: {str(e)}")
# π GitHub functions - Where code goes to socialize | |
# π₯ Download GitHub repo - Cloning repos like it's going out of style | |
def download_github_repo(url, local_path):
    """Clone the repository at *url* into *local_path*, replacing any existing checkout."""
    if os.path.exists(local_path):
        shutil.rmtree(local_path)  # guarantee a clean clone target
    Repo.clone_from(url, local_path)
# ποΈ Create zip file - Squeezing files tighter than your budget | |
def create_zip_file(source_dir, output_filename):
    """Zip the contents of *source_dir* into '<output_filename>.zip'."""
    shutil.make_archive(output_filename, 'zip', source_dir)
# ποΈ Create repo - Building digital homes for lonely code | |
def create_repo(g, repo_name):
    """Create *repo_name* under the authenticated user's account and return the repo object."""
    return g.get_user().create_repo(repo_name)
# π Push to GitHub - Sending code to the cloud (hopefully not the rainy kind) | |
def push_to_github(local_path, repo, github_token):
    """Push the working tree at *local_path* to *repo* on GitHub.

    Embeds *github_token* in the remote URL for authentication, points
    'origin' at the target repo, commits any pending changes, then pushes
    the current branch. NOTE(review): the token ends up persisted in
    .git/config of the local clone — acceptable for throwaway clones,
    confirm for anything longer-lived.
    """
    repo_url = f"https://{github_token}@github.com/{repo.full_name}.git"
    local_repo = Repo(local_path)
    # Reuse an existing 'origin' remote if present; otherwise create one.
    if 'origin' in [remote.name for remote in local_repo.remotes]:
        origin = local_repo.remote('origin')
        origin.set_url(repo_url)
    else:
        origin = local_repo.create_remote('origin', repo_url)
    # A freshly-initialized repo has no branches yet: create 'main' first.
    if not local_repo.heads:
        local_repo.git.checkout('-b', 'main')
        current_branch = 'main'
    else:
        current_branch = local_repo.active_branch.name
    local_repo.git.add(A=True)
    # Only commit when the index actually changed, to avoid an empty-commit error.
    if local_repo.is_dirty():
        local_repo.git.commit('-m', 'Initial commit')
    origin.push(refspec=f'{current_branch}:{current_branch}')
def save_or_clone_to_cosmos_db(container, query=None, response=None, clone_id=None):
    """Save a new query/response document, or clone an existing one.

    When *clone_id* is given, the referenced document's originalText and
    qtPrompts are copied into a new document; otherwise a new query/response
    record is created. Retries up to 10 times with exponential backoff on id
    conflicts (HTTP 409). Returns the new document id, or None on failure.

    Fix: `random` was used here but never imported anywhere in the file, so
    every call raised NameError — imported locally to make this self-contained.
    """
    import random  # fix: module was not imported at file level

    def generate_complex_unique_id():
        # timestamp + random suffix + uuid4: sortable and collision-resistant
        timestamp = datetime.utcnow().strftime('%Y%m%d%H%M%S%f')
        random_component = ''.join(random.choices('abcdefghijklmnopqrstuvwxyz0123456789', k=8))
        return f"{timestamp}-{random_component}-{str(uuid.uuid4())}"

    max_retries = 10
    base_delay = 0.1  # 100 ms
    for attempt in range(max_retries):
        try:
            new_id = generate_complex_unique_id()
            if clone_id:
                try:
                    existing_doc = container.read_item(item=clone_id, partition_key=clone_id)
                    new_doc = {
                        'id': new_id,
                        'originalText': existing_doc.get('originalText', ''),
                        'qtPrompts': existing_doc.get('qtPrompts', []),
                        'cloned_from': clone_id,
                        'cloned_at': datetime.utcnow().isoformat()
                    }
                except exceptions.CosmosResourceNotFoundError:
                    st.error(f"Document with ID {clone_id} not found for cloning.")
                    return None
            else:
                new_doc = {
                    'id': new_id,
                    'query': query,
                    'response': response,
                    'created_at': datetime.utcnow().isoformat()
                }
            # Attempt to create the item (note: rebinds the `response` parameter
            # to the created document returned by the SDK).
            response = container.create_item(body=new_doc)
            st.success(f"{'Cloned' if clone_id else 'New'} document saved successfully with ID: {response['id']}")
            # Refresh the documents in the session state
            st.session_state.documents = list(container.query_items(
                query="SELECT * FROM c ORDER BY c._ts DESC",
                enable_cross_partition_query=True
            ))
            return response['id']
        except exceptions.CosmosHttpResponseError as e:
            if e.status_code == 409:  # id conflict: back off, then retry with a fresh id
                delay = base_delay * (2 ** attempt) + random.uniform(0, 0.1)
                st.warning(f"ID conflict occurred. Retrying in {delay:.2f} seconds... (Attempt {attempt + 1})")
                time.sleep(delay)
            else:
                st.error(f"Error saving to Cosmos DB: {e}")
                return None
        except Exception as e:
            st.error(f"An unexpected error occurred: {str(e)}")
            return None
    st.error("Failed to save document after maximum retries.")
    return None
# πΎ Save or clone to Cosmos DB - Because every document deserves a twin | |
def save_or_clone_to_cosmos_db2(container, query=None, response=None, clone_id=None):
    """Simpler save/clone variant: single attempt, uuid4 ids, no retry loop.

    Clones the full source document when *clone_id* is given, otherwise
    stores a new query/response record. Returns the new id on success.
    """
    try:
        if not container:
            st.error("Cosmos DB container is not initialized.")
            return
        new_id = str(uuid.uuid4())
        if clone_id:
            # Clone path: copy the source document under a fresh id.
            try:
                existing_doc = container.read_item(item=clone_id, partition_key=clone_id)
            except exceptions.CosmosResourceNotFoundError:
                st.error(f"Document with ID {clone_id} not found for cloning.")
                return
            new_doc = dict(existing_doc)
            new_doc['id'] = new_id
            new_doc['cloned_from'] = clone_id
            new_doc['cloned_at'] = datetime.utcnow().isoformat()
        else:
            # Fresh record from the supplied query/response pair.
            new_doc = {
                'id': new_id,
                'query': query,
                'response': response,
                'created_at': datetime.utcnow().isoformat()
            }
        container.create_item(body=new_doc)
        st.success(f"{'Cloned' if clone_id else 'New'} document saved successfully with ID: {new_id}")
        # Refresh the documents in the session state
        st.session_state.documents = list(container.query_items(
            query="SELECT * FROM c ORDER BY c._ts DESC",
            enable_cross_partition_query=True
        ))
        return new_id
    except exceptions.CosmosHttpResponseError as e:
        st.error(f"Error saving to Cosmos DB: {e}")
    except Exception as e:
        st.error(f"An unexpected error occurred: {str(e)}")
# π¦ Archive current container - Packing up data like you're moving to a new digital house | |
def archive_current_container(database_name, container_name, client):
    """Dump every document in the container to JSON files, zip them up,
    and return an HTML download link (or an error string on failure)."""
    try:
        base_dir = "./cosmos_archive_current_container"
        if os.path.exists(base_dir):
            shutil.rmtree(base_dir)  # always stage into an empty directory
        os.makedirs(base_dir)
        container_client = client.get_database_client(database_name).get_container_client(container_name)
        container_dir = os.path.join(base_dir, container_name)
        os.makedirs(container_dir)
        # One JSON file per document, named by its id when available.
        for item in list(container_client.read_all_items()):
            fallback = f"unknown_{datetime.now().strftime('%Y%m%d%H%M%S')}"
            item_id = item.get('id', fallback)
            with open(os.path.join(container_dir, f"{item_id}.json"), 'w') as handle:
                json.dump(item, handle, indent=2)
        archive_name = f"{container_name}_archive_{datetime.now().strftime('%Y%m%d%H%M%S')}"
        shutil.make_archive(archive_name, 'zip', base_dir)
        return get_download_link(f"{archive_name}.zip")
    except Exception as e:
        return f"An error occurred while archiving data: {str(e)} π’"
# π Search glossary - Finding needles in digital haystacks | |
def search_glossary(query):
    """Run *query* against several hosted LLMs plus an ArXiv RAG endpoint.

    Renders every intermediate result to the page and returns the tuple
    (result, result2, result3, response2). NOTE(review): the model/database
    selectboxes are rendered but their values are never used — the three
    /ask_llm calls below hard-code their models; confirm intent.
    """
    st.markdown(f"### π SearchGlossary for: {query}")
    # Dropdown for model selection (currently display-only, see note above)
    model_options = ['mistralai/Mixtral-8x7B-Instruct-v0.1', 'mistralai/Mistral-7B-Instruct-v0.2', 'google/gemma-7b-it', 'None']
    model_choice = st.selectbox('π§ Select LLM Model', options=model_options, index=1)
    # Dropdown for database selection (currently display-only, see note above)
    database_options = ['Semantic Search', 'Arxiv Search - Latest - (EXPERIMENTAL)']
    database_choice = st.selectbox('π Select Database', options=database_options, index=0)
    # π΅οΈββοΈ Searching the glossary for: query
    all_results = ""
    st.markdown(f"- {query}")
    # π ArXiv RAG researcher expert ~-<>-~ Paper Summary & Ask LLM
    # NOTE(review): this local `client` shadows the module-level Anthropic client.
    client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")
    # Ask the same question of three different models via /ask_llm.
    result = client.predict(
        prompt=query,
        llm_model_picked="mistralai/Mixtral-8x7B-Instruct-v0.1",
        stream_outputs=True,
        api_name="/ask_llm"
    )
    st.markdown(result)
    st.code(result, language="python", line_numbers=True)
    result2 = client.predict(
        prompt=query,
        llm_model_picked="mistralai/Mistral-7B-Instruct-v0.2",
        stream_outputs=True,
        api_name="/ask_llm"
    )
    st.markdown(result2)
    st.code(result2, language="python", line_numbers=True)
    result3 = client.predict(
        prompt=query,
        llm_model_picked="google/gemma-7b-it",
        stream_outputs=True,
        api_name="/ask_llm"
    )
    st.markdown(result3)
    st.code(result3, language="python", line_numbers=True)
    # RAG-augmented markdown summary via /update_with_rag_md (returns two parts).
    response2 = client.predict(
        message=query,  # str in 'parameter_13' Textbox component
        llm_results_use=10,
        database_choice="Semantic Search",
        llm_model_picked="mistralai/Mistral-7B-Instruct-v0.2",
        api_name="/update_with_rag_md"
    )
    st.markdown(response2[0])
    st.code(response2[0], language="python", line_numbers=True, wrap_lines=True)
    st.markdown(response2[1])
    st.code(response2[1], language="python", line_numbers=True, wrap_lines=True)
    return result, result2, result3, response2
# π Main function - Where the magic happens (and occasionally breaks) | |
def main():
    """Streamlit entry point: Cosmos DB navigator, GitHub operations, and Claude chat.

    Flow: initialize session state -> handle ?q= query params -> auto-login
    with the Cosmos key from the environment -> sidebar DB/container pickers
    -> per-view document editors -> GitHub clone/push -> Claude chat ->
    markdown file manager.
    """
    st.title("πGitπCosmosπ« - Azure Cosmos DB and Github Agent")
    # π¦ Initialize session state
    if 'logged_in' not in st.session_state:
        st.session_state.logged_in = False
    if 'selected_records' not in st.session_state:
        st.session_state.selected_records = []
    if 'client' not in st.session_state:
        st.session_state.client = None
    if 'selected_database' not in st.session_state:
        st.session_state.selected_database = None
    if 'selected_container' not in st.session_state:
        st.session_state.selected_container = None
    if 'selected_document_id' not in st.session_state:
        st.session_state.selected_document_id = None
    if 'current_index' not in st.session_state:
        st.session_state.current_index = 0
    if 'cloned_doc' not in st.session_state:
        st.session_state.cloned_doc = None
    # βοΈ q= Run ArXiv search from query parameters
    try:
        query_params = st.query_params
        query = query_params.get('q') or query_params.get('query') or ''
        if query:
            # π΅οΈββοΈ We have a query! Let's process it!
            result, result2, result3, response2 = search_glossary(query)
            # When saving results, pass the container
            # NOTE(review): st.session_state.cosmos_container is never assigned in
            # this file — these saves rely on the outer except to swallow the
            # AttributeError; confirm where cosmos_container is meant to be set.
            try:
                save_to_cosmos_db(st.session_state.cosmos_container, query, result, result)
                save_to_cosmos_db(st.session_state.cosmos_container, query, result2, result2)
                save_to_cosmos_db(st.session_state.cosmos_container, query, result3, result3)
                save_to_cosmos_db(st.session_state.cosmos_container, query, response2[0], response2[0])
                save_to_cosmos_db(st.session_state.cosmos_container, query, response2[1], response2[1])
            except exceptions.CosmosHttpResponseError as e:
                st.error(f"HTTP error occurred: {str(e)} π¨")
            except Exception as e:
                st.error(f"An unexpected error occurred: {str(e)} π±")
            st.stop()  # Stop further execution
    except Exception as e:
        st.markdown(' ')
    # π Automatic Login using the Cosmos key from the environment
    if Key:
        st.session_state.primary_key = Key
        st.session_state.logged_in = True
    else:
        st.error("Cosmos DB Key is not set in environment variables. πβ")
        return  # Can't proceed without a key
    if st.session_state.logged_in:
        # π Initialize Cosmos DB client (lazily, once per session)
        try:
            if st.session_state.client is None:
                st.session_state.client = CosmosClient(ENDPOINT, credential=st.session_state.primary_key)
            # ποΈ Sidebar for database, container, and document selection
            st.sidebar.title("πGitπCosmosπ«ποΈNavigator")
            databases = get_databases(st.session_state.client)
            selected_db = st.sidebar.selectbox("ποΈ Select Database", databases)
            # Reset downstream selections whenever the database changes.
            if selected_db != st.session_state.selected_database:
                st.session_state.selected_database = selected_db
                st.session_state.selected_container = None
                st.session_state.selected_document_id = None
                st.session_state.current_index = 0
                st.rerun()
            if st.session_state.selected_database:
                database = st.session_state.client.get_database_client(st.session_state.selected_database)
                containers = get_containers(database)
                selected_container = st.sidebar.selectbox("π Select Container", containers)
                # Reset document selection whenever the container changes.
                if selected_container != st.session_state.selected_container:
                    st.session_state.selected_container = selected_container
                    st.session_state.selected_document_id = None
                    st.session_state.current_index = 0
                    st.rerun()
                if st.session_state.selected_container:
                    container = database.get_container_client(st.session_state.selected_container)
                    # π¦ Add Export button
                    if st.button("π¦ Export Container Data"):
                        download_link = archive_current_container(st.session_state.selected_database, st.session_state.selected_container, st.session_state.client)
                        if download_link.startswith('<a'):
                            st.markdown(download_link, unsafe_allow_html=True)
                        else:
                            st.error(download_link)
                    # Fetch documents (newest first)
                    documents = get_documents(container)
                    total_docs = len(documents)
                    if total_docs > 5:
                        documents_to_display = documents[:5]
                        st.info("Showing top 5 most recent documents.")
                    else:
                        documents_to_display = documents
                        st.info(f"Showing all {len(documents_to_display)} documents.")
                    if documents_to_display:
                        # π¨ Add Viewer/Editor selection
                        view_options = ['Show as Markdown', 'Show as Code Editor', 'Show as Edit and Save', 'Clone Document', 'New Record']
                        selected_view = st.selectbox("Select Viewer/Editor", view_options, index=2)
                        if selected_view == 'Show as Markdown':
                            # ποΈ Show each record as Markdown with navigation
                            total_docs = len(documents)
                            doc = documents[st.session_state.current_index]
                            st.markdown(f"#### Document ID: {doc.get('id', '')}")
                            # π΅οΈββοΈ Let's extract values from the JSON that have at least one space
                            values_with_space = []
                            def extract_values(obj):
                                # Recursively collect every string value containing a space.
                                if isinstance(obj, dict):
                                    for k, v in obj.items():
                                        extract_values(v)
                                elif isinstance(obj, list):
                                    for item in obj:
                                        extract_values(item)
                                elif isinstance(obj, str):
                                    if ' ' in obj:
                                        values_with_space.append(obj)
                            extract_values(doc)
                            # π Let's create a list of links for these values
                            st.markdown("#### π Links for Extracted Texts")
                            for term in values_with_space:
                                display_glossary_entity(term)
                            # Show the document content as markdown
                            content = json.dumps(doc, indent=2)
                            st.markdown(f"```json\n{content}\n```")
                            # Navigation buttons
                            col_prev, col_next = st.columns([1, 1])
                            with col_prev:
                                if st.button("β¬ οΈ Previous", key='prev_markdown'):
                                    if st.session_state.current_index > 0:
                                        st.session_state.current_index -= 1
                                        st.rerun()
                            with col_next:
                                if st.button("β‘οΈ Next", key='next_markdown'):
                                    if st.session_state.current_index < total_docs - 1:
                                        st.session_state.current_index += 1
                                        st.rerun()
                        elif selected_view == 'Show as Code Editor':
                            # π» Show each record in a code editor with navigation
                            total_docs = len(documents)
                            doc = documents[st.session_state.current_index]
                            st.markdown(f"#### Document ID: {doc.get('id', '')}")
                            doc_str = st.text_area("Edit Document", value=json.dumps(doc, indent=2), height=300, key=f'code_editor_{st.session_state.current_index}')
                            col_prev, col_next = st.columns([1, 1])
                            with col_prev:
                                if st.button("β¬ οΈ Previous", key='prev_code'):
                                    if st.session_state.current_index > 0:
                                        st.session_state.current_index -= 1
                                        st.rerun()
                            with col_next:
                                if st.button("β‘οΈ Next", key='next_code'):
                                    if st.session_state.current_index < total_docs - 1:
                                        st.session_state.current_index += 1
                                        st.rerun()
                            if st.button("πΎ Save Changes", key=f'save_button_{st.session_state.current_index}'):
                                try:
                                    updated_doc = json.loads(doc_str)
                                    success, message = update_record(container, updated_doc)
                                    if success:
                                        st.success(f"Document {updated_doc['id']} saved successfully.")
                                        st.session_state.selected_document_id = updated_doc['id']
                                        st.rerun()
                                    else:
                                        st.error(message)
                                except json.JSONDecodeError as e:
                                    st.error(f"Invalid JSON: {str(e)} π«")
                        elif selected_view == 'Show as Edit and Save':
                            # βοΈ Show as Edit and Save in columns
                            st.markdown("#### Edit the document fields below:")
                            # Create columns for each document
                            num_cols = len(documents_to_display)
                            cols = st.columns(num_cols)
                            for idx, (col, doc) in enumerate(zip(cols, documents_to_display)):
                                with col:
                                    st.markdown(f"##### Document ID: {doc.get('id', '')}")
                                    editable_id = st.text_input("ID", value=doc.get('id', ''), key=f'edit_id_{idx}')
                                    # Remove 'id' from the document for editing other fields
                                    editable_doc = doc.copy()
                                    editable_doc.pop('id', None)
                                    doc_str = st.text_area("Document Content (in JSON format)", value=json.dumps(editable_doc, indent=2), height=300, key=f'doc_str_{idx}')
                                    # Add the "Run With AI" button next to "Save Changes"
                                    col_save, col_ai = st.columns(2)
                                    with col_save:
                                        if st.button("πΎ Save Changes", key=f'save_button_{idx}'):
                                            try:
                                                updated_doc = json.loads(doc_str)
                                                updated_doc['id'] = editable_id  # Include the possibly edited ID
                                                success, message = update_record(container, updated_doc)
                                                if success:
                                                    st.success(f"Document {updated_doc['id']} saved successfully.")
                                                    st.session_state.selected_document_id = updated_doc['id']
                                                    st.rerun()
                                                else:
                                                    st.error(message)
                                            except json.JSONDecodeError as e:
                                                st.error(f"Invalid JSON: {str(e)} π«")
                                    with col_ai:
                                        if st.button("π€ Run With AI", key=f'run_with_ai_button_{idx}'):
                                            # Use the entire document as input
                                            search_glossary(json.dumps(editable_doc, indent=2))
                                    # Usage in your Streamlit app
                                    if st.button("π Clone Document", key=f'clone_button_{idx}'):
                                        with st.spinner("Cloning document..."):
                                            cloned_id = save_or_clone_to_cosmos_db(container, clone_id=doc['id'])
                                            if cloned_id:
                                                st.success(f"Document cloned successfully with new ID: {cloned_id}")
                                                st.rerun()
                                            else:
                                                st.error("Failed to clone document. Please try again.")
                        elif selected_view == 'Clone Document':
                            st.markdown("#### Clone Document:")
                            for idx, doc in enumerate(documents_to_display):
                                st.markdown(f"##### Document ID: {doc.get('id', '')}")
                                if st.button("π Clone Document", key=f'clone_button_{idx}'):
                                    cloned_doc = doc.copy()
                                    # Generate new unique IDs
                                    new_id = str(uuid.uuid4())
                                    cloned_doc['id'] = new_id
                                    if 'name' in cloned_doc:
                                        cloned_doc['name'] = f"{cloned_doc['name']}_clone_{new_id[:8]}"
                                    st.session_state.cloned_doc = cloned_doc
                                    st.session_state.cloned_doc_str = json.dumps(cloned_doc, indent=2)
                                    st.session_state.clone_mode = True
                                    st.rerun()
                            if st.session_state.get('clone_mode', False):
                                st.markdown("#### Edit Cloned Document:")
                                cloned_doc_str = st.text_area(
                                    "Edit JSON content below:",
                                    value=st.session_state.cloned_doc_str,
                                    height=300
                                )
                                if st.button("πΎ Save Cloned Document"):
                                    try:
                                        new_doc = json.loads(cloned_doc_str)
                                        success, message = insert_record(container, new_doc)
                                        if success:
                                            st.success(f"Cloned document saved successfully! π")
                                            st.session_state.selected_document_id = new_doc['id']
                                            st.session_state.clone_mode = False
                                            st.session_state.cloned_doc = None
                                            st.session_state.cloned_doc_str = ''
                                            st.rerun()
                                        else:
                                            st.error(message)
                                    except json.JSONDecodeError as e:
                                        st.error(f"Invalid JSON: {str(e)} π«")
                        elif selected_view == 'New Record':
                            # π New Record
                            st.markdown("#### Create a new document:")
                            if st.button("π€ Insert Auto-Generated Record"):
                                # NOTE(review): save_or_clone_to_cosmos_db returns a single id
                                # (or None), not a (success, message) tuple — this unpack will
                                # misbehave at runtime; confirm intended return contract.
                                success, message = save_or_clone_to_cosmos_db(container, query="Auto-generated", response="This is an auto-generated record.")
                                if success:
                                    st.success(message)
                                    st.rerun()
                                else:
                                    st.error(message)
                            else:
                                new_id = st.text_input("ID", value=generate_unique_id(), key='new_id')
                                new_doc_str = st.text_area("Document Content (in JSON format)", value='{}', height=300)
                                if st.button("β Create New Document"):
                                    try:
                                        new_doc = json.loads(new_doc_str)
                                        new_doc['id'] = new_id  # Use the provided ID
                                        success, message = insert_record(container, new_doc)
                                        if success:
                                            st.success(f"New document created with id: {new_doc['id']} π")
                                            st.session_state.selected_document_id = new_doc['id']
                                            # Switch to 'Show as Edit and Save' mode
                                            st.rerun()
                                        else:
                                            st.error(message)
                                    except json.JSONDecodeError as e:
                                        st.error(f"Invalid JSON: {str(e)} π«")
                    else:
                        st.sidebar.info("No documents found in this container. π")
                    # π Main content area
                    st.subheader(f"π Container: {st.session_state.selected_container}")
                    if st.session_state.selected_container:
                        if documents_to_display:
                            df = pd.DataFrame(documents_to_display)
                            st.dataframe(df)
                        else:
                            st.info("No documents to display. π§")
                    # π GitHub section
                    st.subheader("π GitHub Operations")
                    github_token = os.environ.get("GITHUB")  # Read GitHub token from environment variable
                    source_repo = st.text_input("Source GitHub Repository URL", value="https://github.com/AaronCWacker/AIExamples-8-24-Streamlit")
                    new_repo_name = st.text_input("New Repository Name (for cloning)", value=f"AIExample-Clone-{datetime.now().strftime('%Y%m%d_%H%M%S')}")
                    col1, col2 = st.columns(2)
                    with col1:
                        if st.button("π₯ Clone Repository"):
                            if github_token and source_repo:
                                try:
                                    local_path = f"./temp_repo_{datetime.now().strftime('%Y%m%d%H%M%S')}"
                                    download_github_repo(source_repo, local_path)
                                    zip_filename = f"{new_repo_name}.zip"
                                    create_zip_file(local_path, zip_filename[:-4])
                                    st.markdown(get_download_link(zip_filename), unsafe_allow_html=True)
                                    st.success("Repository cloned successfully! π")
                                except Exception as e:
                                    st.error(f"An error occurred: {str(e)} π’")
                                finally:
                                    # Clean up the temp checkout and the staging zip.
                                    if os.path.exists(local_path):
                                        shutil.rmtree(local_path)
                                    if os.path.exists(zip_filename):
                                        os.remove(zip_filename)
                            else:
                                st.error("Please ensure GitHub token is set in environment variables and source repository URL is provided. πβ")
                    with col2:
                        if st.button("π€ Push to New Repository"):
                            if github_token and source_repo:
                                try:
                                    g = Github(github_token)
                                    new_repo = create_repo(g, new_repo_name)
                                    local_path = f"./temp_repo_{datetime.now().strftime('%Y%m%d%H%M%S')}"
                                    download_github_repo(source_repo, local_path)
                                    push_to_github(local_path, new_repo, github_token)
                                    st.success(f"Repository pushed successfully to {new_repo.html_url} π")
                                except Exception as e:
                                    st.error(f"An error occurred: {str(e)} π’")
                                finally:
                                    if os.path.exists(local_path):
                                        shutil.rmtree(local_path)
                            else:
                                st.error("Please ensure GitHub token is set in environment variables and source repository URL is provided. πβ")
                    # π¬ Chat with Claude (uses the module-level Anthropic client)
                    st.subheader("π¬ Chat with Claude")
                    user_input = st.text_area("Message π¨:", height=100)
                    if st.button("Send π¨"):
                        if user_input:
                            response = client.messages.create(
                                model="claude-3-sonnet-20240229",
                                max_tokens=1000,
                                messages=[
                                    {"role": "user", "content": user_input}
                                ]
                            )
                            st.write("Claude's reply π§ :")
                            st.write(response.content[0].text)
                            filename = generate_filename(user_input, "md")
                            create_file(filename, user_input, response.content[0].text)
                            st.session_state.chat_history.append({"user": user_input, "claude": response.content[0].text})
                            # Save to Cosmos DB
                            save_to_cosmos_db(container, user_input, response.content[0].text, "")
                    # Display Chat History
                    st.subheader("Past Conversations π")
                    for chat in st.session_state.chat_history:
                        st.text_area("You said π¬:", chat["user"], height=100, disabled=True)
                        st.text_area("Claude replied π€:", chat["claude"], height=200, disabled=True)
                        st.markdown("---")
                    # File Editor (active when a file was opened from the sidebar)
                    if hasattr(st.session_state, 'current_file'):
                        st.subheader(f"Editing: {st.session_state.current_file} π ")
                        new_content = st.text_area("File Content βοΈ:", st.session_state.file_content, height=300)
                        if st.button("Save Changes πΎ"):
                            with open(st.session_state.current_file, 'w', encoding='utf-8') as file:
                                file.write(new_content)
                            st.success("File updated successfully! π")
                    # File Management (markdown files in the working directory)
                    st.sidebar.title("π File Management")
                    all_files = glob.glob("*.md")
                    all_files.sort(reverse=True)
                    if st.sidebar.button("π Delete All Files"):
                        for file in all_files:
                            os.remove(file)
                        st.rerun()
                    if st.sidebar.button("β¬οΈ Download All Files"):
                        zip_file = create_zip_of_files(all_files)
                        st.sidebar.markdown(get_download_link(zip_file), unsafe_allow_html=True)
                    for file in all_files:
                        col1, col2, col3, col4 = st.sidebar.columns([1,3,1,1])
                        with col1:
                            if st.button("π", key="view_"+file):
                                st.session_state.current_file = file
                                st.session_state.file_content = load_file(file)
                        with col2:
                            st.markdown(get_download_link(file), unsafe_allow_html=True)
                        with col3:
                            if st.button("π", key="edit_"+file):
                                st.session_state.current_file = file
                                st.session_state.file_content = load_file(file)
                        with col4:
                            if st.button("π", key="delete_"+file):
                                os.remove(file)
                                st.rerun()
        except exceptions.CosmosHttpResponseError as e:
            st.error(f"Failed to connect to Cosmos DB. HTTP error: {str(e)} π¨")
        except Exception as e:
            st.error(f"An unexpected error occurred: {str(e)} π±")
    # πͺ Logout button
    if st.session_state.logged_in and st.sidebar.button("πͺ Logout"):
        st.session_state.logged_in = False
        st.session_state.selected_records.clear()
        st.session_state.client = None
        st.session_state.selected_database = None
        st.session_state.selected_container = None
        st.session_state.selected_document_id = None
        st.session_state.current_index = 0
        st.rerun()
# Script entry point: only run the app when executed directly.
if __name__ == "__main__":
    main()