import json
import logging
import os
from datetime import datetime

import boto3
import gradio as gr
from dotenv import load_dotenv

from agents.accountability_agent import AccountabilityAgent
from agents.outline_agent import OutlineAgent
from agents.research_agent import ResearchAgent
from agents.synthesis_agent import SynthesisAgent
from agents.thinking_ladder_agent import ThinkingLadderAgent

logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s")
load_dotenv()

# Agents are instantiated once at import time and reused for every query.
outline_agent = OutlineAgent()
research_agent = ResearchAgent()
thinking_ladder_agent = ThinkingLadderAgent()
synthesis_agent = SynthesisAgent()
accountability_agent = AccountabilityAgent()


def save_logs_and_upload_to_s3(logs, bucket_name: str) -> bool:
    """Write *logs* to a timestamped local JSON file and upload it to S3.

    The file is named ``logs_<YYYYMMDD_HHMMSS>.json`` and is left on local
    disk after the upload attempt (it doubles as a local backup).

    Args:
        logs: JSON-serializable object (here, a list of per-query log dicts).
        bucket_name: Target S3 bucket name.

    Returns:
        True if the upload succeeded, False otherwise (the error is logged,
        never raised — a failed upload must not break the user-facing flow).
    """
    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    filename = f"logs_{timestamp}.json"

    with open(filename, "w") as f:
        json.dump(logs, f, indent=4)

    # Credentials come from the environment (populated via load_dotenv()).
    s3_client = boto3.client(
        "s3",
        aws_access_key_id=os.getenv("AWS_ACCESS_KEY_ID"),
        aws_secret_access_key=os.getenv("AWS_SECRET_ACCESS_KEY"),
        region_name=os.getenv("AWS_DEFAULT_REGION"),
    )

    try:
        s3_client.upload_file(filename, bucket_name, filename)
        # Fixed: the message previously printed the literal "(unknown)"
        # instead of the actual filename.
        logging.info(f"File {filename} successfully uploaded to {bucket_name}")
        return True
    except Exception as e:
        # Broad catch is deliberate: upload is best-effort and must not
        # propagate into the Gradio request handler.
        logging.error(f"Error uploading {filename} to S3: {e}")
        return False


def main(query: str) -> tuple[str, str, float]:
    """Run the full agent pipeline for a user query.

    Pipeline order: outline -> research -> thinking ladder -> synthesis ->
    accountability. Each stage's output and metrics are recorded in a log
    entry that is uploaded to S3 at the end.

    Args:
        query: Free-text user question.

    Returns:
        Tuple of (synthesis text, accountability text, total cost in dollars
        summed over all five agents).
    """
    log_entry: dict = {"query": query}
    logging.info(f"Received query: {query}")

    main_title, themes, metrics_a = outline_agent.run(query)
    logging.info(f"OutlineAgent output - Main Title: {main_title}, Themes: {themes}, Metrics: {metrics_a}")
    log_entry["outline_agent"] = {"main_title": main_title, "themes": themes, "metrics": metrics_a}

    sources, metrics_b = research_agent.run(themes)
    logging.info(f"ResearchAgent output - Sources: {sources}, Metrics: {metrics_b}")
    log_entry["research_agent"] = {"sources": sources, "metrics": metrics_b}

    classified_sources, metrics_c = thinking_ladder_agent.run(themes, sources)
    logging.info(f"ThinkingLadderAgent output - Classified Sources: {classified_sources}, Metrics: {metrics_c}")
    log_entry["thinking_ladder_agent"] = {"classified_sources": classified_sources, "metrics": metrics_c}

    synthesis, metrics_d = synthesis_agent.run(main_title, themes, classified_sources)
    logging.info(f"SynthesisAgent output - Synthesis: {synthesis}, Metrics: {metrics_d}")
    log_entry["synthesis_agent"] = {"synthesis": synthesis, "metrics": metrics_d}

    accountability, metrics_e = accountability_agent.run(query, synthesis)
    logging.info(f"AccountabilityAgent output - Accountability: {accountability}, Metrics: {metrics_e}")
    log_entry["accountability_agent"] = {"accountability": accountability, "metrics": metrics_e}

    # Sum the per-agent costs (assumes each metrics dict has a "cost" key).
    cost = sum(m["cost"] for m in (metrics_a, metrics_b, metrics_c, metrics_d, metrics_e))
    logging.info(f"Total Cost: {cost}")
    log_entry["total_cost"] = cost

    # NOTE(review): bucket name reads "bakcup" — confirm this is the actual
    # bucket name and not a typo carried into the AWS console.
    save_logs_and_upload_to_s3([log_entry], "dediro-bakcup-1")

    return synthesis, accountability, cost


if __name__ == "__main__":
    # Guarded so importing this module (e.g. for tests) does not launch the UI.
    gr.Interface(
        fn=main,
        inputs=gr.Textbox(label="Query:"),
        outputs=[
            gr.Textbox(label="Generated Synthesis:"),
            gr.Textbox(label="Accountability:"),
            gr.Number(label="Cost ($):"),
        ],
    ).launch()