import os

import gradio as gr
from transformers import Tool
from transformers.agents import (
    ReactCodeAgent,
    ReactJsonAgent,
    HfApiEngine,
    ManagedAgent,
    stream_to_gradio,
)
from transformers.agents.search import DuckDuckGoSearchTool
import requests
from markdownify import markdownify as md
from requests.exceptions import RequestException
import re
import spaces
from huggingface_hub import login

# Read the Hugging Face API token from the environment variable
hf_token = os.getenv("HF_TOKEN")

# Authenticate with the Hugging Face API
login(token=hf_token)


class VisitWebpageTool(Tool):
    """
    A tool to visit a webpage and return its content as a markdown string.
    """

    name = "visit_webpage"
    description = "Visits a webpage at the given URL and returns its content as a markdown string."
    inputs = {
        "url": {
            "type": "text",
            "description": "The URL of the webpage to visit.",
        }
    }
    output_type = "text"

    def forward(self, url: str) -> str:
        """
        Fetch the webpage content and convert it to markdown.
        """
        try:
            response = requests.get(url)
            response.raise_for_status()
            markdown_content = md(response.text).strip()
            markdown_content = re.sub(r"\n{3,}", "\n\n", markdown_content)
            return markdown_content
        except RequestException as e:
            return f"Error fetching the webpage: {str(e)}"
        except Exception as e:
            return f"An unexpected error occurred: {str(e)}"


# Initialize the LLM engine with the Hugging Face API token
llm_engine = HfApiEngine(model="meta-llama/Meta-Llama-3.1-70B-Instruct")

# Initialize the web agent with necessary tools and engine
web_agent = ReactJsonAgent(
    tools=[DuckDuckGoSearchTool(), VisitWebpageTool()],
    llm_engine=llm_engine,
    max_iterations=10,
)

# Create a managed web agent
managed_web_agent = ManagedAgent(
    agent=web_agent,
    name="search_agent",
    description="Runs web searches for you. Give it your query as an argument.",
)

# Initialize the manager agent with the managed web agent
manager_agent = ReactCodeAgent(
    tools=[],
    llm_engine=llm_engine,
    managed_agents=[managed_web_agent],
    additional_authorized_imports=["time", "datetime"],
)


@spaces.GPU(duration=120)
def interact_with_agent(task):
    """
    Interact with the agent and stream the responses to Gradio.
    """
    messages = []
    messages.append(gr.ChatMessage(role="user", content=task))
    yield messages
    for msg in stream_to_gradio(manager_agent, task):
        messages.append(msg)
        yield messages + [
            gr.ChatMessage(role="assistant", content="⏳ Task not finished yet!")
        ]
    yield messages


# Create the Gradio interface
with gr.Blocks() as demo:
    text_input = gr.Textbox(
        lines=1,
        label="Chat Message",
        value="How many years ago was Stripe founded?",
    )
    submit = gr.Button("Run multi-agent system!")
    chatbot = gr.Chatbot(
        label="Agent",
        type="messages",
        avatar_images=(
            None,
            "https://em-content.zobj.net/source/twitter/53/robot-face_1f916.png",
        ),
    )
    submit.click(interact_with_agent, [text_input], [chatbot])

if __name__ == "__main__":
    demo.launch()