# CrewAI Research Tool — Gradio app (Hugging Face Space)
import os
import gradio as gr
import cohere
from crewai import Agent, Task, Crew, Process
from langchain_groq import ChatGroq
from langchain_community.tools import DuckDuckGoSearchRun, DuckDuckGoSearchResults
from crewai_tools import tool, SeleniumScrapingTool, ScrapeWebsiteTool
# Define the DuckDuckGoSearch tool using the decorator for tool registration
# @tool('DuckDuckGoSearch')
# def search(search_query: str):
#     """Search the web for information on a given topic."""
#     return DuckDuckGoSearchRun().run(search_query)

# Define the DuckDuckGoSearch tool
def search_results(search_query: str):
    """
    Search the web with DuckDuckGo and return the raw search results.

    Automates retrieval of web-based information for a given query, so that
    multiple sources can be consulted without a manual browser search —
    useful for automated data gathering and research workflows.

    Parameters:
    - search_query (str): A clear, concise expression of the information
      needed; this is passed verbatim to the DuckDuckGo search engine.

    Returns:
    - The output of ``DuckDuckGoSearchResults().run`` — a formatted string
      listing, for each hit, a snippet, the page title, and the link.
      (NOTE(review): the tool returns a single formatted string, not a list
      of dicts — confirm against the installed langchain_community version.)

    Example:
    - Input: search_results("Generative AI in Telecom and Media")
    - Output: [snippet: The telecommunications and media industry is at the
      forefront of integrating generative AI ..., title: Generative AI in the
      telecom industry | Google Cloud Blog, link: https://cloud.google.com/...]
    """
    engine = DuckDuckGoSearchResults()
    return engine.run(search_query)
# Retrieve the Cohere API key from environment variables.
# NOTE(review): there is no None-check here — if COHERE_API_KEY is unset the
# client is constructed with a None key and requests will fail only at call
# time (inside web_scrapper). Consider validating like GROQ_API_KEY below.
cohere_api_key = os.getenv('COHERE_API_KEY')
co = cohere.Client(cohere_api_key)
def web_scrapper(url: str, topic: str):
    """
    Scrape a web page and summarize its content on a specific topic.

    Fetches the page at *url* with ScrapeWebsiteTool (HTTP request + HTML
    parsing), then asks Cohere's chat API to produce a topic-focused summary
    of the extracted text. Useful for web scraping, data collection, or
    pulling specific information out of websites.

    Args:
    - url (str): The URL from which to scrape content.
    - topic (str): The specific topic on which to generate a summary.

    Returns:
    - The summary text produced by the Cohere model.
    """
    # Extract the page content first.
    page_text = ScrapeWebsiteTool(website_url=url).run()

    # Build a single prompt that pins the summary to the requested topic.
    summary_prompt = (
        f"Generate a summary of the following content on the topic ## {topic} ### \n\nCONTENT:\n\n"
        + page_text
    )

    # Low temperature keeps the summary close to the source material;
    # prompt_truncation='AUTO' lets Cohere trim over-long page content.
    reply = co.chat(
        model='command-r-plus',
        message=summary_prompt,
        temperature=0.2,
        chat_history=[],
        prompt_truncation='AUTO',
    )
    return reply.text
def kickoff_crew(topic: str) -> str:
    """
    Kickoff the research process for a given topic using CrewAI components.

    Builds a two-agent crew — a researcher (web search + scraping tools) and
    an editor — both backed by Groq's llama3-70b model, then runs their tasks
    sequentially: research first, then editing of the research draft.

    Args:
    - topic (str): the subject to research and report on; interpolated into
      the agents' goals and task descriptions via the {topic} placeholder.

    Returns:
    - The crew's final report on success (the raw crew.kickoff() result,
      which Gradio renders as Markdown), or an "Error: ..." string if
      anything fails — including a missing GROQ_API_KEY.
    """
    # Fix: the docstring used to sit *inside* the try block (making it a
    # plain statement, not a docstring), and the annotation said -> dict
    # although the function never returns a dict.
    try:
        # Retrieve the API key from the environment variables.
        groq_api_key = os.environ.get("GROQ_API_KEY")
        if not groq_api_key:
            raise ValueError("API Key for Groq is not set in environment variables")

        # Initialize the Groq large language model shared by both agents.
        groq_llm_70b = ChatGroq(temperature=0, groq_api_key=groq_api_key, model_name="llama3-70b-8192")

        # Define Agents with Groq LLM
        researcher = Agent(
            role='Researcher',
            goal='Collect detailed information on {topic}',
            tools=[search_results, web_scrapper],
            llm=groq_llm_70b,  # Assigning the Groq LLM here
            backstory=(
                "As a diligent researcher, you explore the depths of the internet to "
                "unearth crucial information and insights on the assigned topics. "
                "With a keen eye for detail and a commitment to accuracy, you meticulously document every source "
                "and piece of data gathered. Your research is thorough, ensuring that no stone is left unturned. "
                "This dedication not only enhances the quality of the information but also ensures "
                "reliability and trustworthiness in your findings."
                "To achieve your objectives, think carefully about the {topic}, develop strategies to expand "
                "and follows a step-by-step strategy to build conclusions."
            ),
            allow_delegation=False,
            max_iter=5,  # cap tool-use iterations so a bad search loop can't run forever
            verbose=True,  # Optional
        )

        editor = Agent(
            role='Editor',
            goal='Compile and refine the information into a comprehensive report on {topic}',
            llm=groq_llm_70b,  # Assigning the Groq LLM here
            backstory=(
                "With a keen eye for detail and a strong command of language, you transform "
                "raw data into polished, insightful reports that are both informative and engaging. "
                "Your expertise in editing ensures that every report is not only thorough but also "
                "clearly communicates the key findings in a manner that is accessible to all readers. "
                "As an editor, your role is crucial in shaping the final presentation of data, making "
                "complex information easy to understand and appealing to the audience."
            ),
            allow_delegation=False,
            max_iter=3,
            verbose=True,  # Optional
        )

        # Define Tasks
        research_task = Task(
            description=(
                "First, use DuckDuckGoResults tool to gather snippets from search results about ## {topic} ##. "
                "If you need to expand the search on the ## {topic} ##, generate new search queries. "
                "Then, use the WebScrapper tool to extract additional information and insights from all links or URLs that appear significant regarding {topic} after analyzing the snippets of the search results. "
                "Compile your findings into an initial draft, ensuring to include all sources with their titles and links relevant to the topic. "
                "Throughout this process, maintain a high standard of accuracy and ensure that no information is fabricated or misrepresented."
            ),
            expected_output=(
                "A draft report containing all relevant information about the topic and sources used. "
                "The report should be well-structured, including an introduction, a detailed body with organized sections according to different aspects of the topic, and a conclusion. "
                "Each section should cite sources accurately and provide a comprehensive overview of the findings."
            ),
            agent=researcher
        )

        # Typo fixes in the prompt text below: "follwint" -> "following",
        # "scrapping" -> "scraping".
        edit_task = Task(
            description=(
                "Review and refine the draft report produced by the research task. Organize the content methodically, "
                "ensuring that the structure is logical and enhances the flow of information. Check all factual data for accuracy, "
                "correct any discrepancies, and ensure that the information is current and well-supported by sources. "
                "Enhance the readability of the report by improving language clarity, adjusting sentence structure, and ensuring consistency in tone. "
                "Include a dedicated section that lists all sources used in the research_task. "
                "Each source used in the analysis should be presented as a bullet point in the following format: "
                "- title: link "
                "Ensure that all sources you include in the final report exist by scraping them if necessary. "
                "This section should be comprehensive, clearly formatted, and easy to navigate, providing full transparency on the references used."
            ),
            expected_output=(
                "A finalized comprehensive report on ## {topic} ##. The report should be polished, with a clear and engaging narrative "
                "that accurately reflects the research findings. It should include an introduction, a detailed and extensive discussion section, a concise conclusion, "
                "and a well-organized source list. The document should be free of grammatical errors and ready for publication or presentation."
            ),
            agent=editor,
            context=[research_task]
        )

        # Forming the Crew: sequential process runs research_task, then edit_task.
        crew = Crew(
            agents=[researcher, editor],
            tasks=[research_task, edit_task],
            process=Process.sequential,
        )

        # Kick-off the research process
        result = crew.kickoff(inputs={'topic': topic})
        return result
    except Exception as e:
        # Surface any failure as a plain string so the Gradio UI can show it
        # instead of crashing the request.
        return f"Error: {str(e)}"
def main():
    """Build and launch the Gradio interface for the CrewAI Research Tool."""
    with gr.Blocks() as demo:
        gr.Markdown("## CrewAI Research Tool")
        topic_box = gr.Textbox(label="Enter Topic", placeholder="Type here...")
        start_btn = gr.Button("Start Research")
        report_md = gr.Markdown(label="Result")
        # Wire the button: the topic text is fed to kickoff_crew and its
        # returned report (or error string) renders in the Markdown pane.
        start_btn.click(fn=kickoff_crew, inputs=topic_box, outputs=report_md)

    # Queue requests (at most 3 pending) and keep the HTTP API closed.
    demo.queue(api_open=False, max_size=3).launch()
# Standard script entry point: launch the Gradio app when run directly.
if __name__ == "__main__":
    main()