# (Scrape artifact: Hugging Face Space page header — "Spaces:" / "Runtime error" —
#  converted to a comment so the file parses as Python.)
# --- Imports & Hub authentication ----------------------------------------
# Fixes: stripped the scrape artifacts (trailing "| |"), merged the duplicate
# `huggingface_hub` imports (HfApi and login were imported separately), and
# grouped imports stdlib-first per PEP 8.
import os
from datetime import datetime

import gradio as gr
from apscheduler.schedulers.background import BackgroundScheduler
from datasets import Dataset, concatenate_datasets, load_dataset
from huggingface_hub import HfApi, login

# Authenticate once at startup; HF_TOKEN must be set in the Space secrets.
# NOTE(review): if HF_TOKEN is unset this raises immediately — intentional
# fail-fast, since every later Hub call needs credentials.
login(os.getenv("HF_TOKEN"))
def upload_newsletter(date, newsletter_content):
    """Append one (date, content) row to the `m-ric/newsletter` Hub dataset.

    Args:
        date: Date string used as the row key (the caller passes YYYY-MM-DD).
        newsletter_content: Markdown newsletter text to store.

    If the existing dataset cannot be loaded (first run, or a transient Hub
    failure), a fresh single-row dataset is pushed instead.
    """
    new_rows = Dataset.from_dict({"date": [date], "content": [newsletter_content]})
    try:
        previous_dataset = load_dataset("m-ric/newsletter", split="train")
        final_dataset = concatenate_datasets([previous_dataset, new_rows])
    except Exception:
        # Deliberate best-effort fallback: start a new dataset rather than
        # crash. The original bare `except:` also swallowed
        # KeyboardInterrupt/SystemExit — narrowed to Exception.
        final_dataset = new_rows
    final_dataset.push_to_hub("m-ric/newsletter", split="train")
def restart_space():
    """Restart this generator Space via the Hub API and log the timestamp."""
    api = HfApi(token=os.getenv("HF_TOKEN", None))
    api.restart_space(repo_id="m-ric/multiagent_newsletter")
    print(f"Space restarted on {datetime.now()}")
# Restart the Space daily so the newsletter regenerates.
# FIX: the original comment said "every day at 9am" but the cron trigger
# fires at 07:00 (scheduler-local time; UTC on HF Spaces) — the code wins.
scheduler = BackgroundScheduler()
scheduler.add_job(restart_space, "cron", day_of_week="mon-sun", hour=7, minute=0)
scheduler.start()
### CREATE TOOL ##########
from transformers.agents import DuckDuckGoSearchTool

# Web-search tool the agent uses to gather news articles.
search_tool = DuckDuckGoSearchTool()
print("Search tool created.")
### CREATE LLM ENGINE ############
# The class below shows how an OpenAI-backed engine would be built; the app
# actually runs on the free HF Inference API engine further down.
from openai import OpenAI
from transformers.agents.llm_engine import MessageRole, get_clean_message_list

# OpenAI's chat API has no tool-response role, so map it onto "user".
openai_role_conversions = {
    MessageRole.TOOL_RESPONSE: MessageRole.USER,
}
class OpenAIEngine:
    """LLM-engine adapter that forwards agent message lists to the OpenAI chat API.

    Reads the API key from the OPENAI_API_KEY environment variable.
    """

    def __init__(self, model_name="gpt-4o"):
        self.model_name = model_name
        self.client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))

    def __call__(self, messages, stop_sequences=None):
        """Return the model's reply text for `messages`.

        Args:
            messages: Agent-format message list; roles are normalized for the
                OpenAI API via `openai_role_conversions`.
            stop_sequences: Optional list of stop strings. FIX: the original
                used a mutable default (`stop_sequences=[]`), which is shared
                across calls; `None` sentinel is the safe equivalent.
        """
        if stop_sequences is None:
            stop_sequences = []
        messages = get_clean_message_list(messages, role_conversions=openai_role_conversions)
        response = self.client.chat.completions.create(
            model=self.model_name,
            messages=messages,
            stop=stop_sequences,
            temperature=0.5,
        )
        return response.choices[0].message.content
# But instead we use HF one, since it's free:
from transformers import HfApiEngine, ReactCodeAgent

# ReAct-style code agent backed by the free HF Inference API, equipped with
# the web-search tool created above.
agent = ReactCodeAgent(
    llm_engine=HfApiEngine("meta-llama/Meta-Llama-3.1-70B-Instruct"),
    tools=[search_tool],
    max_iterations=10,
)
print("Agent initiated")
topics = "Large Language Models, Tech, GPUs, AI"

# FIX: the prompt's "final_answern the formatting" typo is corrected to
# "final_answer, the formatting" — the agent parses this instruction text.
# NOTE(review): the prompt tells the agent to call `google_search`, but the
# only tool registered above is DuckDuckGoSearchTool — confirm the tool's
# runtime name matches what the prompt references.
report = agent.run(
    f"""Give me a complete newsletter in markdown format of the latest developments in these topics: {topics}, with a consistent style and layout with URL.
Each topic should have its markdown-formatted analysis, including a rundown, detailed bullet points,
and a "Why it matters" section. For each topic there should be at least 3 articles with URL
Example Output:
# Top stories in AI today:\\n\\n
- AI takes spotlight in Super Bowl commercials\\n
- Altman seeks TRILLIONS for global AI chip initiative\\n\\n
## AI takes spotlight in Super Bowl commercials\\n\\n
**url:** https://example.com/story1 \\n\\
**The Rundown:** AI made a splash in this year\'s Super Bowl commercials...\\n\\n
**The details:**...\\n\\n
**Why it matters::**...\\n\\n
## Altman seeks TRILLIONS for global AI chip initiative\\n\\n
**The Rundown:** OpenAI CEO Sam Altman is reportedly angling to raise TRILLIONS of dollars...\\n\\n'
**The details:**...\\n\\n
**Why it matters::**...\\n\\n
--- end of example.
Don't worry about formatting too much, just dump results into a huge final string and return it with final_answer, the formatting will be handled later.
Also, gather all news at once using a for loop on all topics with google_search, it will make things much quicker.
Make sure that you didn't forget any topic! If no news are found for any specific topic, specify 'No news on this topic were found in the last 24 hours'."""
)
# Persist today's report to the Hub dataset, keyed by ISO date.
date = datetime.today().strftime("%Y-%m-%d")
upload_newsletter(date, report)
print("Report pushed to dataset!")
# Minimal UI: show the generated newsletter.
# BUG FIX: the original referenced `clean_report` and `intermediate_report`,
# neither of which is defined anywhere in this script, so the Blocks context
# raised NameError at startup (consistent with the Space's "Runtime error"
# banner). `report` is the only newsletter variable actually produced above,
# so it is displayed and the "before reformulation" section (whose variable
# never existed) is dropped.
# NOTE(review): the "β"/"π" characters in the heading look like mojibake for
# emoji — left byte-identical; confirm intended glyphs before changing.
with gr.Blocks() as demo:
    gr.Markdown(f"{date}, topics: {topics}\n### β Newsletter generated! Content below π")
    gr.Markdown(report)

demo.launch()