import requests
import torch
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer
from datetime import datetime

# Load GPT-2 once at startup and move it to the GPU when one is available.
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

model_name = "gpt2"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name).to(device)

# NewsAPI key for the /v2/everything endpoint.
news_api_key = "35cbd14c45184a109fc2bbb5fff7fb1b"


def fetch_trending_topics(search_term="artificial intelligence OR machine learning", page=1, page_size=9):
    try:
        # Request a few extra articles so duplicates can be dropped without falling short of page_size.
        url = (
            "https://newsapi.org/v2/everything"
            f"?q={search_term}&sortBy=publishedAt&pageSize={page_size + 5}"
            f"&page={page}&language=en&apiKey={news_api_key}"
        )
        response = requests.get(url, timeout=10)
        data = response.json()

        if response.status_code == 200 and "articles" in data:
            trending_topics = []
            seen_titles = set()
            for article in data["articles"]:
                title = article["title"]
                if title not in seen_titles:
                    seen_titles.add(title)
                    trending_topics.append({
                        "title": title,
                        "description": article["description"] if article["description"] else "No description available.",
                        "url": article["url"],
                        "publishedAt": article["publishedAt"],
                    })

            if not trending_topics:
                return [{"title": "No news available", "description": "", "url": "", "publishedAt": ""}]

            return trending_topics
        else:
            print(f"Error: {data.get('message', 'No articles found')}")
            return [{"title": "No news available", "description": "", "url": "", "publishedAt": ""}]
    except Exception as e:
        print(f"Error fetching news: {e}")
        return [{"title": "Error fetching news", "description": "", "url": "", "publishedAt": ""}]


def generate_analysis(trending_topic):
    input_text = (
        f"Provide a concise analysis about the following topic: '{trending_topic['title']}'. "
        "Please summarize its significance in the AI and Machine Learning field."
    )

    inputs = tokenizer(input_text, return_tensors="pt").to(device)
    # max_new_tokens caps only the generated continuation (max_length would also count the prompt tokens);
    # GPT-2 has no pad token, so EOS is reused to avoid a warning during sampling.
    outputs = model.generate(
        **inputs,
        max_new_tokens=80,
        num_return_sequences=1,
        do_sample=True,
        top_k=50,
        top_p=0.95,
        pad_token_id=tokenizer.eos_token_id,
    )

    analysis = tokenizer.decode(outputs[0], skip_special_tokens=True)
    return analysis
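
# Note (an optional refinement, not in the original): tokenizer.decode(outputs[0], ...) returns the prompt
# followed by the generated continuation. To surface only the newly generated text, the prompt tokens can
# be sliced off first:
#   new_tokens = outputs[0][inputs["input_ids"].shape[-1]:]
#   analysis = tokenizer.decode(new_tokens, skip_special_tokens=True)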


def analyze_trends(page=1, page_size=9):
    search_term = "artificial intelligence OR machine learning"
    trending_topics = fetch_trending_topics(search_term=search_term, page=page, page_size=page_size)
    topic_analysis = []

    for topic in trending_topics:
        if topic["title"] not in ["Error fetching news", "No news available"]:
            analysis = generate_analysis(topic)
            topic_analysis.append({
                "title": topic["title"],
                "description": topic["description"],
                "analysis": analysis,
                "url": topic["url"],
                "publishedAt": topic["publishedAt"],
            })
        else:
            topic_analysis.append({
                "title": topic["title"],
                "description": topic["description"],
                "analysis": "Unable to retrieve or analyze data.",
                "url": topic["url"],
                "publishedAt": topic["publishedAt"],
            })

    return topic_analysis[:page_size]
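
# Every button click re-queries NewsAPI and re-runs generation. A minimal per-process cache is one way
# to make paging back and forth cheaper (a sketch, assuming repeated identical (page, page_size) calls):
#   from functools import lru_cache
#   analyze_trends = lru_cache(maxsize=16)(analyze_trends)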


def display_news_cards(page=1, page_size=9):
    analysis_results = analyze_trends(page=page, page_size=page_size)
    current_date = datetime.now().strftime("%d-%m-%Y")

    # The output component is gr.HTML, so build an HTML heading rather than Markdown.
    display = f"<h3>AI & Machine Learning News for {current_date}</h3>"

    display += "<div style='display:flex; flex-wrap:wrap; justify-content:space-between;'>"
    for news_item in analysis_results:
        display += f"""
        <div style='flex: 1 1 30%; border:1px solid black; margin:10px; padding:10px; box-sizing:border-box;'>
            <b>{news_item['title']}</b><br/>
            <i>{news_item['publishedAt']}</i><br/><br/>
            {news_item['description']}<br/><br/>
            <a href='{news_item['url']}' target='_blank'>Read more</a><br/><br/>
            <b>Analysis:</b> {news_item['analysis']}<br/><br/>
        </div>
        """
    display += "</div>"

    return display
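
# Titles and descriptions from the feed are interpolated into the HTML verbatim; wrapping them with
# html.escape() (from the standard library) is a reasonable safeguard if untrusted markup is a concern:
#   import html
#   safe_title = html.escape(news_item["title"])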


def gradio_interface():
    with gr.Blocks() as demo:
        gr.Markdown("""<h1 style='text-align:center; color:white; background-color:#007BFF; padding:20px; border-radius:10px;'>AI & Machine Learning News Analyzer</h1>""", elem_id="header")

        gr.Markdown("<p style='text-align:center;'>Search term: <b>artificial intelligence OR machine learning</b></p>")

        page = gr.Slider(minimum=1, maximum=5, step=1, label="Page Number", value=1)
        page_size = gr.Slider(minimum=6, maximum=15, step=3, label="News per Page", value=9)

        analyze_button = gr.Button("Submit")

        news_output = gr.HTML()

        analyze_button.click(display_news_cards, inputs=[page, page_size], outputs=news_output)

    return demo


if __name__ == "__main__":
    gradio_interface().launch()
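
# launch() serves the app locally (http://127.0.0.1:7860 by default); passing share=True asks Gradio to
# create a temporary public link if remote access is needed.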