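"""Multilingual AI Newsletter Generator.

Scrapes the title and paragraph text from up to five URLs, translates the
content via the OpenAI chat API when a non-English language is selected,
and assembles everything into a newsletter through a simple Gradio UI.

Dependencies: requests, beautifulsoup4, openai (v1-style client), gradio,
python-dotenv. Expects OPENAI_API_KEY in the environment or a .env file.
"""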
import requests
from bs4 import BeautifulSoup
import openai
import gradio as gr
import os
from dotenv import load_dotenv
# Load environment variables
load_dotenv()
openai.api_key = os.getenv("OPENAI_API_KEY")

# Supported languages and their codes
LANGUAGES = {
    "English": "en",
    "Hindi": "hi",
    "Telugu": "te",
    "Kannada": "kn",
    "Malayalam": "ml",
}

# Function to scrape content from a URL
def scrape_content(url):
    try:
        # A timeout keeps one unresponsive site from hanging the whole app
        response = requests.get(url, timeout=10)
        # Fail early on HTTP errors instead of scraping an error page
        response.raise_for_status()
        soup = BeautifulSoup(response.content, 'html.parser')
        title = soup.find('title').get_text() if soup.find('title') else "No Title"
        paragraphs = soup.find_all('p')
        content = '\n'.join(para.get_text() for para in paragraphs)
        return title, content
    except Exception as e:
        return "Error", f"Failed to scrape content from {url}: {str(e)}"

# Function to translate content using OpenAI
def translate_content(content, target_language):
    if target_language == "en":
        return content  # No translation needed
    prompt = f"Translate the following content to {target_language}:\n\n{content}"
    response = openai.chat.completions.create(
        model="gpt-4o-mini",
        messages=[
            {"role": "system", "content": "You are a multilingual translator."},
            {"role": "user", "content": prompt}
        ]
    )
    return response.choices[0].message.content.strip()

# Function to create newsletter
def create_newsletter(contents, language):
    prompt = "Create a newsletter with the following content:\n\n"
    for title, body, url in contents:
        translated_body = translate_content(body, LANGUAGES[language])
        prompt += f"Title: {title}\nURL: {url}\n\n{translated_body}\n\n"
    response = openai.chat.completions.create(
        model="gpt-4o-mini",
        messages=[
            {"role": "system", "content": "You are a helpful assistant and an expert newsletter editor."},
            {"role": "user", "content": prompt}
        ]
    )
    return response.choices[0].message.content.strip()

# Function to process URLs and generate newsletter
def process_urls(url1, url2, url3, url4, url5, language):
    urls = [url for url in [url1, url2, url3, url4, url5] if url]
    if not urls:
        return "No URLs provided."
    contents = []
    for url in urls:
        title, content = scrape_content(url)
        contents.append((title, content, url))
    newsletter = create_newsletter(contents, language)
    return newsletter

# Gradio interface
iface = gr.Interface(
    fn=process_urls,
    inputs=[
        gr.Textbox(label="URL 1"),
        gr.Textbox(label="URL 2"),
        gr.Textbox(label="URL 3"),
        gr.Textbox(label="URL 4"),
        gr.Textbox(label="URL 5"),
        gr.Dropdown(choices=list(LANGUAGES.keys()), label="Select Language", value="English"),
    ],
    outputs="html",
    title="Multilingual AI Newsletter Generator",
    description="Enter up to 5 URLs to generate a newsletter in your preferred language. Copy and paste the output into your CMS tool for further editing.",
)

iface.launch()