import streamlit as st
from transformers import pipeline
# from PIL import Image
# import requests
# import torch
# from diffusers import DiffusionPipeline
from SeoKeywordResearch import SeoKeywordResearch
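# Note: SeoKeywordResearch is assumed here to be the SerpApi-backed keyword
# research helper; the api_key passed to it inside main() is assumed to be a
# SerpApi key.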


def main():
    # Load the text-generation model
    text_model_name = "EleutherAI/gpt-neo-1.3B"
    text_generator = pipeline("text-generation", model=text_model_name, tokenizer=text_model_name)
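    # Note: Streamlit reruns this script on every interaction, so the model is
    # reloaded each time. Wrapping the pipeline() call in a helper decorated
    # with @st.cache_resource would load it only once per session.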

    st.title("AI Blog Post Generator")

    kws = st.checkbox("Generate keywords automatically")
    if kws:
        keyword_research = SeoKeywordResearch(
            query='artificial intelligence',
            api_key='1d86ba79731e5b3c038fb9f75715883cab027b2f7b41b61ba76d59ec3b9e252d',
            lang='en',
            country='us',
            domain='google.com',
        )
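        # Note: the API key is hardcoded above; in a deployed app it would
        # usually be read from st.secrets or an environment variable instead.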
        data = {
            'auto_complete': keyword_research.get_auto_complete(),
            'related_searches': keyword_research.get_related_searches(),
            'related_questions': keyword_research.get_related_questions(depth_limit=1),
        }
        # keyword_research.save_to_txt(data)
        keyword_research.print_data(data)
        # Flatten the three suggestion lists into a single keyword list
        keywords = data["auto_complete"] + data["related_searches"] + data["related_questions"]
        st.text_input("Enter the title:")
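        # Note: the title entered above is not used by the generation step below.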
    else:
        raw_keywords = st.text_input("Enter relevant keywords (comma-separated):")
        keywords = [kw.strip() for kw in raw_keywords.split(",") if kw.strip()]

    # Button to generate the blog post
    if st.button("Generate Blog"):
        if keywords:
            # Generate one section of content per keyword
            for keyword in keywords:
                generated_text = text_generator(keyword, max_length=150, num_return_sequences=1, do_sample=True, temperature=0.7)[0]['generated_text']
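                # Note: max_length is the total token budget (prompt plus
                # generated text); max_new_tokens could be used instead to cap
                # only the newly generated part.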
                st.subheader(keyword)
                st.write(generated_text)

            # Close with a conclusion seeded by the first keyword
            st.header('Conclusion')
            generated_text = text_generator(keywords[0], max_length=150, num_return_sequences=1, do_sample=True, temperature=0.7)[0]['generated_text']
            st.write(generated_text)


if __name__ == "__main__":
    main()
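# Usage (assuming this file is saved as app.py):
#   streamlit run app.py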