Spaces:
Sleeping
Sleeping
Upload 23 files
Browse files- app.py +29 -0
- assests/image.png +0 -0
- requirements.txt +5 -0
- src/agents/__init__.py +0 -0
- src/agents/__pycache__/__init__.cpython-311.pyc +0 -0
- src/agents/__pycache__/blog_writer_agent.cpython-311.pyc +0 -0
- src/agents/__pycache__/linkedin_post_agent.cpython-311.pyc +0 -0
- src/agents/__pycache__/topic_researcher_agent.cpython-311.pyc +0 -0
- src/agents/blog_writer_agent.py +24 -0
- src/agents/linkedin_post_agent.py +23 -0
- src/agents/topic_researcher_agent.py +24 -0
- src/crews/__init__.py +0 -0
- src/crews/__pycache__/__init__.cpython-311.pyc +0 -0
- src/crews/__pycache__/mycrews.cpython-311.pyc +0 -0
- src/crews/mycrews.py +15 -0
- src/tasks/__init__.py +0 -0
- src/tasks/__pycache__/__init__.cpython-311.pyc +0 -0
- src/tasks/__pycache__/blog_writer_agent_task.cpython-311.pyc +0 -0
- src/tasks/__pycache__/linkedin_post_agent_task.cpython-311.pyc +0 -0
- src/tasks/__pycache__/topic_researcher_agent_task.cpython-311.pyc +0 -0
- src/tasks/blog_writer_agent_task.py +17 -0
- src/tasks/linkedin_post_agent_task.py +17 -0
- src/tasks/topic_researcher_agent_task.py +17 -0
app.py
ADDED
@@ -0,0 +1,29 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
"""Streamlit front-end: collects a topic and runs the CrewAI pipeline."""
import os

import streamlit as st
from dotenv import load_dotenv

from src.crews.mycrews import crew

load_dotenv()

# Enable LangSmith tracing, forwarding only the variables that are actually
# set: assigning None to os.environ raises TypeError and would crash the app
# on startup when a variable is missing from .env.
os.environ['LANGSMITH_TRACING'] = "true"
for _var in ("LANGSMITH_ENDPOINT", "LANGSMITH_API_KEY", "LANGSMITH_PROJECT"):
    _value = os.getenv(_var)
    if _value is not None:
        os.environ[_var] = _value

st.set_page_config("LinkedIn Post Generator 📝")

st.sidebar.title("LinkedIn Post Generator 📝")

# Forward slash keeps the path portable: the original "assests\image.png"
# contains the invalid escape "\i" (SyntaxWarning on Python 3.12+) and fails
# on the Linux hosts Spaces run on.
# NOTE(review): the directory really is spelled "assests" in this repo.
st.sidebar.image("assests/image.png")

st.header("Automatic LinkedIn Post Generator 📃📝")

st.caption("Made by Samagra Shrivastava with ♥")

topic = st.sidebar.text_input("Enter the topic you are interested")

if topic:
    inputs = {"topic": topic}
    # Spinner covers the (slow) multi-agent run; the result renders after.
    with st.spinner("Crew's 👷♂️ are actively working to generate your post...⏳⏲"):
        result = crew.kickoff(inputs=inputs)
    st.subheader(result)
|
assests/image.png
ADDED
![]() |
requirements.txt
ADDED
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
crewai
|
2 |
+
streamlit
|
3 |
+
crewai-tools
|
4 |
+
python-dotenv
|
5 |
+
langchain_together
|
src/agents/__init__.py
ADDED
File without changes
|
src/agents/__pycache__/__init__.cpython-311.pyc
ADDED
Binary file (162 Bytes). View file
|
|
src/agents/__pycache__/blog_writer_agent.cpython-311.pyc
ADDED
Binary file (1.27 kB). View file
|
|
src/agents/__pycache__/linkedin_post_agent.cpython-311.pyc
ADDED
Binary file (1.2 kB). View file
|
|
src/agents/__pycache__/topic_researcher_agent.cpython-311.pyc
ADDED
Binary file (1.13 kB). View file
|
|
src/agents/blog_writer_agent.py
ADDED
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
"""Defines the Blog Writer agent used by the content-generation crew."""
import os

from crewai import Agent
from dotenv import load_dotenv
from crewai_tools import SerperDevTool
from langchain_together import ChatTogether

load_dotenv()

# Export keys only when present: `os.environ[k] = None` raises TypeError and
# would break this module's import when a variable is missing from .env.
for _key in ("SERPER_API_KEY", "OPENAI_API_KEY"):
    _value = os.getenv(_key)
    if _value is not None:
        os.environ[_key] = _value

# LLM backing the agent; the Together model name comes from the environment.
# assumes MODEL_NAME is set in .env — TODO confirm deployment config.
llm = ChatTogether(
    model=os.getenv("MODEL_NAME")
)

# Web-search tool made available to the agent.
tools = SerperDevTool()

blog_writer_agent = Agent(
    role="Blog Writer",
    goal="Write a comprehensive blog post from the only 1 article provided by the Topic Researcher, covering all necessary sections.",
    backstory="Experienced in creating in-depth, well-structured blog posts that explain technical concepts clearly and engage readers from introduction to conclusion.",
    tools=[tools],
    llm=llm,
    # Allowed to hand work back to other agents in the crew.
    allow_delegation=True,
)
|
src/agents/linkedin_post_agent.py
ADDED
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
"""Defines the LinkedIn Post Creator agent used by the content-generation crew."""
import os

from crewai import Agent
from dotenv import load_dotenv
from crewai_tools import SerperDevTool
from langchain_together import ChatTogether

load_dotenv()

# Export keys only when present: `os.environ[k] = None` raises TypeError and
# would break this module's import when a variable is missing from .env.
for _key in ("SERPER_API_KEY", "OPENAI_API_KEY"):
    _value = os.getenv(_key)
    if _value is not None:
        os.environ[_key] = _value

# LLM backing the agent; the Together model name comes from the environment.
# assumes MODEL_NAME is set in .env — TODO confirm deployment config.
llm = ChatTogether(
    model=os.getenv("MODEL_NAME")
)

# Web-search tool made available to the agent.
tools = SerperDevTool()

linkedin_post_agent = Agent(
    role="LinkedIn Post Creator",
    goal="Create a concise LinkedIn post summary from the transcription provided by the Topic Researcher.",
    # Typos fixed ("enagaging" -> "engaging", "hastags" -> "hashtags"): this
    # string is fed to the LLM as part of the prompt.
    backstory="Expert in crafting engaging LinkedIn posts that summarize complex topics and include trending hashtags for maximum visibility.",
    tools=[tools],
    llm=llm,
)
|
src/agents/topic_researcher_agent.py
ADDED
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
"""Defines the Topic Researcher agent used by the content-generation crew."""
import os

from crewai import Agent
from dotenv import load_dotenv
from crewai_tools import SerperDevTool
from langchain_together import ChatTogether

load_dotenv()

# Export keys only when present: `os.environ[k] = None` raises TypeError and
# would break this module's import when a variable is missing from .env.
for _key in ("SERPER_API_KEY", "OPENAI_API_KEY"):
    _value = os.getenv(_key)
    if _value is not None:
        os.environ[_key] = _value

# LLM backing the agent; the Together model name comes from the environment.
# assumes MODEL_NAME is set in .env — TODO confirm deployment config.
llm = ChatTogether(
    model=os.getenv("MODEL_NAME")
)

# Web-search tool made available to the agent.
tools = SerperDevTool()

topic_researcher_agent = Agent(
    role="Topic Researcher",
    goal="Search for only 1 relevant resource on the topic {topic} from the web.",
    backstory="Expert in finding and analyzing relevant content from web.",
    tools=[tools],
    llm=llm,
    # Allowed to hand work back to other agents in the crew.
    allow_delegation=True
)
|
src/crews/__init__.py
ADDED
File without changes
|
src/crews/__pycache__/__init__.cpython-311.pyc
ADDED
Binary file (161 Bytes). View file
|
|
src/crews/__pycache__/mycrews.cpython-311.pyc
ADDED
Binary file (948 Bytes). View file
|
|
src/crews/mycrews.py
ADDED
@@ -0,0 +1,15 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
"""Wires the three agents and their tasks into a single sequential crew."""
from crewai import Crew, Process

from src.agents.topic_researcher_agent import topic_researcher_agent
from src.agents.blog_writer_agent import blog_writer_agent
from src.agents.linkedin_post_agent import linkedin_post_agent

from src.tasks.topic_researcher_agent_task import topic_researcher_agent_task
from src.tasks.blog_writer_agent_task import blog_writer_agent_task
from src.tasks.linkedin_post_agent_task import linkedin_post_agent_task

# Sequential pipeline: research the topic, write the blog post from it,
# then condense into a LinkedIn post. Task order must mirror that flow.
crew = Crew(
    agents=[topic_researcher_agent, blog_writer_agent, linkedin_post_agent],
    tasks=[topic_researcher_agent_task, blog_writer_agent_task, linkedin_post_agent_task],
    process=Process.sequential,
)
|
src/tasks/__init__.py
ADDED
File without changes
|
src/tasks/__pycache__/__init__.cpython-311.pyc
ADDED
Binary file (161 Bytes). View file
|
|
src/tasks/__pycache__/blog_writer_agent_task.cpython-311.pyc
ADDED
Binary file (1.13 kB). View file
|
|
src/tasks/__pycache__/linkedin_post_agent_task.cpython-311.pyc
ADDED
Binary file (1.05 kB). View file
|
|
src/tasks/__pycache__/topic_researcher_agent_task.cpython-311.pyc
ADDED
Binary file (962 Bytes). View file
|
|
src/tasks/blog_writer_agent_task.py
ADDED
@@ -0,0 +1,17 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
"""Task definition: turn the researched article into a full blog post."""
import os

from crewai import Task
from dotenv import load_dotenv
from crewai_tools import SerperDevTool
from src.agents.blog_writer_agent import blog_writer_agent

load_dotenv()

# Export the key only when present: `os.environ[k] = None` raises TypeError
# and would break this module's import when the variable is missing from .env.
_serper_key = os.getenv("SERPER_API_KEY")
if _serper_key is not None:
    os.environ['SERPER_API_KEY'] = _serper_key

# Web-search tool the task may invoke.
tools = SerperDevTool()

blog_writer_agent_task = Task(
    description="Write a comprehensive blog post based on the 1 article provided by the Topic Researcher. The article must include an introduction, step-by-step guides and conclusion. The overall content must be about 400 words long.",
    expected_output="A comprehensive and well-written blog post with an outline and audience analysis and resources. Each section should have 2 or 3 paragraphs",
    agent=blog_writer_agent,
    tools=[tools]
)
|
src/tasks/linkedin_post_agent_task.py
ADDED
@@ -0,0 +1,17 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
"""Task definition: condense the researched content into a LinkedIn post."""
import os

from crewai import Task
from dotenv import load_dotenv
from crewai_tools import SerperDevTool
from src.agents.linkedin_post_agent import linkedin_post_agent

load_dotenv()

# Export the key only when present: `os.environ[k] = None` raises TypeError
# and would break this module's import when the variable is missing from .env.
_serper_key = os.getenv("SERPER_API_KEY")
if _serper_key is not None:
    os.environ['SERPER_API_KEY'] = _serper_key

# Web-search tool the task may invoke.
tools = SerperDevTool()

linkedin_post_agent_task = Task(
    # "hastags" typo fixed: this string is part of the LLM prompt.
    description="Create a LinkedIn post summarizing the key points from the transcription provided by the Topic Researcher, including relevant hashtags.",
    # The original said "blog post" here — a copy-paste from the blog task;
    # this task produces a LinkedIn post.
    expected_output="A well written LinkedIn post in markdown-format ready for publication. Each section should have 2 or 3 paragraphs with resources.",
    agent=linkedin_post_agent,
    tools=[tools]
)
|
src/tasks/topic_researcher_agent_task.py
ADDED
@@ -0,0 +1,17 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
"""Task definition: find and report on one relevant article for the topic."""
import os

from crewai import Task
from dotenv import load_dotenv
from crewai_tools import SerperDevTool
from src.agents.topic_researcher_agent import topic_researcher_agent

load_dotenv()

# Export the key only when present: `os.environ[k] = None` raises TypeError
# and would break this module's import when the variable is missing from .env.
_serper_key = os.getenv("SERPER_API_KEY")
if _serper_key is not None:
    os.environ['SERPER_API_KEY'] = _serper_key

# Web-search tool the task may invoke.
tools = SerperDevTool()

topic_researcher_agent_task = Task(
    # {topic} is interpolated by CrewAI from crew.kickoff(inputs={"topic": ...}).
    description="Identify and analyze only 1 content or article on the {topic} from the web.",
    expected_output="A complete word-by-word report on the most relevant post or article found on the topic {topic}.",
    agent=topic_researcher_agent,
    tools=[tools]
)
|