import streamlit as st
from transformers import AutoTokenizer, AutoModelForSequenceClassification
import torch
import re
import pandas as pd
import googleapiclient.discovery
import plotly.express as px 

# Load the BERT tokenizer and model
tokenizer = AutoTokenizer.from_pretrained("nlptown/bert-base-multilingual-uncased-sentiment")
model = AutoModelForSequenceClassification.from_pretrained("nlptown/bert-base-multilingual-uncased-sentiment")
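
# Optional sketch (not used by the app as written): Streamlit reruns this whole
# script on every interaction, so the tokenizer and model above are reloaded each
# time. Wrapping the load in st.cache_resource (available in recent Streamlit
# releases) keeps a single copy in memory across reruns. The function name below
# is illustrative, not part of the original code.
@st.cache_resource
def load_sentiment_model(model_name="nlptown/bert-base-multilingual-uncased-sentiment"):
    # Download (or reuse the local cache of) the tokenizer and model once per session
    cached_tokenizer = AutoTokenizer.from_pretrained(model_name)
    cached_model = AutoModelForSequenceClassification.from_pretrained(model_name)
    return cached_tokenizer, cached_model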


# Set up the YouTube API service
api_service_name = "youtube"
api_version = "v3"
DEVELOPER_KEY = "AIzaSyC4Vx8G6nm3Ow9xq7NluTuCCJ1d_5w4YPE"  # Replace with your actual API key

youtube = googleapiclient.discovery.build(api_service_name, api_version, developerKey=DEVELOPER_KEY)

# Function to fetch comments for a video ID
def scrape_comments(video_id):
    request = youtube.commentThreads().list(
        part="snippet",
        videoId=video_id,
        maxResults=100
    )
    response = request.execute()


    comments = []

    for item in response['items']:
        comment = item['snippet']['topLevelComment']['snippet']
        comments.append([
            comment['textDisplay']
        ])

    comments_df = pd.DataFrame(comments, columns=['comment'])

    return comments_df
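
# Optional sketch (not wired into the UI): scrape_comments above fetches only the
# first page of results, i.e. at most 100 top-level comments. commentThreads.list
# returns a nextPageToken that can be passed back as pageToken to walk further
# pages. The function name and the max_comments cap are illustrative additions.
def scrape_comments_paginated(video_id, max_comments=300):
    comments = []
    page_token = None
    while len(comments) < max_comments:
        params = {"part": "snippet", "videoId": video_id, "maxResults": 100}
        if page_token:
            params["pageToken"] = page_token
        response = youtube.commentThreads().list(**params).execute()
        for item in response['items']:
            comments.append([item['snippet']['topLevelComment']['snippet']['textDisplay']])
        page_token = response.get('nextPageToken')
        if not page_token:
            break
    return pd.DataFrame(comments, columns=['comment'])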


# Function to fetch video metadata using YouTube API
def fetch_video_info(video_id):
    request = youtube.videos().list(
        part="snippet",
        id=video_id
    )
    response = request.execute()

    if response['items']:
        video_info = response['items'][0]['snippet']
        channel_name = video_info['channelTitle']
        video_title = video_info['title']
        return channel_name, video_title
    else:
        raise ValueError("Video not found")

# Function to extract video ID from YouTube URL
def extract_video_id(video_url):
    match = re.search(r'(?<=v=)[\w-]+', video_url)
    if match:
        return match.group(0)
    else:
        st.error("Invalid YouTube video URL")

# Function to fetch YouTube comments for a video ID
def fetch_comments(video_id):
    # Delegate to scrape_comments, which uses the YouTube Data API
    comments_df = scrape_comments(video_id)

    return comments_df

# Function to analyze sentiment for a single comment
def analyze_sentiment(comment):
    # Tokenize and truncate to the model's 512-token limit
    tokens = tokenizer.encode(comment, return_tensors="pt", max_length=512, truncation=True)

    with torch.no_grad():
        result = model(tokens)

    # The model predicts a 1-5 star rating; map it to a coarser sentiment label
    sentiment_id = torch.argmax(result.logits).item() + 1
    if sentiment_id > 3:
        sentiment_label = "Positive"
    elif sentiment_id < 3:
        sentiment_label = "Negative"
    else:
        sentiment_label = "Neutral"

    return sentiment_label
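
# Optional sketch (not used below): analyze_sentiment scores one comment per
# forward pass. For up to 100 comments it is usually faster to push them through
# the tokenizer and model in batches. The function name and batch size are
# illustrative, not part of the original app.
def analyze_sentiments_batched(comments, batch_size=32):
    labels = []
    for start in range(0, len(comments), batch_size):
        batch = list(comments[start:start + batch_size])
        encoded = tokenizer(batch, return_tensors="pt", padding=True,
                            truncation=True, max_length=512)
        with torch.no_grad():
            logits = model(**encoded).logits
        # Star ratings 1-5: above 3 is positive, below 3 negative, exactly 3 neutral
        for star in (torch.argmax(logits, dim=-1) + 1).tolist():
            labels.append("Positive" if star > 3 else "Negative" if star < 3 else "Neutral")
    return labels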


def main():
    st.title("YouTube Comments Sentiment Analysis")
    
    # Create sidebar section for app description and links
    st.sidebar.title("Comment Feel")
    st.sidebar.write("Welcome to the YouTube Comments Sentiment Analysis App πŸŽ₯")
    st.sidebar.write("""
                    
                    **Description** πŸ“
                    This project utilizes a pre-trained sentiment analysis model based on BERT and TensorFlow to analyze the sentiment of comments from a YouTube video. Users can input a YouTube video URL, fetch related comments, and determine their sentiments (positive, negative, or neutral).
                    
                    Input a valid YouTube video URL in the provided text box πŸ”—.
                    Click "Extract Comments and Analyze" to fetch comments and analyze sentiments πŸ”„.
                    View sentiment analysis results via pie and bar charts πŸ“Š.
                     
                    Credits 🌟
                    
                    Coder: Aniket Panchal
                    GitHub: https://github.com/Aniket2021448
                    
                    Contact πŸ“§
                    For any inquiries or feedback, please contact aniketpanchal1257@gmail.com
    
    """)
    st.sidebar.write("Feel free to check out my other apps :eyes:")


    with st.sidebar.form("app_selection_form"):
        st.write("Select an App:")
        app_links = {
            "Movie-mind": "https://movie-mind.streamlit.app/",
            "find-fake-news": "https://find-fake-news.streamlit.app/"
        }
        selected_app = st.selectbox("Choose an App", list(app_links.keys()))

        submitted_button = st.form_submit_button("Go to App")

    # Handle form submission
    if submitted_button:
        selected_app_url = app_links.get(selected_app)
        if selected_app_url:
            st.sidebar.success("Redirected successfully!")
            st.markdown(f'<meta http-equiv="refresh" content="0;URL={selected_app_url}">', unsafe_allow_html=True)

    
    st.sidebar.write("If the linked apps are down (they may go to sleep when rarely used),")
    st.sidebar.write("kindly reach out to me at aniketpanchal1257@gmail.com")
    
    
    st.write("Enter a YouTube video link below: :movie_camera:")
    video_url = st.text_input("YouTube Video URL:")
    
    if st.button("Extract Comments and Analyze"):
        video_id = extract_video_id(video_url)
        if video_id:
            comments_df = fetch_comments(video_id)
            # Tokenizer-side truncation already caps input at 512 tokens, so no manual slicing is needed
            comments_df['sentiment'] = comments_df['comment'].apply(analyze_sentiment)
            sentiment_counts = comments_df['sentiment'].value_counts()

            channel_name, video_title = fetch_video_info(video_id)
            
            st.write(f"**Channel Name:** {channel_name}")
            st.write(f"**Video Description:** {video_title}")
            
            st.write("Based on top :100: comments from this video")
            # Create pie chart
            st.write("Pie chart representation 	:chart_with_upwards_trend:")
            fig_pie = px.pie(values=sentiment_counts.values, names=sentiment_counts.index, title='Sentiment Distribution')
            st.plotly_chart(fig_pie, use_container_width=True)

            # Create bar chart
            st.write("Bar plot representation :bar_chart:")
            fig_bar = px.bar(x=sentiment_counts.index, y=sentiment_counts.values, labels={'x': 'Sentiment', 'y': 'Count'}, title='Sentiment Counts')
            st.plotly_chart(fig_bar, use_container_width=True)


if __name__ == "__main__":
    main()