# Dependencies (install before running, e.g. via requirements.txt):
# streamlit, pandas, vaderSentiment, textblob, transformers, matplotlib, wordcloud
import streamlit as st
import pandas as pd
from vaderSentiment.vaderSentiment import SentimentIntensityAnalyzer
from textblob import TextBlob
from transformers import pipeline
import matplotlib.pyplot as plt
from wordcloud import WordCloud
# Function to analyze sentiment using the custom Hugging Face pipeline
def analyze_sentiment_hf(text):
    hf_pipeline = pipeline("sentiment-analysis", model="RohitBh/Sentimental_Analysis")
    # Rough character-level truncation so very long inputs do not exceed the model's input limit
    if len(text) > 512:
        text = text[:512]
    sentiment_result = hf_pipeline(text)
    sentiment_label = sentiment_result[0]["label"]
    # The model reports LABEL_1 for positive and LABEL_0 for negative
    if sentiment_label == "LABEL_1":
        return "Positive"
    elif sentiment_label == "LABEL_0":
        return "Negative"
    else:
        return "Neutral"
# Function to analyze sentiment using VADER
def analyze_sentiment_vader(text):
    sentiment_analyzer = SentimentIntensityAnalyzer()
    sentiment_score = sentiment_analyzer.polarity_scores(text)["compound"]
    if sentiment_score > 0:
        return "Positive"
    elif sentiment_score == 0:
        return "Neutral"
    else:
        return "Negative"
# Function to analyze sentiment using TextBlob
def analyze_sentiment_textblob(text):
    sentiment_analysis = TextBlob(text)
    score = sentiment_analysis.sentiment.polarity
    if score > 0:
        return "Positive"
    elif score == 0:
        return "Neutral"
    else:
        return "Negative"
# Function to display DataFrame with sentiment
def display_results_dataframe(data_frame):
    st.write(data_frame)

# Function to display a pie chart of sentiment distribution
def create_pie_chart(data_frame, sentiment_column):
    sentiment_distribution = data_frame[sentiment_column].value_counts()
    fig, ax = plt.subplots()
    ax.pie(sentiment_distribution, labels=sentiment_distribution.index, autopct='%1.1f%%', startangle=90)
    ax.axis('equal')  # Equal aspect ratio ensures the pie is drawn as a circle.
    st.pyplot(fig)

# Function to display a word cloud based on sentiment data
def create_word_cloud(sentiment_data):
    wordcloud_generator = WordCloud(width=800, height=400).generate(sentiment_data)
    fig, ax = plt.subplots(figsize=(10, 5))
    ax.imshow(wordcloud_generator, interpolation='bilinear')
    ax.axis('off')
    st.pyplot(fig)
# Main UI setup
st.set_page_config(page_title="Sentiment Analysis Tool", page_icon=":bar_chart:")
st.title("Sentiment Analysis Tool")
# Sidebar configuration for user input options
st.sidebar.title("Analysis Options")
input_type = st.sidebar.selectbox("Choose Input Type", ["Text Input", "CSV Upload"])
model_choice = st.sidebar.selectbox("Choose Sentiment Analysis Model", ["Hugging Face", "VADER", "TextBlob"])
display_type = st.sidebar.selectbox("Choose Display Type", ["DataFrame", "Pie Chart", "Word Cloud"])
# Process input based on user choice
if input_type == "Text Input":
    user_text = st.text_input("Enter text for sentiment analysis:")
    if st.button("Analyze Sentiment"):
        if user_text:
            # Analyze sentiment with the selected model
            if model_choice == "Hugging Face":
                sentiment = analyze_sentiment_hf(user_text)
            elif model_choice == "VADER":
                sentiment = analyze_sentiment_vader(user_text)
            else:
                sentiment = analyze_sentiment_textblob(user_text)
            st.write("Detected Sentiment:", sentiment)
        else:
            st.warning("Please enter some text to analyze.")
elif input_type == "CSV Upload":
    uploaded_file = st.file_uploader("Upload CSV file for analysis", type="csv")
    if st.button("Start Analysis"):
        if uploaded_file is not None:
            data_frame = pd.read_csv(uploaded_file)
            # The CSV is expected to contain a column named 'text' for analysis
            if 'text' in data_frame.columns:
                if model_choice == "Hugging Face":
                    analyze = analyze_sentiment_hf
                elif model_choice == "VADER":
                    analyze = analyze_sentiment_vader
                else:
                    analyze = analyze_sentiment_textblob
                # Cast to str so missing values do not break the analyzers
                data_frame['Sentiment'] = data_frame['text'].astype(str).apply(analyze)
                if display_type == "DataFrame":
                    display_results_dataframe(data_frame)
                elif display_type == "Pie Chart":
                    create_pie_chart(data_frame, 'Sentiment')
                elif display_type == "Word Cloud":
                    combined_text = ' '.join(data_frame['text'].astype(str))
                    create_word_cloud(combined_text)
            else:
                st.error("The uploaded CSV file must contain a 'text' column.")
        else:
            st.warning("Please upload a CSV file to proceed with analysis.")