# App.py — Streamlit sentiment analysis app
# Source: Hugging Face Space by Soundaryasos, revision 2bcd067 (2.51 kB)
import streamlit as st
import praw
import googleapiclient.discovery
import pandas as pd
import joblib
from vaderSentiment.vaderSentiment import SentimentIntensityAnalyzer
from transformers import pipeline
# Load the pre-trained regression model if its pickle is present.  The app
# degrades gracefully when it is missing: regression_sentiment() reports
# "Model not trained yet" whenever model is None.
try:
    model = joblib.load("sentiment_regression.pkl")
except Exception:  # missing or unreadable pickle — narrow from a bare except
    model = None
# Reddit Authentication
def authenticate_reddit():
    """Build an authenticated PRAW Reddit client from Streamlit secrets."""
    credentials = {
        "client_id": st.secrets["REDDIT_CLIENT_ID"],
        "client_secret": st.secrets["REDDIT_CLIENT_SECRET"],
        "user_agent": st.secrets["REDDIT_USER_AGENT"],
    }
    return praw.Reddit(**credentials)
# YouTube Authentication
def authenticate_youtube():
    """Build a YouTube Data API v3 client using the Streamlit secret key."""
    api_key = st.secrets["YOUTUBE_API_KEY"]
    return googleapiclient.discovery.build("youtube", "v3", developerKey=api_key)
# Sentiment Analysis Functions
def vader_sentiment(text):
    """Return the VADER compound polarity score for *text* (-1.0 to 1.0).

    The SentimentIntensityAnalyzer is constructed once and cached on the
    function object instead of being rebuilt on every call.
    """
    analyzer = getattr(vader_sentiment, "_analyzer", None)
    if analyzer is None:
        analyzer = SentimentIntensityAnalyzer()
        vader_sentiment._analyzer = analyzer
    return analyzer.polarity_scores(text)["compound"]
def bert_sentiment(text):
    """Return the transformers sentiment label (e.g. 'POSITIVE') for *text*.

    Building a "sentiment-analysis" pipeline loads an entire model, which is
    very expensive; the original code did that on every call.  The pipeline
    is now created once and cached on the function object.
    """
    clf = getattr(bert_sentiment, "_pipeline", None)
    if clf is None:
        clf = pipeline("sentiment-analysis")
        bert_sentiment._pipeline = clf
    return clf(text)[0]['label']
def regression_sentiment(text):
    """Predict sentiment for *text* with the pre-loaded regression model.

    Returns the model's first prediction, or the placeholder string
    "Model not trained yet" when sentiment_regression.pkl was not found
    at startup (module-level ``model`` is None).
    """
    # Explicit None check: truthiness of an arbitrary model object is not
    # guaranteed (some objects raise or return False from __bool__/__len__).
    if model is not None:
        return model.predict([text])[0]
    return "Model not trained yet"
# Fetch Reddit Data
def get_reddit_data(keyword):
    """Search r/all for *keyword* and return the titles of up to 10 hits."""
    reddit = authenticate_reddit()
    results = reddit.subreddit("all").search(keyword, limit=10)
    return [submission.title for submission in results]
# Fetch YouTube Data
def get_youtube_data(keyword):
    """Search YouTube for *keyword* and return up to 10 result titles."""
    client = authenticate_youtube()
    search = client.search().list(q=keyword, part="snippet", maxResults=10)
    payload = search.execute()
    return [entry["snippet"]["title"] for entry in payload.get("items", [])]
# Streamlit UI
def _render_sentiments(header, icon, texts):
    """Render one results section: a subheader, then per-item scores.

    For each text the three analyzers (VADER, BERT, regression) are run and
    their outputs written, followed by a "---" divider.
    """
    st.subheader(header)
    for text in texts:
        st.write(f"{icon} {text}")
        st.write(f"VADER Sentiment: {vader_sentiment(text)}")
        st.write(f"BERT Sentiment: {bert_sentiment(text)}")
        st.write(f"Regression Sentiment: {regression_sentiment(text)}")
        st.write("---")

# Streamlit UI
st.title("Sentiment Analysis App")
keyword = st.text_input("Enter a keyword to analyze:")
if keyword:
    # Fetch both sources before rendering, preserving the original call order.
    reddit_posts = get_reddit_data(keyword)
    youtube_titles = get_youtube_data(keyword)
    _render_sentiments("Reddit Sentiment", "🔹", reddit_posts)
    _render_sentiments("YouTube Sentiment", "📺", youtube_titles)