|
|
import streamlit as st |
|
|
import requests |
|
|
import pandas as pd |
|
|
import json |
|
|
|
|
|
|
|
|
# Configure the Streamlit page; set_page_config must be the first Streamlit
# command executed in the script.
st.set_page_config(page_title="Info Hub by Anand", layout="wide")

# Page heading and sub-caption.
# NOTE(review): the "π" glyphs in this and other UI strings look like
# mojibake-garbled emoji — confirm the intended characters and re-save the
# file as UTF-8.
st.title("π Info Hub by Anand")
st.caption("Bing News Search & Twitter Trends")
|
|
|
|
|
|
|
|
|
|
|
# Pull the RapidAPI credentials from Streamlit secrets. If either key is
# missing, show a visible error naming the required secret entries and halt
# the script so no API calls are attempted without credentials.
try:
    BING_API_KEY = st.secrets["RAPIDAPI_KEY_BING"]
    TWITTER_API_KEY = st.secrets["RAPIDAPI_KEY_TWITTER"]
except KeyError as e:
    st.error(f"π΄ Error: Missing API Key in Hugging Face secrets: {e}. Please add RAPIDAPI_KEY_BING and RAPIDAPI_KEY_TWITTER.")
    st.stop()
|
|
|
|
|
|
|
|
# RapidAPI host and base URL for the Bing News Search API.
BING_API_HOST = "bing-news-search1.p.rapidapi.com"
BING_BASE_URL = f"https://{BING_API_HOST}"

# RapidAPI host and base URL for the Twitter ("twitter241") API.
TWITTER_API_HOST = "twitter241.p.rapidapi.com"
TWITTER_BASE_URL = f"https://{TWITTER_API_HOST}"
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@st.cache_data(ttl=300)  # cache identical requests for 5 minutes
def fetch_api_data(url, headers, params=None):
    """Fetch JSON from a RapidAPI endpoint, reporting failures via st.error.

    Args:
        url: Full endpoint URL to GET.
        headers: Request headers (RapidAPI key/host).
        params: Optional query-string parameters.

    Returns:
        The decoded JSON payload on success, or None on any failure
        (a user-visible error is emitted through Streamlit in that case).
    """
    try:
        response = requests.get(url, headers=headers, params=params, timeout=15)
        response.raise_for_status()
        return response.json()
    except requests.exceptions.Timeout:
        st.error(f"π¨ API Error: Request timed out accessing {url}")
        return None
    except requests.exceptions.HTTPError as e:
        # Try to surface the API's own error payload; fall back to raw text.
        try:
            error_detail = e.response.json()
        except ValueError:  # json.JSONDecodeError is a ValueError subclass
            error_detail = e.response.text
        st.error(f"π¨ API HTTP Error fetching {url}: {e}. Response: {error_detail}")
        return None
    # BUGFIX: this clause must precede the RequestException handler. Since
    # requests 2.27, response.json() raises requests.exceptions.JSONDecodeError,
    # which subclasses BOTH json.JSONDecodeError and RequestException — with the
    # original ordering, decode failures were misreported as connection errors
    # and this clause was unreachable.
    except json.JSONDecodeError:
        st.error(f"π¨ API Error: Could not decode JSON response from {url}")
        return None
    except requests.exceptions.RequestException as e:
        st.error(f"π¨ API Connection Error fetching {url}: {e}")
        return None
    except Exception as e:
        # Last-resort boundary: show the error in the UI instead of crashing.
        st.error(f"π¨ An unexpected error occurred: {e}")
        return None
|
|
|
|
|
|
|
|
# Two feature tabs: Bing news search and Twitter trends by location.
tab1, tab2 = st.tabs(["π° Bing News Search", "π Twitter Trends"])
|
|
|
|
|
|
|
|
with tab1:
    st.header("Search Bing News")

    # --- Query controls ---
    search_query = st.text_input("Enter search term (e.g., 'cricket world cup', 'AI technology'):", key="bing_query")
    col1, col2 = st.columns([1, 3])
    with col1:
        freshness = st.selectbox("Freshness:", ["Day", "Week", "Month"], index=0, key="bing_freshness")
    with col2:
        safe_search = st.selectbox("Safe Search:", ["Off", "Moderate", "Strict"], index=0, key="bing_safe")

    if st.button("Search News", key="bing_search_button"):
        if not search_query:
            st.warning("Please enter a search term.")
        else:
            st.write(f"Searching for '{search_query}'...")

            # RapidAPI authentication plus Bing-specific SDK header.
            bing_headers = {
                "X-BingApis-SDK": "true",
                "X-RapidAPI-Key": BING_API_KEY,
                "X-RapidAPI-Host": BING_API_HOST
            }
            bing_params = {
                "q": search_query,
                "freshness": freshness,
                "textFormat": "Raw",
                "safeSearch": safe_search,
                "mkt": "en-US"
            }
            endpoint = "/news/search"
            news_data = fetch_api_data(f"{BING_BASE_URL}{endpoint}", headers=bing_headers, params=bing_params)

            if news_data:
                st.success("News Results:")

                # Bing's news payload lists articles under the "value" key.
                articles = news_data.get("value", [])
                if not articles:
                    st.info("No news articles found for this query.")

                for i, item in enumerate(articles):
                    title = item.get("name", "No Title")
                    description = item.get("description", "No Description")
                    url = item.get("url", "#")
                    provider_list = item.get("provider", [{}])
                    provider_name = provider_list[0].get("name", "Unknown Source") if provider_list else "Unknown Source"
                    date_published = item.get("datePublished", "N/A")

                    # Render the timestamp in a compact local format; fall back
                    # to the raw string if it is not parseable (e.g. "N/A").
                    # BUGFIX: was a bare `except:` — that also swallows
                    # SystemExit/KeyboardInterrupt; only parse failures should
                    # trigger the fallback.
                    try:
                        date_published_dt = pd.to_datetime(date_published)
                        date_published_str = date_published_dt.strftime('%Y-%m-%d %H:%M')
                    except (ValueError, TypeError):
                        date_published_str = date_published

                    st.subheader(f"{i+1}. {title}")
                    st.caption(f"Source: {provider_name} | Published: {date_published_str}")
                    st.write(description)
                    # SECURITY: dropped unsafe_allow_html=True — `url` comes
                    # from an external API, and a plain Markdown link renders
                    # identically without allowing raw HTML injection.
                    st.markdown(f"[Read More]({url})")
                    st.divider()
            else:
                st.error("Failed to retrieve news data from the API.")
|
|
|
|
|
|
|
|
|
|
|
with tab2:
    st.header("Get Twitter Trends by Location")

    # WOEID = Yahoo "Where On Earth ID"; defaults to 23424848 (India).
    woeid = st.text_input("Enter WOEID (Where On Earth ID):", value="23424848", key="twitter_woeid", help="e.g., 1 (Global), 23424848 (India), 23424977 (USA)")

    if st.button("Fetch Trends", key="twitter_fetch_button"):
        if not woeid or not woeid.isdigit():
            st.warning("Please enter a valid numeric WOEID.")
        else:
            st.write(f"Fetching trends for WOEID: {woeid}...")

            # RapidAPI authentication headers for the Twitter host.
            twitter_headers = {
                "X-RapidAPI-Key": TWITTER_API_KEY,
                "X-RapidAPI-Host": TWITTER_API_HOST
            }

            endpoint = "/trends-by-location"
            twitter_params = {"woeid": woeid}

            trends_data = fetch_api_data(f"{TWITTER_BASE_URL}{endpoint}", headers=twitter_headers, params=twitter_params)

            if trends_data:
                st.success("Trending Topics:")

                # The response shape from this API is not fixed, so probe the
                # known variants in order: a list whose first element holds
                # "trends", a dict with "trends", a dict with "results", or a
                # bare list of trend dicts.
                trends_list = []
                if isinstance(trends_data, list) and len(trends_data) > 0 and "trends" in trends_data[0]:
                    trends_list = trends_data[0].get("trends", [])
                elif isinstance(trends_data, dict) and "trends" in trends_data:
                    trends_list = trends_data.get("trends", [])
                elif isinstance(trends_data, dict) and "results" in trends_data:
                    trends_list = trends_data.get("results", [])
                elif isinstance(trends_data, list):
                    trends_list = trends_data

                if not trends_list:
                    # Dump the raw payload so an unrecognized structure can be
                    # inspected in the UI.
                    st.info("No trends found or could not parse the response structure.")
                    st.json(trends_data)
                else:
                    # Flatten each trend dict into a row for the dataframe.
                    display_data = []
                    for trend in trends_list:
                        if isinstance(trend, dict):
                            name = trend.get("name", "N/A")
                            url = trend.get("url", "#")
                            volume = trend.get("tweet_volume", None)
                            display_data.append({
                                "Trend": name,
                                # NOTE(review): falsy check means a volume of 0
                                # also renders "N/A" — confirm that is intended
                                # (the API reportedly uses null for "unknown").
                                "Tweet Volume": f"{volume:,}" if volume else "N/A",
                                "Link": url
                            })
                        else:
                            st.warning(f"Unexpected item in trends list: {trend}")

                    if display_data:
                        df_trends = pd.DataFrame(display_data)
                        # LinkColumn renders each URL as a clickable link with
                        # fixed display text.
                        st.dataframe(df_trends, hide_index=True, use_container_width=True,
                                     column_config={"Link": st.column_config.LinkColumn("Link", display_text="Go to Twitter")})
                    else:
                        st.info("Formatted trend data is empty.")
            else:
                st.error("Failed to retrieve trends data from the API.")
|
|
|
|
|
|
|
|
# Page footer: horizontal rule plus attribution caption.
st.markdown("---")
st.caption("Info Hub by Anand | Data powered by RapidAPI (Bing News, Twitter)")