import os
import re
import pandas as pd
import streamlit as st
import snowflake.connector
from cryptography.hazmat.primitives import serialization
from datetime import datetime
# ─── Page Config ─────────────────────────────────────────────
# Must run before any other Streamlit call in the script.
# NOTE(review): "π" looks like a mojibake'd emoji icon — confirm original glyph.
st.set_page_config(layout="wide", page_title="Social Auth Tracker", page_icon="π")
# ─── Snowflake Connection ────────────────────────────────────
def get_conn():
    """Open a Snowflake connection authenticated with a PEM private key.

    All connection parameters come from environment variables
    (``snowflake_user``, ``snowflake_account_identifier``, ``snowflake_role``,
    ``snowflake_warehouse``); ``snowflake_database`` and ``snowflake_schema``
    fall back to ``ANALYTICS`` / ``REVOPS_AWS_FILES`` when unset.

    Returns:
        snowflake.connector.SnowflakeConnection: an open connection.

    Raises:
        RuntimeError: if ``snowflake_private_key`` is not set — previously
            this surfaced as an opaque ``AttributeError`` from
            ``None.encode()``.
    """
    raw_key = os.getenv("snowflake_private_key")
    if not raw_key:
        raise RuntimeError("snowflake_private_key environment variable is not set")
    # Key is stored as PEM text in the env var; the loader wants bytes.
    priv_key = serialization.load_pem_private_key(raw_key.encode(), password=None)
    return snowflake.connector.connect(
        user=os.getenv("snowflake_user"),
        account=os.getenv("snowflake_account_identifier"),
        private_key=priv_key,
        role=os.getenv("snowflake_role"),
        warehouse=os.getenv("snowflake_warehouse"),
        database=os.getenv("snowflake_database", "ANALYTICS"),
        schema=os.getenv("snowflake_schema", "REVOPS_AWS_FILES"),
    )
# ─── Data Loading ────────────────────────────────────────────
@st.cache_data
def load_data():
    """Fetch the social-auth tracker table and enrich it with each site's
    partnerships manager.

    Steps (visible below): pull every row of
    ``REVOPS_SALES_SOCIAL_AUTH_TRACKER`` from the connection's configured
    database/schema, lower-case the column names, strip ``*`` from
    ``web_name``, left-merge ``creator_partnerships_manager`` from the
    extended-site table (matching ``web_name`` against ``entity``
    case-insensitively), and parse ``timestamp`` to datetime when that
    column exists.

    Returns:
        pd.DataFrame: the enriched tracker data, or an empty DataFrame on
        any failure (the error is shown in the UI via ``st.error``).
    """
    conn = get_conn()
    try:
        tbl = f"{conn.database}.{conn.schema}.REVOPS_SALES_SOCIAL_AUTH_TRACKER"
        df = pd.read_sql(f"SELECT * FROM {tbl}", conn)
        df.columns = df.columns.str.lower()
        if 'web_name' in df.columns:
            # '*' appears to be a display marker in the tracker, not part
            # of the site name.
            df['web_name'] = df['web_name'].str.replace('*','',regex=False)
        # Merge extended info
        ext_sql = (
            "SELECT site_id, site_name, entity, creator_partnerships_manager "
            "FROM ANALYTICS.SIGMA_SCRATCH.AVI_SITE_EXTENDED_SNOWFLAKE_WITH_ENTITY"
        )
        df_ext = pd.read_sql(ext_sql, conn)
        df_ext.columns = df_ext.columns.str.lower()
        # Build a case-insensitive join key on both sides.
        # NOTE(review): if `entity` is not unique in the extended table,
        # this left-merge duplicates tracker rows — confirm upstream uniqueness.
        df_ext['entity_lower'] = df_ext['entity'].str.lower()
        df['web_name_lower'] = df['web_name'].str.lower()
        df = df.merge(
            df_ext[['entity_lower','creator_partnerships_manager']],
            left_on='web_name_lower', right_on='entity_lower', how='left'
        )
        df.drop(columns=['web_name_lower','entity_lower'], inplace=True)
        if 'timestamp' in df.columns:
            df['timestamp'] = pd.to_datetime(df['timestamp'])
        return df
    except Exception as e:
        # Best-effort loader: surface the error in the app instead of crashing.
        st.error(f"Error loading data: {e}")
        return pd.DataFrame()
    finally:
        conn.close()
# ─── Troubleshoot Link Generator ─────────────────────────────
def make_troubleshoot_link(handle: str) -> str:
    """Map a social handle/URL to the matching Raptive troubleshooting article.

    Matching is a case-insensitive substring test on the handle.

    Args:
        handle: a social handle or URL (e.g. "https://facebook.com/foo").

    Returns:
        The help-article URL, or '' when the platform is not recognized.
    """
    h = handle.lower()
    if 'facebook' in h:
        return 'https://help.raptive.com/hc/en-us/articles/35155011344283-Comscore-Social-Authentication-Facebook-Troubleshooting'
    if 'instagram' in h:
        return 'https://help.raptive.com/hc/en-us/articles/35155497654555-Comscore-Social-Authentication-Instagram-Troubleshooting'
    # BUG FIX: was `'x' in h`, which matched the letter "x" anywhere in the
    # handle (e.g. "example.com") and mislabeled it Twitter/X. Match the
    # x.com domain instead, consistent with the URL bucketing in
    # generate_alert_message below.
    if 'twitter' in h or 'x.com' in h:
        return 'https://help.raptive.com/hc/en-us/articles/35154607861531-Comscore-Social-Authorization-X-Twitter-Troubleshooting'
    return ''
# ─── Alert Message Generator ─────────────────────────────────
def generate_alert_message(df_alert: pd.DataFrame) -> str:
    """Build the markdown drop-alert message, grouped by partnerships
    manager, then by site.

    Args:
        df_alert: DataFrame with columns ``creator_partnerships_manager``,
            ``web_name`` and ``social_handle`` (the latter may be an
            ``<a>…</a>`` snippet whose inner text is the handle/URL).

    Returns:
        The alert message as a markdown string.
    """
    msg = ":rotating_light: **Social Authentication Drop Alert** :rotating_light:\n"
    msg += (
        "We had some social authentications drop. These can be caused by random things like password changes, "
        "changes in payment, leaving the country, etc. If we could reach out to the sites to ask them to reinstate, "
        "that would be helpful! Thank you all!\n\n"
    )
    # BUG FIX: groupby drops NaN keys by default, so sites with no assigned
    # manager silently vanished from the alert. Fill NaN with '' so they
    # land in the @Unassigned bucket instead.
    managers = df_alert['creator_partnerships_manager'].fillna('')
    for manager, group in df_alert.groupby(managers):
        mention = f"**@{manager}**" if manager else "**@Unassigned**"
        msg += f"{mention}\n"
        for site, site_grp in group.groupby('web_name'):
            msg += f"- **{site.upper()}**\n"
            raw_urls = []
            for html in site_grp['social_handle']:
                # Pull the anchor's inner text; fall back to the raw value.
                m = re.search(r'>([^<]+)<', html)
                url = m.group(1) if m else html
                raw_urls.append(url if url.startswith('http') else f'https://{url}')
            # De-duplicate once, preserving first-seen order (the original
            # rebuilt an unordered set() three times, making output order
            # nondeterministic across runs).
            unique_urls = list(dict.fromkeys(raw_urls))
            fb_urls = [u for u in unique_urls if 'facebook' in u.lower()]
            ig_urls = [u for u in unique_urls if 'instagram' in u.lower()]
            tw_urls = [u for u in unique_urls if 'twitter' in u.lower() or 'x.com' in u.lower()]
            # Disconnected lines
            if fb_urls:
                fb_line = ', '.join(f"[{u}]({u})" for u in fb_urls)
                msg += f" - Facebook disconnected: {fb_line}\n"
            if ig_urls:
                ig_line = ', '.join(f"[{u}]({u})" for u in ig_urls)
                msg += f" - Instagram disconnected: {ig_line}\n"
            if tw_urls:
                tw_line = ', '.join(f"[{u}]({u})" for u in tw_urls)
                msg += f" - Twitter disconnected: {tw_line}\n"
            # Troubleshooting instructions
            msg += " - Troubleshooting instructions:\n"
            if fb_urls:
                msg += f" - π§ [Facebook troubleshooting instructions]({make_troubleshoot_link(fb_urls[0])})\n"
            if ig_urls:
                msg += f" - π§ [Instagram troubleshooting instructions]({make_troubleshoot_link(ig_urls[0])})\n"
            if tw_urls:
                msg += f" - π§ [Twitter troubleshooting instructions]({make_troubleshoot_link(tw_urls[0])})\n"
        msg += "\n"
    return msg
# ─── App Layout ──────────────────────────────────────────────
st.title("π REVOPS Sales Social Auth Tracker with Partnerships & Troubleshooting")
df = load_data()
if df.empty:
    st.write("No data to display.")
else:
    # BUG FIX: load_data() only parses 'timestamp' when the column exists,
    # but everything below dereferences it unconditionally — guard first.
    if 'timestamp' not in df.columns:
        st.write("No timestamp column found.")
        st.stop()
    # Date selector (skip NaT so sorting works)
    df['date_only'] = df['timestamp'].dt.date
    dates = sorted(d for d in df['date_only'].dropna().unique())
    if not dates:
        st.write("No valid dates found.")
        st.stop()
    selected_date = st.selectbox(
        "Select date to display:",
        [str(d) for d in dates],
        index=len(dates) - 1  # default to the most recent date
    )
    df = df[df['timestamp'].dt.date == datetime.fromisoformat(selected_date).date()]
    # Hyperlink handles and add a troubleshoot-link column. The anchor's
    # inner text keeps the original handle so the alert generator can
    # re-extract it with the same '>...<' regex.
    df['social_handle'] = df['social_handle'].astype(str).apply(
        lambda x: f"<a href='{x if x.startswith('http') else 'https://' + x}' target='_blank'>{x}</a>"
    )
    df['Troubleshoot Link'] = df['social_handle'].apply(
        lambda html: (
            f"<a href='{make_troubleshoot_link(m.group(1))}' target='_blank'>Troubleshoot</a>"
            if (m := re.search(r'>([^<]+)<', html)) else ""
        )
    )
    # Reorder timestamp last
    cols = [c for c in df.columns if c != 'timestamp'] + ['timestamp']
    df = df[cols]
    # Collapsed alert preview
    with st.expander("Show Drop Alert Message", expanded=False):
        st.markdown(generate_alert_message(df), unsafe_allow_html=True)
    # Display data table (~25 rows)
    html = df.to_html(escape=False, index=False)
    st.markdown(
        f"<div style='height:800px; overflow-y:auto'>{html}</div>",
        unsafe_allow_html=True
    )