Sumedhzz committed on
Commit
9fae22e
·
1 Parent(s): 2faf678

Fix: Add secret-loading safety net and fix indentation

Browse files
Files changed (1) hide show
  1. src/streamlit_app.py +12 -8
src/streamlit_app.py CHANGED
@@ -25,21 +25,25 @@ st.markdown("""
25
  # --- CLOUD DATA LOGGING (GOOGLE SHEETS) ---
26
  def get_connection():
27
  try:
28
- # Check if secrets exist to avoid the auto-crash you saw earlier
29
  if "GSHEETS_JSON" not in st.secrets or "GSHEETS_URL" not in st.secrets:
30
- st.warning("πŸš€ System Initializing... Please ensure GSHEETS_JSON is set in HF Settings.")
 
31
  st.stop()
32
 
 
33
  json_secrets = st.secrets["GSHEETS_JSON"]
34
  sheet_url = st.secrets["GSHEETS_URL"]
35
 
 
36
  creds_dict = json.loads(json_secrets)
37
  return st.connection("gsheets", type=GSheetsConnection, credentials=creds_dict), sheet_url
38
  except Exception as e:
39
  st.error(f"Waiting for Vault access... (Technical Error: {e})")
40
  st.stop()
41
 
42
- # Initialize connection safely using Session State
 
43
  if 'conn' not in st.session_state:
44
  conn, GSHEETS_URL = get_connection()
45
  st.session_state.conn = conn
@@ -50,7 +54,7 @@ else:
50
 
51
  def save_to_cloud(text, ai_label, ai_score, corrected_label=None):
52
  try:
53
- # Use Sheet1 as the target
54
  existing_data = conn.read(spreadsheet=GSHEETS_URL, worksheet="Sheet1", ttl=0)
55
 
56
  new_entry = pd.DataFrame([{
@@ -75,24 +79,24 @@ MODEL_PATH = "SumedhGajbhiye/Sentiment-Analyzer"
75
  def load_engine(path):
76
  return pipeline("sentiment-analysis", model=path, tokenizer=path)
77
 
78
- # --- HEADER SECTION ---
79
  col_h1, col_h2 = st.columns([3, 1])
80
  with col_h1:
81
  st.title("Sentiment Analyzer")
82
  st.caption("Advanced Bilingual Sentiment Analysis for English, Hindi & Hinglish")
83
 
84
- # --- SIDEBAR HUD ---
85
  with st.sidebar:
86
  st.markdown("### πŸ› οΈ ENGINE STATUS")
87
  try:
88
  df_log = conn.read(spreadsheet=GSHEETS_URL, worksheet="Sheet1", ttl=0)
89
- st.metric("Total Ingested", len(df_log)) # Shows count from your sheet
90
  st.divider()
91
  st.download_button("πŸ“€ Export Dataset", df_log.to_csv(index=False), "engine_feedback.csv", "text/csv")
92
  except:
93
  st.info("Engine is connecting to cloud...")
94
 
95
- # --- ANALYSIS INTERFACE ---
96
  classifier = load_engine(MODEL_PATH)
97
 
98
  if classifier:
 
25
  # --- CLOUD DATA LOGGING (GOOGLE SHEETS) ---
26
  def get_connection():
27
  try:
28
+ # Step 1: Wait a moment for the HF Vault to mount
29
  if "GSHEETS_JSON" not in st.secrets or "GSHEETS_URL" not in st.secrets:
30
+ # Instead of crashing, we show a friendly wait message
31
+ st.warning("πŸš€ System Initializing... Connecting to Cloud Vault.")
32
  st.stop()
33
 
34
+ # Step 2: Retrieve verified secrets
35
  json_secrets = st.secrets["GSHEETS_JSON"]
36
  sheet_url = st.secrets["GSHEETS_URL"]
37
 
38
+ # Step 3: Parse and connect
39
  creds_dict = json.loads(json_secrets)
40
  return st.connection("gsheets", type=GSheetsConnection, credentials=creds_dict), sheet_url
41
  except Exception as e:
42
  st.error(f"Waiting for Vault access... (Technical Error: {e})")
43
  st.stop()
44
 
45
+ # --- INITIALIZATION ---
46
+ # Use session state so we don't re-connect on every click
47
  if 'conn' not in st.session_state:
48
  conn, GSHEETS_URL = get_connection()
49
  st.session_state.conn = conn
 
54
 
55
  def save_to_cloud(text, ai_label, ai_score, corrected_label=None):
56
  try:
57
+ # worksheet name must match your Google Sheet tab exactly
58
  existing_data = conn.read(spreadsheet=GSHEETS_URL, worksheet="Sheet1", ttl=0)
59
 
60
  new_entry = pd.DataFrame([{
 
79
  def load_engine(path):
80
  return pipeline("sentiment-analysis", model=path, tokenizer=path)
81
 
82
+ # --- UI LAYOUT ---
83
  col_h1, col_h2 = st.columns([3, 1])
84
  with col_h1:
85
  st.title("Sentiment Analyzer")
86
  st.caption("Advanced Bilingual Sentiment Analysis for English, Hindi & Hinglish")
87
 
88
+ # Sidebar Stats
89
  with st.sidebar:
90
  st.markdown("### πŸ› οΈ ENGINE STATUS")
91
  try:
92
  df_log = conn.read(spreadsheet=GSHEETS_URL, worksheet="Sheet1", ttl=0)
93
+ st.metric("Total Ingested", len(df_log))
94
  st.divider()
95
  st.download_button("πŸ“€ Export Dataset", df_log.to_csv(index=False), "engine_feedback.csv", "text/csv")
96
  except:
97
  st.info("Engine is connecting to cloud...")
98
 
99
+ # Main Logic
100
  classifier = load_engine(MODEL_PATH)
101
 
102
  if classifier: