# Source provenance (Hugging Face file view residue, kept as a comment so the
# file remains valid Python): author PraneshJs, commit 2eebd4f (verified),
# message "removed analytics feature", file size 45 kB.
import os
import pandas as pd
import gspread
from google.auth.transport.requests import Request
from google.oauth2.credentials import Credentials
from google_auth_oauthlib.flow import InstalledAppFlow
import json
import gradio as gr
import time
from datetime import datetime
from pytz import timezone
import threading
from dotenv import load_dotenv
# Load environment variables from a local .env file (if present).
load_dotenv()
# IST timezone used for user-facing timestamps and watcher heartbeats.
ist = timezone("Asia/Kolkata")
# OAuth scopes: read-only access to Google Sheets.
SCOPES = ["https://www.googleapis.com/auth/spreadsheets.readonly"]
def authorize():
    """Build an authorized gspread client from env-provided OAuth credentials.

    Reads TOKEN_JSON (a previously issued user token) and CREDENTIALS_JSON
    (OAuth client secrets) from environment variables. When no valid token is
    available it refreshes an expired one, or falls back to an interactive
    local-server OAuth flow.

    Returns:
        An authorized gspread client with read-only Sheets scope.

    Raises:
        ValueError: if an OAuth flow is needed but CREDENTIALS_JSON is
            missing or is not valid JSON.
    """
    creds = None
    # Get JSON content from environment variables
    token_json_content = os.getenv('TOKEN_JSON')
    credentials_json_content = os.getenv('CREDENTIALS_JSON')
    # Load token from environment variable if exists
    if token_json_content:
        try:
            token_info = json.loads(token_json_content)
            creds = Credentials.from_authorized_user_info(token_info, SCOPES)
        except json.JSONDecodeError:
            print("⚠️ Invalid TOKEN_JSON format in environment variable")
    # If no valid credentials, refresh or start OAuth flow
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            if not credentials_json_content:
                raise ValueError("CREDENTIALS_JSON environment variable is required for OAuth flow")
            try:
                credentials_info = json.loads(credentials_json_content)
                flow = InstalledAppFlow.from_client_config(credentials_info, SCOPES)
                # NOTE(review): run_local_server is interactive (blocks, opens
                # a browser) — presumably unsuitable for headless deploys.
                creds = flow.run_local_server(port=0)
            except json.JSONDecodeError:
                raise ValueError("Invalid CREDENTIALS_JSON format in environment variable")
        # Save token back to environment (for this session only)
        # Note: You may want to update your .env file manually with the new token
        print("🔄 New token generated. Consider updating TOKEN_JSON in your .env file with:")
        print(f"TOKEN_JSON={creds.to_json()}")
    return gspread.authorize(creds)
# Read a single worksheet (by gid) into a DataFrame.
def get_sheet_data(spreadsheet, gid, sheet_name):
    """Load one worksheet into a DataFrame, using row index 4 as headers.

    Blank headers become ``Empty_Col_<i>``; duplicate headers get a numeric
    suffix. Columns that are entirely empty strings are dropped. Returns an
    empty DataFrame on any problem.
    """
    try:
        worksheet = spreadsheet.get_worksheet_by_id(gid)
        values = worksheet.get_all_values()
        if not values or len(values) <= 4:
            print(f"⚠️ {sheet_name} sheet doesn't have enough data")
            return pd.DataFrame()
        # Row index 4 holds the real column names; normalize and de-duplicate.
        header_counts = {}
        columns = []
        for position, raw in enumerate(values[4]):
            name = raw.strip()
            if not name:
                columns.append(f"Empty_Col_{position}")
                continue
            if name in header_counts:
                header_counts[name] += 1
                columns.append(f"{name}_{header_counts[name]}")
            else:
                header_counts[name] = 0
                columns.append(name)
        # Data begins immediately after the header row.
        body = values[5:]
        if not body:
            print(f"⚠️ No data rows found in {sheet_name}")
            return pd.DataFrame()
        frame = pd.DataFrame(body, columns=columns)
        # Drop columns whose every cell is the empty string.
        frame = frame.loc[:, (frame != '').any(axis=0)]
        print(f"✅ Loaded {len(frame)} rows from {sheet_name}")
        return frame
    except Exception as e:
        print(f"❌ Error loading {sheet_name}: {str(e)}")
        return pd.DataFrame()
# Fetch the raw grid of the studentwise reward-points worksheet.
def get_studentwise_data(spreadsheet):
    """Return the full cell grid of 'Studentwise Reward Points'.

    Returns the list-of-rows on success, or None when the sheet is missing
    or has fewer than 3 rows (header + separator + at least one data row).
    """
    try:
        rows = spreadsheet.worksheet("Studentwise Reward Points").get_all_values()
    except Exception as e:
        print(f"❌ Error loading Studentwise Reward Points: {str(e)}")
        return None
    if len(rows) < 3:
        print("⚠️ Studentwise Reward Points sheet doesn't have enough data")
        return None
    print(f"✅ Loaded {len(rows)} rows from Studentwise Reward Points")
    return rows
# Fetch the reward-points activity worksheet as a DataFrame.
def load_reward_points_data():
    """Load and cache reward points activity data.

    Opens the spreadsheet identified by the REWARD_POINTS_SHEET_ID env var
    and reads the activity worksheet (fixed gid). The first row supplies the
    column names. Returns a DataFrame, or None on any failure.
    """
    try:
        sheet_id = os.getenv('REWARD_POINTS_SHEET_ID')
        if not sheet_id:
            print("⚠️ REWARD_POINTS_SHEET_ID not found in environment variables")
            return None
        gc = authorize()
        book = gc.open_by_key(sheet_id)
        activity_ws = book.get_worksheet_by_id(1113414351)  # Activity Sheet GID
        grid = activity_ws.get_all_values()
        if not grid or len(grid) < 2:
            print("⚠️ Reward Points sheet doesn't have enough data")
            return None
        # First row is the header; everything after it is data.
        frame = pd.DataFrame(grid[1:], columns=grid[0])
        if frame.empty:
            print("⚠️ Reward Points sheet is empty")
            return None
        print(f"✅ Loaded {len(frame)} rows from Reward Points Entry sheet")
        return frame
    except Exception as e:
        print(f"❌ Error loading Reward Points data: {str(e)}")
        return None
# Function to get activity details from cached data in breakdown format
def _parse_points(raw):
    """Parse a points cell such as '1,234' into a float; 0 on blank/garbage."""
    text = str(raw)
    try:
        return float(text.replace(',', '')) if text else 0
    except ValueError:
        return 0


def get_activity_details(roll_no, reward_points_df):
    """Build a per-category activity breakdown for one student.

    Args:
        roll_no: student roll number (matched case-insensitively; falls back
            to substring matching when there is no exact match).
        reward_points_df: cached DataFrame of the reward-points entry sheet,
            or None.

    Returns:
        A formatted multi-line string, or "" when the student or the data
        is unavailable.
    """
    try:
        if reward_points_df is None or reward_points_df.empty:
            return ""
        roll_no_search = roll_no.strip().upper()
        # Locate the roll-number column (header containing both "roll" and
        # "no"), falling back to the first column.
        roll_col = next(
            (c for c in reward_points_df.columns
             if 'roll' in c.lower() and 'no' in c.lower()),
            reward_points_df.columns[0],
        )
        # Work on a copy so the shared cached DataFrame is never mutated.
        df = reward_points_df.copy()
        df[roll_col] = df[roll_col].astype(str).str.strip().str.upper()
        student_rows = df[df[roll_col] == roll_no_search]
        if student_rows.empty:
            # Fall back to partial (substring) matching.
            student_rows = df[df[roll_col].str.contains(roll_no_search, na=False)]
            if student_rows.empty:
                return ""
        # Aggregate points and entry counts per raw activity type.
        activity_summary = {}
        activity_count = {}
        for _, row in student_rows.iterrows():
            activity_type = str(row.get('Activity Type', 'N/A'))
            points_val = _parse_points(row.get('Reward Points', '0'))
            activity_summary[activity_type] = activity_summary.get(activity_type, 0) + points_val
            activity_count[activity_type] = activity_count.get(activity_type, 0) + 1
        # Canonical display order for known activity categories.
        activity_categories = [
            "INITIAL POINTS / CARRY-OVER",
            "TECHNICAL EVENTS",
            "SKILLS",
            "ASSIGNMENTS",
            "INTERVIEW",
            "TECHNICAL SOCIETY ACTIVITIES",
            "P SKILL",
            "TAC",
            "SPECIAL LAB INITIATIVES",
            "EXTRA-CURRICULAR ACTIVITIES",
            "STUDENT INITIATIVES",
            "EXTERNAL EVENTS",
            "EXTERNAL TECHNICAL EVENTS"
        ]
        # Fold each raw activity type into a canonical category via
        # case-insensitive substring matching (either direction); unmatched
        # types keep their original name.
        final_summary = {}
        final_count = {}
        for activity_type, points in activity_summary.items():
            activity_upper = activity_type.upper()
            category = next(
                (c for c in activity_categories
                 if c.upper() in activity_upper or activity_upper in c.upper()),
                activity_type,
            )
            final_summary[category] = final_summary.get(category, 0.0) + points
            final_count[category] = final_count.get(category, 0) + activity_count[activity_type]
        output = []
        # Display all canonical categories in order (including zeros).
        # BUGFIX: the original computed these values but never appended them,
        # so the breakdown was silently omitted from the output.
        for category in activity_categories:
            count = final_count.get(category, 0)
            points = final_summary.get(category, 0.0)
            output.append(f"📋 **{category}**")
            output.append(f" Count: {count} | Points: {points:.2f}")
        # Then any non-standard categories that were found.
        for category, points in final_summary.items():
            if category not in activity_categories:
                count = final_count.get(category, 0)
                output.append(f"📋 **{category}**")
                output.append(f" Count: {count} | Points: {points:.2f}")
        output.append("")
        # Detailed per-entry list, only for a manageable number of rows.
        if len(student_rows) <= 20:
            output.append("📋 DETAILED ACTIVITY LIST")
            output.append("=" * 80)
            for idx, (_, row) in enumerate(student_rows.iterrows(), 1):
                activity_type = str(row.get('Activity Type', 'N/A'))
                activity_name = str(row.get('Activity Name', 'N/A'))
                points_val = _parse_points(row.get('Reward Points', '0'))
                # BUGFIX: truncate consistently at 50 chars (was [:50] guarded
                # by a mismatched `> 63` length test).
                display_name = activity_name[:50] + "..." if len(activity_name) > 50 else activity_name
                output.append(f"{idx:2d}. {activity_type}: {display_name} - {points_val:.2f} pts")
        output.append("=" * 80)
        return "\n".join(output)
    except Exception as e:
        print(f"❌ Error fetching activity details: {str(e)}")
        return ""
# Function to get details sheet information
def get_details_info(spreadsheet):
    """Extract summary info from the details worksheet (fixed gid 847680829).

    Scans the rows below the header (row index 4) for markers and collects:
      - 'average_points': published year-wise averages (I, II, II L, III, IV)
      - 'ip1_redemption' / 'ip2_redemption': semester dates S1..S8
      - 'last_updated': the 'POINTS LAST UPDATED' stamp text

    Returns:
        dict with any of the keys above (possibly empty), or None when the
        sheet is too short or loading fails.
    """
    try:
        details_sheet = spreadsheet.get_worksheet_by_id(847680829)
        all_values = details_sheet.get_all_values()
        # BUGFIX: headers live at row index 4, so a sheet with 1-4 rows used
        # to raise IndexError (masked by the broad except). Guard the length.
        if not all_values or len(all_values) <= 4:
            return None
        headers = all_values[4]
        clean_headers = [
            header.strip() if header.strip() else f"Empty_Col_{i}"
            for i, header in enumerate(headers)
        ]
        # Data rows begin immediately after the header row.
        data_rows = all_values[5:]
        if not data_rows:
            return None
        df = pd.DataFrame(data_rows, columns=clean_headers)
        # Drop columns that are entirely empty strings.
        df = df.loc[:, (df != '').any(axis=0)]
        details_info = {}
        for _, student_data in df.iterrows():
            year_value = str(student_data.get('YEAR', '')).strip()
            redemption_value = str(student_data.get('Redemption Dates', ''))
            # Published year-wise averages.
            if 'AVERAGE REWARD POINT' in year_value:
                details_info['average_points'] = {
                    'I': student_data.get('I', ''),
                    'II': student_data.get('II', ''),
                    'II L': student_data.get('II L', ''),
                    'III': student_data.get('III', ''),
                    'IV': student_data.get('IV', '')
                }
            # IP 2 redemption window (checked before IP 1 so the more
            # specific marker wins when both substrings could appear).
            elif 'Last Day for IP 2 Redemption Duration' in redemption_value:
                details_info['ip2_redemption'] = {
                    f'S{i}': student_data.get(f'S{i}', '') for i in range(1, 9)
                }
            # IP 1 redemption window.
            elif 'Last Day for IP 1 Redemption Duration' in redemption_value:
                details_info['ip1_redemption'] = {
                    f'S{i}': student_data.get(f'S{i}', '') for i in range(1, 9)
                }
            # Freshness stamp.
            elif 'POINTS LAST UPDATED' in year_value:
                details_info['last_updated'] = year_value
        return details_info
    except Exception as e:
        print(f"❌ Error loading Details Sheet: {str(e)}")
        return None
# ---- Application bootstrap: authorize and open the source spreadsheets ----
print("🚀 Initializing application...")
client = authorize()
# Spreadsheet IDs come from the environment.
MAIN_SHEET_ID = os.getenv('GOOGLE_SHEET_ID')  # Your main sheets (20 sheets)
STUDENTWISE_SHEET_ID = os.getenv('STUDENTWISE_SHEET_ID')  # Studentwise Reward Points sheet
if not MAIN_SHEET_ID:
    raise ValueError("GOOGLE_SHEET_ID environment variable is required")
# Open both spreadsheets; these handles are reused by the loaders below.
# NOTE(review): STUDENTWISE_SHEET_ID is not validated like MAIN_SHEET_ID —
# a missing value surfaces here as a gspread error; confirm intended.
main_spreadsheet = client.open_by_key(MAIN_SHEET_ID)
studentwise_spreadsheet = client.open_by_key(STUDENTWISE_SHEET_ID)
# Worksheet GIDs within the main spreadsheet, each with a display name used
# for logging and the Source_Sheet tag (Original 3 + New 17 = 20 sheets).
sheet_configs = [
    # Original sheets
    {"gid": 688907204, "name": "AIML"},
    {"gid": 451167295, "name": "AIDS"},
    {"gid": 1955995189, "name": "Sheet_3"},
    {"gid": 821473193, "name": "Sheet_4"},
    {"gid": 1798819643, "name": "Sheet_5"},
    {"gid": 1057532042, "name": "Sheet_6"},
    {"gid": 1848020834, "name": "Sheet_7"},
    {"gid": 48570283, "name": "Sheet_8"},
    {"gid": 559332743, "name": "Sheet_9"},
    {"gid": 1481375682, "name": "Sheet_10"},
    {"gid": 1136877763, "name": "Sheet_11"},
    {"gid": 510521423, "name": "Sheet_12"},
    {"gid": 1936618, "name": "Sheet_13"},
    {"gid": 91989289, "name": "Sheet_14"},
    {"gid": 30073516, "name": "Sheet_15"},
    {"gid": 857542309, "name": "Sheet_16"},
    {"gid": 790318539, "name": "Sheet_17"},
    {"gid": 587090068, "name": "Sheet_18"},
    {"gid": 260192612, "name": "Sheet_19"},
    {"gid": 400900059, "name": "Sheet_20"}
]
# GLOBAL DATA CACHE WITH 12-HOUR AUTO-REFRESH
# Single shared cache, mutated by load_all_data() and read by the UI handlers
# and background threads.
# NOTE(review): access is not guarded by a lock; "is_loading" is a best-effort
# re-entrancy flag only — confirm that is acceptable for this workload.
data_cache = {
    "combined_df": None,         # DataFrame of all main sheets combined
    "studentwise_data": None,    # raw grid from the studentwise sheet
    "details_info": None,        # dict from get_details_info()
    "reward_points_df": None,    # DataFrame from load_reward_points_data()
    "last_update": None,         # datetime of last successful load
    "cache_duration_hours": 12,  # 12 hours cache
    "is_loading": False          # set while a load is in progress
}
def load_all_data():
    """Load and cache all data from Google Sheets (including reward points data).

    Pulls every configured worksheet from the main spreadsheet, combines them
    into one DataFrame (with a column-standardizing fallback when headers
    differ), then loads the studentwise grid, details-sheet summary, and
    reward-points DataFrame, publishing everything into ``data_cache``.

    Returns:
        (combined_df, studentwise_data, details_info, reward_points_df);
        on error, returns whatever is currently cached instead.

    NOTE(review): the ``is_loading`` check-then-set below is not atomic (no
    lock), so two threads could race past it — confirm acceptable here.
    """
    global data_cache
    if data_cache["is_loading"]:
        print("⏳ Data loading already in progress...")
        return (data_cache["combined_df"], data_cache["studentwise_data"],
                data_cache["details_info"], data_cache["reward_points_df"])
    data_cache["is_loading"] = True
    print(f"🔄 Loading fresh data from {len(sheet_configs)} Google Sheets + Reward Points sheet...")
    start_time = time.time()
    try:
        # Load all sheet data from main spreadsheet, tagging each frame with
        # its source sheet name.
        all_dataframes = []
        for config in sheet_configs:
            df = get_sheet_data(main_spreadsheet, config["gid"], config["name"])
            if not df.empty:
                df['Source_Sheet'] = config["name"]
                all_dataframes.append(df)
                print(f" 📋 {config['name']}: {len(df)} rows")
        # Combine dataframes
        if all_dataframes:
            try:
                combined_df = pd.concat(all_dataframes, ignore_index=True, sort=False)
                print(f"✅ Successfully combined {len(combined_df)} records from {len(all_dataframes)} sheets")
            except Exception as e:
                print(f"❌ Error combining dataframes: {str(e)}")
                print("🔄 Trying alternative approach...")
                # Alternative approach: map every sheet onto one fixed column
                # set, using fuzzy (substring) header matching both ways.
                standard_columns = ['SL. NO.', 'YEAR', 'ROLL NO.', 'STUDENT NAME', 'COURSE CODE',
                                    'DEPARTMENT', 'MENTOR NAME', 'CUMULATIVE REWARD POINTS',
                                    'REEDEMED POINTS', 'BALANCE POINTS', 'Source_Sheet']
                standardized_dfs = []
                for df in all_dataframes:
                    new_df = pd.DataFrame()
                    for col in standard_columns:
                        if col == 'Source_Sheet':
                            new_df[col] = df.get('Source_Sheet', '')
                        else:
                            # Try to find matching column
                            found_col = None
                            for df_col in df.columns:
                                if col.upper() in df_col.upper() or df_col.upper() in col.upper():
                                    found_col = df_col
                                    break
                            if found_col:
                                new_df[col] = df[found_col]
                            else:
                                new_df[col] = ''
                    standardized_dfs.append(new_df)
                combined_df = pd.concat(standardized_dfs, ignore_index=True, sort=False)
                print(f"✅ Alternative approach successful: {len(combined_df)} records combined")
        else:
            combined_df = pd.DataFrame()
            print("❌ No data found in any sheets")
        # Load studentwise reward points data from separate spreadsheet
        studentwise_data = get_studentwise_data(studentwise_spreadsheet)
        # Load details info from main spreadsheet
        details_info = get_details_info(main_spreadsheet)
        # Load reward points activity data
        reward_points_df = load_reward_points_data()
        # Publish everything to the shared cache and stamp the load time.
        data_cache["combined_df"] = combined_df
        data_cache["studentwise_data"] = studentwise_data
        data_cache["details_info"] = details_info
        data_cache["reward_points_df"] = reward_points_df
        data_cache["last_update"] = datetime.now()
        load_time = time.time() - start_time
        print(f"⏱️ Data loaded and cached in {load_time:.2f} seconds")
        print(f"📊 Next auto-refresh in {data_cache['cache_duration_hours']} hours")
        return combined_df, studentwise_data, details_info, reward_points_df
    except Exception as e:
        print(f"❌ Error loading data: {str(e)}")
        # Fall back to whatever is already cached (possibly None/empty).
        return (data_cache.get("combined_df", pd.DataFrame()),
                data_cache.get("studentwise_data", None),
                data_cache.get("details_info", None),
                data_cache.get("reward_points_df", None))
    finally:
        data_cache["is_loading"] = False
def get_cached_data():
    """Get data from cache or refresh if 12 hours have passed."""
    now = datetime.now()
    last = data_cache["last_update"]
    # The cache is stale when never filled, or older than the configured TTL.
    stale = (
        last is None
        or data_cache["combined_df"] is None
        or (now - last).total_seconds() > data_cache["cache_duration_hours"] * 3600
    )
    if stale:
        print("🔄 Cache expired or empty, loading fresh data...")
        return load_all_data()
    cache_age_hours = (now - last).total_seconds() / 3600
    print(f"🚀 Using cached data (age: {cache_age_hours:.1f} hours)")
    return (data_cache["combined_df"], data_cache["studentwise_data"],
            data_cache["details_info"], data_cache["reward_points_df"])
def auto_refresh_worker():
    """Daemon loop: reload all data every 12 hours; back off 1h on errors."""
    refresh_seconds = 43200  # 12 hours
    retry_seconds = 3600     # 1 hour back-off after a failure
    while True:
        try:
            time.sleep(refresh_seconds)
            print("⏰ 12-hour auto-refresh triggered...")
            load_all_data()
        except Exception as e:
            print(f"❌ Auto-refresh error: {str(e)}")
            time.sleep(retry_seconds)
def details_sheet_watcher():
    """Background watcher: polls the details sheet every 30 seconds and
    triggers a full data reload when the 'POINTS LAST UPDATED' marker changes.

    Maintains its own gspread connection (refreshed every 45 minutes) and
    backs off for 5 minutes after ``max_errors`` consecutive failures.
    """
    last_seen_update = None      # last 'POINTS LAST UPDATED' text observed
    consecutive_errors = 0
    max_errors = 3
    watcher_client = None
    watcher_spreadsheet = None
    last_connection_time = None
    connection_duration = 2700   # 45 minutes
    check_interval = 30          # seconds
    print(f"👀 Starting optimized details sheet watcher (checks every {check_interval} seconds)...")
    while True:
        try:
            # Don't poll while a reload is already running.
            if data_cache["is_loading"]:
                print("⏳ Watcher: Skipping check - data loading in progress")
                time.sleep(check_interval)
                continue
            current_time = datetime.now()
            # (Re)establish the connection when missing or stale.
            if (watcher_client is None or
                    watcher_spreadsheet is None or
                    last_connection_time is None or
                    (current_time - last_connection_time).total_seconds() > connection_duration):
                try:
                    print("🔄 Watcher: Refreshing connection...")
                    watcher_client = authorize()
                    watcher_spreadsheet = watcher_client.open_by_key(os.getenv('GOOGLE_SHEET_ID'))
                    last_connection_time = current_time
                    print("✅ Watcher: Connection refreshed")
                except Exception as auth_error:
                    print(f"⚠️ Watcher connection error: {str(auth_error)[:100]}...")
                    consecutive_errors += 1
                    watcher_client = None
                    watcher_spreadsheet = None
                    time.sleep(check_interval)
                    continue
            try:
                details_info = get_details_info(watcher_spreadsheet)
            except Exception as sheet_error:
                print(f"⚠️ Watcher sheet error: {str(sheet_error)[:100]}...")
                consecutive_errors += 1
                # Drop the connection so the next pass re-authorizes.
                watcher_client = None
                watcher_spreadsheet = None
                time.sleep(check_interval)
                continue
            if details_info and 'last_updated' in details_info:
                current_update = details_info['last_updated'].strip()
                if last_seen_update is None:
                    # First observation: remember the baseline only.
                    last_seen_update = current_update
                    print(f"🕒 Watcher: Monitoring established")
                    print(f" Current: {current_update[:80]}...")
                elif current_update != last_seen_update:
                    print(f"🔄 CHANGE DETECTED!")
                    print(f" Old: {last_seen_update[:60]}...")
                    print(f" New: {current_update[:60]}...")
                    last_seen_update = current_update
                    if not data_cache["is_loading"]:
                        print("🔄 Reloading data directly...")
                        try:
                            load_all_data()
                            print("✅ Data reload completed successfully")
                        except Exception as reload_error:
                            print(f"❌ Reload error: {str(reload_error)[:100]}...")
                    else:
                        print("⏳ Data already loading, skipping reload")
                else:
                    # Heartbeat roughly every 5 minutes.
                    # NOTE(review): mixes naive datetime.now().minute with the
                    # IST-aware second — confirm the intended cadence.
                    if datetime.now().minute % 5 == 0 and datetime.now(ist).second < check_interval:
                        print(f"✅ Watcher: No changes detected ({datetime.now(ist).strftime('%H:%M')})")
            else:
                print("⚠️ Watcher: Could not extract last_updated info")
                consecutive_errors += 1
            # NOTE(review): this reset also fires immediately after the
            # increment above (details_info truthy but lacking 'last_updated'),
            # so "Connection restored" may print prematurely — verify intended.
            if details_info and consecutive_errors > 0:
                print(f"✅ Watcher: Connection restored (cleared {consecutive_errors} errors)")
                consecutive_errors = 0
        except Exception as e:
            consecutive_errors += 1
            print(f"⚠️ Watcher error #{consecutive_errors}: {str(e)[:100]}...")
            watcher_client = None
            watcher_spreadsheet = None
            if consecutive_errors >= max_errors:
                error_wait = 300  # 5 minutes
                print(f"❌ Too many watcher errors, waiting {error_wait//60} minutes...")
                time.sleep(error_wait)
                consecutive_errors = 0
        time.sleep(check_interval)
def get_detailed_student_points(roll_no, studentwise_data):
    """Get detailed points breakdown from studentwise data.

    Args:
        roll_no: roll number, matched case-insensitively against column 1.
        studentwise_data: raw grid (list of rows) from the studentwise sheet;
            row 0 is the header, data starts at row index 2.

    Returns:
        Formatted multi-line breakdown string, or "" when the data or the
        student is missing.
    """
    if not studentwise_data or len(studentwise_data) < 3:
        return ""
    headers = studentwise_data[0]
    target = roll_no.strip().upper()
    # Roll numbers live in the second column (index 1).
    student_found = None
    for row in studentwise_data[2:]:
        if len(row) > 1 and row[1].strip().upper() == target:
            student_found = row
            break
    if not student_found:
        return ""
    # Zip headers with the matched row, tolerating short rows.
    student_data = {
        header: student_found[i] if i < len(student_found) else ""
        for i, header in enumerate(headers)
    }
    output = []
    output.append("")
    output.append("🏆 REWARD POINTS BREAKDOWN")
    output.append("=" * 80)
    # (display label, count column, points column); "-" means no count column.
    categories = [
        ("INITIAL POINTS / CARRY-OVER", "-", "Initial Points"),
        ("TECHNICAL EVENTS", "Technical Events Count", "Technical Events Points"),
        ("SKILLS", "Skill Count", "Skill Points"),
        ("ASSIGNMENTS", "Assignement Count", "Assignment Points"),
        ("INTERVIEW", "Interview Count", "Interview Points"),
        ("TECHNICAL SOCIETY ACTIVITIES", "TECHNICAL SOCIETY ACTIVITIES Count", "TECHNICAL SOCIETY ACTIVITIES Points"),
        ("P SKILL", "P Skill Count", "P Skill Points"),
        ("TAC", "TAC Count", "TAC Points"),
        ("SPECIAL LAB INITIATIVES", "Special Lab Initiatives Count", "Special Lab Initiatives Points"),
        ("EXTRA-CURRICULAR ACTIVITIES", "EXTRA-CURRICULAR ACTIVITIES COUNT", "EXTRA-CURRICULAR ACTIVITIES POINTS"),
        ("STUDENT INITIATIVES", "STUDENT INITIATIVES COUNT", "STUDENT INITIATIVES POINTS"),
        ("EXTERNAL EVENTS", "EXTERNAL EVENTS COUNT", "EXTERNAL EVENTS POINTS"),
        ("TOTAL (2023-2024 EVEN)", "Total Count", "Total Points"),
        ("PENALTIES", "Negative Count", "Negative Points"),
        ("CUMULATIVE POINTS", "-", "Cumulative Points"),
        ("INNOVATIVE PRACTICE - 1 (IP-1)", "-", "IP 1 R"),
        ("INNOVATIVE PRACTICE - 2 (IP-2)", "-", "IP 2 R"),
        ("REDEEMED POINTS", "-", "Redeemed Points"),
        ("BALANCE POINTS", "-", "Balance Points"),
        ("CARRY FORWARD TO NEXT SEMESTER", "-", "EL. CA. FR. POINTS")
    ]
    # CLEANUP: removed unused enumerate index and the total_earned /
    # total_redeemed accumulators the original computed but never used.
    for category_name, count_key, points_key in categories:
        count_val = "-" if count_key == "-" else student_data.get(count_key, "0")
        points_val = student_data.get(points_key, "0.00")
        # Normalize numeric points to two decimals; leave non-numeric as-is.
        if points_val and points_val != "-":
            try:
                points_val = f"{float(str(points_val).replace(',', '')):.2f}"
            except ValueError:
                pass
        output.append(f"📋 **{category_name}**")
        output.append(f" Count: {count_val} | Points: {points_val}")
    output.append("=" * 80)
    return "\n".join(output)
def calculate_yearwise_average_points(combined_df=None):
    """Calculate year-wise average points from the combined data.

    Args:
        combined_df: optional DataFrame to analyze. When None (the default,
            preserving the original call signature) the cached combined data
            is used.

    Returns:
        Formatted report string, or a ❌/⚠️ message when data or the required
        columns are missing.
    """
    if combined_df is None:
        combined_df, _, _, _ = get_cached_data()
    if combined_df.empty:
        return "❌ No data available to calculate averages"
    # Auto-detect columns by name (last match wins, as in the original).
    year_col, points_col = None, None
    for col in combined_df.columns:
        if 'year' in col.lower():
            year_col = col
        if 'balance' in col.lower() and 'points' in col.lower():
            points_col = col
    if not year_col or not points_col:
        return "⚠️ Required columns not found in the data"
    # Work on a narrow copy; strip thousands separators and coerce to numeric.
    df = combined_df[[year_col, points_col]].copy()
    df[points_col] = pd.to_numeric(
        df[points_col].astype(str).str.replace(',', '').str.strip(),
        errors='coerce'
    )
    df.dropna(subset=[points_col], inplace=True)
    # Exclude zero/negative balances for more accurate averages.
    df = df[df[points_col] > 0]
    if df.empty:
        return "⚠️ No valid points data found for calculation"
    # CLEANUP: the original aggregated sum/count/min/max too but only ever
    # displayed the mean — compute just the mean.
    yearwise = df.groupby(year_col)[points_col].mean().reset_index()
    output = []
    output.append("=" * 90)
    output.append(" ")
    output.append("📊 YEAR-WISE AVERAGE REWARD POINTS (CALCULATED)")
    output.append("-" * 90)
    for _, row in yearwise.iterrows():
        year = str(row[year_col]).strip()
        avg = f"{row[points_col]:.2f}"
        output.append(f"Year {year:<10} {avg}")
    output.append("=" * 90)
    return "\n".join(output)
# ---- Startup: initial load and background maintenance threads ----
print("📊 Loading initial data...")
load_all_data()
# Background thread: full reload every 12 hours.
refresh_thread = threading.Thread(target=auto_refresh_worker, daemon=True)
refresh_thread.start()
print("🕒 Auto-refresh thread started (updates every 12 hours)")
# Background thread: watch the details sheet for a change marker.
watcher_thread = threading.Thread(target=details_sheet_watcher, daemon=True)
watcher_thread.start()
# BUGFIX: the message previously said "every 1 minute", but the watcher's
# check_interval is 30 seconds.
print("👀 Details sheet watcher started (checks every 30 seconds)")
# Function to search student with cached data
def search_student(roll_no):
    """Look up a student by roll number and format their full report.

    Combines the main-sheet record, a year-average comparison, the
    per-activity breakdown, the studentwise breakdown, and cache freshness
    info into one human-readable text block for the Gradio UI.

    Args:
        roll_no: roll number typed by the user (matched case-insensitively).

    Returns:
        Formatted report string, or a ❌ message when input/data is missing.
    """
    if not roll_no.strip():
        return "❌ Please enter a roll number"
    # Convert roll number to uppercase for consistent searching
    roll_no = roll_no.strip().upper()
    # Get cached data (fast response, auto-refreshes every 12 hours)
    combined_df, studentwise_data, details_info, reward_points_df = get_cached_data()
    if combined_df.empty:
        return "❌ No data available from Google Sheets"
    # Look for 'ROLL NO.' column (first header containing "roll" and "no").
    roll_column = None
    for col in combined_df.columns:
        if 'roll' in col.lower() and 'no' in col.lower():
            roll_column = col
            break
    if roll_column is None:
        return f"❌ Roll number column not found. Available columns: {list(combined_df.columns)}"
    # Convert the roll numbers in DataFrame to uppercase for comparison
    student = combined_df[combined_df[roll_column].astype(str).str.strip().str.upper() == roll_no]
    if student.empty:
        return f"❌ Roll No '{roll_no}' not found in any sheet"
    record = student.iloc[0].to_dict()
    student_name = str(record.get('STUDENT NAME', 'Unknown')).strip()
    student_year = str(record.get('YEAR', '')).strip()
    now_ist = datetime.now(ist).strftime("%Y-%m-%d %H:%M:%S")
    # Audit log: which roll number / student was searched and when (IST).
    print(f"Roll No Searched: {roll_no} | Student Name: {student_name} | Time (IST): {now_ist}")
    # ---- Build the report ----
    output = []
    output.append(f"Hello {student_name} 👋")
    output.append("=" * 80)
    output.append("YOUR DETAILS")
    output.append("=" * 80)
    # Main student details (blank fields are skipped).
    main_fields = ['ROLL NO.', 'STUDENT NAME', 'YEAR', 'DEPARTMENT', 'MENTOR NAME',
                   'CUMULATIVE REWARD POINTS', 'REEDEMED POINTS', 'BALANCE POINTS']
    for field in main_fields:
        value = record.get(field, '')
        if str(value).strip():
            output.append(f"{field:<25}: {value}")
    # Parse the student's balance as a number (0 when missing/garbled).
    # NOTE(review): bare except silently maps parse errors to 0 — intentional?
    try:
        student_points_str = str(record.get('BALANCE POINTS', '')).replace(',', '').strip()
        student_points = float(student_points_str) if student_points_str else 0
    except:
        student_points = 0
    # Year-average comparison and earn-more guidance.
    if details_info and 'average_points' in details_info:
        output.append("\n" + "=" * 80)
        output.append(f"AVERAGE REWARD POINTS FOR YEAR {student_year}")
        output.append("=" * 80)
        if student_year in details_info['average_points']:
            avg_points_str = details_info['average_points'][student_year]
            try:
                avg_points = float(avg_points_str) if avg_points_str else 0
            except:
                avg_points = 0
            if avg_points > 0:
                output.append(f"Average Points for Year {student_year:<8}: {avg_points_str}")
                # Positive difference means the student is below average.
                points_difference = avg_points - student_points
                if points_difference > 0:
                    # Student is below average
                    output.append(f"\n🎯 POINTS NEEDED TO REACH AVERAGE: {points_difference:.0f} points")
                    output.append("\n💡 WAYS TO EARN POINTS:")
                    output.append(" • PS Activities")
                    output.append(" • TAC")
                    output.append(" • Hackathons / Technical Events")
                    output.append(" • Project Competitions")
                    output.append(" • Refer Reward points Breakdown for more details")
                else:
                    # Student is at or above average
                    output.append(f"\n🎉 EXCELLENT! You are {abs(points_difference):.0f} points ABOVE the average!")
                    output.append(" Keep up the great work! 🌟")
                    output.append(" Refer Reward points Breakdown for more details")
    # Per-activity details from the cached reward-points entry sheet.
    activity_details = get_activity_details(roll_no, reward_points_df)
    if activity_details:
        output.append(activity_details)
    # Category breakdown from the studentwise sheet.
    detailed_points = get_detailed_student_points(roll_no, studentwise_data)
    if detailed_points:
        output.append(detailed_points)
    # Footer: when the sheet data was last updated.
    if details_info and 'last_updated' in details_info:
        output.append("\n" + "-" * 60)
        output.append("LAST UPDATE INFO")
        output.append("-" * 60)
        output.append(details_info['last_updated'])
    # Cache freshness / next auto-refresh estimate.
    if data_cache["last_update"]:
        cache_age = datetime.now() - data_cache["last_update"]
        hours = cache_age.total_seconds() / 3600
        next_refresh_hours = 12 - hours
        output.append(f"\n📊 Data age: {hours:.1f} hours")
        if next_refresh_hours > 0:
            output.append(f"⏰ Next auto-refresh in: {next_refresh_hours:.1f} hours")
        else:
            output.append("⏰ Auto-refresh due now")
    output.append("\n" + "=" * 80)
    return "\n".join(output)
# Function to get system information
def get_system_info():
    """Format system-wide info for the UI: published and recalculated
    year-wise averages, redemption dates, last-update stamp, and cache age.

    Returns:
        Formatted multi-line string, or a ❌ message when no details-sheet
        info is cached.
    """
    combined_df, studentwise_data, details_info, reward_points_df = get_cached_data()
    if not details_info:
        return "❌ No system information available"
    output = []
    output.append("=" * 80)
    output.append("SYSTEM INFORMATION")
    output.append("=" * 80)
    # Published year-wise averages from the details sheet.
    if 'average_points' in details_info:
        output.append("\n🎯 AVERAGE REWARD POINTS BY YEAR:")
        output.append("-" * 40)
        for year, points in details_info['average_points'].items():
            if points:
                output.append(f"Year {year:<10}: {points}")
    # Averages recomputed from the raw combined data (skip error messages).
    calculated_averages = calculate_yearwise_average_points()
    if calculated_averages and not calculated_averages.startswith("❌") and not calculated_averages.startswith("⚠️"):
        output.append(calculated_averages)
    # Redemption windows ('-' entries are placeholders and skipped).
    if 'ip1_redemption' in details_info:
        output.append("\n📅 IP 1 REDEMPTION DATES:")
        output.append("-" * 40)
        for semester, date in details_info['ip1_redemption'].items():
            if date and date != '-':
                output.append(f"{semester:<10}: {date}")
    if 'ip2_redemption' in details_info:
        output.append("\n📅 IP 2 REDEMPTION DATES:")
        output.append("-" * 40)
        for semester, date in details_info['ip2_redemption'].items():
            if date and date != '-':
                output.append(f"{semester:<10}: {date}")
    if 'last_updated' in details_info:
        output.append(f"\n🕒 LAST UPDATED:")
        output.append("-" * 40)
        output.append(details_info['last_updated'])
    # Cache freshness / next auto-refresh estimate.
    if data_cache["last_update"]:
        cache_age = datetime.now() - data_cache["last_update"]
        hours = cache_age.total_seconds() / 3600
        next_refresh_hours = 12 - hours
        output.append(f"\n📊 Data age: {hours:.1f} hours")
        if next_refresh_hours > 0:
            output.append(f"⏰ Next auto-refresh in: {next_refresh_hours:.1f} hours")
        else:
            output.append("⏰ Auto-refresh due now")
    output.append("\n" + "=" * 80)
    return "\n".join(output)
# Create Gradio interface - SIMPLE LAYOUT WITHOUT ANALYTICS DISPLAY
with gr.Blocks(
    title="Student Reward Points Check",
    theme=gr.themes.Soft(),
) as app:
    # Header and usage notes.
    gr.Markdown("# 🎓 Student Reward Points Check")
    gr.Markdown("### Search for student details including reward points and redemption dates")
    gr.Markdown("### எல்லா புகழும் இறைவனுக்கே ✝ 🕉 ☪")
    gr.Markdown("🕒 **Auto-Updates**: Data automatically refreshes when there is a change in Reward Points Sheet")
    gr.Markdown("### Fill this form for any Issue/Feedback: [Issue/Feedback Form](https://docs.google.com/forms/d/e/1FAIpQLScnl0udcN2pUDENHl45HIj5HZbvDuwZ0g2eepBbp8tJYg-NvQ/viewform)")
    with gr.Tabs():
        # Tab 1: roll-number search, output rendered by search_student().
        with gr.TabItem("🔍 Student Search"):
            with gr.Row():
                with gr.Column(scale=3):
                    roll_input = gr.Textbox(
                        label="Enter Roll Number",
                        placeholder="e.g., 7376222AL181",
                        value=""
                    )
                with gr.Column(scale=1):
                    search_btn = gr.Button("🔍 Search Student", variant="primary")
            result_output = gr.Textbox(
                label="Student Details",
                lines=50,
                max_lines=60,
                show_copy_button=True,
                autoscroll=False
            )
        # Tab 2: system-wide info rendered by get_system_info().
        with gr.TabItem("ℹ️ System Information"):
            system_btn = gr.Button("📊 Get System Information", variant="secondary")
            system_output = gr.Textbox(
                label="System Information",
                lines=50,
                max_lines=60,
                show_copy_button=True,
                autoscroll=False,
                interactive=False,
                show_label=True
            )
    # Event handlers - simplified without analytics updates
    search_btn.click(fn=search_student, inputs=roll_input, outputs=result_output)
    roll_input.submit(fn=search_student, inputs=roll_input, outputs=result_output)
    system_btn.click(fn=get_system_info, outputs=system_output)
    # Footer section (static HTML credit block).
    gr.Markdown("---")
    with gr.Row():
        with gr.Column():
            gr.Markdown(
                """
                <div style="text-align: center; margin-top: 20px; padding: 20px; background: linear-gradient(135deg, #667eea 0%, #764ba2 100%); border-radius: 10px; color: white;">
                <h3 style="margin: 0; color: white;">💻 Developed with ❤️ by</h3>
                <h2 style="margin: 5px 0; color: #ffd700;">PRANESH S</h2>
                <div style="margin: 15px 0;">
                <a href="https://github.com/Pranesh-2005" target="_blank" style="color: #ffd700; text-decoration: none; margin: 0 10px; font-size: 16px;">
                🐱 GitHub
                </a>
                <span style="color: #ffd700;">|</span>
                <a href="https://www.linkedin.com/in/pranesh5264/" target="_blank" style="color: #ffd700; text-decoration: none; margin: 0 10px; font-size: 16px;">
                💼 LinkedIn
                </a>
                <span style="color: #ffd700;">|</span>
                <a href="https://mail.google.com/mail/?view=cm&fs=1&to=praneshmadhan646@gmail.com&su=Student%20Reward%20Points%20App%20-%20Feedback&body=Hi%20Pranesh,%0A%0AI%20am%20writing%20regarding%20the%20Student%20Reward%20Points%20application.%0A%0A" target="_blank" style="color: #ffd700; text-decoration: none; margin: 0 10px; font-size: 16px;">
                📧 Contact Developer
                </a>
                </div>
                <p style="margin: 10px 0; font-style: italic; color: #e0e0e0;">Made with 💝 Love and Support</p>
                <p style="margin: 5px 0; font-size: 14px; color: #b0b0b0;">🚀 Empowering students with instant reward points tracking</p>
                </div>
                """,
                elem_id="footer"
            )
# Launch the app (binds all interfaces for container deployment; PWA enabled).
# NOTE(review): debug=True in a deployed app exposes tracebacks — confirm.
if __name__ == "__main__":
    print("🚀 Launching Gradio interface...")
    app.launch(share=False, debug=True, server_name="0.0.0.0", server_port=7860, pwa=True)