|
|
from fastapi import FastAPI, HTTPException, Query |
|
|
from functools import lru_cache |
|
|
import pandas as pd |
|
|
import os |
|
|
from typing import Dict, Any |
|
|
|
|
|
# FastAPI application serving academic dashboard statistics.
app = FastAPI(title="Dashboard Akademik API")

# Source CSV location; overridable via the CSV_PATH environment variable.
CSV_PATH = os.getenv("CSV_PATH", "generated_dummy_data.csv")

# Letter grade -> grade-point mapping (Indonesian A..E scale with the
# intermediate AB/BC grades). Used to derive "nilai_numerik" from
# "nilai_huruf" when the CSV is loaded.
MAP_NILAI = {
    "A": 4.0,
    "AB": 3.5,
    "B": 3.0,
    "BC": 2.5,
    "C": 2.0,
    "D": 1.0,
    "E": 0.0
}
|
|
|
|
|
|
|
|
def _ensure_columns(df: pd.DataFrame): |
|
|
required = { |
|
|
"kode_mhs", "nama_prodi", "id_smt", "kode_mk", "nama_mk", |
|
|
"RMK", "sks", "nilai_akhir", "nilai_huruf", "Tahun angkatan", |
|
|
"Semester_sekarang", "Deskripsi Matkul" |
|
|
} |
|
|
missing = required - set(df.columns) |
|
|
if missing: |
|
|
raise ValueError(f"CSV missing required columns: {missing}") |
|
|
|
|
|
|
|
|
def _to_simple_string(val): |
|
|
try: |
|
|
|
|
|
if pd.isna(val): |
|
|
return "" |
|
|
except Exception: |
|
|
|
|
|
pass |
|
|
|
|
|
|
|
|
if isinstance(val, pd.Series): |
|
|
|
|
|
non_null = val.dropna() |
|
|
if not non_null.empty: |
|
|
return str(non_null.iloc[0]) |
|
|
if not val.empty: |
|
|
return str(val.iloc[0]) |
|
|
return "" |
|
|
if isinstance(val, (list, tuple)): |
|
|
if len(val) == 0: |
|
|
return "" |
|
|
return str(val[0]) |
|
|
|
|
|
try: |
|
|
return str(val) |
|
|
except Exception: |
|
|
return "" |
|
|
|
|
|
|
|
|
@lru_cache(maxsize=1)
def load_data_cached() -> pd.DataFrame:
    """Load and normalise the academic CSV, caching the result.

    Normalisation: strips column-name whitespace, validates required
    columns, coerces "sks"/"id_smt" to int, flattens "nilai_huruf" to a
    stripped string, and derives "nilai_numerik" from the letter grade
    (falling back to score thresholds on "nilai_akhir").
    Raises FileNotFoundError when CSV_PATH does not exist.
    """
    if not os.path.exists(CSV_PATH):
        raise FileNotFoundError(f"CSV not found at: {CSV_PATH}")

    df = pd.read_csv(CSV_PATH)
    df.columns = [c.strip() for c in df.columns]
    _ensure_columns(df)

    # Integer coercion; unparseable values become 0.
    for col in ("sks", "id_smt"):
        df[col] = pd.to_numeric(df[col], errors="coerce").fillna(0).astype(int)

    df["nilai_huruf"] = (
        df["nilai_huruf"].apply(_to_simple_string).astype(str).str.strip()
    )
    df["nilai_numerik"] = df["nilai_huruf"].map(MAP_NILAI)

    def fallback_numeric(row):
        # Keep the letter-grade mapping when present; otherwise derive the
        # grade point from the raw numeric score "nilai_akhir".
        if pd.notna(row["nilai_numerik"]):
            return row["nilai_numerik"]
        try:
            score = float(row.get("nilai_akhir", 0))
        except Exception:
            return 0.0
        # Score cutoffs mirroring MAP_NILAI (A .. D); below 41 is E (0.0).
        for cutoff, grade in ((86, 4.0), (76, 3.5), (66, 3.0),
                              (61, 2.5), (56, 2.0), (41, 1.0)):
            if score >= cutoff:
                return grade
        return 0.0

    df["nilai_numerik"] = df.apply(fallback_numeric, axis=1)
    return df
|
|
|
|
|
|
|
|
def get_final_records(df: pd.DataFrame) -> pd.DataFrame:
    """Keep only the latest attempt of each (student, course) pair.

    Rows are ordered by semester so that, within each (kode_mhs, kode_mk)
    group, .last() picks the record with the highest id_smt.
    """
    ordered = df.sort_values(["kode_mhs", "kode_mk", "id_smt"])
    return ordered.groupby(["kode_mhs", "kode_mk"], as_index=False).last()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@app.get("/jumlah_mahasiswa")
def jumlah_mahasiswa(reload: bool = Query(False, description="reload CSV cache")):
    """Return the number of distinct students in the dataset."""
    if reload:
        load_data_cached.cache_clear()
    try:
        df = load_data_cached()
    except Exception as exc:
        raise HTTPException(status_code=500, detail=str(exc))

    return {"total_mahasiswa": int(df["kode_mhs"].nunique())}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@app.get("/jumlah_per_angkatan")
def jumlah_per_angkatan(reload: bool = Query(False, description="reload CSV cache")):
    """Count distinct students per cohort year ("Tahun angkatan")."""
    if reload:
        load_data_cached.cache_clear()
    try:
        df = load_data_cached()
    except Exception as exc:
        raise HTTPException(status_code=500, detail=str(exc))

    counts = df.groupby("Tahun angkatan")["kode_mhs"].nunique().sort_index()
    return {"mahasiswa_per_angkatan": {str(year): int(n) for year, n in counts.items()}}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@app.get("/eligible_ta")
def eligible_ta(reload: bool = Query(False, description="reload CSV cache"),
                min_sks: int = Query(110, description="threshold SKS untuk eligible (default:110)")):
    """List students whose accumulated SKS (final course attempts only)
    exceeds *min_sks*, together with their totals."""
    if reload:
        load_data_cached.cache_clear()
    try:
        df = load_data_cached()
    except Exception as exc:
        raise HTTPException(status_code=500, detail=str(exc))

    totals = get_final_records(df).groupby("kode_mhs")["sks"].sum()
    # NOTE(review): comparison is strictly greater-than, so a student with
    # exactly min_sks SKS is NOT eligible — confirm against the academic rule.
    eligible = totals[totals > min_sks]

    data = [{"kode_mhs": student, "total_sks": int(total)}
            for student, total in eligible.items()]
    return {"jumlah_eligible": len(data), "daftar": data}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@app.get("/ipk_rata_rata")
def ipk_rata_rata(reload: bool = Query(False, description="reload CSV cache")):
    """Mean IPK over all students plus the 25/50/75% quartiles.

    IPS per semester is SKS-weighted (sum of sks*grade / sum of sks); a
    student's IPK is the unweighted mean of their per-semester IPS values.
    """
    if reload:
        load_data_cached.cache_clear()
    try:
        df = load_data_cached()
    except Exception as exc:
        raise HTTPException(status_code=500, detail=str(exc))

    final = get_final_records(df)
    final["total_bobot"] = final["sks"] * final["nilai_numerik"]

    per_semester = final.groupby(["kode_mhs", "id_smt"]).agg(
        total_bobot=("total_bobot", "sum"),
        total_sks=("sks", "sum"),
    ).reset_index()
    # Guard against zero-SKS semesters to avoid division by zero.
    per_semester["ips"] = per_semester.apply(
        lambda row: row["total_bobot"] / row["total_sks"] if row["total_sks"] > 0 else 0.0,
        axis=1,
    )
    ipk_series = per_semester.groupby("kode_mhs")["ips"].mean()

    if ipk_series.empty:
        return {"ipk_rata_rata": 0.0, "ipk_quartiles": {}}

    quartiles = {str(q): float(v)
                 for q, v in ipk_series.quantile([0.25, 0.5, 0.75]).items()}
    return {"ipk_rata_rata": float(round(ipk_series.mean(), 3)),
            "ipk_quartiles": quartiles}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@app.get("/dashboard_summary")
def dashboard_summary(reload: bool = Query(False, description="reload CSV cache"),
                      min_sks: int = Query(110, description="threshold SKS untuk eligible (default:110)")):
    """Aggregate dashboard payload in one response: total head-count,
    per-cohort population, TA-eligible students, and the mean IPK.

    The SKS eligibility threshold was hard-coded to 110; it is now the
    `min_sks` query parameter (default 110, so existing callers are
    unaffected), matching the /eligible_ta endpoint.
    """
    if reload:
        load_data_cached.cache_clear()
    try:
        df = load_data_cached()
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))

    final = get_final_records(df)

    # Distinct students overall and per cohort year.
    total_mhs = int(df["kode_mhs"].nunique())
    per_ang = {str(k): int(v)
               for k, v in df.groupby("Tahun angkatan")["kode_mhs"].nunique().to_dict().items()}

    # Eligible: accumulated SKS over final attempts strictly exceeds min_sks.
    sks_per_mhs = final.groupby("kode_mhs")["sks"].sum()
    eligible = sks_per_mhs[sks_per_mhs > min_sks]
    eligible_list = [{"kode_mhs": m, "total_sks": int(s)} for m, s in eligible.items()]

    # Mean IPK: SKS-weighted IPS per semester, unweighted mean per student,
    # then the mean over all students.
    final["total_bobot"] = final["sks"] * final["nilai_numerik"]
    grp = final.groupby(["kode_mhs", "id_smt"]).agg(
        total_bobot=("total_bobot", "sum"),
        total_sks=("sks", "sum")
    ).reset_index()
    grp["ips"] = grp.apply(
        lambda r: (r["total_bobot"] / r["total_sks"]) if r["total_sks"] > 0 else 0.0,
        axis=1
    )
    ipk_series = grp.groupby("kode_mhs")["ips"].mean()
    mean_ipk = float(round(ipk_series.mean(), 3)) if not ipk_series.empty else 0.0

    return {
        "total_mahasiswa": total_mhs,
        "mahasiswa_per_angkatan": per_ang,
        "eligible_ta": {
            "jumlah": len(eligible_list),
            "daftar": eligible_list
        },
        "ipk": {
            "rata_rata_ipk": mean_ipk
        }
    }
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@app.get("/rata_sks")
def rata_sks(reload: bool = Query(False, description="reload CSV cache")):
    """Average accumulated SKS per student (final course attempts only)."""
    if reload:
        load_data_cached.cache_clear()
    try:
        df = load_data_cached()
    except Exception as exc:
        raise HTTPException(status_code=500, detail=str(exc))

    totals = get_final_records(df).groupby("kode_mhs")["sks"].sum()
    if totals.empty:
        return {"rata_rata_sks": 0.0}
    return {"rata_rata_sks": float(round(totals.mean(), 2))}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@app.get("/ips_trend")
def ips_trend(reload: bool = Query(False, description="reload CSV cache")):
    """Mean IPS per cohort and semester, shaped {angkatan: {semester: ips}}.

    Each student's IPS is computed per semester (SKS-weighted), then
    averaged across students within the same cohort and semester.
    """
    if reload:
        load_data_cached.cache_clear()
    try:
        df = load_data_cached()
    except Exception as exc:
        raise HTTPException(status_code=500, detail=str(exc))

    final = get_final_records(df)
    final["total_bobot"] = final["sks"] * final["nilai_numerik"]

    per_student = final.groupby(["kode_mhs", "id_smt", "Tahun angkatan"]).agg(
        total_bobot=("total_bobot", "sum"),
        total_sks=("sks", "sum"),
    ).reset_index()
    per_student["ips"] = per_student.apply(
        lambda row: row["total_bobot"] / row["total_sks"] if row["total_sks"] > 0 else 0.0,
        axis=1,
    )

    averaged = (per_student.groupby(["Tahun angkatan", "id_smt"])["ips"]
                .mean().round(3).reset_index())

    output = {}
    for _, row in averaged.iterrows():
        cohort = str(int(row["Tahun angkatan"]))
        output.setdefault(cohort, {})[str(int(row["id_smt"]))] = float(row["ips"])
    return output
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@app.get("/populasi")
def populasi(reload: bool = Query(False, description="reload CSV cache")):
    """Distinct students per cohort year (same counts as /jumlah_per_angkatan,
    under a different response key)."""
    if reload:
        load_data_cached.cache_clear()
    try:
        df = load_data_cached()
    except Exception as exc:
        raise HTTPException(status_code=500, detail=str(exc))

    counts = df.groupby("Tahun angkatan")["kode_mhs"].nunique().sort_index()
    return {"populasi": {str(year): int(n) for year, n in counts.items()}}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@app.post("/reload_data")
def reload_data():
    """Drop the CSV cache and eagerly re-load it so load errors surface now
    rather than on the next read endpoint."""
    load_data_cached.cache_clear()
    try:
        load_data_cached()
    except Exception as exc:
        raise HTTPException(status_code=500, detail=f"Reload failed: {exc}")
    return {"status": "reloaded"}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@app.get("/tren_ips")
def tren_ips(reload: bool = False):
    """Cohort-level IPS trend: {angkatan: [ips, ...]} ordered by semester.

    Unlike /ips_trend, bobot and SKS are pooled across every student of a
    cohort before dividing, rather than averaging per-student IPS values.
    """
    if reload:
        load_data_cached.cache_clear()

    try:
        df = load_data_cached()
    except Exception as exc:
        raise HTTPException(500, str(exc))

    final = get_final_records(df)
    final["total_bobot"] = final["sks"] * final["nilai_numerik"]

    pooled = final.groupby(["Tahun angkatan", "id_smt"]).agg(
        total_bobot=("total_bobot", "sum"),
        total_sks=("sks", "sum"),
    ).reset_index()
    pooled["ips"] = pooled.apply(
        lambda row: row["total_bobot"] / row["total_sks"] if row["total_sks"] > 0 else 0.0,
        axis=1,
    )

    trend = {}
    for cohort, rows in pooled.groupby("Tahun angkatan"):
        trend[str(cohort)] = rows.sort_values("id_smt")["ips"].round(3).tolist()

    return trend
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@app.get("/distribusi_populasi")
def distribusi_populasi(reload: bool = False, min_sks: int = 110):
    """Per-cohort population together with the count of TA-eligible students
    (those whose accumulated SKS exceeds *min_sks*)."""
    if reload:
        load_data_cached.cache_clear()

    try:
        df = load_data_cached()
    except Exception as exc:
        raise HTTPException(500, str(exc))

    final = get_final_records(df)

    # Distinct students per cohort, from the raw rows.
    total_per_cohort = df.groupby("Tahun angkatan")["kode_mhs"].nunique()

    # Eligible: accumulated SKS over final attempts strictly exceeds min_sks.
    sks_per_mhs = final.groupby("kode_mhs")["sks"].sum()
    eligible = sks_per_mhs[sks_per_mhs > min_sks]

    # Student -> cohort lookup; on duplicate kode_mhs the last row wins.
    cohort_of = df.set_index("kode_mhs")["Tahun angkatan"].to_dict()

    eligible_per_cohort = {}
    for student in eligible.index:
        cohort = cohort_of.get(student)
        if cohort is not None:
            eligible_per_cohort[cohort] = eligible_per_cohort.get(cohort, 0) + 1

    return {
        str(cohort): {
            "total": int(total_per_cohort[cohort]),
            "eligible": int(eligible_per_cohort.get(cohort, 0)),
        }
        for cohort in sorted(total_per_cohort.index)
    }
|
|
|
|
|
|