Spaces:
Sleeping
Sleeping
| from flask import Flask, request, jsonify | |
| from flask_cors import CORS | |
| import os | |
| import pandas as pd | |
| # from utils.analytics import get_summary_stats | |
| from model.mood_predict import mood_predict | |
| from utils.mapping import map_to_mood | |
| import datetime | |
app = Flask(__name__)
# Allow cross-origin requests so the (separately hosted) frontend can call this API.
CORS(app)
# --- Data File ---
# NOTE(review): /tmp is presumably used because it is the writable path on the
# deployment host (ephemeral storage) — confirm; data will not survive restarts.
DATA_DIR = "/tmp/data"
LOGS_CSV = os.path.join(DATA_DIR, "logs.csv")
# Canonical column set/order for every row written to logs.csv.
LOG_COLUMNS = ['date', 'exercise', 'water', 'reading', 'meditation', 'mood', 'journal_text']
| # --- Helper Function --- | |
def init_log_file():
    """Create the data directory and an empty (header-only) log CSV if absent.

    Idempotent; safe to call on every startup.
    """
    # exist_ok=True already makes this a no-op when the directory exists,
    # so the previous os.path.exists(DATA_DIR) pre-check was redundant.
    os.makedirs(DATA_DIR, exist_ok=True)
    if not os.path.exists(LOGS_CSV):
        # Write just the header row so later reads see the right columns.
        pd.DataFrame(columns=LOG_COLUMNS).to_csv(LOGS_CSV, index=False)
| # --- API Endpoints --- | |
def home():
    """Health-check endpoint confirming the backend is running."""
    # 200 OK is the correct status for a simple status check;
    # 201 Created is reserved for responses that create a resource.
    return jsonify({'message': "MindTrack Backend is running!"}), 200
def log_habit():
    """
    Save a new log entry via a read-modify-write cycle on the CSV.

    Expects a JSON body containing at least a 'date' key.  A log with the
    same date as an existing row overwrites that row.  Returns 201 on
    success, 400 on bad input, 500 on any storage failure.
    """
    # silent=True returns None for a missing/malformed JSON body instead of
    # raising, so bad payloads reach the 400 branch rather than a 500.
    new_log_data = request.get_json(silent=True)
    if not new_log_data or 'date' not in new_log_data:
        return jsonify({"error": "No data or date provided"}), 400
    try:
        # 1. Ensure the data directory exists
        os.makedirs(DATA_DIR, exist_ok=True)
        # 2. Load existing data, or start from an empty frame that already
        #    carries the canonical columns so concat stays consistent.
        if os.path.exists(LOGS_CSV):
            df = pd.read_csv(LOGS_CSV)
        else:
            df = pd.DataFrame(columns=LOG_COLUMNS)
        # 3. Overwrite semantics: drop any existing row with the same date.
        new_date = new_log_data['date']
        if not df.empty and new_date in df['date'].values:
            app.logger.info(f"Duplicate date found: {new_date}. Overwriting old entry.")
            df = df[df['date'] != new_date]
        # 4. Force the new entry into the canonical column set/order
        #    (extra keys dropped, missing keys become NaN).
        df_entry = pd.DataFrame([new_log_data], columns=LOG_COLUMNS)
        # 5. Append and 6. keep the file chronologically sorted.
        df_updated = pd.concat([df, df_entry], ignore_index=True)
        df_updated = df_updated.sort_values(by='date')
        # 7. Persist the whole updated frame (header=True is the default).
        df_updated.to_csv(LOGS_CSV, index=False)
        return jsonify({"message": "Log saved successfully"}), 201
    except Exception as e:
        # Route-level error boundary: log and report a generic 500.
        app.logger.error(f"Error saving log: {e}")
        return jsonify({"error": "Failed to save log"}), 500
def get_all_logs():
    """
    Read all log entries from the CSV and return them as a JSON array.

    This is the "Single Source of Truth" endpoint for the dashboard.
    """
    if not os.path.exists(LOGS_CSV):
        # No file yet: an empty list keeps the client handling uniform.
        return jsonify([])
    try:
        df = pd.read_csv(LOGS_CSV)
        # Replace NaN with None: NaN would otherwise serialize as bare
        # `NaN`, which is not valid JSON and breaks strict client parsers.
        df = df.where(pd.notnull(df), None)
        # orient='records' yields a list of {column: value} dicts.
        logs_json = df.to_dict(orient='records')
        return jsonify(logs_json)
    except Exception as e:
        app.logger.error(f"Error reading logs: {e}")
        return jsonify({"error": "Failed to retrieve logs"}), 500
def predict_mood():
    """
    Predict the sentiment of a given journal text.

    Expects a JSON body with a non-empty "text" field; returns the mapped
    mood and the raw model confidence score.
    """
    try:
        # silent=True -> None (not an exception) on a missing/invalid body;
        # `or {}` keeps .get() safe so empty requests hit the 400 branch
        # instead of an AttributeError that the broad except turns into 500.
        data = request.get_json(silent=True) or {}
        text = data.get("text")
        # Reject missing or whitespace-only input explicitly.
        if not text or text.strip() == "":
            return jsonify({"error": "No text provided"}), 400
        pred_moods = mood_predict(text)
        # NOTE(review): assumes the model returns a {'label', 'score'} dict
        # — confirm against model.mood_predict; defaults guard missing keys.
        mood = pred_moods.get('label', 'Neutral')
        score = pred_moods.get('score', 0.0)
        mapped_mood = map_to_mood(mood)
        return jsonify({
            "mood": mapped_mood,
            "score": score
        }), 200
    except Exception as e:
        app.logger.error(f"Error in /predict_mood: {e}")
        return jsonify({"error": str(e)}), 500
def reset_logs():
    """
    Delete the logs.csv file to reset the dashboard to sample data.

    This is for demo purposes.
    """
    try:
        # EAFP: attempt the removal and let FileNotFoundError signal absence.
        # This avoids the check-then-act race of exists() followed by remove().
        os.remove(LOGS_CSV)
        app.logger.info("logs.csv has been deleted.")
        return jsonify({"message": "Log file deleted successfully. Dashboard will reset to sample data."}), 200
    except FileNotFoundError:
        app.logger.info("logs.csv not found, no action needed.")
        return jsonify({"message": "No log file to delete."}), 200
    except Exception as e:
        app.logger.error(f"Error deleting log file: {e}")
        return jsonify({"error": f"Failed to delete log file: {e}"}), 500
# --- Main execution ---
if __name__ == "__main__":
    init_log_file()  # Ensure log file exists on startup
    # Hosting platforms (e.g. Render) inject the port via the PORT env var;
    # fall back to 7860 — the previous hard-coded value — for local runs.
    port = int(os.environ.get("PORT", 7860))
    # NOTE(review): debug=True is acceptable for a hackathon/demo but must be
    # disabled in production — it enables the interactive Werkzeug debugger,
    # which allows arbitrary code execution if exposed.
    app.run(host="0.0.0.0", port=port, debug=True)