File size: 5,308 Bytes
b20cbbf
 
 
 
 
 
 
 
 
 
 
 
 
 
031d680
 
b20cbbf
 
 
 
 
031d680
 
b20cbbf
031d680
b20cbbf
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
from flask import Flask, request, jsonify
from flask_cors import CORS
import os
import pandas as pd
# from utils.analytics import get_summary_stats
from model.mood_predict import mood_predict
from utils.mapping import map_to_mood
import datetime

# Flask application instance; also used by endpoints for app.logger.
app = Flask(__name__)

# Allow cross-origin requests from the frontend (all origins by default).
CORS(app) 

# --- Data File ---
# /tmp is writable on typical container hosts; data does not survive restarts.
DATA_DIR = "/tmp/data"
LOGS_CSV = os.path.join(DATA_DIR, "logs.csv")
# Canonical CSV schema — every read/write path keys off this column list.
LOG_COLUMNS = ['date', 'exercise', 'water', 'reading', 'meditation', 'mood', 'journal_text']

# --- Helper Function ---
def init_log_file():
    """Create the log CSV with its header row if it doesn't exist yet.

    Idempotent: safe to call on every startup. Ensures DATA_DIR exists
    first, then writes an empty DataFrame (header only) when the file
    is missing.
    """
    # exist_ok=True already tolerates an existing directory, so the
    # previous os.path.exists() pre-check was redundant (and race-prone).
    os.makedirs(DATA_DIR, exist_ok=True)
    if not os.path.exists(LOGS_CSV):
        # Header-only file so later pd.read_csv calls see the full schema.
        pd.DataFrame(columns=LOG_COLUMNS).to_csv(LOGS_CSV, index=False)


# --- API Endpoints ---

@app.route("/")
def home():
    """Health-check endpoint confirming the backend is running."""
    # 200 OK is the correct status for a successful GET;
    # 201 means "Created" and is reserved for resource creation.
    return jsonify({'message': "MindTrack Backend is running!"}), 200

@app.route("/log", methods=["POST"])
def log_habit():
    """
    Saves a new log entry.

    Read-modify-write: loads the existing CSV (if any), drops any row
    sharing the new entry's date (overwrite semantics), appends the new
    entry, sorts by date, and writes the whole file back.

    Returns:
        201 with a success message, 400 for a missing/invalid payload,
        500 if persisting the CSV fails.
    """
    # silent=True returns None for a missing/invalid JSON body instead of
    # aborting with Flask's HTML error page, so we can send a JSON 400.
    new_log_data = request.get_json(silent=True)

    if not new_log_data or 'date' not in new_log_data:
        return jsonify({"error": "No data or date provided"}), 400

    try:
        # 1. Ensure the data directory exists
        os.makedirs(DATA_DIR, exist_ok=True)

        # 2. Load existing data if file exists
        if os.path.exists(LOGS_CSV):
            df = pd.read_csv(LOGS_CSV)
        else:
            # Create an empty DataFrame WITH the correct columns
            df = pd.DataFrame(columns=LOG_COLUMNS)

        # 3. Overwrite semantics: remove any existing row for this date
        new_date = new_log_data['date']
        if not df.empty and new_date in df['date'].values:
            app.logger.info(f"Duplicate date found: {new_date}. Overwriting old entry.")
            df = df[df['date'] != new_date]

        # 4. One-row frame for the new entry; columns= pins the schema
        #    (extra keys are dropped, missing keys become NaN).
        df_entry = pd.DataFrame([new_log_data], columns=LOG_COLUMNS)

        # 5. Append, then keep the file date-sorted for consistency
        df_updated = pd.concat([df, df_entry], ignore_index=True)
        df_updated = df_updated.sort_values(by='date')

        # 6. Persist the full updated table (header written by default)
        df_updated.to_csv(LOGS_CSV, index=False)

        return jsonify({"message": "Log saved successfully"}), 201

    except Exception as e:
        # Use app.logger (consistent with /predict_mood) and keep the
        # client-facing error generic.
        app.logger.error(f"Error saving log: {e}")
        return jsonify({"error": "Failed to save log"}), 500


@app.route('/get_all_logs', methods=['GET'])
def get_all_logs():
    """
    Reads all log entries from the CSV and returns them as a JSON list.
    This is the "Single Source of Truth" endpoint for the dashboard.

    Returns an empty list if the file doesn't exist yet; 500 on read error.
    """
    if not os.path.exists(LOGS_CSV):
        # If the file doesn't exist yet, just return an empty list
        return jsonify([]) 
        
    try:
        df = pd.read_csv(LOGS_CSV)

        # Rows saved with missing fields come back as NaN, which would be
        # serialized as a bare NaN token — invalid JSON that breaks strict
        # clients. Map NaN to None so it becomes JSON null.
        df = df.astype(object).where(pd.notnull(df), None)

        # Convert DataFrame to JSON (orient='records' gives a list of dicts)
        logs_json = df.to_dict(orient='records')
        
        return jsonify(logs_json)
        
    except Exception as e:
        # app.logger for consistency with the other endpoints.
        app.logger.error(f"Error reading logs: {e}")
        return jsonify({"error": "Failed to retrieve logs"}), 500


@app.route("/predict_mood", methods=["POST"])
def predict_mood():
    """
    Predicts the sentiment of a given journal text.

    Expects a JSON body {"text": "..."}; responds with
    {"mood": <mapped label>, "score": <float>}.

    Returns 400 for a missing/blank text, 500 on model failure.
    """
    try:
        # silent=True + `or {}` keeps a missing/non-JSON body on the 400
        # path below instead of raising AttributeError on None -> 500.
        data = request.get_json(silent=True) or {}
        text = data.get("text")
        
        if not text or text.strip() == "":
            return jsonify({"error": "No text provided"}), 400
        
        pred_moods = mood_predict(text)
        # Defensive defaults in case the model omits a field.
        mood = pred_moods.get('label', 'Neutral')
        score = pred_moods.get('score', 0.0)

        # Collapse the raw model label into the app's mood vocabulary.
        mapped_mood = map_to_mood(mood)
        
        return jsonify({
            "mood": mapped_mood,
            "score": score
        }), 200

    except Exception as e:
        app.logger.error(f"Error in /predict_mood: {e}")
        return jsonify({"error": str(e)}), 500


@app.route('/reset_logs', methods=['POST'])
def reset_logs():
    """
    Demo-only endpoint: removes logs.csv so the dashboard falls back
    to its built-in sample data.
    """
    try:
        # Guard clause: nothing to do when the file is already gone.
        if not os.path.exists(LOGS_CSV):
            print("logs.csv not found, no action needed.")
            return jsonify({"message": "No log file to delete."}), 200

        os.remove(LOGS_CSV)
        print("logs.csv has been deleted.")
        return jsonify({"message": "Log file deleted successfully. Dashboard will reset to sample data."}), 200
    except Exception as e:
        print(f"Error deleting log file: {e}")
        return jsonify({"error": f"Failed to delete log file: {e}"}), 500


# --- Main execution ---
if __name__ == "__main__":
    init_log_file()  # Ensure log file exists on startup
    # Honor the PORT env variable set by hosts like Render; the default
    # keeps the previous hard-coded 7860 so local behavior is unchanged.
    port = int(os.environ.get("PORT", 7860))
    # NOTE(review): debug=True enables the Werkzeug debugger/reloader —
    # fine for a hackathon demo, but must be disabled in production.
    app.run(host="0.0.0.0", port=port, debug=True)