# app.py — Flask API server for Maira, deployed on Hugging Face Spaces.
# (Removed pasted build-log artifacts: "Spaces:" / "Build error".)
# --- IMPORTS & APP SETUP ---
from flask import Flask, request, jsonify, send_from_directory
from flask_cors import CORS  # Required for your HTML to talk to this API
from brain import MairaBrain
import os

app = Flask(__name__)
CORS(app)  # Unlocks the neural link for your maira2.html

# --- CONFIGURATION ---
# Using the Llama 3.2 1B model (Smart & Fast)
REPO_ID = "bartowski/Llama-3.2-1B-Instruct-GGUF"
FILENAME = "Llama-3.2-1B-Instruct-Q4_K_M.gguf"

# Initialize Maira's Brain.
# This will download the model to the container on first run.
# NOTE(review): runs at import time, so startup / first request may be slow — confirm acceptable.
maira = MairaBrain(REPO_ID, FILENAME)
# --- ROUTES ---
@app.route("/")
def home():
    """Serves the front-end website.

    Returns:
        The maira2.html file, looked up in the same folder as app.py.
    """
    # Missing route decorator restored: without @app.route("/") this
    # handler was never registered with Flask.
    return send_from_directory('.', 'maira2.html')
@app.route("/chat", methods=["POST"])
def chat():
    """The API endpoint for both the Website and WhatsApp.

    Expects a JSON body: {"message": str, "user_id": str (optional)}.
    Returns JSON with "status" and "response" keys; 400 on an empty
    message or non-JSON body, 500 on any internal failure.
    """
    try:
        # silent=True avoids Flask raising on a missing/invalid JSON body;
        # an empty dict then falls through to the 400 branch below instead
        # of becoming a 500 via the broad except.
        data = request.get_json(silent=True) or {}
        user_message = data.get("message", "")
        user_id = data.get("user_id", "default_user")

        if not user_message:
            return jsonify({"error": "Message is empty"}), 400

        # Ask the AI for a response
        ai_response = maira.get_response(user_id, user_message)
        return jsonify({
            "status": "success",
            "response": ai_response
        })
    except Exception as e:
        # Top-level boundary: log the failure and return a friendly error.
        print(f"CRITICAL ERROR: {e}")
        return jsonify({
            "status": "error",
            "response": "My neural circuits are lagging. Try again? 🧊"
        }), 500
if __name__ == "__main__":
    # Port 7860 is mandatory for Hugging Face Spaces.
    # It also works perfectly for local testing.
    # host="0.0.0.0" so the server is reachable from outside the container.
    app.run(host="0.0.0.0", port=7860, debug=False)