# Hugging Face Spaces build status (pasted from the Spaces UI): Runtime error
#!/usr/bin/env python3
"""
Hugging Face Spaces compatible app for MemoryAI.
This is the main entry point for the Spaces deployment.
"""
import os

from flask import Flask, render_template, request, jsonify

from src.main import MemoryAI

# Load environment variables from a .env file when python-dotenv is
# installed; on Spaces the variables are normally injected directly, so a
# missing dotenv is only worth a warning.
try:
    from dotenv import load_dotenv
    load_dotenv()
except ImportError:
    print("⚠️ python-dotenv not available, using environment variables directly")

# Default to a small model suitable for Spaces hardware.  A single check
# after the optional .env load replaces the original's duplicated
# assignment (it set the same default both inside the except branch and
# again after Flask init).
if not os.getenv("MODEL_NAME"):
    os.environ["MODEL_NAME"] = "microsoft/DialoGPT-small"

# Initialize Flask app
app = Flask(__name__)

# Global MemoryAI instance shared by all request handlers.
ai = MemoryAI()
ai.load_memories()
# NOTE(review): route path inferred from the function name — confirm
# against spaces_chat.html.  Without a decorator the view was never
# registered with Flask at all.
@app.route('/')
def home():
    """Render the main chat interface."""
    return render_template('spaces_chat.html')
# NOTE(review): route path inferred from the function name — confirm
# against the frontend.  Without a decorator the view was unreachable.
@app.route('/memories', methods=['GET'])
def get_memories():
    """Get recent memories as JSON.

    Returns the 10 most recent memories plus the total memory count.
    """
    recent_memories = ai.get_recent_memories(10)
    return jsonify({
        'memories': recent_memories,
        'total_memories': len(ai.memories)
    })
# NOTE(review): route path inferred from the function name — confirm
# against the frontend.  Without a decorator the view was unreachable.
@app.route('/clear_memories', methods=['POST'])
def clear_memories():
    """Clear all memories and report success as JSON."""
    ai.clear_memories()
    return jsonify({'status': 'success', 'message': 'All memories cleared'})
# NOTE(review): route path inferred from the function name — confirm
# against the frontend.  Without a decorator the view was unreachable.
@app.route('/chat', methods=['POST'])
def chat():
    """Get AI response to user input.

    Expects JSON like {"message": str, "history": list}; returns the
    generated reply, current memory count, and conversation stats when
    the backend exposes them.  Responds 400 on an empty message.
    """
    # request.json is None when the body is missing or not JSON, which
    # would make .get() raise AttributeError — fall back to an empty dict.
    data = request.get_json(silent=True) or {}
    user_input = data.get('message', '')
    conversation_history = data.get('history', [])
    if not user_input.strip():
        return jsonify({'error': 'Empty message'}), 400
    # Generate AI response with conversation history
    response = ai.generate_response(user_input, conversation_history=conversation_history)
    return jsonify({
        'response': response,
        'memory_count': len(ai.memories),
        'conversation_stats': ai.get_conversation_stats() if hasattr(ai, 'get_conversation_stats') else {}
    })
# NOTE(review): route path inferred from the function name — confirm
# against the frontend.  Without a decorator the view was unreachable.
@app.route('/summary', methods=['GET'])
def get_summary():
    """Get conversation summary as JSON."""
    summary = ai.get_conversation_summary()
    return jsonify({'summary': summary})
# NOTE(review): route path inferred from the function name — confirm
# against the frontend.  Without a decorator the view was unreachable.
@app.route('/find_similar', methods=['POST'])
def find_similar():
    """Find memories similar to a query string.

    Expects JSON like {"query": str, "top_k": int}; top_k defaults to 3.
    Responds 400 on an empty query.
    """
    # Guard against a missing/non-JSON body (request.json would be None).
    data = request.get_json(silent=True) or {}
    query = data.get('query', '')
    top_k = data.get('top_k', 3)
    if not query.strip():
        return jsonify({'error': 'Empty query'}), 400
    similar = ai.find_similar_memories(query, top_k)
    return jsonify({
        'similar_memories': [{'text': text, 'similarity': float(score)} for text, score in similar],
        'query': query
    })
# NOTE(review): route path inferred from the function name — confirm
# against the frontend.  Without a decorator the view was unreachable.
@app.route('/reset_conversation', methods=['POST'])
def reset_conversation():
    """Reset conversation state and report success as JSON."""
    ai.reset_conversation()
    return jsonify({'status': 'success', 'message': 'Conversation reset'})
# NOTE(review): route path inferred from the function name — confirm
# against the frontend.  Without a decorator the view was unreachable.
@app.route('/save_memories', methods=['POST'])
def save_memories():
    """Persist memories to file and report success as JSON."""
    ai.save_memories()
    return jsonify({'status': 'success', 'message': 'Memories saved'})
if __name__ == '__main__':
    # Hugging Face Spaces runs the app with its own server; this branch is
    # only hit when testing locally.  Debug mode exposes the Werkzeug
    # interactive debugger (arbitrary code execution), so it must not be
    # hard-coded on while binding to all interfaces — opt in via
    # FLASK_DEBUG=1 instead.
    debug = os.environ.get('FLASK_DEBUG', '').lower() in ('1', 'true', 'yes')
    app.run(debug=debug, host='0.0.0.0', port=int(os.environ.get('PORT', 7860)))