"""
Hugging Face Spaces version of the Memory Chat application.
Optimized for the HF Spaces environment with persistent storage.
"""

import datetime
import os

import gradio as gr
from memory_manager import MemoryManager
from chat_interface import HuggingFaceChat
from rich.console import Console

console = Console()


class HFSpaceApp:
    """Hugging Face Spaces version of the Memory Chat application."""

    # Substrings whose presence in a turn marks it as worth remembering.
    _IMPORTANT_KEYWORDS = (
        "remember", "important", "note", "fact", "detail", "information",
        "love", "hate", "like", "dislike", "favorite", "never", "always",
        "birthday", "anniversary", "special", "urgent", "must", "should",
    )

    # Phrases signalling the user volunteered personal information,
    # paired with the label recorded for that category.
    _PERSONAL_PATTERNS = (
        ("my name is", "User shared their name"),
        ("i live in", "User shared their location"),
        ("i work at", "User shared their workplace"),
        ("i study", "User shared their studies"),
        ("my birthday", "User shared their birthday"),
        ("my favorite", "User shared a favorite thing"),
    )

    def __init__(self):
        """Initialize the Spaces application."""
        # NOTE(review): /tmp is ephemeral on HF Spaces — truly persistent
        # storage lives under /data (and requires the persistent-storage
        # upgrade). Path kept as-is to avoid changing deployment behavior;
        # confirm before relying on cross-restart persistence.
        self.memory_dir = "/tmp/memories" if os.getenv("SPACE_ID") else "memories"
        os.makedirs(self.memory_dir, exist_ok=True)

        self.memory_manager = MemoryManager(self.memory_dir)
        self.chat_interface = HuggingFaceChat()

        # Full transcript of the session as {"role", "content"} dicts.
        self.conversation_history = []

        # Report how many memories were loaded from persistent storage.
        summary = self.memory_manager.get_summary()
        console.print(f"[blue]Loaded {summary['total_memories']} memories[/blue]")

    def should_record_memory(self, user_input: str, ai_response: str) -> bool:
        """Determine if the conversation should be recorded as a memory.

        Returns True when either the combined turn contains an importance
        keyword or the user's message matches a personal-information phrase.
        Matching is case-insensitive substring search.
        """
        combined_text = f"{user_input} {ai_response}".lower()

        if any(keyword in combined_text for keyword in self._IMPORTANT_KEYWORDS):
            return True

        # Note: original behavior scans the combined text (user + AI) for
        # personal patterns as well, so an AI echo can also trigger recording.
        return any(pattern in combined_text for pattern, _ in self._PERSONAL_PATTERNS)

    def extract_memory_content(self, user_input: str, ai_response: str) -> str:
        """Extract the most important information to store as a memory.

        Explicit save requests ("remember", "note", "save") store the raw
        user message; otherwise detected personal details are summarized.
        ``ai_response`` is accepted for interface symmetry but not used.
        """
        lowered = user_input.lower()

        if any(word in lowered for word in ("remember", "note", "save")):
            return user_input

        personal_info = [
            label for pattern, label in self._PERSONAL_PATTERNS if pattern in lowered
        ]
        if personal_info:
            return f"User mentioned: {', '.join(personal_info)}. Details: {user_input}"

        return user_input

    def chat_with_memory(self, user_input: str) -> str:
        """Chat with the AI while managing memories.

        Retrieves relevant stored memories for context, generates a reply,
        and records the exchange as a new memory when it looks important.
        """
        if not self.chat_interface.check_model_availability():
            return "I'm sorry, but I couldn't load the AI model. Please check your internet connection."

        self.conversation_history.append({"role": "user", "content": user_input})

        # Retrieve up to 3 candidates but inject only the top 2 into the
        # prompt to keep the context short.
        relevant_memories = self.memory_manager.retrieve_memories(user_input, k=3)
        context = ""
        if relevant_memories:
            context = "Relevant memories:\n"
            for memory in relevant_memories[:2]:
                context += f"- {memory['content']}\n"
            context += "\n"

        prompt = self.build_prompt(user_input, context)
        ai_response = self.chat_interface.generate_response(prompt)

        self.conversation_history.append({"role": "assistant", "content": ai_response})

        if self.should_record_memory(user_input, ai_response):
            memory_content = self.extract_memory_content(user_input, ai_response)
            context_info = f"During conversation at {self.get_current_time()}"
            self.memory_manager.add_memory(
                content=memory_content,
                context=context_info,
                memory_type="conversation",
            )

        return ai_response

    def build_prompt(self, user_input: str, context: str) -> str:
        """Build the prompt for the AI model."""
        return f"{context}Human: {user_input}\nAI: "

    def get_current_time(self) -> str:
        """Get current time in a readable format (local time, no tz)."""
        return datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")

    def get_memories_summary(self) -> str:
        """Get a Markdown summary of stored memories by type."""
        summary = self.memory_manager.get_summary()
        memory_types = summary['memory_types']

        summary_text = f"""
## Memory Summary

**Total Memories:** {summary['total_memories']}

**Memory Types:**
"""
        for memory_type, count in memory_types.items():
            summary_text += f"- {memory_type}: {count}\n"

        return summary_text

    def get_recent_memories(self) -> str:
        """Get the most recent memories as a Markdown listing."""
        recent_memories = self.memory_manager.get_recent_memories()
        if not recent_memories:
            return "No memories stored yet."

        memory_text = "## Recent Memories\n\n"
        for memory in recent_memories:
            # timestamp[:19] trims an ISO timestamp to seconds precision.
            memory_text += f"**{memory['type'].title()}** ({memory['timestamp'][:19]}):\n"
            memory_text += f"{memory['content']}\n\n"

        return memory_text

    def clear_all_memories(self) -> str:
        """Clear all memories and return a confirmation message."""
        # FIX: this method was previously defined twice in the class; the
        # duplicate (identical) definition has been removed.
        self.memory_manager.clear_memories()
        return "All memories have been cleared."

    def get_model_info(self) -> str:
        """Get Markdown-formatted information about the AI model."""
        info = self.chat_interface.get_model_info()
        return f"""
## Model Information

**Model:** {info['model_name']}

**Device:** {info['device']}

**Available:** {'Yes' if info['available'] else 'No'}
"""

    def run_gradio_interface(self):
        """Build and return the Gradio interface optimized for HF Spaces."""
        # Custom CSS for better appearance on Spaces
        css = """
        .gradio-container {
            font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif;
        }
        .gr-prose h1 {
            text-align: center;
            color: #1f2937;
        }
        .gr-prose h2 {
            color: #374151;
        }
        """

        with gr.Blocks(title="Memory Chat - Hugging Face Spaces", css=css, theme=gr.themes.Soft()) as demo:
            gr.Markdown("# 🤖 Memory Chat with Hugging Face")
            gr.Markdown("### Chat with an AI that remembers important details about you!")

            with gr.Tab("đŸ’Ŧ Chat"):
                # FIX: self.user() appends {"role", "content"} dicts, so the
                # Chatbot must use the "messages" format (the Gradio 4.x
                # default is tuple pairs, which would fail to render).
                chatbot = gr.Chatbot(height=500, type="messages")

                with gr.Row():
                    msg = gr.Textbox(
                        label="Your Message",
                        placeholder="Type your message here...",
                        scale=4,
                    )
                    submit_btn = gr.Button("Send", scale=1)

                with gr.Row():
                    clear_btn = gr.Button("Clear Conversation")
                    clear_memories_btn = gr.Button("Clear All Memories", variant="stop")

                # Submit on Enter key
                msg.submit(
                    fn=self.user,
                    inputs=[msg, chatbot],
                    outputs=[msg, chatbot],
                    queue=False,
                )
                submit_btn.click(
                    fn=self.user,
                    inputs=[msg, chatbot],
                    outputs=[msg, chatbot],
                    queue=False,
                )
                clear_btn.click(
                    fn=self.clear_history,
                    inputs=None,
                    outputs=chatbot,
                    queue=False,
                )

                def _clear_memories_and_chat():
                    # FIX: the original routed the confirmation string into a
                    # freshly constructed gr.Textbox() that was never rendered,
                    # so the message vanished. Clear memories, reset the chat.
                    self.clear_all_memories()
                    return None

                clear_memories_btn.click(
                    fn=_clear_memories_and_chat,
                    inputs=None,
                    outputs=chatbot,
                    queue=False,
                )

            with gr.Tab("📚 Memories"):
                memories_summary = gr.Markdown(value=self.get_memories_summary())
                recent_memories = gr.Markdown(value=self.get_recent_memories())

                with gr.Row():
                    refresh_btn = gr.Button("Refresh Memories")
                    timeline_link = gr.Markdown(f"[View Timeline]({self.memory_manager.timeline_file})")

                refresh_btn.click(
                    fn=lambda: (self.get_memories_summary(), self.get_recent_memories()),
                    inputs=None,
                    outputs=[memories_summary, recent_memories],
                    queue=False,
                )

            with gr.Tab("🤖 Model Info"):
                model_info = gr.Markdown(value=self.get_model_info())

            with gr.Tab("â„šī¸ About"):
                gr.Markdown("""
## About This Application

This application combines Hugging Face AI models with a memory system that records important information from your conversations.

### Features:
- 🤖 Chat with Hugging Face models
- 💾 Automatic memory recording
- 📚 View and manage your memories
- 🔍 Search through your memories

### How it works:
1. Have a conversation with the AI
2. The system automatically detects important information
3. Important memories are stored and can be recalled in future conversations
4. View your memory timeline and statistics

### Memory Types:
- **General**: General information and facts
- **Conversation**: Important details from chats
- **Preferences**: Likes, dislikes, favorites
- **Important**: Critical information marked as important

---
**Note**: Memories are stored locally and persist between sessions on this Space.
""")

        return demo

    def user(self, user_message, history):
        """Handle user input and generate AI response.

        Returns ("", updated_history) so the input box is cleared and the
        Chatbot (messages format) receives the new exchange.
        """
        if not user_message.strip():
            return "", history

        ai_response = self.chat_with_memory(user_message)

        if history is None:
            history = []
        history.append({"role": "user", "content": user_message})
        history.append({"role": "assistant", "content": ai_response})

        return "", history

    def clear_history(self):
        """Clear conversation history and reset the Chatbot display."""
        self.conversation_history = []
        return None


def main():
    """Main entry point for HF Spaces."""
    console.print("[green]🚀 Starting Memory Chat Application for HF Spaces...[/green]")

    # Create and run the application
    app = HFSpaceApp()

    # Run Gradio interface optimized for Spaces
    demo = app.run_gradio_interface()
    demo.launch(
        server_name="0.0.0.0",
        server_port=int(os.environ.get("PORT", 7860)),
        debug=False,
        show_error=True,
    )


if __name__ == "__main__":
    main()