"""AumCore AI — FastAPI chat server backed by the Groq API.

Serves a minimal HTML chat UI at "/", answers messages at POST /chat with
a persona-constrained LLM, and persists the conversation to memory.json
so context survives restarts. POST /reset wipes that memory.
"""

import json
import os

import uvicorn
from fastapi import FastAPI, Form
from fastapi.responses import HTMLResponse, JSONResponse
from groq import Groq

# --- CORE INITIALIZATION ---
app = FastAPI()
client = Groq(api_key=os.environ.get("GROQ_API_KEY"))

# Configuration & persistent memory setup.
USERNAME = "Sanjay"
MEMORY_FILE = "memory.json"


def load_memory():
    """Load chat history from MEMORY_FILE.

    Returns:
        dict: ``{"history": [...]}``. Falls back to an empty history when
        the file is missing, unreadable, or structurally malformed.
    """
    if os.path.exists(MEMORY_FILE):
        try:
            with open(MEMORY_FILE, "r", encoding="utf-8") as f:
                data = json.load(f)
            # Guard against hand-edited or corrupted files: "history"
            # must exist AND be a list before we trust it.
            if isinstance(data, dict) and isinstance(data.get("history"), list):
                return data
        except (OSError, json.JSONDecodeError):
            pass  # fall through to the empty default below
    return {"history": []}


def save_memory(chat_history):
    """Persist the full chat history to MEMORY_FILE.

    ensure_ascii=False keeps Hindi (Devanagari) text human-readable in
    the JSON file instead of being written as \\uXXXX escapes.
    Write failures are best-effort: logged, never raised to the caller.
    """
    try:
        with open(MEMORY_FILE, "w", encoding="utf-8") as f:
            json.dump({"history": chat_history}, f, indent=4, ensure_ascii=False)
    except OSError as e:
        print(f"Memory Save Error: {e}")


# --- STRICT PERSONA SETTINGS (MEMORY PRESERVED) ---
SYSTEM_PROMPT = f"""
Role: Senior AI Architect (AumCore AI).
User Identity: {USERNAME}.
Core Rules:
1. Language: Always 60% English and 40% Hindi (Devanagari).
2. Code Rule: If {USERNAME} asks for code, provide a robust production-ready Python script with try-except blocks.
3. Length: Responses must be powerful and direct (Max 4 lines).
4. Persona: Talk like a Master AI, not a basic chatbot.
"""

# NOTE(review): the original HTML markup was lost/garbled in this file
# (only the page title survived). This is a minimal functional stand-in
# with the same title — restore the original UI if available elsewhere.
HTML_UI = """<!DOCTYPE html>
<html>
<head><meta charset="utf-8"><title>AumCore AI - Final Build</title></head>
<body>
  <h1>AumCore AI - Final Build</h1>
  <div id="log"></div>
  <form id="chat-form">
    <input id="msg" name="message" autocomplete="off" required>
    <button type="submit">Send</button>
  </form>
  <script>
    const form = document.getElementById("chat-form");
    form.addEventListener("submit", async (e) => {
      e.preventDefault();
      const msg = document.getElementById("msg").value;
      const res = await fetch("/chat", {
        method: "POST",
        body: new URLSearchParams({message: msg}),
      });
      const data = await res.json();
      document.getElementById("log").innerHTML +=
        `<p><b>You:</b> ${msg}</p><p><b>AumCore:</b> ${data.response}</p>`;
      form.reset();
    });
  </script>
</body>
</html>"""


@app.get("/", response_class=HTMLResponse)
async def get_ui():
    """Serve the single-page chat UI."""
    return HTML_UI


@app.post("/reset")
async def reset():
    """Wipe the persisted conversation memory."""
    save_memory([])
    return {"message": "Memory clear ho gayi!"}


@app.post("/chat")
async def chat(message: str = Form(...)):
    """Answer one user message, grounded in the last 10 saved exchanges.

    Sends the persona system prompt plus a sliding context window to the
    Groq API, appends the new exchange to the FULL persisted history, and
    returns ``{"response": ...}``. API failures are returned in-band as an
    error string rather than raised.
    """
    memory_data = load_memory()
    full_history = memory_data.get("history", [])
    # Only the most recent exchanges go to the model as context;
    # the file on disk keeps everything.
    context_window = full_history[-10:]

    api_messages = [{"role": "system", "content": SYSTEM_PROMPT}]
    for chat_pair in context_window:
        api_messages.append({"role": "user", "content": chat_pair["u"]})
        api_messages.append({"role": "assistant", "content": chat_pair["a"]})
    api_messages.append({"role": "user", "content": message})

    try:
        completion = client.chat.completions.create(
            model="llama-3.3-70b-versatile",
            messages=api_messages,
            temperature=0.3,  # balanced for creativity and rule-following
            max_tokens=800,
        )
        ai_response = completion.choices[0].message.content.strip()
        # BUG FIX: append to the FULL history before saving. The original
        # saved only the 10-item slice, silently discarding every exchange
        # older than the context window on each message.
        full_history.append({"u": message, "a": ai_response})
        save_memory(full_history)
        return {"response": ai_response}
    except Exception as e:  # surface provider/network failures to the client
        return {"response": f"Error: {str(e)}"}


if __name__ == "__main__":
    uvicorn.run(app, host="0.0.0.0", port=7860)