AumCoreAI committed on
Commit
43e96dd
·
verified ·
1 Parent(s): 28bfeaa

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +15 -10
app.py CHANGED
@@ -30,14 +30,15 @@ def save_memory(chat_history):
30
  except Exception as e:
31
  print(f"Memory Save Error: {e}")
32
 
 
33
  SYSTEM_PROMPT = f"""
34
- You are AumCore AI, a high-level artificial intelligence system.
35
  User Identity: {USERNAME}.
36
  Core Rules:
37
- 1. Reply in 60% English and 40% Hindi (Devanagari).
38
- 2. Keep responses powerful, concise, and direct (Max 3 lines).
39
- 3. Do NOT use any labels like 'Aum:', 'Sanjay:', or 'Assistant:' in the output.
40
- 4. If code is requested, provide it in clear Markdown blocks.
41
  """
42
 
43
  HTML_UI = '''
@@ -124,7 +125,8 @@ HTML_UI = '''
124
  '''
125
 
126
  @app.get("/", response_class=HTMLResponse)
127
- async def get_ui(): return HTML_UI
 
128
 
129
  @app.post("/reset")
130
  async def reset():
@@ -134,18 +136,21 @@ async def reset():
134
  @app.post("/chat")
135
  async def chat(message: str = Form(...)):
136
  memory_data = load_memory()
137
- history = memory_data.get("history", [])[-8:]
 
138
  api_messages = [{"role": "system", "content": SYSTEM_PROMPT}]
139
  for chat_pair in history:
140
  api_messages.append({"role": "user", "content": chat_pair["u"]})
141
  api_messages.append({"role": "assistant", "content": chat_pair["a"]})
 
142
  api_messages.append({"role": "user", "content": message})
 
143
  try:
144
  completion = client.chat.completions.create(
145
  model="llama-3.3-70b-versatile",
146
  messages=api_messages,
147
- temperature=0.6,
148
- max_tokens=500
149
  )
150
  ai_response = completion.choices[0].message.content.strip()
151
  history.append({"u": message, "a": ai_response})
@@ -155,4 +160,4 @@ async def chat(message: str = Form(...)):
155
  return {"response": f"Error: {str(e)}"}
156
 
157
  if __name__ == "__main__":
158
- uvicorn.run(app, host="0.0.0.0", port=7860)
 
30
  except Exception as e:
31
  print(f"Memory Save Error: {e}")
32
 
33
+ # --- STRICT PERSONA SETTINGS (MEMORY PRESERVED) ---
34
  SYSTEM_PROMPT = f"""
35
+ Role: Senior AI Architect (AumCore AI).
36
  User Identity: {USERNAME}.
37
  Core Rules:
38
+ 1. Language: Always 60% English and 40% Hindi (Devanagari).
39
+ 2. Code Rule: If {USERNAME} asks for code, provide a robust production-ready Python script with try-except blocks.
40
+ 3. Length: Responses must be powerful and direct (Max 4 lines).
41
+ 4. Persona: Talk like a Master AI, not a basic chatbot.
42
  """
43
 
44
  HTML_UI = '''
 
125
  '''
126
 
127
  @app.get("/", response_class=HTMLResponse)
128
+ async def get_ui():
129
+ return HTML_UI
130
 
131
  @app.post("/reset")
132
  async def reset():
 
136
  @app.post("/chat")
137
  async def chat(message: str = Form(...)):
138
  memory_data = load_memory()
139
+ history = memory_data.get("history", [])[-10:] # Increased history context
140
+
141
  api_messages = [{"role": "system", "content": SYSTEM_PROMPT}]
142
  for chat_pair in history:
143
  api_messages.append({"role": "user", "content": chat_pair["u"]})
144
  api_messages.append({"role": "assistant", "content": chat_pair["a"]})
145
+
146
  api_messages.append({"role": "user", "content": message})
147
+
148
  try:
149
  completion = client.chat.completions.create(
150
  model="llama-3.3-70b-versatile",
151
  messages=api_messages,
152
+ temperature=0.3, # Balanced for creativity and rule-following
153
+ max_tokens=800
154
  )
155
  ai_response = completion.choices[0].message.content.strip()
156
  history.append({"u": message, "a": ai_response})
 
160
  return {"response": f"Error: {str(e)}"}
161
 
162
  if __name__ == "__main__":
163
+ uvicorn.run(app, host="0.0.0.0", port=7860)