AumCoreAI committed on
Commit
324e512
·
verified ·
1 Parent(s): ab27732

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +50 -0
app.py CHANGED
@@ -703,6 +703,56 @@ async def reset():
703
  return {"response": expert_response}
704
  except Exception as e:
705
  print(f"⚠️ Expert coding failed: {e}")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
706
 
707
  # Fallback to basic code
708
  code_response = generate_basic_code(message)
 
703
  return {"response": expert_response}
704
  except Exception as e:
705
  print(f"⚠️ Expert coding failed: {e}")
706
@app.post("/chat")
async def chat(message: str = Form(...)):
    """Main chat endpoint.

    Detects the input language, builds the LLM context from recent chat
    history, queries the Groq API, persists the exchange (best-effort),
    and returns the assistant's reply.

    Args:
        message: The user's chat message, posted as form data.

    Returns:
        dict: ``{"response": <assistant reply or error text>}`` — errors are
        reported in-band rather than raised, so the endpoint never 500s on
        expected failures.
    """
    if not app.state.groq_available:
        return {"response": "Error: Groq API not configured."}

    # Deferred imports so a missing optional module degrades to an in-band error
    # instead of breaking app startup.
    try:
        from core.language_detector import detect_input_language, get_system_prompt
        from core.memory_db import tidb_memory
    except ImportError as e:
        return {"response": f"Error: {str(e)}"}

    lang_mode = detect_input_language(message)
    system_prompt = get_system_prompt(lang_mode, AumCoreConfig.USERNAME)

    # Get chat history — best-effort: an unreachable DB must not block the chat.
    # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit still propagate.
    recent_chats = []
    try:
        recent_chats = tidb_memory.get_recent_chats(limit=10)
    except Exception:
        pass

    # Prepare messages: system prompt, then alternating user/assistant history,
    # then the current user turn.
    # NOTE(review): assumes each history row is (user_input, ai_response, _) —
    # matches the unpacking below; confirm against tidb_memory.get_recent_chats.
    api_messages = [{"role": "system", "content": system_prompt}]
    for user_input, ai_response, _ in recent_chats:
        api_messages.append({"role": "user", "content": user_input})
        api_messages.append({"role": "assistant", "content": ai_response})
    api_messages.append({"role": "user", "content": message})

    # Call Groq API
    try:
        completion = client.chat.completions.create(
            model="llama-3.3-70b-versatile",
            messages=api_messages,
            temperature=0.3,
            max_tokens=1000
        )
        ai_response = completion.choices[0].message.content.strip()

        # Save to database — best-effort: a failed write must not lose the reply.
        # Narrowed from a bare `except:` for the same reason as above.
        try:
            tidb_memory.save_chat(message, ai_response, lang_mode)
        except Exception:
            pass

        return {"response": ai_response}

    except Exception as e:
        # Top-level boundary: report the failure in-band to the client.
        return {"response": f"Error: {str(e)}"}
756
 
757
  # Fallback to basic code
758
  code_response = generate_basic_code(message)