AIencoder committed on
Commit
2dab13c
·
verified ·
1 Parent(s): ea47f01

Update src/chimera_core.py

Browse files
Files changed (1) hide show
  1. src/chimera_core.py +64 -66
src/chimera_core.py CHANGED
@@ -1,44 +1,43 @@
1
  import os
2
  from google import genai
3
- from openai import OpenAI
4
 
5
  class Chimera:
6
  def __init__(self):
7
- # 1. SETUP GEMINI
8
- self.gemini_key = os.getenv("GEMINI_API_KEY") or getattr(__import__("config", ignore_missing_imports=True), "API_KEY", None)
9
  self.gemini_client = None
10
  if self.gemini_key:
11
  try:
12
  self.gemini_client = genai.Client(api_key=self.gemini_key)
13
  self.gemini_model = "gemini-2.5-flash"
14
- print(f"🦁 Gemini: ONLINE [{self.gemini_model}]")
15
  except Exception as e:
16
  print(f"⚠️ Gemini Init Failed: {e}")
17
 
18
- # 2. SETUP GPT-4o
19
- self.openai_key = os.getenv("OPENAI_API_KEY")
20
- self.openai_client = None
21
- if self.openai_key:
22
  try:
23
- self.openai_client = OpenAI(api_key=self.openai_key)
24
- print("🟢 OpenAI: ONLINE [gpt-4o]")
 
25
  except Exception as e:
26
- print(f"⚠️ OpenAI Init Failed: {e}")
27
 
28
  def _call_gemini(self, prompt, system_msg="You are a helpful AI."):
29
- """ Helper to safely call Gemini """
30
- if not self.gemini_client: raise Exception("Gemini Client missing")
31
  response = self.gemini_client.models.generate_content(
32
  model=self.gemini_model,
33
  contents=f"System: {system_msg}\nUser: {prompt}"
34
  )
35
  return response.text
36
 
37
- def _call_openai(self, prompt, system_msg="You are a helpful AI."):
38
- """ Helper to safely call OpenAI """
39
- if not self.openai_client: raise Exception("OpenAI Client missing")
40
- response = self.openai_client.chat.completions.create(
41
- model="gpt-4o",
42
  messages=[
43
  {"role": "system", "content": system_msg},
44
  {"role": "user", "content": prompt}
@@ -46,80 +45,79 @@ class Chimera:
46
  )
47
  return response.choices[0].message.content
48
 
49
- def _pipeline_code(self, user_prompt):
50
  """
51
- Robust Dual-Core Pipeline:
52
- 1. Try Gemini Draft -> GPT Refine.
53
- 2. If GPT fails -> Return Gemini Draft.
54
- 3. If Gemini fails -> Ask GPT to do it all.
55
  """
56
  draft_code = ""
57
 
58
- # --- PHASE 1: DRAFTING (Prefer Gemini) ---
59
  try:
60
  print(" ↳ Phase 1: Gemini Drafting...")
61
- draft_code = self._call_gemini(user_prompt, "Write a Python solution. Be verbose.")
62
  except Exception as e:
63
  print(f"⚠️ Gemini Draft Failed: {e}")
64
- # FALLBACK: If Gemini dies, ask GPT to do the whole job
65
  try:
66
- print(" ⚠️ Fallback: Switching to OpenAI for full task...")
67
- return self._call_openai(user_prompt, "You are an expert Python Coder. Write a robust solution."), "ASM (OpenAI Backup)"
68
  except Exception as e2:
69
- return f"❌ CRITICAL: Both models failed. Gemini error: {e}. OpenAI error: {e2}.", "ERROR"
70
 
71
- # --- PHASE 2: REFINEMENT (Prefer OpenAI) ---
72
  try:
73
- print(" ↳ Phase 2: GPT-4o Refining...")
74
- refine_prompt = f"Refine this code for production:\n\n{draft_code}"
75
- final_code = self._call_openai(refine_prompt, "You are a Senior Engineer. Fix bugs and optimize.")
76
- return final_code, "ASM (Dual-Core)"
 
 
 
 
 
 
 
 
 
 
77
  except Exception as e:
78
- print(f"⚠️ OpenAI Refine Failed: {e}")
79
- # FALLBACK: If OpenAI dies, just return the Gemini draft
80
- return f"{draft_code}\n\n**[⚠️ Note: OpenAI refinement failed. Showing Gemini Draft.]**", "ASM (Gemini Draft)"
81
 
82
  def _route_task(self, prompt):
83
- """ Tries to route using Gemini, falls back to OpenAI, defaults to CHAT. """
84
- routing_prompt = "Classify: [ASM] (Code), [SFE] (Science), [CSM] (Story), [CHAT]. Reply TAG only."
85
-
86
- # Try Gemini Router
87
- try:
88
- return self._call_gemini(prompt, routing_prompt).strip().replace("[", "").replace("]", "")
89
- except:
90
- # Try OpenAI Router
91
- try:
92
- return self._call_openai(prompt, routing_prompt).strip().replace("[", "").replace("]", "")
93
- except:
94
- return "CHAT" # Default if router is dead
95
 
96
  def process_request(self, user_message, history, manual_role="Auto"):
97
- # 1. ROUTING
98
  role = manual_role if manual_role != "Auto" else self._route_task(user_message)
99
  print(f"👉 Routing to: [{role}]")
100
 
101
- # 2. EXECUTE
102
  try:
103
  # === CODE PATH (ASM) ===
104
- if role == "ASM":
105
- return self._pipeline_code(user_message)
106
-
107
- # === ALL OTHER PATHS (SFE/CSM/CHAT) ===
108
- system_instruction = "You are a helpful AI."
109
- if role == "SFE": system_instruction = "You are a Data Scientist. Be objective."
110
- if role == "CSM": system_instruction = "You are a Creative Writer. Be artistic."
 
111
 
112
- # Primary Attempt: Gemini (Cheaper/Faster)
113
  try:
114
  return self._call_gemini(user_message, system_instruction), f"{role} (Gemini)"
115
- except Exception as e_gem:
116
- print(f"⚠️ Gemini Failed: {e_gem}")
117
-
118
- # Secondary Attempt: OpenAI
119
  try:
120
- return self._call_openai(user_message, system_instruction), f"{role} (OpenAI Backup)"
121
- except Exception as e_gpt:
122
- return "❌ Sorry, model is down right now. (Both API keys failed).", "SYSTEM OUTAGE"
123
 
124
  except Exception as e:
125
  return f"❌ System Error: {str(e)}", "ERR"
 
1
  import os
2
  from google import genai
3
+ from groq import Groq
4
 
5
class Chimera:
    """Dual-model ("Binary Star") assistant.

    Gemini drafts and routes; Groq (Llama 3.3) refines. Each side falls
    back to the other when its API is unavailable, and every public path
    degrades gracefully to an error string instead of raising.
    """

    def __init__(self):
        # 1. SETUP GEMINI (the router & drafter).
        # Model names are assigned unconditionally so the _call_* helpers
        # never hit an AttributeError on a partially-initialised instance.
        self.gemini_key = os.getenv("GEMINI_API_KEY")
        self.gemini_client = None
        self.gemini_model = "gemini-2.5-flash"
        if self.gemini_key:
            try:
                self.gemini_client = genai.Client(api_key=self.gemini_key)
                print("🦁 Gemini: ONLINE")
            except Exception as e:
                print(f"⚠️ Gemini Init Failed: {e}")

        # 2. SETUP GROQ (the speed refiner - Llama 3.3).
        self.groq_key = os.getenv("GROQ_API_KEY")
        self.groq_client = None
        self.groq_model = "llama-3.3-70b-versatile"
        if self.groq_key:
            try:
                self.groq_client = Groq(api_key=self.groq_key)
                print("⚡ Groq (Llama 3.3): ONLINE")
            except Exception as e:
                print(f"⚠️ Groq Init Failed: {e}")

    def _call_gemini(self, prompt, system_msg="You are a helpful AI."):
        """Send one prompt to Gemini and return the response text.

        Raises RuntimeError when the client was never initialised.
        """
        if not self.gemini_client:
            raise RuntimeError("Gemini Down")
        response = self.gemini_client.models.generate_content(
            model=self.gemini_model,
            contents=f"System: {system_msg}\nUser: {prompt}",
        )
        return response.text

    def _call_groq(self, prompt, system_msg="You are a helpful AI."):
        """Send one prompt to Groq and return the response text.

        Raises RuntimeError when the client was never initialised.
        """
        if not self.groq_client:
            raise RuntimeError("Groq Down")
        response = self.groq_client.chat.completions.create(
            model=self.groq_model,
            messages=[
                {"role": "system", "content": system_msg},
                {"role": "user", "content": prompt},
            ],
        )
        return response.choices[0].message.content

    def _binary_pipeline(self, user_prompt):
        """THE BINARY STAR PIPELINE.

        Phase 1: Gemini drafts (creative/logic).
        Phase 2: Groq refines (optimization/speed).
        Returns a (text, source_tag) tuple on every path.
        """
        draft_code = ""

        # --- PHASE 1: DRAFTING (Gemini) ---
        try:
            print(" ↳ Phase 1: Gemini Drafting...")
            draft_code = self._call_gemini(
                user_prompt, "Write a Python solution. Be verbose and explanatory."
            )
        except Exception as e:
            print(f"⚠️ Gemini Draft Failed: {e}")
            # FALLBACK: if Gemini dies, Groq does the whole job.
            try:
                return (
                    self._call_groq(user_prompt, "You are an expert Python Coder."),
                    "ASM (Groq Backup)",
                )
            except Exception as e2:
                # Surface both failures instead of discarding them — the
                # original message dropped e and e2 entirely.
                return (
                    f"❌ CRITICAL: All models failed. Gemini: {e}. Groq: {e2}.",
                    "ERROR",
                )

        # --- PHASE 2: REFINEMENT (Groq) ---
        try:
            print(" ↳ Phase 2: Groq Refining...")
            refine_prompt = f"""
            You are a Senior Principal Engineer. Refine this Junior Developer's code.

            GOALS:
            1. Optimize for speed.
            2. Fix bugs.
            3. Clean up comments.

            DRAFT CODE:
            {draft_code}
            """
            final_code = self._call_groq(refine_prompt, "You are a Senior Engineer.")
            return final_code, "ASM (Binary Refined)"
        except Exception as e:
            # Fallback: just show the draft if Groq fails.
            print(f"⚠️ Groq Refine Failed: {e}")
            return (
                f"{draft_code}\n\n**[⚠️ Groq Refine Failed. Showing Gemini Draft]**",
                "ASM (Gemini Draft)",
            )

    def _route_task(self, prompt):
        """Classify the prompt into a role tag, defaulting to CHAT.

        Tries Gemini first, then Groq. Unknown or malformed tags fall
        through to the next router (and ultimately to CHAT) rather than
        being propagated verbatim.
        """
        sys_msg = "Classify: [ASM] (Code), [SFE] (Science), [CSM] (Story), [CHAT]. Reply TAG only."
        valid_tags = {"ASM", "SFE", "CSM", "CHAT"}
        for caller in (self._call_gemini, self._call_groq):
            try:
                tag = caller(prompt, sys_msg).strip().replace("[", "").replace("]", "").upper()
                if tag in valid_tags:
                    return tag
            except Exception:
                continue  # router unavailable — try the next one
        return "CHAT"  # default if every router is dead

    def process_request(self, user_message, history, manual_role="Auto"):
        """Route a user message and execute it with the best available model.

        `history` is accepted for interface compatibility but not yet used.
        Returns a (text, source_tag) tuple on every path.
        """
        # 1. Routing
        role = manual_role if manual_role != "Auto" else self._route_task(user_message)
        print(f"👉 Routing to: [{role}]")

        try:
            # === CODE PATH (ASM) ===
            # Use the dual-model pipeline only when both clients are up.
            if role == "ASM" and self.groq_client and self.gemini_client:
                return self._binary_pipeline(user_message)

            # === SINGLE MODEL PATHS ===
            system_instruction = "You are Project Chimera."
            if role == "SFE":
                system_instruction = "You are a Data Scientist."
            elif role == "CSM":
                system_instruction = "You are a Creative Writer."

            # Prefer Gemini for creative/long context.
            try:
                return (
                    self._call_gemini(user_message, system_instruction),
                    f"{role} (Gemini)",
                )
            except Exception:
                # Fallback to Groq.
                try:
                    return (
                        self._call_groq(user_message, system_instruction),
                        f"{role} (Groq)",
                    )
                except Exception:
                    return "❌ All systems down.", "OUTAGE"
        except Exception as e:
            return f"❌ System Error: {str(e)}", "ERR"