AIencoder committed on
Commit
3a64534
·
verified ·
1 Parent(s): 90195b4

Update src/chimera_core.py

Browse files
Files changed (1) hide show
  1. src/chimera_core.py +65 -50
src/chimera_core.py CHANGED
@@ -5,10 +5,10 @@ from openai import OpenAI
5
 
6
  class Chimera:
7
  def __init__(self):
8
- # 1. SETUP GEMINI (Google)
9
  self.gemini_key = os.getenv("GEMINI_API_KEY")
10
  if not self.gemini_key:
11
- # Fallback for local testing if env var is missing
12
  try:
13
  import config
14
  self.gemini_key = config.API_KEY
@@ -18,29 +18,69 @@ class Chimera:
18
  if self.gemini_key:
19
  self.gemini_client = genai.Client(api_key=self.gemini_key)
20
  self.gemini_model = "gemini-2.5-flash"
21
- print(f"🦁 Gemini Core: ONLINE [{self.gemini_model}]")
22
  else:
23
  raise ValueError("❌ CRITICAL: GEMINI_API_KEY missing in Secrets!")
24
 
25
- # 2. SETUP GPT-4o (OpenAI)
26
  self.openai_key = os.getenv("OPENAI_API_KEY")
27
  self.openai_client = None
28
  if self.openai_key:
29
  self.openai_client = OpenAI(api_key=self.openai_key)
30
- print("🟒 OpenAI Core: ONLINE [gpt-4o]")
31
  else:
32
- print("⚠️ OpenAI Key not found. GPT-4o disabled.")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
33
 
34
  def _route_task(self, prompt):
35
- """ The Router: Decides between Gemini (Fast) and GPT-4o (Smart) """
36
- # We use Gemini for routing because it's cheaper/faster
37
  routing_prompt = f"""
38
  Classify this task.
39
- [ASM] - Coding/Math (Best for GPT-4o)
40
- [SFE] - Data/Science (Best for Gemini)
41
- [CSM] - Creative/Story (Best for Gemini)
42
- [CHAT] - Casual (Best for Gemini)
43
-
44
  User Task: "{prompt}"
45
  Reply ONLY with the tag.
46
  """
@@ -56,48 +96,23 @@ class Chimera:
56
 
57
  def process_request(self, user_message, history, manual_role="Auto"):
58
  # 1. Determine Role
59
- if manual_role and manual_role != "Auto":
60
- role = manual_role
61
- else:
62
- role = self._route_task(user_message)
63
-
64
  print(f"πŸ‘‰ Routing to: [{role}]")
65
 
66
- # 2. Assign Persona & Model
67
- system_instruction = ""
68
- model_to_use = "Gemini"
69
-
70
- if role == "ASM":
71
- system_instruction = "You are the ASM (Abstract Symbology Module). You are an expert Python Developer. Write efficient code."
72
- # ASM (Coding) is better with GPT-4o if available
73
- if self.openai_client:
74
- model_to_use = "OpenAI"
75
-
76
- elif role == "SFE":
77
- system_instruction = "You are the SFE (Sensory Fusion Engine). You are a Data Scientist. Analyze facts objectively."
78
-
79
- elif role == "CSM":
80
- system_instruction = "You are the CSM (Creative Synthesis Module). You are a Novelist. Write with creativity."
81
-
82
- else:
83
- system_instruction = "You are Project Chimera."
84
-
85
- # 3. Execute
86
  try:
87
- if model_to_use == "OpenAI" and self.openai_client:
88
- # Call GPT-4o
89
- response = self.openai_client.chat.completions.create(
90
- model="gpt-4o",
91
- messages=[
92
- {"role": "system", "content": system_instruction},
93
- {"role": "user", "content": user_message}
94
- ]
95
- )
96
- return response.choices[0].message.content, f"{role} (GPT-4o)"
97
 
 
98
  else:
99
- # Call Gemini
100
- full_prompt = f"System Instruction: {system_instruction}\n\nUser Message: {user_message}"
 
 
 
101
  response = self.gemini_client.models.generate_content(
102
  model=self.gemini_model,
103
  contents=full_prompt
 
5
 
6
  class Chimera:
7
  def __init__(self):
8
+ # 1. SETUP GEMINI (The Draftsman)
9
  self.gemini_key = os.getenv("GEMINI_API_KEY")
10
  if not self.gemini_key:
11
+ # Fallback for local testing
12
  try:
13
  import config
14
  self.gemini_key = config.API_KEY
 
18
  if self.gemini_key:
19
  self.gemini_client = genai.Client(api_key=self.gemini_key)
20
  self.gemini_model = "gemini-2.5-flash"
21
+ print(f"🦁 Gemini Draftsman: ONLINE [{self.gemini_model}]")
22
  else:
23
  raise ValueError("❌ CRITICAL: GEMINI_API_KEY missing in Secrets!")
24
 
25
+ # 2. SETUP GPT-4o (The Refiner)
26
  self.openai_key = os.getenv("OPENAI_API_KEY")
27
  self.openai_client = None
28
  if self.openai_key:
29
  self.openai_client = OpenAI(api_key=self.openai_key)
30
+ print("🟒 OpenAI Refiner: ONLINE [gpt-4o]")
31
  else:
32
+ print("⚠️ OpenAI Key not found. Dual-Core mode disabled.")
33
+
34
+ def _pipeline_code(self, user_prompt):
35
+ """
36
+ THE DUAL-CORE PIPELINE:
37
+ Step 1: Gemini generates a draft.
38
+ Step 2: GPT-4o refines and perfects it.
39
+ """
40
+ if not self.openai_client:
41
+ return "⚠️ OpenAI Key missing. Cannot run refinement pipeline."
42
+
43
+ print("⚑ Starting Dual-Core Pipeline...")
44
+
45
+ # Step 1: Gemini Draft
46
+ print(" ↳ Phase 1: Gemini Drafting...")
47
+ draft_prompt = f"Write a Python solution for this task. Be verbose and include comments.\nTask: {user_prompt}"
48
+ draft_response = self.gemini_client.models.generate_content(
49
+ model=self.gemini_model,
50
+ contents=draft_prompt
51
+ )
52
+ draft_code = draft_response.text
53
+
54
+ # Step 2: GPT-4o Refinement
55
+ print(" ↳ Phase 2: GPT-4o Refining...")
56
+ refine_prompt = f"""
57
+ You are a Senior Principal Engineer. Review the following Junior Developer's code.
58
+
59
+ GOALS:
60
+ 1. Fix any potential bugs.
61
+ 2. Optimize for speed and memory.
62
+ 3. Make variable names professional and clear.
63
+ 4. Add robust error handling.
64
+
65
+ JUNIOR CODE DRAFT:
66
+ {draft_code}
67
+
68
+ Output ONLY the final, perfect code (with brief explanations).
69
+ """
70
+
71
+ final_response = self.openai_client.chat.completions.create(
72
+ model="gpt-4o",
73
+ messages=[{"role": "user", "content": refine_prompt}]
74
+ )
75
+
76
+ return final_response.choices[0].message.content
77
 
78
  def _route_task(self, prompt):
79
+ # Route to ASM (Code) automatically if it looks like code
 
80
  routing_prompt = f"""
81
  Classify this task.
82
+ [ASM] - Coding, Math, Python, Algorithms.
83
+ [CHAT] - Everything else.
 
 
 
84
  User Task: "{prompt}"
85
  Reply ONLY with the tag.
86
  """
 
96
 
97
  def process_request(self, user_message, history, manual_role="Auto"):
98
  # 1. Determine Role
99
+ role = manual_role if manual_role != "Auto" else self._route_task(user_message)
 
 
 
 
100
  print(f"πŸ‘‰ Routing to: [{role}]")
101
 
102
+ # 2. EXECUTE
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
103
  try:
104
+ # IF IT IS CODE (ASM), USE THE 2-AI PIPELINE
105
+ if role == "ASM" and self.openai_client:
106
+ final_code = self._pipeline_code(user_message)
107
+ return final_code, "ASM (Dual-Core)"
 
 
 
 
 
 
108
 
109
+ # OTHERWISE, JUST USE GEMINI (Faster/Cheaper)
110
  else:
111
+ system_instruction = "You are Project Chimera."
112
+ if role == "SFE": system_instruction = "You are a Data Scientist."
113
+ if role == "CSM": system_instruction = "You are a Creative Writer."
114
+
115
+ full_prompt = f"System: {system_instruction}\nUser: {user_message}"
116
  response = self.gemini_client.models.generate_content(
117
  model=self.gemini_model,
118
  contents=full_prompt