AIencoder committed on
Commit
80c0889
·
verified ·
1 Parent(s): ee8059f

Update src/chimera_core.py

Browse files
Files changed (1) hide show
  1. src/chimera_core.py +26 -17
src/chimera_core.py CHANGED
@@ -45,7 +45,7 @@ class Chimera:
45
  # Save to a temporary path so Gradio can display it
46
  path = f"/tmp/gen_{random.randint(0,9999)}.png"
47
  image.save(path)
48
- return path, f"![Generated Image](/file={path})"
49
  except Exception as e:
50
  return None, f"Generation Error: {e}"
51
 
@@ -65,54 +65,63 @@ class Chimera:
65
  def process_request(self, message, history, manual_role, image_input=None):
66
  # 1. PRIORITY: VIM (If image is uploaded)
67
  if image_input:
68
- return self._analyze_image(message or "Describe this.", image_input), "VIM (Vision)"
69
 
70
  # 2. DETECT INTENT (Router)
71
  role = manual_role
72
  if role == "Auto":
73
  msg_lower = message.lower()
74
- if any(x in msg_lower for x in ["generate", "draw", "create image", "paint"]):
75
  role = "IGM"
76
- elif any(x in msg_lower for x in ["search", "news", "price", "latest"]):
77
  role = "NET"
78
- elif any(x in msg_lower for x in ["code", "python", "script"]):
79
  role = "ASM"
80
  else:
81
  role = "CHAT"
 
 
 
 
 
 
 
 
 
82
 
83
  print(f"👉 Routing to: [{role}]")
84
 
85
  # 3. EXECUTE MODULES
86
- if role == "IGM":
87
  # Image Gen Mode
88
  path, markdown = self._generate_image(message)
89
- return markdown, "IGM (Flux)"
90
 
91
- elif role == "NET":
92
  # Web Search Mode
93
  search_data = self._web_search(message)
94
  # Synthesize answer with Gemini
95
  prompt = f"User Question: {message}\n\nSearch Results:\n{search_data}\n\nAnswer the user based on these results."
96
  response = self.gemini_client.models.generate_content(model="gemini-2.5-flash", contents=prompt)
97
- return response.text, "NET (Web)"
98
 
99
- elif role == "ASM":
100
  # Coding Mode (Qwen/Llama) - Simplified for stability
101
  if self.groq_client:
102
  try:
103
  res = self.groq_client.chat.completions.create(
104
- model="qwen-2.5-32b", # Or qwen/qwen3-32b
105
  messages=[{"role":"user", "content": message}]
106
  )
107
- return res.choices[0].message.content, "ASM (Qwen)"
108
- except:
109
- pass # Fallback to Gemini
110
 
111
  # Default Fallback (Gemini)
112
  try:
113
  res = self.gemini_client.models.generate_content(
114
  model="gemini-2.5-flash", contents=message
115
  )
116
- return res.text, f"{role} (Gemini)"
117
- except:
118
- return "System Error.", "ERR"
 
45
  # Save to a temporary path so Gradio can display it
46
  path = f"/tmp/gen_{random.randint(0,9999)}.png"
47
  image.save(path)
48
+ return path, f"Generated image saved!\n\n![Generated Image](file={path})"
49
  except Exception as e:
50
  return None, f"Generation Error: {e}"
51
 
 
def process_request(self, message, history, manual_role, image_input=None):
    """Route a chat request to the appropriate backend module.

    Routing priority:
      1. VIM (vision) -- whenever an image is attached, vision always wins.
      2. Explicit role from the UI dropdown (display labels mapped to codes).
      3. Keyword-based auto-detection over the message text ("Auto").
      4. CHAT fallback via Gemini.

    Args:
        message: User's text prompt. May be None/empty (e.g. an image-only
            or blank submit), so it is guarded before string operations.
        history: Chat history; accepted for the Gradio callback signature
            but not used by the router itself.
        manual_role: Dropdown value, e.g. "Auto", "ASM (Code)",
            "IGM (Generate Image)", "NET (Search)", "VIM (Vision)".
        image_input: Optional uploaded image; forces the vision path.

    Returns:
        A (reply_text, module_tag) tuple; module_tag labels which module
        produced the answer ("VIM", "IGM", "NET", "ASM", "CHAT", or "ERR").
    """
    # 1. PRIORITY: VIM (if an image is uploaded, vision always wins)
    if image_input:
        return self._analyze_image(message or "Describe this.", image_input), "VIM"

    # 2. DETECT INTENT (router)
    role = manual_role
    if role == "Auto":
        # Guard against a None message (blank submit) before lowercasing;
        # the previous code crashed with AttributeError here.
        msg_lower = (message or "").lower()
        if any(x in msg_lower for x in ["generate", "draw", "create image", "paint", "make an image"]):
            role = "IGM"
        elif any(x in msg_lower for x in ["search", "news", "price", "latest", "find"]):
            role = "NET"
        elif any(x in msg_lower for x in ["code", "python", "script", "function", "debug"]):
            role = "ASM"
        else:
            role = "CHAT"
    else:
        # Map dropdown display values to internal codes; unknown values
        # pass through unchanged and fall into the Gemini default below.
        role_map = {
            "ASM (Code)": "ASM",
            "IGM (Generate Image)": "IGM",
            "NET (Search)": "NET",
            "VIM (Vision)": "VIM",
        }
        role = role_map.get(manual_role, role)

    print(f"👉 Routing to: [{role}]")

    # 3. EXECUTE MODULES
    # role is already normalized to an internal code above, so the former
    # long-form checks ('role == "IGM (Generate Image)"', etc.) were dead.
    if role == "IGM":
        # Image generation mode; the markdown embeds the saved file, so the
        # raw path is not needed by the caller here.
        path, markdown = self._generate_image(message)
        return markdown, "IGM"

    elif role == "NET":
        # Web search mode: fetch results, then synthesize an answer with Gemini.
        search_data = self._web_search(message)
        prompt = f"User Question: {message}\n\nSearch Results:\n{search_data}\n\nAnswer the user based on these results."
        response = self.gemini_client.models.generate_content(model="gemini-2.5-flash", contents=prompt)
        return response.text, "NET"

    elif role == "ASM":
        # Coding mode (Groq/Qwen) -- simplified for stability; any failure
        # is logged and falls through to the Gemini default below.
        if self.groq_client:
            try:
                res = self.groq_client.chat.completions.create(
                    model="qwen2.5-coder-32b-instruct",
                    messages=[{"role": "user", "content": message}]
                )
                return res.choices[0].message.content, "ASM"
            except Exception as e:
                print(f"Groq Error: {e}, falling back to Gemini")

    # Default fallback (Gemini) -- handles CHAT, VIM-without-image,
    # unmapped roles, and ASM failures.
    try:
        res = self.gemini_client.models.generate_content(
            model="gemini-2.5-flash", contents=message
        )
        return res.text, f"{role}"
    except Exception as e:
        return f"System Error: {e}", "ERR"