import os
import random
from google import genai
from groq import Groq
from ddgs import DDGS
from huggingface_hub import InferenceClient
from PIL import Image
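
# Chimera: a four-module "router" agent.
#   VIM - vision analysis via Gemini            (needs GEMINI_API_KEY)
#   ASM - coding / reasoning via Groq           (needs GROQ_API_KEY)
#   IGM - image generation via FLUX.1-schnell   (needs HF_TOKEN)
#   NET - DuckDuckGo search, synthesized by Gemini when available
# Clients whose keys are missing stay None; each handler checks before use.
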
class Chimera:
    def __init__(self):
        # Default every client to None so checks like `if not self.hf_client`
        # can't raise AttributeError when a key is missing.
        self.gemini_client = None
        self.groq_client = None
        self.hf_client = None

        # 1. SETUP GEMINI (Router + Vision)
        self.gemini_key = os.getenv("GEMINI_API_KEY")
        if self.gemini_key:
            self.gemini_client = genai.Client(api_key=self.gemini_key)
            print("👁️ VIM (Gemini Vision): ONLINE")

        # 2. SETUP GROQ (Coding + Reasoning)
        self.groq_key = os.getenv("GROQ_API_KEY")
        if self.groq_key:
            self.groq_client = Groq(api_key=self.groq_key)
            print("⚡ ASM (Qwen/Llama): ONLINE")

        # 3. SETUP HUGGING FACE (Image Generation)
        self.hf_token = os.getenv("HF_TOKEN")
        if self.hf_token:
            self.hf_client = InferenceClient("black-forest-labs/FLUX.1-schnell", token=self.hf_token)
            print("🎨 IGM (Flux Art): ONLINE")

    # --- MODULE 1: NET (Web Search) ---
    def _web_search(self, query):
        print(f" ⏳ 🔍 Searching web for: {query}")
        try:
            with DDGS() as ddgs:
                results = list(ddgs.text(query, max_results=3))
                summary = "\n".join([f"- {r['title']}: {r['body']}" for r in results])
                return summary
        except Exception as e:
            return f"Search Error: {e}"
    # --- MODULE 2: IGM (Image Generation) ---
    def _generate_image(self, prompt):
        print(f" ⏳ 🎨 Generating image for: {prompt}")
        if not self.hf_client:
            return None, "❌ HF_TOKEN missing."
        try:
            image = self.hf_client.text_to_image(prompt)
            # Save to a temporary path so Gradio can display it
            path = f"/tmp/gen_{random.randint(0, 9999)}.png"
            image.save(path)
            # Return the path - Gradio will handle displaying it
            return path, None
        except Exception as e:
            return None, f"Generation Error: {e}"
    # --- MODULE 3: VIM (Vision Analysis) ---
    def _analyze_image(self, text, image_path):
        print(" ⏳ 👁️ Analyzing Image...")
        if not self.gemini_client:
            return "❌ GEMINI_API_KEY missing."
        try:
            pil_image = Image.open(image_path)
            response = self.gemini_client.models.generate_content(
                model="gemini-2.5-flash",
                contents=[text, pil_image]
            )
            return response.text
        except Exception as e:
            return f"Vision Error: {e}"
    def process_request(self, message, history, manual_role, image_input=None):
        # 1. PRIORITY: VIM (if an image is uploaded, vision wins outright)
        if image_input:
            return self._analyze_image(message or "Describe this image in detail.", image_input), "VIM"

        # 2. DETECT INTENT (Router)
        role = manual_role
        if role == "Auto":
            msg_lower = message.lower()
            if any(x in msg_lower for x in ["generate", "draw", "create image", "paint", "make an image", "picture of"]):
                role = "IGM"
            elif any(x in msg_lower for x in ["search", "news", "price", "latest", "find", "look up"]):
                role = "NET"
            elif any(x in msg_lower for x in ["code", "python", "script", "function", "debug", "program"]):
                role = "ASM"
            else:
                role = "CHAT"
        else:
            # Map dropdown values to internal codes
            role_map = {
                "ASM (Code)": "ASM",
                "IGM (Generate Image)": "IGM",
                "NET (Search)": "NET",
                "VIM (Vision)": "VIM"
            }
            role = role_map.get(manual_role, role)

        print(f"🚀 Routing to: [{role}]")

        # 3. EXECUTE MODULES
        if role in ("IGM", "IGM (Generate Image)"):
            # Image Gen Mode
            image_path, error_msg = self._generate_image(message)
            if error_msg:
                return error_msg, "IGM"
            # Return both text and image path
            return ("✨ Image generated successfully!", image_path), "IGM"

        elif role in ("NET", "NET (Search)"):
            # Web Search Mode
            search_data = self._web_search(message)
            # Synthesize an answer with Gemini; return raw results if it is offline
            if not self.gemini_client:
                return search_data, "NET"
            prompt = f"User Question: {message}\n\nSearch Results:\n{search_data}\n\nProvide a clear, concise answer based on these results."
            response = self.gemini_client.models.generate_content(model="gemini-2.5-flash", contents=prompt)
            return response.text, "NET"

        elif role in ("ASM", "ASM (Code)"):
            # Coding Mode (Qwen/Llama); on any Groq failure, drop through
            # to the Gemini fallback below
            if self.groq_client:
                try:
                    res = self.groq_client.chat.completions.create(
                        model="qwen2.5-coder-32b-instruct",
                        messages=[{"role": "user", "content": message}]
                    )
                    return res.choices[0].message.content, "ASM"
                except Exception as e:
                    print(f"Groq Error: {e}, falling back to Gemini")

        # Default Fallback (Gemini) - handles CHAT and any failed module
        try:
            res = self.gemini_client.models.generate_content(
                model="gemini-2.5-flash", contents=message
            )
            return res.text, role
        except Exception as e:
            return f"System Error: {e}", "ERR"