Spaces:
Running
Running
Update backend/lens_core.py
Browse files — backend/lens_core.py (+24, −4)
backend/lens_core.py
CHANGED
|
@@ -562,10 +562,29 @@ def _resolve_ai_config():
|
|
| 562 |
|
| 563 |
def _openai_compat_generate_json(api_key: str, base_url: str, model: str, system_text: str, user_parts: list[str]):
|
| 564 |
url = (base_url.rstrip("/") + "/chat/completions")
|
| 565 |
-
|
| 566 |
-
|
| 567 |
-
|
| 568 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 569 |
payload = {
|
| 570 |
"model": model,
|
| 571 |
"messages": messages,
|
|
@@ -597,6 +616,7 @@ def _openai_compat_generate_json(api_key: str, base_url: str, model: str, system
|
|
| 597 |
fallback = _pick_hf_fallback_model(models)
|
| 598 |
if fallback and fallback != model:
|
| 599 |
payload["model"] = fallback
|
|
|
|
| 600 |
used_model = fallback
|
| 601 |
r2 = client.post(url, json=payload, headers=headers)
|
| 602 |
try:
|
|
|
|
| 562 |
|
| 563 |
def _openai_compat_generate_json(api_key: str, base_url: str, model: str, system_text: str, user_parts: list[str]):
|
| 564 |
url = (base_url.rstrip("/") + "/chat/completions")
|
| 565 |
+
|
| 566 |
+
def _user_only_prompt_for_model(m: str) -> bool:
|
| 567 |
+
ml = (m or "").strip().lower()
|
| 568 |
+
if not ml:
|
| 569 |
+
return False
|
| 570 |
+
if "gemma-3" in ml or "gemma-2" in ml:
|
| 571 |
+
return True
|
| 572 |
+
if "gemma" in ml and ("-it" in ml or "instruct" in ml):
|
| 573 |
+
return True
|
| 574 |
+
return False
|
| 575 |
+
|
| 576 |
+
def _build_messages(m: str):
|
| 577 |
+
parts = [p.strip() for p in (user_parts or []) if isinstance(p, str) and p.strip()]
|
| 578 |
+
sys = (system_text or "").strip()
|
| 579 |
+
if _user_only_prompt_for_model(m):
|
| 580 |
+
combined = "\n\n".join([x for x in ([sys] + parts) if x])
|
| 581 |
+
return [{"role": "user", "content": combined}]
|
| 582 |
+
msgs = [{"role": "system", "content": system_text}]
|
| 583 |
+
for p in parts:
|
| 584 |
+
msgs.append({"role": "user", "content": p})
|
| 585 |
+
return msgs
|
| 586 |
+
|
| 587 |
+
messages = _build_messages(model)
|
| 588 |
payload = {
|
| 589 |
"model": model,
|
| 590 |
"messages": messages,
|
|
|
|
| 616 |
fallback = _pick_hf_fallback_model(models)
|
| 617 |
if fallback and fallback != model:
|
| 618 |
payload["model"] = fallback
|
| 619 |
+
payload["messages"] = _build_messages(fallback)
|
| 620 |
used_model = fallback
|
| 621 |
r2 = client.post(url, json=payload, headers=headers)
|
| 622 |
try:
|