dboa9 committed on
Commit
c640df3
·
1 Parent(s): 60d5807

Accept OpenAI content as string or array to fix 422 from Cline/Roo

Browse files
Files changed (1) hide show
  1. app.py +20 -6
app.py CHANGED
@@ -28,7 +28,7 @@ import logging
28
  from fastapi import FastAPI, HTTPException, Header, UploadFile, File
29
  from fastapi.responses import StreamingResponse
30
  from pydantic import BaseModel
31
- from typing import List, Optional, Dict, Any
32
 
33
  logging.basicConfig(level=logging.INFO)
34
  logger = logging.getLogger("moltbot-engine")
@@ -453,7 +453,7 @@ async def extract_date(filename: str, x_api_key: str = Header(None)):
453
 
454
  class ChatMessage(BaseModel):
455
  role: str
456
- content: str
457
 
458
  class ChatCompletionRequest(BaseModel):
459
  model: str = "moltbot-legal"
@@ -479,25 +479,39 @@ MODEL_ROUTING = {
479
  }
480
 
481
 
 
 
 
 
 
 
 
 
 
 
 
 
 
482
  def _build_prompt_from_messages(messages: List[ChatMessage]) -> str:
483
  """Build a full prompt from the chat message history.
484
  Preserves conversation context, not just last message.
485
  """
486
  parts = []
487
  for msg in messages:
 
488
  if msg.role == "system":
489
- parts.append(f"System: {msg.content}")
490
  elif msg.role == "user":
491
- parts.append(f"User: {msg.content}")
492
  elif msg.role == "assistant":
493
- parts.append(f"Assistant: {msg.content}")
494
  parts.append("Assistant:")
495
  return "\n\n".join(parts)
496
 
497
 
498
  def _build_hf_chat_messages(messages: List[ChatMessage]) -> List[Dict[str, str]]:
499
  """Build HF chat_completion message list from OpenAI messages."""
500
- return [{"role": msg.role, "content": msg.content} for msg in messages]
501
 
502
 
503
  def _generate_for_model(model_name: str, messages: List[ChatMessage], temperature: float = 0.7, max_tokens: int = 2048) -> Optional[str]:
 
28
  from fastapi import FastAPI, HTTPException, Header, UploadFile, File
29
  from fastapi.responses import StreamingResponse
30
  from pydantic import BaseModel
31
+ from typing import List, Optional, Dict, Any, Union
32
 
33
  logging.basicConfig(level=logging.INFO)
34
  logger = logging.getLogger("moltbot-engine")
 
453
 
454
class ChatMessage(BaseModel):
    """One OpenAI-compatible chat message.

    ``content`` accepts either a plain string or a list of typed content
    parts (as sent by vision / R1-style clients such as Cline/Roo),
    mirroring the OpenAI Chat Completions schema.
    """

    role: str
    content: Union[str, List[Dict[str, Any]]]
457
 
458
  class ChatCompletionRequest(BaseModel):
459
  model: str = "moltbot-legal"
 
479
  }
480
 
481
 
482
+ def _message_content_to_str(content: Union[str, List[Dict[str, Any]]]) -> str:
483
+ """Normalize message content to string (OpenAI can send string or array of parts)."""
484
+ if isinstance(content, str):
485
+ return content
486
+ if isinstance(content, list):
487
+ return " ".join(
488
+ p.get("text", "") if isinstance(p.get("text"), str) else str(p)
489
+ for p in content
490
+ if isinstance(p, dict)
491
+ )
492
+ return str(content)
493
+
494
+
495
def _build_prompt_from_messages(messages: List[ChatMessage]) -> str:
    """Render the full chat history as a single prompt string.

    Keeps the entire conversation (system/user/assistant turns), not just
    the last message, and ends with an "Assistant:" cue so the model
    continues as the assistant. Messages with unrecognized roles are
    silently skipped.
    """
    role_labels = {"system": "System", "user": "User", "assistant": "Assistant"}
    rendered = []
    for message in messages:
        label = role_labels.get(message.role)
        if label is not None:
            rendered.append(f"{label}: {_message_content_to_str(message.content)}")
    rendered.append("Assistant:")
    return "\n\n".join(rendered)
510
 
511
 
512
def _build_hf_chat_messages(messages: List[ChatMessage]) -> List[Dict[str, str]]:
    """Convert OpenAI-style messages into the HF chat_completion format.

    Each message's content is normalized to a plain string so the HF
    client always receives ``{"role": ..., "content": str}`` entries.
    """
    hf_messages = []
    for message in messages:
        hf_messages.append(
            {"role": message.role, "content": _message_content_to_str(message.content)}
        )
    return hf_messages
515
 
516
 
517
  def _generate_for_model(model_name: str, messages: List[ChatMessage], temperature: float = 0.7, max_tokens: int = 2048) -> Optional[str]: