akhaliq HF Staff committed on
Commit
5f52a28
·
1 Parent(s): 2a752a7

add claude sonnet 4.5

Browse files
Files changed (1) hide show
  1. app.py +18 -0
app.py CHANGED
@@ -2088,6 +2088,11 @@ AVAILABLE_MODELS = [
2088
  "id": "claude-opus-4.1",
2089
  "description": "Anthropic Claude Opus 4.1 via Poe (OpenAI-compatible)"
2090
  },
 
 
 
 
 
2091
  {
2092
  "name": "Qwen3 Max Preview",
2093
  "id": "qwen3-max-preview",
@@ -2233,6 +2238,12 @@ def get_inference_client(model_id, provider="auto"):
2233
  api_key=os.getenv("POE_API_KEY"),
2234
  base_url="https://api.poe.com/v1"
2235
  )
 
 
 
 
 
 
2236
  elif model_id == "qwen3-max-preview":
2237
  # Use DashScope International OpenAI client for Qwen3 Max Preview
2238
  return OpenAI(
@@ -6308,6 +6319,13 @@ Generate the exact search/replace blocks needed to make these changes."""
6308
  stream=True,
6309
  max_tokens=16384
6310
  )
 
 
 
 
 
 
 
6311
  else:
6312
  completion = client.chat.completions.create(
6313
  model=get_real_model_id(_current_model["id"]),
 
2088
  "id": "claude-opus-4.1",
2089
  "description": "Anthropic Claude Opus 4.1 via Poe (OpenAI-compatible)"
2090
  },
2091
+ {
2092
+ "name": "Claude-Sonnet-4.5",
2093
+ "id": "claude-sonnet-4.5",
2094
+ "description": "Anthropic Claude Sonnet 4.5 via Poe (OpenAI-compatible)"
2095
+ },
2096
  {
2097
  "name": "Qwen3 Max Preview",
2098
  "id": "qwen3-max-preview",
 
2238
  api_key=os.getenv("POE_API_KEY"),
2239
  base_url="https://api.poe.com/v1"
2240
  )
2241
+ elif model_id == "claude-sonnet-4.5":
2242
+ # Use Poe (OpenAI-compatible) client for Claude-Sonnet-4.5
2243
+ return OpenAI(
2244
+ api_key=os.getenv("POE_API_KEY"),
2245
+ base_url="https://api.poe.com/v1"
2246
+ )
2247
  elif model_id == "qwen3-max-preview":
2248
  # Use DashScope International OpenAI client for Qwen3 Max Preview
2249
  return OpenAI(
 
6319
  stream=True,
6320
  max_tokens=16384
6321
  )
6322
+ elif _current_model["id"] == "claude-sonnet-4.5":
6323
+ completion = client.chat.completions.create(
6324
+ model="Claude-Sonnet-4.5",
6325
+ messages=messages,
6326
+ stream=True,
6327
+ max_tokens=16384
6328
+ )
6329
  else:
6330
  completion = client.chat.completions.create(
6331
  model=get_real_model_id(_current_model["id"]),