File size: 1,706 Bytes
85b19cf
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
"""Compatibility helpers for OpenAI chat completions across model families."""

from __future__ import annotations

from typing import Any


def rewrite_chat_completion_kwargs(payload: dict[str, Any]) -> dict[str, Any]:
    """Translate deprecated chat completion parameters for reasoning models.

    OpenAI reasoning models (GPT-5 and the o-series) reject the legacy
    ``max_tokens`` parameter and require ``max_completion_tokens`` instead.
    If the payload targets such a model and supplies only ``max_tokens``,
    the parameter is renamed; an explicit ``max_completion_tokens`` is never
    overwritten.

    Args:
        payload: Keyword arguments destined for ``chat.completions.create``.

    Returns:
        A shallow copy of ``payload`` with the parameter renamed when needed.
        The input mapping is never mutated.
    """
    rewritten = dict(payload)
    # ``model`` may be absent or None; normalize to "" so startswith is safe.
    model = str(rewritten.get("model") or "")
    # o1/o3/o4 reasoning models share GPT-5's restriction, so cover them too.
    if (
        model.startswith(("gpt-5", "o1", "o3", "o4"))
        and "max_tokens" in rewritten
        and "max_completion_tokens" not in rewritten
    ):
        rewritten["max_completion_tokens"] = rewritten.pop("max_tokens")
    return rewritten


def patch_openai_chat_completions() -> bool:
    """Monkeypatch the OpenAI SDK so GPT-5 chat calls accept legacy max_tokens.

    Wraps ``Completions.create`` so payloads are passed through
    ``rewrite_chat_completion_kwargs`` before the real call, with a single
    retry if the server still rejects ``max_tokens``.

    Returns:
        True when the patch is installed (or already was), False when the
        OpenAI SDK is unavailable or its internal layout has changed.
    """
    try:
        from openai.resources.chat.completions.completions import Completions
    except Exception:
        # SDK missing or restructured: callers treat this as "nothing patched".
        return False

    current = Completions.create
    # Idempotence guard: patching twice would stack wrappers and retry twice.
    if getattr(current, "_eval_framework_patched", False):
        return True

    original_create = current

    def _patched_create(self: Any, *args: Any, **kwargs: Any) -> Any:
        rewritten = rewrite_chat_completion_kwargs(kwargs)
        try:
            return original_create(self, *args, **rewritten)
        except Exception as exc:
            # Server rejected max_tokens for a model the proactive rewrite did
            # not recognize.  Rename the parameter UNCONDITIONALLY here: the
            # model-gated rewrite would return kwargs unchanged for exactly the
            # models that reach this branch, resending the failing payload.
            if (
                "Unsupported parameter: 'max_tokens'" in str(exc)
                and "max_tokens" in kwargs
                and "max_completion_tokens" not in kwargs
            ):
                retried = dict(kwargs)
                retried["max_completion_tokens"] = retried.pop("max_tokens")
                return original_create(self, *args, **retried)
            raise

    _patched_create._eval_framework_patched = True  # type: ignore[attr-defined]
    Completions.create = _patched_create  # type: ignore[assignment]
    return True