Replace hardcoded OAuth secrets with placeholders
Browse files- .env.example +34 -0
- .github/FUNDING.yml +1 -0
- .github/ISSUE_TEMPLATE/bug_report.md +44 -0
- .github/workflows/docker-image.yml +46 -0
- .github/workflows/pr-path-guard.yml +28 -0
- .github/workflows/pr-test-build.yml +23 -0
- .github/workflows/release.yaml +39 -0
- .goreleaser.yml +39 -0
- CC_MIRROR_SETUP.md +64 -0
- QUICK_REFERENCE.txt +54 -0
- README_CN.md +24 -0
- SETUP_GUIDE (Copy).md +353 -0
- SETUP_GUIDE.md +353 -0
- assets/cubence.png +0 -0
- assets/packycode.png +0 -0
- auths/.gitkeep +0 -0
- docker-build.ps1 +53 -0
- docker-build.sh +58 -0
- dsa +0 -0
- examples/custom-provider/main.go +207 -0
- examples/translator/main.go +42 -0
- internal/api/middleware/anthropic_auth.go +106 -0
- internal/api/middleware/anthropic_auth_test.go +97 -0
- internal/api/middleware/anthropic_debug.go +59 -0
- scripts/verify_claude_proxy.sh +75 -0
- server.log +0 -0
- switch-provider.sh +53 -0
- test-proxy-config.sh +31 -0
- test/amp_management_test.go +915 -0
- test/config_migration_test.go +195 -0
- test/gemini3_thinking_level_test.go +423 -0
- test/model_alias_thinking_suffix_test.go +211 -0
- test/thinking_conversion_test.go +798 -0
.env.example
ADDED
|
@@ -0,0 +1,34 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Example environment configuration for CLIProxyAPI.
|
| 2 |
+
# Copy this file to `.env` and uncomment the variables you need.
|
| 3 |
+
#
|
| 4 |
+
# NOTE: Environment variables are only required when using remote storage options.
|
| 5 |
+
# For local file-based storage (default), no environment variables need to be set.
|
| 6 |
+
|
| 7 |
+
# ------------------------------------------------------------------------------
|
| 8 |
+
# Management Web UI
|
| 9 |
+
# ------------------------------------------------------------------------------
|
| 10 |
+
# MANAGEMENT_PASSWORD=change-me-to-a-strong-password
|
| 11 |
+
|
| 12 |
+
# ------------------------------------------------------------------------------
|
| 13 |
+
# Postgres Token Store (optional)
|
| 14 |
+
# ------------------------------------------------------------------------------
|
| 15 |
+
# PGSTORE_DSN=postgresql://user:pass@localhost:5432/cliproxy
|
| 16 |
+
# PGSTORE_SCHEMA=public
|
| 17 |
+
# PGSTORE_LOCAL_PATH=/var/lib/cliproxy
|
| 18 |
+
|
| 19 |
+
# ------------------------------------------------------------------------------
|
| 20 |
+
# Git-Backed Config Store (optional)
|
| 21 |
+
# ------------------------------------------------------------------------------
|
| 22 |
+
# GITSTORE_GIT_URL=https://github.com/your-org/cli-proxy-config.git
|
| 23 |
+
# GITSTORE_GIT_USERNAME=git-user
|
| 24 |
+
# GITSTORE_GIT_TOKEN=ghp_your_personal_access_token
|
| 25 |
+
# GITSTORE_LOCAL_PATH=/data/cliproxy/gitstore
|
| 26 |
+
|
| 27 |
+
# ------------------------------------------------------------------------------
|
| 28 |
+
# Object Store Token Store (optional)
|
| 29 |
+
# ------------------------------------------------------------------------------
|
| 30 |
+
# OBJECTSTORE_ENDPOINT=https://s3.your-cloud.example.com
|
| 31 |
+
# OBJECTSTORE_BUCKET=cli-proxy-config
|
| 32 |
+
# OBJECTSTORE_ACCESS_KEY=your_access_key
|
| 33 |
+
# OBJECTSTORE_SECRET_KEY=your_secret_key
|
| 34 |
+
# OBJECTSTORE_LOCAL_PATH=/data/cliproxy/objectstore
|
.github/FUNDING.yml
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
github: [router-for-me]
|
.github/ISSUE_TEMPLATE/bug_report.md
ADDED
|
@@ -0,0 +1,44 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
---
|
| 2 |
+
name: Bug report
|
| 3 |
+
about: Create a report to help us improve
|
| 4 |
+
title: ''
|
| 5 |
+
labels: ''
|
| 6 |
+
assignees: ''
|
| 7 |
+
|
| 8 |
+
---
|
| 9 |
+
|
| 10 |
+
**Is it a request payload issue?**
|
| 11 |
+
[ ] Yes, this is a request payload issue. I am using a client/cURL to send a request payload, but I received an unexpected error.
|
| 12 |
+
[ ] No, it's another issue.
|
| 13 |
+
|
| 14 |
+
**If it's a request payload issue, you MUST know**
|
| 15 |
+
Our team doesn't have any GODs or ORACLEs or MIND READERs. Please make sure to attach the request log or curl payload.
|
| 16 |
+
|
| 17 |
+
**Describe the bug**
|
| 18 |
+
A clear and concise description of what the bug is.
|
| 19 |
+
|
| 20 |
+
**CLI Type**
|
| 21 |
+
What type of CLI account do you use? (gemini-cli, gemini, codex, claude code or openai-compatibility)
|
| 22 |
+
|
| 23 |
+
**Model Name**
|
| 24 |
+
What model are you using? (example: gemini-2.5-pro, claude-sonnet-4-20250514, gpt-5, etc.)
|
| 25 |
+
|
| 26 |
+
**LLM Client**
|
| 27 |
+
What LLM Client are you using? (example: roo-code, cline, claude code, etc.)
|
| 28 |
+
|
| 29 |
+
**Request Information**
|
| 30 |
+
The best way is to paste the cURL command of the HTTP request here.
|
| 31 |
+
Alternatively, you can set `request-log: true` in the `config.yaml` file and then upload the detailed log file.
|
| 32 |
+
|
| 33 |
+
**Expected behavior**
|
| 34 |
+
A clear and concise description of what you expected to happen.
|
| 35 |
+
|
| 36 |
+
**Screenshots**
|
| 37 |
+
If applicable, add screenshots to help explain your problem.
|
| 38 |
+
|
| 39 |
+
**OS Type**
|
| 40 |
+
- OS: [e.g. macOS]
|
| 41 |
+
- Version [e.g. 15.6.0]
|
| 42 |
+
|
| 43 |
+
**Additional context**
|
| 44 |
+
Add any other context about the problem here.
|
.github/workflows/docker-image.yml
ADDED
|
@@ -0,0 +1,46 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
name: docker-image
|
| 2 |
+
|
| 3 |
+
on:
|
| 4 |
+
push:
|
| 5 |
+
tags:
|
| 6 |
+
- v*
|
| 7 |
+
|
| 8 |
+
env:
|
| 9 |
+
APP_NAME: CLIProxyAPI
|
| 10 |
+
DOCKERHUB_REPO: eceasy/cli-proxy-api-plus
|
| 11 |
+
|
| 12 |
+
jobs:
|
| 13 |
+
docker:
|
| 14 |
+
runs-on: ubuntu-latest
|
| 15 |
+
steps:
|
| 16 |
+
- name: Checkout
|
| 17 |
+
uses: actions/checkout@v4
|
| 18 |
+
- name: Set up QEMU
|
| 19 |
+
uses: docker/setup-qemu-action@v3
|
| 20 |
+
- name: Set up Docker Buildx
|
| 21 |
+
uses: docker/setup-buildx-action@v3
|
| 22 |
+
- name: Login to DockerHub
|
| 23 |
+
uses: docker/login-action@v3
|
| 24 |
+
with:
|
| 25 |
+
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
| 26 |
+
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
| 27 |
+
- name: Generate Build Metadata
|
| 28 |
+
run: |
|
| 29 |
+
echo VERSION=`git describe --tags --always --dirty` >> $GITHUB_ENV
|
| 30 |
+
echo COMMIT=`git rev-parse --short HEAD` >> $GITHUB_ENV
|
| 31 |
+
echo BUILD_DATE=`date -u +%Y-%m-%dT%H:%M:%SZ` >> $GITHUB_ENV
|
| 32 |
+
- name: Build and push
|
| 33 |
+
uses: docker/build-push-action@v6
|
| 34 |
+
with:
|
| 35 |
+
context: .
|
| 36 |
+
platforms: |
|
| 37 |
+
linux/amd64
|
| 38 |
+
linux/arm64
|
| 39 |
+
push: true
|
| 40 |
+
build-args: |
|
| 41 |
+
VERSION=${{ env.VERSION }}
|
| 42 |
+
COMMIT=${{ env.COMMIT }}
|
| 43 |
+
BUILD_DATE=${{ env.BUILD_DATE }}
|
| 44 |
+
tags: |
|
| 45 |
+
${{ env.DOCKERHUB_REPO }}:latest
|
| 46 |
+
${{ env.DOCKERHUB_REPO }}:${{ env.VERSION }}
|
.github/workflows/pr-path-guard.yml
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
name: translator-path-guard
|
| 2 |
+
|
| 3 |
+
on:
|
| 4 |
+
pull_request:
|
| 5 |
+
types:
|
| 6 |
+
- opened
|
| 7 |
+
- synchronize
|
| 8 |
+
- reopened
|
| 9 |
+
|
| 10 |
+
jobs:
|
| 11 |
+
ensure-no-translator-changes:
|
| 12 |
+
runs-on: ubuntu-latest
|
| 13 |
+
steps:
|
| 14 |
+
- uses: actions/checkout@v4
|
| 15 |
+
with:
|
| 16 |
+
fetch-depth: 0
|
| 17 |
+
- name: Detect internal/translator changes
|
| 18 |
+
id: changed-files
|
| 19 |
+
uses: tj-actions/changed-files@v45
|
| 20 |
+
with:
|
| 21 |
+
files: |
|
| 22 |
+
internal/translator/**
|
| 23 |
+
- name: Fail when restricted paths change
|
| 24 |
+
if: steps.changed-files.outputs.any_changed == 'true'
|
| 25 |
+
run: |
|
| 26 |
+
echo "Changes under internal/translator are not allowed in pull requests."
|
| 27 |
+
echo "You need to create an issue for our maintenance team to make the necessary changes."
|
| 28 |
+
exit 1
|
.github/workflows/pr-test-build.yml
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
name: pr-test-build
|
| 2 |
+
|
| 3 |
+
on:
|
| 4 |
+
pull_request:
|
| 5 |
+
|
| 6 |
+
permissions:
|
| 7 |
+
contents: read
|
| 8 |
+
|
| 9 |
+
jobs:
|
| 10 |
+
build:
|
| 11 |
+
runs-on: ubuntu-latest
|
| 12 |
+
steps:
|
| 13 |
+
- name: Checkout
|
| 14 |
+
uses: actions/checkout@v4
|
| 15 |
+
- name: Set up Go
|
| 16 |
+
uses: actions/setup-go@v5
|
| 17 |
+
with:
|
| 18 |
+
go-version-file: go.mod
|
| 19 |
+
cache: true
|
| 20 |
+
- name: Build
|
| 21 |
+
run: |
|
| 22 |
+
go build -o test-output ./cmd/server
|
| 23 |
+
rm -f test-output
|
.github/workflows/release.yaml
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
name: goreleaser
|
| 2 |
+
|
| 3 |
+
on:
|
| 4 |
+
push:
|
| 5 |
+
# run only against tags
|
| 6 |
+
tags:
|
| 7 |
+
- '*'
|
| 8 |
+
|
| 9 |
+
permissions:
|
| 10 |
+
contents: write
|
| 11 |
+
|
| 12 |
+
jobs:
|
| 13 |
+
goreleaser:
|
| 14 |
+
runs-on: ubuntu-latest
|
| 15 |
+
steps:
|
| 16 |
+
- uses: actions/checkout@v4
|
| 17 |
+
with:
|
| 18 |
+
fetch-depth: 0
|
| 19 |
+
- run: git fetch --force --tags
|
| 20 |
+
- uses: actions/setup-go@v4
|
| 21 |
+
with:
|
| 22 |
+
go-version: '>=1.24.0'
|
| 23 |
+
cache: true
|
| 24 |
+
- name: Generate Build Metadata
|
| 25 |
+
run: |
|
| 26 |
+
VERSION=$(git describe --tags --always --dirty)
|
| 27 |
+
echo "VERSION=${VERSION}" >> $GITHUB_ENV
|
| 28 |
+
echo COMMIT=`git rev-parse --short HEAD` >> $GITHUB_ENV
|
| 29 |
+
echo BUILD_DATE=`date -u +%Y-%m-%dT%H:%M:%SZ` >> $GITHUB_ENV
|
| 30 |
+
- uses: goreleaser/goreleaser-action@v4
|
| 31 |
+
with:
|
| 32 |
+
distribution: goreleaser
|
| 33 |
+
version: latest
|
| 34 |
+
args: release --clean
|
| 35 |
+
env:
|
| 36 |
+
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
| 37 |
+
VERSION: ${{ env.VERSION }}
|
| 38 |
+
COMMIT: ${{ env.COMMIT }}
|
| 39 |
+
BUILD_DATE: ${{ env.BUILD_DATE }}
|
.goreleaser.yml
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
builds:
|
| 2 |
+
- id: "cli-proxy-api-plus"
|
| 3 |
+
env:
|
| 4 |
+
- CGO_ENABLED=0
|
| 5 |
+
goos:
|
| 6 |
+
- linux
|
| 7 |
+
- windows
|
| 8 |
+
- darwin
|
| 9 |
+
goarch:
|
| 10 |
+
- amd64
|
| 11 |
+
- arm64
|
| 12 |
+
main: ./cmd/server/
|
| 13 |
+
binary: cli-proxy-api-plus
|
| 14 |
+
ldflags:
|
| 15 |
+
- -s -w -X 'main.Version={{.Version}}-plus' -X 'main.Commit={{.ShortCommit}}' -X 'main.BuildDate={{.Date}}'
|
| 16 |
+
archives:
|
| 17 |
+
- id: "cli-proxy-api-plus"
|
| 18 |
+
format: tar.gz
|
| 19 |
+
format_overrides:
|
| 20 |
+
- goos: windows
|
| 21 |
+
format: zip
|
| 22 |
+
files:
|
| 23 |
+
- LICENSE
|
| 24 |
+
- README.md
|
| 25 |
+
- README_CN.md
|
| 26 |
+
- config.example.yaml
|
| 27 |
+
|
| 28 |
+
checksum:
|
| 29 |
+
name_template: 'checksums.txt'
|
| 30 |
+
|
| 31 |
+
snapshot:
|
| 32 |
+
name_template: "{{ incpatch .Version }}-next"
|
| 33 |
+
|
| 34 |
+
changelog:
|
| 35 |
+
sort: asc
|
| 36 |
+
filters:
|
| 37 |
+
exclude:
|
| 38 |
+
- '^docs:'
|
| 39 |
+
- '^test:'
|
CC_MIRROR_SETUP.md
ADDED
|
@@ -0,0 +1,64 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Super Claude (CC-Mirror + CLIProxyAPIPlus) Setup Guide
|
| 2 |
+
|
| 3 |
+
This document explains your "unshackled" Claude Code setup, which routes requests through your local proxy to use multiple AI providers (Gemini, Kiro, Antigravity) transparently.
|
| 4 |
+
|
| 5 |
+
## 🚀 Usage
|
| 6 |
+
|
| 7 |
+
**Start the Multi-Agent Claude:**
|
| 8 |
+
```bash
|
| 9 |
+
proxy-claude
|
| 10 |
+
```
|
| 11 |
+
|
| 12 |
+
**Run a specific task (Agentic Mode):**
|
| 13 |
+
```bash
|
| 14 |
+
proxy-claude "Analyze the auth middleware and create a task graph for improvements"
|
| 15 |
+
```
|
| 16 |
+
|
| 17 |
+
---
|
| 18 |
+
|
| 19 |
+
## 🧠 Model Routing (The Magic)
|
| 20 |
+
|
| 21 |
+
Your proxy (`config.yaml`) intercepts Claude Code's requests and routes them to your optimized providers:
|
| 22 |
+
|
| 23 |
+
| Claude Code Request | Actually Uses... | Why? | Keys Available |
|
| 24 |
+
| :--- | :--- | :--- | :--- |
|
| 25 |
+
| **Haiku** (Fast/Cheap) | **Gemini 2.5 Flash** | Infinite scaling, massive context | **5+ keys** (Gemini + Antigravity) |
|
| 26 |
+
| **Sonnet** (Coding) | **Kiro (AWS)** | High intelligence, best for code | **1 key** (Currently) |
|
| 27 |
+
| **Opus** (Complex) | **Kiro (AWS)** | Maximum reasoning | **1 key** (Shared with Sonnet) |
|
| 28 |
+
|
| 29 |
+
*To switch models inside Claude Code, type `/model`.*
|
| 30 |
+
|
| 31 |
+
---
|
| 32 |
+
|
| 33 |
+
## 🔑 Managing Keys
|
| 34 |
+
|
| 35 |
+
### Adding More "Fast" Power (Gemini/Antigravity)
|
| 36 |
+
Just drop new `.json` auth files into your `auth/` folder. The proxy detects them automatically and rotates through them.
|
| 37 |
+
|
| 38 |
+
### Adding More "Smart" Power (Kiro/Claude)
|
| 39 |
+
To get the "15 key rotation" for Sonnet/Opus, you need to add more Kiro account files to `auth/`:
|
| 40 |
+
1. Login to Kiro/AWS on another machine or session.
|
| 41 |
+
2. Copy the `kiro-auth-token.json` (or similar).
|
| 42 |
+
3. Paste it into `auth/` with a unique name (e.g., `kiro-account-2.json`).
|
| 43 |
+
|
| 44 |
+
---
|
| 45 |
+
|
| 46 |
+
## 🛠 Troubleshooting
|
| 47 |
+
|
| 48 |
+
**If `proxy-claude` fails:**
|
| 49 |
+
1. Check the proxy logs:
|
| 50 |
+
```bash
|
| 51 |
+
tail -f server.log
|
| 52 |
+
```
|
| 53 |
+
2. Verify the proxy server is running:
|
| 54 |
+
```bash
|
| 55 |
+
pgrep -f "main.go"
|
| 56 |
+
```
|
| 57 |
+
3. Restart the proxy if needed:
|
| 58 |
+
```bash
|
| 59 |
+
pkill -f "main.go" && go run cmd/server/main.go > server.log 2>&1 &
|
| 60 |
+
```
|
| 61 |
+
|
| 62 |
+
**Config Location:**
|
| 63 |
+
* **Proxy Config:** `/home/kek/CLIProxyAPIPlus/config.yaml` (Edit `model-mappings` here)
|
| 64 |
+
* **Client Config:** `~/.cc-mirror/proxy-claude/config/`
|
QUICK_REFERENCE.txt
ADDED
|
@@ -0,0 +1,54 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
╔══════════════════════════════════════════════════════════════╗
|
| 2 |
+
║ CLIProxyAPIPlus - Quick Reference Card ║
|
| 3 |
+
╚══════════════════════════════════════════════════════════════╝
|
| 4 |
+
|
| 5 |
+
📊 CONFIGURED ACCOUNTS: 15
|
| 6 |
+
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
| 7 |
+
Gemini: 6 accounts
|
| 8 |
+
Antigravity: 7 accounts
|
| 9 |
+
Qwen: 1 account
|
| 10 |
+
OpenRouter: 1 account
|
| 11 |
+
|
| 12 |
+
🌐 SERVER INFO
|
| 13 |
+
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
| 14 |
+
URL: http://localhost:8317/v1
|
| 15 |
+
API Key: sk-client-key-1
|
| 16 |
+
Status: Running ✅
|
| 17 |
+
|
| 18 |
+
⚡ QUICK COMMANDS
|
| 19 |
+
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
| 20 |
+
~/CLIProxyAPIPlus/proxy models - List all models
|
| 21 |
+
~/CLIProxyAPIPlus/proxy test - Test API
|
| 22 |
+
~/CLIProxyAPIPlus/proxy auth - Show accounts
|
| 23 |
+
~/CLIProxyAPIPlus/proxy status - Server status
|
| 24 |
+
~/CLIProxyAPIPlus/proxy logs - View logs
|
| 25 |
+
~/CLIProxyAPIPlus/proxy restart - Restart server
|
| 26 |
+
|
| 27 |
+
🎯 EXAMPLE REQUEST
|
| 28 |
+
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
| 29 |
+
curl -H "Authorization: Bearer sk-client-key-1" \
|
| 30 |
+
-H "Content-Type: application/json" \
|
| 31 |
+
-d '{"model":"gemini-2.5-pro","messages":[{"role":"user","content":"Hi!"}]}' \
|
| 32 |
+
http://localhost:8317/v1/chat/completions
|
| 33 |
+
|
| 34 |
+
🐚 FISH ALIASES (in new terminal)
|
| 35 |
+
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
| 36 |
+
cliproxy-models - Same as proxy models
|
| 37 |
+
cliproxy-test - Same as proxy test
|
| 38 |
+
cliproxy-auth - Same as proxy auth
|
| 39 |
+
cliproxy-status - Same as proxy status
|
| 40 |
+
cliproxy-restart - Same as proxy restart
|
| 41 |
+
|
| 42 |
+
🔑 ADD MORE ACCOUNTS
|
| 43 |
+
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
| 44 |
+
gemini-login - Add Gemini account
|
| 45 |
+
antigravity-login - Add Antigravity account
|
| 46 |
+
qwen-login - Add Qwen account
|
| 47 |
+
kiro-login - Add Kiro account
|
| 48 |
+
|
| 49 |
+
📚 DOCUMENTATION
|
| 50 |
+
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
| 51 |
+
Full Guide: ~/CLIProxyAPIPlus/SETUP_GUIDE.md
|
| 52 |
+
Quick Ref: ~/CLIPROXY_QUICK_REFERENCE.md
|
| 53 |
+
|
| 54 |
+
══════════════════════════════════════════════════════════════════
|
README_CN.md
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# CLIProxyAPI Plus
|
| 2 |
+
|
| 3 |
+
[English](README.md) | 中文
|
| 4 |
+
|
| 5 |
+
这是 [CLIProxyAPI](https://github.com/router-for-me/CLIProxyAPI) 的 Plus 版本,在原有基础上增加了第三方供应商的支持。
|
| 6 |
+
|
| 7 |
+
所有的第三方供应商支持都由第三方社区维护者提供,CLIProxyAPI 不提供技术支持。如需取得支持,请与对应的社区维护者联系。
|
| 8 |
+
|
| 9 |
+
该 Plus 版本的主线功能与主线功能强制同步。
|
| 10 |
+
|
| 11 |
+
## 与主线版本版本差异
|
| 12 |
+
|
| 13 |
+
- 新增 GitHub Copilot 支持(OAuth 登录),由[em4go](https://github.com/em4go/CLIProxyAPI/tree/feature/github-copilot-auth)提供
|
| 14 |
+
- 新增 Kiro (AWS CodeWhisperer) 支持 (OAuth 登录), 由[fuko2935](https://github.com/fuko2935/CLIProxyAPI/tree/feature/kiro-integration)、[Ravens2121](https://github.com/Ravens2121/CLIProxyAPIPlus/)提供
|
| 15 |
+
|
| 16 |
+
## 贡献
|
| 17 |
+
|
| 18 |
+
该项目仅接受第三方供应商支持的 Pull Request。任何非第三方供应商支持的 Pull Request 都将被拒绝。
|
| 19 |
+
|
| 20 |
+
如果需要提交任何非第三方供应商支持的 Pull Request,请提交到主线版本。
|
| 21 |
+
|
| 22 |
+
## 许可证
|
| 23 |
+
|
| 24 |
+
此项目根据 MIT 许可证授权 - 有关详细信息,请参阅 [LICENSE](LICENSE) 文件。
|
SETUP_GUIDE (Copy).md
ADDED
|
@@ -0,0 +1,353 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# CLIProxyAPIPlus Setup Guide - YOUR SETUP
|
| 2 |
+
|
| 3 |
+
## 🎉 Setup Complete!
|
| 4 |
+
|
| 5 |
+
You have **15 AI accounts** configured and ready to use through a single OpenAI-compatible proxy.
|
| 6 |
+
|
| 7 |
+
---
|
| 8 |
+
|
| 9 |
+
## 📊 Account Summary
|
| 10 |
+
|
| 11 |
+
| Provider | Accounts | Type |
|
| 12 |
+
|-----------|-----------|-------|
|
| 13 |
+
| Gemini | 6 | OAuth |
|
| 14 |
+
| Antigravity | 7 | OAuth |
|
| 15 |
+
| Qwen | 1 | OAuth |
|
| 16 |
+
| OpenRouter | 1 | API Key |
|
| 17 |
+
| **Total** | **15** | |
|
| 18 |
+
|
| 19 |
+
### Your Configured Accounts
|
| 20 |
+
- pascal.hintermaier@gmail.com (Gemini)
|
| 21 |
+
- hintermaier.pascal@gmail.com (Gemini, Antigravity)
|
| 22 |
+
- claracouve342@gmail.com (Gemini, Antigravity)
|
| 23 |
+
- hintermaierpascal0@gmail.com (Gemini, Antigravity)
|
| 24 |
+
- rave.riotofficial@gmail.com (Gemini, Antigravity)
|
| 25 |
+
- citrondon666@gmail.com (Gemini, Antigravity)
|
| 26 |
+
- diesmalwichtigsamuel@gmail.com (Antigravity)
|
| 27 |
+
- 1x Qwen account
|
| 28 |
+
- 1x OpenRouter account
|
| 29 |
+
|
| 30 |
+
---
|
| 31 |
+
|
| 32 |
+
## 🚀 Quick Start
|
| 33 |
+
|
| 34 |
+
### Server Status
|
| 35 |
+
- **URL**: http://localhost:8317
|
| 36 |
+
- **Status**: Running ✅
|
| 37 |
+
- **Config**: ~/CLIProxyAPIPlus/config.yaml
|
| 38 |
+
- **Auth**: ~/.cli-proxy-api/
|
| 39 |
+
|
| 40 |
+
### Quick Test
|
| 41 |
+
```bash
|
| 42 |
+
~/CLIProxyAPIPlus/proxy test
|
| 43 |
+
```
|
| 44 |
+
|
| 45 |
+
### List Models
|
| 46 |
+
```bash
|
| 47 |
+
~/CLIProxyAPIPlus/proxy models
|
| 48 |
+
```
|
| 49 |
+
|
| 50 |
+
---
|
| 51 |
+
|
| 52 |
+
## 📝 Quick Reference Commands
|
| 53 |
+
|
| 54 |
+
### Using the `proxy` Script
|
| 55 |
+
|
| 56 |
+
```bash
|
| 57 |
+
~/CLIProxyAPIPlus/proxy <command>
|
| 58 |
+
```
|
| 59 |
+
|
| 60 |
+
**Available Commands:**
|
| 61 |
+
- `models` or `m` - List all available models
|
| 62 |
+
- `test` or `t` - Send test request
|
| 63 |
+
- `status` or `s` - Check server status
|
| 64 |
+
- `auth` or `a` - Show configured accounts
|
| 65 |
+
- `logs` or `l` - View recent logs
|
| 66 |
+
- `start` - Start proxy server
|
| 67 |
+
- `stop` - Stop proxy server
|
| 68 |
+
- `restart` or `r` - Restart server
|
| 69 |
+
|
| 70 |
+
### Fish Shell Aliases
|
| 71 |
+
|
| 72 |
+
These are available in your terminal:
|
| 73 |
+
|
| 74 |
+
**Testing:**
|
| 75 |
+
```bash
|
| 76 |
+
cliproxy-models # List models
|
| 77 |
+
cliproxy-test # Test request
|
| 78 |
+
cliproxy-status # Server status
|
| 79 |
+
```
|
| 80 |
+
|
| 81 |
+
**Management:**
|
| 82 |
+
```bash
|
| 83 |
+
cliproxy-start # Start server
|
| 84 |
+
cliproxy-stop # Stop server
|
| 85 |
+
cliproxy-restart # Restart server
|
| 86 |
+
cliproxy-logs # View logs
|
| 87 |
+
cliproxy-auth # Show accounts
|
| 88 |
+
```
|
| 89 |
+
|
| 90 |
+
**Add Accounts:**
|
| 91 |
+
```bash
|
| 92 |
+
gemini-login # Add Gemini
|
| 93 |
+
antigravity-login # Add Antigravity
|
| 94 |
+
qwen-login # Add Qwen
|
| 95 |
+
kiro-login # Add Kiro
|
| 96 |
+
```
|
| 97 |
+
|
| 98 |
+
---
|
| 99 |
+
|
| 100 |
+
## 🔧 Configuration
|
| 101 |
+
|
| 102 |
+
### Environment Variables
|
| 103 |
+
```bash
|
| 104 |
+
$CLIPROXY_URL # http://localhost:8317/v1
|
| 105 |
+
$CLIPROXY_KEY # sk-client-key-1
|
| 106 |
+
```
|
| 107 |
+
|
| 108 |
+
### API Keys (from config.yaml)
|
| 109 |
+
- `sk-client-key-1` - Primary API key
|
| 110 |
+
- `sk-client-key-2` - Secondary API key
|
| 111 |
+
|
| 112 |
+
### Client Configuration
|
| 113 |
+
|
| 114 |
+
#### OpenAI SDK (Python)
|
| 115 |
+
```python
|
| 116 |
+
from openai import OpenAI
|
| 117 |
+
|
| 118 |
+
client = OpenAI(
|
| 119 |
+
base_url="http://localhost:8317/v1",
|
| 120 |
+
api_key="sk-client-key-1"
|
| 121 |
+
)
|
| 122 |
+
|
| 123 |
+
response = client.chat.completions.create(
|
| 124 |
+
model="gemini-2.5-pro",
|
| 125 |
+
messages=[{"role": "user", "content": "Hello!"}]
|
| 126 |
+
)
|
| 127 |
+
```
|
| 128 |
+
|
| 129 |
+
#### Node.js
|
| 130 |
+
```javascript
|
| 131 |
+
import OpenAI from 'openai';
|
| 132 |
+
|
| 133 |
+
const openai = new OpenAI({
|
| 134 |
+
baseURL: 'http://localhost:8317/v1',
|
| 135 |
+
apiKey: 'sk-client-key-1'
|
| 136 |
+
});
|
| 137 |
+
|
| 138 |
+
const completion = await openai.chat.completions.create({
|
| 139 |
+
model: 'gemini-2.5-pro',
|
| 140 |
+
messages: [{ role: 'user', content: 'Hello!' }]
|
| 141 |
+
});
|
| 142 |
+
```
|
| 143 |
+
|
| 144 |
+
#### cURL
|
| 145 |
+
```bash
|
| 146 |
+
curl -H "Authorization: Bearer sk-client-key-1" \
|
| 147 |
+
-H "Content-Type: application/json" \
|
| 148 |
+
-d '{
|
| 149 |
+
"model": "gemini-2.5-pro",
|
| 150 |
+
"messages": [{"role": "user", "content": "Hello!"}]
|
| 151 |
+
}' \
|
| 152 |
+
http://localhost:8317/v1/chat/completions
|
| 153 |
+
```
|
| 154 |
+
|
| 155 |
+
---
|
| 156 |
+
|
| 157 |
+
## 🎯 Available Models
|
| 158 |
+
|
| 159 |
+
The proxy automatically combines models from all providers. To see all available models:
|
| 160 |
+
|
| 161 |
+
```bash
|
| 162 |
+
~/CLIProxyAPIPlus/proxy models
|
| 163 |
+
```
|
| 164 |
+
|
| 165 |
+
### Model Aliases
|
| 166 |
+
Some models have shorter aliases configured:
|
| 167 |
+
- `g2.5p` → `gemini-2.5-pro`
|
| 168 |
+
- `g2.5f` → `gemini-2.5-flash`
|
| 169 |
+
- `ag-pro` → Antigravity pro model
|
| 170 |
+
- `qwen-plus` → Qwen coder plus
|
| 171 |
+
|
| 172 |
+
---
|
| 173 |
+
|
| 174 |
+
## 🔄 Load Balancing
|
| 175 |
+
|
| 176 |
+
The proxy uses **round-robin** routing by default. When you make requests:
|
| 177 |
+
1. First request uses Account 1
|
| 178 |
+
2. Second request uses Account 2
|
| 179 |
+
3. Third request uses Account 3
|
| 180 |
+
4. ...and so on
|
| 181 |
+
|
| 182 |
+
This automatically distributes load across all your accounts!
|
| 183 |
+
|
| 184 |
+
---
|
| 185 |
+
|
| 186 |
+
## 📁 File Structure
|
| 187 |
+
|
| 188 |
+
```
|
| 189 |
+
~/CLIProxyAPIPlus/
|
| 190 |
+
├── cli-proxy-api-plus # Main binary
|
| 191 |
+
├── config.yaml # Configuration file
|
| 192 |
+
├── server.log # Server logs
|
| 193 |
+
├── proxy # Quick access script
|
| 194 |
+
├── SETUP_GUIDE.md # This file
|
| 195 |
+
├── README.md # Original README
|
| 196 |
+
└── auths/ # (optional, alternate auth location)
|
| 197 |
+
|
| 198 |
+
~/.cli-proxy-api/ # OAuth tokens stored here
|
| 199 |
+
├── *.json # Provider tokens
|
| 200 |
+
└── ...
|
| 201 |
+
```
|
| 202 |
+
|
| 203 |
+
---
|
| 204 |
+
|
| 205 |
+
## 🔑 Account Management
|
| 206 |
+
|
| 207 |
+
### Add New Accounts
|
| 208 |
+
|
| 209 |
+
**Gemini:**
|
| 210 |
+
```bash
|
| 211 |
+
cd ~/CLIProxyAPIPlus && ./cli-proxy-api-plus -login -incognito
|
| 212 |
+
```
|
| 213 |
+
|
| 214 |
+
**Antigravity:**
|
| 215 |
+
```bash
|
| 216 |
+
cd ~/CLIProxyAPIPlus && ./cli-proxy-api-plus -antigravity-login -incognito
|
| 217 |
+
```
|
| 218 |
+
|
| 219 |
+
**Qwen:**
|
| 220 |
+
```bash
|
| 221 |
+
cd ~/CLIProxyAPIPlus && ./cli-proxy-api-plus -qwen-login -incognito
|
| 222 |
+
```
|
| 223 |
+
|
| 224 |
+
**Kiro (AWS CodeWhisperer):**
|
| 225 |
+
```bash
|
| 226 |
+
cd ~/CLIProxyAPIPlus && ./cli-proxy-api-plus -kiro-aws-login -incognito
|
| 227 |
+
```
|
| 228 |
+
|
| 229 |
+
### View Current Accounts
|
| 230 |
+
```bash
|
| 231 |
+
~/CLIProxyAPIPlus/proxy auth
|
| 232 |
+
```
|
| 233 |
+
|
| 234 |
+
---
|
| 235 |
+
|
| 236 |
+
## 🛠️ Troubleshooting
|
| 237 |
+
|
| 238 |
+
### Server Not Running
|
| 239 |
+
```bash
|
| 240 |
+
~/CLIProxyAPIPlus/proxy restart
|
| 241 |
+
```
|
| 242 |
+
|
| 243 |
+
### Check Logs
|
| 244 |
+
```bash
|
| 245 |
+
~/CLIProxyAPIPlus/proxy logs
|
| 246 |
+
```
|
| 247 |
+
|
| 248 |
+
### Port Already in Use
|
| 249 |
+
```bash
|
| 250 |
+
~/CLIProxyAPIPlus/proxy stop
|
| 251 |
+
sleep 2
|
| 252 |
+
~/CLIProxyAPIPlus/proxy start
|
| 253 |
+
```
|
| 254 |
+
|
| 255 |
+
### API Key Errors
|
| 256 |
+
- Make sure to use `Authorization: Bearer sk-client-key-1`
|
| 257 |
+
- Or set correct API key in your client config
|
| 258 |
+
|
| 259 |
+
### Browser Not Opening
|
| 260 |
+
- The incognito window might be behind other windows
|
| 261 |
+
- Check for browser notifications
|
| 262 |
+
- OAuth URLs are also printed in terminal
|
| 263 |
+
|
| 264 |
+
---
|
| 265 |
+
|
| 266 |
+
## 📚 Additional Resources
|
| 267 |
+
|
| 268 |
+
### Official Documentation
|
| 269 |
+
- CLIProxyAPI: https://github.com/router-for-me/CLIProxyAPI
|
| 270 |
+
- CLIProxyAPIPlus: https://github.com/router-for-me/CLIProxyAPIPlus
|
| 271 |
+
|
| 272 |
+
### API Endpoints
|
| 273 |
+
- `GET /v1/models` - List all models
|
| 274 |
+
- `POST /v1/chat/completions` - Chat completions
|
| 275 |
+
- `POST /v1/completions` - Text completions
|
| 276 |
+
|
| 277 |
+
### Management API (requires auth key)
|
| 278 |
+
- Base URL: `http://localhost:8317/v0/management`
|
| 279 |
+
- Auth Key: `admin123` (set in config.yaml)
|
| 280 |
+
- Endpoints: Add/remove accounts, view status, etc.
|
| 281 |
+
|
| 282 |
+
---
|
| 283 |
+
|
| 284 |
+
## 🎓 Advanced Configuration
|
| 285 |
+
|
| 286 |
+
### Edit Config File
|
| 287 |
+
```bash
|
| 288 |
+
nano ~/CLIProxyAPIPlus/config.yaml
|
| 289 |
+
```
|
| 290 |
+
|
| 291 |
+
### Key Settings
|
| 292 |
+
- `port: 8317` - Server port
|
| 293 |
+
- `host: ""` - Bind to all interfaces
|
| 294 |
+
- `api-keys` - Client API keys
|
| 295 |
+
- `incognito-browser: true` - Multi-account support
|
| 296 |
+
- `routing.strategy: "round-robin"` - Load balancing
|
| 297 |
+
|
| 298 |
+
### Add More Providers
|
| 299 |
+
You can add:
|
| 300 |
+
- Claude (OAuth)
|
| 301 |
+
- OpenAI Codex (OAuth)
|
| 302 |
+
- Vertex (OAuth or API key)
|
| 303 |
+
- Any OpenAI-compatible provider via `openai-compatibility`
|
| 304 |
+
|
| 305 |
+
---
|
| 306 |
+
|
| 307 |
+
## ✅ Quick Verification
|
| 308 |
+
|
| 309 |
+
Run these commands to verify everything works:
|
| 310 |
+
|
| 311 |
+
```bash
|
| 312 |
+
# 1. Check server status
|
| 313 |
+
~/CLIProxyAPIPlus/proxy status
|
| 314 |
+
|
| 315 |
+
# 2. View all accounts
|
| 316 |
+
~/CLIProxyAPIPlus/proxy auth
|
| 317 |
+
|
| 318 |
+
# 3. List available models
|
| 319 |
+
~/CLIProxyAPIPlus/proxy models
|
| 320 |
+
|
| 321 |
+
# 4. Test API request
|
| 322 |
+
~/CLIProxyAPIPlus/proxy test
|
| 323 |
+
```
|
| 324 |
+
|
| 325 |
+
All should complete successfully! 🎉
|
| 326 |
+
|
| 327 |
+
---
|
| 328 |
+
|
| 329 |
+
## 📞 Need Help?
|
| 330 |
+
|
| 331 |
+
### Common Issues
|
| 332 |
+
- **"Missing API key"** → Add `Authorization: Bearer sk-client-key-1` header
|
| 333 |
+
- **"Connection refused"** → Server not running, run `~/CLIProxyAPIPlus/proxy start`
|
| 334 |
+
- **"Model not found"** → Check `~/CLIProxyAPIPlus/proxy models` for available models
|
| 335 |
+
|
| 336 |
+
### Get Support
|
| 337 |
+
- Check logs: `~/CLIProxyAPIPlus/proxy logs`
|
| 338 |
+
- Restart server: `~/CLIProxyAPIPlus/proxy restart`
|
| 339 |
+
- Review config: `nano ~/CLIProxyAPIPlus/config.yaml`
|
| 340 |
+
|
| 341 |
+
---
|
| 342 |
+
|
| 343 |
+
## 🎉 You're All Set!
|
| 344 |
+
|
| 345 |
+
Your CLIProxyAPIPlus proxy is running with 15 AI accounts. Use any OpenAI-compatible client and point it to:
|
| 346 |
+
|
| 347 |
+
```
|
| 348 |
+
http://localhost:8317/v1
|
| 349 |
+
```
|
| 350 |
+
|
| 351 |
+
The proxy will handle load balancing, account rotation, and provide a unified API endpoint for all your AI providers!
|
| 352 |
+
|
| 353 |
+
**Happy coding! 🚀**
|
SETUP_GUIDE.md
ADDED
|
@@ -0,0 +1,353 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# CLIProxyAPIPlus Setup Guide - YOUR SETUP
|
| 2 |
+
|
| 3 |
+
## 🎉 Setup Complete!
|
| 4 |
+
|
| 5 |
+
You have **15 AI accounts** configured and ready to use through a single OpenAI-compatible proxy.
|
| 6 |
+
|
| 7 |
+
---
|
| 8 |
+
|
| 9 |
+
## 📊 Account Summary
|
| 10 |
+
|
| 11 |
+
| Provider | Accounts | Type |
|
| 12 |
+
|-----------|-----------|-------|
|
| 13 |
+
| Gemini | 6 | OAuth |
|
| 14 |
+
| Antigravity | 7 | OAuth |
|
| 15 |
+
| Qwen | 1 | OAuth |
|
| 16 |
+
| OpenRouter | 1 | API Key |
|
| 17 |
+
| **Total** | **15** | |
|
| 18 |
+
|
| 19 |
+
### Your Configured Accounts
|
| 20 |
+
- user1@example.com (Gemini)
|
| 21 |
+
- user2@example.com (Gemini, Antigravity)
|
| 22 |
+
- user3@example.com (Gemini, Antigravity)
|
| 23 |
+
- user4@example.com (Gemini, Antigravity)
|
| 24 |
+
- user5@example.com (Gemini, Antigravity)
|
| 25 |
+
- user6@example.com (Gemini, Antigravity)
|
| 26 |
+
- user7@example.com (Antigravity)
|
| 27 |
+
- 1x Qwen account
|
| 28 |
+
- 1x OpenRouter account
|
| 29 |
+
|
| 30 |
+
---
|
| 31 |
+
|
| 32 |
+
## 🚀 Quick Start
|
| 33 |
+
|
| 34 |
+
### Server Status
|
| 35 |
+
- **URL**: http://localhost:8317
|
| 36 |
+
- **Status**: Running ✅
|
| 37 |
+
- **Config**: ~/CLIProxyAPIPlus/config.yaml
|
| 38 |
+
- **Auth**: ~/.cli-proxy-api/
|
| 39 |
+
|
| 40 |
+
### Quick Test
|
| 41 |
+
```bash
|
| 42 |
+
~/CLIProxyAPIPlus/proxy test
|
| 43 |
+
```
|
| 44 |
+
|
| 45 |
+
### List Models
|
| 46 |
+
```bash
|
| 47 |
+
~/CLIProxyAPIPlus/proxy models
|
| 48 |
+
```
|
| 49 |
+
|
| 50 |
+
---
|
| 51 |
+
|
| 52 |
+
## 📝 Quick Reference Commands
|
| 53 |
+
|
| 54 |
+
### Using the `proxy` Script
|
| 55 |
+
|
| 56 |
+
```bash
|
| 57 |
+
~/CLIProxyAPIPlus/proxy <command>
|
| 58 |
+
```
|
| 59 |
+
|
| 60 |
+
**Available Commands:**
|
| 61 |
+
- `models` or `m` - List all available models
|
| 62 |
+
- `test` or `t` - Send test request
|
| 63 |
+
- `status` or `s` - Check server status
|
| 64 |
+
- `auth` or `a` - Show configured accounts
|
| 65 |
+
- `logs` or `l` - View recent logs
|
| 66 |
+
- `start` - Start proxy server
|
| 67 |
+
- `stop` - Stop proxy server
|
| 68 |
+
- `restart` or `r` - Restart server
|
| 69 |
+
|
| 70 |
+
### Fish Shell Aliases
|
| 71 |
+
|
| 72 |
+
These are available in your terminal:
|
| 73 |
+
|
| 74 |
+
**Testing:**
|
| 75 |
+
```bash
|
| 76 |
+
cliproxy-models # List models
|
| 77 |
+
cliproxy-test # Test request
|
| 78 |
+
cliproxy-status # Server status
|
| 79 |
+
```
|
| 80 |
+
|
| 81 |
+
**Management:**
|
| 82 |
+
```bash
|
| 83 |
+
cliproxy-start # Start server
|
| 84 |
+
cliproxy-stop # Stop server
|
| 85 |
+
cliproxy-restart # Restart server
|
| 86 |
+
cliproxy-logs # View logs
|
| 87 |
+
cliproxy-auth # Show accounts
|
| 88 |
+
```
|
| 89 |
+
|
| 90 |
+
**Add Accounts:**
|
| 91 |
+
```bash
|
| 92 |
+
gemini-login # Add Gemini
|
| 93 |
+
antigravity-login # Add Antigravity
|
| 94 |
+
qwen-login # Add Qwen
|
| 95 |
+
kiro-login # Add Kiro
|
| 96 |
+
```
|
| 97 |
+
|
| 98 |
+
---
|
| 99 |
+
|
| 100 |
+
## 🔧 Configuration
|
| 101 |
+
|
| 102 |
+
### Environment Variables
|
| 103 |
+
```bash
|
| 104 |
+
$CLIPROXY_URL # http://localhost:8317/v1
|
| 105 |
+
$CLIPROXY_KEY # sk-client-key-1
|
| 106 |
+
```
|
| 107 |
+
|
| 108 |
+
### API Keys (from config.yaml)
|
| 109 |
+
- `sk-client-key-1` - Primary API key
|
| 110 |
+
- `sk-client-key-2` - Secondary API key
|
| 111 |
+
|
| 112 |
+
### Client Configuration
|
| 113 |
+
|
| 114 |
+
#### OpenAI SDK (Python)
|
| 115 |
+
```python
|
| 116 |
+
from openai import OpenAI
|
| 117 |
+
|
| 118 |
+
client = OpenAI(
|
| 119 |
+
base_url="http://localhost:8317/v1",
|
| 120 |
+
api_key="sk-client-key-1"
|
| 121 |
+
)
|
| 122 |
+
|
| 123 |
+
response = client.chat.completions.create(
|
| 124 |
+
model="gemini-2.5-pro",
|
| 125 |
+
messages=[{"role": "user", "content": "Hello!"}]
|
| 126 |
+
)
|
| 127 |
+
```
|
| 128 |
+
|
| 129 |
+
#### Node.js
|
| 130 |
+
```javascript
|
| 131 |
+
import OpenAI from 'openai';
|
| 132 |
+
|
| 133 |
+
const openai = new OpenAI({
|
| 134 |
+
baseURL: 'http://localhost:8317/v1',
|
| 135 |
+
apiKey: 'sk-client-key-1'
|
| 136 |
+
});
|
| 137 |
+
|
| 138 |
+
const completion = await openai.chat.completions.create({
|
| 139 |
+
model: 'gemini-2.5-pro',
|
| 140 |
+
messages: [{ role: 'user', content: 'Hello!' }]
|
| 141 |
+
});
|
| 142 |
+
```
|
| 143 |
+
|
| 144 |
+
#### cURL
|
| 145 |
+
```bash
|
| 146 |
+
curl -H "Authorization: Bearer sk-client-key-1" \
|
| 147 |
+
-H "Content-Type: application/json" \
|
| 148 |
+
-d '{
|
| 149 |
+
"model": "gemini-2.5-pro",
|
| 150 |
+
"messages": [{"role": "user", "content": "Hello!"}]
|
| 151 |
+
}' \
|
| 152 |
+
http://localhost:8317/v1/chat/completions
|
| 153 |
+
```
|
| 154 |
+
|
| 155 |
+
---
|
| 156 |
+
|
| 157 |
+
## 🎯 Available Models
|
| 158 |
+
|
| 159 |
+
The proxy automatically combines models from all providers. To see all available models:
|
| 160 |
+
|
| 161 |
+
```bash
|
| 162 |
+
~/CLIProxyAPIPlus/proxy models
|
| 163 |
+
```
|
| 164 |
+
|
| 165 |
+
### Model Aliases
|
| 166 |
+
Some models have shorter aliases configured:
|
| 167 |
+
- `g2.5p` → `gemini-2.5-pro`
|
| 168 |
+
- `g2.5f` → `gemini-2.5-flash`
|
| 169 |
+
- `ag-pro` → Antigravity pro model
|
| 170 |
+
- `qwen-plus` → Qwen coder plus
|
| 171 |
+
|
| 172 |
+
---
|
| 173 |
+
|
| 174 |
+
## 🔄 Load Balancing
|
| 175 |
+
|
| 176 |
+
The proxy uses **round-robin** routing by default. When you make requests:
|
| 177 |
+
1. First request uses Account 1
|
| 178 |
+
2. Second request uses Account 2
|
| 179 |
+
3. Third request uses Account 3
|
| 180 |
+
4. ...and so on
|
| 181 |
+
|
| 182 |
+
This automatically distributes load across all your accounts!
|
| 183 |
+
|
| 184 |
+
---
|
| 185 |
+
|
| 186 |
+
## 📁 File Structure
|
| 187 |
+
|
| 188 |
+
```
|
| 189 |
+
~/CLIProxyAPIPlus/
|
| 190 |
+
├── cli-proxy-api-plus # Main binary
|
| 191 |
+
├── config.yaml # Configuration file
|
| 192 |
+
├── server.log # Server logs
|
| 193 |
+
├── proxy # Quick access script
|
| 194 |
+
├── SETUP_GUIDE.md # This file
|
| 195 |
+
├── README.md # Original README
|
| 196 |
+
└── auths/ # (optional, alternate auth location)
|
| 197 |
+
|
| 198 |
+
~/.cli-proxy-api/ # OAuth tokens stored here
|
| 199 |
+
├── *.json # Provider tokens
|
| 200 |
+
└── ...
|
| 201 |
+
```
|
| 202 |
+
|
| 203 |
+
---
|
| 204 |
+
|
| 205 |
+
## 🔑 Account Management
|
| 206 |
+
|
| 207 |
+
### Add New Accounts
|
| 208 |
+
|
| 209 |
+
**Gemini:**
|
| 210 |
+
```bash
|
| 211 |
+
cd ~/CLIProxyAPIPlus && ./cli-proxy-api-plus -login -incognito
|
| 212 |
+
```
|
| 213 |
+
|
| 214 |
+
**Antigravity:**
|
| 215 |
+
```bash
|
| 216 |
+
cd ~/CLIProxyAPIPlus && ./cli-proxy-api-plus -antigravity-login -incognito
|
| 217 |
+
```
|
| 218 |
+
|
| 219 |
+
**Qwen:**
|
| 220 |
+
```bash
|
| 221 |
+
cd ~/CLIProxyAPIPlus && ./cli-proxy-api-plus -qwen-login -incognito
|
| 222 |
+
```
|
| 223 |
+
|
| 224 |
+
**Kiro (AWS CodeWhisperer):**
|
| 225 |
+
```bash
|
| 226 |
+
cd ~/CLIProxyAPIPlus && ./cli-proxy-api-plus -kiro-aws-login -incognito
|
| 227 |
+
```
|
| 228 |
+
|
| 229 |
+
### View Current Accounts
|
| 230 |
+
```bash
|
| 231 |
+
~/CLIProxyAPIPlus/proxy auth
|
| 232 |
+
```
|
| 233 |
+
|
| 234 |
+
---
|
| 235 |
+
|
| 236 |
+
## 🛠️ Troubleshooting
|
| 237 |
+
|
| 238 |
+
### Server Not Running
|
| 239 |
+
```bash
|
| 240 |
+
~/CLIProxyAPIPlus/proxy restart
|
| 241 |
+
```
|
| 242 |
+
|
| 243 |
+
### Check Logs
|
| 244 |
+
```bash
|
| 245 |
+
~/CLIProxyAPIPlus/proxy logs
|
| 246 |
+
```
|
| 247 |
+
|
| 248 |
+
### Port Already in Use
|
| 249 |
+
```bash
|
| 250 |
+
~/CLIProxyAPIPlus/proxy stop
|
| 251 |
+
sleep 2
|
| 252 |
+
~/CLIProxyAPIPlus/proxy start
|
| 253 |
+
```
|
| 254 |
+
|
| 255 |
+
### API Key Errors
|
| 256 |
+
- Make sure to use `Authorization: Bearer sk-client-key-1`
|
| 257 |
+
- Or set correct API key in your client config
|
| 258 |
+
|
| 259 |
+
### Browser Not Opening
|
| 260 |
+
- The incognito window might be behind other windows
|
| 261 |
+
- Check for browser notifications
|
| 262 |
+
- OAuth URLs are also printed in terminal
|
| 263 |
+
|
| 264 |
+
---
|
| 265 |
+
|
| 266 |
+
## 📚 Additional Resources
|
| 267 |
+
|
| 268 |
+
### Official Documentation
|
| 269 |
+
- CLIProxyAPI: https://github.com/router-for-me/CLIProxyAPI
|
| 270 |
+
- CLIProxyAPIPlus: https://github.com/router-for-me/CLIProxyAPIPlus
|
| 271 |
+
|
| 272 |
+
### API Endpoints
|
| 273 |
+
- `GET /v1/models` - List all models
|
| 274 |
+
- `POST /v1/chat/completions` - Chat completions
|
| 275 |
+
- `POST /v1/completions` - Text completions
|
| 276 |
+
|
| 277 |
+
### Management API (requires auth key)
|
| 278 |
+
- Base URL: `http://localhost:8317/v0/management`
|
| 279 |
+
- Auth Key: `<your-management-key>` (set in config.yaml)
|
| 280 |
+
- Endpoints: Add/remove accounts, view status, etc.
|
| 281 |
+
|
| 282 |
+
---
|
| 283 |
+
|
| 284 |
+
## 🎓 Advanced Configuration
|
| 285 |
+
|
| 286 |
+
### Edit Config File
|
| 287 |
+
```bash
|
| 288 |
+
nano ~/CLIProxyAPIPlus/config.yaml
|
| 289 |
+
```
|
| 290 |
+
|
| 291 |
+
### Key Settings
|
| 292 |
+
- `port: 8317` - Server port
|
| 293 |
+
- `host: ""` - Bind to all interfaces
|
| 294 |
+
- `api-keys` - Client API keys
|
| 295 |
+
- `incognito-browser: true` - Multi-account support
|
| 296 |
+
- `routing.strategy: "round-robin"` - Load balancing
|
| 297 |
+
|
| 298 |
+
### Add More Providers
|
| 299 |
+
You can add:
|
| 300 |
+
- Claude (OAuth)
|
| 301 |
+
- OpenAI Codex (OAuth)
|
| 302 |
+
- Vertex (OAuth or API key)
|
| 303 |
+
- Any OpenAI-compatible provider via `openai-compatibility`
|
| 304 |
+
|
| 305 |
+
---
|
| 306 |
+
|
| 307 |
+
## ✅ Quick Verification
|
| 308 |
+
|
| 309 |
+
Run these commands to verify everything works:
|
| 310 |
+
|
| 311 |
+
```bash
|
| 312 |
+
# 1. Check server status
|
| 313 |
+
~/CLIProxyAPIPlus/proxy status
|
| 314 |
+
|
| 315 |
+
# 2. View all accounts
|
| 316 |
+
~/CLIProxyAPIPlus/proxy auth
|
| 317 |
+
|
| 318 |
+
# 3. List available models
|
| 319 |
+
~/CLIProxyAPIPlus/proxy models
|
| 320 |
+
|
| 321 |
+
# 4. Test API request
|
| 322 |
+
~/CLIProxyAPIPlus/proxy test
|
| 323 |
+
```
|
| 324 |
+
|
| 325 |
+
All should complete successfully! 🎉
|
| 326 |
+
|
| 327 |
+
---
|
| 328 |
+
|
| 329 |
+
## 📞 Need Help?
|
| 330 |
+
|
| 331 |
+
### Common Issues
|
| 332 |
+
- **"Missing API key"** → Add `Authorization: Bearer sk-client-key-1` header
|
| 333 |
+
- **"Connection refused"** → Server not running, run `~/CLIProxyAPIPlus/proxy start`
|
| 334 |
+
- **"Model not found"** → Check `~/CLIProxyAPIPlus/proxy models` for available models
|
| 335 |
+
|
| 336 |
+
### Get Support
|
| 337 |
+
- Check logs: `~/CLIProxyAPIPlus/proxy logs`
|
| 338 |
+
- Restart server: `~/CLIProxyAPIPlus/proxy restart`
|
| 339 |
+
- Review config: `nano ~/CLIProxyAPIPlus/config.yaml`
|
| 340 |
+
|
| 341 |
+
---
|
| 342 |
+
|
| 343 |
+
## 🎉 You're All Set!
|
| 344 |
+
|
| 345 |
+
Your CLIProxyAPIPlus proxy is running with 15 AI accounts. Use any OpenAI-compatible client and point it to:
|
| 346 |
+
|
| 347 |
+
```
|
| 348 |
+
http://localhost:8317/v1
|
| 349 |
+
```
|
| 350 |
+
|
| 351 |
+
The proxy will handle load balancing, account rotation, and provide a unified API endpoint for all your AI providers!
|
| 352 |
+
|
| 353 |
+
**Happy coding! 🚀**
|
assets/cubence.png
ADDED
|
assets/packycode.png
ADDED
|
auths/.gitkeep
ADDED
|
File without changes
|
docker-build.ps1
ADDED
|
@@ -0,0 +1,53 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# build.ps1 - Windows PowerShell Build Script
#
# Builds and/or runs the Docker container, injecting version metadata
# (git tag, commit hash, UTC build date) at build time.

# Abort on the first error.
$ErrorActionPreference = "Stop"

# --- Step 1: Choose Environment ---
Write-Host "Please select an option:"
Write-Host "1) Run using Pre-built Image (Recommended)"
Write-Host "2) Build from Source and Run (For Developers)"
$selection = Read-Host -Prompt "Enter choice [1-2]"

# --- Step 2: Execute based on choice ---
if ($selection -eq "1") {
    Write-Host "--- Running with Pre-built Image ---"
    docker compose up -d --remove-orphans --no-build
    Write-Host "Services are starting from remote image."
    Write-Host "Run 'docker compose logs -f' to see the logs."
}
elseif ($selection -eq "2") {
    Write-Host "--- Building from Source and Running ---"

    # Collect version metadata from git and the clock.
    $VERSION = (git describe --tags --always --dirty)
    $COMMIT = (git rev-parse --short HEAD)
    $BUILD_DATE = (Get-Date).ToUniversalTime().ToString("yyyy-MM-ddTHH:mm:ssZ")

    Write-Host "Building with the following info:"
    Write-Host " Version: $VERSION"
    Write-Host " Commit: $COMMIT"
    Write-Host " Build Date: $BUILD_DATE"
    Write-Host "----------------------------------------"

    # Tag the image locally so the pre-built remote image is never pulled.
    $env:CLI_PROXY_IMAGE = "cli-proxy-api:local"

    Write-Host "Building the Docker image..."
    docker compose build --build-arg VERSION=$VERSION --build-arg COMMIT=$COMMIT --build-arg BUILD_DATE=$BUILD_DATE

    Write-Host "Starting the services..."
    docker compose up -d --remove-orphans --pull never

    Write-Host "Build complete. Services are starting."
    Write-Host "Run 'docker compose logs -f' to see the logs."
}
else {
    Write-Host "Invalid choice. Please enter 1 or 2."
    exit 1
}
|
docker-build.sh
ADDED
|
@@ -0,0 +1,58 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env bash
#
# build.sh - Linux/macOS Build Script
#
# Builds and/or runs the Docker container, injecting version metadata
# (git tag, commit hash, UTC build date) at build time.

# Abort on errors, unset variables, and failed pipeline stages.
set -euo pipefail

# --- Step 1: Choose Environment ---
echo "Please select an option:"
echo "1) Run using Pre-built Image (Recommended)"
echo "2) Build from Source and Run (For Developers)"
read -r -p "Enter choice [1-2]: " selection

# --- Step 2: Execute based on choice ---
if [ "$selection" = "1" ]; then
    echo "--- Running with Pre-built Image ---"
    docker compose up -d --remove-orphans --no-build
    echo "Services are starting from remote image."
    echo "Run 'docker compose logs -f' to see the logs."
elif [ "$selection" = "2" ]; then
    echo "--- Building from Source and Running ---"

    # Collect version metadata from git and the clock.
    VERSION="$(git describe --tags --always --dirty)"
    COMMIT="$(git rev-parse --short HEAD)"
    BUILD_DATE="$(date -u +%Y-%m-%dT%H:%M:%SZ)"

    echo "Building with the following info:"
    echo " Version: ${VERSION}"
    echo " Commit: ${COMMIT}"
    echo " Build Date: ${BUILD_DATE}"
    echo "----------------------------------------"

    # Tag the image locally so the pre-built remote image is never pulled.
    export CLI_PROXY_IMAGE="cli-proxy-api:local"

    echo "Building the Docker image..."
    docker compose build \
        --build-arg VERSION="${VERSION}" \
        --build-arg COMMIT="${COMMIT}" \
        --build-arg BUILD_DATE="${BUILD_DATE}"

    echo "Starting the services..."
    docker compose up -d --remove-orphans --pull never

    echo "Build complete. Services are starting."
    echo "Run 'docker compose logs -f' to see the logs."
else
    echo "Invalid choice. Please enter 1 or 2."
    exit 1
fi
|
dsa
ADDED
|
File without changes
|
examples/custom-provider/main.go
ADDED
|
@@ -0,0 +1,207 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
// Package main demonstrates how to create a custom AI provider executor
|
| 2 |
+
// and integrate it with the CLI Proxy API server. This example shows how to:
|
| 3 |
+
// - Create a custom executor that implements the Executor interface
|
| 4 |
+
// - Register custom translators for request/response transformation
|
| 5 |
+
// - Integrate the custom provider with the SDK server
|
| 6 |
+
// - Register custom models in the model registry
|
| 7 |
+
//
|
| 8 |
+
// This example uses a simple echo service (httpbin.org) as the upstream API
|
| 9 |
+
// for demonstration purposes. In a real implementation, you would replace
|
| 10 |
+
// this with your actual AI service provider.
|
| 11 |
+
package main
|
| 12 |
+
|
| 13 |
+
import (
|
| 14 |
+
"bytes"
|
| 15 |
+
"context"
|
| 16 |
+
"errors"
|
| 17 |
+
"io"
|
| 18 |
+
"net/http"
|
| 19 |
+
"net/url"
|
| 20 |
+
"os"
|
| 21 |
+
"path/filepath"
|
| 22 |
+
"strings"
|
| 23 |
+
"time"
|
| 24 |
+
|
| 25 |
+
"github.com/gin-gonic/gin"
|
| 26 |
+
"github.com/router-for-me/CLIProxyAPI/v6/sdk/api"
|
| 27 |
+
sdkAuth "github.com/router-for-me/CLIProxyAPI/v6/sdk/auth"
|
| 28 |
+
"github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy"
|
| 29 |
+
coreauth "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/auth"
|
| 30 |
+
clipexec "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/executor"
|
| 31 |
+
"github.com/router-for-me/CLIProxyAPI/v6/sdk/config"
|
| 32 |
+
"github.com/router-for-me/CLIProxyAPI/v6/sdk/logging"
|
| 33 |
+
sdktr "github.com/router-for-me/CLIProxyAPI/v6/sdk/translator"
|
| 34 |
+
)
|
| 35 |
+
|
| 36 |
+
const (
	// providerKey is the identifier for our custom provider. It is returned
	// by the executor's Identifier method and used to tag registered models.
	providerKey = "myprov"

	// fOpenAI represents the OpenAI chat format used by incoming requests.
	fOpenAI = sdktr.Format("openai.chat")

	// fMyProv represents our custom provider's chat format.
	fMyProv = sdktr.Format("myprov.chat")
)
|
| 46 |
+
|
| 47 |
+
// init registers trivial translators for demonstration purposes.
// In a real implementation, you would implement proper request/response
// transformation logic between OpenAI format and your provider's format.
//
// Here every direction is a pass-through: the request bytes are forwarded
// unchanged, and responses (stream and non-stream) are returned verbatim.
func init() {
	sdktr.Register(fOpenAI, fMyProv,
		// Request translator: forward the OpenAI-format payload as-is.
		func(model string, raw []byte, stream bool) []byte { return raw },
		sdktr.ResponseTransform{
			// Stream: emit the raw upstream chunk as a single payload string.
			Stream: func(ctx context.Context, model string, originalReq, translatedReq, raw []byte, param *any) []string {
				return []string{string(raw)}
			},
			// NonStream: return the raw upstream body unchanged.
			NonStream: func(ctx context.Context, model string, originalReq, translatedReq, raw []byte, param *any) string {
				return string(raw)
			},
		},
	)
}
|
| 63 |
+
|
| 64 |
+
// MyExecutor is a minimal provider implementation for demonstration purposes.
// It implements the Executor interface to handle requests to a custom AI
// provider. The zero value is ready to use; the type carries no state.
type MyExecutor struct{}

// Identifier returns the unique identifier for this executor (providerKey).
func (MyExecutor) Identifier() string { return providerKey }
|
| 70 |
+
|
| 71 |
+
// PrepareRequest optionally injects credentials to raw HTTP requests.
|
| 72 |
+
// This method is called before each request to allow the executor to modify
|
| 73 |
+
// the HTTP request with authentication headers or other necessary modifications.
|
| 74 |
+
//
|
| 75 |
+
// Parameters:
|
| 76 |
+
// - req: The HTTP request to prepare
|
| 77 |
+
// - a: The authentication information
|
| 78 |
+
//
|
| 79 |
+
// Returns:
|
| 80 |
+
// - error: An error if request preparation fails
|
| 81 |
+
func (MyExecutor) PrepareRequest(req *http.Request, a *coreauth.Auth) error {
|
| 82 |
+
if req == nil || a == nil {
|
| 83 |
+
return nil
|
| 84 |
+
}
|
| 85 |
+
if a.Attributes != nil {
|
| 86 |
+
if ak := strings.TrimSpace(a.Attributes["api_key"]); ak != "" {
|
| 87 |
+
req.Header.Set("Authorization", "Bearer "+ak)
|
| 88 |
+
}
|
| 89 |
+
}
|
| 90 |
+
return nil
|
| 91 |
+
}
|
| 92 |
+
|
| 93 |
+
func buildHTTPClient(a *coreauth.Auth) *http.Client {
|
| 94 |
+
if a == nil || strings.TrimSpace(a.ProxyURL) == "" {
|
| 95 |
+
return http.DefaultClient
|
| 96 |
+
}
|
| 97 |
+
u, err := url.Parse(a.ProxyURL)
|
| 98 |
+
if err != nil || (u.Scheme != "http" && u.Scheme != "https") {
|
| 99 |
+
return http.DefaultClient
|
| 100 |
+
}
|
| 101 |
+
return &http.Client{Transport: &http.Transport{Proxy: http.ProxyURL(u)}}
|
| 102 |
+
}
|
| 103 |
+
|
| 104 |
+
func upstreamEndpoint(a *coreauth.Auth) string {
|
| 105 |
+
if a != nil && a.Attributes != nil {
|
| 106 |
+
if ep := strings.TrimSpace(a.Attributes["endpoint"]); ep != "" {
|
| 107 |
+
return ep
|
| 108 |
+
}
|
| 109 |
+
}
|
| 110 |
+
// Demo echo endpoint; replace with your upstream.
|
| 111 |
+
return "https://httpbin.org/post"
|
| 112 |
+
}
|
| 113 |
+
|
| 114 |
+
func (MyExecutor) Execute(ctx context.Context, a *coreauth.Auth, req clipexec.Request, opts clipexec.Options) (clipexec.Response, error) {
|
| 115 |
+
client := buildHTTPClient(a)
|
| 116 |
+
endpoint := upstreamEndpoint(a)
|
| 117 |
+
|
| 118 |
+
httpReq, errNew := http.NewRequestWithContext(ctx, http.MethodPost, endpoint, bytes.NewReader(req.Payload))
|
| 119 |
+
if errNew != nil {
|
| 120 |
+
return clipexec.Response{}, errNew
|
| 121 |
+
}
|
| 122 |
+
httpReq.Header.Set("Content-Type", "application/json")
|
| 123 |
+
|
| 124 |
+
// Inject credentials via PrepareRequest hook.
|
| 125 |
+
_ = (MyExecutor{}).PrepareRequest(httpReq, a)
|
| 126 |
+
|
| 127 |
+
resp, errDo := client.Do(httpReq)
|
| 128 |
+
if errDo != nil {
|
| 129 |
+
return clipexec.Response{}, errDo
|
| 130 |
+
}
|
| 131 |
+
defer func() {
|
| 132 |
+
if errClose := resp.Body.Close(); errClose != nil {
|
| 133 |
+
// Best-effort close; log if needed in real projects.
|
| 134 |
+
}
|
| 135 |
+
}()
|
| 136 |
+
body, _ := io.ReadAll(resp.Body)
|
| 137 |
+
return clipexec.Response{Payload: body}, nil
|
| 138 |
+
}
|
| 139 |
+
|
| 140 |
+
// CountTokens is not supported by this demo executor and always returns an error.
func (MyExecutor) CountTokens(context.Context, *coreauth.Auth, clipexec.Request, clipexec.Options) (clipexec.Response, error) {
	return clipexec.Response{}, errors.New("count tokens not implemented")
}
|
| 143 |
+
|
| 144 |
+
func (MyExecutor) ExecuteStream(ctx context.Context, a *coreauth.Auth, req clipexec.Request, opts clipexec.Options) (<-chan clipexec.StreamChunk, error) {
|
| 145 |
+
ch := make(chan clipexec.StreamChunk, 1)
|
| 146 |
+
go func() {
|
| 147 |
+
defer close(ch)
|
| 148 |
+
ch <- clipexec.StreamChunk{Payload: []byte("data: {\"ok\":true}\n\n")}
|
| 149 |
+
}()
|
| 150 |
+
return ch, nil
|
| 151 |
+
}
|
| 152 |
+
|
| 153 |
+
// Refresh is a no-op token refresh for this demo: it returns the auth unchanged.
func (MyExecutor) Refresh(ctx context.Context, a *coreauth.Auth) (*coreauth.Auth, error) {
	return a, nil
}
|
| 156 |
+
|
| 157 |
+
// main wires the custom provider into a CLI Proxy API service:
// it loads config, registers MyExecutor with the core auth manager,
// publishes demo models after startup, and runs the server until cancelled.
func main() {
	// Load the proxy configuration from the working directory.
	cfg, err := config.LoadConfig("config.yaml")
	if err != nil {
		panic(err)
	}

	// Point token persistence at the configured auth directory when the
	// store supports it (checked via an anonymous interface assertion).
	tokenStore := sdkAuth.GetTokenStore()
	if dirSetter, ok := tokenStore.(interface{ SetBaseDir(string) }); ok {
		dirSetter.SetBaseDir(cfg.AuthDir)
	}
	// Create the core auth manager and plug in our custom executor.
	core := coreauth.NewManager(tokenStore, nil, nil)
	core.RegisterExecutor(MyExecutor{})

	hooks := cliproxy.Hooks{
		OnAfterStart: func(s *cliproxy.Service) {
			// Register demo models for the custom provider so they appear in /v1/models.
			models := []*cliproxy.ModelInfo{{ID: "myprov-pro-1", Object: "model", Type: providerKey, DisplayName: "MyProv Pro 1"}}
			for _, a := range core.List() {
				if strings.EqualFold(a.Provider, providerKey) {
					cliproxy.GlobalModelRegistry().RegisterClient(a.ID, providerKey, models)
				}
			}
		},
	}

	// Assemble the service: config, auth manager, server options, and hooks.
	svc, err := cliproxy.NewBuilder().
		WithConfig(cfg).
		WithConfigPath("config.yaml").
		WithCoreAuthManager(core).
		WithServerOptions(
			// Optional: add a simple middleware + custom request logger
			api.WithMiddleware(func(c *gin.Context) { c.Header("X-Example", "custom-provider"); c.Next() }),
			api.WithRequestLoggerFactory(func(cfg *config.Config, cfgPath string) logging.RequestLogger {
				return logging.NewFileRequestLogger(true, "logs", filepath.Dir(cfgPath))
			}),
		).
		WithHooks(hooks).
		Build()
	if err != nil {
		panic(err)
	}

	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()

	// Run until the context is cancelled; a clean cancellation is not an error.
	if err := svc.Run(ctx); err != nil && !errors.Is(err, context.Canceled) {
		panic(err)
	}
	_ = os.Stderr // keep os import used (demo only)
	_ = time.Second
}
|
examples/translator/main.go
ADDED
|
@@ -0,0 +1,42 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
package main
|
| 2 |
+
|
| 3 |
+
import (
|
| 4 |
+
"context"
|
| 5 |
+
"fmt"
|
| 6 |
+
|
| 7 |
+
"github.com/router-for-me/CLIProxyAPI/v6/sdk/translator"
|
| 8 |
+
_ "github.com/router-for-me/CLIProxyAPI/v6/sdk/translator/builtin"
|
| 9 |
+
)
|
| 10 |
+
|
| 11 |
+
func main() {
|
| 12 |
+
rawRequest := []byte(`{"messages":[{"content":[{"text":"Hello! Gemini","type":"text"}],"role":"user"}],"model":"gemini-2.5-pro","stream":false}`)
|
| 13 |
+
fmt.Println("Has gemini->openai response translator:", translator.HasResponseTransformerByFormatName(
|
| 14 |
+
translator.FormatGemini,
|
| 15 |
+
translator.FormatOpenAI,
|
| 16 |
+
))
|
| 17 |
+
|
| 18 |
+
translatedRequest := translator.TranslateRequestByFormatName(
|
| 19 |
+
translator.FormatOpenAI,
|
| 20 |
+
translator.FormatGemini,
|
| 21 |
+
"gemini-2.5-pro",
|
| 22 |
+
rawRequest,
|
| 23 |
+
false,
|
| 24 |
+
)
|
| 25 |
+
|
| 26 |
+
fmt.Printf("Translated request to Gemini format:\n%s\n\n", translatedRequest)
|
| 27 |
+
|
| 28 |
+
claudeResponse := []byte(`{"candidates":[{"content":{"role":"model","parts":[{"thought":true,"text":"Okay, here's what's going through my mind. I need to schedule a meeting"},{"thoughtSignature":"","functionCall":{"name":"schedule_meeting","args":{"topic":"Q3 planning","attendees":["Bob","Alice"],"time":"10:00","date":"2025-03-27"}}}]},"finishReason":"STOP","avgLogprobs":-0.50018133435930523}],"usageMetadata":{"promptTokenCount":117,"candidatesTokenCount":28,"totalTokenCount":474,"trafficType":"PROVISIONED_THROUGHPUT","promptTokensDetails":[{"modality":"TEXT","tokenCount":117}],"candidatesTokensDetails":[{"modality":"TEXT","tokenCount":28}],"thoughtsTokenCount":329},"modelVersion":"gemini-2.5-pro","createTime":"2025-08-15T04:12:55.249090Z","responseId":"x7OeaIKaD6CU48APvNXDyA4"}`)
|
| 29 |
+
|
| 30 |
+
convertedResponse := translator.TranslateNonStreamByFormatName(
|
| 31 |
+
context.Background(),
|
| 32 |
+
translator.FormatGemini,
|
| 33 |
+
translator.FormatOpenAI,
|
| 34 |
+
"gemini-2.5-pro",
|
| 35 |
+
rawRequest,
|
| 36 |
+
translatedRequest,
|
| 37 |
+
claudeResponse,
|
| 38 |
+
nil,
|
| 39 |
+
)
|
| 40 |
+
|
| 41 |
+
fmt.Printf("Converted response for OpenAI clients:\n%s\n", convertedResponse)
|
| 42 |
+
}
|
internal/api/middleware/anthropic_auth.go
ADDED
|
@@ -0,0 +1,106 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
package middleware
|
| 2 |
+
|
| 3 |
+
import (
|
| 4 |
+
"net/http"
|
| 5 |
+
"strings"
|
| 6 |
+
|
| 7 |
+
"github.com/gin-gonic/gin"
|
| 8 |
+
log "github.com/sirupsen/logrus"
|
| 9 |
+
)
|
| 10 |
+
|
| 11 |
+
// AuthenticateAnthropicRequest creates a middleware that authenticates requests
|
| 12 |
+
// for the Anthropic API using multiple methods (Bearer, X-API-Key, Query).
|
| 13 |
+
// It validates the extracted key against the provided list of allowed keys.
|
| 14 |
+
func AuthenticateAnthropicRequest(allowedKeys []string) gin.HandlerFunc {
|
| 15 |
+
// Convert allowed keys to a map for O(1) lookup
|
| 16 |
+
allowedKeyMap := make(map[string]struct{})
|
| 17 |
+
for _, key := range allowedKeys {
|
| 18 |
+
if key != "" {
|
| 19 |
+
allowedKeyMap[key] = struct{}{}
|
| 20 |
+
}
|
| 21 |
+
}
|
| 22 |
+
|
| 23 |
+
return func(c *gin.Context) {
|
| 24 |
+
var apiKey string
|
| 25 |
+
var authMethod string
|
| 26 |
+
|
| 27 |
+
// 1. Check Authorization Header (Bearer)
|
| 28 |
+
authHeader := c.GetHeader("Authorization")
|
| 29 |
+
if authHeader != "" {
|
| 30 |
+
parts := strings.SplitN(authHeader, " ", 2)
|
| 31 |
+
if len(parts) == 2 && strings.EqualFold(parts[0], "Bearer") {
|
| 32 |
+
apiKey = strings.TrimSpace(parts[1])
|
| 33 |
+
authMethod = "Bearer"
|
| 34 |
+
}
|
| 35 |
+
}
|
| 36 |
+
|
| 37 |
+
// 2. Check X-API-Key Header
|
| 38 |
+
if apiKey == "" {
|
| 39 |
+
apiKey = c.GetHeader("x-api-key")
|
| 40 |
+
if apiKey != "" {
|
| 41 |
+
authMethod = "X-API-Key"
|
| 42 |
+
}
|
| 43 |
+
}
|
| 44 |
+
|
| 45 |
+
// 3. Check Query Parameter
|
| 46 |
+
if apiKey == "" {
|
| 47 |
+
apiKey = c.Query("api_key")
|
| 48 |
+
if apiKey != "" {
|
| 49 |
+
authMethod = "QueryParam"
|
| 50 |
+
}
|
| 51 |
+
}
|
| 52 |
+
|
| 53 |
+
// Validate
|
| 54 |
+
if apiKey == "" {
|
| 55 |
+
log.Debug("Anthropic Auth: No credentials provided")
|
| 56 |
+
c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{
|
| 57 |
+
"error": map[string]string{
|
| 58 |
+
"type": "authentication_error",
|
| 59 |
+
"message": "No API key provided",
|
| 60 |
+
},
|
| 61 |
+
})
|
| 62 |
+
return
|
| 63 |
+
}
|
| 64 |
+
|
| 65 |
+
// Check if key is allowed
|
| 66 |
+
valid := false
|
| 67 |
+
if len(allowedKeyMap) > 0 {
|
| 68 |
+
if _, ok := allowedKeyMap[apiKey]; ok {
|
| 69 |
+
valid = true
|
| 70 |
+
}
|
| 71 |
+
} else {
|
| 72 |
+
// If no keys configured in proxy, log warning and deny
|
| 73 |
+
log.Warn("Anthropic Auth: No allowed keys configured in proxy")
|
| 74 |
+
valid = false
|
| 75 |
+
}
|
| 76 |
+
|
| 77 |
+
if !valid {
|
| 78 |
+
maskedKey := apiKey
|
| 79 |
+
if len(apiKey) > 4 {
|
| 80 |
+
maskedKey = apiKey[:4] + "..."
|
| 81 |
+
}
|
| 82 |
+
log.WithFields(log.Fields{
|
| 83 |
+
"method": authMethod,
|
| 84 |
+
"key_prefix": maskedKey,
|
| 85 |
+
}).Debug("Anthropic Auth: Invalid API key")
|
| 86 |
+
|
| 87 |
+
c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{
|
| 88 |
+
"error": map[string]string{
|
| 89 |
+
"type": "authentication_error",
|
| 90 |
+
"message": "Invalid API key",
|
| 91 |
+
},
|
| 92 |
+
})
|
| 93 |
+
return
|
| 94 |
+
}
|
| 95 |
+
|
| 96 |
+
log.WithFields(log.Fields{
|
| 97 |
+
"method": authMethod,
|
| 98 |
+
}).Debug("Anthropic Auth: Successful")
|
| 99 |
+
|
| 100 |
+
// Set context variables for downstream handlers
|
| 101 |
+
c.Set("apiKey", apiKey)
|
| 102 |
+
c.Set("accessProvider", "anthropic")
|
| 103 |
+
|
| 104 |
+
c.Next()
|
| 105 |
+
}
|
| 106 |
+
}
|
internal/api/middleware/anthropic_auth_test.go
ADDED
|
@@ -0,0 +1,97 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
package middleware
|
| 2 |
+
|
| 3 |
+
import (
|
| 4 |
+
"net/http"
|
| 5 |
+
"net/http/httptest"
|
| 6 |
+
"testing"
|
| 7 |
+
|
| 8 |
+
"github.com/gin-gonic/gin"
|
| 9 |
+
)
|
| 10 |
+
|
| 11 |
+
func setupTestRouter(allowedKeys []string) *gin.Engine {
|
| 12 |
+
gin.SetMode(gin.TestMode)
|
| 13 |
+
r := gin.New()
|
| 14 |
+
r.Use(AuthenticateAnthropicRequest(allowedKeys))
|
| 15 |
+
r.GET("/test", func(c *gin.Context) {
|
| 16 |
+
apiKey, _ := c.Get("apiKey")
|
| 17 |
+
c.String(http.StatusOK, "OK: "+apiKey.(string))
|
| 18 |
+
})
|
| 19 |
+
return r
|
| 20 |
+
}
|
| 21 |
+
|
| 22 |
+
func TestBearerTokenAuth(t *testing.T) {
|
| 23 |
+
router := setupTestRouter([]string{"valid-key"})
|
| 24 |
+
|
| 25 |
+
w := httptest.NewRecorder()
|
| 26 |
+
req, _ := http.NewRequest("GET", "/test", nil)
|
| 27 |
+
req.Header.Set("Authorization", "Bearer valid-key")
|
| 28 |
+
|
| 29 |
+
router.ServeHTTP(w, req)
|
| 30 |
+
|
| 31 |
+
if w.Code != http.StatusOK {
|
| 32 |
+
t.Errorf("Expected status 200, got %d", w.Code)
|
| 33 |
+
}
|
| 34 |
+
if w.Body.String() != "OK: valid-key" {
|
| 35 |
+
t.Errorf("Expected body 'OK: valid-key', got '%s'", w.Body.String())
|
| 36 |
+
}
|
| 37 |
+
}
|
| 38 |
+
|
| 39 |
+
func TestXAPIKeyAuth(t *testing.T) {
|
| 40 |
+
router := setupTestRouter([]string{"valid-key"})
|
| 41 |
+
|
| 42 |
+
w := httptest.NewRecorder()
|
| 43 |
+
req, _ := http.NewRequest("GET", "/test", nil)
|
| 44 |
+
req.Header.Set("X-API-Key", "valid-key")
|
| 45 |
+
|
| 46 |
+
router.ServeHTTP(w, req)
|
| 47 |
+
|
| 48 |
+
if w.Code != http.StatusOK {
|
| 49 |
+
t.Errorf("Expected status 200, got %d", w.Code)
|
| 50 |
+
}
|
| 51 |
+
if w.Body.String() != "OK: valid-key" {
|
| 52 |
+
t.Errorf("Expected body 'OK: valid-key', got '%s'", w.Body.String())
|
| 53 |
+
}
|
| 54 |
+
}
|
| 55 |
+
|
| 56 |
+
func TestQueryParamAuth(t *testing.T) {
|
| 57 |
+
router := setupTestRouter([]string{"valid-key"})
|
| 58 |
+
|
| 59 |
+
w := httptest.NewRecorder()
|
| 60 |
+
req, _ := http.NewRequest("GET", "/test?api_key=valid-key", nil)
|
| 61 |
+
|
| 62 |
+
router.ServeHTTP(w, req)
|
| 63 |
+
|
| 64 |
+
if w.Code != http.StatusOK {
|
| 65 |
+
t.Errorf("Expected status 200, got %d", w.Code)
|
| 66 |
+
}
|
| 67 |
+
if w.Body.String() != "OK: valid-key" {
|
| 68 |
+
t.Errorf("Expected body 'OK: valid-key', got '%s'", w.Body.String())
|
| 69 |
+
}
|
| 70 |
+
}
|
| 71 |
+
|
| 72 |
+
func TestNoAuth(t *testing.T) {
|
| 73 |
+
router := setupTestRouter([]string{"valid-key"})
|
| 74 |
+
|
| 75 |
+
w := httptest.NewRecorder()
|
| 76 |
+
req, _ := http.NewRequest("GET", "/test", nil)
|
| 77 |
+
|
| 78 |
+
router.ServeHTTP(w, req)
|
| 79 |
+
|
| 80 |
+
if w.Code != http.StatusUnauthorized {
|
| 81 |
+
t.Errorf("Expected status 401, got %d", w.Code)
|
| 82 |
+
}
|
| 83 |
+
}
|
| 84 |
+
|
| 85 |
+
func TestInvalidKey(t *testing.T) {
|
| 86 |
+
router := setupTestRouter([]string{"valid-key"})
|
| 87 |
+
|
| 88 |
+
w := httptest.NewRecorder()
|
| 89 |
+
req, _ := http.NewRequest("GET", "/test", nil)
|
| 90 |
+
req.Header.Set("Authorization", "Bearer invalid-key")
|
| 91 |
+
|
| 92 |
+
router.ServeHTTP(w, req)
|
| 93 |
+
|
| 94 |
+
if w.Code != http.StatusUnauthorized {
|
| 95 |
+
t.Errorf("Expected status 401, got %d", w.Code)
|
| 96 |
+
}
|
| 97 |
+
}
|
internal/api/middleware/anthropic_debug.go
ADDED
|
@@ -0,0 +1,59 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
package middleware
|
| 2 |
+
|
| 3 |
+
import (
|
| 4 |
+
"bytes"
|
| 5 |
+
"io"
|
| 6 |
+
"strings"
|
| 7 |
+
|
| 8 |
+
"github.com/gin-gonic/gin"
|
| 9 |
+
log "github.com/sirupsen/logrus"
|
| 10 |
+
)
|
| 11 |
+
|
| 12 |
+
// LogAnthropicRequest creates a middleware that logs detailed debug info for Anthropic requests.
|
| 13 |
+
// It logs headers (masking sensitive ones) and a preview of the request body.
|
| 14 |
+
func LogAnthropicRequest() gin.HandlerFunc {
|
| 15 |
+
return func(c *gin.Context) {
|
| 16 |
+
path := c.Request.URL.Path
|
| 17 |
+
method := c.Request.Method
|
| 18 |
+
userAgent := c.GetHeader("User-Agent")
|
| 19 |
+
|
| 20 |
+
// Check if it's Claude Code
|
| 21 |
+
isClaudeCode := strings.Contains(userAgent, "claude-code") || strings.Contains(userAgent, "claude-cli")
|
| 22 |
+
|
| 23 |
+
fields := log.Fields{
|
| 24 |
+
"prefix": "CLAUDE_CODE_AUTH_DEBUG",
|
| 25 |
+
"method": method,
|
| 26 |
+
"path": path,
|
| 27 |
+
"user_agent": userAgent,
|
| 28 |
+
"is_claude_code": isClaudeCode,
|
| 29 |
+
}
|
| 30 |
+
|
| 31 |
+
// Log Headers (masked)
|
| 32 |
+
headers := make(map[string]string)
|
| 33 |
+
for k, v := range c.Request.Header {
|
| 34 |
+
if strings.EqualFold(k, "Authorization") || strings.EqualFold(k, "x-api-key") {
|
| 35 |
+
headers[k] = "[MASKED]"
|
| 36 |
+
} else {
|
| 37 |
+
headers[k] = strings.Join(v, ";")
|
| 38 |
+
}
|
| 39 |
+
}
|
| 40 |
+
fields["headers"] = headers
|
| 41 |
+
|
| 42 |
+
// Peek Body
|
| 43 |
+
var bodyBytes []byte
|
| 44 |
+
if c.Request.Body != nil {
|
| 45 |
+
bodyBytes, _ = io.ReadAll(c.Request.Body)
|
| 46 |
+
c.Request.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
|
| 47 |
+
|
| 48 |
+
previewLen := 200
|
| 49 |
+
if len(bodyBytes) < previewLen {
|
| 50 |
+
previewLen = len(bodyBytes)
|
| 51 |
+
}
|
| 52 |
+
fields["body_preview"] = string(bodyBytes[:previewLen])
|
| 53 |
+
}
|
| 54 |
+
|
| 55 |
+
log.WithFields(fields).Debug("Incoming Anthropic Request")
|
| 56 |
+
|
| 57 |
+
c.Next()
|
| 58 |
+
}
|
| 59 |
+
}
|
scripts/verify_claude_proxy.sh
ADDED
|
@@ -0,0 +1,75 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env bash
set -euo pipefail

# Quick health check for Claude Code through CLIProxyAPIPlus.
# It validates configuration placeholders, confirms the proxy is reachable,
# and performs a real /v1/messages call using the provided client key.
#
# All settings are overridable via environment variables:
#   CONFIG_FILE, BASE_URL, CLIENT_KEY (or ANTHROPIC_API_KEY / CLAUDE_API_KEY), MODEL.

ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
CONFIG_FILE="${CONFIG_FILE:-${ROOT_DIR}/config.yaml}"
BASE_URL="${BASE_URL:-http://localhost:7860}"
# Client-key fallback chain: CLIENT_KEY, then ANTHROPIC_API_KEY, then CLAUDE_API_KEY.
CLIENT_KEY="${CLIENT_KEY:-${ANTHROPIC_API_KEY:-${CLAUDE_API_KEY:-}}}"
MODEL="${MODEL:-claude-3-5-haiku-20241022}"

# Scratch file for curl response bodies; removed on any exit via the trap.
tmp_body="$(mktemp)"
cleanup() { rm -f "$tmp_body"; }
trap cleanup EXIT

# Small logging helpers; warn/fatal write to stderr, fatal aborts the script.
info() { printf '[INFO] %s\n' "$*"; }
warn() { printf '[WARN] %s\n' "$*" >&2; }
fatal() { printf '[FAIL] %s\n' "$*" >&2; exit 1; }

info "Using BASE_URL=${BASE_URL}"
info "Using MODEL=${MODEL}"

# 1) Check for placeholder keys in config.yaml
if [[ -f "$CONFIG_FILE" ]]; then
  if grep -nE 'DEIN_(CLAUDE|GEMINI|OPENROUTER)_KEY' "$CONFIG_FILE" >/dev/null; then
    warn "config.yaml still contains placeholder provider keys (DEIN_*). Upstream requests will fail."
  fi
else
  warn "config file not found at ${CONFIG_FILE}"
fi

# 2) Ensure we have a client key
[[ -n "$CLIENT_KEY" ]] || fatal "No client key set. Export CLIENT_KEY or ANTHROPIC_API_KEY or CLAUDE_API_KEY."

# 3) /v1/models check
info "Checking /v1/models..."
# -w '%{http_code}' prints only the status code to stdout; the body lands in $tmp_body.
models_status=$(curl -sS -o "$tmp_body" -w '%{http_code}' \
  -H "Authorization: Bearer ${CLIENT_KEY}" \
  "${BASE_URL}/v1/models") || fatal "curl to /v1/models failed"
if [[ "$models_status" != "200" ]]; then
  cat "$tmp_body" >&2
  fatal "/v1/models returned HTTP ${models_status}"
fi
info "/v1/models OK"

# 4) /v1/messages check
info "Checking /v1/messages with model=${MODEL}..."
# Quoted heredoc delimiter ('JSON') disables shell expansion; the __MODEL__
# placeholder is substituted afterwards so the JSON stays literal here.
payload=$(cat <<'JSON'
{
  "model": "__MODEL__",
  "messages": [
    {"role": "user", "content": "ping"}
  ],
  "max_tokens": 16
}
JSON
)
payload="${payload/__MODEL__/${MODEL}}"

msg_status=$(curl -sS -o "$tmp_body" -w '%{http_code}' \
  -X POST "${BASE_URL}/v1/messages" \
  -H "x-api-key: ${CLIENT_KEY}" \
  -H "content-type: application/json" \
  -d "$payload") || fatal "curl to /v1/messages failed"

if [[ "$msg_status" != "200" ]]; then
  cat "$tmp_body" >&2
  fatal "/v1/messages returned HTTP ${msg_status}"
fi

info "/v1/messages OK"
info "Claude proxy looks healthy. You can run Claude Code with:"
# NOTE(review): the line below echoes the client key in cleartext to the
# terminal — convenient, but confirm this is acceptable before capturing logs.
info "ANTHROPIC_API_KEY=${CLIENT_KEY} ANTHROPIC_BASE_URL=${BASE_URL} claude"
|
server.log
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
switch-provider.sh
ADDED
|
@@ -0,0 +1,53 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash

# Switch the Claude Sonnet model mappings in config.yaml between providers
# (Kiro / Antigravity / Gemini) by rewriting the mapped "to:" targets in place.

# Configuration
CONFIG_FILE="/home/kek/CLIProxyAPIPlus/config.yaml"

# Providers — the target model identifier each provider maps Sonnet onto.
KIRO_MODEL="kiro-claude-sonnet-4-5-agentic"
ANTIGRAVITY_MODEL="gemini-claude-sonnet-4-5-thinking"
GEMINI_MODEL="gemini-2.5-pro"

# Helper function to update config
# $1 = target model string, $2 = human-readable provider name for output.
update_config() {
    local TARGET_MODEL="$1"
    local PROVIDER_NAME="$2"

    echo "Switching Claude Sonnet to $PROVIDER_NAME..."

    # Use sed to replace the 'to:' line for Sonnet mappings
    # We match the lines specifically to avoid breaking other mappings
    # NOTE(review): these substitutions are global (-g) and match by target
    # string, so any OTHER mapping whose "to:" happens to equal one of the
    # three provider models would also be rewritten — confirm config.yaml
    # never reuses these exact targets elsewhere.

    # 1. Update claude-3-5-sonnet-20241022
    sed -i "s|to: \"kiro-claude-sonnet-4-5-agentic\"|to: \"$TARGET_MODEL\"|g" "$CONFIG_FILE"
    sed -i "s|to: \"gemini-claude-sonnet-4-5-thinking\"|to: \"$TARGET_MODEL\"|g" "$CONFIG_FILE"
    sed -i "s|to: \"gemini-2.5-pro\"|to: \"$TARGET_MODEL\"|g" "$CONFIG_FILE"

    echo "✅ Switched to $PROVIDER_NAME!"
    echo "You can now use proxy-claude immediately."
}

# Main logic — dispatch on the first CLI argument; anything else prints usage.
case "$1" in
    kiro)
        update_config "$KIRO_MODEL" "Kiro (Power Key)"
        ;;
    antigravity)
        update_config "$ANTIGRAVITY_MODEL" "Antigravity (8 Keys)"
        ;;
    gemini)
        update_config "$GEMINI_MODEL" "Gemini Pro (5+ Keys)"
        ;;
    *)
        echo "Usage: ./switch-provider.sh [kiro|antigravity|gemini]"
        echo ""
        echo "Current Status:"
        # Show the line currently mapped for the Sonnet alias.
        grep -A 1 "claude-3-5-sonnet-20241022" "$CONFIG_FILE" | grep "to:"
        echo ""
        echo "Examples:"
        echo "  ./switch-provider.sh kiro        -> Switch to Kiro"
        echo "  ./switch-provider.sh antigravity -> Switch to Antigravity"
        echo "  ./switch-provider.sh gemini      -> Switch to Gemini Pro"
        exit 1
        ;;
esac
|
test-proxy-config.sh
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash
# Test script to verify Claude Code proxy configuration
#
# The proxy API key is read from the PROXY_API_KEY environment variable so no
# real credential is hardcoded in the repository. Export PROXY_API_KEY (or
# edit the placeholder below) before running.

PROXY_API_KEY="${PROXY_API_KEY:-YOUR_PROXY_API_KEY}"

echo "Testing CLIProxyAPI connection..."
echo ""

# Test 1: Check if server is accessible
echo "1. Testing server endpoint..."
curl -s http://localhost:7860/v1/models \
  -H "Authorization: Bearer ${PROXY_API_KEY}" \
  | head -20

echo ""
echo ""
echo "2. Testing with environment variables..."
export ANTHROPIC_BASE_URL="http://localhost:7860"
export ANTHROPIC_API_KEY="${PROXY_API_KEY}"

echo "Environment variables set:"
echo "  ANTHROPIC_BASE_URL=$ANTHROPIC_BASE_URL"
echo "  ANTHROPIC_API_KEY=$ANTHROPIC_API_KEY"
echo ""
echo "✅ Configuration complete!"
echo ""
echo "To use Claude Code with the proxy:"
echo "  1. The settings are already configured in ~/.claude/settings.json"
echo "  2. Start a new Claude Code session: claude"
echo "  3. Your requests will route through the proxy with model mappings:"
echo "     - Sonnet → kiro-claude-sonnet-4-5-agentic"
echo "     - Haiku → gemini-2.5-flash"
echo "     - Opus → gemini-claude-opus-4-5-thinking"
|
test/amp_management_test.go
ADDED
|
@@ -0,0 +1,915 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
package test
|
| 2 |
+
|
| 3 |
+
import (
|
| 4 |
+
"bytes"
|
| 5 |
+
"encoding/json"
|
| 6 |
+
"net/http"
|
| 7 |
+
"net/http/httptest"
|
| 8 |
+
"os"
|
| 9 |
+
"path/filepath"
|
| 10 |
+
"testing"
|
| 11 |
+
|
| 12 |
+
"github.com/gin-gonic/gin"
|
| 13 |
+
"github.com/router-for-me/CLIProxyAPI/v6/internal/api/handlers/management"
|
| 14 |
+
"github.com/router-for-me/CLIProxyAPI/v6/internal/config"
|
| 15 |
+
)
|
| 16 |
+
|
| 17 |
+
func init() {
	// Put gin into test mode for the whole package so handlers run quietly
	// and debug-only behavior is disabled during tests.
	gin.SetMode(gin.TestMode)
}
|
| 20 |
+
|
| 21 |
+
// newAmpTestHandler creates a test handler with default ampcode configuration.
// It returns the management handler together with the on-disk config path so
// callers can reload the file and assert that mutations were persisted.
func newAmpTestHandler(t *testing.T) (*management.Handler, string) {
	t.Helper()
	tmpDir := t.TempDir()
	configPath := filepath.Join(tmpDir, "config.yaml")

	// In-memory config the handler starts from; the handler writes changes
	// back to configPath.
	cfg := &config.Config{
		AmpCode: config.AmpCode{
			UpstreamURL:                   "https://example.com",
			UpstreamAPIKey:                "test-api-key-12345",
			RestrictManagementToLocalhost: true,
			ForceModelMappings:            false,
			ModelMappings: []config.AmpModelMapping{
				{From: "gpt-4", To: "gemini-pro"},
			},
		},
	}

	// Deliberately minimal file on disk; handlers persist their edits into it.
	if err := os.WriteFile(configPath, []byte("port: 8080\n"), 0644); err != nil {
		t.Fatalf("failed to write config file: %v", err)
	}

	// NOTE(review): the third NewHandler argument (nil) is presumably an
	// optional auth/reload dependency — confirm against the management package.
	h := management.NewHandler(cfg, configPath, nil)
	return h, configPath
}
|
| 46 |
+
|
| 47 |
+
// setupAmpRouter creates a test router with all ampcode management endpoints.
// The route layout mirrors the production /v0/management group so each test
// exercises the same paths the real server registers.
func setupAmpRouter(h *management.Handler) *gin.Engine {
	r := gin.New()
	mgmt := r.Group("/v0/management")
	{
		mgmt.GET("/ampcode", h.GetAmpCode)
		mgmt.GET("/ampcode/upstream-url", h.GetAmpUpstreamURL)
		mgmt.PUT("/ampcode/upstream-url", h.PutAmpUpstreamURL)
		mgmt.DELETE("/ampcode/upstream-url", h.DeleteAmpUpstreamURL)
		mgmt.GET("/ampcode/upstream-api-key", h.GetAmpUpstreamAPIKey)
		mgmt.PUT("/ampcode/upstream-api-key", h.PutAmpUpstreamAPIKey)
		mgmt.DELETE("/ampcode/upstream-api-key", h.DeleteAmpUpstreamAPIKey)
		mgmt.GET("/ampcode/upstream-api-keys", h.GetAmpUpstreamAPIKeys)
		mgmt.PUT("/ampcode/upstream-api-keys", h.PutAmpUpstreamAPIKeys)
		mgmt.PATCH("/ampcode/upstream-api-keys", h.PatchAmpUpstreamAPIKeys)
		mgmt.DELETE("/ampcode/upstream-api-keys", h.DeleteAmpUpstreamAPIKeys)
		mgmt.GET("/ampcode/restrict-management-to-localhost", h.GetAmpRestrictManagementToLocalhost)
		mgmt.PUT("/ampcode/restrict-management-to-localhost", h.PutAmpRestrictManagementToLocalhost)
		mgmt.GET("/ampcode/model-mappings", h.GetAmpModelMappings)
		mgmt.PUT("/ampcode/model-mappings", h.PutAmpModelMappings)
		mgmt.PATCH("/ampcode/model-mappings", h.PatchAmpModelMappings)
		mgmt.DELETE("/ampcode/model-mappings", h.DeleteAmpModelMappings)
		mgmt.GET("/ampcode/force-model-mappings", h.GetAmpForceModelMappings)
		mgmt.PUT("/ampcode/force-model-mappings", h.PutAmpForceModelMappings)
	}
	return r
}
|
| 74 |
+
|
| 75 |
+
// TestGetAmpCode verifies GET /v0/management/ampcode returns full ampcode config.
func TestGetAmpCode(t *testing.T) {
	h, _ := newAmpTestHandler(t)
	r := setupAmpRouter(h)

	req := httptest.NewRequest(http.MethodGet, "/v0/management/ampcode", nil)
	w := httptest.NewRecorder()
	r.ServeHTTP(w, req)

	if w.Code != http.StatusOK {
		t.Fatalf("expected status %d, got %d", http.StatusOK, w.Code)
	}

	// Response shape is {"ampcode": {...}}; decode straight into the
	// config struct to reuse its JSON tags.
	var resp map[string]config.AmpCode
	if err := json.Unmarshal(w.Body.Bytes(), &resp); err != nil {
		t.Fatalf("failed to unmarshal response: %v", err)
	}

	// Spot-check two fields seeded by newAmpTestHandler.
	ampcode := resp["ampcode"]
	if ampcode.UpstreamURL != "https://example.com" {
		t.Errorf("expected upstream-url %q, got %q", "https://example.com", ampcode.UpstreamURL)
	}
	if len(ampcode.ModelMappings) != 1 {
		t.Errorf("expected 1 model mapping, got %d", len(ampcode.ModelMappings))
	}
}
|
| 101 |
+
|
| 102 |
+
// TestGetAmpUpstreamURL verifies GET /v0/management/ampcode/upstream-url returns the upstream URL.
|
| 103 |
+
func TestGetAmpUpstreamURL(t *testing.T) {
|
| 104 |
+
h, _ := newAmpTestHandler(t)
|
| 105 |
+
r := setupAmpRouter(h)
|
| 106 |
+
|
| 107 |
+
req := httptest.NewRequest(http.MethodGet, "/v0/management/ampcode/upstream-url", nil)
|
| 108 |
+
w := httptest.NewRecorder()
|
| 109 |
+
r.ServeHTTP(w, req)
|
| 110 |
+
|
| 111 |
+
if w.Code != http.StatusOK {
|
| 112 |
+
t.Fatalf("expected status %d, got %d", http.StatusOK, w.Code)
|
| 113 |
+
}
|
| 114 |
+
|
| 115 |
+
var resp map[string]string
|
| 116 |
+
if err := json.Unmarshal(w.Body.Bytes(), &resp); err != nil {
|
| 117 |
+
t.Fatalf("failed to unmarshal response: %v", err)
|
| 118 |
+
}
|
| 119 |
+
|
| 120 |
+
if resp["upstream-url"] != "https://example.com" {
|
| 121 |
+
t.Errorf("expected %q, got %q", "https://example.com", resp["upstream-url"])
|
| 122 |
+
}
|
| 123 |
+
}
|
| 124 |
+
|
| 125 |
+
// TestPutAmpUpstreamURL verifies PUT /v0/management/ampcode/upstream-url updates the upstream URL.
|
| 126 |
+
func TestPutAmpUpstreamURL(t *testing.T) {
|
| 127 |
+
h, _ := newAmpTestHandler(t)
|
| 128 |
+
r := setupAmpRouter(h)
|
| 129 |
+
|
| 130 |
+
body := `{"value": "https://new-upstream.com"}`
|
| 131 |
+
req := httptest.NewRequest(http.MethodPut, "/v0/management/ampcode/upstream-url", bytes.NewBufferString(body))
|
| 132 |
+
req.Header.Set("Content-Type", "application/json")
|
| 133 |
+
w := httptest.NewRecorder()
|
| 134 |
+
r.ServeHTTP(w, req)
|
| 135 |
+
|
| 136 |
+
if w.Code != http.StatusOK {
|
| 137 |
+
t.Fatalf("expected status %d, got %d: %s", http.StatusOK, w.Code, w.Body.String())
|
| 138 |
+
}
|
| 139 |
+
}
|
| 140 |
+
|
| 141 |
+
// TestDeleteAmpUpstreamURL verifies DELETE /v0/management/ampcode/upstream-url clears the upstream URL.
|
| 142 |
+
func TestDeleteAmpUpstreamURL(t *testing.T) {
|
| 143 |
+
h, _ := newAmpTestHandler(t)
|
| 144 |
+
r := setupAmpRouter(h)
|
| 145 |
+
|
| 146 |
+
req := httptest.NewRequest(http.MethodDelete, "/v0/management/ampcode/upstream-url", nil)
|
| 147 |
+
w := httptest.NewRecorder()
|
| 148 |
+
r.ServeHTTP(w, req)
|
| 149 |
+
|
| 150 |
+
if w.Code != http.StatusOK {
|
| 151 |
+
t.Fatalf("expected status %d, got %d", http.StatusOK, w.Code)
|
| 152 |
+
}
|
| 153 |
+
}
|
| 154 |
+
|
| 155 |
+
// TestGetAmpUpstreamAPIKey verifies GET /v0/management/ampcode/upstream-api-key returns the API key.
|
| 156 |
+
func TestGetAmpUpstreamAPIKey(t *testing.T) {
|
| 157 |
+
h, _ := newAmpTestHandler(t)
|
| 158 |
+
r := setupAmpRouter(h)
|
| 159 |
+
|
| 160 |
+
req := httptest.NewRequest(http.MethodGet, "/v0/management/ampcode/upstream-api-key", nil)
|
| 161 |
+
w := httptest.NewRecorder()
|
| 162 |
+
r.ServeHTTP(w, req)
|
| 163 |
+
|
| 164 |
+
if w.Code != http.StatusOK {
|
| 165 |
+
t.Fatalf("expected status %d, got %d", http.StatusOK, w.Code)
|
| 166 |
+
}
|
| 167 |
+
|
| 168 |
+
var resp map[string]any
|
| 169 |
+
if err := json.Unmarshal(w.Body.Bytes(), &resp); err != nil {
|
| 170 |
+
t.Fatalf("failed to unmarshal response: %v", err)
|
| 171 |
+
}
|
| 172 |
+
|
| 173 |
+
key := resp["upstream-api-key"].(string)
|
| 174 |
+
if key != "test-api-key-12345" {
|
| 175 |
+
t.Errorf("expected key %q, got %q", "test-api-key-12345", key)
|
| 176 |
+
}
|
| 177 |
+
}
|
| 178 |
+
|
| 179 |
+
// TestPutAmpUpstreamAPIKey verifies PUT /v0/management/ampcode/upstream-api-key updates the API key.
|
| 180 |
+
func TestPutAmpUpstreamAPIKey(t *testing.T) {
|
| 181 |
+
h, _ := newAmpTestHandler(t)
|
| 182 |
+
r := setupAmpRouter(h)
|
| 183 |
+
|
| 184 |
+
body := `{"value": "new-secret-key"}`
|
| 185 |
+
req := httptest.NewRequest(http.MethodPut, "/v0/management/ampcode/upstream-api-key", bytes.NewBufferString(body))
|
| 186 |
+
req.Header.Set("Content-Type", "application/json")
|
| 187 |
+
w := httptest.NewRecorder()
|
| 188 |
+
r.ServeHTTP(w, req)
|
| 189 |
+
|
| 190 |
+
if w.Code != http.StatusOK {
|
| 191 |
+
t.Fatalf("expected status %d, got %d", http.StatusOK, w.Code)
|
| 192 |
+
}
|
| 193 |
+
}
|
| 194 |
+
|
| 195 |
+
func TestPutAmpUpstreamAPIKeys_PersistsAndReturns(t *testing.T) {
|
| 196 |
+
h, configPath := newAmpTestHandler(t)
|
| 197 |
+
r := setupAmpRouter(h)
|
| 198 |
+
|
| 199 |
+
body := `{"value":[{"upstream-api-key":" u1 ","api-keys":[" k1 ","","k2"]}]}`
|
| 200 |
+
req := httptest.NewRequest(http.MethodPut, "/v0/management/ampcode/upstream-api-keys", bytes.NewBufferString(body))
|
| 201 |
+
req.Header.Set("Content-Type", "application/json")
|
| 202 |
+
w := httptest.NewRecorder()
|
| 203 |
+
r.ServeHTTP(w, req)
|
| 204 |
+
|
| 205 |
+
if w.Code != http.StatusOK {
|
| 206 |
+
t.Fatalf("expected status %d, got %d: %s", http.StatusOK, w.Code, w.Body.String())
|
| 207 |
+
}
|
| 208 |
+
|
| 209 |
+
// Verify it was persisted to disk
|
| 210 |
+
loaded, err := config.LoadConfig(configPath)
|
| 211 |
+
if err != nil {
|
| 212 |
+
t.Fatalf("failed to load config from disk: %v", err)
|
| 213 |
+
}
|
| 214 |
+
if len(loaded.AmpCode.UpstreamAPIKeys) != 1 {
|
| 215 |
+
t.Fatalf("expected 1 upstream-api-keys entry, got %d", len(loaded.AmpCode.UpstreamAPIKeys))
|
| 216 |
+
}
|
| 217 |
+
entry := loaded.AmpCode.UpstreamAPIKeys[0]
|
| 218 |
+
if entry.UpstreamAPIKey != "u1" {
|
| 219 |
+
t.Fatalf("expected upstream-api-key u1, got %q", entry.UpstreamAPIKey)
|
| 220 |
+
}
|
| 221 |
+
if len(entry.APIKeys) != 2 || entry.APIKeys[0] != "k1" || entry.APIKeys[1] != "k2" {
|
| 222 |
+
t.Fatalf("expected api-keys [k1 k2], got %#v", entry.APIKeys)
|
| 223 |
+
}
|
| 224 |
+
|
| 225 |
+
// Verify it is returned by GET /ampcode
|
| 226 |
+
req = httptest.NewRequest(http.MethodGet, "/v0/management/ampcode", nil)
|
| 227 |
+
w = httptest.NewRecorder()
|
| 228 |
+
r.ServeHTTP(w, req)
|
| 229 |
+
if w.Code != http.StatusOK {
|
| 230 |
+
t.Fatalf("expected status %d, got %d", http.StatusOK, w.Code)
|
| 231 |
+
}
|
| 232 |
+
var resp map[string]config.AmpCode
|
| 233 |
+
if err := json.Unmarshal(w.Body.Bytes(), &resp); err != nil {
|
| 234 |
+
t.Fatalf("failed to unmarshal response: %v", err)
|
| 235 |
+
}
|
| 236 |
+
if got := resp["ampcode"].UpstreamAPIKeys; len(got) != 1 || got[0].UpstreamAPIKey != "u1" {
|
| 237 |
+
t.Fatalf("expected upstream-api-keys to be present after update, got %#v", got)
|
| 238 |
+
}
|
| 239 |
+
}
|
| 240 |
+
|
| 241 |
+
func TestDeleteAmpUpstreamAPIKeys_ClearsAll(t *testing.T) {
|
| 242 |
+
h, _ := newAmpTestHandler(t)
|
| 243 |
+
r := setupAmpRouter(h)
|
| 244 |
+
|
| 245 |
+
// Seed with one entry
|
| 246 |
+
putBody := `{"value":[{"upstream-api-key":"u1","api-keys":["k1"]}]}`
|
| 247 |
+
req := httptest.NewRequest(http.MethodPut, "/v0/management/ampcode/upstream-api-keys", bytes.NewBufferString(putBody))
|
| 248 |
+
req.Header.Set("Content-Type", "application/json")
|
| 249 |
+
w := httptest.NewRecorder()
|
| 250 |
+
r.ServeHTTP(w, req)
|
| 251 |
+
if w.Code != http.StatusOK {
|
| 252 |
+
t.Fatalf("expected status %d, got %d: %s", http.StatusOK, w.Code, w.Body.String())
|
| 253 |
+
}
|
| 254 |
+
|
| 255 |
+
deleteBody := `{"value":[]}`
|
| 256 |
+
req = httptest.NewRequest(http.MethodDelete, "/v0/management/ampcode/upstream-api-keys", bytes.NewBufferString(deleteBody))
|
| 257 |
+
req.Header.Set("Content-Type", "application/json")
|
| 258 |
+
w = httptest.NewRecorder()
|
| 259 |
+
r.ServeHTTP(w, req)
|
| 260 |
+
if w.Code != http.StatusOK {
|
| 261 |
+
t.Fatalf("expected status %d, got %d", http.StatusOK, w.Code)
|
| 262 |
+
}
|
| 263 |
+
|
| 264 |
+
req = httptest.NewRequest(http.MethodGet, "/v0/management/ampcode/upstream-api-keys", nil)
|
| 265 |
+
w = httptest.NewRecorder()
|
| 266 |
+
r.ServeHTTP(w, req)
|
| 267 |
+
if w.Code != http.StatusOK {
|
| 268 |
+
t.Fatalf("expected status %d, got %d", http.StatusOK, w.Code)
|
| 269 |
+
}
|
| 270 |
+
var resp map[string][]config.AmpUpstreamAPIKeyEntry
|
| 271 |
+
if err := json.Unmarshal(w.Body.Bytes(), &resp); err != nil {
|
| 272 |
+
t.Fatalf("failed to unmarshal response: %v", err)
|
| 273 |
+
}
|
| 274 |
+
if resp["upstream-api-keys"] != nil && len(resp["upstream-api-keys"]) != 0 {
|
| 275 |
+
t.Fatalf("expected cleared list, got %#v", resp["upstream-api-keys"])
|
| 276 |
+
}
|
| 277 |
+
}
|
| 278 |
+
|
| 279 |
+
// TestDeleteAmpUpstreamAPIKey verifies DELETE /v0/management/ampcode/upstream-api-key clears the API key.
|
| 280 |
+
func TestDeleteAmpUpstreamAPIKey(t *testing.T) {
|
| 281 |
+
h, _ := newAmpTestHandler(t)
|
| 282 |
+
r := setupAmpRouter(h)
|
| 283 |
+
|
| 284 |
+
req := httptest.NewRequest(http.MethodDelete, "/v0/management/ampcode/upstream-api-key", nil)
|
| 285 |
+
w := httptest.NewRecorder()
|
| 286 |
+
r.ServeHTTP(w, req)
|
| 287 |
+
|
| 288 |
+
if w.Code != http.StatusOK {
|
| 289 |
+
t.Fatalf("expected status %d, got %d", http.StatusOK, w.Code)
|
| 290 |
+
}
|
| 291 |
+
}
|
| 292 |
+
|
| 293 |
+
// TestGetAmpRestrictManagementToLocalhost verifies GET returns the localhost restriction setting.
|
| 294 |
+
func TestGetAmpRestrictManagementToLocalhost(t *testing.T) {
|
| 295 |
+
h, _ := newAmpTestHandler(t)
|
| 296 |
+
r := setupAmpRouter(h)
|
| 297 |
+
|
| 298 |
+
req := httptest.NewRequest(http.MethodGet, "/v0/management/ampcode/restrict-management-to-localhost", nil)
|
| 299 |
+
w := httptest.NewRecorder()
|
| 300 |
+
r.ServeHTTP(w, req)
|
| 301 |
+
|
| 302 |
+
if w.Code != http.StatusOK {
|
| 303 |
+
t.Fatalf("expected status %d, got %d", http.StatusOK, w.Code)
|
| 304 |
+
}
|
| 305 |
+
|
| 306 |
+
var resp map[string]bool
|
| 307 |
+
if err := json.Unmarshal(w.Body.Bytes(), &resp); err != nil {
|
| 308 |
+
t.Fatalf("failed to unmarshal response: %v", err)
|
| 309 |
+
}
|
| 310 |
+
|
| 311 |
+
if resp["restrict-management-to-localhost"] != true {
|
| 312 |
+
t.Error("expected restrict-management-to-localhost to be true")
|
| 313 |
+
}
|
| 314 |
+
}
|
| 315 |
+
|
| 316 |
+
// TestPutAmpRestrictManagementToLocalhost verifies PUT updates the localhost restriction setting.
|
| 317 |
+
func TestPutAmpRestrictManagementToLocalhost(t *testing.T) {
|
| 318 |
+
h, _ := newAmpTestHandler(t)
|
| 319 |
+
r := setupAmpRouter(h)
|
| 320 |
+
|
| 321 |
+
body := `{"value": false}`
|
| 322 |
+
req := httptest.NewRequest(http.MethodPut, "/v0/management/ampcode/restrict-management-to-localhost", bytes.NewBufferString(body))
|
| 323 |
+
req.Header.Set("Content-Type", "application/json")
|
| 324 |
+
w := httptest.NewRecorder()
|
| 325 |
+
r.ServeHTTP(w, req)
|
| 326 |
+
|
| 327 |
+
if w.Code != http.StatusOK {
|
| 328 |
+
t.Fatalf("expected status %d, got %d", http.StatusOK, w.Code)
|
| 329 |
+
}
|
| 330 |
+
}
|
| 331 |
+
|
| 332 |
+
// TestGetAmpModelMappings verifies GET /v0/management/ampcode/model-mappings returns all mappings.
|
| 333 |
+
func TestGetAmpModelMappings(t *testing.T) {
|
| 334 |
+
h, _ := newAmpTestHandler(t)
|
| 335 |
+
r := setupAmpRouter(h)
|
| 336 |
+
|
| 337 |
+
req := httptest.NewRequest(http.MethodGet, "/v0/management/ampcode/model-mappings", nil)
|
| 338 |
+
w := httptest.NewRecorder()
|
| 339 |
+
r.ServeHTTP(w, req)
|
| 340 |
+
|
| 341 |
+
if w.Code != http.StatusOK {
|
| 342 |
+
t.Fatalf("expected status %d, got %d", http.StatusOK, w.Code)
|
| 343 |
+
}
|
| 344 |
+
|
| 345 |
+
var resp map[string][]config.AmpModelMapping
|
| 346 |
+
if err := json.Unmarshal(w.Body.Bytes(), &resp); err != nil {
|
| 347 |
+
t.Fatalf("failed to unmarshal response: %v", err)
|
| 348 |
+
}
|
| 349 |
+
|
| 350 |
+
mappings := resp["model-mappings"]
|
| 351 |
+
if len(mappings) != 1 {
|
| 352 |
+
t.Fatalf("expected 1 mapping, got %d", len(mappings))
|
| 353 |
+
}
|
| 354 |
+
if mappings[0].From != "gpt-4" || mappings[0].To != "gemini-pro" {
|
| 355 |
+
t.Errorf("unexpected mapping: %+v", mappings[0])
|
| 356 |
+
}
|
| 357 |
+
}
|
| 358 |
+
|
| 359 |
+
// TestPutAmpModelMappings verifies PUT /v0/management/ampcode/model-mappings replaces all mappings.
|
| 360 |
+
func TestPutAmpModelMappings(t *testing.T) {
|
| 361 |
+
h, _ := newAmpTestHandler(t)
|
| 362 |
+
r := setupAmpRouter(h)
|
| 363 |
+
|
| 364 |
+
body := `{"value": [{"from": "claude-3", "to": "gpt-4o"}, {"from": "gemini", "to": "claude"}]}`
|
| 365 |
+
req := httptest.NewRequest(http.MethodPut, "/v0/management/ampcode/model-mappings", bytes.NewBufferString(body))
|
| 366 |
+
req.Header.Set("Content-Type", "application/json")
|
| 367 |
+
w := httptest.NewRecorder()
|
| 368 |
+
r.ServeHTTP(w, req)
|
| 369 |
+
|
| 370 |
+
if w.Code != http.StatusOK {
|
| 371 |
+
t.Fatalf("expected status %d, got %d: %s", http.StatusOK, w.Code, w.Body.String())
|
| 372 |
+
}
|
| 373 |
+
}
|
| 374 |
+
|
| 375 |
+
// TestPatchAmpModelMappings verifies PATCH updates existing mappings and adds new ones.
|
| 376 |
+
func TestPatchAmpModelMappings(t *testing.T) {
|
| 377 |
+
h, _ := newAmpTestHandler(t)
|
| 378 |
+
r := setupAmpRouter(h)
|
| 379 |
+
|
| 380 |
+
body := `{"value": [{"from": "gpt-4", "to": "updated-model"}, {"from": "new-model", "to": "target"}]}`
|
| 381 |
+
req := httptest.NewRequest(http.MethodPatch, "/v0/management/ampcode/model-mappings", bytes.NewBufferString(body))
|
| 382 |
+
req.Header.Set("Content-Type", "application/json")
|
| 383 |
+
w := httptest.NewRecorder()
|
| 384 |
+
r.ServeHTTP(w, req)
|
| 385 |
+
|
| 386 |
+
if w.Code != http.StatusOK {
|
| 387 |
+
t.Fatalf("expected status %d, got %d: %s", http.StatusOK, w.Code, w.Body.String())
|
| 388 |
+
}
|
| 389 |
+
}
|
| 390 |
+
|
| 391 |
+
// TestDeleteAmpModelMappings_Specific verifies DELETE removes specified mappings by "from" field.
|
| 392 |
+
func TestDeleteAmpModelMappings_Specific(t *testing.T) {
|
| 393 |
+
h, _ := newAmpTestHandler(t)
|
| 394 |
+
r := setupAmpRouter(h)
|
| 395 |
+
|
| 396 |
+
body := `{"value": ["gpt-4"]}`
|
| 397 |
+
req := httptest.NewRequest(http.MethodDelete, "/v0/management/ampcode/model-mappings", bytes.NewBufferString(body))
|
| 398 |
+
req.Header.Set("Content-Type", "application/json")
|
| 399 |
+
w := httptest.NewRecorder()
|
| 400 |
+
r.ServeHTTP(w, req)
|
| 401 |
+
|
| 402 |
+
if w.Code != http.StatusOK {
|
| 403 |
+
t.Fatalf("expected status %d, got %d", http.StatusOK, w.Code)
|
| 404 |
+
}
|
| 405 |
+
}
|
| 406 |
+
|
| 407 |
+
// TestDeleteAmpModelMappings_All verifies DELETE with empty body removes all mappings.
|
| 408 |
+
func TestDeleteAmpModelMappings_All(t *testing.T) {
|
| 409 |
+
h, _ := newAmpTestHandler(t)
|
| 410 |
+
r := setupAmpRouter(h)
|
| 411 |
+
|
| 412 |
+
req := httptest.NewRequest(http.MethodDelete, "/v0/management/ampcode/model-mappings", nil)
|
| 413 |
+
w := httptest.NewRecorder()
|
| 414 |
+
r.ServeHTTP(w, req)
|
| 415 |
+
|
| 416 |
+
if w.Code != http.StatusOK {
|
| 417 |
+
t.Fatalf("expected status %d, got %d", http.StatusOK, w.Code)
|
| 418 |
+
}
|
| 419 |
+
}
|
| 420 |
+
|
| 421 |
+
// TestGetAmpForceModelMappings verifies GET returns the force-model-mappings setting.
|
| 422 |
+
func TestGetAmpForceModelMappings(t *testing.T) {
|
| 423 |
+
h, _ := newAmpTestHandler(t)
|
| 424 |
+
r := setupAmpRouter(h)
|
| 425 |
+
|
| 426 |
+
req := httptest.NewRequest(http.MethodGet, "/v0/management/ampcode/force-model-mappings", nil)
|
| 427 |
+
w := httptest.NewRecorder()
|
| 428 |
+
r.ServeHTTP(w, req)
|
| 429 |
+
|
| 430 |
+
if w.Code != http.StatusOK {
|
| 431 |
+
t.Fatalf("expected status %d, got %d", http.StatusOK, w.Code)
|
| 432 |
+
}
|
| 433 |
+
|
| 434 |
+
var resp map[string]bool
|
| 435 |
+
if err := json.Unmarshal(w.Body.Bytes(), &resp); err != nil {
|
| 436 |
+
t.Fatalf("failed to unmarshal response: %v", err)
|
| 437 |
+
}
|
| 438 |
+
|
| 439 |
+
if resp["force-model-mappings"] != false {
|
| 440 |
+
t.Error("expected force-model-mappings to be false")
|
| 441 |
+
}
|
| 442 |
+
}
|
| 443 |
+
|
| 444 |
+
// TestPutAmpForceModelMappings verifies PUT updates the force-model-mappings setting.
|
| 445 |
+
func TestPutAmpForceModelMappings(t *testing.T) {
|
| 446 |
+
h, _ := newAmpTestHandler(t)
|
| 447 |
+
r := setupAmpRouter(h)
|
| 448 |
+
|
| 449 |
+
body := `{"value": true}`
|
| 450 |
+
req := httptest.NewRequest(http.MethodPut, "/v0/management/ampcode/force-model-mappings", bytes.NewBufferString(body))
|
| 451 |
+
req.Header.Set("Content-Type", "application/json")
|
| 452 |
+
w := httptest.NewRecorder()
|
| 453 |
+
r.ServeHTTP(w, req)
|
| 454 |
+
|
| 455 |
+
if w.Code != http.StatusOK {
|
| 456 |
+
t.Fatalf("expected status %d, got %d", http.StatusOK, w.Code)
|
| 457 |
+
}
|
| 458 |
+
}
|
| 459 |
+
|
| 460 |
+
// TestPutAmpModelMappings_VerifyState verifies PUT replaces mappings and state is persisted.
|
| 461 |
+
func TestPutAmpModelMappings_VerifyState(t *testing.T) {
|
| 462 |
+
h, _ := newAmpTestHandler(t)
|
| 463 |
+
r := setupAmpRouter(h)
|
| 464 |
+
|
| 465 |
+
body := `{"value": [{"from": "model-a", "to": "model-b"}, {"from": "model-c", "to": "model-d"}, {"from": "model-e", "to": "model-f"}]}`
|
| 466 |
+
req := httptest.NewRequest(http.MethodPut, "/v0/management/ampcode/model-mappings", bytes.NewBufferString(body))
|
| 467 |
+
req.Header.Set("Content-Type", "application/json")
|
| 468 |
+
w := httptest.NewRecorder()
|
| 469 |
+
r.ServeHTTP(w, req)
|
| 470 |
+
|
| 471 |
+
if w.Code != http.StatusOK {
|
| 472 |
+
t.Fatalf("PUT failed: status %d, body: %s", w.Code, w.Body.String())
|
| 473 |
+
}
|
| 474 |
+
|
| 475 |
+
req = httptest.NewRequest(http.MethodGet, "/v0/management/ampcode/model-mappings", nil)
|
| 476 |
+
w = httptest.NewRecorder()
|
| 477 |
+
r.ServeHTTP(w, req)
|
| 478 |
+
|
| 479 |
+
var resp map[string][]config.AmpModelMapping
|
| 480 |
+
if err := json.Unmarshal(w.Body.Bytes(), &resp); err != nil {
|
| 481 |
+
t.Fatalf("failed to unmarshal: %v", err)
|
| 482 |
+
}
|
| 483 |
+
|
| 484 |
+
mappings := resp["model-mappings"]
|
| 485 |
+
if len(mappings) != 3 {
|
| 486 |
+
t.Fatalf("expected 3 mappings, got %d", len(mappings))
|
| 487 |
+
}
|
| 488 |
+
|
| 489 |
+
expected := map[string]string{"model-a": "model-b", "model-c": "model-d", "model-e": "model-f"}
|
| 490 |
+
for _, m := range mappings {
|
| 491 |
+
if expected[m.From] != m.To {
|
| 492 |
+
t.Errorf("mapping %q -> expected %q, got %q", m.From, expected[m.From], m.To)
|
| 493 |
+
}
|
| 494 |
+
}
|
| 495 |
+
}
|
| 496 |
+
|
| 497 |
+
// TestPatchAmpModelMappings_VerifyState verifies PATCH merges mappings correctly.
|
| 498 |
+
func TestPatchAmpModelMappings_VerifyState(t *testing.T) {
|
| 499 |
+
h, _ := newAmpTestHandler(t)
|
| 500 |
+
r := setupAmpRouter(h)
|
| 501 |
+
|
| 502 |
+
body := `{"value": [{"from": "gpt-4", "to": "updated-target"}, {"from": "new-model", "to": "new-target"}]}`
|
| 503 |
+
req := httptest.NewRequest(http.MethodPatch, "/v0/management/ampcode/model-mappings", bytes.NewBufferString(body))
|
| 504 |
+
req.Header.Set("Content-Type", "application/json")
|
| 505 |
+
w := httptest.NewRecorder()
|
| 506 |
+
r.ServeHTTP(w, req)
|
| 507 |
+
|
| 508 |
+
if w.Code != http.StatusOK {
|
| 509 |
+
t.Fatalf("PATCH failed: status %d", w.Code)
|
| 510 |
+
}
|
| 511 |
+
|
| 512 |
+
req = httptest.NewRequest(http.MethodGet, "/v0/management/ampcode/model-mappings", nil)
|
| 513 |
+
w = httptest.NewRecorder()
|
| 514 |
+
r.ServeHTTP(w, req)
|
| 515 |
+
|
| 516 |
+
var resp map[string][]config.AmpModelMapping
|
| 517 |
+
if err := json.Unmarshal(w.Body.Bytes(), &resp); err != nil {
|
| 518 |
+
t.Fatalf("failed to unmarshal: %v", err)
|
| 519 |
+
}
|
| 520 |
+
|
| 521 |
+
mappings := resp["model-mappings"]
|
| 522 |
+
if len(mappings) != 2 {
|
| 523 |
+
t.Fatalf("expected 2 mappings (1 updated + 1 new), got %d", len(mappings))
|
| 524 |
+
}
|
| 525 |
+
|
| 526 |
+
found := make(map[string]string)
|
| 527 |
+
for _, m := range mappings {
|
| 528 |
+
found[m.From] = m.To
|
| 529 |
+
}
|
| 530 |
+
|
| 531 |
+
if found["gpt-4"] != "updated-target" {
|
| 532 |
+
t.Errorf("gpt-4 should map to updated-target, got %q", found["gpt-4"])
|
| 533 |
+
}
|
| 534 |
+
if found["new-model"] != "new-target" {
|
| 535 |
+
t.Errorf("new-model should map to new-target, got %q", found["new-model"])
|
| 536 |
+
}
|
| 537 |
+
}
|
| 538 |
+
|
| 539 |
+
// TestDeleteAmpModelMappings_VerifyState verifies DELETE removes specific mappings and keeps others.
|
| 540 |
+
func TestDeleteAmpModelMappings_VerifyState(t *testing.T) {
|
| 541 |
+
h, _ := newAmpTestHandler(t)
|
| 542 |
+
r := setupAmpRouter(h)
|
| 543 |
+
|
| 544 |
+
putBody := `{"value": [{"from": "a", "to": "1"}, {"from": "b", "to": "2"}, {"from": "c", "to": "3"}]}`
|
| 545 |
+
req := httptest.NewRequest(http.MethodPut, "/v0/management/ampcode/model-mappings", bytes.NewBufferString(putBody))
|
| 546 |
+
req.Header.Set("Content-Type", "application/json")
|
| 547 |
+
w := httptest.NewRecorder()
|
| 548 |
+
r.ServeHTTP(w, req)
|
| 549 |
+
|
| 550 |
+
delBody := `{"value": ["a", "c"]}`
|
| 551 |
+
req = httptest.NewRequest(http.MethodDelete, "/v0/management/ampcode/model-mappings", bytes.NewBufferString(delBody))
|
| 552 |
+
req.Header.Set("Content-Type", "application/json")
|
| 553 |
+
w = httptest.NewRecorder()
|
| 554 |
+
r.ServeHTTP(w, req)
|
| 555 |
+
|
| 556 |
+
if w.Code != http.StatusOK {
|
| 557 |
+
t.Fatalf("DELETE failed: status %d", w.Code)
|
| 558 |
+
}
|
| 559 |
+
|
| 560 |
+
req = httptest.NewRequest(http.MethodGet, "/v0/management/ampcode/model-mappings", nil)
|
| 561 |
+
w = httptest.NewRecorder()
|
| 562 |
+
r.ServeHTTP(w, req)
|
| 563 |
+
|
| 564 |
+
var resp map[string][]config.AmpModelMapping
|
| 565 |
+
if err := json.Unmarshal(w.Body.Bytes(), &resp); err != nil {
|
| 566 |
+
t.Fatalf("failed to unmarshal: %v", err)
|
| 567 |
+
}
|
| 568 |
+
|
| 569 |
+
mappings := resp["model-mappings"]
|
| 570 |
+
if len(mappings) != 1 {
|
| 571 |
+
t.Fatalf("expected 1 mapping remaining, got %d", len(mappings))
|
| 572 |
+
}
|
| 573 |
+
if mappings[0].From != "b" || mappings[0].To != "2" {
|
| 574 |
+
t.Errorf("expected b->2, got %s->%s", mappings[0].From, mappings[0].To)
|
| 575 |
+
}
|
| 576 |
+
}
|
| 577 |
+
|
| 578 |
+
// TestDeleteAmpModelMappings_NonExistent verifies DELETE with non-existent mapping doesn't affect existing ones.
|
| 579 |
+
func TestDeleteAmpModelMappings_NonExistent(t *testing.T) {
|
| 580 |
+
h, _ := newAmpTestHandler(t)
|
| 581 |
+
r := setupAmpRouter(h)
|
| 582 |
+
|
| 583 |
+
delBody := `{"value": ["non-existent-model"]}`
|
| 584 |
+
req := httptest.NewRequest(http.MethodDelete, "/v0/management/ampcode/model-mappings", bytes.NewBufferString(delBody))
|
| 585 |
+
req.Header.Set("Content-Type", "application/json")
|
| 586 |
+
w := httptest.NewRecorder()
|
| 587 |
+
r.ServeHTTP(w, req)
|
| 588 |
+
|
| 589 |
+
if w.Code != http.StatusOK {
|
| 590 |
+
t.Fatalf("expected status %d, got %d", http.StatusOK, w.Code)
|
| 591 |
+
}
|
| 592 |
+
|
| 593 |
+
req = httptest.NewRequest(http.MethodGet, "/v0/management/ampcode/model-mappings", nil)
|
| 594 |
+
w = httptest.NewRecorder()
|
| 595 |
+
r.ServeHTTP(w, req)
|
| 596 |
+
|
| 597 |
+
var resp map[string][]config.AmpModelMapping
|
| 598 |
+
if err := json.Unmarshal(w.Body.Bytes(), &resp); err != nil {
|
| 599 |
+
t.Fatalf("failed to unmarshal: %v", err)
|
| 600 |
+
}
|
| 601 |
+
|
| 602 |
+
if len(resp["model-mappings"]) != 1 {
|
| 603 |
+
t.Errorf("original mapping should remain, got %d mappings", len(resp["model-mappings"]))
|
| 604 |
+
}
|
| 605 |
+
}
|
| 606 |
+
|
| 607 |
+
// TestPutAmpModelMappings_Empty verifies PUT with empty array clears all mappings.
|
| 608 |
+
func TestPutAmpModelMappings_Empty(t *testing.T) {
|
| 609 |
+
h, _ := newAmpTestHandler(t)
|
| 610 |
+
r := setupAmpRouter(h)
|
| 611 |
+
|
| 612 |
+
body := `{"value": []}`
|
| 613 |
+
req := httptest.NewRequest(http.MethodPut, "/v0/management/ampcode/model-mappings", bytes.NewBufferString(body))
|
| 614 |
+
req.Header.Set("Content-Type", "application/json")
|
| 615 |
+
w := httptest.NewRecorder()
|
| 616 |
+
r.ServeHTTP(w, req)
|
| 617 |
+
|
| 618 |
+
if w.Code != http.StatusOK {
|
| 619 |
+
t.Fatalf("expected status %d, got %d", http.StatusOK, w.Code)
|
| 620 |
+
}
|
| 621 |
+
|
| 622 |
+
req = httptest.NewRequest(http.MethodGet, "/v0/management/ampcode/model-mappings", nil)
|
| 623 |
+
w = httptest.NewRecorder()
|
| 624 |
+
r.ServeHTTP(w, req)
|
| 625 |
+
|
| 626 |
+
var resp map[string][]config.AmpModelMapping
|
| 627 |
+
if err := json.Unmarshal(w.Body.Bytes(), &resp); err != nil {
|
| 628 |
+
t.Fatalf("failed to unmarshal: %v", err)
|
| 629 |
+
}
|
| 630 |
+
|
| 631 |
+
if len(resp["model-mappings"]) != 0 {
|
| 632 |
+
t.Errorf("expected 0 mappings, got %d", len(resp["model-mappings"]))
|
| 633 |
+
}
|
| 634 |
+
}
|
| 635 |
+
|
| 636 |
+
// TestPutAmpUpstreamURL_VerifyState verifies PUT updates upstream URL and persists state.
|
| 637 |
+
func TestPutAmpUpstreamURL_VerifyState(t *testing.T) {
|
| 638 |
+
h, _ := newAmpTestHandler(t)
|
| 639 |
+
r := setupAmpRouter(h)
|
| 640 |
+
|
| 641 |
+
body := `{"value": "https://new-api.example.com"}`
|
| 642 |
+
req := httptest.NewRequest(http.MethodPut, "/v0/management/ampcode/upstream-url", bytes.NewBufferString(body))
|
| 643 |
+
req.Header.Set("Content-Type", "application/json")
|
| 644 |
+
w := httptest.NewRecorder()
|
| 645 |
+
r.ServeHTTP(w, req)
|
| 646 |
+
|
| 647 |
+
if w.Code != http.StatusOK {
|
| 648 |
+
t.Fatalf("PUT failed: status %d", w.Code)
|
| 649 |
+
}
|
| 650 |
+
|
| 651 |
+
req = httptest.NewRequest(http.MethodGet, "/v0/management/ampcode/upstream-url", nil)
|
| 652 |
+
w = httptest.NewRecorder()
|
| 653 |
+
r.ServeHTTP(w, req)
|
| 654 |
+
|
| 655 |
+
var resp map[string]string
|
| 656 |
+
if err := json.Unmarshal(w.Body.Bytes(), &resp); err != nil {
|
| 657 |
+
t.Fatalf("failed to unmarshal: %v", err)
|
| 658 |
+
}
|
| 659 |
+
|
| 660 |
+
if resp["upstream-url"] != "https://new-api.example.com" {
|
| 661 |
+
t.Errorf("expected %q, got %q", "https://new-api.example.com", resp["upstream-url"])
|
| 662 |
+
}
|
| 663 |
+
}
|
| 664 |
+
|
| 665 |
+
// TestDeleteAmpUpstreamURL_VerifyState verifies DELETE clears upstream URL.
|
| 666 |
+
func TestDeleteAmpUpstreamURL_VerifyState(t *testing.T) {
|
| 667 |
+
h, _ := newAmpTestHandler(t)
|
| 668 |
+
r := setupAmpRouter(h)
|
| 669 |
+
|
| 670 |
+
req := httptest.NewRequest(http.MethodDelete, "/v0/management/ampcode/upstream-url", nil)
|
| 671 |
+
w := httptest.NewRecorder()
|
| 672 |
+
r.ServeHTTP(w, req)
|
| 673 |
+
|
| 674 |
+
if w.Code != http.StatusOK {
|
| 675 |
+
t.Fatalf("DELETE failed: status %d", w.Code)
|
| 676 |
+
}
|
| 677 |
+
|
| 678 |
+
req = httptest.NewRequest(http.MethodGet, "/v0/management/ampcode/upstream-url", nil)
|
| 679 |
+
w = httptest.NewRecorder()
|
| 680 |
+
r.ServeHTTP(w, req)
|
| 681 |
+
|
| 682 |
+
var resp map[string]string
|
| 683 |
+
if err := json.Unmarshal(w.Body.Bytes(), &resp); err != nil {
|
| 684 |
+
t.Fatalf("failed to unmarshal: %v", err)
|
| 685 |
+
}
|
| 686 |
+
|
| 687 |
+
if resp["upstream-url"] != "" {
|
| 688 |
+
t.Errorf("expected empty string, got %q", resp["upstream-url"])
|
| 689 |
+
}
|
| 690 |
+
}
|
| 691 |
+
|
| 692 |
+
// TestPutAmpUpstreamAPIKey_VerifyState verifies PUT updates API key and persists state.
|
| 693 |
+
func TestPutAmpUpstreamAPIKey_VerifyState(t *testing.T) {
|
| 694 |
+
h, _ := newAmpTestHandler(t)
|
| 695 |
+
r := setupAmpRouter(h)
|
| 696 |
+
|
| 697 |
+
body := `{"value": "new-secret-api-key-xyz"}`
|
| 698 |
+
req := httptest.NewRequest(http.MethodPut, "/v0/management/ampcode/upstream-api-key", bytes.NewBufferString(body))
|
| 699 |
+
req.Header.Set("Content-Type", "application/json")
|
| 700 |
+
w := httptest.NewRecorder()
|
| 701 |
+
r.ServeHTTP(w, req)
|
| 702 |
+
|
| 703 |
+
if w.Code != http.StatusOK {
|
| 704 |
+
t.Fatalf("PUT failed: status %d", w.Code)
|
| 705 |
+
}
|
| 706 |
+
|
| 707 |
+
req = httptest.NewRequest(http.MethodGet, "/v0/management/ampcode/upstream-api-key", nil)
|
| 708 |
+
w = httptest.NewRecorder()
|
| 709 |
+
r.ServeHTTP(w, req)
|
| 710 |
+
|
| 711 |
+
var resp map[string]string
|
| 712 |
+
if err := json.Unmarshal(w.Body.Bytes(), &resp); err != nil {
|
| 713 |
+
t.Fatalf("failed to unmarshal: %v", err)
|
| 714 |
+
}
|
| 715 |
+
|
| 716 |
+
if resp["upstream-api-key"] != "new-secret-api-key-xyz" {
|
| 717 |
+
t.Errorf("expected %q, got %q", "new-secret-api-key-xyz", resp["upstream-api-key"])
|
| 718 |
+
}
|
| 719 |
+
}
|
| 720 |
+
|
| 721 |
+
// TestDeleteAmpUpstreamAPIKey_VerifyState verifies DELETE clears API key.
|
| 722 |
+
func TestDeleteAmpUpstreamAPIKey_VerifyState(t *testing.T) {
|
| 723 |
+
h, _ := newAmpTestHandler(t)
|
| 724 |
+
r := setupAmpRouter(h)
|
| 725 |
+
|
| 726 |
+
req := httptest.NewRequest(http.MethodDelete, "/v0/management/ampcode/upstream-api-key", nil)
|
| 727 |
+
w := httptest.NewRecorder()
|
| 728 |
+
r.ServeHTTP(w, req)
|
| 729 |
+
|
| 730 |
+
if w.Code != http.StatusOK {
|
| 731 |
+
t.Fatalf("DELETE failed: status %d", w.Code)
|
| 732 |
+
}
|
| 733 |
+
|
| 734 |
+
req = httptest.NewRequest(http.MethodGet, "/v0/management/ampcode/upstream-api-key", nil)
|
| 735 |
+
w = httptest.NewRecorder()
|
| 736 |
+
r.ServeHTTP(w, req)
|
| 737 |
+
|
| 738 |
+
var resp map[string]string
|
| 739 |
+
if err := json.Unmarshal(w.Body.Bytes(), &resp); err != nil {
|
| 740 |
+
t.Fatalf("failed to unmarshal: %v", err)
|
| 741 |
+
}
|
| 742 |
+
|
| 743 |
+
if resp["upstream-api-key"] != "" {
|
| 744 |
+
t.Errorf("expected empty string, got %q", resp["upstream-api-key"])
|
| 745 |
+
}
|
| 746 |
+
}
|
| 747 |
+
|
| 748 |
+
// TestPutAmpRestrictManagementToLocalhost_VerifyState verifies PUT updates localhost restriction.
|
| 749 |
+
func TestPutAmpRestrictManagementToLocalhost_VerifyState(t *testing.T) {
|
| 750 |
+
h, _ := newAmpTestHandler(t)
|
| 751 |
+
r := setupAmpRouter(h)
|
| 752 |
+
|
| 753 |
+
body := `{"value": false}`
|
| 754 |
+
req := httptest.NewRequest(http.MethodPut, "/v0/management/ampcode/restrict-management-to-localhost", bytes.NewBufferString(body))
|
| 755 |
+
req.Header.Set("Content-Type", "application/json")
|
| 756 |
+
w := httptest.NewRecorder()
|
| 757 |
+
r.ServeHTTP(w, req)
|
| 758 |
+
|
| 759 |
+
if w.Code != http.StatusOK {
|
| 760 |
+
t.Fatalf("PUT failed: status %d", w.Code)
|
| 761 |
+
}
|
| 762 |
+
|
| 763 |
+
req = httptest.NewRequest(http.MethodGet, "/v0/management/ampcode/restrict-management-to-localhost", nil)
|
| 764 |
+
w = httptest.NewRecorder()
|
| 765 |
+
r.ServeHTTP(w, req)
|
| 766 |
+
|
| 767 |
+
var resp map[string]bool
|
| 768 |
+
if err := json.Unmarshal(w.Body.Bytes(), &resp); err != nil {
|
| 769 |
+
t.Fatalf("failed to unmarshal: %v", err)
|
| 770 |
+
}
|
| 771 |
+
|
| 772 |
+
if resp["restrict-management-to-localhost"] != false {
|
| 773 |
+
t.Error("expected false after update")
|
| 774 |
+
}
|
| 775 |
+
}
|
| 776 |
+
|
| 777 |
+
// TestPutAmpForceModelMappings_VerifyState verifies PUT updates force-model-mappings setting.
|
| 778 |
+
func TestPutAmpForceModelMappings_VerifyState(t *testing.T) {
|
| 779 |
+
h, _ := newAmpTestHandler(t)
|
| 780 |
+
r := setupAmpRouter(h)
|
| 781 |
+
|
| 782 |
+
body := `{"value": true}`
|
| 783 |
+
req := httptest.NewRequest(http.MethodPut, "/v0/management/ampcode/force-model-mappings", bytes.NewBufferString(body))
|
| 784 |
+
req.Header.Set("Content-Type", "application/json")
|
| 785 |
+
w := httptest.NewRecorder()
|
| 786 |
+
r.ServeHTTP(w, req)
|
| 787 |
+
|
| 788 |
+
if w.Code != http.StatusOK {
|
| 789 |
+
t.Fatalf("PUT failed: status %d", w.Code)
|
| 790 |
+
}
|
| 791 |
+
|
| 792 |
+
req = httptest.NewRequest(http.MethodGet, "/v0/management/ampcode/force-model-mappings", nil)
|
| 793 |
+
w = httptest.NewRecorder()
|
| 794 |
+
r.ServeHTTP(w, req)
|
| 795 |
+
|
| 796 |
+
var resp map[string]bool
|
| 797 |
+
if err := json.Unmarshal(w.Body.Bytes(), &resp); err != nil {
|
| 798 |
+
t.Fatalf("failed to unmarshal: %v", err)
|
| 799 |
+
}
|
| 800 |
+
|
| 801 |
+
if resp["force-model-mappings"] != true {
|
| 802 |
+
t.Error("expected true after update")
|
| 803 |
+
}
|
| 804 |
+
}
|
| 805 |
+
|
| 806 |
+
// TestPutBoolField_EmptyObject verifies PUT with empty object returns 400.
|
| 807 |
+
func TestPutBoolField_EmptyObject(t *testing.T) {
|
| 808 |
+
h, _ := newAmpTestHandler(t)
|
| 809 |
+
r := setupAmpRouter(h)
|
| 810 |
+
|
| 811 |
+
body := `{}`
|
| 812 |
+
req := httptest.NewRequest(http.MethodPut, "/v0/management/ampcode/force-model-mappings", bytes.NewBufferString(body))
|
| 813 |
+
req.Header.Set("Content-Type", "application/json")
|
| 814 |
+
w := httptest.NewRecorder()
|
| 815 |
+
r.ServeHTTP(w, req)
|
| 816 |
+
|
| 817 |
+
if w.Code != http.StatusBadRequest {
|
| 818 |
+
t.Fatalf("expected status %d for empty object, got %d", http.StatusBadRequest, w.Code)
|
| 819 |
+
}
|
| 820 |
+
}
|
| 821 |
+
|
| 822 |
+
// TestComplexMappingsWorkflow tests a full workflow: PUT, PATCH, DELETE, and GET.
|
| 823 |
+
func TestComplexMappingsWorkflow(t *testing.T) {
|
| 824 |
+
h, _ := newAmpTestHandler(t)
|
| 825 |
+
r := setupAmpRouter(h)
|
| 826 |
+
|
| 827 |
+
putBody := `{"value": [{"from": "m1", "to": "t1"}, {"from": "m2", "to": "t2"}, {"from": "m3", "to": "t3"}, {"from": "m4", "to": "t4"}]}`
|
| 828 |
+
req := httptest.NewRequest(http.MethodPut, "/v0/management/ampcode/model-mappings", bytes.NewBufferString(putBody))
|
| 829 |
+
req.Header.Set("Content-Type", "application/json")
|
| 830 |
+
w := httptest.NewRecorder()
|
| 831 |
+
r.ServeHTTP(w, req)
|
| 832 |
+
|
| 833 |
+
patchBody := `{"value": [{"from": "m2", "to": "t2-updated"}, {"from": "m5", "to": "t5"}]}`
|
| 834 |
+
req = httptest.NewRequest(http.MethodPatch, "/v0/management/ampcode/model-mappings", bytes.NewBufferString(patchBody))
|
| 835 |
+
req.Header.Set("Content-Type", "application/json")
|
| 836 |
+
w = httptest.NewRecorder()
|
| 837 |
+
r.ServeHTTP(w, req)
|
| 838 |
+
|
| 839 |
+
delBody := `{"value": ["m1", "m3"]}`
|
| 840 |
+
req = httptest.NewRequest(http.MethodDelete, "/v0/management/ampcode/model-mappings", bytes.NewBufferString(delBody))
|
| 841 |
+
req.Header.Set("Content-Type", "application/json")
|
| 842 |
+
w = httptest.NewRecorder()
|
| 843 |
+
r.ServeHTTP(w, req)
|
| 844 |
+
|
| 845 |
+
req = httptest.NewRequest(http.MethodGet, "/v0/management/ampcode/model-mappings", nil)
|
| 846 |
+
w = httptest.NewRecorder()
|
| 847 |
+
r.ServeHTTP(w, req)
|
| 848 |
+
|
| 849 |
+
var resp map[string][]config.AmpModelMapping
|
| 850 |
+
if err := json.Unmarshal(w.Body.Bytes(), &resp); err != nil {
|
| 851 |
+
t.Fatalf("failed to unmarshal: %v", err)
|
| 852 |
+
}
|
| 853 |
+
|
| 854 |
+
mappings := resp["model-mappings"]
|
| 855 |
+
if len(mappings) != 3 {
|
| 856 |
+
t.Fatalf("expected 3 mappings (m2, m4, m5), got %d", len(mappings))
|
| 857 |
+
}
|
| 858 |
+
|
| 859 |
+
expected := map[string]string{"m2": "t2-updated", "m4": "t4", "m5": "t5"}
|
| 860 |
+
found := make(map[string]string)
|
| 861 |
+
for _, m := range mappings {
|
| 862 |
+
found[m.From] = m.To
|
| 863 |
+
}
|
| 864 |
+
|
| 865 |
+
for from, to := range expected {
|
| 866 |
+
if found[from] != to {
|
| 867 |
+
t.Errorf("mapping %s: expected %q, got %q", from, to, found[from])
|
| 868 |
+
}
|
| 869 |
+
}
|
| 870 |
+
}
|
| 871 |
+
|
| 872 |
+
// TestNilHandlerGetAmpCode verifies handler works with empty config.
|
| 873 |
+
func TestNilHandlerGetAmpCode(t *testing.T) {
|
| 874 |
+
cfg := &config.Config{}
|
| 875 |
+
h := management.NewHandler(cfg, "", nil)
|
| 876 |
+
r := setupAmpRouter(h)
|
| 877 |
+
|
| 878 |
+
req := httptest.NewRequest(http.MethodGet, "/v0/management/ampcode", nil)
|
| 879 |
+
w := httptest.NewRecorder()
|
| 880 |
+
r.ServeHTTP(w, req)
|
| 881 |
+
|
| 882 |
+
if w.Code != http.StatusOK {
|
| 883 |
+
t.Fatalf("expected status %d, got %d", http.StatusOK, w.Code)
|
| 884 |
+
}
|
| 885 |
+
}
|
| 886 |
+
|
| 887 |
+
// TestEmptyConfigGetAmpModelMappings verifies GET returns empty array for fresh config.
|
| 888 |
+
func TestEmptyConfigGetAmpModelMappings(t *testing.T) {
|
| 889 |
+
cfg := &config.Config{}
|
| 890 |
+
tmpDir := t.TempDir()
|
| 891 |
+
configPath := filepath.Join(tmpDir, "config.yaml")
|
| 892 |
+
if err := os.WriteFile(configPath, []byte("port: 8080\n"), 0644); err != nil {
|
| 893 |
+
t.Fatalf("failed to write config: %v", err)
|
| 894 |
+
}
|
| 895 |
+
|
| 896 |
+
h := management.NewHandler(cfg, configPath, nil)
|
| 897 |
+
r := setupAmpRouter(h)
|
| 898 |
+
|
| 899 |
+
req := httptest.NewRequest(http.MethodGet, "/v0/management/ampcode/model-mappings", nil)
|
| 900 |
+
w := httptest.NewRecorder()
|
| 901 |
+
r.ServeHTTP(w, req)
|
| 902 |
+
|
| 903 |
+
if w.Code != http.StatusOK {
|
| 904 |
+
t.Fatalf("expected status %d, got %d", http.StatusOK, w.Code)
|
| 905 |
+
}
|
| 906 |
+
|
| 907 |
+
var resp map[string][]config.AmpModelMapping
|
| 908 |
+
if err := json.Unmarshal(w.Body.Bytes(), &resp); err != nil {
|
| 909 |
+
t.Fatalf("failed to unmarshal: %v", err)
|
| 910 |
+
}
|
| 911 |
+
|
| 912 |
+
if len(resp["model-mappings"]) != 0 {
|
| 913 |
+
t.Errorf("expected 0 mappings, got %d", len(resp["model-mappings"]))
|
| 914 |
+
}
|
| 915 |
+
}
|
test/config_migration_test.go
ADDED
|
@@ -0,0 +1,195 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
package test
|
| 2 |
+
|
| 3 |
+
import (
|
| 4 |
+
"os"
|
| 5 |
+
"path/filepath"
|
| 6 |
+
"strings"
|
| 7 |
+
"testing"
|
| 8 |
+
|
| 9 |
+
"github.com/router-for-me/CLIProxyAPI/v6/internal/config"
|
| 10 |
+
)
|
| 11 |
+
|
| 12 |
+
func TestLegacyConfigMigration(t *testing.T) {
|
| 13 |
+
t.Run("onlyLegacyFields", func(t *testing.T) {
|
| 14 |
+
path := writeConfig(t, `
|
| 15 |
+
port: 8080
|
| 16 |
+
generative-language-api-key:
|
| 17 |
+
- "legacy-gemini-1"
|
| 18 |
+
openai-compatibility:
|
| 19 |
+
- name: "legacy-provider"
|
| 20 |
+
base-url: "https://example.com"
|
| 21 |
+
api-keys:
|
| 22 |
+
- "legacy-openai-1"
|
| 23 |
+
amp-upstream-url: "https://amp.example.com"
|
| 24 |
+
amp-upstream-api-key: "amp-legacy-key"
|
| 25 |
+
amp-restrict-management-to-localhost: false
|
| 26 |
+
amp-model-mappings:
|
| 27 |
+
- from: "old-model"
|
| 28 |
+
to: "new-model"
|
| 29 |
+
`)
|
| 30 |
+
cfg, err := config.LoadConfig(path)
|
| 31 |
+
if err != nil {
|
| 32 |
+
t.Fatalf("load legacy config: %v", err)
|
| 33 |
+
}
|
| 34 |
+
if got := len(cfg.GeminiKey); got != 1 || cfg.GeminiKey[0].APIKey != "legacy-gemini-1" {
|
| 35 |
+
t.Fatalf("gemini migration mismatch: %+v", cfg.GeminiKey)
|
| 36 |
+
}
|
| 37 |
+
if got := len(cfg.OpenAICompatibility); got != 1 {
|
| 38 |
+
t.Fatalf("expected 1 openai-compat provider, got %d", got)
|
| 39 |
+
}
|
| 40 |
+
if entries := cfg.OpenAICompatibility[0].APIKeyEntries; len(entries) != 1 || entries[0].APIKey != "legacy-openai-1" {
|
| 41 |
+
t.Fatalf("openai-compat migration mismatch: %+v", entries)
|
| 42 |
+
}
|
| 43 |
+
if cfg.AmpCode.UpstreamURL != "https://amp.example.com" || cfg.AmpCode.UpstreamAPIKey != "amp-legacy-key" {
|
| 44 |
+
t.Fatalf("amp migration failed: %+v", cfg.AmpCode)
|
| 45 |
+
}
|
| 46 |
+
if cfg.AmpCode.RestrictManagementToLocalhost {
|
| 47 |
+
t.Fatalf("expected amp restriction to be false after migration")
|
| 48 |
+
}
|
| 49 |
+
if got := len(cfg.AmpCode.ModelMappings); got != 1 || cfg.AmpCode.ModelMappings[0].From != "old-model" {
|
| 50 |
+
t.Fatalf("amp mappings migration mismatch: %+v", cfg.AmpCode.ModelMappings)
|
| 51 |
+
}
|
| 52 |
+
updated := readFile(t, path)
|
| 53 |
+
if strings.Contains(updated, "generative-language-api-key") {
|
| 54 |
+
t.Fatalf("legacy gemini key still present:\n%s", updated)
|
| 55 |
+
}
|
| 56 |
+
if strings.Contains(updated, "amp-upstream-url") || strings.Contains(updated, "amp-restrict-management-to-localhost") {
|
| 57 |
+
t.Fatalf("legacy amp keys still present:\n%s", updated)
|
| 58 |
+
}
|
| 59 |
+
if strings.Contains(updated, "\n api-keys:") {
|
| 60 |
+
t.Fatalf("legacy openai compat keys still present:\n%s", updated)
|
| 61 |
+
}
|
| 62 |
+
})
|
| 63 |
+
|
| 64 |
+
t.Run("mixedLegacyAndNewFields", func(t *testing.T) {
|
| 65 |
+
path := writeConfig(t, `
|
| 66 |
+
gemini-api-key:
|
| 67 |
+
- api-key: "new-gemini"
|
| 68 |
+
generative-language-api-key:
|
| 69 |
+
- "new-gemini"
|
| 70 |
+
- "legacy-gemini-only"
|
| 71 |
+
openai-compatibility:
|
| 72 |
+
- name: "mixed-provider"
|
| 73 |
+
base-url: "https://mixed.example.com"
|
| 74 |
+
api-key-entries:
|
| 75 |
+
- api-key: "new-entry"
|
| 76 |
+
api-keys:
|
| 77 |
+
- "legacy-entry"
|
| 78 |
+
- "new-entry"
|
| 79 |
+
`)
|
| 80 |
+
cfg, err := config.LoadConfig(path)
|
| 81 |
+
if err != nil {
|
| 82 |
+
t.Fatalf("load mixed config: %v", err)
|
| 83 |
+
}
|
| 84 |
+
if got := len(cfg.GeminiKey); got != 2 {
|
| 85 |
+
t.Fatalf("expected 2 gemini entries, got %d: %+v", got, cfg.GeminiKey)
|
| 86 |
+
}
|
| 87 |
+
seen := make(map[string]struct{}, len(cfg.GeminiKey))
|
| 88 |
+
for _, entry := range cfg.GeminiKey {
|
| 89 |
+
if _, exists := seen[entry.APIKey]; exists {
|
| 90 |
+
t.Fatalf("duplicate gemini key %q after migration", entry.APIKey)
|
| 91 |
+
}
|
| 92 |
+
seen[entry.APIKey] = struct{}{}
|
| 93 |
+
}
|
| 94 |
+
provider := cfg.OpenAICompatibility[0]
|
| 95 |
+
if got := len(provider.APIKeyEntries); got != 2 {
|
| 96 |
+
t.Fatalf("expected 2 openai entries, got %d: %+v", got, provider.APIKeyEntries)
|
| 97 |
+
}
|
| 98 |
+
entrySeen := make(map[string]struct{}, len(provider.APIKeyEntries))
|
| 99 |
+
for _, entry := range provider.APIKeyEntries {
|
| 100 |
+
if _, ok := entrySeen[entry.APIKey]; ok {
|
| 101 |
+
t.Fatalf("duplicate openai key %q after migration", entry.APIKey)
|
| 102 |
+
}
|
| 103 |
+
entrySeen[entry.APIKey] = struct{}{}
|
| 104 |
+
}
|
| 105 |
+
})
|
| 106 |
+
|
| 107 |
+
t.Run("onlyNewFields", func(t *testing.T) {
|
| 108 |
+
path := writeConfig(t, `
|
| 109 |
+
gemini-api-key:
|
| 110 |
+
- api-key: "new-only"
|
| 111 |
+
openai-compatibility:
|
| 112 |
+
- name: "new-only-provider"
|
| 113 |
+
base-url: "https://new-only.example.com"
|
| 114 |
+
api-key-entries:
|
| 115 |
+
- api-key: "new-only-entry"
|
| 116 |
+
ampcode:
|
| 117 |
+
upstream-url: "https://amp.new"
|
| 118 |
+
upstream-api-key: "new-amp-key"
|
| 119 |
+
restrict-management-to-localhost: true
|
| 120 |
+
model-mappings:
|
| 121 |
+
- from: "a"
|
| 122 |
+
to: "b"
|
| 123 |
+
`)
|
| 124 |
+
cfg, err := config.LoadConfig(path)
|
| 125 |
+
if err != nil {
|
| 126 |
+
t.Fatalf("load new config: %v", err)
|
| 127 |
+
}
|
| 128 |
+
if len(cfg.GeminiKey) != 1 || cfg.GeminiKey[0].APIKey != "new-only" {
|
| 129 |
+
t.Fatalf("unexpected gemini entries: %+v", cfg.GeminiKey)
|
| 130 |
+
}
|
| 131 |
+
if len(cfg.OpenAICompatibility) != 1 || len(cfg.OpenAICompatibility[0].APIKeyEntries) != 1 {
|
| 132 |
+
t.Fatalf("unexpected openai compat entries: %+v", cfg.OpenAICompatibility)
|
| 133 |
+
}
|
| 134 |
+
if cfg.AmpCode.UpstreamURL != "https://amp.new" || cfg.AmpCode.UpstreamAPIKey != "new-amp-key" {
|
| 135 |
+
t.Fatalf("unexpected amp config: %+v", cfg.AmpCode)
|
| 136 |
+
}
|
| 137 |
+
})
|
| 138 |
+
|
| 139 |
+
t.Run("duplicateNamesDifferentBase", func(t *testing.T) {
|
| 140 |
+
path := writeConfig(t, `
|
| 141 |
+
openai-compatibility:
|
| 142 |
+
- name: "dup-provider"
|
| 143 |
+
base-url: "https://provider-a"
|
| 144 |
+
api-keys:
|
| 145 |
+
- "key-a"
|
| 146 |
+
- name: "dup-provider"
|
| 147 |
+
base-url: "https://provider-b"
|
| 148 |
+
api-keys:
|
| 149 |
+
- "key-b"
|
| 150 |
+
`)
|
| 151 |
+
cfg, err := config.LoadConfig(path)
|
| 152 |
+
if err != nil {
|
| 153 |
+
t.Fatalf("load duplicate config: %v", err)
|
| 154 |
+
}
|
| 155 |
+
if len(cfg.OpenAICompatibility) != 2 {
|
| 156 |
+
t.Fatalf("expected 2 providers, got %d", len(cfg.OpenAICompatibility))
|
| 157 |
+
}
|
| 158 |
+
for _, entry := range cfg.OpenAICompatibility {
|
| 159 |
+
if len(entry.APIKeyEntries) != 1 {
|
| 160 |
+
t.Fatalf("expected 1 key entry per provider: %+v", entry)
|
| 161 |
+
}
|
| 162 |
+
switch entry.BaseURL {
|
| 163 |
+
case "https://provider-a":
|
| 164 |
+
if entry.APIKeyEntries[0].APIKey != "key-a" {
|
| 165 |
+
t.Fatalf("provider-a key mismatch: %+v", entry.APIKeyEntries)
|
| 166 |
+
}
|
| 167 |
+
case "https://provider-b":
|
| 168 |
+
if entry.APIKeyEntries[0].APIKey != "key-b" {
|
| 169 |
+
t.Fatalf("provider-b key mismatch: %+v", entry.APIKeyEntries)
|
| 170 |
+
}
|
| 171 |
+
default:
|
| 172 |
+
t.Fatalf("unexpected provider base url: %s", entry.BaseURL)
|
| 173 |
+
}
|
| 174 |
+
}
|
| 175 |
+
})
|
| 176 |
+
}
|
| 177 |
+
|
| 178 |
+
// writeConfig materializes the given YAML content as "config.yaml" inside a
// fresh temp directory and returns its path. The content is trimmed and given
// a single trailing newline so fixtures can be written as raw string blocks.
func writeConfig(t *testing.T, content string) string {
	t.Helper()
	path := filepath.Join(t.TempDir(), "config.yaml")
	body := strings.TrimSpace(content) + "\n"
	if err := os.WriteFile(path, []byte(body), 0o644); err != nil {
		t.Fatalf("write temp config: %v", err)
	}
	return path
}
|
| 187 |
+
|
| 188 |
+
// readFile returns the entire contents of path as a string, failing the test
// immediately if the file cannot be read.
func readFile(t *testing.T, path string) string {
	t.Helper()
	raw, err := os.ReadFile(path)
	if err != nil {
		t.Fatalf("read temp config: %v", err)
	}
	return string(raw)
}
|
test/gemini3_thinking_level_test.go
ADDED
|
@@ -0,0 +1,423 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
package test
|
| 2 |
+
|
| 3 |
+
import (
|
| 4 |
+
"fmt"
|
| 5 |
+
"testing"
|
| 6 |
+
"time"
|
| 7 |
+
|
| 8 |
+
"github.com/router-for-me/CLIProxyAPI/v6/internal/registry"
|
| 9 |
+
"github.com/router-for-me/CLIProxyAPI/v6/internal/util"
|
| 10 |
+
"github.com/tidwall/gjson"
|
| 11 |
+
)
|
| 12 |
+
|
| 13 |
+
// registerGemini3Models loads Gemini 3 models into the registry for testing.
|
| 14 |
+
func registerGemini3Models(t *testing.T) func() {
|
| 15 |
+
t.Helper()
|
| 16 |
+
reg := registry.GetGlobalRegistry()
|
| 17 |
+
uid := fmt.Sprintf("gemini3-test-%d", time.Now().UnixNano())
|
| 18 |
+
reg.RegisterClient(uid+"-gemini", "gemini", registry.GetGeminiModels())
|
| 19 |
+
reg.RegisterClient(uid+"-aistudio", "aistudio", registry.GetAIStudioModels())
|
| 20 |
+
return func() {
|
| 21 |
+
reg.UnregisterClient(uid + "-gemini")
|
| 22 |
+
reg.UnregisterClient(uid + "-aistudio")
|
| 23 |
+
}
|
| 24 |
+
}
|
| 25 |
+
|
| 26 |
+
func TestIsGemini3Model(t *testing.T) {
|
| 27 |
+
cases := []struct {
|
| 28 |
+
model string
|
| 29 |
+
expected bool
|
| 30 |
+
}{
|
| 31 |
+
{"gemini-3-pro-preview", true},
|
| 32 |
+
{"gemini-3-flash-preview", true},
|
| 33 |
+
{"gemini_3_pro_preview", true},
|
| 34 |
+
{"gemini-3-pro", true},
|
| 35 |
+
{"gemini-3-flash", true},
|
| 36 |
+
{"GEMINI-3-PRO-PREVIEW", true},
|
| 37 |
+
{"gemini-2.5-pro", false},
|
| 38 |
+
{"gemini-2.5-flash", false},
|
| 39 |
+
{"gpt-5", false},
|
| 40 |
+
{"claude-sonnet-4-5", false},
|
| 41 |
+
{"", false},
|
| 42 |
+
}
|
| 43 |
+
|
| 44 |
+
for _, cs := range cases {
|
| 45 |
+
t.Run(cs.model, func(t *testing.T) {
|
| 46 |
+
got := util.IsGemini3Model(cs.model)
|
| 47 |
+
if got != cs.expected {
|
| 48 |
+
t.Fatalf("IsGemini3Model(%q) = %v, want %v", cs.model, got, cs.expected)
|
| 49 |
+
}
|
| 50 |
+
})
|
| 51 |
+
}
|
| 52 |
+
}
|
| 53 |
+
|
| 54 |
+
func TestIsGemini3ProModel(t *testing.T) {
|
| 55 |
+
cases := []struct {
|
| 56 |
+
model string
|
| 57 |
+
expected bool
|
| 58 |
+
}{
|
| 59 |
+
{"gemini-3-pro-preview", true},
|
| 60 |
+
{"gemini_3_pro_preview", true},
|
| 61 |
+
{"gemini-3-pro", true},
|
| 62 |
+
{"GEMINI-3-PRO-PREVIEW", true},
|
| 63 |
+
{"gemini-3-flash-preview", false},
|
| 64 |
+
{"gemini-3-flash", false},
|
| 65 |
+
{"gemini-2.5-pro", false},
|
| 66 |
+
{"", false},
|
| 67 |
+
}
|
| 68 |
+
|
| 69 |
+
for _, cs := range cases {
|
| 70 |
+
t.Run(cs.model, func(t *testing.T) {
|
| 71 |
+
got := util.IsGemini3ProModel(cs.model)
|
| 72 |
+
if got != cs.expected {
|
| 73 |
+
t.Fatalf("IsGemini3ProModel(%q) = %v, want %v", cs.model, got, cs.expected)
|
| 74 |
+
}
|
| 75 |
+
})
|
| 76 |
+
}
|
| 77 |
+
}
|
| 78 |
+
|
| 79 |
+
func TestIsGemini3FlashModel(t *testing.T) {
|
| 80 |
+
cases := []struct {
|
| 81 |
+
model string
|
| 82 |
+
expected bool
|
| 83 |
+
}{
|
| 84 |
+
{"gemini-3-flash-preview", true},
|
| 85 |
+
{"gemini_3_flash_preview", true},
|
| 86 |
+
{"gemini-3-flash", true},
|
| 87 |
+
{"GEMINI-3-FLASH-PREVIEW", true},
|
| 88 |
+
{"gemini-3-pro-preview", false},
|
| 89 |
+
{"gemini-3-pro", false},
|
| 90 |
+
{"gemini-2.5-flash", false},
|
| 91 |
+
{"", false},
|
| 92 |
+
}
|
| 93 |
+
|
| 94 |
+
for _, cs := range cases {
|
| 95 |
+
t.Run(cs.model, func(t *testing.T) {
|
| 96 |
+
got := util.IsGemini3FlashModel(cs.model)
|
| 97 |
+
if got != cs.expected {
|
| 98 |
+
t.Fatalf("IsGemini3FlashModel(%q) = %v, want %v", cs.model, got, cs.expected)
|
| 99 |
+
}
|
| 100 |
+
})
|
| 101 |
+
}
|
| 102 |
+
}
|
| 103 |
+
|
| 104 |
+
func TestValidateGemini3ThinkingLevel(t *testing.T) {
|
| 105 |
+
cases := []struct {
|
| 106 |
+
name string
|
| 107 |
+
model string
|
| 108 |
+
level string
|
| 109 |
+
wantOK bool
|
| 110 |
+
wantVal string
|
| 111 |
+
}{
|
| 112 |
+
// Gemini 3 Pro: supports "low", "high"
|
| 113 |
+
{"pro-low", "gemini-3-pro-preview", "low", true, "low"},
|
| 114 |
+
{"pro-high", "gemini-3-pro-preview", "high", true, "high"},
|
| 115 |
+
{"pro-minimal-invalid", "gemini-3-pro-preview", "minimal", false, ""},
|
| 116 |
+
{"pro-medium-invalid", "gemini-3-pro-preview", "medium", false, ""},
|
| 117 |
+
|
| 118 |
+
// Gemini 3 Flash: supports "minimal", "low", "medium", "high"
|
| 119 |
+
{"flash-minimal", "gemini-3-flash-preview", "minimal", true, "minimal"},
|
| 120 |
+
{"flash-low", "gemini-3-flash-preview", "low", true, "low"},
|
| 121 |
+
{"flash-medium", "gemini-3-flash-preview", "medium", true, "medium"},
|
| 122 |
+
{"flash-high", "gemini-3-flash-preview", "high", true, "high"},
|
| 123 |
+
|
| 124 |
+
// Case insensitivity
|
| 125 |
+
{"flash-LOW-case", "gemini-3-flash-preview", "LOW", true, "low"},
|
| 126 |
+
{"flash-High-case", "gemini-3-flash-preview", "High", true, "high"},
|
| 127 |
+
{"pro-HIGH-case", "gemini-3-pro-preview", "HIGH", true, "high"},
|
| 128 |
+
|
| 129 |
+
// Invalid levels
|
| 130 |
+
{"flash-invalid", "gemini-3-flash-preview", "xhigh", false, ""},
|
| 131 |
+
{"flash-invalid-auto", "gemini-3-flash-preview", "auto", false, ""},
|
| 132 |
+
{"flash-empty", "gemini-3-flash-preview", "", false, ""},
|
| 133 |
+
|
| 134 |
+
// Non-Gemini 3 models
|
| 135 |
+
{"non-gemini3", "gemini-2.5-pro", "high", false, ""},
|
| 136 |
+
{"gpt5", "gpt-5", "high", false, ""},
|
| 137 |
+
}
|
| 138 |
+
|
| 139 |
+
for _, cs := range cases {
|
| 140 |
+
t.Run(cs.name, func(t *testing.T) {
|
| 141 |
+
got, ok := util.ValidateGemini3ThinkingLevel(cs.model, cs.level)
|
| 142 |
+
if ok != cs.wantOK {
|
| 143 |
+
t.Fatalf("ValidateGemini3ThinkingLevel(%q, %q) ok = %v, want %v", cs.model, cs.level, ok, cs.wantOK)
|
| 144 |
+
}
|
| 145 |
+
if got != cs.wantVal {
|
| 146 |
+
t.Fatalf("ValidateGemini3ThinkingLevel(%q, %q) = %q, want %q", cs.model, cs.level, got, cs.wantVal)
|
| 147 |
+
}
|
| 148 |
+
})
|
| 149 |
+
}
|
| 150 |
+
}
|
| 151 |
+
|
| 152 |
+
func TestThinkingBudgetToGemini3Level(t *testing.T) {
|
| 153 |
+
cases := []struct {
|
| 154 |
+
name string
|
| 155 |
+
model string
|
| 156 |
+
budget int
|
| 157 |
+
wantOK bool
|
| 158 |
+
wantVal string
|
| 159 |
+
}{
|
| 160 |
+
// Gemini 3 Pro: maps to "low" or "high"
|
| 161 |
+
{"pro-dynamic", "gemini-3-pro-preview", -1, true, "high"},
|
| 162 |
+
{"pro-zero", "gemini-3-pro-preview", 0, true, "low"},
|
| 163 |
+
{"pro-small", "gemini-3-pro-preview", 1000, true, "low"},
|
| 164 |
+
{"pro-medium", "gemini-3-pro-preview", 8000, true, "low"},
|
| 165 |
+
{"pro-large", "gemini-3-pro-preview", 20000, true, "high"},
|
| 166 |
+
{"pro-huge", "gemini-3-pro-preview", 50000, true, "high"},
|
| 167 |
+
|
| 168 |
+
// Gemini 3 Flash: maps to "minimal", "low", "medium", "high"
|
| 169 |
+
{"flash-dynamic", "gemini-3-flash-preview", -1, true, "high"},
|
| 170 |
+
{"flash-zero", "gemini-3-flash-preview", 0, true, "minimal"},
|
| 171 |
+
{"flash-tiny", "gemini-3-flash-preview", 500, true, "minimal"},
|
| 172 |
+
{"flash-small", "gemini-3-flash-preview", 1000, true, "low"},
|
| 173 |
+
{"flash-medium-val", "gemini-3-flash-preview", 8000, true, "medium"},
|
| 174 |
+
{"flash-large", "gemini-3-flash-preview", 20000, true, "high"},
|
| 175 |
+
{"flash-huge", "gemini-3-flash-preview", 50000, true, "high"},
|
| 176 |
+
|
| 177 |
+
// Non-Gemini 3 models should return false
|
| 178 |
+
{"gemini25-budget", "gemini-2.5-pro", 8000, false, ""},
|
| 179 |
+
{"gpt5-budget", "gpt-5", 8000, false, ""},
|
| 180 |
+
}
|
| 181 |
+
|
| 182 |
+
for _, cs := range cases {
|
| 183 |
+
t.Run(cs.name, func(t *testing.T) {
|
| 184 |
+
got, ok := util.ThinkingBudgetToGemini3Level(cs.model, cs.budget)
|
| 185 |
+
if ok != cs.wantOK {
|
| 186 |
+
t.Fatalf("ThinkingBudgetToGemini3Level(%q, %d) ok = %v, want %v", cs.model, cs.budget, ok, cs.wantOK)
|
| 187 |
+
}
|
| 188 |
+
if got != cs.wantVal {
|
| 189 |
+
t.Fatalf("ThinkingBudgetToGemini3Level(%q, %d) = %q, want %q", cs.model, cs.budget, got, cs.wantVal)
|
| 190 |
+
}
|
| 191 |
+
})
|
| 192 |
+
}
|
| 193 |
+
}
|
| 194 |
+
|
| 195 |
+
func TestApplyGemini3ThinkingLevelFromMetadata(t *testing.T) {
|
| 196 |
+
cleanup := registerGemini3Models(t)
|
| 197 |
+
defer cleanup()
|
| 198 |
+
|
| 199 |
+
cases := []struct {
|
| 200 |
+
name string
|
| 201 |
+
model string
|
| 202 |
+
metadata map[string]any
|
| 203 |
+
inputBody string
|
| 204 |
+
wantLevel string
|
| 205 |
+
wantInclude bool
|
| 206 |
+
wantNoChange bool
|
| 207 |
+
}{
|
| 208 |
+
{
|
| 209 |
+
name: "flash-minimal-from-suffix",
|
| 210 |
+
model: "gemini-3-flash-preview",
|
| 211 |
+
metadata: map[string]any{"reasoning_effort": "minimal"},
|
| 212 |
+
inputBody: `{"generationConfig":{"thinkingConfig":{"includeThoughts":true}}}`,
|
| 213 |
+
wantLevel: "minimal",
|
| 214 |
+
wantInclude: true,
|
| 215 |
+
},
|
| 216 |
+
{
|
| 217 |
+
name: "flash-medium-from-suffix",
|
| 218 |
+
model: "gemini-3-flash-preview",
|
| 219 |
+
metadata: map[string]any{"reasoning_effort": "medium"},
|
| 220 |
+
inputBody: `{"generationConfig":{"thinkingConfig":{"includeThoughts":true}}}`,
|
| 221 |
+
wantLevel: "medium",
|
| 222 |
+
wantInclude: true,
|
| 223 |
+
},
|
| 224 |
+
{
|
| 225 |
+
name: "pro-high-from-suffix",
|
| 226 |
+
model: "gemini-3-pro-preview",
|
| 227 |
+
metadata: map[string]any{"reasoning_effort": "high"},
|
| 228 |
+
inputBody: `{"generationConfig":{"thinkingConfig":{"includeThoughts":true}}}`,
|
| 229 |
+
wantLevel: "high",
|
| 230 |
+
wantInclude: true,
|
| 231 |
+
},
|
| 232 |
+
{
|
| 233 |
+
name: "no-metadata-no-change",
|
| 234 |
+
model: "gemini-3-flash-preview",
|
| 235 |
+
metadata: nil,
|
| 236 |
+
inputBody: `{"generationConfig":{"thinkingConfig":{"includeThoughts":true}}}`,
|
| 237 |
+
wantNoChange: true,
|
| 238 |
+
},
|
| 239 |
+
{
|
| 240 |
+
name: "non-gemini3-no-change",
|
| 241 |
+
model: "gemini-2.5-pro",
|
| 242 |
+
metadata: map[string]any{"reasoning_effort": "high"},
|
| 243 |
+
inputBody: `{"generationConfig":{"thinkingConfig":{"thinkingBudget":-1}}}`,
|
| 244 |
+
wantNoChange: true,
|
| 245 |
+
},
|
| 246 |
+
{
|
| 247 |
+
name: "invalid-level-no-change",
|
| 248 |
+
model: "gemini-3-flash-preview",
|
| 249 |
+
metadata: map[string]any{"reasoning_effort": "xhigh"},
|
| 250 |
+
inputBody: `{"generationConfig":{"thinkingConfig":{"includeThoughts":true}}}`,
|
| 251 |
+
wantNoChange: true,
|
| 252 |
+
},
|
| 253 |
+
}
|
| 254 |
+
|
| 255 |
+
for _, cs := range cases {
|
| 256 |
+
t.Run(cs.name, func(t *testing.T) {
|
| 257 |
+
input := []byte(cs.inputBody)
|
| 258 |
+
result := util.ApplyGemini3ThinkingLevelFromMetadata(cs.model, cs.metadata, input)
|
| 259 |
+
|
| 260 |
+
if cs.wantNoChange {
|
| 261 |
+
if string(result) != cs.inputBody {
|
| 262 |
+
t.Fatalf("expected no change, but got: %s", string(result))
|
| 263 |
+
}
|
| 264 |
+
return
|
| 265 |
+
}
|
| 266 |
+
|
| 267 |
+
level := gjson.GetBytes(result, "generationConfig.thinkingConfig.thinkingLevel")
|
| 268 |
+
if !level.Exists() {
|
| 269 |
+
t.Fatalf("thinkingLevel not set in result: %s", string(result))
|
| 270 |
+
}
|
| 271 |
+
if level.String() != cs.wantLevel {
|
| 272 |
+
t.Fatalf("thinkingLevel = %q, want %q", level.String(), cs.wantLevel)
|
| 273 |
+
}
|
| 274 |
+
|
| 275 |
+
include := gjson.GetBytes(result, "generationConfig.thinkingConfig.includeThoughts")
|
| 276 |
+
if cs.wantInclude && (!include.Exists() || !include.Bool()) {
|
| 277 |
+
t.Fatalf("includeThoughts should be true, got: %s", string(result))
|
| 278 |
+
}
|
| 279 |
+
})
|
| 280 |
+
}
|
| 281 |
+
}
|
| 282 |
+
|
| 283 |
+
func TestApplyGemini3ThinkingLevelFromMetadataCLI(t *testing.T) {
|
| 284 |
+
cleanup := registerGemini3Models(t)
|
| 285 |
+
defer cleanup()
|
| 286 |
+
|
| 287 |
+
cases := []struct {
|
| 288 |
+
name string
|
| 289 |
+
model string
|
| 290 |
+
metadata map[string]any
|
| 291 |
+
inputBody string
|
| 292 |
+
wantLevel string
|
| 293 |
+
wantInclude bool
|
| 294 |
+
wantNoChange bool
|
| 295 |
+
}{
|
| 296 |
+
{
|
| 297 |
+
name: "flash-minimal-from-suffix-cli",
|
| 298 |
+
model: "gemini-3-flash-preview",
|
| 299 |
+
metadata: map[string]any{"reasoning_effort": "minimal"},
|
| 300 |
+
inputBody: `{"request":{"generationConfig":{"thinkingConfig":{"includeThoughts":true}}}}`,
|
| 301 |
+
wantLevel: "minimal",
|
| 302 |
+
wantInclude: true,
|
| 303 |
+
},
|
| 304 |
+
{
|
| 305 |
+
name: "flash-low-from-suffix-cli",
|
| 306 |
+
model: "gemini-3-flash-preview",
|
| 307 |
+
metadata: map[string]any{"reasoning_effort": "low"},
|
| 308 |
+
inputBody: `{"request":{"generationConfig":{"thinkingConfig":{"includeThoughts":true}}}}`,
|
| 309 |
+
wantLevel: "low",
|
| 310 |
+
wantInclude: true,
|
| 311 |
+
},
|
| 312 |
+
{
|
| 313 |
+
name: "pro-low-from-suffix-cli",
|
| 314 |
+
model: "gemini-3-pro-preview",
|
| 315 |
+
metadata: map[string]any{"reasoning_effort": "low"},
|
| 316 |
+
inputBody: `{"request":{"generationConfig":{"thinkingConfig":{"includeThoughts":true}}}}`,
|
| 317 |
+
wantLevel: "low",
|
| 318 |
+
wantInclude: true,
|
| 319 |
+
},
|
| 320 |
+
{
|
| 321 |
+
name: "no-metadata-no-change-cli",
|
| 322 |
+
model: "gemini-3-flash-preview",
|
| 323 |
+
metadata: nil,
|
| 324 |
+
inputBody: `{"request":{"generationConfig":{"thinkingConfig":{"includeThoughts":true}}}}`,
|
| 325 |
+
wantNoChange: true,
|
| 326 |
+
},
|
| 327 |
+
{
|
| 328 |
+
name: "non-gemini3-no-change-cli",
|
| 329 |
+
model: "gemini-2.5-pro",
|
| 330 |
+
metadata: map[string]any{"reasoning_effort": "high"},
|
| 331 |
+
inputBody: `{"request":{"generationConfig":{"thinkingConfig":{"thinkingBudget":-1}}}}`,
|
| 332 |
+
wantNoChange: true,
|
| 333 |
+
},
|
| 334 |
+
}
|
| 335 |
+
|
| 336 |
+
for _, cs := range cases {
|
| 337 |
+
t.Run(cs.name, func(t *testing.T) {
|
| 338 |
+
input := []byte(cs.inputBody)
|
| 339 |
+
result := util.ApplyGemini3ThinkingLevelFromMetadataCLI(cs.model, cs.metadata, input)
|
| 340 |
+
|
| 341 |
+
if cs.wantNoChange {
|
| 342 |
+
if string(result) != cs.inputBody {
|
| 343 |
+
t.Fatalf("expected no change, but got: %s", string(result))
|
| 344 |
+
}
|
| 345 |
+
return
|
| 346 |
+
}
|
| 347 |
+
|
| 348 |
+
level := gjson.GetBytes(result, "request.generationConfig.thinkingConfig.thinkingLevel")
|
| 349 |
+
if !level.Exists() {
|
| 350 |
+
t.Fatalf("thinkingLevel not set in result: %s", string(result))
|
| 351 |
+
}
|
| 352 |
+
if level.String() != cs.wantLevel {
|
| 353 |
+
t.Fatalf("thinkingLevel = %q, want %q", level.String(), cs.wantLevel)
|
| 354 |
+
}
|
| 355 |
+
|
| 356 |
+
include := gjson.GetBytes(result, "request.generationConfig.thinkingConfig.includeThoughts")
|
| 357 |
+
if cs.wantInclude && (!include.Exists() || !include.Bool()) {
|
| 358 |
+
t.Fatalf("includeThoughts should be true, got: %s", string(result))
|
| 359 |
+
}
|
| 360 |
+
})
|
| 361 |
+
}
|
| 362 |
+
}
|
| 363 |
+
|
| 364 |
+
func TestNormalizeGeminiThinkingBudget_Gemini3Conversion(t *testing.T) {
|
| 365 |
+
cleanup := registerGemini3Models(t)
|
| 366 |
+
defer cleanup()
|
| 367 |
+
|
| 368 |
+
cases := []struct {
|
| 369 |
+
name string
|
| 370 |
+
model string
|
| 371 |
+
inputBody string
|
| 372 |
+
wantLevel string
|
| 373 |
+
wantBudget bool // if true, expect thinkingBudget instead of thinkingLevel
|
| 374 |
+
}{
|
| 375 |
+
{
|
| 376 |
+
name: "gemini3-flash-budget-to-level",
|
| 377 |
+
model: "gemini-3-flash-preview",
|
| 378 |
+
inputBody: `{"generationConfig":{"thinkingConfig":{"thinkingBudget":8000}}}`,
|
| 379 |
+
wantLevel: "medium",
|
| 380 |
+
},
|
| 381 |
+
{
|
| 382 |
+
name: "gemini3-pro-budget-to-level",
|
| 383 |
+
model: "gemini-3-pro-preview",
|
| 384 |
+
inputBody: `{"generationConfig":{"thinkingConfig":{"thinkingBudget":20000}}}`,
|
| 385 |
+
wantLevel: "high",
|
| 386 |
+
},
|
| 387 |
+
{
|
| 388 |
+
name: "gemini25-keeps-budget",
|
| 389 |
+
model: "gemini-2.5-pro",
|
| 390 |
+
inputBody: `{"generationConfig":{"thinkingConfig":{"thinkingBudget":8000}}}`,
|
| 391 |
+
wantBudget: true,
|
| 392 |
+
},
|
| 393 |
+
}
|
| 394 |
+
|
| 395 |
+
for _, cs := range cases {
|
| 396 |
+
t.Run(cs.name, func(t *testing.T) {
|
| 397 |
+
result := util.NormalizeGeminiThinkingBudget(cs.model, []byte(cs.inputBody))
|
| 398 |
+
|
| 399 |
+
if cs.wantBudget {
|
| 400 |
+
budget := gjson.GetBytes(result, "generationConfig.thinkingConfig.thinkingBudget")
|
| 401 |
+
if !budget.Exists() {
|
| 402 |
+
t.Fatalf("thinkingBudget should exist for non-Gemini3 model: %s", string(result))
|
| 403 |
+
}
|
| 404 |
+
level := gjson.GetBytes(result, "generationConfig.thinkingConfig.thinkingLevel")
|
| 405 |
+
if level.Exists() {
|
| 406 |
+
t.Fatalf("thinkingLevel should not exist for non-Gemini3 model: %s", string(result))
|
| 407 |
+
}
|
| 408 |
+
} else {
|
| 409 |
+
level := gjson.GetBytes(result, "generationConfig.thinkingConfig.thinkingLevel")
|
| 410 |
+
if !level.Exists() {
|
| 411 |
+
t.Fatalf("thinkingLevel should exist for Gemini3 model: %s", string(result))
|
| 412 |
+
}
|
| 413 |
+
if level.String() != cs.wantLevel {
|
| 414 |
+
t.Fatalf("thinkingLevel = %q, want %q", level.String(), cs.wantLevel)
|
| 415 |
+
}
|
| 416 |
+
budget := gjson.GetBytes(result, "generationConfig.thinkingConfig.thinkingBudget")
|
| 417 |
+
if budget.Exists() {
|
| 418 |
+
t.Fatalf("thinkingBudget should be removed for Gemini3 model: %s", string(result))
|
| 419 |
+
}
|
| 420 |
+
}
|
| 421 |
+
})
|
| 422 |
+
}
|
| 423 |
+
}
|
test/model_alias_thinking_suffix_test.go
ADDED
|
@@ -0,0 +1,211 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
package test
|
| 2 |
+
|
| 3 |
+
import (
|
| 4 |
+
"testing"
|
| 5 |
+
|
| 6 |
+
"github.com/router-for-me/CLIProxyAPI/v6/internal/runtime/executor"
|
| 7 |
+
"github.com/router-for-me/CLIProxyAPI/v6/internal/util"
|
| 8 |
+
"github.com/tidwall/gjson"
|
| 9 |
+
)
|
| 10 |
+
|
| 11 |
+
// TestModelAliasThinkingSuffix tests the 32 test cases defined in docs/thinking_suffix_test_cases.md
|
| 12 |
+
// These tests verify the thinking suffix parsing and application logic across different providers.
|
| 13 |
+
func TestModelAliasThinkingSuffix(t *testing.T) {
|
| 14 |
+
tests := []struct {
|
| 15 |
+
id int
|
| 16 |
+
name string
|
| 17 |
+
provider string
|
| 18 |
+
requestModel string
|
| 19 |
+
suffixType string
|
| 20 |
+
expectedField string // "thinkingBudget", "thinkingLevel", "budget_tokens", "reasoning_effort", "enable_thinking"
|
| 21 |
+
expectedValue any
|
| 22 |
+
upstreamModel string // The upstream model after alias resolution
|
| 23 |
+
isAlias bool
|
| 24 |
+
}{
|
| 25 |
+
// === 1. Antigravity Provider ===
|
| 26 |
+
// 1.1 Budget-only models (Gemini 2.5)
|
| 27 |
+
{1, "antigravity_original_numeric", "antigravity", "gemini-2.5-computer-use-preview-10-2025(1000)", "numeric", "thinkingBudget", 1000, "gemini-2.5-computer-use-preview-10-2025", false},
|
| 28 |
+
{2, "antigravity_alias_numeric", "antigravity", "gp(1000)", "numeric", "thinkingBudget", 1000, "gemini-2.5-computer-use-preview-10-2025", true},
|
| 29 |
+
// 1.2 Budget+Levels models (Gemini 3)
|
| 30 |
+
{3, "antigravity_original_numeric_to_level", "antigravity", "gemini-3-flash-preview(1000)", "numeric", "thinkingLevel", "low", "gemini-3-flash-preview", false},
|
| 31 |
+
{4, "antigravity_original_level", "antigravity", "gemini-3-flash-preview(low)", "level", "thinkingLevel", "low", "gemini-3-flash-preview", false},
|
| 32 |
+
{5, "antigravity_alias_numeric_to_level", "antigravity", "gf(1000)", "numeric", "thinkingLevel", "low", "gemini-3-flash-preview", true},
|
| 33 |
+
{6, "antigravity_alias_level", "antigravity", "gf(low)", "level", "thinkingLevel", "low", "gemini-3-flash-preview", true},
|
| 34 |
+
|
| 35 |
+
// === 2. Gemini CLI Provider ===
|
| 36 |
+
// 2.1 Budget-only models
|
| 37 |
+
{7, "gemini_cli_original_numeric", "gemini-cli", "gemini-2.5-pro(8192)", "numeric", "thinkingBudget", 8192, "gemini-2.5-pro", false},
|
| 38 |
+
{8, "gemini_cli_alias_numeric", "gemini-cli", "g25p(8192)", "numeric", "thinkingBudget", 8192, "gemini-2.5-pro", true},
|
| 39 |
+
// 2.2 Budget+Levels models
|
| 40 |
+
{9, "gemini_cli_original_numeric_to_level", "gemini-cli", "gemini-3-flash-preview(1000)", "numeric", "thinkingLevel", "low", "gemini-3-flash-preview", false},
|
| 41 |
+
{10, "gemini_cli_original_level", "gemini-cli", "gemini-3-flash-preview(low)", "level", "thinkingLevel", "low", "gemini-3-flash-preview", false},
|
| 42 |
+
{11, "gemini_cli_alias_numeric_to_level", "gemini-cli", "gf(1000)", "numeric", "thinkingLevel", "low", "gemini-3-flash-preview", true},
|
| 43 |
+
{12, "gemini_cli_alias_level", "gemini-cli", "gf(low)", "level", "thinkingLevel", "low", "gemini-3-flash-preview", true},
|
| 44 |
+
|
| 45 |
+
// === 3. Vertex Provider ===
|
| 46 |
+
// 3.1 Budget-only models
|
| 47 |
+
{13, "vertex_original_numeric", "vertex", "gemini-2.5-pro(16384)", "numeric", "thinkingBudget", 16384, "gemini-2.5-pro", false},
|
| 48 |
+
{14, "vertex_alias_numeric", "vertex", "vg25p(16384)", "numeric", "thinkingBudget", 16384, "gemini-2.5-pro", true},
|
| 49 |
+
// 3.2 Budget+Levels models
|
| 50 |
+
{15, "vertex_original_numeric_to_level", "vertex", "gemini-3-flash-preview(1000)", "numeric", "thinkingLevel", "low", "gemini-3-flash-preview", false},
|
| 51 |
+
{16, "vertex_original_level", "vertex", "gemini-3-flash-preview(low)", "level", "thinkingLevel", "low", "gemini-3-flash-preview", false},
|
| 52 |
+
{17, "vertex_alias_numeric_to_level", "vertex", "vgf(1000)", "numeric", "thinkingLevel", "low", "gemini-3-flash-preview", true},
|
| 53 |
+
{18, "vertex_alias_level", "vertex", "vgf(low)", "level", "thinkingLevel", "low", "gemini-3-flash-preview", true},
|
| 54 |
+
|
| 55 |
+
// === 4. AI Studio Provider ===
|
| 56 |
+
// 4.1 Budget-only models
|
| 57 |
+
{19, "aistudio_original_numeric", "aistudio", "gemini-2.5-pro(12000)", "numeric", "thinkingBudget", 12000, "gemini-2.5-pro", false},
|
| 58 |
+
{20, "aistudio_alias_numeric", "aistudio", "ag25p(12000)", "numeric", "thinkingBudget", 12000, "gemini-2.5-pro", true},
|
| 59 |
+
// 4.2 Budget+Levels models
|
| 60 |
+
{21, "aistudio_original_numeric_to_level", "aistudio", "gemini-3-flash-preview(1000)", "numeric", "thinkingLevel", "low", "gemini-3-flash-preview", false},
|
| 61 |
+
{22, "aistudio_original_level", "aistudio", "gemini-3-flash-preview(low)", "level", "thinkingLevel", "low", "gemini-3-flash-preview", false},
|
| 62 |
+
{23, "aistudio_alias_numeric_to_level", "aistudio", "agf(1000)", "numeric", "thinkingLevel", "low", "gemini-3-flash-preview", true},
|
| 63 |
+
{24, "aistudio_alias_level", "aistudio", "agf(low)", "level", "thinkingLevel", "low", "gemini-3-flash-preview", true},
|
| 64 |
+
|
| 65 |
+
// === 5. Claude Provider ===
|
| 66 |
+
{25, "claude_original_numeric", "claude", "claude-sonnet-4-5-20250929(16384)", "numeric", "budget_tokens", 16384, "claude-sonnet-4-5-20250929", false},
|
| 67 |
+
{26, "claude_alias_numeric", "claude", "cs45(16384)", "numeric", "budget_tokens", 16384, "claude-sonnet-4-5-20250929", true},
|
| 68 |
+
|
| 69 |
+
// === 6. Codex Provider ===
|
| 70 |
+
{27, "codex_original_level", "codex", "gpt-5(high)", "level", "reasoning_effort", "high", "gpt-5", false},
|
| 71 |
+
{28, "codex_alias_level", "codex", "g5(high)", "level", "reasoning_effort", "high", "gpt-5", true},
|
| 72 |
+
|
| 73 |
+
// === 7. Qwen Provider ===
|
| 74 |
+
{29, "qwen_original_level", "qwen", "qwen3-coder-plus(high)", "level", "enable_thinking", true, "qwen3-coder-plus", false},
|
| 75 |
+
{30, "qwen_alias_level", "qwen", "qcp(high)", "level", "enable_thinking", true, "qwen3-coder-plus", true},
|
| 76 |
+
|
| 77 |
+
// === 8. iFlow Provider ===
|
| 78 |
+
{31, "iflow_original_level", "iflow", "glm-4.7(high)", "level", "reasoning_effort", "high", "glm-4.7", false},
|
| 79 |
+
{32, "iflow_alias_level", "iflow", "glm(high)", "level", "reasoning_effort", "high", "glm-4.7", true},
|
| 80 |
+
}
|
| 81 |
+
|
| 82 |
+
for _, tt := range tests {
|
| 83 |
+
t.Run(tt.name, func(t *testing.T) {
|
| 84 |
+
// Step 1: Parse model suffix (simulates SDK layer normalization)
|
| 85 |
+
// For "gp(1000)" -> requestedModel="gp", metadata={thinking_budget: 1000}
|
| 86 |
+
requestedModel, metadata := util.NormalizeThinkingModel(tt.requestModel)
|
| 87 |
+
|
| 88 |
+
// Verify suffix was parsed
|
| 89 |
+
if metadata == nil && (tt.suffixType == "numeric" || tt.suffixType == "level") {
|
| 90 |
+
t.Errorf("Case #%d: NormalizeThinkingModel(%q) metadata is nil", tt.id, tt.requestModel)
|
| 91 |
+
return
|
| 92 |
+
}
|
| 93 |
+
|
| 94 |
+
// Step 2: Simulate OAuth model mapping
|
| 95 |
+
// Real flow: applyOAuthModelMapping stores requestedModel (the alias) in metadata
|
| 96 |
+
if tt.isAlias {
|
| 97 |
+
if metadata == nil {
|
| 98 |
+
metadata = make(map[string]any)
|
| 99 |
+
}
|
| 100 |
+
metadata[util.ModelMappingOriginalModelMetadataKey] = requestedModel
|
| 101 |
+
}
|
| 102 |
+
|
| 103 |
+
// Step 3: Verify metadata extraction
|
| 104 |
+
switch tt.suffixType {
|
| 105 |
+
case "numeric":
|
| 106 |
+
budget, _, _, matched := util.ThinkingFromMetadata(metadata)
|
| 107 |
+
if !matched {
|
| 108 |
+
t.Errorf("Case #%d: ThinkingFromMetadata did not match", tt.id)
|
| 109 |
+
return
|
| 110 |
+
}
|
| 111 |
+
if budget == nil {
|
| 112 |
+
t.Errorf("Case #%d: expected budget in metadata", tt.id)
|
| 113 |
+
return
|
| 114 |
+
}
|
| 115 |
+
// For thinkingBudget/budget_tokens, verify the parsed budget value
|
| 116 |
+
if tt.expectedField == "thinkingBudget" || tt.expectedField == "budget_tokens" {
|
| 117 |
+
expectedBudget := tt.expectedValue.(int)
|
| 118 |
+
if *budget != expectedBudget {
|
| 119 |
+
t.Errorf("Case #%d: budget = %d, want %d", tt.id, *budget, expectedBudget)
|
| 120 |
+
}
|
| 121 |
+
}
|
| 122 |
+
// For thinkingLevel (Gemini 3), verify conversion from budget to level
|
| 123 |
+
if tt.expectedField == "thinkingLevel" {
|
| 124 |
+
level, ok := util.ThinkingBudgetToGemini3Level(tt.upstreamModel, *budget)
|
| 125 |
+
if !ok {
|
| 126 |
+
t.Errorf("Case #%d: ThinkingBudgetToGemini3Level failed", tt.id)
|
| 127 |
+
return
|
| 128 |
+
}
|
| 129 |
+
expectedLevel := tt.expectedValue.(string)
|
| 130 |
+
if level != expectedLevel {
|
| 131 |
+
t.Errorf("Case #%d: converted level = %q, want %q", tt.id, level, expectedLevel)
|
| 132 |
+
}
|
| 133 |
+
}
|
| 134 |
+
|
| 135 |
+
case "level":
|
| 136 |
+
_, _, effort, matched := util.ThinkingFromMetadata(metadata)
|
| 137 |
+
if !matched {
|
| 138 |
+
t.Errorf("Case #%d: ThinkingFromMetadata did not match", tt.id)
|
| 139 |
+
return
|
| 140 |
+
}
|
| 141 |
+
if effort == nil {
|
| 142 |
+
t.Errorf("Case #%d: expected effort in metadata", tt.id)
|
| 143 |
+
return
|
| 144 |
+
}
|
| 145 |
+
if tt.expectedField == "thinkingLevel" || tt.expectedField == "reasoning_effort" {
|
| 146 |
+
expectedEffort := tt.expectedValue.(string)
|
| 147 |
+
if *effort != expectedEffort {
|
| 148 |
+
t.Errorf("Case #%d: effort = %q, want %q", tt.id, *effort, expectedEffort)
|
| 149 |
+
}
|
| 150 |
+
}
|
| 151 |
+
}
|
| 152 |
+
|
| 153 |
+
// Step 4: Test Gemini-specific thinkingLevel conversion for Gemini 3 models
|
| 154 |
+
if tt.expectedField == "thinkingLevel" && util.IsGemini3Model(tt.upstreamModel) {
|
| 155 |
+
body := []byte(`{"request":{"contents":[]}}`)
|
| 156 |
+
|
| 157 |
+
// Build metadata simulating real OAuth flow:
|
| 158 |
+
// - requestedModel (alias like "gf") is stored in model_mapping_original_model
|
| 159 |
+
// - upstreamModel is passed as the model parameter
|
| 160 |
+
testMetadata := make(map[string]any)
|
| 161 |
+
if tt.isAlias {
|
| 162 |
+
// Real flow: applyOAuthModelMapping stores requestedModel (the alias)
|
| 163 |
+
testMetadata[util.ModelMappingOriginalModelMetadataKey] = requestedModel
|
| 164 |
+
}
|
| 165 |
+
// Copy parsed metadata (thinking_budget, reasoning_effort, etc.)
|
| 166 |
+
for k, v := range metadata {
|
| 167 |
+
testMetadata[k] = v
|
| 168 |
+
}
|
| 169 |
+
|
| 170 |
+
result := util.ApplyGemini3ThinkingLevelFromMetadataCLI(tt.upstreamModel, testMetadata, body)
|
| 171 |
+
levelVal := gjson.GetBytes(result, "request.generationConfig.thinkingConfig.thinkingLevel")
|
| 172 |
+
|
| 173 |
+
expectedLevel := tt.expectedValue.(string)
|
| 174 |
+
if !levelVal.Exists() {
|
| 175 |
+
t.Errorf("Case #%d: expected thinkingLevel in result", tt.id)
|
| 176 |
+
} else if levelVal.String() != expectedLevel {
|
| 177 |
+
t.Errorf("Case #%d: thinkingLevel = %q, want %q", tt.id, levelVal.String(), expectedLevel)
|
| 178 |
+
}
|
| 179 |
+
}
|
| 180 |
+
|
| 181 |
+
// Step 5: Test Gemini 2.5 thinkingBudget application using real ApplyThinkingMetadataCLI flow
|
| 182 |
+
if tt.expectedField == "thinkingBudget" && util.IsGemini25Model(tt.upstreamModel) {
|
| 183 |
+
body := []byte(`{"request":{"contents":[]}}`)
|
| 184 |
+
|
| 185 |
+
// Build metadata simulating real OAuth flow:
|
| 186 |
+
// - requestedModel (alias like "gp") is stored in model_mapping_original_model
|
| 187 |
+
// - upstreamModel is passed as the model parameter
|
| 188 |
+
testMetadata := make(map[string]any)
|
| 189 |
+
if tt.isAlias {
|
| 190 |
+
// Real flow: applyOAuthModelMapping stores requestedModel (the alias)
|
| 191 |
+
testMetadata[util.ModelMappingOriginalModelMetadataKey] = requestedModel
|
| 192 |
+
}
|
| 193 |
+
// Copy parsed metadata (thinking_budget, reasoning_effort, etc.)
|
| 194 |
+
for k, v := range metadata {
|
| 195 |
+
testMetadata[k] = v
|
| 196 |
+
}
|
| 197 |
+
|
| 198 |
+
// Use the exported ApplyThinkingMetadataCLI which includes the fallback logic
|
| 199 |
+
result := executor.ApplyThinkingMetadataCLI(body, testMetadata, tt.upstreamModel)
|
| 200 |
+
budgetVal := gjson.GetBytes(result, "request.generationConfig.thinkingConfig.thinkingBudget")
|
| 201 |
+
|
| 202 |
+
expectedBudget := tt.expectedValue.(int)
|
| 203 |
+
if !budgetVal.Exists() {
|
| 204 |
+
t.Errorf("Case #%d: expected thinkingBudget in result", tt.id)
|
| 205 |
+
} else if int(budgetVal.Int()) != expectedBudget {
|
| 206 |
+
t.Errorf("Case #%d: thinkingBudget = %d, want %d", tt.id, int(budgetVal.Int()), expectedBudget)
|
| 207 |
+
}
|
| 208 |
+
}
|
| 209 |
+
})
|
| 210 |
+
}
|
| 211 |
+
}
|
test/thinking_conversion_test.go
ADDED
|
@@ -0,0 +1,798 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
package test
|
| 2 |
+
|
| 3 |
+
import (
|
| 4 |
+
"fmt"
|
| 5 |
+
"strings"
|
| 6 |
+
"testing"
|
| 7 |
+
"time"
|
| 8 |
+
|
| 9 |
+
_ "github.com/router-for-me/CLIProxyAPI/v6/internal/translator"
|
| 10 |
+
|
| 11 |
+
"github.com/router-for-me/CLIProxyAPI/v6/internal/registry"
|
| 12 |
+
"github.com/router-for-me/CLIProxyAPI/v6/internal/runtime/executor"
|
| 13 |
+
"github.com/router-for-me/CLIProxyAPI/v6/internal/util"
|
| 14 |
+
sdktranslator "github.com/router-for-me/CLIProxyAPI/v6/sdk/translator"
|
| 15 |
+
"github.com/tidwall/gjson"
|
| 16 |
+
"github.com/tidwall/sjson"
|
| 17 |
+
)
|
| 18 |
+
|
| 19 |
+
// isOpenAICompatModel returns true if the model is configured as an OpenAI-compatible
// model that should have reasoning effort passed through even if not in registry.
// This simulates the allowCompat behavior from OpenAICompatExecutor.
func isOpenAICompatModel(model string) bool {
	switch model {
	case "openai-compat":
		return true
	default:
		return false
	}
}
|
| 25 |
+
|
| 26 |
+
// registerCoreModels loads representative models across providers into the registry
|
| 27 |
+
// so NormalizeThinkingBudget and level validation use real ranges.
|
| 28 |
+
func registerCoreModels(t *testing.T) func() {
|
| 29 |
+
t.Helper()
|
| 30 |
+
reg := registry.GetGlobalRegistry()
|
| 31 |
+
uid := fmt.Sprintf("thinking-core-%d", time.Now().UnixNano())
|
| 32 |
+
reg.RegisterClient(uid+"-gemini", "gemini", registry.GetGeminiModels())
|
| 33 |
+
reg.RegisterClient(uid+"-claude", "claude", registry.GetClaudeModels())
|
| 34 |
+
reg.RegisterClient(uid+"-openai", "codex", registry.GetOpenAIModels())
|
| 35 |
+
reg.RegisterClient(uid+"-qwen", "qwen", registry.GetQwenModels())
|
| 36 |
+
// Custom openai-compatible model with forced thinking suffix passthrough.
|
| 37 |
+
// No Thinking field - simulates an external model added via openai-compat
|
| 38 |
+
// where the registry has no knowledge of its thinking capabilities.
|
| 39 |
+
// The allowCompat flag should preserve reasoning effort for such models.
|
| 40 |
+
customOpenAIModels := []*registry.ModelInfo{
|
| 41 |
+
{
|
| 42 |
+
ID: "openai-compat",
|
| 43 |
+
Object: "model",
|
| 44 |
+
Created: 1700000000,
|
| 45 |
+
OwnedBy: "custom-provider",
|
| 46 |
+
Type: "openai",
|
| 47 |
+
DisplayName: "OpenAI Compatible Model",
|
| 48 |
+
Description: "OpenAI-compatible model with forced thinking suffix support",
|
| 49 |
+
},
|
| 50 |
+
}
|
| 51 |
+
reg.RegisterClient(uid+"-custom-openai", "codex", customOpenAIModels)
|
| 52 |
+
return func() {
|
| 53 |
+
reg.UnregisterClient(uid + "-gemini")
|
| 54 |
+
reg.UnregisterClient(uid + "-claude")
|
| 55 |
+
reg.UnregisterClient(uid + "-openai")
|
| 56 |
+
reg.UnregisterClient(uid + "-qwen")
|
| 57 |
+
reg.UnregisterClient(uid + "-custom-openai")
|
| 58 |
+
}
|
| 59 |
+
}
|
| 60 |
+
|
| 61 |
+
// Models exercised by the thinking conversion matrix.
var thinkingTestModels = []string{
	"gpt-5",           // level-based thinking model
	"gemini-2.5-pro",  // numeric-budget thinking model
	"qwen3-code-plus", // no thinking support
	// NOTE(review): "qwen3-code-plus" differs from the registered
	// "qwen3-coder-plus" — presumably deliberate so the model resolves as
	// unregistered/no-thinking; confirm against registry.GetQwenModels().
	"openai-compat", // allowCompat=true (OpenAI-compatible channel)
}

// Source and target protocols crossed against each model above.
var (
	thinkingTestFromProtocols = []string{"openai", "claude", "gemini", "openai-response"}
	thinkingTestToProtocols   = []string{"gemini", "claude", "openai", "codex"}
)

// Numeric budgets and their level equivalents:
//
//	-1          -> auto
//	0           -> none
//	1..1024     -> low
//	1025..8192  -> medium
//	8193..24576 -> high
//	>24576      -> model highest level (right-most in Levels)
var thinkingNumericSamples = []int{-1, 0, 1023, 1025, 8193, 64000}

// Levels and their numeric equivalents:
//
//	auto    -> -1
//	none    -> 0
//	minimal -> 512
//	low     -> 1024
//	medium  -> 8192
//	high    -> 24576
//	xhigh   -> 32768
//	invalid -> invalid (no mapping)
var thinkingLevelSamples = []string{"auto", "none", "minimal", "low", "medium", "high", "xhigh", "invalid"}
|
| 91 |
+
|
| 92 |
+
// buildRawPayload returns a minimal request body for the given source protocol,
// with the (possibly suffixed) model name substituted in.
func buildRawPayload(fromProtocol, modelWithSuffix string) []byte {
	var template string
	switch fromProtocol {
	case "gemini":
		template = `{"model":"%s","contents":[{"role":"user","parts":[{"text":"hi"}]}]}`
	case "openai-response":
		template = `{"model":"%s","input":[{"role":"user","content":[{"type":"text","text":"hi"}]}]}`
	default: // openai / claude and other chat-style payloads
		template = `{"model":"%s","messages":[{"role":"user","content":"hi"}]}`
	}
	return []byte(fmt.Sprintf(template, modelWithSuffix))
}
|
| 102 |
+
|
| 103 |
+
// normalizeCodexPayload mirrors codex_executor's reasoning + streaming tweaks.
|
| 104 |
+
func normalizeCodexPayload(body []byte, upstreamModel string, allowCompat bool) ([]byte, error) {
|
| 105 |
+
body = executor.NormalizeThinkingConfig(body, upstreamModel, allowCompat)
|
| 106 |
+
if err := executor.ValidateThinkingConfig(body, upstreamModel); err != nil {
|
| 107 |
+
return body, err
|
| 108 |
+
}
|
| 109 |
+
body, _ = sjson.SetBytes(body, "model", upstreamModel)
|
| 110 |
+
body, _ = sjson.SetBytes(body, "stream", true)
|
| 111 |
+
body, _ = sjson.DeleteBytes(body, "previous_response_id")
|
| 112 |
+
return body, nil
|
| 113 |
+
}
|
| 114 |
+
|
| 115 |
+
// buildBodyForProtocol runs a minimal request through the same translation and
|
| 116 |
+
// thinking pipeline used in executors for the given target protocol.
|
| 117 |
+
func buildBodyForProtocol(t *testing.T, fromProtocol, toProtocol, modelWithSuffix string) ([]byte, error) {
|
| 118 |
+
t.Helper()
|
| 119 |
+
normalizedModel, metadata := util.NormalizeThinkingModel(modelWithSuffix)
|
| 120 |
+
upstreamModel := util.ResolveOriginalModel(normalizedModel, metadata)
|
| 121 |
+
raw := buildRawPayload(fromProtocol, modelWithSuffix)
|
| 122 |
+
stream := fromProtocol != toProtocol
|
| 123 |
+
|
| 124 |
+
body := sdktranslator.TranslateRequest(
|
| 125 |
+
sdktranslator.FromString(fromProtocol),
|
| 126 |
+
sdktranslator.FromString(toProtocol),
|
| 127 |
+
normalizedModel,
|
| 128 |
+
raw,
|
| 129 |
+
stream,
|
| 130 |
+
)
|
| 131 |
+
|
| 132 |
+
var err error
|
| 133 |
+
allowCompat := isOpenAICompatModel(normalizedModel)
|
| 134 |
+
switch toProtocol {
|
| 135 |
+
case "gemini":
|
| 136 |
+
body = executor.ApplyThinkingMetadata(body, metadata, normalizedModel)
|
| 137 |
+
body = util.ApplyDefaultThinkingIfNeeded(normalizedModel, body)
|
| 138 |
+
body = util.NormalizeGeminiThinkingBudget(normalizedModel, body)
|
| 139 |
+
body = util.StripThinkingConfigIfUnsupported(normalizedModel, body)
|
| 140 |
+
case "claude":
|
| 141 |
+
if budget, ok := util.ResolveClaudeThinkingConfig(normalizedModel, metadata); ok {
|
| 142 |
+
body = util.ApplyClaudeThinkingConfig(body, budget)
|
| 143 |
+
}
|
| 144 |
+
case "openai":
|
| 145 |
+
body = executor.ApplyReasoningEffortMetadata(body, metadata, normalizedModel, "reasoning_effort", allowCompat)
|
| 146 |
+
body = executor.NormalizeThinkingConfig(body, upstreamModel, allowCompat)
|
| 147 |
+
err = executor.ValidateThinkingConfig(body, upstreamModel)
|
| 148 |
+
case "codex": // OpenAI responses / codex
|
| 149 |
+
// Codex does not support allowCompat; always use false.
|
| 150 |
+
body = executor.ApplyReasoningEffortMetadata(body, metadata, normalizedModel, "reasoning.effort", false)
|
| 151 |
+
// Mirror CodexExecutor final normalization and model override so tests log the final body.
|
| 152 |
+
body, err = normalizeCodexPayload(body, upstreamModel, false)
|
| 153 |
+
default:
|
| 154 |
+
}
|
| 155 |
+
|
| 156 |
+
// Mirror executor behavior: final payload uses the upstream (base) model name.
|
| 157 |
+
if upstreamModel != "" {
|
| 158 |
+
body, _ = sjson.SetBytes(body, "model", upstreamModel)
|
| 159 |
+
}
|
| 160 |
+
|
| 161 |
+
// For tests we only keep model + thinking-related fields to avoid noise.
|
| 162 |
+
body = filterThinkingBody(toProtocol, body, upstreamModel, normalizedModel)
|
| 163 |
+
return body, err
|
| 164 |
+
}
|
| 165 |
+
|
| 166 |
+
// filterThinkingBody projects the translated payload down to only model and
|
| 167 |
+
// thinking-related fields for the given target protocol.
|
| 168 |
+
func filterThinkingBody(toProtocol string, body []byte, upstreamModel, normalizedModel string) []byte {
|
| 169 |
+
if len(body) == 0 {
|
| 170 |
+
return body
|
| 171 |
+
}
|
| 172 |
+
out := []byte(`{}`)
|
| 173 |
+
|
| 174 |
+
// Preserve model if present, otherwise fall back to upstream/normalized model.
|
| 175 |
+
if m := gjson.GetBytes(body, "model"); m.Exists() {
|
| 176 |
+
out, _ = sjson.SetBytes(out, "model", m.Value())
|
| 177 |
+
} else if upstreamModel != "" {
|
| 178 |
+
out, _ = sjson.SetBytes(out, "model", upstreamModel)
|
| 179 |
+
} else if normalizedModel != "" {
|
| 180 |
+
out, _ = sjson.SetBytes(out, "model", normalizedModel)
|
| 181 |
+
}
|
| 182 |
+
|
| 183 |
+
switch toProtocol {
|
| 184 |
+
case "gemini":
|
| 185 |
+
if tc := gjson.GetBytes(body, "generationConfig.thinkingConfig"); tc.Exists() {
|
| 186 |
+
out, _ = sjson.SetRawBytes(out, "generationConfig.thinkingConfig", []byte(tc.Raw))
|
| 187 |
+
}
|
| 188 |
+
case "claude":
|
| 189 |
+
if tcfg := gjson.GetBytes(body, "thinking"); tcfg.Exists() {
|
| 190 |
+
out, _ = sjson.SetRawBytes(out, "thinking", []byte(tcfg.Raw))
|
| 191 |
+
}
|
| 192 |
+
case "openai":
|
| 193 |
+
if re := gjson.GetBytes(body, "reasoning_effort"); re.Exists() {
|
| 194 |
+
out, _ = sjson.SetBytes(out, "reasoning_effort", re.Value())
|
| 195 |
+
}
|
| 196 |
+
case "codex":
|
| 197 |
+
if re := gjson.GetBytes(body, "reasoning.effort"); re.Exists() {
|
| 198 |
+
out, _ = sjson.SetBytes(out, "reasoning.effort", re.Value())
|
| 199 |
+
}
|
| 200 |
+
}
|
| 201 |
+
return out
|
| 202 |
+
}
|
| 203 |
+
|
| 204 |
+
func TestThinkingConversionsAcrossProtocolsAndModels(t *testing.T) {
|
| 205 |
+
cleanup := registerCoreModels(t)
|
| 206 |
+
defer cleanup()
|
| 207 |
+
|
| 208 |
+
type scenario struct {
|
| 209 |
+
name string
|
| 210 |
+
modelSuffix string
|
| 211 |
+
}
|
| 212 |
+
|
| 213 |
+
numericName := func(budget int) string {
|
| 214 |
+
if budget < 0 {
|
| 215 |
+
return "numeric-neg1"
|
| 216 |
+
}
|
| 217 |
+
return fmt.Sprintf("numeric-%d", budget)
|
| 218 |
+
}
|
| 219 |
+
|
| 220 |
+
for _, model := range thinkingTestModels {
|
| 221 |
+
_ = registry.GetGlobalRegistry().GetModelInfo(model)
|
| 222 |
+
|
| 223 |
+
for _, from := range thinkingTestFromProtocols {
|
| 224 |
+
// Scenario selection follows protocol semantics:
|
| 225 |
+
// - OpenAI-style protocols (openai/openai-response) express thinking as levels.
|
| 226 |
+
// - Claude/Gemini-style protocols express thinking as numeric budgets.
|
| 227 |
+
cases := []scenario{
|
| 228 |
+
{name: "no-suffix", modelSuffix: model},
|
| 229 |
+
}
|
| 230 |
+
if from == "openai" || from == "openai-response" {
|
| 231 |
+
for _, lvl := range thinkingLevelSamples {
|
| 232 |
+
cases = append(cases, scenario{
|
| 233 |
+
name: "level-" + lvl,
|
| 234 |
+
modelSuffix: fmt.Sprintf("%s(%s)", model, lvl),
|
| 235 |
+
})
|
| 236 |
+
}
|
| 237 |
+
} else { // claude or gemini
|
| 238 |
+
for _, budget := range thinkingNumericSamples {
|
| 239 |
+
budget := budget
|
| 240 |
+
cases = append(cases, scenario{
|
| 241 |
+
name: numericName(budget),
|
| 242 |
+
modelSuffix: fmt.Sprintf("%s(%d)", model, budget),
|
| 243 |
+
})
|
| 244 |
+
}
|
| 245 |
+
}
|
| 246 |
+
|
| 247 |
+
for _, to := range thinkingTestToProtocols {
|
| 248 |
+
if from == to {
|
| 249 |
+
continue
|
| 250 |
+
}
|
| 251 |
+
t.Logf("─────────────────────────────────────────────────────────────────────────────────")
|
| 252 |
+
t.Logf(" %s -> %s | model: %s", from, to, model)
|
| 253 |
+
t.Logf("─────────────────────────────────────────────────────────────────────────────────")
|
| 254 |
+
for _, cs := range cases {
|
| 255 |
+
from := from
|
| 256 |
+
to := to
|
| 257 |
+
cs := cs
|
| 258 |
+
testName := fmt.Sprintf("%s->%s/%s/%s", from, to, model, cs.name)
|
| 259 |
+
t.Run(testName, func(t *testing.T) {
|
| 260 |
+
normalizedModel, metadata := util.NormalizeThinkingModel(cs.modelSuffix)
|
| 261 |
+
expectPresent, expectValue, expectErr := func() (bool, string, bool) {
|
| 262 |
+
switch to {
|
| 263 |
+
case "gemini":
|
| 264 |
+
budget, include, ok := util.ResolveThinkingConfigFromMetadata(normalizedModel, metadata)
|
| 265 |
+
if !ok || !util.ModelSupportsThinking(normalizedModel) {
|
| 266 |
+
return false, "", false
|
| 267 |
+
}
|
| 268 |
+
if include != nil && !*include {
|
| 269 |
+
return false, "", false
|
| 270 |
+
}
|
| 271 |
+
if budget == nil {
|
| 272 |
+
return false, "", false
|
| 273 |
+
}
|
| 274 |
+
norm := util.NormalizeThinkingBudget(normalizedModel, *budget)
|
| 275 |
+
return true, fmt.Sprintf("%d", norm), false
|
| 276 |
+
case "claude":
|
| 277 |
+
if !util.ModelSupportsThinking(normalizedModel) {
|
| 278 |
+
return false, "", false
|
| 279 |
+
}
|
| 280 |
+
budget, ok := util.ResolveClaudeThinkingConfig(normalizedModel, metadata)
|
| 281 |
+
if !ok || budget == nil {
|
| 282 |
+
return false, "", false
|
| 283 |
+
}
|
| 284 |
+
return true, fmt.Sprintf("%d", *budget), false
|
| 285 |
+
case "openai":
|
| 286 |
+
allowCompat := isOpenAICompatModel(normalizedModel)
|
| 287 |
+
if !util.ModelSupportsThinking(normalizedModel) && !allowCompat {
|
| 288 |
+
return false, "", false
|
| 289 |
+
}
|
| 290 |
+
// For allowCompat models, pass through effort directly without validation
|
| 291 |
+
if allowCompat {
|
| 292 |
+
effort, ok := util.ReasoningEffortFromMetadata(metadata)
|
| 293 |
+
if ok && strings.TrimSpace(effort) != "" {
|
| 294 |
+
return true, strings.ToLower(strings.TrimSpace(effort)), false
|
| 295 |
+
}
|
| 296 |
+
// Check numeric budget fallback for allowCompat
|
| 297 |
+
if budget, _, _, matched := util.ThinkingFromMetadata(metadata); matched && budget != nil {
|
| 298 |
+
if mapped, okMap := util.ThinkingBudgetToEffort(normalizedModel, *budget); okMap && mapped != "" {
|
| 299 |
+
return true, mapped, false
|
| 300 |
+
}
|
| 301 |
+
}
|
| 302 |
+
return false, "", false
|
| 303 |
+
}
|
| 304 |
+
if !util.ModelUsesThinkingLevels(normalizedModel) {
|
| 305 |
+
// Non-levels models don't support effort strings in openai
|
| 306 |
+
return false, "", false
|
| 307 |
+
}
|
| 308 |
+
effort, ok := util.ReasoningEffortFromMetadata(metadata)
|
| 309 |
+
if !ok || strings.TrimSpace(effort) == "" {
|
| 310 |
+
if budget, _, _, matched := util.ThinkingFromMetadata(metadata); matched && budget != nil {
|
| 311 |
+
if mapped, okMap := util.ThinkingBudgetToEffort(normalizedModel, *budget); okMap {
|
| 312 |
+
effort = mapped
|
| 313 |
+
ok = true
|
| 314 |
+
}
|
| 315 |
+
}
|
| 316 |
+
}
|
| 317 |
+
if !ok || strings.TrimSpace(effort) == "" {
|
| 318 |
+
return false, "", false
|
| 319 |
+
}
|
| 320 |
+
effort = strings.ToLower(strings.TrimSpace(effort))
|
| 321 |
+
if normalized, okLevel := util.NormalizeReasoningEffortLevel(normalizedModel, effort); okLevel {
|
| 322 |
+
return true, normalized, false
|
| 323 |
+
}
|
| 324 |
+
return false, "", true // validation would fail
|
| 325 |
+
case "codex":
|
| 326 |
+
// Codex does not support allowCompat; require thinking-capable level models.
|
| 327 |
+
if !util.ModelSupportsThinking(normalizedModel) || !util.ModelUsesThinkingLevels(normalizedModel) {
|
| 328 |
+
return false, "", false
|
| 329 |
+
}
|
| 330 |
+
effort, ok := util.ReasoningEffortFromMetadata(metadata)
|
| 331 |
+
if ok && strings.TrimSpace(effort) != "" {
|
| 332 |
+
effort = strings.ToLower(strings.TrimSpace(effort))
|
| 333 |
+
if normalized, okLevel := util.NormalizeReasoningEffortLevel(normalizedModel, effort); okLevel {
|
| 334 |
+
return true, normalized, false
|
| 335 |
+
}
|
| 336 |
+
return false, "", true
|
| 337 |
+
}
|
| 338 |
+
if budget, _, _, matched := util.ThinkingFromMetadata(metadata); matched && budget != nil {
|
| 339 |
+
if mapped, okMap := util.ThinkingBudgetToEffort(normalizedModel, *budget); okMap && mapped != "" {
|
| 340 |
+
mapped = strings.ToLower(strings.TrimSpace(mapped))
|
| 341 |
+
if normalized, okLevel := util.NormalizeReasoningEffortLevel(normalizedModel, mapped); okLevel {
|
| 342 |
+
return true, normalized, false
|
| 343 |
+
}
|
| 344 |
+
return false, "", true
|
| 345 |
+
}
|
| 346 |
+
}
|
| 347 |
+
if from != "openai-response" {
|
| 348 |
+
// Codex translators default reasoning.effort to "medium" when
|
| 349 |
+
// no explicit thinking suffix/metadata is provided.
|
| 350 |
+
return true, "medium", false
|
| 351 |
+
}
|
| 352 |
+
return false, "", false
|
| 353 |
+
default:
|
| 354 |
+
return false, "", false
|
| 355 |
+
}
|
| 356 |
+
}()
|
| 357 |
+
|
| 358 |
+
body, err := buildBodyForProtocol(t, from, to, cs.modelSuffix)
|
| 359 |
+
actualPresent, actualValue := func() (bool, string) {
|
| 360 |
+
path := ""
|
| 361 |
+
switch to {
|
| 362 |
+
case "gemini":
|
| 363 |
+
path = "generationConfig.thinkingConfig.thinkingBudget"
|
| 364 |
+
case "claude":
|
| 365 |
+
path = "thinking.budget_tokens"
|
| 366 |
+
case "openai":
|
| 367 |
+
path = "reasoning_effort"
|
| 368 |
+
case "codex":
|
| 369 |
+
path = "reasoning.effort"
|
| 370 |
+
}
|
| 371 |
+
if path == "" {
|
| 372 |
+
return false, ""
|
| 373 |
+
}
|
| 374 |
+
val := gjson.GetBytes(body, path)
|
| 375 |
+
if to == "codex" && !val.Exists() {
|
| 376 |
+
reasoning := gjson.GetBytes(body, "reasoning")
|
| 377 |
+
if reasoning.Exists() {
|
| 378 |
+
val = reasoning.Get("effort")
|
| 379 |
+
}
|
| 380 |
+
}
|
| 381 |
+
if !val.Exists() {
|
| 382 |
+
return false, ""
|
| 383 |
+
}
|
| 384 |
+
if val.Type == gjson.Number {
|
| 385 |
+
return true, fmt.Sprintf("%d", val.Int())
|
| 386 |
+
}
|
| 387 |
+
return true, val.String()
|
| 388 |
+
}()
|
| 389 |
+
|
| 390 |
+
t.Logf("from=%s to=%s model=%s suffix=%s present(expect=%v got=%v) value(expect=%s got=%s) err(expect=%v got=%v) body=%s",
|
| 391 |
+
from, to, model, cs.modelSuffix, expectPresent, actualPresent, expectValue, actualValue, expectErr, err != nil, string(body))
|
| 392 |
+
|
| 393 |
+
if expectErr {
|
| 394 |
+
if err == nil {
|
| 395 |
+
t.Fatalf("expected validation error but got none, body=%s", string(body))
|
| 396 |
+
}
|
| 397 |
+
return
|
| 398 |
+
}
|
| 399 |
+
if err != nil {
|
| 400 |
+
t.Fatalf("unexpected error: %v body=%s", err, string(body))
|
| 401 |
+
}
|
| 402 |
+
|
| 403 |
+
if expectPresent != actualPresent {
|
| 404 |
+
t.Fatalf("presence mismatch: expect %v got %v body=%s", expectPresent, actualPresent, string(body))
|
| 405 |
+
}
|
| 406 |
+
if expectPresent && expectValue != actualValue {
|
| 407 |
+
t.Fatalf("value mismatch: expect %s got %s body=%s", expectValue, actualValue, string(body))
|
| 408 |
+
}
|
| 409 |
+
})
|
| 410 |
+
}
|
| 411 |
+
}
|
| 412 |
+
}
|
| 413 |
+
}
|
| 414 |
+
}
|
| 415 |
+
|
| 416 |
+
// buildRawPayloadWithThinking creates a payload with thinking parameters already in the body.
|
| 417 |
+
// This tests the path where thinking comes from the raw payload, not model suffix.
|
| 418 |
+
func buildRawPayloadWithThinking(fromProtocol, model string, thinkingParam any) []byte {
|
| 419 |
+
switch fromProtocol {
|
| 420 |
+
case "gemini":
|
| 421 |
+
base := fmt.Sprintf(`{"model":"%s","contents":[{"role":"user","parts":[{"text":"hi"}]}]}`, model)
|
| 422 |
+
if budget, ok := thinkingParam.(int); ok {
|
| 423 |
+
base, _ = sjson.Set(base, "generationConfig.thinkingConfig.thinkingBudget", budget)
|
| 424 |
+
}
|
| 425 |
+
return []byte(base)
|
| 426 |
+
case "openai-response":
|
| 427 |
+
base := fmt.Sprintf(`{"model":"%s","input":[{"role":"user","content":[{"type":"text","text":"hi"}]}]}`, model)
|
| 428 |
+
if effort, ok := thinkingParam.(string); ok && effort != "" {
|
| 429 |
+
base, _ = sjson.Set(base, "reasoning.effort", effort)
|
| 430 |
+
}
|
| 431 |
+
return []byte(base)
|
| 432 |
+
case "openai":
|
| 433 |
+
base := fmt.Sprintf(`{"model":"%s","messages":[{"role":"user","content":"hi"}]}`, model)
|
| 434 |
+
if effort, ok := thinkingParam.(string); ok && effort != "" {
|
| 435 |
+
base, _ = sjson.Set(base, "reasoning_effort", effort)
|
| 436 |
+
}
|
| 437 |
+
return []byte(base)
|
| 438 |
+
case "claude":
|
| 439 |
+
base := fmt.Sprintf(`{"model":"%s","messages":[{"role":"user","content":"hi"}]}`, model)
|
| 440 |
+
if budget, ok := thinkingParam.(int); ok {
|
| 441 |
+
base, _ = sjson.Set(base, "thinking.type", "enabled")
|
| 442 |
+
base, _ = sjson.Set(base, "thinking.budget_tokens", budget)
|
| 443 |
+
}
|
| 444 |
+
return []byte(base)
|
| 445 |
+
default:
|
| 446 |
+
return []byte(fmt.Sprintf(`{"model":"%s","messages":[{"role":"user","content":"hi"}]}`, model))
|
| 447 |
+
}
|
| 448 |
+
}
|
| 449 |
+
|
| 450 |
+
// buildBodyForProtocolWithRawThinking translates payload with raw thinking params.
|
| 451 |
+
func buildBodyForProtocolWithRawThinking(t *testing.T, fromProtocol, toProtocol, model string, thinkingParam any) ([]byte, error) {
|
| 452 |
+
t.Helper()
|
| 453 |
+
raw := buildRawPayloadWithThinking(fromProtocol, model, thinkingParam)
|
| 454 |
+
stream := fromProtocol != toProtocol
|
| 455 |
+
|
| 456 |
+
body := sdktranslator.TranslateRequest(
|
| 457 |
+
sdktranslator.FromString(fromProtocol),
|
| 458 |
+
sdktranslator.FromString(toProtocol),
|
| 459 |
+
model,
|
| 460 |
+
raw,
|
| 461 |
+
stream,
|
| 462 |
+
)
|
| 463 |
+
|
| 464 |
+
var err error
|
| 465 |
+
allowCompat := isOpenAICompatModel(model)
|
| 466 |
+
switch toProtocol {
|
| 467 |
+
case "gemini":
|
| 468 |
+
body = util.ApplyDefaultThinkingIfNeeded(model, body)
|
| 469 |
+
body = util.NormalizeGeminiThinkingBudget(model, body)
|
| 470 |
+
body = util.StripThinkingConfigIfUnsupported(model, body)
|
| 471 |
+
case "claude":
|
| 472 |
+
// For raw payload, Claude thinking is passed through by translator
|
| 473 |
+
// No additional processing needed as thinking is already in body
|
| 474 |
+
case "openai":
|
| 475 |
+
body = executor.NormalizeThinkingConfig(body, model, allowCompat)
|
| 476 |
+
err = executor.ValidateThinkingConfig(body, model)
|
| 477 |
+
case "codex":
|
| 478 |
+
// Codex does not support allowCompat; always use false.
|
| 479 |
+
body, err = normalizeCodexPayload(body, model, false)
|
| 480 |
+
}
|
| 481 |
+
|
| 482 |
+
body, _ = sjson.SetBytes(body, "model", model)
|
| 483 |
+
body = filterThinkingBody(toProtocol, body, model, model)
|
| 484 |
+
return body, err
|
| 485 |
+
}
|
| 486 |
+
|
| 487 |
+
// TestRawPayloadThinkingConversions exercises thinking-parameter conversion
// when the parameter arrives embedded in the raw request body (not as a model
// suffix). For every (model, from-protocol, to-protocol) combination it
// computes an expected outcome with an inline oracle, runs the real
// translation pipeline, and compares presence, value, and validation errors.
func TestRawPayloadThinkingConversions(t *testing.T) {
	cleanup := registerCoreModels(t)
	defer cleanup()

	// scenario pairs a sub-test name with the thinking parameter to embed:
	// int => numeric budget, string => effort level, nil => no thinking.
	type scenario struct {
		name          string
		thinkingParam any // int for budget, string for effort level
	}

	// numericName renders a readable sub-test name for a budget value;
	// negative budgets are collapsed to "budget-neg1" (dynamic/auto budget).
	numericName := func(budget int) string {
		if budget < 0 {
			return "budget-neg1"
		}
		return fmt.Sprintf("budget-%d", budget)
	}

	for _, model := range thinkingTestModels {
		// Capabilities of this model drive every expectation below.
		supportsThinking := util.ModelSupportsThinking(model)
		usesLevels := util.ModelUsesThinkingLevels(model)
		allowCompat := isOpenAICompatModel(model)

		for _, from := range thinkingTestFromProtocols {
			// Build the scenario list appropriate for the source protocol:
			// OpenAI-style sources carry effort strings, Gemini/Claude carry
			// numeric budgets.
			var cases []scenario
			switch from {
			case "openai", "openai-response":
				cases = []scenario{
					{name: "no-thinking", thinkingParam: nil},
				}
				for _, lvl := range thinkingLevelSamples {
					cases = append(cases, scenario{
						name:          "effort-" + lvl,
						thinkingParam: lvl,
					})
				}
			case "gemini", "claude":
				cases = []scenario{
					{name: "no-thinking", thinkingParam: nil},
				}
				for _, budget := range thinkingNumericSamples {
					budget := budget
					cases = append(cases, scenario{
						name:          numericName(budget),
						thinkingParam: budget,
					})
				}
			}

			for _, to := range thinkingTestToProtocols {
				// Same-protocol translation is a pass-through; skip it.
				if from == to {
					continue
				}
				t.Logf("═══════════════════════════════════════════════════════════════════════════════")
				t.Logf(" RAW PAYLOAD: %s -> %s | model: %s", from, to, model)
				t.Logf("═══════════════════════════════════════════════════════════════════════════════")

				for _, cs := range cases {
					// Shadow loop variables for the closure (pre-Go 1.22 safety).
					from := from
					to := to
					cs := cs
					testName := fmt.Sprintf("raw/%s->%s/%s/%s", from, to, model, cs.name)
					t.Run(testName, func(t *testing.T) {
						// Oracle: derive (present?, value, expect-error?) for the
						// target protocol from the scenario's thinking parameter.
						expectPresent, expectValue, expectErr := func() (bool, string, bool) {
							if cs.thinkingParam == nil {
								if to == "codex" && from != "openai-response" && supportsThinking && usesLevels {
									// Codex translators default reasoning.effort to "medium" for thinking-capable level models
									return true, "medium", false
								}
								return false, "", false
							}

							switch to {
							case "gemini":
								if !supportsThinking || usesLevels {
									return false, "", false
								}
								// Gemini expects numeric budget (only for non-level models)
								if budget, ok := cs.thinkingParam.(int); ok {
									norm := util.NormalizeThinkingBudget(model, budget)
									return true, fmt.Sprintf("%d", norm), false
								}
								// Convert effort level to budget for non-level models only
								if effort, ok := cs.thinkingParam.(string); ok && effort != "" {
									// "none" disables thinking - no thinkingBudget in output
									if strings.ToLower(effort) == "none" {
										return false, "", false
									}
									if budget, okB := util.ThinkingEffortToBudget(model, effort); okB {
										// ThinkingEffortToBudget already returns normalized budget
										return true, fmt.Sprintf("%d", budget), false
									}
									// Invalid effort does not map to a budget
									return false, "", false
								}
								return false, "", false
							case "claude":
								if !supportsThinking || usesLevels {
									return false, "", false
								}
								// Claude expects numeric budget (only for non-level models)
								if budget, ok := cs.thinkingParam.(int); ok && budget > 0 {
									norm := util.NormalizeThinkingBudget(model, budget)
									return true, fmt.Sprintf("%d", norm), false
								}
								// Convert effort level to budget for non-level models only
								if effort, ok := cs.thinkingParam.(string); ok && effort != "" {
									// "none" and "auto" don't produce budget_tokens
									lower := strings.ToLower(effort)
									if lower == "none" || lower == "auto" {
										return false, "", false
									}
									if budget, okB := util.ThinkingEffortToBudget(model, effort); okB {
										// ThinkingEffortToBudget already returns normalized budget
										return true, fmt.Sprintf("%d", budget), false
									}
									// Invalid effort - claude sets thinking.type:enabled but no budget_tokens
									return false, "", false
								}
								return false, "", false
							case "openai":
								// Compat models pass effort through and map budgets without level validation.
								if allowCompat {
									if effort, ok := cs.thinkingParam.(string); ok && strings.TrimSpace(effort) != "" {
										normalized := strings.ToLower(strings.TrimSpace(effort))
										return true, normalized, false
									}
									if budget, ok := cs.thinkingParam.(int); ok {
										if mapped, okM := util.ThinkingBudgetToEffort(model, budget); okM && mapped != "" {
											return true, mapped, false
										}
									}
									return false, "", false
								}
								if !supportsThinking || !usesLevels {
									return false, "", false
								}
								if effort, ok := cs.thinkingParam.(string); ok && effort != "" {
									if normalized, okN := util.NormalizeReasoningEffortLevel(model, effort); okN {
										return true, normalized, false
									}
									return false, "", true // invalid level
								}
								if budget, ok := cs.thinkingParam.(int); ok {
									if mapped, okM := util.ThinkingBudgetToEffort(model, budget); okM && mapped != "" {
										// Check if the mapped effort is valid for this model
										if _, validLevel := util.NormalizeReasoningEffortLevel(model, mapped); !validLevel {
											return true, mapped, true // expect validation error
										}
										return true, mapped, false
									}
								}
								return false, "", false
							case "codex":
								// Codex does not support allowCompat; require thinking-capable level models.
								if !supportsThinking || !usesLevels {
									return false, "", false
								}
								if effort, ok := cs.thinkingParam.(string); ok && effort != "" {
									if normalized, okN := util.NormalizeReasoningEffortLevel(model, effort); okN {
										return true, normalized, false
									}
									return false, "", true
								}
								if budget, ok := cs.thinkingParam.(int); ok {
									if mapped, okM := util.ThinkingBudgetToEffort(model, budget); okM && mapped != "" {
										// Check if the mapped effort is valid for this model
										if _, validLevel := util.NormalizeReasoningEffortLevel(model, mapped); !validLevel {
											return true, mapped, true // expect validation error
										}
										return true, mapped, false
									}
								}
								if from != "openai-response" {
									// Codex translators default reasoning.effort to "medium" for thinking-capable models
									return true, "medium", false
								}
								return false, "", false
							}
							return false, "", false
						}()

						// Run the real pipeline, then extract the actual thinking
						// field from the translated body at the target's path.
						body, err := buildBodyForProtocolWithRawThinking(t, from, to, model, cs.thinkingParam)
						actualPresent, actualValue := func() (bool, string) {
							path := ""
							switch to {
							case "gemini":
								path = "generationConfig.thinkingConfig.thinkingBudget"
							case "claude":
								path = "thinking.budget_tokens"
							case "openai":
								path = "reasoning_effort"
							case "codex":
								path = "reasoning.effort"
							}
							if path == "" {
								return false, ""
							}
							val := gjson.GetBytes(body, path)
							// Codex may nest effort under a "reasoning" object; fall back to it.
							if to == "codex" && !val.Exists() {
								reasoning := gjson.GetBytes(body, "reasoning")
								if reasoning.Exists() {
									val = reasoning.Get("effort")
								}
							}
							if !val.Exists() {
								return false, ""
							}
							// Budgets come back as JSON numbers; render them as integers.
							if val.Type == gjson.Number {
								return true, fmt.Sprintf("%d", val.Int())
							}
							return true, val.String()
						}()

						t.Logf("from=%s to=%s model=%s param=%v present(expect=%v got=%v) value(expect=%s got=%s) err(expect=%v got=%v) body=%s",
							from, to, model, cs.thinkingParam, expectPresent, actualPresent, expectValue, actualValue, expectErr, err != nil, string(body))

						// When a validation error is expected, that is the whole
						// assertion; presence/value are not meaningful.
						if expectErr {
							if err == nil {
								t.Fatalf("expected validation error but got none, body=%s", string(body))
							}
							return
						}
						if err != nil {
							t.Fatalf("unexpected error: %v body=%s", err, string(body))
						}

						if expectPresent != actualPresent {
							t.Fatalf("presence mismatch: expect %v got %v body=%s", expectPresent, actualPresent, string(body))
						}
						if expectPresent && expectValue != actualValue {
							t.Fatalf("value mismatch: expect %s got %s body=%s", expectValue, actualValue, string(body))
						}
					})
				}
			}
		}
	}
}
|
| 723 |
+
|
| 724 |
+
func TestThinkingBudgetToEffort(t *testing.T) {
|
| 725 |
+
cleanup := registerCoreModels(t)
|
| 726 |
+
defer cleanup()
|
| 727 |
+
|
| 728 |
+
cases := []struct {
|
| 729 |
+
name string
|
| 730 |
+
model string
|
| 731 |
+
budget int
|
| 732 |
+
want string
|
| 733 |
+
ok bool
|
| 734 |
+
}{
|
| 735 |
+
{name: "dynamic-auto", model: "gpt-5", budget: -1, want: "auto", ok: true},
|
| 736 |
+
{name: "zero-none", model: "gpt-5", budget: 0, want: "minimal", ok: true},
|
| 737 |
+
{name: "low-min", model: "gpt-5", budget: 1, want: "low", ok: true},
|
| 738 |
+
{name: "low-max", model: "gpt-5", budget: 1024, want: "low", ok: true},
|
| 739 |
+
{name: "medium-min", model: "gpt-5", budget: 1025, want: "medium", ok: true},
|
| 740 |
+
{name: "medium-max", model: "gpt-5", budget: 8192, want: "medium", ok: true},
|
| 741 |
+
{name: "high-min", model: "gpt-5", budget: 8193, want: "high", ok: true},
|
| 742 |
+
{name: "high-max", model: "gpt-5", budget: 24576, want: "high", ok: true},
|
| 743 |
+
{name: "over-max-clamps-to-highest", model: "gpt-5", budget: 64000, want: "high", ok: true},
|
| 744 |
+
{name: "over-max-xhigh-model", model: "gpt-5.2", budget: 64000, want: "xhigh", ok: true},
|
| 745 |
+
{name: "negative-unsupported", model: "gpt-5", budget: -5, want: "", ok: false},
|
| 746 |
+
}
|
| 747 |
+
|
| 748 |
+
for _, cs := range cases {
|
| 749 |
+
cs := cs
|
| 750 |
+
t.Run(cs.name, func(t *testing.T) {
|
| 751 |
+
got, ok := util.ThinkingBudgetToEffort(cs.model, cs.budget)
|
| 752 |
+
if ok != cs.ok {
|
| 753 |
+
t.Fatalf("ok mismatch for model=%s budget=%d: expect %v got %v", cs.model, cs.budget, cs.ok, ok)
|
| 754 |
+
}
|
| 755 |
+
if got != cs.want {
|
| 756 |
+
t.Fatalf("value mismatch for model=%s budget=%d: expect %q got %q", cs.model, cs.budget, cs.want, got)
|
| 757 |
+
}
|
| 758 |
+
})
|
| 759 |
+
}
|
| 760 |
+
}
|
| 761 |
+
|
| 762 |
+
func TestThinkingEffortToBudget(t *testing.T) {
|
| 763 |
+
cleanup := registerCoreModels(t)
|
| 764 |
+
defer cleanup()
|
| 765 |
+
|
| 766 |
+
cases := []struct {
|
| 767 |
+
name string
|
| 768 |
+
model string
|
| 769 |
+
effort string
|
| 770 |
+
want int
|
| 771 |
+
ok bool
|
| 772 |
+
}{
|
| 773 |
+
{name: "none", model: "gemini-2.5-pro", effort: "none", want: 0, ok: true},
|
| 774 |
+
{name: "auto", model: "gemini-2.5-pro", effort: "auto", want: -1, ok: true},
|
| 775 |
+
{name: "minimal", model: "gemini-2.5-pro", effort: "minimal", want: 512, ok: true},
|
| 776 |
+
{name: "low", model: "gemini-2.5-pro", effort: "low", want: 1024, ok: true},
|
| 777 |
+
{name: "medium", model: "gemini-2.5-pro", effort: "medium", want: 8192, ok: true},
|
| 778 |
+
{name: "high", model: "gemini-2.5-pro", effort: "high", want: 24576, ok: true},
|
| 779 |
+
{name: "xhigh", model: "gemini-2.5-pro", effort: "xhigh", want: 32768, ok: true},
|
| 780 |
+
{name: "empty-unsupported", model: "gemini-2.5-pro", effort: "", want: 0, ok: false},
|
| 781 |
+
{name: "invalid-unsupported", model: "gemini-2.5-pro", effort: "ultra", want: 0, ok: false},
|
| 782 |
+
{name: "case-insensitive", model: "gemini-2.5-pro", effort: "LOW", want: 1024, ok: true},
|
| 783 |
+
{name: "case-insensitive-medium", model: "gemini-2.5-pro", effort: "MEDIUM", want: 8192, ok: true},
|
| 784 |
+
}
|
| 785 |
+
|
| 786 |
+
for _, cs := range cases {
|
| 787 |
+
cs := cs
|
| 788 |
+
t.Run(cs.name, func(t *testing.T) {
|
| 789 |
+
got, ok := util.ThinkingEffortToBudget(cs.model, cs.effort)
|
| 790 |
+
if ok != cs.ok {
|
| 791 |
+
t.Fatalf("ok mismatch for model=%s effort=%s: expect %v got %v", cs.model, cs.effort, cs.ok, ok)
|
| 792 |
+
}
|
| 793 |
+
if got != cs.want {
|
| 794 |
+
t.Fatalf("value mismatch for model=%s effort=%s: expect %d got %d", cs.model, cs.effort, cs.want, got)
|
| 795 |
+
}
|
| 796 |
+
})
|
| 797 |
+
}
|
| 798 |
+
}
|