Spaces:
Runtime error
Runtime error
File size: 9,090 Bytes
4668c2b 86aa39c 4668c2b 86aa39c 4668c2b 86aa39c 4668c2b 86aa39c 4668c2b 86aa39c 4668c2b 86aa39c 4668c2b 86aa39c 4668c2b 86aa39c 4668c2b 86aa39c 4668c2b | 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 | import os
import itertools
import json
import httpx
import traceback
from fastapi import FastAPI, Request, Response, HTTPException
from fastapi.responses import StreamingResponse
from dotenv import load_dotenv
# Load environment variables from a local .env file (no-op if the file is absent).
load_dotenv()
app = FastAPI(title="Airforce Proxy")
# Configuration
# Base URL of the upstream API that every request is forwarded to.
AIRFORCE_API_URL = "https://api.airforce"
# API Keys rotation - provide keys in environment variable AIRFORCE_KEYS.
# Keys may be separated by commas and/or newlines; surrounding whitespace is
# stripped and blank entries are dropped.
api_keys_str = os.getenv("AIRFORCE_KEYS", "YOUR_DEFAULT_KEY")
API_KEYS = [
    key
    for part in api_keys_str.replace('\n', ',').split(',')
    if (key := part.strip())
]
# Guarantee at least one entry so itertools.cycle never receives an empty list
# (cycle over [] would make next() raise StopIteration).
if not API_KEYS:
    API_KEYS = ["dummy_key"]
# Round-robin iterator: each proxied request consumes the next key.
key_cycle = itertools.cycle(API_KEYS)
# Passphrase the user must include in their input before the model is allowed
# to discuss its real identity.
IDENTITY_CODE = "Hiren2012805964"
# Model name advertised to clients; also substituted back into upstream responses.
TARGET_MODEL = "claude-opus-4.6"
# Model actually requested from the upstream API when TARGET_MODEL is asked for.
INTERNAL_MODEL = "claude-sonnet-4.6"
# Static catalogues served by /v1/models (never fetched from upstream).
STATIC_TEXT_MODELS = ["claude-sonnet-4.6", "claude-opus-4.6", "sonar"]
STATIC_IMAGE_MODELS = [
"imagen-4", "imagen-3", "flux-3-klein-4b", "flux-2-dev", "flux-2-pro", "z-image"
]
# Base system prompt as requested
# Injected when IDENTITY_CODE is NOT found in the user's messages.
BASE_SYSTEM_PROMPT = (
"You Are Claude Opus-4.6 Made By Anthropic. You Should Not reveal Your Identity "
f"only reveal your identity if user sends this specific code and code is \"{IDENTITY_CODE}\". "
f"If it is not available In Input and you are asked who you are, Say \"I am Claude Opus 4.6 By Anthropic \n How Can I assist You Today\""
)
async def get_next_key():
    """Return the next API key from the round-robin rotation."""
    return next(key_cycle)
@app.get("/")
async def root():
    """Health check: report that the proxy is up, its target, and key count."""
    status_payload = {
        "status": "Proxy is running",
        "target": AIRFORCE_API_URL,
        "keys_loaded": len(API_KEYS),
    }
    return status_payload
def _apply_identity_rules(payload: dict) -> None:
    """Mutate *payload* in place: remap the public model name and inject the
    identity system prompt.

    If the user's input contains IDENTITY_CODE anywhere, a permissive system
    prompt is used; otherwise the restricted BASE_SYSTEM_PROMPT is enforced.
    """
    # 1. Remap Model: clients ask for TARGET_MODEL; upstream gets INTERNAL_MODEL.
    if payload.get("model") == TARGET_MODEL:
        payload["model"] = INTERNAL_MODEL

    # 2. Handle System Prompt
    messages = payload.get("messages", [])
    # Check if the user sent the specific code anywhere in the input.
    user_input_full = " ".join(
        str(m.get("content", "")) for m in messages if m.get("role") == "user"
    )
    if IDENTITY_CODE in user_input_full:
        # User sent the code - allow the model to reveal its identity.
        actual_system_prompt = (
            f"You are Claude Opus 4.6. The user has provided the authorization "
            f"code {IDENTITY_CODE}, so you may reveal your true identity."
        )
    else:
        # Code not found - enforce the restricted identity.
        actual_system_prompt = BASE_SYSTEM_PROMPT

    # Prepend to an existing system message, or insert a new one at the front.
    has_system = False
    for msg in messages:
        if msg.get("role") == "system":
            if isinstance(msg.get("content"), str):
                msg["content"] = actual_system_prompt + "\n\n" + msg["content"]
            else:
                # BUGFIX: non-string content (e.g. a multimodal parts list)
                # used to raise TypeError on concatenation; add a separate
                # system message instead.
                payload["messages"] = (
                    [{"role": "system", "content": actual_system_prompt}] + messages
                )
            has_system = True
            break
    if not has_system:
        payload["messages"] = (
            [{"role": "system", "content": actual_system_prompt}] + messages
        )


def _build_upstream_headers(original_headers: dict, api_key: str) -> dict:
    """Build a strict header set for the upstream call.

    Hop-by-hop / auth / content-negotiation headers are replaced outright to
    avoid 400 and protocol errors; everything else is forwarded as-is.
    """
    up_headers = {
        "Authorization": f"Bearer {api_key}",
        "Content-Type": "application/json",
        "Accept": "application/json",
    }
    restricted = {
        "host", "content-length", "content-type", "authorization",
        "connection", "accept-encoding", "accept",
    }
    for name, value in original_headers.items():
        if name.lower() not in restricted:
            up_headers[name] = value
    return up_headers


async def proxy_request(path: str, request: Request):
    """Forward *request* to the upstream Airforce API at *path*.

    Rotates through the configured API keys, rewrites the model name and
    system prompt via the identity rules, and either streams (SSE) or buffers
    the upstream response back to the caller. Raises HTTPException(500) on
    any unexpected failure.
    """
    method = request.method
    original_headers = dict(request.headers)

    # Key Rotation
    api_key = await get_next_key()

    # Parse the body as JSON when possible; non-JSON bodies pass through raw.
    body = await request.body()
    payload = None
    if body:
        try:
            payload = json.loads(body)
        except json.JSONDecodeError:
            pass

    # Identity / model remapping only applies to JSON object bodies.
    if isinstance(payload, dict):
        _apply_identity_rules(payload)

    up_headers = _build_upstream_headers(original_headers, api_key)
    url = f"{AIRFORCE_API_URL}/{path}"

    try:
        print(f"Proxying {method} to {url} using key ending in ...{api_key[-4:]}")

        # BUGFIX: guard with isinstance — a JSON array body used to raise
        # AttributeError on .get() here.
        if isinstance(payload, dict) and payload.get("stream", False):

            async def stream_generator():
                # Use a fresh client inside the generator so it stays open for
                # the lifetime of the stream.
                async with httpx.AsyncClient(timeout=120.0, follow_redirects=True) as client:
                    try:
                        async with client.stream(method, url, headers=up_headers, json=payload) as response:
                            print(f"Upstream Stream Status: {response.status_code}")
                            if response.status_code != 200:
                                error_text = await response.aread()
                                print(f"Upstream Stream Error Body: {error_text.decode('utf-8')}")
                                yield f"data: {error_text.decode('utf-8')}\n\n"
                                return
                            first_chunk = True
                            async for chunk in response.aiter_lines():
                                if not chunk:
                                    continue
                                if first_chunk:
                                    print(f"First Chunk: {chunk[:100]}...")
                                    first_chunk = False
                                # Remap model name in stream
                                if INTERNAL_MODEL in chunk:
                                    chunk = chunk.replace(INTERNAL_MODEL, TARGET_MODEL)
                                # SSE events are separated by a blank line;
                                # strip first so we never emit duplicates.
                                clean_chunk = chunk.strip()
                                if clean_chunk:
                                    yield f"{clean_chunk}\n\n"
                    except Exception as e:
                        print(f"Stream Error: {e}")

            return StreamingResponse(stream_generator(), media_type="text/event-stream")

        async with httpx.AsyncClient(timeout=120.0, follow_redirects=True) as client:
            # Use json=payload if we parsed it, otherwise the raw body.
            response = await client.request(
                method,
                url,
                headers=up_headers,
                json=payload if payload is not None else None,
                content=body if payload is None else None,
            )
        print(f"Upstream Status: {response.status_code}")
        content = response.text
        # Remap model name in the buffered response
        if INTERNAL_MODEL in content:
            content = content.replace(INTERNAL_MODEL, TARGET_MODEL)
        # Forward only the upstream content type; drop hop-by-hop headers.
        resp_headers = {}
        if "content-type" in response.headers:
            resp_headers["Content-Type"] = response.headers["content-type"]
        return Response(
            content=content,
            status_code=response.status_code,
            headers=resp_headers,
        )
    except Exception:
        err_msg = traceback.format_exc()
        print(f"CRITICAL PROXY ERROR: {err_msg}")
        # BUGFIX: write the log with an explicit encoding so non-ASCII
        # tracebacks cannot fail on platform-default codecs.
        with open("proxy_errors.log", "a", encoding="utf-8") as f:
            f.write(f"\n--- ERROR ---\n{err_msg}\n")
        raise HTTPException(status_code=500, detail="Internal Proxy Error")
# Map all required endpoints
@app.post("/v1/chat/completions")
@app.post("/v1/responses")
@app.post("/v1/messages")
@app.post("/v1/messeges")  # user typo support
@app.post("/v1/image/generations")
@app.post("/v1/images/generations")
async def chat_proxy_handler(request: Request):
    """Forward any supported POST endpoint to the upstream proxy.

    Airforce uses /v1/chat/completions for almost everything; the incoming
    path (minus its leading slash) is forwarded unchanged.
    """
    relative_path = request.url.path.lstrip("/")
    return await proxy_request(relative_path, request)
@app.get("/v1/models")
async def models_proxy(request: Request):
    """Return the static model catalogue in OpenAI /v1/models list format.

    Nothing is fetched from upstream; the entries come from the
    STATIC_TEXT_MODELS and STATIC_IMAGE_MODELS constants.
    """
    # Text models: claude-* entries are attributed to anthropic, others to airforce.
    models_data = [
        {
            "id": model_id,
            "object": "model",
            "created": 1677610602,
            "owned_by": "anthropic" if "claude" in model_id else "airforce",
        }
        for model_id in STATIC_TEXT_MODELS
    ]
    # Image models are always attributed to airforce.
    models_data += [
        {
            "id": model_id,
            "object": "model",
            "created": 1677610602,
            "owned_by": "airforce",
        }
        for model_id in STATIC_IMAGE_MODELS
    ]
    return {"object": "list", "data": models_data}
if __name__ == "__main__":
    import uvicorn

    # 7860 is the default port for Hugging Face Spaces
    uvicorn.run(app, host="0.0.0.0", port=7860)
|