import type { Context, Next } from "hono";
import type { StatusCode } from "hono/utils/http-status";
import type { OpenAIErrorBody } from "../types/openai.js";
import type { AnthropicErrorBody, AnthropicErrorType } from "../types/anthropic.js";
import { GEMINI_STATUS_MAP } from "../types/gemini.js";
/**
 * Build an OpenAI-compatible error response body.
 *
 * @param message - Human-readable error description.
 * @param type    - OpenAI error type string (e.g. "server_error").
 * @param code    - Machine-readable error code, or null when none applies.
 * @returns The `{ error: { ... } }` envelope OpenAI clients expect.
 */
function makeOpenAIError(
  message: string,
  type: string,
  code: string | null,
): OpenAIErrorBody {
  // `param` is always null here: these errors are never tied to a
  // specific request parameter.
  const error = { message, type, param: null, code };
  return { error };
}
/**
 * Build an Anthropic Messages API error envelope.
 *
 * @param message   - Human-readable error description.
 * @param errorType - Anthropic error discriminator (e.g. "api_error").
 * @returns The `{ type: "error", error: { ... } }` shape Anthropic clients expect.
 */
function makeAnthropicError(
  message: string,
  errorType: AnthropicErrorType,
): AnthropicErrorBody {
  return {
    type: "error",
    error: {
      type: errorType,
      message,
    },
  };
}
/** Shape of a Gemini API error response body. */
interface GeminiErrorBody {
  error: {
    code: number;
    message: string;
    status: string;
  };
}

/**
 * Build a Gemini-style error payload.
 *
 * @param code    - Numeric HTTP-like status code.
 * @param message - Human-readable error description.
 * @param status  - Gemini status string (e.g. "INTERNAL").
 * @returns The `{ error: { code, message, status } }` envelope.
 */
function makeGeminiError(
  code: number,
  message: string,
  status: string,
): GeminiErrorBody {
  const error = { code, message, status };
  return { error };
}
/**
 * Global error-handling middleware.
 *
 * Awaits the downstream handler and, if it throws, converts the error into
 * the error format matching the API surface the client was calling:
 *   - paths starting with "/v1/messages" -> Anthropic error envelope
 *   - paths starting with "/v1beta/"     -> Gemini error envelope
 *   - anything else                      -> OpenAI-style error body
 *
 * A thrown error may carry a numeric `status` property (e.g. from an
 * upstream fetch); 401 and 429 are passed through, >= 500 maps to 502,
 * and anything else falls back to 500.
 *
 * @param c    - Hono request context.
 * @param next - Next middleware/handler in the chain.
 */
export async function errorHandler(c: Context, next: Next): Promise<void> {
  try {
    await next();
  } catch (err: unknown) {
    const message = err instanceof Error ? err.message : "Internal server error";
    console.error("[ErrorHandler]", err instanceof Error ? (err.stack ?? message) : message);
    // Read `status` defensively: JS allows throwing any value, and a bare
    // property access on a thrown `null`/`undefined` would itself raise a
    // TypeError inside this catch block (the `as` cast is compile-time only).
    const status =
      typeof err === "object" && err !== null
        ? (err as { status?: number }).status
        : undefined;
    const path = c.req.path;
    // Anthropic Messages API errors
    if (path.startsWith("/v1/messages")) {
      if (status === 401) {
        c.status(401);
        return c.json(
          makeAnthropicError(
            "Invalid or expired token. Please re-authenticate.",
            "authentication_error",
          ),
        ) as never;
      }
      if (status === 429) {
        c.status(429);
        return c.json(
          makeAnthropicError(
            "Rate limit exceeded. Please try again later.",
            "rate_limit_error",
          ),
        ) as never;
      }
      if (status && status >= 500) {
        // Upstream failure: surface as 502 Bad Gateway, not our own 500.
        c.status(502);
        return c.json(
          makeAnthropicError(`Upstream server error: ${message}`, "api_error"),
        ) as never;
      }
      c.status(500);
      return c.json(makeAnthropicError(message, "api_error")) as never;
    }
    // Gemini API errors
    if (path.startsWith("/v1beta/")) {
      const code = status ?? 500;
      const geminiStatus = GEMINI_STATUS_MAP[code] ?? "INTERNAL";
      // Only echo `code` as the HTTP status when it is a valid error code;
      // anything out of the 4xx/5xx range degrades to a plain 500.
      c.status((code >= 400 && code < 600 ? code : 500) as StatusCode);
      return c.json(makeGeminiError(code, message, geminiStatus)) as never;
    }
    // Default: OpenAI-format errors
    if (status === 401) {
      c.status(401);
      return c.json(
        makeOpenAIError(
          "Invalid or expired ChatGPT token. Please re-authenticate.",
          "invalid_request_error",
          "invalid_api_key",
        ),
      ) as never;
    }
    if (status === 429) {
      c.status(429);
      return c.json(
        makeOpenAIError(
          "Rate limit exceeded. Please try again later.",
          "rate_limit_error",
          "rate_limit_exceeded",
        ),
      ) as never;
    }
    if (status && status >= 500) {
      // Upstream failure: surface as 502 Bad Gateway, not our own 500.
      c.status(502);
      return c.json(
        makeOpenAIError(
          `Upstream server error: ${message}`,
          "server_error",
          "server_error",
        ),
      ) as never;
    }
    c.status(500);
    return c.json(
      makeOpenAIError(message, "server_error", "internal_error"),
    ) as never;
  }
}