|
|
import { OpenAIService } from "./openai-service"; |
|
|
|
|
|
// Single shared service instance for the whole process. The fetch handler
// below delegates to it for model listing (getModels), request validation
// (validateRequest), and completions (createChatCompletion / ...Stream).
const openAIService = new OpenAIService();
|
|
|
|
|
// Bearer token clients must present. SECURITY FIX: no hardcoded fallback —
// the previous default ("MySecretKey_12345") meant any deployment that
// forgot to set API_KEY was protected by a publicly-visible secret. If the
// variable is unset we warn at startup and reject every authenticated request.
const API_KEY = process.env.API_KEY;
if (!API_KEY) {
  console.warn(
    "API_KEY environment variable is not set; all requests will be rejected with 401."
  );
}

// CORS headers are identical for every request, so build them once at
// startup instead of on each fetch.
const corsHeaders = {
  "Access-Control-Allow-Origin": "*",
  "Access-Control-Allow-Methods": "GET, POST, OPTIONS",
  "Access-Control-Allow-Headers": "Content-Type, Authorization",
};

// Every JSON response carries the same header set.
const jsonHeaders = { "Content-Type": "application/json", ...corsHeaders };

/**
 * OpenAI-compatible HTTP server.
 *
 * Routes:
 *   OPTIONS *                   - CORS preflight (no auth required)
 *   GET  /health                - liveness probe
 *   GET  /v1/models             - list available models
 *   POST /v1/chat/completions   - chat completions (streaming & non-streaming)
 *
 * All other routes return an OpenAI-style 404 error object.
 */
const server = Bun.serve({
  port: process.env.PORT || 3000,

  async fetch(req) {
    // Handle CORS preflight first — browsers send OPTIONS without credentials.
    if (req.method === "OPTIONS") {
      return new Response(null, { headers: corsHeaders });
    }

    // Authentication. FIX: include CORS + Content-Type headers on the 401
    // (the original bare Response was unreadable by browser clients).
    const authHeader = req.headers.get("Authorization");
    if (!API_KEY || authHeader !== `Bearer ${API_KEY}`) {
      return new Response(JSON.stringify({ error: "Unauthorized" }), {
        status: 401,
        headers: jsonHeaders,
      });
    }

    console.log(`Received request: ${req.method} ${req.url}`);

    const url = new URL(req.url);

    try {
      if (url.pathname === "/health" && req.method === "GET") {
        return new Response(JSON.stringify({ status: "ok" }), {
          headers: jsonHeaders,
        });
      }

      if (url.pathname === "/v1/models" && req.method === "GET") {
        const models = openAIService.getModels();
        return new Response(JSON.stringify(models), {
          headers: jsonHeaders,
        });
      }

      if (url.pathname === "/v1/chat/completions" && req.method === "POST") {
        // FIX: malformed JSON previously escaped into the generic catch and
        // surfaced as a 500; it is a client error and should be a 400.
        let body;
        try {
          body = await req.json();
        } catch {
          return new Response(
            JSON.stringify({
              error: {
                message: "Invalid JSON in request body",
                type: "invalid_request_error",
              },
            }),
            { status: 400, headers: jsonHeaders }
          );
        }

        const validatedRequest = openAIService.validateRequest(body);

        // Streaming mode: relay the SSE stream with the standard headers.
        if (validatedRequest.stream) {
          const stream =
            await openAIService.createChatCompletionStream(validatedRequest);
          return new Response(stream, {
            headers: {
              "Content-Type": "text/event-stream",
              "Cache-Control": "no-cache",
              Connection: "keep-alive",
              ...corsHeaders,
            },
          });
        }

        // Non-streaming mode: single JSON completion object.
        const completion =
          await openAIService.createChatCompletion(validatedRequest);
        return new Response(JSON.stringify(completion), {
          headers: jsonHeaders,
        });
      }

      // Unknown route: OpenAI-style 404 error object.
      return new Response(
        JSON.stringify({
          error: {
            message: "Not found",
            type: "invalid_request_error",
          },
        }),
        { status: 404, headers: jsonHeaders }
      );
    } catch (error) {
      console.error("Server error:", error);

      const errorMessage =
        error instanceof Error ? error.message : "Internal server error";
      // Heuristic: validation errors from openAIService mention "required"
      // or "must" and map to 400; everything else is a server fault.
      const statusCode =
        errorMessage.includes("required") || errorMessage.includes("must")
          ? 400
          : 500;

      return new Response(
        JSON.stringify({
          error: {
            message: errorMessage,
            type:
              statusCode === 400
                ? "invalid_request_error"
                : "internal_server_error",
          },
        }),
        { status: statusCode, headers: jsonHeaders }
      );
    }
  },
});
|
|
|
|
|
// Startup banner: listen address, available routes, and a sample curl call.
// (Strings are kept byte-identical to the original output.)
const startupLines = [
  `π OpenAI-compatible server running on http://localhost:${server.port}`,
  `π Available endpoints:`,
  `  GET  /health - Health check`,
  `  GET  /v1/models - List available models`,
  `  POST /v1/chat/completions - Chat completions (streaming & non-streaming)`,
  `\nπ§ Example usage:`,
  `curl -X POST http://localhost:${server.port}/v1/chat/completions \\`,
  `  -H "Content-Type: application/json" \\`,
  `  -d '{"model":"gpt-4o-mini","messages":[{"role":"user","content":"Hello!"}]}'`,
];

for (const line of startupLines) {
  console.log(line);
}
|
|
|