import { OpenAIService } from "./openai-service";

// Single OpenAIService instance, created at module load and shared by every
// request handled in the server's fetch() below.
const openAIService = new OpenAIService();

const server = Bun.serve({
  port: process.env.PORT || 3000,

  /**
   * Handle all incoming HTTP requests.
   *
   * Flow: bearer-token auth (preflight exempt) → request logging → CORS
   * preflight → routing (/health, /v1/models, /v1/chat/completions) →
   * error mapping (validation errors → 400, everything else → 500).
   *
   * @param {Request} req - Incoming request.
   * @returns {Promise<Response>} JSON (or SSE stream) response; every
   *   response carries the CORS headers so browser clients can read it.
   */
  async fetch(req) {
    // Define CORS headers before any response is built so the 401 below
    // also carries them — previously the Unauthorized response lacked both
    // CORS and Content-Type headers, making the error unreadable from
    // browser clients.
    const corsHeaders = {
      "Access-Control-Allow-Origin": "*",
      "Access-Control-Allow-Methods": "GET, POST, OPTIONS",
      "Access-Control-Allow-Headers": "Content-Type, Authorization",
    };

    // FIXME(security): never ship a hardcoded fallback secret. Require
    // API_KEY to be set in the environment and fail closed when missing.
    const API_KEY = process.env.API_KEY || "MySecretKey_12345";
    const authHeader = req.headers.get("Authorization");

    // OPTIONS (CORS preflight) is exempt from auth: browsers send it
    // without custom headers.
    // NOTE(review): `!==` is not a constant-time comparison; consider a
    // timing-safe check if this key guards anything sensitive.
    if (req.method !== "OPTIONS" && authHeader !== `Bearer ${API_KEY}`) {
      return new Response(JSON.stringify({ error: "Unauthorized" }), {
        status: 401,
        headers: { "Content-Type": "application/json", ...corsHeaders },
      });
    }

    console.log(`Received request: ${req.method} ${req.url}`);
    const url = new URL(req.url);

    // Answer CORS preflight with the allow-headers and no body.
    if (req.method === "OPTIONS") {
      return new Response(null, { headers: corsHeaders });
    }

    try {
      // Health check endpoint
      if (url.pathname === "/health" && req.method === "GET") {
        return new Response(JSON.stringify({ status: "ok" }), {
          headers: { "Content-Type": "application/json", ...corsHeaders },
        });
      }

      // Models endpoint
      if (url.pathname === "/v1/models" && req.method === "GET") {
        const models = openAIService.getModels();
        return new Response(JSON.stringify(models), {
          headers: { "Content-Type": "application/json", ...corsHeaders },
        });
      }

      // Chat completions endpoint (streaming and non-streaming)
      if (url.pathname === "/v1/chat/completions" && req.method === "POST") {
        // req.json() and validateRequest() may throw; both are mapped to a
        // 400/500 JSON error by the catch below.
        const body = await req.json();
        const validatedRequest = openAIService.validateRequest(body);

        // Streaming: return the SSE stream with keep-alive headers.
        if (validatedRequest.stream) {
          const stream =
            await openAIService.createChatCompletionStream(validatedRequest);
          return new Response(stream, {
            headers: {
              "Content-Type": "text/event-stream",
              "Cache-Control": "no-cache",
              Connection: "keep-alive",
              ...corsHeaders,
            },
          });
        }

        // Non-streaming: single JSON completion object.
        const completion =
          await openAIService.createChatCompletion(validatedRequest);
        return new Response(JSON.stringify(completion), {
          headers: { "Content-Type": "application/json", ...corsHeaders },
        });
      }

      // 404 for unknown endpoints, in OpenAI error-envelope shape.
      return new Response(
        JSON.stringify({
          error: {
            message: "Not found",
            type: "invalid_request_error",
          },
        }),
        {
          status: 404,
          headers: { "Content-Type": "application/json", ...corsHeaders },
        }
      );
    } catch (error) {
      console.error("Server error:", error);

      const errorMessage =
        error instanceof Error ? error.message : "Internal server error";
      // Heuristic: validation messages from validateRequest() contain
      // "required"/"must" and map to 400; anything else is a 500.
      const statusCode =
        errorMessage.includes("required") || errorMessage.includes("must")
          ? 400
          : 500;

      return new Response(
        JSON.stringify({
          error: {
            message: errorMessage,
            type:
              statusCode === 400
                ? "invalid_request_error"
                : "internal_server_error",
          },
        }),
        {
          status: statusCode,
          headers: { "Content-Type": "application/json", ...corsHeaders },
        }
      );
    }
  },
});

// Startup banner: listening address, available routes, and a sample request.
// One console.log per line keeps the output identical to logging each
// string individually.
const bannerLines = [
  `🚀 OpenAI-compatible server running on http://localhost:${server.port}`,
  `📚 Available endpoints:`,
  `  GET  /health - Health check`,
  `  GET  /v1/models - List available models`,
  `  POST /v1/chat/completions - Chat completions (streaming & non-streaming)`,
  `\n🔧 Example usage:`,
  `curl -X POST http://localhost:${server.port}/v1/chat/completions \\`,
  `  -H "Content-Type: application/json" \\`,
  `  -d '{"model":"gpt-4o-mini","messages":[{"role":"user","content":"Hello!"}]}'`,
];
for (const line of bannerLines) {
  console.log(line);
}