File size: 3,940 Bytes
a426e85
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
const express = require('express');
const helmet = require('helmet');
const cors = require('cors');
const rateLimit = require('express-rate-limit');
const { GoogleGenerativeAI } = require('@google/generative-ai');
const Database = require('better-sqlite3');

const app = express();
const PORT = process.env.PORT || 7860;

// Security
app.use(helmet());
app.use(cors());
app.use(express.json());

// Rate limiting
const limiter = rateLimit({
  windowMs: 15 * 60 * 1000, // 15 minutes
  max: 100, // Limit each IP to 100 requests per windowMs
  message: { error: { type: 'rate_limit_error', message: 'Too many requests' }}
});
app.use('/anthropic', limiter);

// Database for usage tracking
const db = new Database(':memory:');
db.exec(`CREATE TABLE IF NOT EXISTS usage ( id INTEGER PRIMARY KEY AUTOINCREMENT, api_key TEXT, input_tokens INTEGER, output_tokens INTEGER, timestamp DATETIME DEFAULT CURRENT_TIMESTAMP )`);

// Gemini client
const genAI = new GoogleGenerativeAI(process.env.GEMINI_API_KEY || '');

// Auth middleware
const authenticate = (req, res, next) => {
  const apiKey = req.headers['x-api-key'];
  if (!apiKey || apiKey !== process.env.PROXY_API_KEY) {
    return res.status(401).json({
      error: { type: 'authentication_error', message: 'Invalid API key' }
    });
  }
  req.apiKey = apiKey;
  next();
};

// Health check
app.get('/health', (req, res) => {
  res.json({ status: 'healthy', timestamp: new Date().toISOString() });
});

// Models endpoint
app.get('/anthropic/v1/models', authenticate, (req, res) => {
  res.json({
    data: [
      {
        id: 'claude-3-5-sonnet-20241022',
        name: 'Claude 3.5 Sonnet',
        type: 'model'
      }
    ]
  });
});

// Messages endpoint
app.post('/anthropic/v1/messages', authenticate, async (req, res) => {
  try {
    const { messages, max_tokens = 1024, stream = false } = req.body;

    if (!messages || !Array.isArray(messages)) {
      return res.status(400).json({
        error: { type: 'invalid_request_error', message: 'messages is required' }
      });
    }

    // Convert to Gemini format
    const prompt = messages.map(m => `${m.role}: ${m.content}`).join('\n');

    const model = genAI.getGenerativeModel({ model: 'gemini-2.0-flash-exp' });

    if (stream) {
      res.setHeader('Content-Type', 'text/event-stream');
      res.setHeader('Cache-Control', 'no-cache');
      res.setHeader('Connection', 'keep-alive');
      
      const result = await model.generateContentStream(prompt);
      
      for await (const chunk of result.stream) {
        const text = chunk.text();
        res.write(`event: content_block_delta\n`);
        res.write(`data: ${JSON.stringify({
          type: 'content_block_delta',
          delta: { type: 'text_delta', text }
        })}\n\n`);
      }
      
      res.write(`event: message_stop\n`);
      res.write(`data: {}\n\n`);
      res.end();
    } else {
      const result = await model.generateContent(prompt);
      const text = result.response.text();
      
      // Track usage
      const inputTokens = Math.ceil(prompt.length / 4);
      const outputTokens = Math.ceil(text.length / 4);
      
      db.prepare('INSERT INTO usage (api_key, input_tokens, output_tokens) VALUES (?, ?, ?)')
        .run(req.apiKey, inputTokens, outputTokens);
      
      res.json({
        id: `msg_${Date.now()}`,
        type: 'message',
        role: 'assistant',
        content: [{ type: 'text', text }],
        model: 'claude-3-5-sonnet-20241022',
        stop_reason: 'end_turn',
        usage: {
          input_tokens: inputTokens,
          output_tokens: outputTokens
        }
      });
    }
  } catch (error) {
    console.error('Error:', error);
    res.status(500).json({
      error: { type: 'api_error', message: error.message }
    });
  }
});

app.listen(PORT, () => {
  console.log(`🚀 Server running on port ${PORT}`);
  console.log(`📊 Health: http://localhost:${PORT}/health`);
});