huijio committed on
Commit
7a7336f
·
verified ·
1 Parent(s): 48d619c

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +196 -27
app.py CHANGED
@@ -27,16 +27,6 @@ def home():
27
  @app.route('/groq', methods=['POST'])
28
  def proxy_to_groq():
29
  """Main proxy endpoint - keeps /groq URL but uses iFlow API internally"""
30
- return proxy_to_iflow_internal()
31
-
32
- @app.route('/iflow', methods=['POST'])
33
- @app.route('/chat/completions', methods=['POST'])
34
- def proxy_to_iflow():
35
- """Alternative endpoints for iFlow API"""
36
- return proxy_to_iflow_internal()
37
-
38
- def proxy_to_iflow_internal():
39
- """Internal function that handles the iFlow API proxy logic"""
40
  try:
41
  # Validate proxy authentication from header
42
  auth_header = request.headers.get('X-API-Key') or request.headers.get('Authorization')
@@ -232,24 +222,35 @@ def proxy_to_iflow_internal():
232
  iflow_response = response.json()
233
  logger.info(f"iFlow response received successfully")
234
 
235
- # For /groq endpoint compatibility, return plain text
236
- if request.path == '/groq':
237
- # Extract content from iFlow's response
238
- if 'choices' in iflow_response and len(iflow_response['choices']) > 0:
239
- choice = iflow_response['choices'][0]
240
- if 'message' in choice:
241
- content = choice['message'].get('content', '')
242
-
243
- if not content:
244
- logger.warning("Empty content in iFlow response")
245
- content = "No response content"
246
-
247
- # Return plain text for /groq endpoint
248
- logger.info("Successfully returning response to app via /groq")
249
- return content, 200, {'Content-Type': 'text/plain'}
 
 
 
 
 
 
250
  else:
251
- # For /iflow and /chat/completions endpoints, return JSON
252
- return jsonify(iflow_response), 200
 
 
 
 
 
253
 
254
  except requests.exceptions.Timeout:
255
  logger.error("iFlow API request timeout")
@@ -284,6 +285,172 @@ def proxy_to_iflow_internal():
284
  }
285
  }), 500
286
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
287
  @app.route('/v1/models', methods=['GET'])
288
  def list_models():
289
  """List available models (OpenAI-compatible endpoint)"""
@@ -421,6 +588,8 @@ def health_check():
421
  timeout=15
422
  )
423
  health_status["iflow_api_accessible"] = test_response.status_code == 200
 
 
424
  except Exception as e:
425
  health_status["iflow_api_accessible"] = False
426
  health_status["iflow_api_error"] = str(e)[:100]
 
27
  @app.route('/groq', methods=['POST'])
28
  def proxy_to_groq():
29
  """Main proxy endpoint - keeps /groq URL but uses iFlow API internally"""
 
 
 
 
 
 
 
 
 
 
30
  try:
31
  # Validate proxy authentication from header
32
  auth_header = request.headers.get('X-API-Key') or request.headers.get('Authorization')
 
222
  iflow_response = response.json()
223
  logger.info(f"iFlow response received successfully")
224
 
225
+ # Extract content from iFlow's response
226
+ if 'choices' in iflow_response and len(iflow_response['choices']) > 0:
227
+ choice = iflow_response['choices'][0]
228
+ if 'message' in choice:
229
+ content = choice['message'].get('content', '')
230
+
231
+ if not content:
232
+ logger.warning("Empty content in iFlow response")
233
+ content = "No response content"
234
+
235
+ # Return plain text for /groq endpoint (original format)
236
+ logger.info("Successfully returning response to app via /groq")
237
+ return content, 200, {'Content-Type': 'text/plain'}
238
+ else:
239
+ logger.error("No message in iFlow response choice")
240
+ return jsonify({
241
+ 'error': {
242
+ 'message': 'Invalid response format from iFlow API',
243
+ 'type': 'api_error'
244
+ }
245
+ }), 500
246
  else:
247
+ logger.error("No choices in iFlow response")
248
+ return jsonify({
249
+ 'error': {
250
+ 'message': 'Invalid response format from iFlow API',
251
+ 'type': 'api_error'
252
+ }
253
+ }), 500
254
 
255
  except requests.exceptions.Timeout:
256
  logger.error("iFlow API request timeout")
 
285
  }
286
  }), 500
287
 
288
+ @app.route('/iflow', methods=['POST'])
289
+ @app.route('/chat/completions', methods=['POST'])
290
+ def proxy_to_iflow():
291
+ """Alternative endpoints for iFlow API (returns JSON format)"""
292
+ try:
293
+ # Validate proxy authentication from header
294
+ auth_header = request.headers.get('X-API-Key') or request.headers.get('Authorization')
295
+ if auth_header:
296
+ # Check if it's Bearer token format
297
+ if auth_header.startswith('Bearer '):
298
+ auth_key = auth_header.replace('Bearer ', '')
299
+ else:
300
+ auth_key = auth_header
301
+
302
+ if auth_key != PROXY_AUTH_KEY:
303
+ logger.warning("Unauthorized request with invalid API key")
304
+ return jsonify({
305
+ 'error': {
306
+ 'message': 'Invalid authentication credentials',
307
+ 'type': 'invalid_request_error',
308
+ 'code': 'invalid_api_key'
309
+ }
310
+ }), 401
311
+ else:
312
+ logger.warning("No authentication header provided")
313
+ return jsonify({
314
+ 'error': {
315
+ 'message': 'You must provide an API key',
316
+ 'type': 'invalid_request_error',
317
+ 'code': 'missing_api_key'
318
+ }
319
+ }), 401
320
+
321
+ # Get the Gemini format payload from app
322
+ gemini_payload = request.get_json()
323
+ if not gemini_payload:
324
+ logger.error("No JSON payload received")
325
+ return jsonify({
326
+ 'error': {
327
+ 'message': 'No JSON payload received',
328
+ 'type': 'invalid_request_error'
329
+ }
330
+ }), 400
331
+
332
+ # Check for streaming request
333
+ stream = gemini_payload.get('stream', False)
334
+
335
+ # Transform Gemini format to iFlow (OpenAI-compatible) format
336
+ iflow_payload = {
337
+ 'model': gemini_payload.get('model', DEFAULT_MODEL),
338
+ 'messages': [],
339
+ 'stream': stream
340
+ }
341
+
342
+ # Copy common parameters if present
343
+ common_params = ['temperature', 'max_tokens', 'top_p', 'frequency_penalty',
344
+ 'presence_penalty', 'stop', 'seed', 'response_format']
345
+
346
+ for param in common_params:
347
+ if param in gemini_payload:
348
+ iflow_payload[param] = gemini_payload[param]
349
+
350
+ # Convert messages from Gemini parts format to OpenAI content format
351
+ messages = gemini_payload.get('messages', [])
352
+ if not messages:
353
+ logger.error("No messages in payload")
354
+ return jsonify({
355
+ 'error': {
356
+ 'message': 'No messages in payload',
357
+ 'type': 'invalid_request_error'
358
+ }
359
+ }), 400
360
+
361
+ for msg in messages:
362
+ # Extract text from parts array
363
+ parts = msg.get('parts', [])
364
+
365
+ # Handle both Gemini format (parts array) and OpenAI format (content string)
366
+ if parts:
367
+ # Gemini format: parts array with text
368
+ content_parts = []
369
+ for part in parts:
370
+ text = part.get('text', '')
371
+ if text:
372
+ content_parts.append(text)
373
+
374
+ if not content_parts:
375
+ logger.warning(f"Message with role '{msg.get('role')}' has no text content")
376
+ continue
377
+
378
+ content = '\n'.join(content_parts)
379
+ else:
380
+ # OpenAI format: direct content field
381
+ content = msg.get('content', '')
382
+ if not content:
383
+ logger.warning(f"Message with role '{msg.get('role')}' has no content")
384
+ continue
385
+
386
+ # Map Gemini roles to OpenAI roles
387
+ role = msg.get('role')
388
+ if role == 'model':
389
+ role = 'assistant' # OpenAI uses 'assistant', not 'model'
390
+
391
+ # Handle function calls if present (for OpenAI-compatible format)
392
+ message_dict = {
393
+ 'role': role,
394
+ 'content': content
395
+ }
396
+
397
+ # Copy any additional fields that might be present
398
+ additional_fields = ['name', 'function_call', 'tool_calls', 'tool_call_id']
399
+ for field in additional_fields:
400
+ if field in msg:
401
+ message_dict[field] = msg[field]
402
+
403
+ iflow_payload['messages'].append(message_dict)
404
+
405
+ if not iflow_payload['messages']:
406
+ logger.error("No valid messages after transformation")
407
+ return jsonify({
408
+ 'error': {
409
+ 'message': 'No valid messages in payload',
410
+ 'type': 'invalid_request_error'
411
+ }
412
+ }), 400
413
+
414
+ # Call iFlow API
415
+ headers = {
416
+ 'Authorization': f'Bearer {IFLOW_API_KEY}',
417
+ 'Content-Type': 'application/json',
418
+ 'User-Agent': 'iFlow-Proxy/1.0'
419
+ }
420
+
421
+ response = requests.post(
422
+ IFLOW_API_URL,
423
+ json=iflow_payload,
424
+ headers=headers,
425
+ timeout=60
426
+ )
427
+
428
+ if response.status_code != 200:
429
+ error_text = response.text[:500]
430
+ logger.error(f"iFlow API error: {response.status_code} - {error_text}")
431
+ try:
432
+ error_json = response.json()
433
+ return jsonify(error_json), response.status_code
434
+ except:
435
+ return jsonify({
436
+ 'error': {
437
+ 'message': f'iFlow API error: {response.status_code}',
438
+ 'type': 'api_error'
439
+ }
440
+ }), response.status_code
441
+
442
+ # Return the iFlow response directly (OpenAI-compatible JSON)
443
+ return jsonify(response.json()), 200
444
+
445
+ except Exception as e:
446
+ logger.error(f"Error in iflow endpoint: {e}")
447
+ return jsonify({
448
+ 'error': {
449
+ 'message': f'Internal server error: {str(e)}',
450
+ 'type': 'server_error'
451
+ }
452
+ }), 500
453
+
454
  @app.route('/v1/models', methods=['GET'])
455
  def list_models():
456
  """List available models (OpenAI-compatible endpoint)"""
 
588
  timeout=15
589
  )
590
  health_status["iflow_api_accessible"] = test_response.status_code == 200
591
+ if test_response.status_code == 200:
592
+ health_status["iflow_api_test"] = "Connection successful"
593
  except Exception as e:
594
  health_status["iflow_api_accessible"] = False
595
  health_status["iflow_api_error"] = str(e)[:100]