NitinBot001 committed on
Commit
7936c8d
·
verified ·
1 Parent(s): 9018891

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +22 -14
app.py CHANGED
@@ -2,18 +2,20 @@ from flask import Flask, request, jsonify, render_template
2
  import os
3
  from flask_cors import CORS
4
  from googletrans import Translator
5
- from openai import OpenAI
6
 
7
- # Load API key and base URL from environment variables
8
- OPENAI_API_KEY = os.getenv("OPENAI_API_KEY") or os.getenv("GEMINI_API_KEY")
9
- if not OPENAI_API_KEY:
10
- raise ValueError("OpenAI API Key is missing. Set it in environment variables.")
 
11
 
12
- OPENAI_API_BASE = os.getenv("OPENAI_API_BASE", "https://generativelanguage.googleapis.com/v1beta/openai/") # Default is standard OpenAI
13
- OPENAI_MODEL = os.getenv("OPENAI_MODEL", "gemini-2.5-flash") # Default model
 
14
 
15
- # Configure OpenAI client (supports custom base url for OpenAI-compatible APIs)
16
- client = OpenAI(api_key=OPENAI_API_KEY, base_url=OPENAI_API_BASE)
17
 
18
  # Set the static folder path to the "static" folder
19
  STATIC_FOLDER = os.path.join(os.path.dirname(__file__), "static")
@@ -71,12 +73,17 @@ def chat():
71
  {"role": "user", "content": f"Context Data:\n{CONTEXT_DATA}\n\nUser Query: {user_message}"}
72
  ]
73
 
74
- # Call OpenAI (or OpenAI-compatible) chat API
75
- response = client.chat.completions.create(
76
- model=OPENAI_MODEL,
 
77
  messages=messages,
78
- temperature=0.7
 
 
79
  )
 
 
80
  ai_response = response.choices[0].message.content.strip()
81
 
82
  if target_lang.lower() != "en":
@@ -84,8 +91,9 @@ def chat():
84
 
85
  return jsonify({"response": ai_response})
86
  except Exception as e:
 
87
  return jsonify({"error": str(e)}), 500
88
 
89
 
90
  if __name__ == '__main__':
91
- app.run(debug=True, host='0.0.0.0', port=7860, threaded=True)
 
2
  import os
3
  from flask_cors import CORS
4
  from googletrans import Translator
5
+ import litellm # MODIFIED: Import litellm instead of OpenAI
6
 
7
+ # Load API key, base URL, and model from environment variables
8
+ # LiteLLM can automatically read OPENAI_API_KEY, but we'll read it explicitly for clarity
9
+ API_KEY = os.getenv("OPENAI_API_KEY") or os.getenv("GEMINI_API_KEY")
10
+ if not API_KEY:
11
+ raise ValueError("API Key is missing. Set OPENAI_API_KEY or GEMINI_API_KEY in environment variables.")
12
 
13
+ # Note: For litellm, the parameter is 'api_base' not 'base_url'
14
+ API_BASE = os.getenv("OPENAI_API_BASE", "https://generativelanguage.googleapis.com/v1beta/openai/")
15
+ MODEL_NAME = os.getenv("OPENAI_MODEL", "gemini-1.5-flash") # Default model
16
 
17
+ # REMOVED: No need to instantiate a client with litellm
18
+ # client = OpenAI(api_key=OPENAI_API_KEY, base_url=OPENAI_API_BASE)
19
 
20
  # Set the static folder path to the "static" folder
21
  STATIC_FOLDER = os.path.join(os.path.dirname(__file__), "static")
 
73
  {"role": "user", "content": f"Context Data:\n{CONTEXT_DATA}\n\nUser Query: {user_message}"}
74
  ]
75
 
76
+ # MODIFIED: Call litellm.completion directly
77
+ # Pass the model, messages, api_key, and api_base here
78
+ response = litellm.completion(
79
+ model=MODEL_NAME,
80
  messages=messages,
81
+ temperature=0.7,
82
+ api_key=API_KEY,
83
+ api_base=API_BASE
84
  )
85
+
86
+ # The response structure is the same as OpenAI's, so this part doesn't change
87
  ai_response = response.choices[0].message.content.strip()
88
 
89
  if target_lang.lower() != "en":
 
91
 
92
  return jsonify({"response": ai_response})
93
  except Exception as e:
94
+ # LiteLLM can raise specific exceptions, but catching the general one is fine
95
  return jsonify({"error": str(e)}), 500
96
 
97
 
98
  if __name__ == '__main__':
99
+ app.run(debug=True, host='0.0.0.0', port=7860, threaded=True)