NitinBot001 committed on
Commit
d40b72f
·
verified ·
1 Parent(s): 4025023

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +22 -27
app.py CHANGED
@@ -1,21 +1,25 @@
1
  from flask import Flask, request, jsonify, render_template
2
  import os
 
 
 
 
3
  from flask_cors import CORS
4
- from googletrans import Translator
5
- import litellm # MODIFIED: Import litellm instead of OpenAI
 
 
6
 
7
- # Load API key, base URL, and model from environment variables
8
- # LiteLLM can automatically read OPENAI_API_KEY, but we'll read it explicitly for clarity
9
- API_KEY = os.getenv("OPENAI_API_KEY") or os.getenv("GEMINI_API_KEY")
10
- if not API_KEY:
11
- raise ValueError("API Key is missing. Set OPENAI_API_KEY or GEMINI_API_KEY in environment variables.")
12
 
13
- # Note: For litellm, the parameter is 'api_base' not 'base_url'
14
- API_BASE = os.getenv("OPENAI_API_BASE", "https://generativelanguage.googleapis.com/v1beta/openai/")
15
- MODEL_NAME = os.getenv("OPENAI_MODEL", "gemini-1.5-flash") # Default model
16
 
17
- # REMOVED: No need to instantiate a client with litellm
18
- # client = OpenAI(api_key=OPENAI_API_KEY, base_url=OPENAI_API_BASE)
19
 
20
  # Set the static folder path to the "static" folder
21
  STATIC_FOLDER = os.path.join(os.path.dirname(__file__), "static")
@@ -44,13 +48,10 @@ Response Rules:
44
  app = Flask(__name__, template_folder="templates", static_folder="static")
45
  CORS(app)
46
 
47
- # Translator instance
48
- translator = Translator()
49
 
50
  def translate_text(text, target_lang):
51
  try:
52
- translated = translator.translate(text, dest=target_lang)
53
- return translated.text
54
  except Exception as e:
55
  return f"Translation error: {str(e)}"
56
 
@@ -73,17 +74,12 @@ def chat():
73
  {"role": "user", "content": f"Context Data:\n{CONTEXT_DATA}\n\nUser Query: {user_message}"}
74
  ]
75
 
76
- # MODIFIED: Call litellm.completion directly
77
- # Pass the model, messages, api_key, and api_base here
78
- response = litellm.completion(
79
- model=MODEL_NAME,
80
  messages=messages,
81
- temperature=0.7,
82
- api_key=API_KEY,
83
- api_base=API_BASE
84
  )
85
-
86
- # The response structure is the same as OpenAI's, so this part doesn't change
87
  ai_response = response.choices[0].message.content.strip()
88
 
89
  if target_lang.lower() != "en":
@@ -91,9 +87,8 @@ def chat():
91
 
92
  return jsonify({"response": ai_response})
93
  except Exception as e:
94
- # LiteLLM can raise specific exceptions, but catching the general one is fine
95
  return jsonify({"error": str(e)}), 500
96
 
97
 
98
  if __name__ == '__main__':
99
- app.run(debug=True, host='0.0.0.0', port=7860, threaded=True)
 
1
  from flask import Flask, request, jsonify, render_template
2
  import os
3
+ import subprocess
4
+ import sys
5
+ subprocess.check_call(["pip", "install", "", "openai"])
6
+ subprocess.check_call(["pip", "install", "--upgrade", "openai"])
7
  from flask_cors import CORS
8
+ from openai import *
9
+ import dotenv
10
+ # Load environment variables from .env file
11
+ dotenv.load_dotenv()
12
 
13
+ # Load API key and base URL from environment variables
14
+ OPENAI_API_KEY = os.getenv("OPENAI_API_KEY") or os.getenv("GEMINI_API_KEY")
15
+ if not OPENAI_API_KEY:
16
+ raise ValueError("OpenAI API Key is missing. Set it in environment variables.")
 
17
 
18
+ OPENAI_API_BASE = os.getenv("OPENAI_API_BASE", "https://generativelanguage.googleapis.com/v1beta/openai/") # Default is standard OpenAI
19
+ OPENAI_MODEL = os.getenv("OPENAI_MODEL", "gemini-2.5-flash") # Default model
 
20
 
21
+ # Configure OpenAI client (supports custom base url for OpenAI-compatible APIs)
22
+ client = OpenAI(api_key=OPENAI_API_KEY, base_url=OPENAI_API_BASE)
23
 
24
  # Set the static folder path to the "static" folder
25
  STATIC_FOLDER = os.path.join(os.path.dirname(__file__), "static")
 
48
  app = Flask(__name__, template_folder="templates", static_folder="static")
49
  CORS(app)
50
 
 
 
51
 
52
def translate_text(text, target_lang):
    """Return *text* unchanged.

    Translation was dropped along with the googletrans dependency, so this is
    now a deliberate pass-through kept for interface compatibility with
    callers. ``target_lang`` is accepted but ignored.

    The previous ``try``/``except`` wrapper was dead code: a bare
    ``return text`` cannot raise, so the exception branch was unreachable
    and has been removed.
    """
    return text
57
 
 
74
  {"role": "user", "content": f"Context Data:\n{CONTEXT_DATA}\n\nUser Query: {user_message}"}
75
  ]
76
 
77
+ # Call OpenAI (or OpenAI-compatible) chat API
78
+ response = client.chat.completions.create(
79
+ model=OPENAI_MODEL,
 
80
  messages=messages,
81
+ temperature=0.7
 
 
82
  )
 
 
83
  ai_response = response.choices[0].message.content.strip()
84
 
85
  if target_lang.lower() != "en":
 
87
 
88
  return jsonify({"response": ai_response})
89
  except Exception as e:
 
90
  return jsonify({"error": str(e)}), 500
91
 
92
 
93
  if __name__ == '__main__':
94
+ app.run(debug=True, host='0.0.0.0', port=5000, threaded=True)