from flask import Flask, request, jsonify
from flask_cors import CORS
from google import genai
from langchain_google_genai import GoogleGenerativeAIEmbeddings
from langchain_postgres import PGVector
import os
import re
import json
import threading
import logging
app = Flask(__name__)
CORS(app)

logging.basicConfig(
    level=logging.INFO,
    format='[%(asctime)s] %(levelname)s in %(module)s: %(message)s'
)

# Configuration comes entirely from the environment.
GOOGLE_API_KEY = os.getenv("API_KEY")        # Google Generative AI key
COLLECTION_NAME = os.getenv("COLLECTION")    # PGVector collection name
CONNECTION_STRING = os.getenv("STRING")      # Postgres connection string
PROMPT = os.getenv("PROMPT")                 # optional system prompt

# Warn at startup about missing required variables instead of failing with
# an obscure error on the first request.
for _required in ("API_KEY", "COLLECTION", "STRING"):
    if not os.getenv(_required):
        logging.warning(
            "Environment variable %s is not set; the service will not "
            "work correctly without it.", _required
        )

genai_client = genai.Client(api_key=GOOGLE_API_KEY)

# A single chat session is shared by all requests; access is serialized
# with chat_lock because the session object is not thread-safe.
shared_chat = genai_client.chats.create(
    model="gemini-2.5-flash-lite"
)
if PROMPT:
    # Prime the shared session with the system prompt once, at startup.
    shared_chat.send_message(PROMPT)
    logging.info("Initial system prompt sent to Gemini model.")

chat_lock = threading.Lock()
def search_documents(question):
    """Return the text of the documents most similar to *question*.

    Args:
        question: Natural-language query to embed and search with.

    Returns:
        list[str]: ``page_content`` of up to 5 nearest documents.
    """
    # Build the embedding model and vector store once and memoize them on
    # the function object — the original reconnected to Postgres and
    # re-instantiated the embedding client on every single request.
    db = getattr(search_documents, "_db", None)
    if db is None:
        embeddings = GoogleGenerativeAIEmbeddings(
            google_api_key=GOOGLE_API_KEY,
            model="models/text-embedding-004"
        )
        db = PGVector(
            embeddings,
            connection=CONNECTION_STRING,
            collection_name=COLLECTION_NAME,
            use_jsonb=True
        )
        search_documents._db = db
    docs = db.similarity_search(question, k=5)
    # Lazy %-style args avoid formatting cost when INFO is disabled.
    logging.info("Retrieved %d documents for query: %s", len(docs), question)
    return [doc.page_content for doc in docs]
@app.route("/chat", methods=["POST"])
def chat():
    """RAG chat endpoint: retrieve context, query Gemini, return parsed JSON.

    Expects a JSON body ``{"query": "..."}``. Returns the model's JSON
    answer on success, a 400 for a missing query, or a 500 with the raw
    model output when the response cannot be parsed.
    """
    # get_json(silent=True) returns None for a missing/malformed body
    # instead of raising before our own 400 handling (request.json would
    # abort with an unlogged 415/400 on a non-JSON request).
    data = request.get_json(silent=True) or {}
    question = data.get("query")
    if not question:
        logging.warning("Received request without 'query' field.")
        return jsonify({"error": "Missing 'query' field"}), 400

    logging.info("Received query: %s", question)
    context = search_documents(question)
    prompt = f"""
Context:
{context}
Question:
{question}
"""
    output = ""  # defined before try so the error payload can always use it
    try:
        # Serialize access: the shared chat session is not thread-safe.
        with chat_lock:
            response = shared_chat.send_message(prompt)
        output = response.text
        logging.info("Response received from Gemini model.")

        # Extract everything between the first '{' and the last '}' —
        # the model may wrap its JSON in prose or markdown fences.
        json_start = output.find("{")
        json_end = output.rfind("}") + 1
        if json_start == -1 or json_end == 0:
            raise ValueError("No JSON object found in model output")
        json_str = output[json_start:json_end]

        # Escape stray backslashes that are not valid JSON escapes
        # (e.g. '\w' from regex-like text) so json.loads doesn't choke.
        json_str = re.sub(r'(?<!\\)\\(?!["\\/bfnrtu])', r'\\\\', json_str)

        parsed_output = json.loads(json_str)
        logging.info("✅ Successfully parsed JSON from extracted string.")
        return jsonify(parsed_output)
    except Exception as e:
        # logging.exception records the full traceback, not just the message.
        logging.exception("Exception occurred while parsing JSON: %s", e)
        return jsonify({
            "error": "Failed to process model response",
            "raw_response": output,
            "exception": str(e)
        }), 500
@app.route("/", methods=["GET"])
def default():
    """Liveness probe: confirm the backend process is up and serving."""
    logging.info("Health check received.")
    status_message = "Backend Running Successfully"
    return status_message
if __name__ == "__main__":
    logging.info("Starting Flask app...")
    # debug mode enables the reloader and the interactive debugger, which
    # must never be exposed in production. Default stays True for backward
    # compatibility; set FLASK_DEBUG=0 (or "false") to disable.
    debug_enabled = os.getenv("FLASK_DEBUG", "true").lower() in ("1", "true", "yes")
    app.run(debug=debug_enabled)