|
|
from flask import Flask, request, jsonify |
|
|
from flask_cors import CORS |
|
|
from google import genai |
|
|
from langchain_google_genai import GoogleGenerativeAIEmbeddings |
|
|
from langchain_postgres import PGVector |
|
|
import os |
|
|
import re |
|
|
import json |
|
|
import threading |
|
|
import logging |
|
|
|
|
|
# Flask application with CORS enabled so a browser front-end served from
# another origin can call this API.
app = Flask(__name__)
CORS(app)

# Root-logger configuration; the module-level logging.* calls below rely on it.
logging.basicConfig(
    level=logging.INFO,
    format='[%(asctime)s] %(levelname)s in %(module)s: %(message)s'
)

# Deployment configuration, all read from the environment.
# NOTE(review): none of these are validated — any may be None if the variable
# is unset, which will only surface later as client/DB errors.
GOOGLE_API_KEY = os.getenv("API_KEY")       # Google Generative AI API key
COLLECTION_NAME = os.getenv("COLLECTION")   # PGVector collection name
CONNECTION_STRING = os.getenv("STRING")     # Postgres connection string
PROMPT = os.getenv("PROMPT")                # optional system prompt text

# One Gemini client and a SINGLE chat session shared by every request;
# conversation state therefore accumulates across all users of the API.
genai_client = genai.Client(api_key=GOOGLE_API_KEY)
shared_chat = genai_client.chats.create(
    model="gemini-2.5-flash-lite"
)

# Seed the shared chat with the system prompt, if one was provided.
# NOTE(review): this performs a network call at import time.
if PROMPT:
    shared_chat.send_message(PROMPT)
    logging.info("Initial system prompt sent to Gemini model.")

# Serializes access to shared_chat — the chat session is not assumed to be
# thread-safe, and Flask may serve requests concurrently.
chat_lock = threading.Lock()
|
|
|
|
|
def search_documents(question, k=5):
    """Retrieve the *k* most similar document chunks for *question*.

    Embeds the query with Google's text-embedding-004 model and runs a
    similarity search against the configured PGVector collection.

    Args:
        question: Natural-language query to embed and search with.
        k: Number of documents to retrieve (default 5, preserving the
           previous hard-coded behavior).

    Returns:
        list[str]: The ``page_content`` of each retrieved document.
    """
    embeddings = GoogleGenerativeAIEmbeddings(
        google_api_key=GOOGLE_API_KEY,
        model="models/text-embedding-004"
    )
    # NOTE(review): the vector store is rebuilt on every call; if connection
    # setup proves expensive, consider caching it at module level.
    db = PGVector(
        embeddings,
        connection=CONNECTION_STRING,
        collection_name=COLLECTION_NAME,
        use_jsonb=True
    )
    docs = db.similarity_search(question, k=k)
    # Lazy %-args avoid building the message when INFO logging is disabled.
    logging.info("Retrieved %d documents for query: %s", len(docs), question)
    return [doc.page_content for doc in docs]
|
|
|
|
|
@app.route("/chat", methods=["POST"]) |
|
|
def chat(): |
|
|
data = request.json |
|
|
question = data.get("query") |
|
|
if not question: |
|
|
logging.warning("Received request without 'query' field.") |
|
|
return jsonify({"error": "Missing 'query' field"}), 400 |
|
|
|
|
|
logging.info(f"Received query: {question}") |
|
|
context = search_documents(question) |
|
|
|
|
|
prompt = f""" |
|
|
Context: |
|
|
{context} |
|
|
|
|
|
Question: |
|
|
{question} |
|
|
""" |
|
|
|
|
|
try: |
|
|
with chat_lock: |
|
|
response = shared_chat.send_message(prompt) |
|
|
output = response.text |
|
|
logging.info("Response received from Gemini model.") |
|
|
|
|
|
|
|
|
json_start = output.find("{") |
|
|
json_end = output.rfind("}") + 1 |
|
|
|
|
|
if json_start == -1 or json_end == 0: |
|
|
raise ValueError("No JSON object found in model output") |
|
|
|
|
|
json_str = output[json_start:json_end] |
|
|
|
|
|
|
|
|
json_str = re.sub(r'(?<!\\)\\(?!["\\/bfnrtu])', r'\\\\', json_str) |
|
|
|
|
|
|
|
|
parsed_output = json.loads(json_str) |
|
|
logging.info("β
Successfully parsed JSON from extracted string.") |
|
|
|
|
|
return jsonify(parsed_output) |
|
|
|
|
|
except Exception as e: |
|
|
logging.error(f"Exception occurred while parsing JSON: {str(e)}") |
|
|
return jsonify({ |
|
|
"error": "Failed to process model response", |
|
|
"raw_response": output if 'output' in locals() else '', |
|
|
"exception": str(e) |
|
|
}), 500 |
|
|
|
|
|
|
|
|
@app.route("/", methods=["GET"]) |
|
|
def default(): |
|
|
logging.info("Health check received.") |
|
|
return "Backend Running Successfully" |
|
|
|
|
|
if __name__ == "__main__": |
|
|
logging.info("Starting Flask app...") |
|
|
app.run(debug=True) |
|
|
|