# NOTE(review): paste residue from a Hugging Face "Spaces" page showing a
# "Runtime error" banner — kept as a comment so the file parses. The crash is
# consistent with the relative-import bug fixed in the import block below.
from flask import Flask, request, jsonify
from flask_cors import CORS
import sys
import os
import logging

# Make the project root importable so the package-local modules resolve
# whether this file is imported as part of a package or run as a script.
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
sys.path.append(BASE_DIR)

# BUG FIX: the original used only relative imports (`from .python.vector_store
# import ...`). When this file is executed directly — which the __main__ guard
# at the bottom intends — Python raises "ImportError: attempted relative
# import with no known parent package", i.e. the app crashes at startup
# (matching the "Runtime error" banner above). Try the package-relative form
# first, then fall back to absolute imports resolved via BASE_DIR on sys.path.
try:
    from .python.vector_store import VectorStore
    from .embedder_wrapper import SyncEmbedder
    from .python.config import (
        OPENAI_API_KEY,
        QDRANT_URL,
        QDRANT_API_KEY,
        COLLECTION_NAME,
    )
except ImportError:
    from python.vector_store import VectorStore
    from embedder_wrapper import SyncEmbedder
    from python.config import (
        OPENAI_API_KEY,
        QDRANT_URL,
        QDRANT_API_KEY,
        COLLECTION_NAME,
    )

# Configure logging once at import time; handlers use the module logger.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
app = Flask(__name__)

# Allow the local development frontends (ports 3000 and 5000, reached either
# as localhost or 127.0.0.1) to call this API with credentials and the JSON /
# auth headers the client sends.
_DEV_ORIGINS = [
    f"http://{host}:{port}"
    for host in ("localhost", "127.0.0.1")
    for port in (3000, 5000)
]
CORS(
    app,
    origins=_DEV_ORIGINS,
    supports_credentials=True,
    allow_headers=["Content-Type", "Authorization"],
)
# Initialize the shared service clients once at import time so every request
# handler reuses them (module globals `vector_store` and `embedder`).
# On any failure we log and re-raise so the process dies visibly at startup
# instead of serving requests with half-initialized services.
try:
    # Qdrant-backed store — presumably connects using the configured
    # URL/API key and collection; exact behavior lives in VectorStore.
    vector_store = VectorStore(
        url=QDRANT_URL,
        api_key=QDRANT_API_KEY,
        collection_name=COLLECTION_NAME
    )
    # Synchronous wrapper around the OpenAI embedding API (see
    # embedder_wrapper) — used per request in chat().
    embedder = SyncEmbedder(api_key=OPENAI_API_KEY)
    logger.info("✅ Services initialized successfully")
except Exception as e:
    logger.error(f"❌ Failed to initialize services: {e}")
    raise
@app.route("/chat", methods=["POST"])
def chat():
    """Answer a user message with retrieval-augmented generation.

    Embeds the incoming question, retrieves the top-5 similar documents from
    the vector store, and asks gpt-3.5-turbo with that context (or without it
    when nothing relevant is found).

    Expects JSON: ``{"message": "<user question>"}``.
    Returns JSON with ``response``, ``sources`` and ``scores`` on success;
    ``{"error": ...}`` with status 400 (missing field) or 500 (any runtime
    failure) otherwise.
    """
    # BUG FIX: this handler was never registered with Flask (no @app.route),
    # so the endpoint was unreachable. "/chat" is the conventional path for
    # this handler — NOTE(review): confirm against the frontend's fetch URL.
    try:
        data = request.get_json()
        if not data or 'message' not in data:
            return jsonify({'error': 'Message field is required'}), 400
        user_message = data['message']

        # Embed the query and fetch the 5 nearest documents.
        query_embedding = embedder.embed_text(user_message)
        similar_docs = vector_store.search_similar(query_embedding, top_k=5)
        context = "\n".join(doc.get("content", "") for doc in similar_docs)

        # Ground the prompt in retrieved context when any exists; otherwise
        # fall back to a plain question. Both variants instruct the model to
        # admit when it does not know.
        if context.strip():
            prompt = f"""
Answer the question using the context below.
If the answer is not in the context, say you don't know.
Context:
{context}
Question:
{user_message}
Answer:
"""
        else:
            prompt = f"""
Answer the following question.
If you don't know, say you don't know.
Question:
{user_message}
Answer:
"""

        # Imported lazily so module import does not require the openai
        # package. NOTE(review): constructing the client per request works
        # but a module-level client would avoid repeated setup cost.
        from openai import OpenAI
        client = OpenAI(api_key=OPENAI_API_KEY)
        response = client.chat.completions.create(
            model="gpt-3.5-turbo",
            messages=[{"role": "user", "content": prompt}],
            temperature=0.3,
            max_tokens=500
        )
        bot_response = response.choices[0].message.content.strip()

        return jsonify({
            "response": bot_response,
            "sources": [doc.get("source", "") for doc in similar_docs],
            "scores": [doc.get("score", 0.0) for doc in similar_docs]
        })
    except Exception as e:
        # Top-level boundary: surface the failure to the client as a 500.
        logger.error(f"Chat error: {e}")
        return jsonify({"error": str(e)}), 500
@app.route("/health", methods=["GET"])
def health():
    """Liveness probe; always reports the service as healthy.

    BUG FIX: registered as a Flask route — the original function was never
    bound to a URL. "/health" is the conventional path; NOTE(review): confirm
    against whatever monitors or frontends call it.
    """
    return jsonify({"status": "healthy"})
@app.route("/documents/count", methods=["GET"])
def document_count():
    """Return the number of documents in the vector store.

    Responds with ``{"count": <int>}`` on success, or ``{"error": ...}`` with
    status 500 if the store query fails.

    BUG FIX: registered as a Flask route — the original function was never
    bound to a URL. The path is an assumption; NOTE(review): confirm against
    the frontend.
    """
    try:
        count = vector_store.get_all_documents_count()
        return jsonify({"count": count})
    except Exception as e:
        logger.error(f"Count error: {e}")
        return jsonify({"error": str(e)}), 500
if __name__ == "__main__":
    # Bind to all interfaces on port 7860 (presumably the port the hosting
    # platform expects — 7860 is the Hugging Face Spaces default; confirm).
    app.run(debug=False, host="0.0.0.0", port=7860)