import os

# Point the Hugging Face cache directories at /tmp BEFORE importing
# sentence_transformers below, so libraries that read these variables at
# import time pick them up (needed on hosts with a read-only app filesystem).
# NOTE(review): TRANSFORMERS_CACHE is deprecated in favour of HF_HOME in newer
# transformers releases — confirm the installed version still honours it.
os.environ['TRANSFORMERS_CACHE'] = '/tmp/huggingface_cache'
os.environ['HF_HOME'] = '/tmp/huggingface'

from flask import Flask, request, jsonify
from sentence_transformers import SentenceTransformer
from pinecone import Pinecone
import google.generativeai as genai
from langdetect import detect
| |
|
| | |
# Canned replies for common Arabic and English greetings, checked before any
# retrieval/LLM work so simple salutations get an instant answer.
# NOTE(review): the previous revision stored these strings mojibake-encoded
# (UTF-8 bytes mis-decoded as TIS-620/cp874 with bytes dropped), so genuine
# Arabic input could never match; restored to proper UTF-8 here. The reply for
# "هلا" is reconstructed from damaged bytes — confirm the intended wording.
greetings_dict = {
    "السلام عليكم": "وعليكم السلام",
    "صباح الخير": "صباح النور",
    "مساء الخير": "مساء النور",
    "أهلا": "أهلا بيك",
    "أهلاً": "أهلاً وسهلاً",
    "هاي": "هاي",
    "هلا": "هلا فيك",
    "hello": "hello!",
    "hi": "hi!",
    "hey": "hey there!",
    "ازيك": "الحمد لله، انت عامل ايه؟",
    "ازيك؟": "الحمد لله، انت عامل ايه؟",
}
| |
|
def check_greeting(question, greetings=None):
    """Return the canned reply if *question* contains a known greeting.

    Matching is case-insensitive and anchored with word-boundary lookarounds,
    so a short key such as "hi" no longer fires inside unrelated words
    ("this", "chicago", ...) — the previous plain-substring test did.
    Lookarounds are used instead of ``\\b`` so keys ending in punctuation
    (e.g. "ازيك؟") still match.

    Args:
        question: raw user text.
        greetings: optional mapping of greeting -> reply; defaults to the
            module-level ``greetings_dict``.

    Returns:
        The mapped reply string, or ``None`` when no greeting is found.
    """
    import re  # local import keeps the module's order-sensitive header untouched

    if greetings is None:
        greetings = greetings_dict
    for greeting, reply in greetings.items():
        pattern = r"(?<!\w)" + re.escape(greeting) + r"(?!\w)"
        if re.search(pattern, question, flags=re.IGNORECASE):
            return reply
    return None
| | |
| | |
# --- Model / service initialisation ---------------------------------------
# SECURITY: the previous revision hard-coded the Pinecone and Google API keys
# in source. Committed credentials must be considered compromised and rotated;
# they are now read from the environment and the process fails fast (KeyError)
# when a key is missing.
embedding_model = SentenceTransformer("intfloat/multilingual-e5-large")

pc = Pinecone(api_key=os.environ["PINECONE_API_KEY"])
index = pc.Index("newindex")

genai.configure(api_key=os.environ["GOOGLE_API_KEY"])
model = genai.GenerativeModel("gemini-2.0-flash")

app = Flask(__name__)
# In-memory conversation history: list of (question, answer) tuples.
# NOTE(review): unbounded and shared across all clients/workers — consider
# per-session storage with a size cap.
chat_history = []
| | |
def detect_language(text):
    """Best-effort language code for *text* (e.g. "ar", "en").

    langdetect raises on empty or undecidable input; any failure is mapped to
    the sentinel "unknown" rather than propagating to the request handler.

    Returns:
        An ISO-639-1 code string from ``detect``, or "unknown" on failure.
    """
    try:
        return detect(text)
    except Exception:  # narrowed from a bare except, which also swallowed SystemExit etc.
        return "unknown"
| | |
def get_answer_from_pinecone(user_question, embedding_model, index, top_k=5, similarity_threshold=0.7):
    """Embed *user_question*, query Pinecone, and format the best matches.

    User-visible marker characters (❌, •, ⚠️) were mojibake in the previous
    revision and have been restored to proper UTF-8.

    Args:
        user_question: free-text question to embed and search for.
        embedding_model: object whose ``encode(text)`` returns a vector
            exposing ``tolist()`` (e.g. a SentenceTransformer).
        index: Pinecone index supporting
            ``query(vector=..., top_k=..., include_metadata=True)``.
        top_k: number of candidates to retrieve from the index.
        similarity_threshold: minimum match score to keep a candidate.

    Returns:
        A list of formatted answer strings (best score first); on failure or
        no results, a single-element list carrying the error/notice message.
    """
    try:
        question_vector = embedding_model.encode(user_question).tolist()
    except Exception as e:
        return [f"❌ Error embedding question: {e}"]

    try:
        search_result = index.query(
            vector=question_vector,
            top_k=top_k,
            include_metadata=True
        )
    except Exception as e:
        return [f"❌ Error querying Pinecone: {e}"]

    # Keep only sufficiently similar matches, best score first.
    matches = [m for m in search_result.matches if m.score >= similarity_threshold]
    matches.sort(key=lambda m: m.score, reverse=True)

    answers = []
    for m in matches:
        answer = m.metadata.get('answer', '').strip()
        source = m.metadata.get('source', 'unknown')
        if answer:  # skip matches whose metadata has no usable answer text
            answers.append(f"• ({round(m.score, 3)}) from [{source}]:\n{answer}")
    return answers if answers else ["⚠️ No similar answers found."]
| | |
def ask_gemini_with_combined_answer(user_question, pinecone_answers=None, history=None):
    """Compose a grounded prompt (Arabic or English) and return Gemini's reply.

    The prompt restricts the model to the retrieved database snippets and
    includes prior conversation turns for context. Template text was mojibake
    in the previous revision and is reconstructed here as proper UTF-8 —
    confirm exact Arabic wording against the original author's intent.

    Args:
        user_question: the user's question; its detected language picks the
            Arabic or English prompt template.
        pinecone_answers: formatted snippets from ``get_answer_from_pinecone``.
        history: list of (question, answer) tuples from previous turns.

    Returns:
        Gemini's reply text, stripped of surrounding whitespace.
    """
    # ``None`` defaults avoid the mutable-default-argument pitfall the
    # previous signature (``=[]``) carried.
    pinecone_answers = pinecone_answers if pinecone_answers is not None else []
    history = history if history is not None else []

    context = "\n".join(f"👤 {q}\n🤖 {a}" for q, a in history)
    extracted_info = "\n".join(f"• {ans}" for ans in pinecone_answers) if pinecone_answers else "None"
    lang = detect_language(user_question)

    if lang == "ar":
        instructions = """
⚠ هام: استخدم فقط المعلومات من قاعدة البيانات.
📜 المحادثة السابقة:
{context}
👤 المستخدم يسأل: {user_question}
📚 معلومات من قاعدة البيانات:
{extracted_info}
📝 الرد:
"""
    else:
        instructions = """
⚠ Important: Use only database information.
📜 Previous conversation:
{context}
👤 User asks: {user_question}
📚 Retrieved info:
{extracted_info}
📝 Response:
"""

    prompt = instructions.format(
        context=context or ("لا يوجد" if lang == "ar" else "None"),
        user_question=user_question,
        extracted_info=extracted_info
    )
    response = model.generate_content(prompt)
    return response.text.strip()
| | |
@app.route("/ask", methods=["POST"])
def ask():
    """POST /ask — answer a question. Expects JSON body ``{"question": "..."}``.

    Returns ``{"answer": ...}`` on success, or ``{"error": ...}`` with 400
    when no usable question is supplied.
    """
    # silent=True turns a missing/malformed JSON body into None instead of a
    # framework error page, so the client always receives a clean JSON 400.
    data = request.get_json(silent=True)
    if not isinstance(data, dict):
        return jsonify({"error": "Missing question"}), 400
    question = str(data.get("question") or "").strip()
    if not question:
        return jsonify({"error": "Missing question"}), 400

    # Short-circuit plain greetings without touching Pinecone or Gemini.
    greeting_response = check_greeting(question)
    if greeting_response:
        return jsonify({"answer": greeting_response})

    pinecone_answer = get_answer_from_pinecone(question, embedding_model, index)
    final_answer = ask_gemini_with_combined_answer(question, pinecone_answer, chat_history)
    # NOTE(review): chat_history grows without bound and is shared by all
    # clients — consider per-session storage with a cap.
    chat_history.append((question, final_answer))
    return jsonify({
        "answer": final_answer
    })
| | |
@app.route("/")
def home():
    """GET / — health-check and usage hint (the 🤖 was mojibake before)."""
    return "🤖 API is running. Use POST /ask with {'question': '...'}"
| | |
if __name__ == "__main__":
    # Development server only; binds all interfaces on port 7860 (presumably
    # chosen for a Hugging Face Space — confirm; use a WSGI server in prod).
    app.run(host="0.0.0.0", port=7860)