Hammad712 commited on
Commit
97c6bc6
·
verified ·
1 Parent(s): 41a3613

Create main.py

Browse files
Files changed (1) hide show
  1. main.py +109 -0
main.py ADDED
@@ -0,0 +1,109 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from fastapi import FastAPI, HTTPException
2
+ from pydantic import BaseModel
3
+ from langchain.chains import RetrievalQA
4
+ from langchain.prompts import PromptTemplate
5
+ from langchain_community.vectorstores import FAISS
6
+ from langchain_huggingface import HuggingFaceEmbeddings
7
+ from langchain_groq import ChatGroq
8
+ import zipfile
9
+ import os
10
+
11
app = FastAPI()


# --- Request schema for the /query endpoint ---
class QueryRequest(BaseModel):
    """JSON body accepted by POST /query."""
    question: str  # the user's natural-language question


# Shared runtime state, populated exactly once by the startup hook below.
llm = None
retriever = None
chain = None
20
+
21
@app.on_event("startup")
def load_components():
    """Initialize the LLM, embeddings, FAISS retriever, and QA chain at startup.

    Populates the module-level globals ``llm``, ``retriever``, and ``chain``
    so request handlers can use them. Expects the Groq key in the ``api_key``
    environment variable and a ``faiss_index.zip`` archive next to the app.
    """
    global llm, retriever, chain

    # NOTE(review): returns None if unset; ChatGroq will then fail on first call.
    api_key = os.getenv('api_key')

    # --- Load LLM ---
    llm = ChatGroq(
        model="meta-llama/llama-4-scout-17b-16e-instruct",
        temperature=0,  # deterministic extraction, no creative paraphrasing
        max_tokens=1024,
        api_key=api_key
    )

    # --- Load Embeddings (CPU; normalized vectors for cosine-style search) ---
    embeddings = HuggingFaceEmbeddings(
        model_name="intfloat/multilingual-e5-large",
        model_kwargs={"device": "cpu"},
        encode_kwargs={"normalize_embeddings": True},
    )

    # --- Unzip vectorstore on first boot only ---
    zip_path = "faiss_index.zip"
    extract_path = "faiss_index"
    if not os.path.exists(extract_path):
        with zipfile.ZipFile(zip_path, 'r') as z:
            z.extractall(extract_path)
        print("✅ Unzipped FAISS index.")

    # --- Load FAISS vectorstore & create retriever ---
    vectorstore = FAISS.load_local(
        extract_path,
        embeddings,
        # Required to load the pickled index; safe only because the archive
        # ships with the app — never enable for untrusted files.
        allow_dangerous_deserialization=True
    )
    retriever = vectorstore.as_retriever(search_kwargs={"k": 5})  # top-5 passages
    print("✅ FAISS index loaded.")

    # --- Prepare prompt template ---
    ayat_finder_prompt = '''
You are an Arabic Ayat Finder assistant.
Your goal is to provide an accurate and concise answer by extracting the exact Arabic verse (Ayah) from the provided retrieved context.
Your task is to output only the exact Ayah in Arabic as it appears in the context, along with its full reference details.

Instructions:
1. Locate the segment in the retrieved context that directly contains the requested Ayah.
2. Output only the Arabic text of the Ayah, without any additional commentary or translation.
3. Provide the complete reference alongside the Ayah, including:
- Surah number and name (Arabic and English)
- Ayah (verse) number
- Any other available metadata from the context (e.g., Juz number, Hizb, revelation order) if present.
4. If the exact Ayah cannot be found in the context, respond with "لا أعلم".
5. Do not add, infer, or modify any content; strictly reproduce what exists in the context.

Retrieved context:
{context}

User's question:
{question}

Your response:
'''

    prompt = PromptTemplate(
        # BUG FIX: original passed the undefined name `quiz_solving_prompt`,
        # raising NameError at startup; the template defined above is
        # `ayat_finder_prompt`.
        template=ayat_finder_prompt,
        input_variables=["context", "question"]
    )

    # --- Assemble a stateless RetrievalQA chain (no memory) ---
    chain = RetrievalQA.from_chain_type(
        llm=llm,
        chain_type="stuff",  # concatenate all retrieved docs into {context}
        retriever=retriever,
        return_source_documents=False,
        chain_type_kwargs={"prompt": prompt},
        verbose=False,
    )
98
+
99
@app.get("/")
def root():
    """Health-check endpoint: confirms the API process is up."""
    status = {"message": "Quran Ayat Finder API is up."}
    return status
102
+
103
@app.post("/query")
def query(request: QueryRequest):
    """Answer a question by running it through the RetrievalQA chain.

    Returns ``{"answer": <text>}``. Raises HTTP 503 if the startup hook has
    not (successfully) initialized the chain yet, and HTTP 500 on any error
    raised while invoking the chain.
    """
    # ROBUSTNESS: without this guard a failed/incomplete startup surfaces as
    # an opaque AttributeError-backed 500 instead of a clear "not ready".
    if chain is None:
        raise HTTPException(
            status_code=503,
            detail="Service not ready: QA chain is not initialized."
        )
    try:
        result = chain.invoke({"query": request.question})
        return {"answer": result["result"]}
    except Exception as e:
        # Top-level boundary: convert any chain failure into an HTTP 500.
        raise HTTPException(status_code=500, detail=str(e))