File size: 1,598 Bytes
ecc164c
c4057d7
 
 
 
 
 
ecc164c
 
c4057d7
ecc164c
 
 
c4057d7
ecc164c
 
 
 
 
 
 
c4057d7
ecc164c
c4057d7
ecc164c
 
 
 
c4057d7
 
ecc164c
c4057d7
ecc164c
c4057d7
ecc164c
 
 
 
 
 
 
c4057d7
ecc164c
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
from fastapi import FastAPI
import requests
from bs4 import BeautifulSoup
from googlesearch import search
from huggingface_hub import InferenceClient
import os

# FastAPI application instance; routes are registered via decorators below.
app = FastAPI()
# Hugging Face inference client for the Gemma chat model.
# NOTE(review): HF_TOKEN must be set in the environment; os.getenv returns
# None otherwise and the client would make unauthenticated requests.
client = InferenceClient("google/gemma-3-4b-it", token=os.getenv("HF_TOKEN"))

def search_and_scrape(question):
    """Google-search islamweb.net for *question* and scrape the top result.

    Restricts the query with a ``site:`` filter, fetches the first hit, and
    extracts the text of the ``div.item`` element (truncated to 3000 chars).

    Returns:
        tuple[str, str] | tuple[None, None]: ``(content, link)`` on success,
        ``(None, None)`` when the search yields nothing, the fetch fails,
        or the expected element is absent (best-effort contract).
    """
    full_query = f"site:islamweb.net {question}"
    try:
        results = search(full_query, num_results=1, lang="ar")
        # search() yields links lazily; take the first, if any.
        link = next(iter(results), None)
        if link is None:
            return None, None
        headers = {'User-Agent': 'Mozilla/5.0'}
        resp = requests.get(link, headers=headers, timeout=5)
        resp.raise_for_status()  # treat HTTP errors as "no result"
        soup = BeautifulSoup(resp.text, 'html.parser')
        item = soup.find('div', {'class': 'item'})
        if item is None:
            # Page layout changed or answer container missing.
            return None, None
        return item.get_text(strip=True)[:3000], link
    except Exception:
        # Deliberate best-effort: any search/network/parse failure maps to
        # "no result". Narrowed from a bare `except:` so SystemExit and
        # KeyboardInterrupt propagate.
        return None, None

@app.post("/ask")
async def ask_ai(data: dict):
    """Answer a question using scraped islamweb.net content as context.

    Expects a JSON body like ``{"question": "..."}``. Searches for source
    material, then asks the chat model to answer strictly from that context.

    Returns:
        dict: ``{"answer": str, "source": str}`` — source is the scraped
        URL, or "" when no context was found.
    """
    question = data.get("question")
    # Guard against a missing/empty "question" key: previously the literal
    # string "None" would have been sent into the Google query.
    if not question:
        return {"answer": "لم أجد نتائج.", "source": ""}

    # NOTE(review): search_and_scrape and chat_completion are blocking calls
    # inside an async handler — they stall the event loop. Consider
    # fastapi.concurrency.run_in_threadpool if throughput matters.
    context, link = search_and_scrape(question)

    if not context:
        return {"answer": "لم أجد نتائج.", "source": ""}

    # Constrain the model to the scraped context only.
    system_msg = f"Answer only from context: {context}"

    response = client.chat_completion(
        messages=[
            {"role": "system", "content": system_msg},
            {"role": "user", "content": question}
        ],
        max_tokens=500
    )

    return {
        "answer": response.choices[0].message.content,
        "source": link
    }

# Run the FastAPI app directly under uvicorn (replaces the earlier Gradio
# entry point). Port 7860 is the conventional Hugging Face Spaces port.
if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=7860)