codeBOKER committed on
Commit
4883af7
·
verified ·
1 Parent(s): ce11a99

add files

Browse files
Files changed (3) hide show
  1. README.md +1 -10
  2. app.py +82 -0
  3. requirements.txt +6 -0
README.md CHANGED
@@ -1,10 +1 @@
1
- ---
2
- title: Customer Service
3
- emoji: 🐢
4
- colorFrom: yellow
5
- colorTo: green
6
- sdk: docker
7
- pinned: false
8
- ---
9
-
10
- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
 
1
+ # customer_service
 
 
 
 
 
 
 
 
 
app.py ADDED
@@ -0,0 +1,82 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# --- 1. Configuration & clients ---------------------------------------
# All secrets come from environment variables (set them as Hugging Face
# Space secrets); nothing sensitive is hard-coded here.
import os

import httpx  # For sending messages back to Telegram
from fastapi import FastAPI, Request
from groq import Groq
from pinecone import Pinecone

PINECONE_API_KEY = os.environ.get("PINECONE_API_KEY")
GROQ_API_KEY = os.environ.get("GROQ_API_KEY")
TELEGRAM_TOKEN = os.environ.get("TELEGRAM_TOKEN")

# Telegram Bot API endpoint used to push replies back into the chat.
TELEGRAM_URL = f"https://api.telegram.org/bot{TELEGRAM_TOKEN}/sendMessage"

# Vector store (knowledge base) and LLM clients.
pc = Pinecone(api_key=PINECONE_API_KEY)
index = pc.Index("customerserviceindex")
groq_client = Groq(api_key=GROQ_API_KEY)

app = FastAPI()
19
+
20
+ # 2. The Core AI Logic
21
+ async def get_ai_response(user_query: str):
22
+ # Vectorize query using Pinecone Inference
23
+ query_embedding = pc.inference.embed(
24
+ model_id="multilingual-e5-large",
25
+ inputs=[user_query],
26
+ parameters={"input_type": "query"}
27
+ )
28
+
29
+ # Search Pinecone for Bank Context
30
+ search_results = index.query(
31
+ vector=query_embedding[0].values,
32
+ top_k=3,
33
+ include_metadata=True
34
+ )
35
+
36
+ context_text = "\n".join([res.metadata['original_text'] for res in search_results.matches])
37
+
38
+ # Construct the System Prompt
39
+ # We use facts from the profile: Islamic banking, based in Mukalla [cite: 15, 6]
40
+ prompt = f"""
41
+ You are the official AI assistant for Hadhramout Bank (بنك حضرموت).
42
+ Your tone is professional, helpful, and culturally respectful to the Yemeni community.
43
+ Use ONLY the provided context to answer. If the information isn't there,
44
+ kindly ask the customer to visit the main branch in Al Mukalla.
45
+
46
+ Context:
47
+ {context_text}
48
+
49
+ Customer Question: {user_query}
50
+ """
51
+
52
+ completion = groq_client.chat.completions.create(
53
+ messages=[{"role": "user", "content": prompt}],
54
+ model="llama3-8b-8192",
55
+ )
56
+ return completion.choices[0].message.content
57
+
58
# 3. The Webhook Endpoint
@app.post("/webhook")
async def telegram_webhook(request: Request):
    """Handle a Telegram webhook update.

    Expects the standard Telegram ``Update`` JSON payload. Only plain text
    messages are answered; any other update shape is acknowledged with
    ``{"status": "ok"}`` so Telegram does not keep re-delivering it.
    Defensive ``.get`` access prevents a malformed payload from raising
    ``KeyError`` (which would return a 500 and trigger Telegram retries).
    """
    data = await request.json()

    message = data.get("message") or {}
    chat_id = (message.get("chat") or {}).get("id")
    user_text = message.get("text", "")

    if chat_id is not None and user_text:
        # Get the intelligent response
        ai_answer = await get_ai_response(user_text)

        # Send back to Telegram
        async with httpx.AsyncClient() as client:
            await client.post(TELEGRAM_URL, json={
                "chat_id": chat_id,
                "text": ai_answer
            })

    return {"status": "ok"}
79
+
80
@app.get("/")
async def root():
    """Liveness probe: confirms the backend is up and serving requests."""
    status_message = "Hadhramout Bank AI Backend is Live"
    return {"message": status_message}
requirements.txt ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ fastapi==0.104.1
2
+ uvicorn==0.24.0
3
+ pinecone==5.0.2
4
+ groq==0.4.1
5
+ httpx==0.25.2
6
+ python-dotenv==1.0.0