EphAsad committed on
Commit
a8b0c6a
·
verified ·
1 Parent(s): 3de6254

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +56 -5
app.py CHANGED
@@ -1,14 +1,24 @@
1
  import gradio as gr
2
- from huggingface_hub import InferenceClient
 
 
3
  from rag_query import retrieve
 
4
 
5
  from config.settings import *
6
 
 
 
 
 
 
 
7
  def load_prompt():
8
  with open("prompts/rag_prompt.txt", "r", encoding="utf-8") as f:
9
  return f.read()
10
 
11
 
 
12
  def respond(
13
  message,
14
  history,
@@ -26,13 +36,10 @@ def respond(
26
  retrieved = retrieve(message)
27
 
28
  context_blocks = []
29
- sources = set()
30
-
31
  for item in retrieved:
32
  context_blocks.append(
33
  f"[{item['condition']} – {item['section']}]\n{item}"
34
  )
35
- sources.add(item["source_id"])
36
 
37
  context = "\n\n".join(context_blocks)
38
 
@@ -47,7 +54,6 @@ def respond(
47
  ]
48
 
49
  response = ""
50
-
51
  for chunk in client.chat_completion(
52
  messages,
53
  max_tokens=max_tokens,
@@ -60,6 +66,36 @@ def respond(
60
  yield response
61
 
62
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
63
  chatbot = gr.ChatInterface(
64
  respond,
65
  type="messages",
@@ -77,7 +113,22 @@ chatbot = gr.ChatInterface(
77
  with gr.Blocks() as demo:
78
  with gr.Sidebar():
79
  gr.LoginButton()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
80
  chatbot.render()
81
 
 
82
  if __name__ == "__main__":
83
  demo.launch()
 
1
  import gradio as gr
2
+ import subprocess
3
+ import os
4
+ from huggingface_hub import InferenceClient, HfApi
5
  from rag_query import retrieve
6
+ from embed_index import main as build_index
7
 
8
  from config.settings import *
9
 
10
# Artifacts produced by the index build that must be pushed back to the
# Space repo; both paths come from config.settings (star-imported above).
INDEX_FILES = [
    FAISS_INDEX_PATH,
    METADATA_PATH
]
14
+
15
+
16
def load_prompt():
    """Return the RAG prompt template read from prompts/rag_prompt.txt."""
    with open("prompts/rag_prompt.txt", "r", encoding="utf-8") as prompt_file:
        prompt_text = prompt_file.read()
    return prompt_text
19
 
20
 
21
+ # ---------- RAG CHAT ----------
22
  def respond(
23
  message,
24
  history,
 
36
  retrieved = retrieve(message)
37
 
38
  context_blocks = []
 
 
39
  for item in retrieved:
40
  context_blocks.append(
41
  f"[{item['condition']} – {item['section']}]\n{item}"
42
  )
 
43
 
44
  context = "\n\n".join(context_blocks)
45
 
 
54
  ]
55
 
56
  response = ""
 
57
  for chunk in client.chat_completion(
58
  messages,
59
  max_tokens=max_tokens,
 
66
  yield response
67
 
68
 
69
# ---------- BUILD INDEX ----------
def rebuild_index_ui():
    """Rebuild the FAISS index and report the outcome as a status string.

    Returns:
        A Markdown status message rendered in the sidebar status box, so
        a build failure is shown to the user instead of only surfacing as
        an unhandled exception in the Space logs.
    """
    try:
        build_index()
    except Exception as exc:  # surface any build failure in the UI
        return f"❌ Index build failed: {exc}"
    return "✅ Index rebuilt successfully."
73
+
74
+
75
# ---------- COMMIT TO HF ----------
def commit_index_ui(hf_token: gr.OAuthToken):
    """Upload the local FAISS index files back to this Space's repository.

    Args:
        hf_token: OAuth token injected by Gradio's LoginButton; Gradio
            passes None when the visitor has not logged in.

    Returns:
        A Markdown status message for the sidebar status box.
    """
    # Guard: without a login, hf_token is None and .token would raise.
    if hf_token is None or not getattr(hf_token, "token", None):
        return "❌ Please log in with the Hugging Face button first."

    api = HfApi(token=hf_token.token)

    repo_id = os.environ.get("SPACE_ID")  # HF injects this into every Space
    if not repo_id:
        return "❌ Could not determine Space repo ID."

    # Validate every file BEFORE uploading anything, so a missing file
    # cannot leave the repo half-updated (index pushed, metadata not).
    for file_path in INDEX_FILES:
        if not os.path.exists(file_path):
            return f"❌ Missing file: {file_path}"

    for file_path in INDEX_FILES:
        api.upload_file(
            path_or_fileobj=file_path,
            path_in_repo=file_path,
            repo_id=repo_id,
            repo_type="space",
            commit_message="Update FAISS index"
        )

    return "⬆ Index committed to Hugging Face successfully."
96
+
97
+
98
+ # ---------- UI ----------
99
  chatbot = gr.ChatInterface(
100
  respond,
101
  type="messages",
 
113
with gr.Blocks() as demo:
    with gr.Sidebar():
        gr.LoginButton()
        # Admin controls: rebuild the FAISS index locally, then push the
        # resulting files back to the Space repo. Both report into the
        # shared status box below.
        # NOTE(review): button nesting inside the sidebar is inferred from
        # their position after gr.LoginButton() — confirm against the app.
        rebuild_button = gr.Button("🔨 Build Index")
        push_button = gr.Button("⬆ Commit to HF")
        status_output = gr.Markdown()

        rebuild_button.click(rebuild_index_ui, outputs=status_output)
        push_button.click(commit_index_ui, outputs=status_output)

    chatbot.render()
131
 
132
+
133
# Start the Gradio server only when this file is run directly,
# not when it is imported as a module.
if __name__ == "__main__":
    demo.launch()