arubaDev committed on
Commit
d4f1a14
·
verified ·
1 Parent(s): e7e9bc6

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +17 -99
app.py CHANGED
@@ -12,9 +12,8 @@ MODELS = {
12
  "Meta LLaMA 3.1 (8B Instruct)": "meta-llama/Llama-3.1-8B-Instruct",
13
  "Mistral 7B Instruct": "mistralai/Mistral-7B-Instruct-v0.3",
14
  }
15
-
16
- DATASETS = ["The Stack", "CodeXGLUE"] # Dropdown for dataset selection
17
- HF_TOKEN = os.getenv("HF_TOKEN") # Set in your Space's Secrets
18
  DB_PATH = "history.db"
19
 
20
  SYSTEM_DEFAULT = (
@@ -90,10 +89,8 @@ def get_messages(session_id: int):
90
  conn = db()
91
  cur = conn.cursor()
92
  cur.execute("""
93
- SELECT role, content
94
- FROM messages
95
- WHERE session_id = ?
96
- ORDER BY id ASC
97
  """, (session_id,))
98
  rows = cur.fetchall()
99
  conn.close()
@@ -119,7 +116,7 @@ def update_session_title_if_needed(session_id: int, first_user_text: str):
119
  title = first_user_text.strip().split("\n")[0]
120
  title = (title[:50] + "…") if len(title) > 50 else title
121
  cur.execute("UPDATE sessions SET title=? WHERE id=?", (title or "New chat", session_id))
122
- conn.commit()
123
  conn.close()
124
 
125
  # ---------------------------
@@ -178,14 +175,13 @@ def delete_chat_cb(selected_label):
178
  return gr.update(choices=labels, value=selected), []
179
 
180
  FRONTEND_KEYWORDS = [
181
- "react", "vue", "angular", "html", "css", "javascript", "tailwind", "recharts", "typescript"
182
  ]
183
 
184
  def is_frontend_request(user_text: str) -> bool:
185
  text_lower = user_text.lower()
186
  return any(kw in text_lower for kw in FRONTEND_KEYWORDS)
187
 
188
- # --- Fixed send_cb to show user message ---
189
  def send_cb(user_text, selected_label, chatbot_msgs, system_message, max_tokens, temperature, top_p, model_choice, dataset_choice, *args):
190
  sid = label_to_id(selected_label)
191
  if sid is None:
@@ -193,14 +189,12 @@ def send_cb(user_text, selected_label, chatbot_msgs, system_message, max_tokens,
193
  labels, _ = list_sessions()
194
  selected_label = next((lbl for lbl in labels if lbl.startswith(f"{sid} ")), None)
195
 
196
- # Save user message
197
  add_message(sid, "user", user_text)
198
  update_session_title_if_needed(sid, user_text)
199
 
200
  display_msgs = chatbot_msgs[:]
201
  display_msgs.append({"role": "user", "content": user_text})
202
 
203
- # Check for frontend-heavy request
204
  if is_frontend_request(user_text):
205
  apology = "⚠️ I'm a backend-focused assistant and cannot provide frontend code."
206
  display_msgs.append({"role": "assistant", "content": apology})
@@ -208,14 +202,12 @@ def send_cb(user_text, selected_label, chatbot_msgs, system_message, max_tokens,
208
  yield (display_msgs, "", selected_label)
209
  return
210
 
211
- # Normal backend response
212
  display_msgs.append({"role": "assistant", "content": "…"})
213
  yield (display_msgs, "", selected_label)
214
 
215
  client = get_client(model_choice)
216
  api_messages = build_api_messages(sid, system_message)
217
  partial = ""
218
-
219
  try:
220
  for chunk in client.chat_completion(
221
  messages=api_messages,
@@ -224,7 +216,6 @@ def send_cb(user_text, selected_label, chatbot_msgs, system_message, max_tokens,
224
  top_p=float(top_p),
225
  stream=True,
226
  ):
227
- # --- FIX: handle models that send empty chunks or use message instead of delta ---
228
  if not hasattr(chunk, "choices") or not chunk.choices:
229
  continue
230
  choice = chunk.choices[0]
@@ -240,7 +231,6 @@ def send_cb(user_text, selected_label, chatbot_msgs, system_message, max_tokens,
240
  yield (display_msgs, "", selected_label)
241
 
242
  add_message(sid, "assistant", partial)
243
-
244
  except Exception as e:
245
  display_msgs[-1]["content"] = f"⚠️ Error: {str(e)}"
246
  yield (display_msgs, "", selected_label)
@@ -254,16 +244,12 @@ def regenerate_cb(selected_label, system_message, max_tokens, temperature, top_p
254
  if not msgs:
255
  return [], ""
256
 
257
- # Remove the last assistant message if it exists (to regenerate it)
258
  if msgs and msgs[-1]["role"] == "assistant":
259
  conn = db()
260
  cur = conn.cursor()
261
  cur.execute("""
262
- DELETE FROM messages
263
- WHERE id = (
264
- SELECT id FROM messages
265
- WHERE session_id=?
266
- ORDER BY id DESC LIMIT 1
267
  )
268
  """, (sid,))
269
  conn.commit()
@@ -276,7 +262,6 @@ def regenerate_cb(selected_label, system_message, max_tokens, temperature, top_p
276
 
277
  client = get_client(model_choice)
278
  partial = ""
279
-
280
  try:
281
  for chunk in client.chat_completion(
282
  messages=api_messages,
@@ -285,7 +270,6 @@ def regenerate_cb(selected_label, system_message, max_tokens, temperature, top_p
285
  top_p=float(top_p),
286
  stream=True,
287
  ):
288
- # --- FIX: handle models that send empty chunks or use message instead of delta ---
289
  if not hasattr(chunk, "choices") or not chunk.choices:
290
  continue
291
  choice = chunk.choices[0]
@@ -301,7 +285,6 @@ def regenerate_cb(selected_label, system_message, max_tokens, temperature, top_p
301
  yield display_msgs
302
 
303
  add_message(sid, "assistant", partial)
304
-
305
  except Exception as e:
306
  display_msgs[-1]["content"] = f"⚠️ Error: {str(e)}"
307
  yield display_msgs
@@ -318,90 +301,25 @@ if not labels:
318
  default_selected = labels[0] if labels else None
319
 
320
  with gr.Blocks(title="Backend-Focused LLaMA/Mistral CRUD Assistant", theme=gr.themes.Soft()) as demo:
321
- gr.HTML("""
322
- <style>
323
- button {
324
- background-color: #22c55e !important;
325
- color: #ffffff !important;
326
- border: none !important;
327
- }
328
- button:hover {
329
- background-color: #16a34a !important;
330
- }
331
- button:focus {
332
- outline: 2px solid #166534 !important;
333
- outline-offset: 2px;
334
- }
335
- </style>
336
- """)
337
-
338
  gr.Markdown("## 🗄️ LLaMA & Mistral Backend-Focused CRUD Automation — with Persistent History")
339
 
340
  with gr.Row():
341
  with gr.Column(scale=1, min_width=260):
342
- gr.Markdown("### 📁 Sessions")
343
- session_list = gr.Radio(
344
- choices=labels,
345
- value=default_selected,
346
- label="Your chats",
347
- interactive=True
348
- )
349
- # -----------------------
350
- # Editable title input
351
- # -----------------------
352
- edit_title_box = gr.Textbox(label="✏️ Rename Chat", placeholder="Edit selected chat title...")
353
- rename_btn = gr.Button("💾 Save Title")
354
-
355
- def rename_session_cb(new_title, selected_label):
356
- sid = label_to_id(selected_label)
357
- if sid and new_title.strip():
358
- conn = db()
359
- cur = conn.cursor()
360
- cur.execute("UPDATE sessions SET title=? WHERE id=?", (new_title.strip(), sid))
361
- conn.commit()
362
- conn.close()
363
-
364
- # Refresh the session list and keep the same one selected
365
- labels, _ = list_sessions()
366
- new_selected = next((lbl for lbl in labels if lbl.startswith(f"{sid} ")), None)
367
- return gr.update(choices=labels, value=new_selected)
368
-
369
- # Connect button to callback
370
- rename_btn.click(rename_session_cb, inputs=[edit_title_box, session_list], outputs=session_list)
371
 
372
- with gr.Row():
373
- new_btn = gr.Button(" New Chat", variant="primary")
374
- del_btn = gr.Button("🗑️ Delete", variant="stop")
375
- refresh_btn = gr.Button("🔄 Refresh", variant="secondary")
376
-
377
- gr.Markdown("### 🤖 Model Selection")
378
- model_choice = gr.Dropdown(
379
- choices=list(MODELS.keys()),
380
- value=list(MODELS.keys())[0],
381
- label="Choose a model",
382
- interactive=True
383
- )
384
 
385
- gr.Markdown("### 📚 Dataset Selection")
386
- dataset_choice = gr.Dropdown(
387
- choices=DATASETS,
388
- value=DATASETS[0],
389
- label="Select a dataset",
390
- interactive=True
391
- )
392
-
393
- gr.Markdown("### ⚙️ Generation Settings")
394
- system_box = gr.Textbox(
395
- value=SYSTEM_DEFAULT,
396
- label="System message",
397
- lines=5
398
- )
399
  max_tokens = gr.Slider(256, 4096, value=1200, step=16, label="Max tokens")
400
  temperature = gr.Slider(0.0, 2.0, value=0.25, step=0.05, label="Temperature")
401
  top_p = gr.Slider(0.1, 1.0, value=0.9, step=0.05, label="Top-p")
402
 
403
  with gr.Column(scale=3):
404
- chatbot = gr.Chatbot(label="Assistant", height=520, type="messages")
405
  with gr.Row():
406
  user_box = gr.Textbox(placeholder="Describe your CRUD/backend task…", lines=3, scale=5)
407
  with gr.Row():
@@ -432,4 +350,4 @@ with gr.Blocks(title="Backend-Focused LLaMA/Mistral CRUD Assistant", theme=gr.th
432
  )
433
 
434
  if __name__ == "__main__":
435
- demo.launch()
 
12
  "Meta LLaMA 3.1 (8B Instruct)": "meta-llama/Llama-3.1-8B-Instruct",
13
  "Mistral 7B Instruct": "mistralai/Mistral-7B-Instruct-v0.3",
14
  }
15
+ DATASETS = ["The Stack", "CodeXGLUE"]
16
+ HF_TOKEN = os.getenv("HF_TOKEN")
 
17
  DB_PATH = "history.db"
18
 
19
  SYSTEM_DEFAULT = (
 
89
  conn = db()
90
  cur = conn.cursor()
91
  cur.execute("""
92
+ SELECT role, content FROM messages
93
+ WHERE session_id = ? ORDER BY id ASC
 
 
94
  """, (session_id,))
95
  rows = cur.fetchall()
96
  conn.close()
 
116
  title = first_user_text.strip().split("\n")[0]
117
  title = (title[:50] + "…") if len(title) > 50 else title
118
  cur.execute("UPDATE sessions SET title=? WHERE id=?", (title or "New chat", session_id))
119
+ conn.commit()
120
  conn.close()
121
 
122
  # ---------------------------
 
175
  return gr.update(choices=labels, value=selected), []
176
 
177
  FRONTEND_KEYWORDS = [
178
+ "react","vue","angular","html","css","javascript","tailwind","recharts","typescript"
179
  ]
180
 
181
  def is_frontend_request(user_text: str) -> bool:
182
  text_lower = user_text.lower()
183
  return any(kw in text_lower for kw in FRONTEND_KEYWORDS)
184
 
 
185
  def send_cb(user_text, selected_label, chatbot_msgs, system_message, max_tokens, temperature, top_p, model_choice, dataset_choice, *args):
186
  sid = label_to_id(selected_label)
187
  if sid is None:
 
189
  labels, _ = list_sessions()
190
  selected_label = next((lbl for lbl in labels if lbl.startswith(f"{sid} ")), None)
191
 
 
192
  add_message(sid, "user", user_text)
193
  update_session_title_if_needed(sid, user_text)
194
 
195
  display_msgs = chatbot_msgs[:]
196
  display_msgs.append({"role": "user", "content": user_text})
197
 
 
198
  if is_frontend_request(user_text):
199
  apology = "⚠️ I'm a backend-focused assistant and cannot provide frontend code."
200
  display_msgs.append({"role": "assistant", "content": apology})
 
202
  yield (display_msgs, "", selected_label)
203
  return
204
 
 
205
  display_msgs.append({"role": "assistant", "content": "…"})
206
  yield (display_msgs, "", selected_label)
207
 
208
  client = get_client(model_choice)
209
  api_messages = build_api_messages(sid, system_message)
210
  partial = ""
 
211
  try:
212
  for chunk in client.chat_completion(
213
  messages=api_messages,
 
216
  top_p=float(top_p),
217
  stream=True,
218
  ):
 
219
  if not hasattr(chunk, "choices") or not chunk.choices:
220
  continue
221
  choice = chunk.choices[0]
 
231
  yield (display_msgs, "", selected_label)
232
 
233
  add_message(sid, "assistant", partial)
 
234
  except Exception as e:
235
  display_msgs[-1]["content"] = f"⚠️ Error: {str(e)}"
236
  yield (display_msgs, "", selected_label)
 
244
  if not msgs:
245
  return [], ""
246
 
 
247
  if msgs and msgs[-1]["role"] == "assistant":
248
  conn = db()
249
  cur = conn.cursor()
250
  cur.execute("""
251
+ DELETE FROM messages WHERE id = (
252
+ SELECT id FROM messages WHERE session_id=? ORDER BY id DESC LIMIT 1
 
 
 
253
  )
254
  """, (sid,))
255
  conn.commit()
 
262
 
263
  client = get_client(model_choice)
264
  partial = ""
 
265
  try:
266
  for chunk in client.chat_completion(
267
  messages=api_messages,
 
270
  top_p=float(top_p),
271
  stream=True,
272
  ):
 
273
  if not hasattr(chunk, "choices") or not chunk.choices:
274
  continue
275
  choice = chunk.choices[0]
 
285
  yield display_msgs
286
 
287
  add_message(sid, "assistant", partial)
 
288
  except Exception as e:
289
  display_msgs[-1]["content"] = f"⚠️ Error: {str(e)}"
290
  yield display_msgs
 
301
  default_selected = labels[0] if labels else None
302
 
303
  with gr.Blocks(title="Backend-Focused LLaMA/Mistral CRUD Assistant", theme=gr.themes.Soft()) as demo:
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
304
  gr.Markdown("## 🗄️ LLaMA & Mistral Backend-Focused CRUD Automation — with Persistent History")
305
 
306
  with gr.Row():
307
  with gr.Column(scale=1, min_width=260):
308
+ session_list = gr.Radio(choices=labels, value=default_selected, label="Your chats", interactive=True)
309
+ new_btn = gr.Button("➕ New Chat", variant="primary")
310
+ del_btn = gr.Button("🗑️ Delete", variant="stop")
311
+ refresh_btn = gr.Button("🔄 Refresh", variant="secondary")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
312
 
313
+ model_choice = gr.Dropdown(choices=list(MODELS.keys()), value=list(MODELS.keys())[0], label="Choose a model", interactive=True)
314
+ dataset_choice = gr.Dropdown(choices=DATASETS, value=DATASETS[0], label="Select a dataset", interactive=True)
 
 
 
 
 
 
 
 
 
 
315
 
316
+ system_box = gr.Textbox(value=SYSTEM_DEFAULT, label="System message", lines=5)
 
 
 
 
 
 
 
 
 
 
 
 
 
317
  max_tokens = gr.Slider(256, 4096, value=1200, step=16, label="Max tokens")
318
  temperature = gr.Slider(0.0, 2.0, value=0.25, step=0.05, label="Temperature")
319
  top_p = gr.Slider(0.1, 1.0, value=0.9, step=0.05, label="Top-p")
320
 
321
  with gr.Column(scale=3):
322
+ chatbot = gr.Chatbot(label="Assistant", height=720, type="messages")
323
  with gr.Row():
324
  user_box = gr.Textbox(placeholder="Describe your CRUD/backend task…", lines=3, scale=5)
325
  with gr.Row():
 
350
  )
351
 
352
  if __name__ == "__main__":
353
+ demo.launch()