Percy3822 committed on
Commit
f7938a4
·
verified ·
1 Parent(s): a836dc9

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +72 -25
app.py CHANGED
@@ -14,8 +14,11 @@ RUNS.mkdir(exist_ok=True)
14
  # ----------------- Logging -----------------
15
  def append_log(msg: str):
16
  msg = (msg or "").rstrip("\n")
17
- with open(LOG, "a", encoding="utf-8") as lf:
18
- lf.write(msg + "\n")
 
 
 
19
 
20
  def read_logs():
21
  return LOG.read_text(encoding="utf-8")[-20000:] if LOG.exists() else "⏳ Waiting…"
@@ -49,6 +52,7 @@ def dropdown_update_safe(models, prefer=None):
49
 
50
  # ----------------- Dataset Upload -----------------
51
  def upload_dataset(file):
 
52
  if not file:
53
  return "❌ No file selected.", ls_workspace()
54
  if hasattr(file, "name") and os.path.isfile(file.name):
@@ -58,10 +62,7 @@ def upload_dataset(file):
58
 
59
  # ----------------- Training (Live Logs) -----------------
60
  def start_training_live(run_name):
61
- """
62
- Streams training logs to the UI while the subprocess runs.
63
- Yields tuples for outputs: [status, download_file, workspace, logs, model_dropdown]
64
- """
65
  # Quick guard: dataset must exist
66
  if not DATA.exists():
67
  msg = "❌ dataset.jsonl not found. Upload a JSONL dataset first."
@@ -124,9 +125,7 @@ def start_training_live(run_name):
124
  if line:
125
  append_log(line.rstrip("\n"))
126
  live_log.write(line)
127
- # Trim to last ~20k chars for UI
128
  text = live_log.getvalue()[-20000:]
129
- # yield with download hidden (until zip exists)
130
  yield (
131
  status_msg,
132
  gr.update(value=None, visible=False),
@@ -134,7 +133,6 @@ def start_training_live(run_name):
134
  text,
135
  dropdown_update_safe(list_models(), prefer=None),
136
  )
137
- # if zip appears during training (e.g., early save), surface it
138
  if zip_path.exists():
139
  yield (
140
  "📦 Model zip created during run.",
@@ -160,6 +158,7 @@ def start_training_live(run_name):
160
  yield (info, gr.update(value=None, visible=False), ls_workspace(), final_logs, model_update)
161
 
162
  def refresh_download():
 
163
  zips = sorted(RUNS.glob("*.zip"), key=lambda p: p.stat().st_mtime, reverse=True)
164
  latest = zips[0] if zips else None
165
  models = list_models()
@@ -171,6 +170,7 @@ def refresh_download():
171
 
172
  # ----------------- Import a Zip as Model Folder -----------------
173
  def import_zip(zfile):
 
174
  if not zfile:
175
  return "❌ No zip selected.", list_models()
176
  dest = ROOT / "imported_model"
@@ -222,6 +222,7 @@ def ping():
222
  return "✅ UI is connected and responding."
223
 
224
  def load_selected_model(model_path):
 
225
  # Dropdown may pass a list; coerce to string
226
  if isinstance(model_path, list):
227
  model_path = model_path[0] if model_path else None
@@ -242,11 +243,56 @@ def load_selected_model(model_path):
242
  append_log("❌ Load error:\n" + tb)
243
  return "❌ Error while loading model:\n" + "".join(traceback.format_exception_only(type(e), e))
244
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
245
  def generate_stream(model_path, prompt):
246
- """Stream intermediate status to prove the button is working, then final text."""
247
- # immediate feedback
248
  yield "⏳ Loading model…"
249
- append_log("▶ Generate clicked")
250
 
251
  # Coerce
252
  if isinstance(model_path, list):
@@ -269,10 +315,10 @@ def generate_stream(model_path, prompt):
269
  try:
270
  pipe = get_generation_pipeline(model_path)
271
  yield "⚙ Generating… (this may take a bit on CPU)"
272
- append_log(f"📝 Generating… prompt_len={len(prompt)}")
273
  result = pipe(
274
  prompt.strip(),
275
- max_new_tokens=80, # quicker to show something
276
  do_sample=True,
277
  temperature=0.3,
278
  top_p=0.9,
@@ -302,13 +348,13 @@ with gr.Blocks(title="Python AI — Train & Test") as app:
302
  gr.Markdown("### Choose a model folder or upload a .zip, then prompt it")
303
  with gr.Row():
304
  refresh_btn = gr.Button("↻ Refresh Model List")
305
- ping_btn = gr.Button("🔔 Ping UI") # quick sanity check
306
  model_list = gr.Dropdown(
307
  choices=list_models(),
308
  label="Available AIs",
309
  interactive=True,
310
- allow_custom_value=True, # keeps UI quiet when empty
311
- multiselect=False # force single selection
312
  )
313
  load_btn = gr.Button("📦 Load Model")
314
  load_status = gr.Textbox(label="Model Status", interactive=False)
@@ -321,7 +367,9 @@ with gr.Blocks(title="Python AI — Train & Test") as app:
321
  lines=8,
322
  placeholder="### Instruction:\nPython: write a function ...\n### Response:\n"
323
  )
324
- go = gr.Button("Generate")
 
 
325
  out = gr.Textbox(label="AI Response", lines=20)
326
 
327
  # ---------- Train Tab ----------
@@ -338,29 +386,28 @@ with gr.Blocks(title="Python AI — Train & Test") as app:
338
  refresh_dl_btn = gr.Button("Refresh Download")
339
 
340
  # ---------- Wiring ----------
341
- # Upload + workspace
342
  ds.change(upload_dataset, inputs=ds, outputs=[up_status, ws])
343
 
344
- # Train (live streaming)
345
  start.click(
346
  start_training_live,
347
  inputs=[run_name],
348
  outputs=[status, download_file, ws, logs, model_list]
349
  )
350
 
351
- # Download refresh
352
  refresh_dl_btn.click(
353
  refresh_download,
354
  outputs=[download_file, ws, model_list]
355
  )
356
 
357
- # Test tab helpers
358
  refresh_btn.click(lambda: dropdown_update_safe(list_models()), outputs=model_list)
359
  ping_btn.click(ping, outputs=out)
360
  load_btn.click(load_selected_model, inputs=[model_list], outputs=[load_status])
361
  zip_in.change(import_zip, inputs=zip_in, outputs=[import_status, model_list])
362
 
363
- # Streamed generation output
364
- go.click(generate_stream, inputs=[model_list, prompt], outputs=out)
 
365
 
366
- app.queue(default_concurrency_limit=1).launch()
 
 
 
14
  # ----------------- Logging -----------------
15
  def append_log(msg: str):
16
  msg = (msg or "").rstrip("\n")
17
+ try:
18
+ with open(LOG, "a", encoding="utf-8") as lf:
19
+ lf.write(msg + "\n")
20
+ except Exception:
21
+ pass
22
 
23
  def read_logs():
24
  return LOG.read_text(encoding="utf-8")[-20000:] if LOG.exists() else "⏳ Waiting…"
 
52
 
53
  # ----------------- Dataset Upload -----------------
54
  def upload_dataset(file):
55
+ append_log("📥 upload_dataset clicked")
56
  if not file:
57
  return "❌ No file selected.", ls_workspace()
58
  if hasattr(file, "name") and os.path.isfile(file.name):
 
62
 
63
  # ----------------- Training (Live Logs) -----------------
64
  def start_training_live(run_name):
65
+ append_log("🚀 start_training_live clicked")
 
 
 
66
  # Quick guard: dataset must exist
67
  if not DATA.exists():
68
  msg = "❌ dataset.jsonl not found. Upload a JSONL dataset first."
 
125
  if line:
126
  append_log(line.rstrip("\n"))
127
  live_log.write(line)
 
128
  text = live_log.getvalue()[-20000:]
 
129
  yield (
130
  status_msg,
131
  gr.update(value=None, visible=False),
 
133
  text,
134
  dropdown_update_safe(list_models(), prefer=None),
135
  )
 
136
  if zip_path.exists():
137
  yield (
138
  "📦 Model zip created during run.",
 
158
  yield (info, gr.update(value=None, visible=False), ls_workspace(), final_logs, model_update)
159
 
160
  def refresh_download():
161
+ append_log("↻ refresh_download clicked")
162
  zips = sorted(RUNS.glob("*.zip"), key=lambda p: p.stat().st_mtime, reverse=True)
163
  latest = zips[0] if zips else None
164
  models = list_models()
 
170
 
171
  # ----------------- Import a Zip as Model Folder -----------------
172
  def import_zip(zfile):
173
+ append_log("📦 import_zip clicked")
174
  if not zfile:
175
  return "❌ No zip selected.", list_models()
176
  dest = ROOT / "imported_model"
 
222
  return "✅ UI is connected and responding."
223
 
224
  def load_selected_model(model_path):
225
+ append_log("📦 load_selected_model clicked")
226
  # Dropdown may pass a list; coerce to string
227
  if isinstance(model_path, list):
228
  model_path = model_path[0] if model_path else None
 
243
  append_log("❌ Load error:\n" + tb)
244
  return "❌ Error while loading model:\n" + "".join(traceback.format_exception_only(type(e), e))
245
 
246
+ def generate_once(model_path, prompt):
247
+ """Non-streaming fallback."""
248
+ append_log("▶ generate_once clicked")
249
+ # Coerce
250
+ if isinstance(model_path, list):
251
+ model_path = model_path[0] if model_path else None
252
+
253
+ # validate
254
+ if not model_path:
255
+ msg = "❌ Select a model from the dropdown first."
256
+ append_log(msg); return msg
257
+ if not isinstance(model_path, str):
258
+ msg = f"❌ Invalid model path type: {type(model_path).__name__}"
259
+ append_log(msg); return msg
260
+ if not Path(model_path).exists():
261
+ msg = f"❌ Model folder not found: {model_path}"
262
+ append_log(msg); return msg
263
+ if not prompt or not prompt.strip():
264
+ msg = "❌ Enter a prompt."
265
+ append_log(msg); return msg
266
+
267
+ try:
268
+ pipe = get_generation_pipeline(model_path)
269
+ append_log(f"📝 Generating once… prompt_len={len(prompt)}")
270
+ result = pipe(
271
+ prompt.strip(),
272
+ max_new_tokens=80,
273
+ do_sample=True,
274
+ temperature=0.3,
275
+ top_p=0.9,
276
+ repetition_penalty=1.15,
277
+ no_repeat_ngram_size=4,
278
+ truncation=True,
279
+ return_full_text=True,
280
+ )
281
+ text = result[0].get("generated_text", "")
282
+ if not text:
283
+ append_log("⚠ Empty generated_text")
284
+ return "⚠ Model returned empty text. Try lowering temperature or adding more context."
285
+ append_log("✅ Generation OK.")
286
+ return text
287
+ except Exception as e:
288
+ tb = traceback.format_exc()
289
+ append_log("❌ Generation error:\n" + tb)
290
+ return "❌ Error during generation:\n" + "".join(traceback.format_exception_only(type(e), e))
291
+
292
  def generate_stream(model_path, prompt):
293
+ """Streaming version (if frontend streaming works)."""
 
294
  yield "⏳ Loading model…"
295
+ append_log("▶ generate_stream clicked")
296
 
297
  # Coerce
298
  if isinstance(model_path, list):
 
315
  try:
316
  pipe = get_generation_pipeline(model_path)
317
  yield "⚙ Generating… (this may take a bit on CPU)"
318
+ append_log(f"📝 Generating (stream)… prompt_len={len(prompt)}")
319
  result = pipe(
320
  prompt.strip(),
321
+ max_new_tokens=80,
322
  do_sample=True,
323
  temperature=0.3,
324
  top_p=0.9,
 
348
  gr.Markdown("### Choose a model folder or upload a .zip, then prompt it")
349
  with gr.Row():
350
  refresh_btn = gr.Button("↻ Refresh Model List")
351
+ ping_btn = gr.Button("🔔 Ping UI") # sanity check
352
  model_list = gr.Dropdown(
353
  choices=list_models(),
354
  label="Available AIs",
355
  interactive=True,
356
+ allow_custom_value=True,
357
+ multiselect=False
358
  )
359
  load_btn = gr.Button("📦 Load Model")
360
  load_status = gr.Textbox(label="Model Status", interactive=False)
 
367
  lines=8,
368
  placeholder="### Instruction:\nPython: write a function ...\n### Response:\n"
369
  )
370
+ with gr.Row():
371
+ go_stream = gr.Button("Generate (stream)")
372
+ go_once = gr.Button("Generate (once)")
373
  out = gr.Textbox(label="AI Response", lines=20)
374
 
375
  # ---------- Train Tab ----------
 
386
  refresh_dl_btn = gr.Button("Refresh Download")
387
 
388
  # ---------- Wiring ----------
 
389
  ds.change(upload_dataset, inputs=ds, outputs=[up_status, ws])
390
 
 
391
  start.click(
392
  start_training_live,
393
  inputs=[run_name],
394
  outputs=[status, download_file, ws, logs, model_list]
395
  )
396
 
 
397
  refresh_dl_btn.click(
398
  refresh_download,
399
  outputs=[download_file, ws, model_list]
400
  )
401
 
 
402
  refresh_btn.click(lambda: dropdown_update_safe(list_models()), outputs=model_list)
403
  ping_btn.click(ping, outputs=out)
404
  load_btn.click(load_selected_model, inputs=[model_list], outputs=[load_status])
405
  zip_in.change(import_zip, inputs=zip_in, outputs=[import_status, model_list])
406
 
407
+ # Generation (two modes)
408
+ go_stream.click(generate_stream, inputs=[model_list, prompt], outputs=out)
409
+ go_once.click(generate_once, inputs=[model_list, prompt], outputs=out)
410
 
411
+ # Critical: disable SSR; ensure queue is enabled
412
+ app.queue(default_concurrency_limit=1)
413
+ app.launch(ssr_mode=False, show_error=True)