petter2025 committed on
Commit
1e5a278
·
verified ·
1 Parent(s): c35dc07

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +64 -23
app.py CHANGED
@@ -199,8 +199,8 @@ async def handle_text(task_type, prompt):
199
  logger.error(f"Text task error: {e}")
200
  return {"error": str(e)}
201
 
202
- async def handle_image(prompt):
203
- """Handle image generation and quality analysis. Returns (image, json_data)."""
204
  global last_task_category
205
  last_task_category = "image"
206
  if image_pipe is None:
@@ -208,7 +208,7 @@ async def handle_image(prompt):
208
  try:
209
  import time
210
  start = time.time()
211
- image = image_pipe(prompt, num_inference_steps=2).images[0] # minimal steps
212
  gen_time = time.time() - start
213
  retrieval_score = retriever.get_similarity(prompt)
214
  event = AIEvent(
@@ -293,7 +293,7 @@ async def handle_audio(audio_file):
293
  return {"error": str(e)}
294
 
295
  async def read_iot_sensors(fault_type):
296
- """Read simulated IoT sensors, run diagnostics, and predict failure."""
297
  global last_task_category, iot_history
298
  last_task_category = "iot"
299
  iot_sim.set_fault(fault_type if fault_type != "none" else None)
@@ -302,10 +302,10 @@ async def read_iot_sensors(fault_type):
302
  if len(iot_history) > 100:
303
  iot_history.pop(0)
304
 
305
- # Create IoTEvent with valid component name (hyphens allowed, underscores not allowed)
306
  event = IoTEvent(
307
  timestamp=datetime.utcnow(),
308
- component="robotic-arm", # changed from 'robotic_arm'
309
  service_mesh="factory",
310
  latency_p99=0,
311
  error_rate=0.0,
@@ -317,10 +317,9 @@ async def read_iot_sensors(fault_type):
317
  motor_current=data['motor_current'],
318
  position_error=data['position_error']
319
  )
320
- # Run diagnostician
321
  diag_result = await robotics_diagnostician.analyze(event)
322
 
323
- # Simple failure prediction (linear extrapolation)
324
  prediction = None
325
  if len(iot_history) >= 5:
326
  temps = [h['temperature'] for h in iot_history[-5:]]
@@ -334,7 +333,9 @@ async def read_iot_sensors(fault_type):
334
  "time_to_overheat_min": time_to_threshold
335
  }
336
 
337
- return data, diag_result, prediction
 
 
338
 
339
  # ----------------------------------------------------------------------
340
  # Gradio UI
@@ -350,31 +351,71 @@ with gr.Blocks(title="ARF v4 – AI Reliability Lab", theme="soft") as demo:
350
  text_btn = gr.Button("Generate")
351
  text_output = gr.JSON(label="Analysis")
352
 
353
- # Tab 2: Image Generation
354
  with gr.TabItem("Image Generation"):
355
  img_prompt = gr.Textbox(label="Prompt", value="A cat wearing a hat")
 
356
  img_btn = gr.Button("Generate")
357
  img_output = gr.Image(label="Generated Image")
358
  img_json = gr.JSON(label="Analysis")
359
 
360
  # Tab 3: Audio Transcription
361
  with gr.TabItem("Audio Transcription"):
 
 
362
  audio_input = gr.Audio(type="filepath", label="Upload audio file")
363
  audio_btn = gr.Button("Transcribe")
364
  audio_output = gr.JSON(label="Analysis")
365
 
366
- # Tab 4: Robotics / IoT
367
  with gr.TabItem("Robotics / IoT"):
368
  gr.Markdown("### Simulated Robotic Arm Monitoring")
369
- fault_type = gr.Dropdown(
370
- ["none", "overheat", "vibration", "stall", "drift"],
371
- value="none",
372
- label="Inject Fault"
373
- )
374
- refresh_btn = gr.Button("Read Sensors")
375
- sensor_display = gr.JSON(label="Sensor Readings")
376
- diag_display = gr.JSON(label="Diagnosis")
377
- pred_display = gr.JSON(label="Failure Prediction")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
378
 
379
  # Feedback row (shared across all users – for demo purposes)
380
  with gr.Row():
@@ -389,8 +430,8 @@ with gr.Blocks(title="ARF v4 – AI Reliability Lab", theme="soft") as demo:
389
  outputs=text_output
390
  )
391
  img_btn.click(
392
- fn=lambda p: asyncio.run(handle_image(p)),
393
- inputs=img_prompt,
394
  outputs=[img_output, img_json]
395
  )
396
  audio_btn.click(
@@ -401,7 +442,7 @@ with gr.Blocks(title="ARF v4 – AI Reliability Lab", theme="soft") as demo:
401
  refresh_btn.click(
402
  fn=lambda f: asyncio.run(read_iot_sensors(f)),
403
  inputs=fault_type,
404
- outputs=[sensor_display, diag_display, pred_display]
405
  )
406
  feedback_up.click(fn=lambda: feedback(True), outputs=feedback_msg)
407
  feedback_down.click(fn=lambda: feedback(False), outputs=feedback_msg)
 
199
  logger.error(f"Text task error: {e}")
200
  return {"error": str(e)}
201
 
202
+ async def handle_image(prompt, steps):
203
+ """Handle image generation with configurable steps. Returns (image, json_data)."""
204
  global last_task_category
205
  last_task_category = "image"
206
  if image_pipe is None:
 
208
  try:
209
  import time
210
  start = time.time()
211
+ image = image_pipe(prompt, num_inference_steps=steps).images[0]
212
  gen_time = time.time() - start
213
  retrieval_score = retriever.get_similarity(prompt)
214
  event = AIEvent(
 
293
  return {"error": str(e)}
294
 
295
  async def read_iot_sensors(fault_type):
296
+ """Read simulated IoT sensors, run diagnostics, predict failure, and return updated plot data."""
297
  global last_task_category, iot_history
298
  last_task_category = "iot"
299
  iot_sim.set_fault(fault_type if fault_type != "none" else None)
 
302
  if len(iot_history) > 100:
303
  iot_history.pop(0)
304
 
305
+ # Create IoTEvent with valid component name
306
  event = IoTEvent(
307
  timestamp=datetime.utcnow(),
308
+ component="robotic-arm",
309
  service_mesh="factory",
310
  latency_p99=0,
311
  error_rate=0.0,
 
317
  motor_current=data['motor_current'],
318
  position_error=data['position_error']
319
  )
 
320
  diag_result = await robotics_diagnostician.analyze(event)
321
 
322
+ # Simple failure prediction
323
  prediction = None
324
  if len(iot_history) >= 5:
325
  temps = [h['temperature'] for h in iot_history[-5:]]
 
333
  "time_to_overheat_min": time_to_threshold
334
  }
335
 
336
+ # Prepare temperature history for plotting
337
+ temp_history = [h['temperature'] for h in iot_history[-20:]] # last 20 readings
338
+ return data, diag_result, prediction, temp_history
339
 
340
  # ----------------------------------------------------------------------
341
  # Gradio UI
 
351
  text_btn = gr.Button("Generate")
352
  text_output = gr.JSON(label="Analysis")
353
 
354
+ # Tab 2: Image Generation (enhanced with steps slider)
355
  with gr.TabItem("Image Generation"):
356
  img_prompt = gr.Textbox(label="Prompt", value="A cat wearing a hat")
357
+ img_steps = gr.Slider(1, 10, value=2, step=1, label="Inference Steps (higher = better quality, slower)")
358
  img_btn = gr.Button("Generate")
359
  img_output = gr.Image(label="Generated Image")
360
  img_json = gr.JSON(label="Analysis")
361
 
362
  # Tab 3: Audio Transcription
363
  with gr.TabItem("Audio Transcription"):
364
+ # Add a sample audio button for quick testing
365
+ gr.Markdown("Click the microphone to record, or upload a file. Try the sample: [Sample Audio](https://huggingface.co/datasets/Narsil/asr_dummy/resolve/main/1.flac)")
366
  audio_input = gr.Audio(type="filepath", label="Upload audio file")
367
  audio_btn = gr.Button("Transcribe")
368
  audio_output = gr.JSON(label="Analysis")
369
 
370
+ # Tab 4: Robotics / IoT (enhanced with live plot)
371
  with gr.TabItem("Robotics / IoT"):
372
  gr.Markdown("### Simulated Robotic Arm Monitoring")
373
+ with gr.Row():
374
+ with gr.Column():
375
+ fault_type = gr.Dropdown(
376
+ ["none", "overheat", "vibration", "stall", "drift"],
377
+ value="none",
378
+ label="Inject Fault"
379
+ )
380
+ refresh_btn = gr.Button("Read Sensors")
381
+ with gr.Column():
382
+ sensor_display = gr.JSON(label="Sensor Readings")
383
+ with gr.Row():
384
+ with gr.Column():
385
+ diag_display = gr.JSON(label="Diagnosis")
386
+ with gr.Column():
387
+ pred_display = gr.JSON(label="Failure Prediction")
388
+ with gr.Row():
389
+ temp_plot = gr.LinePlot(
390
+ label="Temperature History (last 20 readings)",
391
+ x="index",
392
+ y="temperature",
393
+ width=600,
394
+ height=300
395
+ )
396
+
397
+ # Tab 5: Enterprise – Marketing and Sales
398
+ with gr.TabItem("Enterprise"):
399
+ gr.Markdown("""
400
+ ## 🚀 ARF Enterprise – Governed Execution for Autonomous Infrastructure
401
+
402
+ Take ARF to production with enterprise‑grade safety, compliance, and learning.
403
+
404
+ ### Key Enterprise Features:
405
+ - **Autonomous Execution** – Deterministic, policy‑controlled healing actions.
406
+ - **Audit Trails & Compliance** – Full traceability for SOC2, HIPAA, GDPR.
407
+ - **Learning Loops** – Models improve over time with your data.
408
+ - **Multi‑Tenant Control** – Role‑based access and isolation.
409
+ - **Cloud Integrations** – Azure, AWS, GCP native clients.
410
+ - **24/7 Support & SLAs** – Enterprise‑grade reliability.
411
+
412
+ ### Get Started
413
+ - 📅 [Book a Demo](https://calendly.com/petter2025us/30min)
414
+ - 📧 [Contact Sales](mailto:petter2025us@outlook.com)
415
+ - 📄 [Download Datasheet](#) (coming soon)
416
+
417
+ *Already using ARF OSS? Upgrade seamlessly – same core, governed execution.*
418
+ """)
419
 
420
  # Feedback row (shared across all users – for demo purposes)
421
  with gr.Row():
 
430
  outputs=text_output
431
  )
432
  img_btn.click(
433
+ fn=lambda p, s: asyncio.run(handle_image(p, s)),
434
+ inputs=[img_prompt, img_steps],
435
  outputs=[img_output, img_json]
436
  )
437
  audio_btn.click(
 
442
  refresh_btn.click(
443
  fn=lambda f: asyncio.run(read_iot_sensors(f)),
444
  inputs=fault_type,
445
+ outputs=[sensor_display, diag_display, pred_display, temp_plot]
446
  )
447
  feedback_up.click(fn=lambda: feedback(True), outputs=feedback_msg)
448
  feedback_down.click(fn=lambda: feedback(False), outputs=feedback_msg)