import streamlit as st
import tempfile
import os
import pandas as pd
import base64
from inference import InferencePipeline
# === Streamlit App Configuration ===
# Page-level settings: browser tab title and full-width layout.
st.set_page_config(page_title="Solar Panel Fault Detection", layout="wide")
st.title("🔍 Solar Panel Fault Detection (Roboflow Workflow)")
st.write("Upload a thermal video (MP4). Faults will be detected using your custom Roboflow model.")
# === Globals for Fault Logging ===
fault_log = []
# === Fault Handler Callback ===
def my_sink(result, video_frame):
    """Record every prediction from one processed frame into fault_log.

    Called by the inference pipeline per frame. Each detection is stored as a
    dict with frame index, timestamp, class label, confidence, and a pixel
    bounding box converted from center/size to corner coordinates.
    """
    preds = result.get("predictions")
    if not preds:
        return
    frame_idx = result.get("frame_id", -1)
    ts = result.get("timestamp", -1.0)
    for det in preds:
        cx, cy = det["x"], det["y"]
        half_w = det["width"] / 2
        half_h = det["height"] / 2
        # Convert center-based box (x, y, w, h) to integer corner coordinates.
        left, top = int(cx - half_w), int(cy - half_h)
        right, bottom = int(cx + half_w), int(cy + half_h)
        fault_log.append({
            "Frame": frame_idx,
            "Time (s)": round(ts, 2),
            "Fault Type": det["class"],
            "Confidence": round(det["confidence"], 2),
            "Box": f"({left},{top},{right},{bottom})"
        })
# === CSV Conversion ===
def convert_df(df):
    """Serialize a DataFrame to UTF-8 CSV bytes (no index column) for download."""
    csv_text = df.to_csv(index=False)
    return csv_text.encode("utf-8")
# === Streamlit UI ===
uploaded_file = st.file_uploader("📤 Upload thermal video", type=["mp4"])
if uploaded_file:
    st.video(uploaded_file)
    # Persist the upload to a real file: the pipeline takes a filesystem path,
    # not an in-memory buffer.
    temp_input_path = tempfile.NamedTemporaryFile(delete=False, suffix=".mp4").name
    with open(temp_input_path, "wb") as f:
        f.write(uploaded_file.read())
    st.info("⏳ Running fault detection...")
    try:
        # Initialize and start the inference pipeline.
        # NOTE(review): the API key is now read from the environment when set;
        # the hard-coded fallback only preserves previous behavior and should
        # be removed — a secret committed in source is a security risk.
        pipeline = InferencePipeline.init_with_workflow(
            api_key=os.environ.get("ROBOFLOW_API_KEY", "dxkgGGHSZ3DI8XzVn29U"),
            workspace_name="naveen-kumar-hnmil",
            workflow_id="custom-workflow",
            video_reference=temp_input_path,
            max_fps=0.5,
            on_prediction=my_sink
            # max_duration_seconds=30 # Optional: limit for testing
        )
        pipeline.start()
        pipeline.join()
        # Display results accumulated by my_sink during the run.
        if fault_log:
            df = pd.DataFrame(fault_log)
            st.subheader("📊 Detected Faults Table")
            st.dataframe(df)
            st.download_button(
                "📥 Download Fault Log CSV",
                convert_df(df),
                "fault_log.csv",
                "text/csv"
            )
        else:
            st.success("✅ No faults detected.")
    finally:
        # Always remove the temp copy, even if the pipeline raises
        # (previously the file leaked on any inference error).
        os.unlink(temp_input_path)
st.markdown("---")
st.caption("Built with Streamlit + Roboflow Inference SDK")