import gradio as gr
import os
import torch
from timeit import default_timer as timer
import model

# Class labels for the binary lung classifier (index order matters:
# 0 = Normal, 1 = Pneumonia — must match the training label encoding).
class_names = ["Normal", "Pneumonia"]

# Build the DenseNet-121 backbone plus its matching image transforms.
densenet121, transforms = model.create_densenet_model(num_classes=2)

# Load the fine-tuned weights on CPU. weights_only=True restricts
# unpickling to plain tensors/containers, preventing arbitrary code
# execution from a tampered checkpoint file — safe here because the
# file is a plain state_dict, not a full pickled model.
state_dict = torch.load(
    f="dense_90_tiny_lung_classifier_model.pth",
    map_location="cpu",
    weights_only=True,
)
densenet121.load_state_dict(state_dict)
def predict(img):
    """Classify a chest X-ray image as Normal vs. Pneumonia.

    Args:
        img: A PIL image supplied by the Gradio Image component.

    Returns:
        Tuple of ({class_name: probability}, prediction time in seconds),
        the two outputs wired to the Label and Number components.
    """
    start_time = timer()
    # Preprocess and add a batch dimension: (C, H, W) -> (1, C, H, W).
    batch = transforms(img).unsqueeze(0)
    densenet121.eval()
    with torch.inference_mode():
        logits = densenet121(batch)
        if logits.numel() == 1:
            # Single-logit binary head: sigmoid gives P(Pneumonia).
            prob_pneumonia = torch.sigmoid(logits).item()
        else:
            # Two-logit head (model was built with num_classes=2):
            # softmax over the class dimension; index 1 is Pneumonia.
            # The original sigmoid(...).item() would raise on a
            # multi-element tensor.
            prob_pneumonia = torch.softmax(logits, dim=-1)[0, 1].item()
    prob_normal = 1.0 - prob_pneumonia
    # Mapping the Gradio Label component expects: class name -> probability.
    pred_labels_and_probs = {
        "Normal": float(prob_normal),
        "Pneumonia": float(prob_pneumonia),
    }
    pred_time = round(timer() - start_time, 5)
    return pred_labels_and_probs, pred_time
# Directory of example images shown below the input widget;
# Gradio loads every image file found in this folder.
example_list = 'examples' # The path to your directory
# NOTE(review): duplicate of the top-of-file gradio import — redundant but harmless.
import gradio as gr
# Create title, description and article strings
# UI copy rendered on the Gradio page: title bar, description panel, footer article.
title = "AI-Driven Diagnostic Assistant: Breast Cancer & Pneumonia Classification"
description = " Engineered a high-precision computer vision pipeline using DenseNet121 to assist in the automated screening of medical imaging. The model achieves 90% accuracy in identifying pathologies across MRI and X-ray datasets. To ensure accessibility, I deployed the model via a Gradio web interface, allowing for real-time inference and a streamlined 'human-in-the-loop' diagnostic workflow.\nDisclaimer: These AI tools are for informational and research purposes. Medical diagnoses must be made by qualified healthcare professionals."
article = "Created at Mauaque Resettlement Center Gonzales Compound"
# Create the Gradio demo
demo = gr.Interface(fn=predict, # mapping function from input to output
inputs=gr.Image(type="pil"), # what are the inputs?
outputs=[gr.Label(num_top_classes=2, label="Predictions Result"), # what are the outputs?
gr.Number(label="Prediction time (s)")], # our fn has two outputs, therefore we have two outputs
examples=example_list,
title=title,
description=description,
article=article)
# Launch the demo!
demo.launch(debug=True, # print errors locally?
share=True) # generate a publically shareable URL?