# MedNIST medical-image classification demo (MONAI DenseNet-121 + Gradio UI).
import glob
import os

import gradio as gr
import torch
from monai.networks.nets import DenseNet121
from monai.transforms import (
    Compose,
    EnsureChannelFirst,
    LoadImage,
    ScaleIntensity,
)

# 2-D DenseNet-121 classifier: one grayscale input channel, six MedNIST classes.
model = DenseNet121(spatial_dims=2, in_channels=1, out_channels=6)
# map_location='cpu' so the demo runs on machines without a GPU.
model.load_state_dict(
    torch.load('weights/mednist_model.pth', map_location=torch.device('cpu'))
)

# Preprocessing applied to each uploaded image: load from path, move the
# channel axis first, then scale intensities to [0, 1].
test_transforms = Compose(
    [LoadImage(image_only=True), EnsureChannelFirst(), ScaleIntensity()]
)

# Output-index -> label mapping; order must match the training label order
# (TODO confirm against the training script).
class_names = [
    'AbdomenCT', 'BreastMRI', 'CXR', 'ChestCT', 'Hand', 'HeadCT'
]
def classify_image(image_filepath):
    """Classify one medical image and return per-class probabilities.

    Args:
        image_filepath: Path to an image file readable by MONAI's ``LoadImage``.

    Returns:
        Dict mapping each class name to its softmax probability (``float``),
        in the shape expected by ``gr.Label``.
    """
    # `img` instead of `input` — the original shadowed the builtin.
    img = test_transforms(image_filepath)
    model.eval()
    with torch.no_grad():
        # Add a batch dimension of 1 for the forward pass.
        logits = model(img.unsqueeze(dim=0))
    probs = torch.nn.functional.softmax(logits[0], dim=0)
    # Derive the count from class_names rather than hard-coding 6.
    confidences = {name: float(probs[i]) for i, name in enumerate(class_names)}
    print(confidences)  # lightweight server-side logging
    return confidences
# Build and launch the Gradio UI: image in, top-3 class probabilities out.
with gr.Blocks(
    title="Medical Image Classification with MONAI - ClassCat",
    css=".gradio-container {background:mintcream;}",
) as demo:
    gr.HTML("""<div style="font-family:'Times New Roman', 'Serif'; font-size:16pt; font-weight:bold; text-align:center; color:royalblue;">Medical Image Classification with MONAI</div>""")
    with gr.Row():
        # NOTE(review): `shape=` was removed in Gradio 4.x and raises a
        # TypeError at startup, so it is dropped here. The model was presumably
        # trained on 64x64 MedNIST images; if inputs of other sizes degrade
        # accuracy, resize inside the MONAI transform pipeline instead.
        input_image = gr.Image(type="filepath", image_mode="L")
        output_label = gr.Label(label="Probabilities", num_top_classes=3)
    send_btn = gr.Button("Infer")
    send_btn.click(fn=classify_image, inputs=input_image, outputs=output_label)

demo.launch(debug=True)