SamDaLamb commited on
Commit
8dd7af4
·
verified ·
1 Parent(s): 4928a9e

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +40 -0
app.py ADDED
@@ -0,0 +1,40 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
import functools


@functools.lru_cache(maxsize=1)
def _load_speclab_model():
    """Download the SpecLab model once and cache it for the process lifetime.

    Bug fix: the original called ``torch.hub.load(..., force_reload=True)``
    inside the request handler, re-downloading the model on every inference.
    """
    # Imports are local so the module can be inspected without the heavy
    # ML dependencies installed; the original file had no imports at all.
    import torch

    # for some reason this loads the model in src rather than demo
    model = torch.hub.load('Nano1337/SpecLab', 'srdetect')
    model.eval()
    return model


def speclab(img):
    """Detect specular highlights in *img* and paint them green.

    Args:
        img: input image array, shape (H, W, 3), uint8 RGB (as supplied by
            ``gr.Image``). Modified in place where the model fires.

    Returns:
        The same image array with detected pixels set to [0, 255, 0].
    """
    import torch
    import albumentations as A
    from albumentations.pytorch import ToTensorV2

    model = _load_speclab_model()

    # Preprocess: scale to [-1, 1] and convert HWC uint8 -> CHW float tensor.
    preprocess = A.Compose([
        A.Normalize(mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5)),
        ToTensorV2()
    ])
    # Renamed from `input` (shadowed the builtin); add the batch dimension.
    batch = preprocess(image=img)['image'].unsqueeze(0)

    # Bug fix: run inference without autograd bookkeeping.
    with torch.no_grad():
        output = model(batch)

    # Overlay the prediction onto the original image.
    # NOTE(review): assumes `output` is a mask with values in {0, 255} whose
    # spatial shape broadcasts against `img` — confirm against the model head.
    img[output == 255] = [0, 255, 0]
    return img
# ---- Gradio app definition and launch -------------------------------------
# Bug fix: the original file used `gr` and `examples` without ever defining
# them (no imports anywhere in the 40-line file), so the script died with a
# NameError before the interface could start.
import gradio as gr

title = "SpecLab Demo"
description = "<p style='text-align: center'>Gradio demo for an ASPP model architecture trained on the SpecLab dataset. To use it, simply add your image, or click one of the examples to load them. Since this demo is run on CPU only, please allow additional time for processing. </p>"
article = "<p style='text-align: center'><a href='https://github.com/Nano1337/SpecLab'>Github Repo</a></p>"
css = "#0 {object-fit: contain;} #1 {object-fit: contain;}"

# TODO(review): populate with example image paths. The original referenced an
# undefined `examples` name; start from an explicit empty list instead.
examples = []

demo = gr.Interface(fn=speclab,
                    title=title,
                    description=description,
                    article=article,
                    inputs=gr.Image(elem_id=0, show_label=False),
                    outputs=gr.Image(elem_id=1, show_label=False),
                    css=css,
                    # Only pass/cache examples when some actually exist;
                    # cache_examples=True with no examples fails in Gradio.
                    examples=examples or None,
                    cache_examples=bool(examples),
                    allow_flagging='never')
demo.launch()