masdar committed
Commit 8133a9d · 1 Parent(s): 7e59f5a

upload app.py

Files changed (1): app.py +9 -2
app.py CHANGED
@@ -83,7 +83,10 @@ import gradio as gr
 i = gr.inputs.Image(shape=(112, 112), label="Echocardiogram")
 o = gr.outputs.Image(label="Segmentation Mask")
 
-#examples = [["img1.jpg"], ["img2.jpg"]]
+examples = [["TCGA_CS_5395_19981004_12.png"],
+            ["TCGA_CS_5395_19981004_14.png"],
+            ["TCGA_DU_5849_19950405_24.png"]]
+
 title = None #"Left Ventricle Segmentation"
 description = "This semantic segmentation model identifies the left ventricle in echocardiogram images."
 # videos. Accurate evaluation of the motion and size of the left ventricle is crucial for the assessment of cardiac function and ejection fraction. In this interface, the user inputs apical-4-chamber images from echocardiography videos and the model will output a prediction of the localization of the left ventricle in blue. This model was trained on the publicly released EchoNet-Dynamic dataset of 10k echocardiogram videos with 20k expert annotations of the left ventricle and published as part of ‘Video-based AI for beat-to-beat assessment of cardiac function’ by Ouyang et al. in Nature, 2020."
@@ -91,4 +94,8 @@ description = "This semantic segmentation model identifies the left ventricle in
 #thumbnail = "https://raw.githubusercontent.com/gradio-app/hub-echonet/master/thumbnail.png"
 
 #gr.Interface(segment, i, o, examples=examples, allow_flagging=False, analytics_enabled=False, thumbnail=thumbnail).launch()
-gr.Interface(segment, i, o, allow_flagging=False, analytics_enabled=False).launch()
+gr.Interface(segment, i, o,
+             allow_flagging = False,
+             description = description,
+             examples = examples,
+             analytics_enabled = False).launch()
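
Note: the updated call still uses the Gradio 2.x gr.inputs / gr.outputs namespaces, which were removed in Gradio 3. Below is a minimal sketch of the same interface on the Gradio 3.x API, not the committed code: it assumes the segment function defined earlier in app.py, assumes the TCGA example images are uploaded next to app.py, and swaps the boolean allow_flagging=False for the string mode "never" that 3.x expects.

import gradio as gr

# Gradio 3.x components replace gr.inputs.Image / gr.outputs.Image;
# shape=(112, 112) still resizes the uploaded image before inference.
i = gr.Image(shape=(112, 112), label="Echocardiogram")
o = gr.Image(label="Segmentation Mask")

# Example images from the diff above; the files must sit alongside app.py.
examples = [["TCGA_CS_5395_19981004_12.png"],
            ["TCGA_CS_5395_19981004_14.png"],
            ["TCGA_DU_5849_19950405_24.png"]]

description = "This semantic segmentation model identifies the left ventricle in echocardiogram images."

# segment is assumed to be the prediction function defined earlier in app.py.
gr.Interface(segment, i, o,
             allow_flagging="never",   # 3.x takes string modes, not booleans
             description=description,
             examples=examples,
             analytics_enabled=False).launch()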