taesiri committed · Commit 71e1d2b · Parent: 3717c61

update layout

Files changed (1): app.py (+30, -4)
app.py CHANGED
```diff
@@ -131,13 +131,39 @@ def search(query_image, searcher=searcher):
 
 blocks = gr.Blocks()
 
+tldr = """
+We propose two architectures of interpretable image classifiers
+that first explain, and then predict by harnessing
+the visual correspondences between a query image and exemplars.
+Our models improve on several out-of-distribution (OOD) ImageNet
+datasets while achieving competitive performance on ImageNet
+than the black-box baselines (e.g. ImageNet-pretrained ResNet-50).
+On a large-scale human study (∼60 users per method per dataset)
+on ImageNet and CUB, our correspondence-based explanations led
+to human-alone image classification accuracy and human-AI team
+accuracy that are consistently better than that of kNN.
+We show that it is possible to achieve complementary human-AI
+team accuracy (i.e., that is higher than either AI-alone or
+human-alone), on ImageNet and CUB.
+
+<div align="center">
+<a href="https://github.com/anguyen8/visual-correspondence-XAI">Github Page</a>
+</div>
+"""
+
 with blocks:
     gr.Markdown(""" # CHM-Corr DEMO""")
-    gr.Markdown(
-        """ ### Parameters: N=50, k=20 - Using ``ImageNet Pretrained ResNet50`` features"""
-    )
+    gr.Markdown(f""" ## Description: \n {tldr}""")
+
+    with gr.Row():
+        input_image = gr.Image(type="filepath")
+
+        with gr.Column():
+            gr.Markdown(f"### Parameters:")
+            gr.Markdown(
+                "`N=50`\n `k=20` \nUsing `ImageNet Pretrained ResNet50` features"
+            )
 
-    input_image = gr.Image(type="filepath")
     run_btn = gr.Button("Classify")
     gr.Markdown(""" ### CHM-Corr Output Visualization """)
     viz_plot = gr.Image(type="pil", label="Visualization")
```
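
For readers who want to try the new layout in isolation, below is a minimal, self-contained sketch of the post-commit UI. It is an approximation, not the full app.py: the `search` function here is a stub standing in for the real `search(query_image, searcher=searcher)` shown in the hunk header, and the `run_btn.click(...)` wiring and `blocks.launch()` call are assumed rather than taken from this diff.

```python
# Minimal sketch of the layout introduced by this commit.
# Assumptions: the search() stub, the click() wiring, and launch() are illustrative.
import gradio as gr
from PIL import Image


def search(query_image):
    # Stub for the real CHM-Corr search(); just returns a blank PIL image.
    return Image.new("RGB", (224, 224), "white")


blocks = gr.Blocks()

with blocks:
    gr.Markdown(""" # CHM-Corr DEMO""")

    with gr.Row():
        # Query image on the left, run parameters on the right.
        input_image = gr.Image(type="filepath")

        with gr.Column():
            gr.Markdown("### Parameters:")
            gr.Markdown(
                "`N=50`\n `k=20` \nUsing `ImageNet Pretrained ResNet50` features"
            )

    run_btn = gr.Button("Classify")
    gr.Markdown(""" ### CHM-Corr Output Visualization """)
    viz_plot = gr.Image(type="pil", label="Visualization")

    # Hypothetical wiring: the button runs the (stubbed) search on the uploaded
    # image and displays the returned PIL image in the visualization slot.
    run_btn.click(fn=search, inputs=input_image, outputs=viz_plot)

if __name__ == "__main__":
    blocks.launch()
```

Placing `input_image` and the parameter `gr.Column` inside one `gr.Row` is what renders them side by side, which is the layout change the commit message refers to.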