Amitz244 committed
Commit 87fcea9 · verified · Parent(s): b94ab97

Update app.py
Files changed (1): app.py (+3, -2)
app.py CHANGED
@@ -97,11 +97,12 @@ iface = gr.Interface(
     fn=predict_emotion,
     inputs=gr.Image(type="pil", label="Upload an Image"),
     outputs=[gr.Textbox(label="Emotion"), gr.Textbox(label="Memorability Score")],
-    title="PerceptCLIP",
-    description="This is an official demo of PerceptCLIP from the paper: [Don’t Judge Before You CLIP: A Unified Approach for Perceptual Tasks](https://arxiv.org/pdf/2503.13260). For each specific task, we fine-tune CLIP with LoRA and an MLP head. Our models achieve state-of-the-art performance. \nThis demo shows results from three models, one for each task - visual emotion analysis, memorability prediction, and image quality assessment.",
+    title="<div style='text-align: center;'>PerceptCLIP</div>",
+    description="<div style='text-align: center;'>This is an official demo of PerceptCLIP from the paper: [Don’t Judge Before You CLIP: A Unified Approach for Perceptual Tasks](https://arxiv.org/pdf/2503.13260). For each specific task, we fine-tune CLIP with LoRA and an MLP head. Our models achieve state-of-the-art performance. <br>This demo shows results from three models, one for each task - visual emotion analysis, memorability prediction, and image quality assessment.</div>",
     examples=example_images
 )
 
 
+
 if __name__ == "__main__":
     iface.launch()
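
For context, here is a minimal, self-contained sketch of the interface block as it stands after this commit. The `predict_emotion` function and `example_images` list are defined earlier in app.py, so hypothetical stand-ins are used here, and the description string is abbreviated; the centering relies on Gradio rendering the HTML passed to `title` and `description`, as the commit implies.

```python
import gradio as gr

# Hypothetical stand-in for the real inference function defined earlier in app.py.
# The real function runs the fine-tuned CLIP + LoRA models; this returns dummy values.
def predict_emotion(image):
    return "neutral", "0.50"

# The real app defines a list of example image paths; None simply omits the examples panel.
example_images = None

iface = gr.Interface(
    fn=predict_emotion,
    inputs=gr.Image(type="pil", label="Upload an Image"),
    outputs=[gr.Textbox(label="Emotion"), gr.Textbox(label="Memorability Score")],
    # The commit wraps the title and description in divs so the text is centered when rendered.
    title="<div style='text-align: center;'>PerceptCLIP</div>",
    description="<div style='text-align: center;'>Official PerceptCLIP demo (abbreviated).</div>",
    examples=example_images,
)

if __name__ == "__main__":
    iface.launch()
```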