Update app.py
app.py CHANGED
@@ -2,6 +2,17 @@ import gradio as gr
 from transformers import BlipProcessor, BlipForConditionalGeneration
 from PIL import Image
 
+
+MARKDOWN = """
+# Blip fine-tuned on chest xray images 🔥
+<div>
+<a href="https://github.com/UmarIgan/Machine-Learning/blob/master/examples/image_captioning_blip.ipynb">
+<img src="https://d3i71xaburhd42.cloudfront.net/a3b42a83669998f65df60d7c065a70d07ca95e99/1-Figure1-1.png" alt="GitHub" style="display:inline-block;">
+</a>
+</div>
+
+"""
+
 # Load the model and processor
 processor = BlipProcessor.from_pretrained("umarigan/blip-image-captioning-base-chestxray-finetuned")
 model = BlipForConditionalGeneration.from_pretrained("umarigan/blip-image-captioning-base-chestxray-finetuned")
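For context, the rest of app.py is not shown in this diff; the new MARKDOWN block is presumably rendered in the Gradio UI alongside the caption generator built from the processor and model loaded above. A minimal sketch of how that wiring could look, assuming a hypothetical generate_caption helper and a simple gr.Blocks layout that this commit does not confirm:

import gradio as gr
from PIL import Image

def generate_caption(image: Image.Image) -> str:
    # Hypothetical helper: preprocess the uploaded chest X-ray and caption it with the fine-tuned BLIP model.
    inputs = processor(images=image, return_tensors="pt")
    output_ids = model.generate(**inputs, max_new_tokens=50)
    return processor.decode(output_ids[0], skip_special_tokens=True)

with gr.Blocks() as demo:
    gr.Markdown(MARKDOWN)  # header text added in this commit
    image_input = gr.Image(type="pil", label="Chest X-ray")
    caption_output = gr.Textbox(label="Generated caption")
    image_input.upload(generate_caption, inputs=image_input, outputs=caption_output)

demo.launch()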