from PIL import Image
import requests
import gradio as gr
from transformers import BlipProcessor, BlipForConditionalGeneration
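# Minimal BLIP image-captioning demo: the model and processor are loaded
# once at import time so every Gradio request reuses the same instances.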
model_id = "Salesforce/blip-image-captioning-base"
model = BlipForConditionalGeneration.from_pretrained(model_id)
processor = BlipProcessor.from_pretrained(model_id)
def launch(files):
    # Caption each uploaded image separately; the processor returns a dict
    # of tensors ("pixel_values") that generate() takes as keyword args.
    # The original code appended every processed input to a list and then
    # called model.generate(**inputs) on that list, which raises a TypeError.
    captions = []
    for file in files:
        inputs = processor(Image.open(file).convert("RGB"), return_tensors="pt")
        out = model.generate(**inputs)
        captions.append(processor.decode(out[0], skip_special_tokens=True))
    return "\n".join(captions)
description = "Simple BLIP test app for image captioning."
iface = gr.Interface(
    launch,
    description=description,
    inputs=gr.File(file_count="multiple"),  # gr.inputs.File was removed in Gradio 3+
    outputs="text",
)
iface.launch()
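# Run with `python app.py` (filename assumed); Gradio serves the UI on
# http://127.0.0.1:7860 by default.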