import gradio as gr
from transformers import AutoProcessor, Blip2ForConditionalGeneration

# Load the fine-tuned model and processor from the HF Hub.
# BLIP-2 checkpoints are vision-to-text models, so they are loaded with
# Blip2ForConditionalGeneration rather than AutoModelForCausalLM
# (which would fail for a BLIP-2 config). `token=True` replaces the
# deprecated `use_auth_token=True` for private repos.
model_name = "NickoSELI/blip2-indian-food-captioning-private-checkopt-mock1"
model = Blip2ForConditionalGeneration.from_pretrained(model_name, token=True)
processor = AutoProcessor.from_pretrained(model_name, token=True)


# Define a prediction function: preprocess the image, generate token IDs,
# and decode them into a caption string.
def predict(image):
    inputs = processor(images=image, return_tensors="pt")
    outputs = model.generate(**inputs)
    caption = processor.decode(outputs[0], skip_special_tokens=True)
    return caption


# Create a Gradio interface.
# Note: `gr.inputs.Image` was removed in Gradio 4; use `gr.Image` directly.
interface = gr.Interface(
    fn=predict,
    inputs=gr.Image(type="pil"),
    outputs="text",
    title="Indian Food Captioning Model",
)

# Launch the interface
if __name__ == "__main__":
    interface.launch()
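
# Optional sanity check (a minimal sketch): run `predict` on a local image
# before launching the UI. The file path "sample_dish.jpg" is a hypothetical
# placeholder, not part of the original app. Uncomment to try it:
#
# from PIL import Image
# sample = Image.open("sample_dish.jpg")  # hypothetical local test image
# print(predict(sample))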