Spaces:
Sleeping
Sleeping
File size: 1,965 Bytes
80447ff 0063ac8 80447ff |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 |
# Import required libraries
import gradio as gr
from transformers import AutoModelForImageClassification, AutoFeatureExtractor
from PIL import Image
import torch
# Load the pre-trained Vision Transformer (ViT-Base/16, 224px) checkpoint.
# NOTE(review): this model is trained on ImageNet-1k general classes, not a
# crop-specific dataset — predictions will be generic labels; confirm a
# fine-tuned crop model isn't intended here.
# `model` and `extractor` are module-level globals read by classify_image().
model = AutoModelForImageClassification.from_pretrained("google/vit-base-patch16-224")
extractor = AutoFeatureExtractor.from_pretrained("google/vit-base-patch16-224")
# Define the function to classify the crop using satellite images
def classify_image(image):
    """Classify a PIL image with the module-level ViT model.

    Args:
        image: PIL.Image supplied by the Gradio ``gr.Image(type="pil")`` input.

    Returns:
        str: human-readable label, e.g. ``"Predicted Class: <label>"``.
    """
    # Preprocess into the tensor format the model expects.
    inputs = extractor(images=image, return_tensors="pt")
    # Inference only — disable autograd to save memory and time.
    with torch.no_grad():
        outputs = model(**inputs)
    # argmax over the class dimension gives the top-1 prediction index.
    logits = outputs.logits
    predicted_class_idx = logits.argmax(-1).item()
    # Map the index back to its string label via the model config.
    return f"Predicted Class: {model.config.id2label[predicted_class_idx]}"
# Define the chatbot function for interaction
def respond(message, chat_history):
    """Chatbot callback for gr.ChatInterface.

    Args:
        message: the user's latest message (str).
        chat_history: prior conversation turns (unused — placeholder bot).

    Returns:
        str: a canned acknowledgement; no real NLU is performed yet.
    """
    # Placeholder behavior: echo the message back with a fixed notice.
    response = f"Received your message: '{message}' but currently, I'm focusing on classifying crops!"
    return response
# Create a Gradio interface for crop classification
# Standalone Interface for the image classifier; rendered inside a tab below.
# `iface` is referenced by the gr.Blocks layout, so the name must not change.
iface = gr.Interface(
fn=classify_image,
inputs=gr.Image(type="pil"),
outputs="text",
title="Real-Time Crop Classification",
description="Upload a satellite/mobile image to classify crops.",
)
# Chat UI wrapping the placeholder respond() callback; also rendered in a tab.
chatbot = gr.ChatInterface(
fn=respond,
title="Agricultural Assistant Chatbot",
description="Ask questions about crop classification and more."
)
# Combine the crop classifier and chatbot in one interface
# Combine the classifier and the chatbot into one tabbed app.
with gr.Blocks() as demo:
    gr.Markdown("## Farmovation Chatbot")
    with gr.Tab("Crop Classification"):
        iface.render()
    with gr.Tab("Agriculture Chatbot"):
        chatbot.render()
# Launch the Gradio app (blocking; serves the UI).
demo.launch()
|