import os
import logging

import gradio as gr
import numpy as np
import requests
import torch
from dotenv import load_dotenv  # Load .env file
from PIL import Image
from transformers import AutoImageProcessor, AutoModelForImageClassification

# Load environment variables
load_dotenv()
HUGGINGFACE_API_KEY = os.getenv("HUGGINGFACE_API_KEY")

# Configure logging
logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s")

# Load model & processor
model_name = "linkanjarad/mobilenet_v2_1.0_224-plant-disease-identification"
try:
    processor = AutoImageProcessor.from_pretrained(model_name, use_fast=True)
    model = AutoModelForImageClassification.from_pretrained(model_name)
    logging.info("✅ Model and processor loaded successfully.")
except Exception as e:
    logging.error(f"❌ Failed to load model: {str(e)}")
    raise RuntimeError("Failed to load the model. Please check the logs for details.") from e


# Function to get AI-powered treatment suggestions via the Hugging Face Inference API
def get_treatment_suggestions(disease_name):
    url = "https://api-inference.huggingface.co/models/OpenAGI/agriculture-gpt"
    headers = {"Authorization": f"Bearer {HUGGINGFACE_API_KEY}"}
    data = {"inputs": f"What are the treatment options for {disease_name} in plants?"}
    try:
        response = requests.post(url, headers=headers, json=data, timeout=30)
        if response.status_code == 200:
            return response.json()[0]["generated_text"]
        return f"API Error: {response.status_code}"
    except Exception as e:
        logging.error(f"Treatment lookup failed: {str(e)}")
        return "Error retrieving treatment details."


# Prediction function: classify the leaf image, then fetch treatment suggestions
def predict(image):
    try:
        image = Image.fromarray(np.uint8(image)).convert("RGB")
        inputs = processor(images=image, return_tensors="pt")
        with torch.no_grad():
            outputs = model(**inputs)
        logits = outputs.logits
        predicted_class_idx = logits.argmax(-1).item()
        predicted_label = model.config.id2label[predicted_class_idx]

        # Get AI-generated treatment suggestions
        treatment = get_treatment_suggestions(predicted_label)

        return f"Predicted Disease: {predicted_label}\nTreatment: {treatment}"
    except Exception as e:
        logging.error(f"Prediction failed: {str(e)}")
        return f"❌ Prediction failed: {str(e)}"


# Gradio interface
iface = gr.Interface(
    fn=predict,
    inputs=gr.Image(type="numpy", label="Upload or capture plant image"),
    outputs=gr.Textbox(label="Result"),
    title="AI-Powered Plant Disease Detector",
    description="Upload a plant leaf image to detect diseases and get AI-powered treatment suggestions.",
    allow_flagging="never",
)

# Launch Gradio app
iface.launch()