EduFalcao committed on
Commit 76a9f88 · verified · 1 Parent(s): 78d8227

Update app.py

Files changed (1)
  app.py +44 -30
app.py CHANGED
@@ -1,43 +1,57 @@
  import gradio as gr
- from PIL import Image
- from transformers import pipeline

- # 1) Create the zero-shot classifier with CLIP
- classifier = pipeline(
-     task="zero-shot-image-classification",
-     model="openai/clip-vit-base-patch32"
- )

- # 2) Define the labels
- LABELS = ["Healthy", "Leaf Blight", "Black Rot", "ESCA"]
-
- def predict(image: Image.Image):
-     # Optional: fix EXIF orientation and resize as before
-     image = image.convert("RGB").resize((224, 224))
-
-     # Zero-shot classification
-     res = classifier(image, candidate_labels=LABELS)
-     # res is a list of dicts: [{"label": ..., "score": ...}, ...]
-
-     # Map to an ordered text output
-     probs = {item["label"]: float(item["score"]) for item in res}
-     # Pick the most likely label
-     best = max(probs, key=probs.get)
-
-     # Format the output
-     prob_lines = "\n".join(f"{lbl}: {probs[lbl]:.2f}" for lbl in LABELS)
      return best, prob_lines

- # 3) Gradio interface
  demo = gr.Interface(
      fn=predict,
-     inputs=gr.Image(type="pil", label="Upload the leaf"),
      outputs=[
          gr.Textbox(label="Predicted class"),
-         gr.Textbox(label="Probabilities (0–1)")
      ],
-     title="CropVision (Backup CLIP Zero-Shot)",
-     description="Uses zero-shot CLIP to classify leaves as Healthy, Leaf Blight, Black Rot or ESCA"
  )

  if __name__ == "__main__":
 
  import gradio as gr
+ from PIL import Image, ImageOps
+ import torch
+ from transformers import CLIPProcessor, CLIPModel

+ # ─── 1) Load the fine-tuned CLIP model and processor ───
+ MODEL_ID = "Keetawan/clip-vit-large-patch14-plant-disease-finetuned"
+ processor = CLIPProcessor.from_pretrained(MODEL_ID)
+ model = CLIPModel.from_pretrained(MODEL_ID)
+
+ # ─── 2) Labels the model knows ───
+ HF_LABELS = [
+     "Grape leaf with Black rot",
+     "Grape leaf with Esca (Black Measles)",
+     "Grape leaf with Leaf blight (Isariopsis Leaf Spot)",
+     "Healthy Grape leaf"
+ ]
+ # Mapping to your short class names
+ MAP = {
+     "Grape leaf with Black rot": "Black Rot",
+     "Grape leaf with Esca (Black Measles)": "ESCA",
+     "Grape leaf with Leaf blight (Isariopsis Leaf Spot)": "Leaf Blight",
+     "Healthy Grape leaf": "Healthy"
+ }
+
+ def predict(img: Image.Image):
+     # Preprocessing, same as in the notebook: fix EXIF orientation, then resize
+     img = ImageOps.exif_transpose(img).convert("RGB")
+     img = img.resize((224, 224))
+
+     # Zero-shot CLIP inference
+     inputs = processor(text=HF_LABELS, images=img, return_tensors="pt", padding=True)
+     outputs = model(**inputs)
+     probs = outputs.logits_per_image.softmax(dim=1)[0].tolist()
+
+     # Build a label → probability dictionary
+     mapping = {MAP[HF_LABELS[i]]: probs[i] for i in range(len(probs))}
+     # Pick the class with the highest probability
+     best = max(mapping, key=mapping.get)

+     # Format the probabilities
+     prob_lines = "\n".join(f"{lbl}: {mapping[lbl]:.2f}" for lbl in ["Healthy", "Leaf Blight", "Black Rot", "ESCA"])
      return best, prob_lines

+ # ─── 3) Gradio UI ───────────────────────────────────────
  demo = gr.Interface(
      fn=predict,
+     inputs=gr.Image(type="pil", label="Upload a leaf"),
      outputs=[
          gr.Textbox(label="Predicted class"),
+         gr.Textbox(label="Probabilities")
      ],
+     title="CropVision CLIP Zero-Shot Fine-Tuned",
+     description="Healthy / Leaf Blight / Black Rot / ESCA"
  )

  if __name__ == "__main__":
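
For reference, the updated predict() can be smoke-tested outside the Gradio UI. This is a minimal sketch, not part of the commit: it assumes the snippet runs next to app.py (so `from app import predict` works and loads the CLIP model once), and "sample_leaf.jpg" is a placeholder path for any local test image.

from PIL import Image
from app import predict  # importing app.py loads the model and processor once

# Run the same function the Gradio UI calls and print both outputs
img = Image.open("sample_leaf.jpg")  # placeholder test image, not part of the commit
best, prob_lines = predict(img)
print("Predicted class:", best)
print(prob_lines)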