Commit a365161
Parent(s): a4f243f
Converted numpy to tensor
app.py CHANGED
@@ -4,13 +4,15 @@ import math
 import torch
 import numpy as np
 import gradio as gr
+import albumentations
 import matplotlib.pyplot as plt
 from glob import glob
 from PIL import Image
+from pytorch_grad_cam import EigenCAM
 from models.common import DetectMultiBackend
+from albumentations.pytorch import ToTensorV2
 from utils.augmentations import letterbox
 from utils.plots import Annotator, colors
-from pytorch_grad_cam import EigenCAM
 from pytorch_grad_cam.utils.image import show_cam_on_image, scale_cam_image
 from utils.torch_utils import select_device, smart_inference_mode
 from utils.general import check_img_size, Profile, non_max_suppression, scale_boxes
@@ -66,6 +68,13 @@ def inference(input_img, conf_thres, iou_thres, is_false_detection_images=True,
     im0 = input_img.copy()
     im_resized = cv2.resize(im0, (640, 640))
     rgb_img = im_resized.copy()
+    transforms = albumentations.Compose(
+        # Normalize
+        [albumentations.Normalize([0.49139968, 0.48215841, 0.44653091],
+                                  [0.24703223, 0.24348513, 0.26158784]),
+         # Convert to tensor
+         ToTensorV2()])
+    im_resized = transforms(image = im_resized)['image']
     stride, names, pt = model.stride, model.names, model.pt
     imgsz = check_img_size((640, 640), s=stride)  # check image size
 
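
For context, below is a minimal standalone sketch of what the added transform does to the resized frame. It runs on a random dummy image instead of the Gradio input; the mean/std constants are copied from the diff (they match the commonly quoted CIFAR-10 channel statistics), and the final unsqueeze(0) batch step is an assumption about how the tensor is consumed downstream, not part of this commit.

import numpy as np
import albumentations
from albumentations.pytorch import ToTensorV2

# Stand-in for im_resized: a 640x640 HWC uint8 image.
dummy = np.random.randint(0, 256, (640, 640, 3), dtype=np.uint8)

transforms = albumentations.Compose([
    # Scales pixels to [0, 1] (max_pixel_value defaults to 255), then
    # normalizes each channel with the given mean/std.
    albumentations.Normalize([0.49139968, 0.48215841, 0.44653091],
                             [0.24703223, 0.24348513, 0.26158784]),
    # Converts the HWC numpy array into a CHW torch.Tensor.
    ToTensorV2(),
])

tensor = transforms(image=dummy)['image']
print(tensor.shape, tensor.dtype)  # torch.Size([3, 640, 640]) torch.float32

# Assumption: the detector expects a leading batch dimension.
batch = tensor.unsqueeze(0)

Reading the result back via ['image'] is the usual albumentations pattern: Compose returns a dict keyed by the targets it was called with.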