Commit 5e812c3 · Parent: 7d41601
adding text box
app.py CHANGED
@@ -12,15 +12,6 @@ from skimage.measure import label, regionprops
 processor = CLIPSegProcessor.from_pretrained("CIDAS/clipseg-rd64-refined")
 model = CLIPSegForImageSegmentation.from_pretrained("CIDAS/clipseg-rd64-refined")
 
-
-random_images = []
-images_dir = 'images/'
-for idx, images in enumerate(os.listdir(images_dir)):
-    image = os.path.join(images_dir, images)
-    if os.path.isfile(image) and idx < 10:
-        random_images.append(image)
-
-
 def rescale_bbox(bbox,orig_image_shape=(1024,1024),model_shape=352):
     bbox = np.asarray(bbox)/model_shape
     y1,y2 = bbox[::2] *orig_image_shape[0]
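The hunk cuts off inside rescale_bbox. For orientation only: given the skimage import in the hunk header, the function appears to map a (min_row, min_col, max_row, max_col) box from the model's 352x352 output grid back to the original image size. The sketch below completes that idea under that assumption; the x-coordinate handling and the return value are guesses, not part of the diff.

import numpy as np

def rescale_bbox(bbox, orig_image_shape=(1024, 1024), model_shape=352):
    # Normalize the (min_row, min_col, max_row, max_col) box from the 352x352 grid to [0, 1].
    bbox = np.asarray(bbox) / model_shape
    # Rows (y) scale with the original image height, columns (x) with its width.
    y1, y2 = bbox[::2] * orig_image_shape[0]
    x1, x2 = bbox[1::2] * orig_image_shape[1]  # assumed continuation, not shown in the hunk
    return int(x1), int(y1), int(x2), int(y2)  # assumed return format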
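For context on the unchanged lines, the CLIPSeg processor and model loaded at the top of app.py are typically driven as in the sketch below. This follows the standard transformers API for CLIPSegForImageSegmentation rather than this Space's actual code; the image path and prompt text are placeholders.

import torch
from PIL import Image
from skimage.measure import label, regionprops
from transformers import CLIPSegProcessor, CLIPSegForImageSegmentation

processor = CLIPSegProcessor.from_pretrained("CIDAS/clipseg-rd64-refined")
model = CLIPSegForImageSegmentation.from_pretrained("CIDAS/clipseg-rd64-refined")

image = Image.open("example.jpg")  # placeholder image, not part of the Space
prompts = ["a truck"]              # illustrative text prompt

inputs = processor(text=prompts, images=[image] * len(prompts),
                   padding=True, return_tensors="pt")
with torch.no_grad():
    outputs = model(**inputs)

# The model predicts one low-resolution (352x352) heat map per prompt.
mask = (torch.sigmoid(outputs.logits).squeeze() > 0.5).numpy()

# Connected components on the mask give bounding boxes on the 352 grid,
# which a helper like rescale_bbox can map back to the original image size.
for region in regionprops(label(mask)):
    print(region.bbox)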