Cyril666 committed on
Commit
2aff18a
·
1 Parent(s): adfbf25

First model version

Browse files
Files changed (1) hide show
  1. app.py +6 -1
app.py CHANGED
@@ -10,8 +10,12 @@ import glob
10
  import gradio as gr
11
  from demo import get_model, preprocess, postprocess, load
12
  from utils import Config, Logger, CharsetMapper
 
 
 
 
13
 
14
- def process_image(image):
15
  # rec model
16
  config = Config('configs/rec/train_abinet.yaml')
17
  config.model_vision_checkpoint = None
@@ -34,6 +38,7 @@ def process_image(image):
34
  result_polygons, result_masks, result_boxes = det_demo.run_on_opencv_image(image)
35
 
36
  # cut patch
 
37
  patchs = [image[box[1]:box[3], box[0]:box[2], :] for box in result_boxes]
38
  patchs = [preprocess(patch, config.dataset_image_width, config.dataset_image_height) for patch in patchs]
39
  patchs = torch.stack(patchs, dim=0)
 
10
  import gradio as gr
11
  from demo import get_model, preprocess, postprocess, load
12
  from utils import Config, Logger, CharsetMapper
13
+ import cv2
14
+ import pandas as pd
15
+ from det_demo import DetDemo
16
+ from maskrcnn_benchmark.config import cfg
17
 
18
+ def process_image(filepath):
19
  # rec model
20
  config = Config('configs/rec/train_abinet.yaml')
21
  config.model_vision_checkpoint = None
 
38
  result_polygons, result_masks, result_boxes = det_demo.run_on_opencv_image(image)
39
 
40
  # cut patch
41
+ image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
42
  patchs = [image[box[1]:box[3], box[0]:box[2], :] for box in result_boxes]
43
  patchs = [preprocess(patch, config.dataset_image_width, config.dataset_image_height) for patch in patchs]
44
  patchs = torch.stack(patchs, dim=0)