import gradio as gr
import subprocess
from PIL import Image
import tempfile
import os
import yaml
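
# This Gradio app wraps the PyLaia `pylaia-htr-decode-ctc` CLI: it writes the uploaded
# image and a one-line image list to a temporary directory, points a copy of
# `my_decode_config.yaml` at that list via its `img_list` entry, runs the decoder, and
# returns the transcription read back from predict.txt.
# Assumptions: PyLaia is installed so the CLI is on the PATH, `my_decode_config.yaml`
# sits next to this script, and the example images under `examples/` exist.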

def predict(input_image: Image.Image):
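    """Run PyLaia HTR decoding on a single PIL image and return the predicted transcription."""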

    try:
        # TemporaryDirectory is used as a context manager, so the directory and its
        # contents are removed automatically, even if an error is raised inside the
        # with block; no finally block is needed.
        with tempfile.TemporaryDirectory() as temp_dir:
            temp_image_path = os.path.join(temp_dir, 'temp_image.jpg')
            temp_list_path = os.path.join(temp_dir, 'temp_img_list.txt')
            temp_config_path = os.path.join(temp_dir, 'temp_config.yaml')

            input_image.save(temp_image_path)

            # Create a temporary img_list file
            with open(temp_list_path, 'w') as f:
                f.write(temp_image_path)

            # Read the original config file and create a temporary one
            with open('my_decode_config.yaml', 'r') as f:
                config_data = yaml.safe_load(f)
            
            config_data['img_list'] = temp_list_path

            with open(temp_config_path, 'w') as f:
                yaml.dump(config_data, f)

            # Run the PyLaia decoder; tee duplicates its output into predict.txt.
            # check=True raises CalledProcessError if the shell pipeline exits non-zero
            # (note: with a pipe, the shell reports the exit status of the last command,
            # i.e. tee), which is handled in the except block below.
            subprocess.run(
                f"pylaia-htr-decode-ctc --config {temp_config_path} | tee predict.txt",
                shell=True,
                check=True,
            )

            # subprocess.run(f"pylaia-htr-decode-ctc --config {temp_config_path} | tee predict.txt", shell=True, check=True)
            
            # Alternative to shell=True (ChatGPT suggestion)
            # from subprocess import Popen, PIPE

            # # Run the first command and capture its output
            # p1 = Popen(["pylaia-htr-decode-ctc", "--config", temp_config_path], stdout=PIPE)
            # output = p1.communicate()[0]

            # # Write the output to predict.txt
            # with open('predict.txt', 'wb') as f:
            #     f.write(output)

            # Read the decoded transcription (the last line written to predict.txt)
            if os.path.exists('predict.txt'):
                with open('predict.txt', 'r') as f:
                    output = f.read().strip().split('\n')[-1]
            else:
                output = 'predict.txt was not created by the decoder'

        return output

    except subprocess.CalledProcessError as e:
        return f"pylaia-htr-decode-ctc failed with exit code {e.returncode}"

# Gradio interface
title = "PyLaia HTR"
description = "Inference using PyLaia models."
examples = [["examples/example01.jpg"], ["examples/example02.jpg"]]

iface = gr.Interface(
    fn=predict,
    inputs=gr.Image(type="pil"),
    outputs=gr.Textbox(),
    title=title,
    description=description,
    examples=examples
)

iface.launch()