import gradio as gr
import subprocess
from PIL import Image
import tempfile
import os
import yaml

def predict(input_image: Image.Image):
    try:
        # TemporaryDirectory is used as a context manager: the directory is cleaned up
        # when the with block exits, even if an error is raised inside it,
        # so no finally block is needed.
        with tempfile.TemporaryDirectory() as temp_dir:
            temp_image_path = os.path.join(temp_dir, 'temp_image.jpg')
            temp_list_path = os.path.join(temp_dir, 'temp_img_list.txt')
            temp_config_path = os.path.join(temp_dir, 'temp_config.yaml')

            input_image.save(temp_image_path)

            # Create a temporary img_list file pointing at the uploaded image
            with open(temp_list_path, 'w') as f:
                f.write(temp_image_path)

            # Read the original config file and create a temporary copy
            # whose img_list points at the file written above
            with open('my_decode_config.yaml', 'r') as f:
                config_data = yaml.safe_load(f)
            config_data['img_list'] = temp_list_path
            with open(temp_config_path, 'w') as f:
                yaml.dump(config_data, f)
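            # The only assumption this script makes about my_decode_config.yaml is that
            # 'img_list' is a top-level key; every other decode option (model checkpoint,
            # symbols table, etc.) is expected to already be set in that file. A purely
            # illustrative shape (key names other than img_list are hypothetical
            # placeholders, not taken from the repo's actual config):
            #
            #   syms: syms.txt
            #   img_list: images.txt
            #   common:
            #     experiment_dirname: experiment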
            # Run the PyLaia decoder; tee mirrors its stdout into predict.txt
            try:
                subprocess.run(
                    f"pylaia-htr-decode-ctc --config {temp_config_path} | tee predict.txt",
                    shell=True, check=True,
                )
            except subprocess.CalledProcessError as e:
                # Output is not captured here (it goes through tee), so only the exit code is known
                print(f"Command failed with exit code {e.returncode}")

            # Alternative to shell=True (ChatGPT suggestion)
            # from subprocess import Popen, PIPE
            # # Run the decoder and capture its output
            # p1 = Popen(["pylaia-htr-decode-ctc", "--config", temp_config_path], stdout=PIPE)
            # output = p1.communicate()[0]
            # # Write the output to predict.txt
            # with open('predict.txt', 'wb') as f:
            #     f.write(output)
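            # A more modern equivalent of that alternative (a sketch only, same assumptions
            # as above about the PyLaia CLI being installed and configured):
            #
            #   result = subprocess.run(
            #       ["pylaia-htr-decode-ctc", "--config", temp_config_path],
            #       capture_output=True, text=True, check=True,
            #   )
            #   with open('predict.txt', 'w') as f:
            #       f.write(result.stdout)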
            # Read the decoder output from predict.txt; with a single image in the
            # list, the last line holds its prediction
            if os.path.exists('predict.txt'):
                with open('predict.txt', 'r') as f:
                    output = f.read().strip().split('\n')[-1]
            else:
                output = 'predict.txt does not exist'
                print(output)
            return output
    except subprocess.CalledProcessError as e:
        return f"Command failed with error {e.returncode}"

# Gradio interface
title = "PyLaia HTR"
description = "Inference using PyLaia models."
examples = [["examples/example01.jpg"], ["examples/example02.jpg"]]

iface = gr.Interface(
    fn=predict,
    # gr.Image / gr.Textbox are the current component names
    # (the older gr.inputs / gr.outputs namespaces are deprecated)
    inputs=gr.Image(type="pil"),
    outputs=gr.Textbox(),
    title=title,
    description=description,
    examples=examples,
)

iface.launch()
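
# Quick local check (a sketch, not part of the app): assumes the PyLaia CLI, the model
# referenced by my_decode_config.yaml, and the bundled example image are all available.
#
#   from PIL import Image
#   print(predict(Image.open("examples/example01.jpg")))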