Spaces:
Runtime error
Commit · c83ef86
Parent(s): ee6a3f0
Create app.py
app.py ADDED
@@ -0,0 +1,73 @@
import gradio as gr
import numpy as np
from math import ceil
import tensorflow.keras as tfk

# Pre-trained autoencoder: maps a 128x128 RGB tile to a 256x256 upsampled tile.
model = tfk.models.load_model("autoencoder_model_d_0")

def double_res(input_image):
    input_height = input_image.shape[0]
    input_width = input_image.shape[1]
    height = ceil(input_height / 128)
    width = ceil(input_width / 128)

    # Pad the input up to the next multiple of 128 in each dimension.
    expanded_input_image = np.zeros((128 * height, 128 * width, 3), dtype=np.uint8)
    np.copyto(expanded_input_image[0:input_height, 0:input_width], input_image)

    output_image = np.zeros((128 * height * 2, 128 * width * 2, 3), dtype=np.float32)

    # Upsample each 128x128 tile independently and paste it into the output.
    for i in range(height):
        for j in range(width):
            temp_slice = expanded_input_image[i*128:(i+1)*128, j*128:(j+1)*128] / 255
            upsampled_slice = model.predict(temp_slice[np.newaxis, ...])
            np.copyto(output_image[i*256:(i+1)*256, j*256:(j+1)*256], upsampled_slice[0])

            if i != 0 and j != 0 and i != height - 1 and j != width - 1:
                # Removing inner borders: re-predict tiles centred on each seam and
                # paste only the central 128x128 of the result over the border region.
                right_slice = expanded_input_image[i*128:(i+1)*128, (j+1)*128-64:(j+1)*128+64] / 255
                right_upsampled_slice = model.predict(right_slice[np.newaxis, ...])
                resized_right_slice = right_upsampled_slice[0][64:192, 64:192]
                np.copyto(output_image[i*256+64:(i+1)*256-64, (j+1)*256-64:(j+1)*256+64], resized_right_slice)

                left_slice = expanded_input_image[i*128:(i+1)*128, j*128-64:j*128+64] / 255
                left_upsampled_slice = model.predict(left_slice[np.newaxis, ...])
                resized_left_slice = left_upsampled_slice[0][64:192, 64:192]
                np.copyto(output_image[i*256+64:(i+1)*256-64, j*256-64:j*256+64], resized_left_slice)

                upper_slice = expanded_input_image[(i+1)*128-64:(i+1)*128+64, j*128:(j+1)*128] / 255
                upper_upsampled_slice = model.predict(upper_slice[np.newaxis, ...])
                resized_upper_slice = upper_upsampled_slice[0][64:192, 64:192]
                np.copyto(output_image[(i+1)*256-64:(i+1)*256+64, j*256+64:(j+1)*256-64], resized_upper_slice)

                lower_slice = expanded_input_image[i*128-64:i*128+64, j*128:(j+1)*128] / 255
                lower_upsampled_slice = model.predict(lower_slice[np.newaxis, ...])
                resized_lower_slice = lower_upsampled_slice[0][64:192, 64:192]
                np.copyto(output_image[i*256-64:i*256+64, j*256+64:(j+1)*256-64], resized_lower_slice)

                # Removing corner seams in the same way.
                lower_right_slice = expanded_input_image[i*128-64:i*128+64, (j+1)*128-64:(j+1)*128+64] / 255
                lower_right_upsampled_slice = model.predict(lower_right_slice[np.newaxis, ...])
                resized_lower_right_slice = lower_right_upsampled_slice[0][64:192, 64:192]
                np.copyto(output_image[i*256-64:i*256+64, (j+1)*256-64:(j+1)*256+64], resized_lower_right_slice)

                lower_left_slice = expanded_input_image[i*128-64:i*128+64, j*128-64:j*128+64] / 255
                lower_left_upsampled_slice = model.predict(lower_left_slice[np.newaxis, ...])
                resized_lower_left_slice = lower_left_upsampled_slice[0][64:192, 64:192]
                np.copyto(output_image[i*256-64:i*256+64, j*256-64:j*256+64], resized_lower_left_slice)

    # Crop the padding away: the result is exactly twice the input resolution.
    resized_output_image = output_image[0:input_height*2, 0:input_width*2]
    return resized_output_image

demo = gr.Interface(
    fn=double_res,
    title="Double picture resolution",
    description="Upload a picture and get the horizontal and vertical resolution doubled (4x pixels)",
    allow_flagging="never",
    inputs=[
        gr.Image(type="numpy")  # gr.Image replaces the deprecated gr.inputs.Image namespace
    ],
    outputs=gr.Image(type="numpy"))

demo.launch()
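As a quick sanity check outside the web UI, the tiling logic can be exercised on a synthetic image. This is only a sketch: it assumes the `autoencoder_model_d_0` weights are present next to the script so the module-level load succeeds, and the random input is just a stand-in.

# Hypothetical smoke test: run double_res on a random RGB image and
# confirm the output is exactly twice the input resolution.
import numpy as np

dummy = np.random.randint(0, 256, size=(200, 300, 3), dtype=np.uint8)
result = double_res(dummy)
print(result.shape)  # expected: (400, 600, 3)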