Fix #28 app.py
app.py CHANGED
@@ -11,7 +11,7 @@ from torchvision.utils import save_image
 import gradio as gr
 import numpy as np
 import io
-import tempfile
+import tempfile  # Import tempfile
 
 # Make sure the required functions are defined (if they are not already)
 def resize(img, size):
@@ -266,19 +266,9 @@ class Solver(object):
 transforms.Normalize(mean=[0.5, 0.5, 0.5], std=[0.5, 0.5, 0.5])
 ])
 # Convert to a PIL image before the transformation
-
-
-
-source_temp_file.write(source_image)
-reference_temp_file.write(reference_image)
-# Check the contents of the temporary files
-print(f"First 100 bytes of {source_temp_file.name}: {open(source_temp_file.name, 'rb').read(100)}")
-print(f"First 100 bytes of {reference_temp_file.name}: {open(reference_temp_file.name, 'rb').read(100)}")
-source_image_pil = Image.open(source_temp_file.name)
-reference_image_pil = Image.open(reference_temp_file.name)
-except Exception as e:
-print(f"Error processing the images: {e}")
-raise  # Re-raise the exception so Gradio can catch it
+source_image_pil = Image.fromarray(source_image)
+reference_image_pil = Image.fromarray(reference_image)
+
 source_image = transform(source_image_pil).unsqueeze(0).to(self.device)
 reference_image = transform(reference_image_pil).unsqueeze(0).to(self.device)
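For context, the rewritten path relies on Gradio's gr.Image component handing the handler an in-memory NumPy array, which PIL.Image.fromarray converts directly, so the tempfile round-trip and the debug prints are no longer needed. A minimal, self-contained sketch of that conversion follows; the Resize/ToTensor steps and the to_input_tensor name are illustrative assumptions, not code taken from app.py:

import numpy as np
from PIL import Image
from torchvision import transforms

# Illustrative sketch: go from the NumPy array that gr.Image delivers to the
# normalized tensor the model expects. Resize/ToTensor are assumptions here;
# only the Normalize step appears in the diff above.
transform = transforms.Compose([
    transforms.Resize((256, 256)),
    transforms.ToTensor(),
    transforms.Normalize(mean=[0.5, 0.5, 0.5], std=[0.5, 0.5, 0.5]),
])

def to_input_tensor(image_array: np.ndarray):
    # Image.fromarray accepts an HxWxC uint8 array; no temporary file is needed.
    pil_image = Image.fromarray(image_array)
    return transform(pil_image).unsqueeze(0)  # add a batch dimension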