app.py CHANGED

@@ -257,7 +257,8 @@ def greet(input):
     ctx = torch.from_numpy(mtx_2d).to(device=device).float()
 
     #samples, intermediate = sample_ddim_context(32, ctx, n=steps)
-
+    image_count = 1;
+    samples, intermediate = sample_ddpm_context(image_count, steps, ctx)
 
     #samples, intermediate = sample_ddim(32, n=steps)
     #ctx = F.one_hot(torch.randint(0, 5, (32,)), 5).to(device=device).float()
@@ -268,7 +269,7 @@ def greet(input):
     #response = im.fromarray(intermediate[24][0][1]).convert("RGB")
 
     sx_gen_store = np.moveaxis(intermediate,2,4)
-    nsx_gen_store = norm_all(sx_gen_store, sx_gen_store.shape[0],
+    nsx_gen_store = norm_all(sx_gen_store, sx_gen_store.shape[0], image_count)
 
     response = intermediate.shape;
     response2 = transform2(transform(nsx_gen_store[-1][0]))
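For readers following the change: below is a minimal sketch of how the updated sampling path fits together inside greet. It relies only on names that appear in the diff (sample_ddpm_context, norm_all, np.moveaxis, nsx_gen_store); the assumed helper signatures, the intermediate-array shape, and the generate_final_frame wrapper name are assumptions for illustration, not part of app.py.

    import numpy as np

    def generate_final_frame(ctx, steps):
        # Sample a single image conditioned on ctx, keeping the intermediate
        # denoising frames. Assumed signature: (n_sample, timesteps, context),
        # matching the call added in the diff.
        image_count = 1
        samples, intermediate = sample_ddpm_context(image_count, steps, ctx)

        # intermediate is assumed to be (saved_steps, n_sample, C, H, W);
        # moving axis 2 to the end gives (saved_steps, n_sample, H, W, C)
        # so each frame is channel-last for display.
        sx_gen_store = np.moveaxis(intermediate, 2, 4)

        # Rescale every saved frame of every sample into display range,
        # mirroring the norm_all call in the diff.
        nsx_gen_store = norm_all(sx_gen_store, sx_gen_store.shape[0], image_count)

        # The last saved step of the first (and only) sample is the final image;
        # app.py then passes it through transform/transform2 for the response.
        return nsx_gen_store[-1][0]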