steps
app.py
CHANGED
@@ -222,11 +222,11 @@ def sample_ddpm(n_sample, save_rate=20):
     return samples, intermediate
 
 def greet(input):
-
+    steps = int(input)
     #samples, intermediate = sample_ddim(32, n=steps)
     #ctx = F.one_hot(torch.randint(0, 5, (32,)), 5).to(device=device).float()
     #samples, intermediate = sample_ddim_context(32, ctx, steps)
-    samples, intermediate = sample_ddpm(32,
+    samples, intermediate = sample_ddpm(32, steps)
     #response = transform2(transform(np.moveaxis(samples.detach().cpu().numpy(),1,3)[-1]))
     #response2 = transform2(transform(np.moveaxis(samples.detach().cpu().numpy(),1,3)[1]))
     #response = im.fromarray(intermediate[24][0][1]).convert("RGB")
@@ -245,6 +245,6 @@ transform2 = transforms.ToPILImage()
 
 #iface = gr.Interface(fn=greet, inputs=[gr.Textbox(label="Text to find entities", lines=2)], outputs=[gr.HighlightedText(label="Text with entities")], title="NER with dslim/bert-base-NER", description="Find entities using the `dslim/bert-base-NER` model under the hood!", allow_flagging="never", examples=["My name is Andrew and I live in California", "My name is Poli and work at HuggingFace"])
 #iface = gr.Interface(fn=greet, inputs=[gr.Textbox(label="Co-Retailing Business")], outputs=[gr.outputs.Image(type="pil", width=64, label="Output Image"), gr.outputs.Image(type="pil", width=64, label="Output Image2"), gr.outputs.Image(type="pil", width=64, label="Output Image3"), gr.outputs.Image(type="pil", width=64, label="Output Image4")])
-iface = gr.Interface(fn=greet, inputs=[gr.Textbox(label="
+iface = gr.Interface(fn=greet, inputs=[gr.Textbox(label="steps", value=500)], outputs=[gr.Textbox(label="Info"), gr.Image(type="pil", width=64, label="Output Image"), gr.Image(type="pil", width=64, label="Output Image2"), gr.Image(type="pil", width=64, label="Output Image3"), gr.Image(type="pil", width=64, label="Output Image4")])
 #iface = gr.Interface(fn=greet, inputs=[gr.Textbox(label="Co-Retailing Business")], outputs=[gr.Textbox()])
 iface.launch()
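For context, a minimal, self-contained sketch of the pattern this commit lands on: the "steps" Textbox value reaches greet() as a string, is parsed to an int, drives the sampler, and the function returns one info string plus four PIL images to match the five declared outputs. sample_ddpm_stub and the image post-processing below are hypothetical stand-ins for the Space's real sample_ddpm / transform helpers, not the actual code.

# Sketch only: sample_ddpm_stub is a hypothetical stand-in for the Space's
# real DDPM sampler; array shapes and scaling are assumptions.
import numpy as np
import gradio as gr
from PIL import Image

def sample_ddpm_stub(n_sample, steps):
    # Stand-in sampler: random 16x16 RGB arrays in [0, 1], plus a list of
    # "intermediate" snapshots, mimicking the (samples, intermediate) return.
    samples = np.random.rand(n_sample, 16, 16, 3).astype(np.float32)
    return samples, [samples]

def greet(input):
    steps = int(input)                          # Textbox values arrive as strings
    samples, _ = sample_ddpm_stub(32, steps)
    # Convert the first four samples to PIL images for the gr.Image outputs.
    imgs = [Image.fromarray((samples[i] * 255).astype(np.uint8)) for i in range(4)]
    return (f"sampled 32 images in {steps} steps", *imgs)

iface = gr.Interface(
    fn=greet,
    inputs=[gr.Textbox(label="steps", value="500")],
    outputs=[gr.Textbox(label="Info")]
            + [gr.Image(type="pil", label=f"Output Image{i}") for i in range(1, 5)],
)

if __name__ == "__main__":
    iface.launch()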