Spaces:
Running
Running
response
Browse files
app.py
CHANGED
@@ -18,7 +18,7 @@ from PIL import Image as im
|
|
18 |
#openai.api_key = os.getenv('OPENAI_API_KEY')
|
19 |
|
20 |
class ContextUnet(nn.Module):
|
21 |
-
def __init__(self, in_channels, n_feat=256, n_cfeat=10, height=
|
22 |
super(ContextUnet, self).__init__()
|
23 |
|
24 |
# number of input channels, number of intermediate feature maps and number of classes
|
@@ -236,10 +236,37 @@ def greet(input):
|
|
236 |
|
237 |
response = intermediate.shape;
|
238 |
response2 = transform2(transform(nsx_gen_store[-1][0]))
|
239 |
-
response3 = transform2(transform(nsx_gen_store[-1][
|
240 |
-
response4 = transform2(transform(nsx_gen_store[-1][
|
241 |
-
response5 = transform2(transform(nsx_gen_store[-1][
|
242 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
243 |
|
244 |
#response = intermediate.shape;
|
245 |
#response2 = transform2(transform(np.moveaxis(intermediate,2,4)[0][0]))
|
@@ -247,7 +274,7 @@ def greet(input):
|
|
247 |
#response4 = transform2(transform(np.moveaxis(intermediate,2,4)[int(steps/4)][0]))
|
248 |
#response5 = transform2(transform(np.moveaxis(intermediate,2,4)[-1][0]))
|
249 |
|
250 |
-
return response, response2, response3, response4, response5
|
251 |
|
252 |
transform2 = transforms.ToPILImage()
|
253 |
|
@@ -256,6 +283,10 @@ transform2 = transforms.ToPILImage()
|
|
256 |
|
257 |
#iface = gr.Interface(fn=greet, inputs=[gr.Textbox(label="Text to find entities", lines=2)], outputs=[gr.HighlightedText(label="Text with entities")], title="NER with dslim/bert-base-NER", description="Find entities using the `dslim/bert-base-NER` model under the hood!", allow_flagging="never", examples=["My name is Andrew and I live in California", "My name is Poli and work at HuggingFace"])
|
258 |
#iface = gr.Interface(fn=greet, inputs=[gr.Textbox(label="Co-Retailing Business")], outputs=[gr.outputs.Image(type="pil", width=64, label="Output Image"), gr.outputs.Image(type="pil", width=64, label="Output Image2"), gr.outputs.Image(type="pil", width=64, label="Output Image3"), gr.outputs.Image(type="pil", width=64, label="Output Image4")])
|
259 |
-
|
|
|
|
|
|
|
|
|
260 |
#iface = gr.Interface(fn=greet, inputs=[gr.Textbox(label="Co-Retailing Business")], outputs=[gr.Textbox()])
|
261 |
iface.launch()
|
|
|
18 |
#openai.api_key = os.getenv('OPENAI_API_KEY')
|
19 |
|
20 |
class ContextUnet(nn.Module):
|
21 |
+
def __init__(self, in_channels, n_feat=256, n_cfeat=10, height=28): # cfeat - context features
|
22 |
super(ContextUnet, self).__init__()
|
23 |
|
24 |
# number of input channels, number of intermediate feature maps and number of classes
|
|
|
236 |
|
237 |
response = intermediate.shape;
|
238 |
response2 = transform2(transform(nsx_gen_store[-1][0]))
|
239 |
+
response3 = transform2(transform(nsx_gen_store[-1][1]))
|
240 |
+
response4 = transform2(transform(nsx_gen_store[-1][2]))
|
241 |
+
response5 = transform2(transform(nsx_gen_store[-1][3]))
|
242 |
+
response6 = transform2(transform(nsx_gen_store[-1][4]))
|
243 |
+
response7 = transform2(transform(nsx_gen_store[-1][5]))
|
244 |
+
response8 = transform2(transform(nsx_gen_store[-1][6]))
|
245 |
+
response9 = transform2(transform(nsx_gen_store[-1][7]))
|
246 |
+
response10 = transform2(transform(nsx_gen_store[-1][8]))
|
247 |
+
response11 = transform2(transform(nsx_gen_store[-1][9]))
|
248 |
+
response12 = transform2(transform(nsx_gen_store[-1][10]))
|
249 |
+
response13 = transform2(transform(nsx_gen_store[-1][11]))
|
250 |
+
response14 = transform2(transform(nsx_gen_store[-1][12]))
|
251 |
+
response15 = transform2(transform(nsx_gen_store[-1][13]))
|
252 |
+
response16 = transform2(transform(nsx_gen_store[-1][14]))
|
253 |
+
response17 = transform2(transform(nsx_gen_store[-1][15]))
|
254 |
+
response18 = transform2(transform(nsx_gen_store[-1][16]))
|
255 |
+
response19 = transform2(transform(nsx_gen_store[-1][17]))
|
256 |
+
response20 = transform2(transform(nsx_gen_store[-1][18]))
|
257 |
+
response21 = transform2(transform(nsx_gen_store[-1][19]))
|
258 |
+
response22 = transform2(transform(nsx_gen_store[-1][20]))
|
259 |
+
response23 = transform2(transform(nsx_gen_store[-1][21]))
|
260 |
+
response24 = transform2(transform(nsx_gen_store[-1][22]))
|
261 |
+
response25 = transform2(transform(nsx_gen_store[-1][23]))
|
262 |
+
response26 = transform2(transform(nsx_gen_store[-1][24]))
|
263 |
+
response27 = transform2(transform(nsx_gen_store[-1][25]))
|
264 |
+
response28 = transform2(transform(nsx_gen_store[-1][26]))
|
265 |
+
response29 = transform2(transform(nsx_gen_store[-1][27]))
|
266 |
+
response30= transform2(transform(nsx_gen_store[-1][28]))
|
267 |
+
response31 = transform2(transform(nsx_gen_store[-1][29]))
|
268 |
+
response32 = transform2(transform(nsx_gen_store[-1][30]))
|
269 |
+
response33 = transform2(transform(nsx_gen_store[-1][31]))
|
270 |
|
271 |
#response = intermediate.shape;
|
272 |
#response2 = transform2(transform(np.moveaxis(intermediate,2,4)[0][0]))
|
|
|
274 |
#response4 = transform2(transform(np.moveaxis(intermediate,2,4)[int(steps/4)][0]))
|
275 |
#response5 = transform2(transform(np.moveaxis(intermediate,2,4)[-1][0]))
|
276 |
|
277 |
+
return response, response2, response3, response4, response5, response6, response7, response8, response9, response10, response11, response12, response13, response14, response15, response16, response17, response18, response19, response20, response21, response22, response23, response24, response25, response26, response27, response28, response29, response30, response31, response32, response33
|
278 |
|
279 |
transform2 = transforms.ToPILImage()
|
280 |
|
|
|
283 |
|
284 |
#iface = gr.Interface(fn=greet, inputs=[gr.Textbox(label="Text to find entities", lines=2)], outputs=[gr.HighlightedText(label="Text with entities")], title="NER with dslim/bert-base-NER", description="Find entities using the `dslim/bert-base-NER` model under the hood!", allow_flagging="never", examples=["My name is Andrew and I live in California", "My name is Poli and work at HuggingFace"])
|
285 |
#iface = gr.Interface(fn=greet, inputs=[gr.Textbox(label="Co-Retailing Business")], outputs=[gr.outputs.Image(type="pil", width=64, label="Output Image"), gr.outputs.Image(type="pil", width=64, label="Output Image2"), gr.outputs.Image(type="pil", width=64, label="Output Image3"), gr.outputs.Image(type="pil", width=64, label="Output Image4")])
|
286 |
+
|
287 |
+
#iface = gr.Interface(fn=greet, inputs=[gr.Textbox(label="steps", value=20)], outputs=[gr.Textbox(label="Info"), gr.Image(type="pil", width=64, label="Output Image"), gr.Image(type="pil", width=64, label="Output Image2"), gr.Image(type="pil", width=64, label="Output Image3"), gr.Image(type="pil", width=64, label="Output Image4")])
|
288 |
+
# Build the Gradio interface: one text input ("steps") feeding `greet`, whose 33
# return values map onto one Info textbox plus 32 intermediate-image outputs.
# NOTE(review): the original single-line literal contained three stray ", ,"
# sequences (after Image6, Image22 and Image30) — each a SyntaxError — fixed here.
# The first image label is "Output Image" (no number) to match the original UI.
_image_outputs = [
    gr.Image(type="pil", width=64, label=f"Output Image{'' if i == 1 else i}")
    for i in range(1, 33)
]
iface = gr.Interface(
    fn=greet,
    inputs=[gr.Textbox(label="steps", value=20)],
    outputs=[gr.Textbox(label="Info"), *_image_outputs],
)
|
289 |
+
|
290 |
+
|
291 |
#iface = gr.Interface(fn=greet, inputs=[gr.Textbox(label="Co-Retailing Business")], outputs=[gr.Textbox()])
|
292 |
# Start the Gradio web server for the interface defined above
# (blocks until the app is stopped; default host/port settings).
iface.launch()
|