import gradio as gr


# Minimal demo: echoes a greeting for the submitted name.
def greet(name):
    return "Hello " + name + "!!"


iface = gr.Interface(fn=greet, inputs="text", outputs="text")
iface.launch()
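# Hypothetical usage example (not part of the original Space): calling the
# wrapped function directly mirrors what the Interface does on submit.
#   >>> greet("World")
#   'Hello World!!'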
# # Model to use
# net_path = 'fire.pth'
# # CPU / GPU
# device = 'cpu'
# # Images will be downscaled to this size prior to processing with the network
# image_size = 1024
# # Wrapper for matching/visualization (body not present in this commented-out revision)
# def generate_matching_superfeatures(im1, im2, scale=6):
#     # Possible scales for multiscale inference
#     scales = [2.0, 1.414, 1.0, 0.707, 0.5, 0.353, 0.25]
# # GRADIO APP
# title = "Visualizing Super-features"
# description = "TBD"
# article = "<p style='text-align: center'><a href='https://github.com/naver/fire' target='_blank'>Original Github Repo</a></p>"
# iface = gr.Interface(
#     fn=generate_matching_superfeatures,
#     inputs=[
#         gr.inputs.Image(shape=(240, 240), type="pil"),
#         gr.inputs.Image(shape=(240, 240), type="pil"),
#         gr.inputs.Slider(minimum=1, maximum=7, step=1, default=2, label="Scale")],
#     outputs="plot",
#     enable_queue=True,
#     title=title,
#     description=description,
#     article=article,
#     examples=[["chateau_1.png", "chateau_2.png", 6]],
# )
# iface.launch()