Commit feeebdc (parent: 7cbd81a)

go

Files changed:
- Dockerfile: +3 -1
- main.py: +3 -15
- requirements.txt: +0 -3
Dockerfile
CHANGED
@@ -1,7 +1,9 @@
 # read the doc: https://huggingface.co/docs/hub/spaces-sdks-docker
 # you will also find guides on how best to write your Dockerfile
 
-FROM
+FROM dnxie/resym
+
+RUN git clone https://github.com/lt-asset/resym /home/ReSym
 
 WORKDIR /code
 
main.py
CHANGED
@@ -1,26 +1,14 @@
 import gradio as gr
-import torch
-import requests
-from torchvision import transforms
-
-model = torch.hub.load("pytorch/vision:v0.6.0", "resnet18", pretrained=True).eval()
-response = requests.get("https://git.io/JJkYN")
-labels = response.text.split("\n")
-
 
 def predict(inp):
-
-    with torch.no_grad():
-        prediction = torch.nn.functional.softmax(model(inp)[0], dim=0)
-        confidences = {labels[i]: float(prediction[i]) for i in range(1000)}
-    return confidences
+    return "Hi"
 
 
 def run():
     demo = gr.Interface(
         fn=predict,
-        inputs=gr.
-        outputs=gr.
+        inputs=gr.Text,
+        outputs=gr.Text,
     )
 
     demo.launch(server_name="0.0.0.0", server_port=7860)
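For reference, the new main.py pieces in this hunk assemble into the following minimal file. This is a sketch, not the verbatim Space file: an entry point that calls run() under an if __name__ == "__main__" guard is assumed (the hunk ends at demo.launch), and the gr.Text components named in the diff are instantiated here for readability.

import gradio as gr


def predict(inp):
    # The model-backed classifier was removed in this commit; the stub just returns a greeting.
    return "Hi"


def run():
    # Text in, text out: a minimal Gradio Interface around the stub predictor.
    demo = gr.Interface(
        fn=predict,
        inputs=gr.Text(),
        outputs=gr.Text(),
    )

    # Bind to all interfaces on port 7860, the port Hugging Face Spaces serves.
    demo.launch(server_name="0.0.0.0", server_port=7860)


if __name__ == "__main__":
    run()  # assumed entry point; not shown in the hunk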
requirements.txt
CHANGED
@@ -1,4 +1 @@
 gradio
-torch
-torchvision
-requests