Upload 3 files

- Dockerfile +3 -3
- app.py +17 -25

Dockerfile CHANGED
@@ -10,12 +10,12 @@ COPY ./requirements.txt /code/requirements.txt
 # Install requirements.txt
 RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt
 
-# Set up a new user named "user"
-RUN useradd
+# Set up a new user named "user"
+RUN useradd user
 # Switch to the "user" user
 USER user
 # Set home to the user's home directory
-
+
 ENV HOME=/home/user \
     PATH=/home/user/.local/bin:$PATH
 
app.py CHANGED
@@ -1,28 +1,20 @@
 from fastapi import FastAPI
+from fastapi.staticfiles import StaticFiles
+from fastapi.responses import FileResponse
+
 from transformers import pipeline
-
-# Create a new FastAPI app instance
+
 app = FastAPI()
-
-
-
-
-
-
-
-
-
-
-
-
-
-"""
-Using the text2text-generation pipeline from `transformers`, generate text
-from the given input text. The model used is `google/flan-t5-small`, which
-can be found [here](<https://huggingface.co/google/flan-t5-small>).
-"""
-# Use the pipeline to generate text from the given input text
-output = pipe(text)
-
-# Return the generated text in a JSON response
-return {"output": output[0]["generated_text"]}
+
+pipe_flan = pipeline("text2text-generation", model="google/flan-t5-small")
+
+@app.get("/infer_t5")
+def t5(input):
+    output = pipe_flan(input)
+    return {"output": output[0]["generated_text"]}
+
+app.mount("/", StaticFiles(directory="static", html=True), name="static")
+
+@app.get("/")
+def index() -> FileResponse:
+    return FileResponse(path="/app/static/index.html", media_type="text/html")
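For reference, a minimal client sketch for the new /infer_t5 endpoint is shown below. It is not part of this commit: the base URL is a placeholder (use your Space's public URL, or http://localhost:7860 when running the container locally), and it assumes the requests library is installed on the client side.

# Minimal client sketch for the /infer_t5 endpoint defined in app.py above.
# BASE_URL is a placeholder: substitute your Space's URL, or keep
# http://localhost:7860 when running the container locally.
import requests

BASE_URL = "http://localhost:7860"

response = requests.get(
    f"{BASE_URL}/infer_t5",
    params={"input": "Translate English to German: How are you?"},
)
response.raise_for_status()

# The endpoint returns JSON of the form {"output": "<generated text>"}.
print(response.json()["output"])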