|
import os |
|
import requests |
|
import json |
|
from io import BytesIO |
|
|
|
from fastapi import FastAPI |
|
from fastapi.staticfiles import StaticFiles |
|
from fastapi.responses import FileResponse, StreamingResponse |
|
|
|
from modules.inference import infer_t5 |
|
from modules.dataset import query_emotion |
|
|
|
|
|
|
|
# Hugging Face Inference API token used to authenticate BigGAN requests.
# NOTE(review): may be None if BIG_GAN_TOKEN is unset — the request header
# would then read "Bearer None"; confirm the env var is always provided.
API_TOKEN = os.getenv("BIG_GAN_TOKEN")

# Interactive API docs (Swagger / ReDoc) are deliberately disabled.
app = FastAPI(docs_url=None, redoc_url=None)

# Serve front-end assets (index.html, JS, CSS, ...) from the local ./static dir.
app.mount("/static", StaticFiles(directory="static"), name="static")
|
|
|
|
|
@app.head("/")
@app.get("/")
def index() -> FileResponse:
    """Serve the single-page front end at the site root (GET and HEAD)."""
    homepage = "static/index.html"
    return FileResponse(path=homepage, media_type="text/html")
|
|
|
|
|
@app.get("/infer_biggan")
def biggan(input):
    """Proxy a BigGAN image-generation request to the HF Inference API.

    Args:
        input: Query-string value, JSON-encoded and forwarded verbatim as the
            request body (presumably an ImageNet class label — confirm against
            the front end). The name shadows the `input` builtin but is kept
            because FastAPI derives the query-parameter name from it.

    Returns:
        StreamingResponse wrapping the upstream response bytes as image/png.
        NOTE(review): upstream errors (JSON bodies) are also streamed back
        with an image/png content type — existing behavior, left unchanged.
    """
    # requests.post is the idiomatic form of requests.request("POST", ...).
    output = requests.post(
        "https://api-inference.huggingface.co/models/osanseviero/BigGAN-deep-128",
        headers={"Authorization": f"Bearer {API_TOKEN}"},
        data=json.dumps(input),
        # Without a timeout a stuck upstream hangs this worker indefinitely.
        timeout=60,
    )

    return StreamingResponse(BytesIO(output.content), media_type="image/png")
|
|
|
|
|
@app.get("/infer_t5")
def t5(input):
    """Run the project's T5 inference helper on *input* and wrap the result."""
    return {"output": infer_t5(input)}
|
|
|
|
|
@app.get("/query_emotion")
def emotion(start, end):
    """Look up emotion-dataset entries for the given index range.

    *start* and *end* arrive as query strings and are coerced to int
    (a non-numeric value raises ValueError, surfacing as a 500).
    """
    first, last = int(start), int(end)
    return {"output": query_emotion(first, last)}
|
|