# NOTE(review): the three lines below were viewer/extraction artifacts
# (file size, commit hashes, line-number gutter) accidentally captured
# with the source; commented out so the module parses.
# File size: 316 Bytes
# 27eebf3 a75dc18 |
# 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 |
import torch
from fastapi import FastAPI
from pydantic import BaseModel
# Module-level ASGI application instance; route handlers below register on it.
app = FastAPI()
class InferenceRequest(BaseModel):
    """Request body for the ``/predict`` endpoint.

    Fields:
        prompt: input text to condition generation on.
        max_length: maximum number of tokens to generate (default 16).
        temperature: sampling temperature (default 0.9).
    """

    prompt: str
    max_length: int = 16
    # Fix: was annotated ``int`` with a float default (0.9). Under pydantic
    # that either rejects or truncates float payloads; ``float`` matches the
    # intended sampling-temperature semantics and still accepts ints.
    temperature: float = 0.9
@app.post('/predict')
def predict(request: InferenceRequest):
    """Stub inference endpoint: echoes the prompt back as the generated text.

    NOTE(review): ``max_length`` and ``temperature`` from the request are
    currently unused — presumably a placeholder until a real model is wired in.
    """
    response = {'generated_text': request.prompt}
    return response