tinyvlm / handler.py
from fastapi import FastAPI
from pydantic import BaseModel

app = FastAPI()


class InferenceRequest(BaseModel):
    """Request schema for the /predict endpoint."""
    prompt: str
    max_length: int = 16
    temperature: float = 0.9


@app.post('/predict')
def predict(request: InferenceRequest):
    # Placeholder handler: no model is loaded yet, so the prompt is echoed back.
    return {
        'generated_text': request.prompt
    }
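
For reference, a minimal client sketch for exercising this endpoint (assuming the app is served locally with `uvicorn handler:app --port 8000`; the URL and sample request values below are illustrative, not part of the original file):

import requests

# Post a prompt to the placeholder /predict endpoint; the current stub echoes it back.
resp = requests.post(
    "http://localhost:8000/predict",
    json={"prompt": "Describe the image.", "max_length": 32, "temperature": 0.7},
)
print(resp.json())  # e.g. {"generated_text": "Describe the image."}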