fastapi==0.74.*
requests==2.27.*
uvicorn[standard]==0.17.*
sentencepiece==0.1.*
torch==1.11.*
transformers==4.*
numpy==1.*
llama-cpp-python