import torch

from fastapi import FastAPI
from transformers import AutoModelForCausalLM, AutoTokenizer

app = FastAPI()

# Load the tokenizer and model once at startup rather than inside the request
# handler; SOLAR-10.7B is large, so reloading it on every request would make
# each call prohibitively slow.
tokenizer = AutoTokenizer.from_pretrained("Upstage/SOLAR-10.7B-v1.0")
model = AutoModelForCausalLM.from_pretrained(
    "Upstage/SOLAR-10.7B-v1.0",
    device_map="auto",
    torch_dtype=torch.float16,
)


@app.get("/")
def llama():
    # Tokenize the prompt and move the tensors to the model's device.
    text = "Hi, my name is "
    inputs = tokenizer(text, return_tensors="pt").to(model.device)

    # Generate up to 64 new tokens and return the decoded completion.
    outputs = model.generate(**inputs, max_new_tokens=64)
    return tokenizer.decode(outputs[0], skip_special_tokens=True)
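

# A minimal sketch of how the service might be launched directly with uvicorn.
# This block is an assumption, not part of the original file; the host and
# port values are illustrative. Alternatively, run `uvicorn <module>:app`
# from the command line.
if __name__ == "__main__":
    import uvicorn

    uvicorn.run(app, host="0.0.0.0", port=8000)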