tridungduong16
committed on
Commit
·
1b3a8f0
1
Parent(s):
e9f87f2
Update README.md
Browse files
README.md
CHANGED
@@ -43,8 +43,8 @@ The models can be used as auto-regressive samplers as follows:
|
|
43 |
import torch
|
44 |
from transformers import AutoTokenizer, AutoModelForCausalLM
|
45 |
|
46 |
-
tokenizer = AutoTokenizer.from_pretrained("
|
47 |
-
model = AutoModelForCausalLM.from_pretrained("
|
48 |
inputs = tokenizer("The world is", return_tensors="pt")
|
49 |
sample = model.generate(**inputs, max_length=128)
|
50 |
print(tokenizer.decode(sample[0]))
|
|
|
43 |
import torch
|
44 |
from transformers import AutoTokenizer, AutoModelForCausalLM
|
45 |
|
46 |
+
tokenizer = AutoTokenizer.from_pretrained("tridungduong16/xgen-7b-8k-base-orca", trust_remote_code=True)
|
47 |
+
model = AutoModelForCausalLM.from_pretrained("tridungduong16/xgen-7b-8k-base-orca", torch_dtype=torch.bfloat16)
|
48 |
inputs = tokenizer("The world is", return_tensors="pt")
|
49 |
sample = model.generate(**inputs, max_length=128)
|
50 |
print(tokenizer.decode(sample[0]))
|