Commit 256d44d · Parent(s): 4998255 — "Update README.md" (Browse files)
README.md
CHANGED
@@ -12,8 +12,8 @@ This model is a 13.5 billion parameter model that combines four sets of 16 layer
|
|
12 |
```python
|
13 |
from transformers import AutoTokenizer, AutoModelForCausalLM
|
14 |
import torch
|
15 |
- tokenizer = AutoTokenizer.from_pretrained("sudy-super/baku-13b")
|
16 |
- model = AutoModelForCausalLM.from_pretrained("sudy-super/baku-13b", device_map="auto", torch_dtype=torch.bfloat16)
|
17 |
prompt = "大規模言語モデルとは、"
|
18 |
token_ids = tokenizer.encode(prompt, add_special_tokens=False, return_tensors="pt")
|
19 |
with torch.no_grad():
|
|
|
12 |
```python
|
13 |
from transformers import AutoTokenizer, AutoModelForCausalLM
|
14 |
import torch
|
15 |
+ tokenizer = AutoTokenizer.from_pretrained("sudy-super/baku-13b-v2")
|
16 |
+ model = AutoModelForCausalLM.from_pretrained("sudy-super/baku-13b-v2", device_map="auto", torch_dtype=torch.bfloat16)
|
17 |
prompt = "大規模言語モデルとは、"
|
18 |
token_ids = tokenizer.encode(prompt, add_special_tokens=False, return_tensors="pt")
|
19 |
with torch.no_grad():
|