sudy-super committed
Commit eccb765 · 1 Parent(s): b7cefeb

Update README.md

Files changed (1):
  1. README.md +3 -0
README.md CHANGED
@@ -12,8 +12,10 @@ This model is a 10.2 billion parameter model that combines two sets of 24 layers
 ```python
 from transformers import AutoTokenizer, AutoModelForCausalLM
 import torch
+
 tokenizer = AutoTokenizer.from_pretrained("sudy-super/baku-10b")
 model = AutoModelForCausalLM.from_pretrained("sudy-super/baku-10b", device_map="auto", torch_dtype=torch.bfloat16)
+
 prompt = "LLMとは、"
 token_ids = tokenizer.encode(prompt, add_special_tokens=False, return_tensors="pt")
 with torch.no_grad():
@@ -27,5 +29,6 @@ with torch.no_grad():
 eos_token_id=tokenizer.eos_token_id
 )
 result = tokenizer.decode(output_ids.tolist()[0])
+
 print(result)
 ```
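For context, the hunks above only show the edges of the README's usage example; the middle of the generation call (old lines 20-26) is not part of the diff. Below is a minimal, self-contained sketch of what that example does end to end. The `generate()` arguments other than `eos_token_id` (such as `max_new_tokens`, `do_sample`, and `temperature`) are illustrative assumptions, not the README's exact values.

```python
from transformers import AutoTokenizer, AutoModelForCausalLM
import torch

# Load the tokenizer and the 10.2B model, sharding it across available devices in bfloat16.
tokenizer = AutoTokenizer.from_pretrained("sudy-super/baku-10b")
model = AutoModelForCausalLM.from_pretrained(
    "sudy-super/baku-10b", device_map="auto", torch_dtype=torch.bfloat16
)

prompt = "LLMとは、"  # "An LLM is, ..."
token_ids = tokenizer.encode(prompt, add_special_tokens=False, return_tensors="pt")

with torch.no_grad():
    output_ids = model.generate(
        token_ids.to(model.device),
        max_new_tokens=128,   # assumed value; not shown in the diff
        do_sample=True,       # assumed sampling setup; not shown in the diff
        temperature=0.7,      # assumed value; not shown in the diff
        eos_token_id=tokenizer.eos_token_id,
    )

# Decode the full sequence (prompt + continuation) and print it.
result = tokenizer.decode(output_ids.tolist()[0])
print(result)
```

The commit itself only inserts three blank lines into this example (after the imports, after the model load, and before `print`), grouping the snippet into setup, generation, and output sections without changing its behavior.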