Update README.md
README.md CHANGED
@@ -47,7 +47,6 @@ from transformers import AutoTokenizer, AutoModelForCausalLM
 
 tokenizer = AutoTokenizer.from_pretrained("pfnet/nekomata-14b-pfn-qfin", trust_remote_code=True)
 
-
 # Use GPU with bf16 (recommended for supported devices)
 # model = AutoModelForCausalLM.from_pretrained("pfnet/nekomata-14b-pfn-qfin", device_map="auto", trust_remote_code=True, bf16=True)
 
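For context, the README lines touched by this diff load pfnet/nekomata-14b-pfn-qfin with Hugging Face transformers. The sketch below shows how the commented-out bf16 load line would typically be used end to end; it is a minimal, hedged example, and the generation call and the Japanese prompt are illustrative additions rather than part of the committed README.

```python
from transformers import AutoTokenizer, AutoModelForCausalLM

# Load the tokenizer; the model repo ships custom code, hence trust_remote_code=True.
tokenizer = AutoTokenizer.from_pretrained("pfnet/nekomata-14b-pfn-qfin", trust_remote_code=True)

# GPU load with bf16, as in the README comment above (bf16=True is passed through
# to the repo's custom modeling code).
model = AutoModelForCausalLM.from_pretrained(
    "pfnet/nekomata-14b-pfn-qfin",
    device_map="auto",
    trust_remote_code=True,
    bf16=True,
)

# Illustrative prompt (not from the README): any text completion works the same way.
inputs = tokenizer("日本銀行の金融政策について説明してください。", return_tensors="pt").to(model.device)
outputs = model.generate(**inputs, max_new_tokens=128)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```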