Update README.md
Browse files
README.md
CHANGED
@@ -24,6 +24,7 @@ from transformers import AutoModelForCausalLM, AutoTokenizer


 # HACK: override the dtype_byte_size function in transformers to support float8 types
+# Fix is posted upstream https://github.com/huggingface/transformers/pull/30488
 def new_dtype_byte_size(dtype):
     if dtype == torch.bool:
         return 1 / 8