import torch
from transformers import BitsAndBytesConfig

import config


def load_bits_and_bytes_config():
    """Build the bitsandbytes 4-bit quantization config from project settings."""
    bnb_config = BitsAndBytesConfig(
        load_in_4bit=config.ENABLE_4BIT,                # toggle 4-bit weight loading
        bnb_4bit_quant_type=config.QUANTIZATION_TYPE,   # quantization scheme, e.g. "nf4" or "fp4"
        bnb_4bit_compute_dtype=torch.float16,           # run matmuls in fp16 for speed
    )
    return bnb_config
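
# Usage sketch (illustrative, not part of the original file): the returned
# BitsAndBytesConfig is typically passed to a transformers model through the
# quantization_config argument of from_pretrained. config.MODEL_NAME is an
# assumed constant here, not defined in the code above.
#
# from transformers import AutoModelForCausalLM
#
# model = AutoModelForCausalLM.from_pretrained(
#     config.MODEL_NAME,  # assumed setting; substitute a real model id
#     quantization_config=load_bits_and_bytes_config(),
#     device_map="auto",
# )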