Model fails to load in oobabooga with both ExLlama and AutoGPTQ
I've never reported an issue like this before, so pardon me.
AutoGPTQ
Traceback (most recent call last):
  File "H:\AI\oobabooga_windows\text-generation-webui\modules\ui_model_menu.py", line 194, in load_model_wrapper
    shared.model, shared.tokenizer = load_model(shared.model_name, loader)
  File "H:\AI\oobabooga_windows\text-generation-webui\modules\models.py", line 76, in load_model
    output = load_func_map[loader](model_name)
  File "H:\AI\oobabooga_windows\text-generation-webui\modules\models.py", line 302, in AutoGPTQ_loader
    return modules.AutoGPTQ_loader.load_quantized(model_name)
  File "H:\AI\oobabooga_windows\text-generation-webui\modules\AutoGPTQ_loader.py", line 57, in load_quantized
    model = AutoGPTQForCausalLM.from_quantized(path_to_model, **params)
  File "H:\AI\oobabooga_windows\installer_files\env\lib\site-packages\auto_gptq\modeling\auto.py", line 108, in from_quantized
    return quant_func(
  File "H:\AI\oobabooga_windows\installer_files\env\lib\site-packages\auto_gptq\modeling\_base.py", line 875, in from_quantized
    accelerate.utils.modeling.load_checkpoint_in_model(
  File "H:\AI\oobabooga_windows\installer_files\env\lib\site-packages\accelerate\utils\modeling.py", line 1414, in load_checkpoint_in_model
    set_module_tensor_to_device(
  File "H:\AI\oobabooga_windows\installer_files\env\lib\site-packages\accelerate\utils\modeling.py", line 285, in set_module_tensor_to_device
    raise ValueError(
ValueError: Trying to set a tensor of shape torch.Size([32000, 5120]) in "weight" (which has shape torch.Size([32000, 4096])), this look incorrect.
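In case it helps with triage: the shape mismatch (32000x5120 in the checkpoint vs 32000x4096 expected) looks like the weights don't match the hidden_size in the model's config.json, e.g. 13B-sized tensors sitting next to a 7B config. A minimal sketch to compare the two, assuming a .safetensors checkpoint and a hypothetical model directory (adjust the path to the failing model):

```python
# Minimal sketch (not the webui's code): compare config.json against the
# embedding shape stored in the quantized checkpoint. The model directory
# below is a hypothetical placeholder.
import json
from pathlib import Path

from safetensors import safe_open  # assumes a .safetensors checkpoint

model_dir = Path(r"H:\AI\oobabooga_windows\text-generation-webui\models\my-model")

config = json.loads((model_dir / "config.json").read_text())
print("config hidden_size:      ", config.get("hidden_size"))
print("config num_hidden_layers:", config.get("num_hidden_layers"))

# The token embedding should be [vocab_size, hidden_size]; if it prints
# [32000, 5120] while config.json says hidden_size 4096, the config and the
# weights come from different models.
ckpt = next(model_dir.glob("*.safetensors"))
with safe_open(ckpt, framework="pt", device="cpu") as f:
    for name in f.keys():
        if "embed_tokens" in name:
            print(name, f.get_slice(name).get_shape())
```

If those two disagree, re-downloading the whole model folder so config.json and the shards come from the same repo/branch would probably clear it up.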
ExLlama
Traceback (most recent call last):
  File "H:\AI\oobabooga_windows\text-generation-webui\modules\ui_model_menu.py", line 194, in load_model_wrapper
    shared.model, shared.tokenizer = load_model(shared.model_name, loader)
  File "H:\AI\oobabooga_windows\text-generation-webui\modules\models.py", line 76, in load_model
    output = load_func_map[loader](model_name)
  File "H:\AI\oobabooga_windows\text-generation-webui\modules\models.py", line 308, in ExLlama_loader
    model, tokenizer = ExllamaModel.from_pretrained(model_name)
  File "H:\AI\oobabooga_windows\text-generation-webui\modules\exllama.py", line 75, in from_pretrained
    model = ExLlama(config)
  File "H:\AI\oobabooga_windows\installer_files\env\lib\site-packages\exllama\model.py", line 814, in __init__
    device = self.config.device_map.map(key)
  File "H:\AI\oobabooga_windows\installer_files\env\lib\site-packages\exllama\model.py", line 666, in map
    return self.layers[num]
IndexError: list index out of range
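The ExLlama IndexError might be the same mismatch seen from another angle: if the device map's layers list is sized from num_hidden_layers in config.json, a checkpoint that contains more decoder layers than the config declares would index past the end of that list. A small sketch to count the layers actually present in the shards (same hypothetical model directory as above):

```python
# Minimal sketch: count the decoder layers present in the shards and compare
# with num_hidden_layers from config.json. Paths are placeholders.
import json
import re
from pathlib import Path

from safetensors import safe_open

model_dir = Path(r"H:\AI\oobabooga_windows\text-generation-webui\models\my-model")

config = json.loads((model_dir / "config.json").read_text())
print("config num_hidden_layers:", config.get("num_hidden_layers"))

layer_ids = set()
for ckpt in model_dir.glob("*.safetensors"):
    with safe_open(ckpt, framework="pt", device="cpu") as f:
        for name in f.keys():
            m = re.search(r"layers\.(\d+)\.", name)
            if m:
                layer_ids.add(int(m.group(1)))

print("layers found in checkpoint:", max(layer_ids) + 1 if layer_ids else 0)
```

If the checkpoint reports more layers than config.json, that points back at a config/weights mismatch rather than a loader bug.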