tiny-random-LlamaForCausalLM-msr-vtt-04c4fdaa-50d9-4708-a895-880f38983416/last-checkpoint/adapter_config.json
{
  "alpha_pattern": {},
  "auto_mapping": null,
  "base_model_name_or_path": "trl-internal-testing/tiny-random-LlamaForCausalLM",
  "bias": "none",
  "fan_in_fan_out": false,
  "inference_mode": true,
  "init_lora_weights": true,
  "layer_replication": null,
  "layers_pattern": null,
  "layers_to_transform": null,
  "loftq_config": {},
  "lora_alpha": 64,
  "lora_dropout": 0.3,
  "megatron_config": null,
  "megatron_core": "megatron.core",
  "modules_to_save": [
    "lm_head"
  ],
  "peft_type": "LORA",
  "r": 32,
  "rank_pattern": {},
  "revision": null,
  "target_modules": [
    "model.layers.1.mlp.down_proj",
    "model.layers.1.self_attn.v_proj",
    "model.layers.1.self_attn.k_proj",
    "model.layers.1.self_attn.q_proj",
    "model.layers.0.mlp.up_proj",
    "model.layers.0.self_attn.v_proj",
    "model.layers.1.mlp.gate_proj",
    "model.embed_tokens",
    "model.layers.1.mlp.up_proj",
    "model.layers.1.self_attn.o_proj",
    "model.layers.0.self_attn.q_proj",
    "model.layers.0.mlp.down_proj"
  ],
  "task_type": "CAUSAL_LM",
  "use_dora": false,
  "use_rslora": true
}
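
For reference, a minimal sketch of how an adapter saved with a config like this one is typically loaded using the peft and transformers libraries. The local directory name "last-checkpoint" mirrors the path above and is otherwise an assumption; adjust it to wherever the adapter weights actually live.

# Minimal sketch: attach the LoRA adapter described above to its base model.
from peft import PeftConfig, PeftModel
from transformers import AutoModelForCausalLM, AutoTokenizer

adapter_dir = "last-checkpoint"  # assumed local path to the adapter checkpoint

# adapter_config.json records which base model the adapter was trained on.
config = PeftConfig.from_pretrained(adapter_dir)
base_model = AutoModelForCausalLM.from_pretrained(config.base_model_name_or_path)
tokenizer = AutoTokenizer.from_pretrained(config.base_model_name_or_path)

# Attach the LoRA weights (r=32, lora_alpha=64, rsLoRA scaling) to the base model.
model = PeftModel.from_pretrained(base_model, adapter_dir)
model.eval()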