{ "model_name_or_path": "NousResearch/Llama-2-7b-hf", "max_length": 256, "model_kwargs": {}, "pooling_strategy": "last", "lora_config_kwargs": { "task_type": "CAUSAL_LM", "r": 32, "lora_alpha": 32, "lora_dropout": 0.1, "bias": "none" }, "is_llm": true, "apply_billm": false, "billm_model_class": null, "apply_lora": true, "tokenizer_padding_side": null, "angle_emb_version": "0.5.1" }