mjbuehler committed
Commit
01affee
1 Parent(s): ed71b5a

Upload 2 files

Files changed (2):
  1. xlora_classifier.safetensors +3 -0
  2. xlora_config.json +11 -0
xlora_classifier.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:44db6f15b3d7708c42d4e1ad59b7a6804087308608cbcc8f98453d1ee1414728
+ size 11799520
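
The `.safetensors` entry is committed as a Git LFS pointer, not the weights themselves: the repository stores only the SHA-256 object ID and the byte size, while the ~11.8 MB classifier file lives in LFS storage. A minimal sketch, assuming the real file has been fetched locally, that checks a download against the pointer:

```python
import hashlib
from pathlib import Path

# Expected values copied from the LFS pointer above.
EXPECTED_OID = "44db6f15b3d7708c42d4e1ad59b7a6804087308608cbcc8f98453d1ee1414728"
EXPECTED_SIZE = 11799520

def verify_lfs_object(path: str) -> bool:
    """Check a downloaded file against the pointer's oid and size."""
    data = Path(path).read_bytes()
    return (len(data) == EXPECTED_SIZE
            and hashlib.sha256(data).hexdigest() == EXPECTED_OID)

print(verify_lfs_object("xlora_classifier.safetensors"))
```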
xlora_config.json ADDED
@@ -0,0 +1,11 @@
+ {"base_model_id": "HuggingFaceH4/zephyr-7b-beta", "adapters": {
+ "adapter_1": "lamm-mit/x-lora/X-LoRA_adapters/1/",
+ "adapter_2": "lamm-mit/x-lora/X-LoRA_adapters/2/",
+ "adapter_3": "lamm-mit/x-lora/X-LoRA_adapters/3/",
+ "adapter_4": "lamm-mit/x-lora/X-LoRA_adapters/4/",
+ "adapter_5": "lamm-mit/x-lora/X-LoRA_adapters/5/",
+ "adapter_6": "lamm-mit/x-lora/X-LoRA_adapters/6/",
+ "adapter_7": "lamm-mit/x-lora/X-LoRA_adapters/7/",
+ "adapter_8": "lamm-mit/x-lora/X-LoRA_adapters/8/",
+ "adapter_9": "lamm-mit/x-lora/X-LoRA_adapters/9/"
+ }, "hidden_size": 4096, "enable_softmax": true, "enable_softmax_topk": false, "layerwise_scalings": true, "xlora_depth": 1, "xlora_size": 2048, "enable_relu_and_dropout": true, "use_bias": true, "xlora_dropout_p": 0.2, "stop_token_id": null, "use_trainable_adapters": false, "softmax_temperature": 1.0, "top_k_lora": null, "scaling_pass_value": 0.0}