TeunS committed
Commit f281d7c
1 Parent(s): 6b54bf8

Delete adapters folder

adapters/0000100_adapters.safetensors DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:2b4e16f8276d4190e2d7c151d5ea9191c96ac9f7aa02f9d54ece844a6c1d74d4
-size 17909586
adapters/adapter_config.json DELETED
@@ -1,29 +0,0 @@
-{
-    "adapter_path": "/Users/teun/Documents/output-cleaned/adapters",
-    "batch_size": 4,
-    "config": null,
-    "data": "/Users/teun/Documents/output-cleaned",
-    "fine_tune_type": "lora",
-    "grad_checkpoint": null,
-    "iters": 600,
-    "learning_rate": 1e-05,
-    "lora_parameters": {
-        "rank": 8,
-        "alpha": 16,
-        "dropout": 0.0,
-        "scale": 10.0
-    },
-    "lr_schedule": null,
-    "max_seq_length": 2048,
-    "model": "mlx-community/gemma-2-9b-it-4bit",
-    "num_layers": 16,
-    "resume_adapter_file": null,
-    "save_every": 100,
-    "seed": 0,
-    "steps_per_eval": 200,
-    "steps_per_report": 10,
-    "test": false,
-    "test_batches": 500,
-    "train": true,
-    "val_batches": 25
-}
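For context, the deleted adapter_config.json describes an MLX LoRA fine-tuning run (rank-8 adapters on 16 layers of mlx-community/gemma-2-9b-it-4bit). A minimal sketch of loading such adapters with the mlx-lm Python package is shown below; it assumes mlx-lm is installed and that a local adapters/ directory (adapter_config.json plus adapters.safetensors) is available, which this commit removes from the repo. The prompt string is only an illustration.

# Sketch: loading LoRA adapters produced by a run with the deleted config.
# Assumes the mlx-lm package is installed and an "adapters" directory
# (adapter_config.json + adapters.safetensors) exists locally.
from mlx_lm import load, generate

model, tokenizer = load(
    "mlx-community/gemma-2-9b-it-4bit",  # base model named in the deleted config
    adapter_path="adapters",             # directory removed by this commit
)

# Illustrative prompt; generation runs with the LoRA adapters applied.
print(generate(model, tokenizer, prompt="Hello, how are you?", max_tokens=100))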
adapters/adapters.safetensors DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:2b4e16f8276d4190e2d7c151d5ea9191c96ac9f7aa02f9d54ece844a6c1d74d4
-size 17909586