{
  "algo": "lora",
  "multiplier": 1.0,
  "linear_dim": 64,
  "linear_alpha": 32,
  "apply_preset": {
    "target_module": [
      "Attention",
      "FeedForward"
    ],
    "module_algo_map": {
      "Attention": {
        "factor": 16
      },
      "FeedForward": {
        "factor": 8
      }
    }
  }
}