{
  "algo": "lokr",
  "multiplier": 1,
  "linear_dim": 1000000,
  "linear_alpha": 1,
  "factor": 16,
  "init_lokr_norm": 0.001,
  "apply_preset": {
    "target_module": [
      "FluxTransformerBlock",
      "FluxSingleTransformerBlock"
    ],
    "module_algo_map": {
      "Attention": { "factor": 16 },
      "FeedForward": { "factor": 8 }
    }
  }
}