{
  "config": {
    "architecture": "union",
    "configs": [
      {
        "alpha": 1,
        "architecture": "lora",
        "attn_matrices": [
          "k",
          "v"
        ],
        "composition_mode": "scale",
        "dropout": 0.0,
        "init_weights": "ia3",
        "intermediate_lora": false,
        "output_lora": false,
        "r": 1,
        "selfattn_lora": true
      },
      {
        "adapter_residual_before_ln": false,
        "cross_adapter": false,
        "factorized_phm_W": true,
        "factorized_phm_rule": false,
        "hypercomplex_nonlinearity": "glorot-uniform",
        "init_weights": "bert",
        "inv_adapter": "nice",
        "inv_adapter_reduction_factor": 2,
        "is_parallel": false,
        "learn_phm": true,
        "leave_out": [],
        "ln_after": false,
        "ln_before": false,
        "mh_adapter": false,
        "non_linearity": "relu",
        "original_ln_after": false,
        "original_ln_before": false,
        "output_adapter": false,
        "phm_bias": true,
        "phm_c_init": "normal",
        "phm_dim": 4,
        "phm_init_range": 0.0001,
        "phm_layer": false,
        "phm_rank": 1,
        "reduction_factor": 16,
        "residual_before_ln": true,
        "scaling": 1.0,
        "shared_W_phm": false,
        "shared_phm_rule": true
      }
    ]
  },
  "config_id": "d2c8db3436274834",
  "hidden_size": 1024,
  "model_class": "BloomForCausalLM",
  "model_name": "bigscience/bloom-560m",
  "model_type": "bloom",
  "name": "oscar_ia3+inv_am",
  "version": "3.1.0a0"
}
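
This is the serialized config of a ConfigUnion that combines an (IA)^3 module (a LoRA variant with `composition_mode: "scale"` and `r: 1` on the k/v attention matrices) with NICE invertible adapters, while the regular bottleneck adapter (`output_adapter: false`) stays disabled. A minimal sketch, assuming the adapter-transformers library at roughly the 3.1 release indicated by the `version` field (bloom support may require the matching alpha build), of how an equivalent union could be rebuilt and attached; the constructor overrides mirror the non-default fields above:

```python
# Sketch only: class and method names (ConfigUnion, IA3Config, PfeifferInvConfig,
# add_adapter, train_adapter) follow adapter-transformers ~3.1.
from transformers import BloomForCausalLM
from transformers.adapters import ConfigUnion, IA3Config, PfeifferInvConfig

model = BloomForCausalLM.from_pretrained("bigscience/bloom-560m")

# First sub-config: (IA)^3, i.e. scaling vectors on the k/v attention matrices;
# intermediate_lora=False mirrors the JSON (the release default is True).
ia3 = IA3Config(intermediate_lora=False)

# Second sub-config: NICE invertible adapters only, with the bottleneck
# adapter block and the original layer norms switched off as in the JSON.
inv_only = PfeifferInvConfig(
    output_adapter=False,
    original_ln_before=False,
    original_ln_after=False,
)

model.add_adapter("oscar_ia3+inv_am", config=ConfigUnion(ia3, inv_only))
model.train_adapter("oscar_ia3+inv_am")  # freeze the base model, train only the adapter
```

The union keeps both methods orthogonal: the (IA)^3 vectors rescale attention activations in place, while the invertible adapters sit at the embedding layer, which is why the two blocks can be merged into one named adapter ("oscar_ia3+inv_am") and trained together.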