{
  "gemma": {
    "bias": 0.04835902899503708,
    "token_coeff": 0.767971899360421,
    "position_coeff": 2.004627879709005
  },
  "model_class": "Gemma2ForCausalLM"
}