{
  "architectures": [
    "CustomModel5"
  ],
  "auto_map": {
    "AutoConfig": "configuration_custom5.CustomConfig5",
    "AutoModelForCausalLM": "modeling_custom5.CustomModel5"
  },
  "coeffs": [
    1.0,
    -1.0
  ],
  "model_type": "custom5",
  "models": [
    "EleutherAI/pythia-160m",
    "EleutherAI/pythia-70m"
  ],
  "torch_dtype": "float16",
  "transformers_version": "4.34.0",
  "vocab_size": 50304
}