{
  "answer_token_id": 10,
  "architectures": [
    "RMTForReasoning"
  ],
  "auto_map": {
    "AutoConfig": "huggingface.RMTConfig",
    "AutoModel": "huggingface.RMTForReasoning"
  },
  "base_model_name": "HuggingFaceTB/SmolLM2-135M",
  "bos_token_id": 0,
  "eos_token_id": 0,
  "max_n_segments": 10,
  "memory_cell_cls": "MemoryCell",
  "model_type": "rmt",
  "num_mem_tokens": 32,
  "recurrent_wrapper_cls": "RecurrentWrapperNoSegmentationGenerate",
  "think_token_id": 8,
  "torch_dtype": "float32",
  "transformers_version": "4.54.1"
}