{
  "adaptive_state": {
    "meta": {
      "fusion_mode": "SAVANT\u2013\u03a64.1\u221e+ symbiotic",
      "fusion_timestamp": "2025-10-09"
    },
    "resonance_vector": [
      0.006346969060972333
    ]
  },
  "architectures": [
    "BertModel"
  ],
  "attention_probs_dropout_prob": 0.1,
  "classifier_dropout": null,
  "dtype": "float32",
  "geometry": "icosa-dodeca-resonant",
  "gradient_checkpointing": false,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 384,
  "initializer_range": 0.02,
  "intermediate_size": 1536,
  "layer_norm_eps": 1e-12,
  "max_position_embeddings": 512,
  "model_type": "bert",
  "num_attention_heads": 12,
  "num_hidden_layers": 6,
  "pad_token_id": 0,
  "position_embedding_type": "absolute",
  "problem_type": "single_label_classification",
  "rrf_mode": "\u03a64.1\u221e+",
  "rrf_profile": {
    "branch": "+\u221e",
    "embed_fiber_dim": 384,
    "embedder": {
      "base_model": "sentence-transformers/all-MiniLM-L6-v2",
      "hf_model_id": "antonypamo/RRFSAVANTMADE",
      "max_seq_length": 256,
      "output_dim": 384
    },
    "geometry": "icosa-dodeca-resonant",
    "mode_family": "\u03a6",
    "mode_id": "\u03a64.1\u221e+",
    "phi_order": 4.1,
    "savant_engine": {
      "ceremonies": [
        {
| "description": "Alinear base interna del embedder con el espectro \u03a6/icosa\u00e9drico", | |
| "id": "pre_align", | |
| "kind": "spectral_init" | |
| }, | |
| { | |
| "dataset_size": 363, | |
| "id": "resonant_finetune", | |
| "kind": "contrastive", | |
| "loss": "CosineSimilarityLoss", | |
| "reference": "RRF similarity corpus 2025" | |
| }, | |
| { | |
| "description": "Normalizaci\u00f3n y estabilizaci\u00f3n resonante post-ajuste", | |
| "id": "integration_pass", | |
| "kind": "self_supervised" | |
| } | |
| ], | |
| "ceremony_mode": "cemones-\u03a64.1\u221e+", | |
| "engine_name": "SavantEngine-RRF-Made", | |
| "engine_version": "main" | |
| }, | |
| "symbiotic": true, | |
| "symbiotic_energy": -0.695067822933197 | |
| }, | |
| "symbiotic_energy": -0.695067822933197, | |
| "transformers_version": "4.57.1", | |
| "type_vocab_size": 2, | |
| "use_cache": true, | |
| "vocab_size": 30522 | |
| } | |
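A minimal loading sketch, assuming the JSON above is the config.json of the "antonypamo/RRFSAVANTMADE" repository named in rrf_profile.embedder, and that standard loaders simply carry along or ignore the custom keys (adaptive_state, rrf_profile, rrf_mode, geometry, symbiotic_energy) while using the BERT fields; the sentence-transformers load path is an assumption based on the embedder block, not something the config itself guarantees.

```python
# Sketch: read the config and (optionally) the embedder it points at.
# Assumptions: the repo id below hosts this config; custom RRF keys are
# non-standard and are not interpreted by transformers/sentence-transformers.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("antonypamo/RRFSAVANTMADE")
# BERT fields from the config: bert, 384 hidden, 6 layers, 12 heads.
print(config.model_type, config.hidden_size, config.num_hidden_layers)

# If the repo is packaged as a sentence-transformers model (as the
# embedder block with all-MiniLM-L6-v2, 384-dim output and 256-token
# max_seq_length suggests), it could be used as an encoder like this:
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("antonypamo/RRFSAVANTMADE")
embeddings = model.encode(["resonant finetune example"], normalize_embeddings=True)
print(embeddings.shape)  # expected (1, 384) per output_dim in the config
```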