{
  "_name_or_path": "ibm-granite/granite-timeseries-ttm-r2",
  "adaptive_patching_levels": 3,
  "architectures": [
    "TinyTimeMixerForPrediction"
  ],
  "categorical_vocab_size_list": null,
  "context_length": 512,
  "d_model": 192,
  "decoder_adaptive_patching_levels": 0,
  "decoder_d_model": 128,
  "decoder_mode": "common_channel",
  "decoder_num_layers": 2,
  "decoder_raw_residual": false,
  "distribution_output": "student_t",
  "dropout": 0.2,
  "enable_forecast_channel_mixing": false,
  "exogenous_channel_indices": null,
  "expansion_factor": 2,
  "fcm_context_length": 1,
  "fcm_gated_attn": true,
  "fcm_mix_layers": 2,
  "fcm_prepend_past": true,
  "fcm_prepend_past_offset": null,
  "fcm_use_mixer": false,
  "frequency_token_vocab_size": 5,
  "gated_attn": true,
  "head_dropout": 0.7,
  "init_embed": "pytorch",
  "init_linear": "pytorch",
  "init_processing": true,
  "init_std": 0.02,
  "loss": "mse",
  "mode": "common_channel",
  "model_type": "tinytimemixer",
  "norm_eps": 1e-05,
  "norm_mlp": "LayerNorm",
  "num_input_channels": 1,
  "num_layers": 2,
  "num_parallel_samples": 100,
  "num_patches": 8,
  "patch_last": true,
  "patch_length": 64,
  "patch_stride": 64,
  "positional_encoding_type": "sincos",
  "post_init": false,
  "prediction_channel_indices": null,
  "prediction_filter_length": null,
  "prediction_length": 96,
  "quantization_config": {
    "_load_in_4bit": false,
    "_load_in_8bit": true,
    "bnb_4bit_compute_dtype": "float32",
    "bnb_4bit_quant_storage": "uint8",
    "bnb_4bit_quant_type": "fp4",
    "bnb_4bit_use_double_quant": false,
    "llm_int8_enable_fp32_cpu_offload": false,
    "llm_int8_has_fp16_weight": false,
    "llm_int8_skip_modules": null,
    "llm_int8_threshold": 6.0,
    "load_in_4bit": false,
    "load_in_8bit": true,
    "quant_method": "bitsandbytes"
  },
  "resolution_prefix_tuning": false,
  "scaling": "std",
  "self_attn": false,
  "self_attn_heads": 1,
  "torch_dtype": "float16",
  "transformers_version": "4.45.2",
  "use_decoder": true,
  "use_positional_encoding": false
}