Harryx2025 committed on
Commit
06f8e4a
·
verified ·
1 Parent(s): c085c89

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +5 -5
config.json CHANGED
@@ -1,11 +1,11 @@
1
  {
2
- "_name_or_path": "patchmoe",
3
  "architectures": [
4
- "PatchMoEForPrediction"
5
  ],
6
  "auto_map": {
7
- "AutoConfig": "configuration_patch_moe.PatchMoeConfig",
8
- "AutoModelForCausalLM": "modeling_patch_moe.PatchMoEForPrediction"
9
  },
10
  "disable_bias_linear": false,
11
  "do_base_forecast": false,
@@ -19,7 +19,7 @@
19
  "k_layernorm": false,
20
  "kv_channels": 64,
21
  "mask_pad_value": 255.0,
22
- "model_type": "patch_moe",
23
  "moe_expert_final_layernorm": true,
24
  "moe_ffn_hidden_size": 4096,
25
  "moe_router_enable_expert_bias": false,
 
1
  {
2
+ "_name_or_path": "FalconTST",
3
  "architectures": [
4
+ "FalconTSTForPrediction"
5
  ],
6
  "auto_map": {
7
+ "AutoConfig": "configuration_FalconTST.FalconTSTConfig",
8
+ "AutoModelForCausalLM": "modeling_FalconTST.FalconTSTForPrediction"
9
  },
10
  "disable_bias_linear": false,
11
  "do_base_forecast": false,
 
19
  "k_layernorm": false,
20
  "kv_channels": 64,
21
  "mask_pad_value": 255.0,
22
+ "model_type": "FalconTST",
23
  "moe_expert_final_layernorm": true,
24
  "moe_ffn_hidden_size": 4096,
25
  "moe_router_enable_expert_bias": false,