razmars committed on
Commit
18e05fe
·
verified ·
1 Parent(s): 1b82127

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +0 -3
config.json CHANGED
@@ -8,19 +8,16 @@
8
  "AutoModelForCausalLM": "modeling_super_linear.SuperLinearForCausalLM"
9
  },
10
  "auto_regressive": 1,
11
- "con": 0, # remove
12
  "d_model": 128,
13
  "dropout": 0.0,
14
  "fft_len": 5000,
15
  "freeze_experts": 1,
16
  "freq_experts": "mean_naive_1/4_1/6_1/7_1/8_1/12_1/14_1/16_1/21_1/24_1/28_1/30_1/32_1/36_1/42_1/48_1/52_1/56_1/60_1/72_1/84_1/90_1/96_1/120_1/144_1/168_1/180_1/224_1/252_1/288_1/336_1/365_1/504_1/672_1/1008_1/1440_1/2016_1/3600",
17
  "inf_pred_len": 96,
18
- "ker_len": 50, # remove
19
  "layer_type": "RLinear",
20
  "linear_checkpoints_dir": "checkpoints5",
21
  "linear_checkpoints_path": "/cs/azencot_fsas/MoE/",
22
  "load_linear": 0,
23
- "manual_moe": 0, # remove
24
  "max_horizon": 96,
25
  "misc_moe": 10,
26
  "mlp_gating": 0,
 
8
  "AutoModelForCausalLM": "modeling_super_linear.SuperLinearForCausalLM"
9
  },
10
  "auto_regressive": 1,
 
11
  "d_model": 128,
12
  "dropout": 0.0,
13
  "fft_len": 5000,
14
  "freeze_experts": 1,
15
  "freq_experts": "mean_naive_1/4_1/6_1/7_1/8_1/12_1/14_1/16_1/21_1/24_1/28_1/30_1/32_1/36_1/42_1/48_1/52_1/56_1/60_1/72_1/84_1/90_1/96_1/120_1/144_1/168_1/180_1/224_1/252_1/288_1/336_1/365_1/504_1/672_1/1008_1/1440_1/2016_1/3600",
16
  "inf_pred_len": 96,
 
17
  "layer_type": "RLinear",
18
  "linear_checkpoints_dir": "checkpoints5",
19
  "linear_checkpoints_path": "/cs/azencot_fsas/MoE/",
20
  "load_linear": 0,
 
21
  "max_horizon": 96,
22
  "misc_moe": 10,
23
  "mlp_gating": 0,