jingyq1 committed on
Commit
7712091
·
verified ·
1 Parent(s): 5dbc15a

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +5 -0
config.json CHANGED
@@ -46,8 +46,13 @@
46
  "intermediate_size": 5632,
47
  "max_position_embeddings": 16384,
48
  "model_type": "llama",
 
49
  "num_attention_heads": 16,
50
  "num_hidden_layers": 24,
 
 
 
 
51
  "num_key_value_heads": 16,
52
  "torch_dtype": "bfloat16",
53
  "vocab_size": 102400
 
46
  "intermediate_size": 5632,
47
  "max_position_embeddings": 16384,
48
  "model_type": "llama",
49
+ "num_SmoothPatchBlender": 12,
50
  "num_attention_heads": 16,
51
  "num_hidden_layers": 24,
52
+ "num_hop": [
53
+ 1,
54
+ 2
55
+ ],
56
  "num_key_value_heads": 16,
57
  "torch_dtype": "bfloat16",
58
  "vocab_size": 102400