Axel Delaval committed on
Commit
e589a8c
·
1 Parent(s): 71c1d87

changing context size

Browse files
Files changed (1) hide show
  1. mlc-chat-config.json +4 -4
mlc-chat-config.json CHANGED
@@ -14,8 +14,8 @@
14
  "rope_theta": 1000000,
15
  "vocab_size": 151936,
16
  "tie_word_embeddings": true,
17
- "context_window_size": 40960,
18
- "prefill_chunk_size": 2048,
19
  "tensor_parallel_shards": 1,
20
  "head_dim": 128,
21
  "dtype": "float16",
@@ -23,9 +23,9 @@
23
  "weight_block_size": null
24
  },
25
  "vocab_size": 151936,
26
- "context_window_size": 40960,
27
  "sliding_window_size": -1,
28
- "prefill_chunk_size": 2048,
29
  "attention_sink_size": -1,
30
  "tensor_parallel_shards": 1,
31
  "pipeline_parallel_stages": 1,
 
14
  "rope_theta": 1000000,
15
  "vocab_size": 151936,
16
  "tie_word_embeddings": true,
17
+ "context_window_size": 4096,
18
+ "prefill_chunk_size": 512,
19
  "tensor_parallel_shards": 1,
20
  "head_dim": 128,
21
  "dtype": "float16",
 
23
  "weight_block_size": null
24
  },
25
  "vocab_size": 151936,
26
+ "context_window_size": 4096,
27
  "sliding_window_size": -1,
28
+ "prefill_chunk_size": 512,
29
  "attention_sink_size": -1,
30
  "tensor_parallel_shards": 1,
31
  "pipeline_parallel_stages": 1,