felixleb committed on
Commit
97ef4ef
·
verified ·
1 Parent(s): 032ca99

Chess Challenge submission by felixleb

Browse files
Files changed (4) hide show
  1. README.md +5 -5
  2. config.json +5 -6
  3. model.safetensors +2 -2
  4. vocab.json +0 -0
README.md CHANGED
@@ -13,14 +13,14 @@ Chess model submitted to the LLM Course Chess Challenge.
13
 
14
  ## Submission Info
15
 
16
- - **Submitted by**: [Eithannak](https://huggingface.co/Eithannak)
17
- - **Parameters**: 997,872
18
  - **Organization**: LLM-course
19
 
20
  ## Model Details
21
 
22
  - **Architecture**: Chess Transformer (GPT-style)
23
- - **Vocab size**: 5581
24
- - **Embedding dim**: 96
25
- - **Layers**: 5
26
  - **Heads**: 4
 
13
 
14
  ## Submission Info
15
 
16
+ - **Submitted by**: [felixleb](https://huggingface.co/felixleb)
17
+ - **Parameters**: 909,824
18
  - **Organization**: LLM-course
19
 
20
  ## Model Details
21
 
22
  - **Architecture**: Chess Transformer (GPT-style)
23
+ - **Vocab size**: 1682
24
+ - **Embedding dim**: 128
25
+ - **Layers**: 4
26
  - **Heads**: 4
config.json CHANGED
@@ -9,13 +9,12 @@
9
  "layer_norm_epsilon": 1e-05,
10
  "model_type": "chess_transformer",
11
  "n_ctx": 256,
12
- "n_embd": 96,
13
  "n_head": 4,
14
- "n_inner": 192,
15
- "n_layer": 5,
16
  "pad_token_id": 0,
17
- "rope_theta": 10000.0,
18
  "tie_weights": true,
19
- "transformers_version": "4.57.5",
20
- "vocab_size": 5581
21
  }
 
9
  "layer_norm_epsilon": 1e-05,
10
  "model_type": "chess_transformer",
11
  "n_ctx": 256,
12
+ "n_embd": 128,
13
  "n_head": 4,
14
+ "n_inner": 384,
15
+ "n_layer": 4,
16
  "pad_token_id": 0,
 
17
  "tie_weights": true,
18
+ "transformers_version": "4.57.3",
19
+ "vocab_size": 1682
20
  }
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:52ecaf7b03785f2be92583d2896a89c8898b29e7b739642bb9a4bc8df515798d
3
- size 3995496
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:afc3a5896b197224752804e7a8f89c3d406972db8958da5af3ac0035bc9a23e3
3
+ size 3643696
vocab.json CHANGED
The diff for this file is too large to render. See raw diff