bboeun committed on
Commit
cb6b246
·
verified ·
1 Parent(s): a4631ba

bboeun/klue-bert-base-mrc-ko

Browse files
adapter_config.json CHANGED
@@ -26,8 +26,8 @@
26
  "revision": null,
27
  "target_modules": [
28
  "attention.self.key",
29
- "attention.self.value",
30
- "attention.self.query"
31
  ],
32
  "task_type": "CAUSAL_LM",
33
  "trainable_token_indices": null,
 
26
  "revision": null,
27
  "target_modules": [
28
  "attention.self.key",
29
+ "attention.self.query",
30
+ "attention.self.value"
31
  ],
32
  "task_type": "CAUSAL_LM",
33
  "trainable_token_indices": null,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:6f1e83ce58d4b594627b4c3f043550e9c967af2a4b48752133c3acc8ebf7c7e1
3
  size 3549056
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4d6fd5909ef1b7ac9ec3545a0a4d6d4027e59f45c3d771a15d9490604618dffd
3
  size 3549056
tokenizer.json CHANGED
@@ -1,7 +1,21 @@
1
  {
2
  "version": "1.0",
3
- "truncation": null,
4
- "padding": null,
 
 
 
 
 
 
 
 
 
 
 
 
 
 
5
  "added_tokens": [
6
  {
7
  "id": 0,
 
1
  {
2
  "version": "1.0",
3
+ "truncation": {
4
+ "direction": "Right",
5
+ "max_length": 512,
6
+ "strategy": "LongestFirst",
7
+ "stride": 0
8
+ },
9
+ "padding": {
10
+ "strategy": {
11
+ "Fixed": 512
12
+ },
13
+ "direction": "Right",
14
+ "pad_to_multiple_of": null,
15
+ "pad_id": 0,
16
+ "pad_type_id": 0,
17
+ "pad_token": "[PAD]"
18
+ },
19
  "added_tokens": [
20
  {
21
  "id": 0,
training_args.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:b9e9a25e5f6f00a377da311700027f6707f4d6c3874812c61350bec59380e6e3
3
  size 5713
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9e8f99e66592be41a943e09eb276abe2aaf36b2796c2855933dc26304a6cd83a
3
  size 5713