adityaprakhar committed
Commit bc606d8 · verified · 1 Parent(s): c03fafa

Training in progress, step 500

config.json CHANGED
@@ -1,12 +1,15 @@
 {
-  "_name_or_path": "microsoft/layoutlm-base-cased",
+  "_name_or_path": "hslee/layoutlmv3",
   "architectures": [
     "LayoutLMForTokenClassification"
   ],
   "attention_probs_dropout_prob": 0.1,
   "bos_token_id": 0,
+  "classifier_dropout": null,
+  "coordinate_size": 128,
   "eos_token_id": 2,
-  "gradient_checkpointing": false,
+  "has_relative_attention_bias": true,
+  "has_spatial_attention_bias": true,
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.1,
   "hidden_size": 768,
@@ -29,6 +32,7 @@
     "15": "telephone"
   },
   "initializer_range": 0.02,
+  "input_size": 224,
   "intermediate_size": 3072,
   "label2id": {
     "SBN": 0,
@@ -51,15 +55,24 @@
   "layer_norm_eps": 1e-05,
   "max_2d_position_embeddings": 1024,
   "max_position_embeddings": 514,
+  "max_rel_2d_pos": 256,
+  "max_rel_pos": 128,
   "model_type": "layoutlm",
   "num_attention_heads": 12,
+  "num_channels": 3,
   "num_hidden_layers": 12,
   "pad_token_id": 1,
+  "patch_size": 16,
   "position_embedding_type": "absolute",
-  "tokenizer_class": "RobertaTokenizer",
+  "rel_2d_pos_bins": 64,
+  "rel_pos_bins": 32,
+  "second_input_size": 112,
+  "shape_size": 128,
+  "text_embed": true,
   "torch_dtype": "float32",
   "transformers_version": "4.39.0.dev0",
   "type_vocab_size": 1,
   "use_cache": true,
+  "visual_embed": true,
   "vocab_size": 50265
 }
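Note on this config change: the new "_name_or_path" points at a LayoutLMv3-style checkpoint, and the added keys (relative/spatial attention bias, patch and input sizes, visual_embed) match fields from LayoutLMv3Config, but "model_type" stays "layoutlm", so transformers will still instantiate a LayoutLMConfig and carry the extra keys along as plain attributes. A minimal sketch of checking this, assuming a local checkpoint directory (the path is a placeholder, not part of this commit):

# Minimal sketch: load the updated config and inspect the new keys.
# "./checkpoint-500" is a hypothetical local path, not part of this commit.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("./checkpoint-500")
print(config.model_type)                   # "layoutlm" (unchanged by the diff)
print(config.has_relative_attention_bias)  # True, added in this commit
print(config.visual_embed)                 # True, added in this commit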
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:c9dc11f0b5ecdc79109b265f6f1316c450a74dd9de5dec9ac6083699e9ee576d
+oid sha256:1871228504668cf5108735be45764e196f56c7e5cd19230e44423bd6202ac0a6
 size 511239464
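The weights file itself lives in Git LFS, so the diff only shows the pointer: "oid" is the SHA-256 of the real payload and "size" is its byte count (unchanged here, since only the tensor values moved). A sketch of checking a downloaded copy against the pointer; the local path is an assumption:

# Sketch: verify a downloaded weights file against the LFS pointer's oid.
import hashlib

def sha256_of(path: str, chunk: int = 1 << 20) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        while block := f.read(chunk):
            h.update(block)
    return h.hexdigest()

expected = "1871228504668cf5108735be45764e196f56c7e5cd19230e44423bd6202ac0a6"
assert sha256_of("model.safetensors") == expected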
runs/Mar15_05-42-24_91b63dc5e9e0/events.out.tfevents.1710481345.91b63dc5e9e0.375.1 CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:42a6eeed6d4b46bfc39578233d693b2e5ef10ca27c083be4af670864a444d2d8
-size 10663
+oid sha256:0f0a3e02409c81acfddcccc9086618a4f2f673b87f1741d695566e771cd63b79
+size 12433
runs/Mar15_06-02-51_91b63dc5e9e0/events.out.tfevents.1710482572.91b63dc5e9e0.375.2 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:974108d30477175a98be110f59825c4ce50f646831cd3ac19e044521f9b6f480
+size 8480
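The tfevents files are TensorBoard event logs: the first run's log grew (10663 to 12433 bytes) and a second run directory was added. A sketch of reading the scalars back, assuming tensorboard is installed; the "train/loss" tag name is a guess, not taken from this commit:

# Sketch: read scalar metrics from the newly added TensorBoard run.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

acc = EventAccumulator("runs/Mar15_06-02-51_91b63dc5e9e0")
acc.Reload()
print(acc.Tags()["scalars"])             # list the tags actually logged
for event in acc.Scalars("train/loss"):  # tag name is an assumption
    print(event.step, event.value)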
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:75e0aa4d7ef5fa7f679513fd9693ea2003c2dbc8d1f3a3f1975eea1da8265893
+oid sha256:50bd49b680208a517f6ca94219822bc12576bd9f1963abc882a1fdcf6cfc9b84
 size 4856
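training_args.bin is the pickled TrainingArguments object the HF Trainer writes next to each checkpoint; the size is unchanged because only a few field values differ between runs. A sketch of inspecting it, assuming the file comes from a trusted source (torch.load unpickles arbitrary code):

# Sketch: unpickle the saved TrainingArguments (trusted file assumed).
import torch

args = torch.load("training_args.bin", weights_only=False)
print(args.output_dir)
print(args.save_steps)  # plausibly 500, matching the "step 500" commit message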