Commit c837566 (verified) by Sayan01
Parent(s): 15aadb1

Training in progress, epoch 1

config.json CHANGED
@@ -13,8 +13,8 @@
   "layer_norm_eps": 1e-12,
   "max_position_embeddings": 512,
   "model_type": "bert",
-  "num_attention_heads": 12,
-  "num_hidden_layers": 12,
+  "num_attention_heads": 6,
+  "num_hidden_layers": 6,
   "pad_token_id": 0,
   "position_embedding_type": "absolute",
   "problem_type": "single_label_classification",
logs/events.out.tfevents.1727750529.spartan-gpgpu147.hpc.unimelb.edu.au.135992.0 CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:7b746a593267426b3f0684c1a2a2af43bf96e989e92444270fc640c28c1c4c03
-size 10683
+oid sha256:654e1cbccabc7b5c819d4b6fc9899fa765c5ab6ddad9a36386fd368be59b0ec7
+size 11637
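These events.out.tfevents entries are Git LFS pointer files rather than the logs themselves: each three-line stub records the LFS spec version, the SHA-256 of the stored blob, and its size in bytes, so a changed oid/size pair means the underlying TensorBoard log was rewritten.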
logs/events.out.tfevents.1727758170.spartan-gpgpu147.hpc.unimelb.edu.au.135992.1 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:937b1021168cee08fac8297fb0045745c7d43902e6ba7a3ac906b20ce3a3f3a3
+size 477
logs/events.out.tfevents.1727763312.spartan-gpgpu147.hpc.unimelb.edu.au.153246.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3d174df840f02d18932a4d59d42b45b6d58bf0988b5f5f2f97a8af55a6137b69
+size 5881
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:b2b285e6bcfeed09de6eece32508aa7e9f2d45d4e50d27bb6f6f6a20e4ced2f6
-size 133466376
+oid sha256:4a6208ad975a80f3f3049ee09772821acd6f7f11648e1e6dd81b1de1cda7dfe3
+size 90867952
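The checkpoint shrinks by 42,598,424 bytes (133,466,376 → 90,867,952), consistent with dropping six encoder layers while the embedding weights stay put. A quick sanity check, assuming a local copy of the checkpoint (the path is hypothetical):

```python
# Hypothetical path; compares parameter count against the LFS size above.
from transformers import BertForSequenceClassification

model = BertForSequenceClassification.from_pretrained("path/to/checkpoint")
n_params = sum(p.numel() for p in model.parameters())
# At 4 bytes per float32 weight, n_params * 4 should land near the
# 90,867,952-byte model.safetensors reported in this commit.
print(f"{n_params:,} params ≈ {n_params * 4 / 1e6:.1f} MB in fp32")
```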
tokenizer.json CHANGED
@@ -1,21 +1,7 @@
 {
   "version": "1.0",
-  "truncation": {
-    "direction": "Right",
-    "max_length": 512,
-    "strategy": "LongestFirst",
-    "stride": 0
-  },
-  "padding": {
-    "strategy": {
-      "Fixed": 512
-    },
-    "direction": "Right",
-    "pad_to_multiple_of": null,
-    "pad_id": 0,
-    "pad_type_id": 0,
-    "pad_token": "[PAD]"
-  },
+  "truncation": null,
+  "padding": null,
   "added_tokens": [
     {
       "id": 0,