ahmedbasemdev committed
Commit b3ccc6a · verified · 1 Parent(s): 210d9a1

Training in progress, epoch 2

config.json ADDED
@@ -0,0 +1,84 @@
+ {
+   "_name_or_path": "ntu-spml/distilhubert",
+   "activation_dropout": 0.1,
+   "apply_spec_augment": false,
+   "architectures": [
+     "HubertForSequenceClassification"
+   ],
+   "attention_dropout": 0.1,
+   "bos_token_id": 1,
+   "classifier_proj_size": 256,
+   "conv_bias": false,
+   "conv_dim": [
+     512,
+     512,
+     512,
+     512,
+     512,
+     512,
+     512
+   ],
+   "conv_kernel": [
+     10,
+     3,
+     3,
+     3,
+     3,
+     2,
+     2
+   ],
+   "conv_stride": [
+     5,
+     2,
+     2,
+     2,
+     2,
+     2,
+     2
+   ],
+   "ctc_loss_reduction": "sum",
+   "ctc_zero_infinity": false,
+   "do_stable_layer_norm": false,
+   "eos_token_id": 2,
+   "feat_extract_activation": "gelu",
+   "feat_extract_norm": "group",
+   "feat_proj_dropout": 0.0,
+   "feat_proj_layer_norm": false,
+   "final_dropout": 0.0,
+   "hidden_act": "gelu",
+   "hidden_dropout": 0.1,
+   "hidden_size": 768,
+   "id2label": {
+     "0": "alexandria",
+     "1": "bedoui",
+     "2": "UpperEgyption",
+     "3": "LowerEgyption"
+   },
+   "initializer_range": 0.02,
+   "intermediate_size": 3072,
+   "label2id": {
+     "LowerEgyption": 3,
+     "UpperEgyption": 2,
+     "alexandria": 0,
+     "bedoui": 1
+   },
+   "layer_norm_eps": 1e-05,
+   "layerdrop": 0.0,
+   "mask_feature_length": 10,
+   "mask_feature_min_masks": 0,
+   "mask_feature_prob": 0.0,
+   "mask_time_length": 10,
+   "mask_time_min_masks": 2,
+   "mask_time_prob": 0.05,
+   "model_type": "hubert",
+   "num_attention_heads": 12,
+   "num_conv_pos_embedding_groups": 16,
+   "num_conv_pos_embeddings": 128,
+   "num_feat_extract_layers": 7,
+   "num_hidden_layers": 2,
+   "pad_token_id": 0,
+   "torch_dtype": "float32",
+   "transformers_version": "4.44.2",
+   "use_weighted_layer_sum": false,
+   "vocab_size": 32
+ }
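
The config above describes a distilled HuBERT encoder (2 hidden layers, hidden size 768) with a sequence-classification head over four Egyptian dialect labels. A minimal sketch of loading this checkpoint for inference follows; the repo id is a placeholder, since this commit view does not show the repository name, and the dummy waveform stands in for real 16 kHz audio:

import numpy as np
import torch
from transformers import AutoFeatureExtractor, AutoModelForAudioClassification

repo_id = "ahmedbasemdev/<repo-name>"  # hypothetical; substitute the actual repo id

feature_extractor = AutoFeatureExtractor.from_pretrained(repo_id)
model = AutoModelForAudioClassification.from_pretrained(repo_id)

# One second of silence as a stand-in for a mono waveform sampled at 16 kHz,
# matching the preprocessor config committed below.
waveform = np.zeros(16000, dtype=np.float32)

inputs = feature_extractor(waveform, sampling_rate=16000, return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits
print(model.config.id2label[int(logits.argmax(-1))])
# -> one of: alexandria, bedoui, UpperEgyption, LowerEgyption
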
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:120ed79a3dca1b23820a25a2f80c46812f1ce528d5b91776e2b601c745f0eef0
+ size 94765560
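
model.safetensors is committed as a Git LFS pointer; the sha256 and byte size above identify the actual weight file. A sketch of fetching it directly with huggingface_hub, with the repo id again a placeholder:

from huggingface_hub import hf_hub_download

path = hf_hub_download(
    repo_id="ahmedbasemdev/<repo-name>",  # hypothetical; not shown in this commit view
    filename="model.safetensors",
)
print(path)  # local cache path; the file should be 94765560 bytes, matching the pointer
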
preprocessor_config.json ADDED
@@ -0,0 +1,9 @@
+ {
+   "do_normalize": true,
+   "feature_extractor_type": "Wav2Vec2FeatureExtractor",
+   "feature_size": 1,
+   "padding_side": "right",
+   "padding_value": 0,
+   "return_attention_mask": true,
+   "sampling_rate": 16000
+ }
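
At load time this file resolves to a Wav2Vec2FeatureExtractor that expects mono 16 kHz input. A sketch of constructing the equivalent extractor directly from the values above:

from transformers import Wav2Vec2FeatureExtractor

feature_extractor = Wav2Vec2FeatureExtractor(
    feature_size=1,
    sampling_rate=16000,
    padding_value=0.0,
    do_normalize=True,
    return_attention_mask=True,
)
# padding_side="right" is the default, matching the config; audio at any other
# sample rate should be resampled to 16 kHz before calling the extractor.
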
runs/Sep26_15-44-51_eb374c82efae/events.out.tfevents.1727365515.eb374c82efae.30.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c3dbc1ef49b3142b0aea23ad203dcc98b138ce996f82aadc27019c9ab7298a28
+ size 6358
runs/Sep26_15-46-57_eb374c82efae/events.out.tfevents.1727365619.eb374c82efae.30.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7142d2a4a4a45dbebc0854d4d41cad8eb97be5f2715d342a94a75aea93406e76
+ size 5946
runs/Sep26_15-47-39_eb374c82efae/events.out.tfevents.1727365660.eb374c82efae.30.2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e589aa95f34db3b1a781552f14a1997c56a9a5395e5a84430227731b84adc53f
+ size 5946
runs/Sep26_15-53-32_eb374c82efae/events.out.tfevents.1727366039.eb374c82efae.327.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:207f61a45e17b6b6c343340d141652244a8d8952d97f4be3ba58da73f6e38a41
+ size 4184
runs/Sep26_16-09-12_eb374c82efae/events.out.tfevents.1727366957.eb374c82efae.543.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:256b58f30d2487f32d017fba3bc9b9a7d4efb9dfcb31b40eb36d6b43679c4d2a
+ size 13449
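
The runs/ directory holds TensorBoard event files logged by the Trainer across several restarts on the same host (eb374c82efae). A sketch of reading one of them with TensorBoard's event-file API, assuming the repo has been cloned locally; the scalar tag names are not shown in this commit, so they are discovered at runtime:

from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

ea = EventAccumulator("runs/Sep26_16-09-12_eb374c82efae")
ea.Reload()
tags = ea.Tags()["scalars"]
print(tags)  # e.g. training loss / eval metrics logged during training
for event in ea.Scalars(tags[0]):
    print(event.step, event.value)
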
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:02880c16d95f6e25ff0a37f1e5711768af55b6c931b5b984383892221818b6e0
+ size 5240
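
training_args.bin is the pickled TrainingArguments object the Trainer saves alongside checkpoints. A sketch of inspecting it, assuming a transformers install compatible with 4.44.2 (the version recorded in config.json):

import torch

args = torch.load("training_args.bin", weights_only=False)  # a pickled object, not tensors
print(args.num_train_epochs, args.learning_rate, args.per_device_train_batch_size)
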