Yucheng Yin committed on
Commit
518fb5c
·
1 Parent(s): a4fedc2

add model weights

Browse files
README.md ADDED
@@ -0,0 +1,55 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ tags:
3
+ - generated_from_trainer
4
+ metrics:
5
+ - accuracy
6
+ model-index:
7
+ - name: test-mlm-bert-base-uncased-pcap-3M-mixed
8
+ results: []
9
+ ---
10
+
11
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
12
+ should probably proofread and complete it, then remove this comment. -->
13
+
14
+ # test-mlm-bert-base-uncased-pcap-3M-mixed
15
+
16
+ This model is a fine-tuned version of [](https://huggingface.co/) on an unknown dataset.
17
+ It achieves the following results on the evaluation set:
18
+ - Loss: 0.8973
19
+ - Accuracy: 0.7592
20
+
21
+ ## Model description
22
+
23
+ More information needed
24
+
25
+ ## Intended uses & limitations
26
+
27
+ More information needed
28
+
29
+ ## Training and evaluation data
30
+
31
+ More information needed
32
+
33
+ ## Training procedure
34
+
35
+ ### Training hyperparameters
36
+
37
+ The following hyperparameters were used during training:
38
+ - learning_rate: 5e-05
39
+ - train_batch_size: 32
40
+ - eval_batch_size: 32
41
+ - seed: 42
42
+ - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
43
+ - lr_scheduler_type: linear
44
+ - num_epochs: 3.0
45
+
46
+ ### Training results
47
+
48
+
49
+
50
+ ### Framework versions
51
+
52
+ - Transformers 4.31.0.dev0
53
+ - Pytorch 2.0.1+cu117
54
+ - Datasets 2.13.0
55
+ - Tokenizers 0.13.3
all_results.json ADDED
@@ -0,0 +1,15 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "epoch": 3.0,
3
+ "eval_accuracy": 0.7592383351783283,
4
+ "eval_loss": 0.8973440527915955,
5
+ "eval_runtime": 160.5876,
6
+ "eval_samples": 34257,
7
+ "eval_samples_per_second": 213.323,
8
+ "eval_steps_per_second": 6.669,
9
+ "perplexity": 2.4530792025535115,
10
+ "train_loss": 0.46753893842028726,
11
+ "train_runtime": 20527.2727,
12
+ "train_samples": 650883,
13
+ "train_samples_per_second": 95.125,
14
+ "train_steps_per_second": 2.973
15
+ }
config.json ADDED
@@ -0,0 +1,24 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "architectures": [
3
+ "BertForMaskedLM"
4
+ ],
5
+ "attention_probs_dropout_prob": 0.1,
6
+ "classifier_dropout": null,
7
+ "hidden_act": "gelu",
8
+ "hidden_dropout_prob": 0.1,
9
+ "hidden_size": 768,
10
+ "initializer_range": 0.02,
11
+ "intermediate_size": 3072,
12
+ "layer_norm_eps": 1e-12,
13
+ "max_position_embeddings": 512,
14
+ "model_type": "bert",
15
+ "num_attention_heads": 12,
16
+ "num_hidden_layers": 12,
17
+ "pad_token_id": 0,
18
+ "position_embedding_type": "absolute",
19
+ "torch_dtype": "float32",
20
+ "transformers_version": "4.31.0.dev0",
21
+ "type_vocab_size": 2,
22
+ "use_cache": true,
23
+ "vocab_size": 30522
24
+ }
eval_results.json ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "epoch": 3.0,
3
+ "eval_accuracy": 0.7592383351783283,
4
+ "eval_loss": 0.8973440527915955,
5
+ "eval_runtime": 160.5876,
6
+ "eval_samples": 34257,
7
+ "eval_samples_per_second": 213.323,
8
+ "eval_steps_per_second": 6.669,
9
+ "perplexity": 2.4530792025535115
10
+ }
generation_config.json ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ {
2
+ "_from_model_config": true,
3
+ "pad_token_id": 0,
4
+ "transformers_version": "4.31.0.dev0"
5
+ }
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1d7138d950153da9f5ae3062a942a1427ba9f85559f511823190e3500cbb2b38
3
+ size 438130617
special_tokens_map.json ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ {
2
+ "cls_token": "[CLS]",
3
+ "mask_token": "[MASK]",
4
+ "pad_token": "[PAD]",
5
+ "sep_token": "[SEP]",
6
+ "unk_token": "[UNK]"
7
+ }
tokenizer_config.json ADDED
@@ -0,0 +1,15 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "clean_up_tokenization_spaces": true,
3
+ "cls_token": "[CLS]",
4
+ "do_basic_tokenize": true,
5
+ "do_lower_case": true,
6
+ "mask_token": "[MASK]",
7
+ "model_max_length": 512,
8
+ "never_split": null,
9
+ "pad_token": "[PAD]",
10
+ "sep_token": "[SEP]",
11
+ "strip_accents": null,
12
+ "tokenize_chinese_chars": true,
13
+ "tokenizer_class": "BertTokenizer",
14
+ "unk_token": "[UNK]"
15
+ }
train_results.json ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "epoch": 3.0,
3
+ "train_loss": 0.46753893842028726,
4
+ "train_runtime": 20527.2727,
5
+ "train_samples": 650883,
6
+ "train_samples_per_second": 95.125,
7
+ "train_steps_per_second": 2.973
8
+ }
trainer_state.json ADDED
@@ -0,0 +1,757 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "best_metric": null,
3
+ "best_model_checkpoint": null,
4
+ "epoch": 3.0,
5
+ "global_step": 61023,
6
+ "is_hyper_param_search": false,
7
+ "is_local_process_zero": true,
8
+ "is_world_process_zero": true,
9
+ "log_history": [
10
+ {
11
+ "epoch": 0.02,
12
+ "learning_rate": 4.9590318404536e-05,
13
+ "loss": 1.7769,
14
+ "step": 500
15
+ },
16
+ {
17
+ "epoch": 0.05,
18
+ "learning_rate": 4.9180636809071987e-05,
19
+ "loss": 0.7561,
20
+ "step": 1000
21
+ },
22
+ {
23
+ "epoch": 0.07,
24
+ "learning_rate": 4.877095521360799e-05,
25
+ "loss": 0.694,
26
+ "step": 1500
27
+ },
28
+ {
29
+ "epoch": 0.1,
30
+ "learning_rate": 4.8361273618143984e-05,
31
+ "loss": 0.6611,
32
+ "step": 2000
33
+ },
34
+ {
35
+ "epoch": 0.12,
36
+ "learning_rate": 4.795159202267997e-05,
37
+ "loss": 0.6388,
38
+ "step": 2500
39
+ },
40
+ {
41
+ "epoch": 0.15,
42
+ "learning_rate": 4.754191042721597e-05,
43
+ "loss": 0.6214,
44
+ "step": 3000
45
+ },
46
+ {
47
+ "epoch": 0.17,
48
+ "learning_rate": 4.7132228831751964e-05,
49
+ "loss": 0.6166,
50
+ "step": 3500
51
+ },
52
+ {
53
+ "epoch": 0.2,
54
+ "learning_rate": 4.672254723628796e-05,
55
+ "loss": 0.615,
56
+ "step": 4000
57
+ },
58
+ {
59
+ "epoch": 0.22,
60
+ "learning_rate": 4.6312865640823955e-05,
61
+ "loss": 0.6017,
62
+ "step": 4500
63
+ },
64
+ {
65
+ "epoch": 0.25,
66
+ "learning_rate": 4.590318404535995e-05,
67
+ "loss": 0.596,
68
+ "step": 5000
69
+ },
70
+ {
71
+ "epoch": 0.27,
72
+ "learning_rate": 4.549350244989594e-05,
73
+ "loss": 0.592,
74
+ "step": 5500
75
+ },
76
+ {
77
+ "epoch": 0.29,
78
+ "learning_rate": 4.5083820854431934e-05,
79
+ "loss": 0.5848,
80
+ "step": 6000
81
+ },
82
+ {
83
+ "epoch": 0.32,
84
+ "learning_rate": 4.4674139258967936e-05,
85
+ "loss": 0.581,
86
+ "step": 6500
87
+ },
88
+ {
89
+ "epoch": 0.34,
90
+ "learning_rate": 4.4264457663503925e-05,
91
+ "loss": 0.5779,
92
+ "step": 7000
93
+ },
94
+ {
95
+ "epoch": 0.37,
96
+ "learning_rate": 4.385477606803992e-05,
97
+ "loss": 0.5692,
98
+ "step": 7500
99
+ },
100
+ {
101
+ "epoch": 0.39,
102
+ "learning_rate": 4.3445094472575916e-05,
103
+ "loss": 0.5604,
104
+ "step": 8000
105
+ },
106
+ {
107
+ "epoch": 0.42,
108
+ "learning_rate": 4.303541287711191e-05,
109
+ "loss": 0.5536,
110
+ "step": 8500
111
+ },
112
+ {
113
+ "epoch": 0.44,
114
+ "learning_rate": 4.262573128164791e-05,
115
+ "loss": 0.5418,
116
+ "step": 9000
117
+ },
118
+ {
119
+ "epoch": 0.47,
120
+ "learning_rate": 4.22160496861839e-05,
121
+ "loss": 0.5366,
122
+ "step": 9500
123
+ },
124
+ {
125
+ "epoch": 0.49,
126
+ "learning_rate": 4.180636809071989e-05,
127
+ "loss": 0.526,
128
+ "step": 10000
129
+ },
130
+ {
131
+ "epoch": 0.52,
132
+ "learning_rate": 4.1396686495255886e-05,
133
+ "loss": 0.522,
134
+ "step": 10500
135
+ },
136
+ {
137
+ "epoch": 0.54,
138
+ "learning_rate": 4.098700489979188e-05,
139
+ "loss": 0.5135,
140
+ "step": 11000
141
+ },
142
+ {
143
+ "epoch": 0.57,
144
+ "learning_rate": 4.057732330432788e-05,
145
+ "loss": 0.5078,
146
+ "step": 11500
147
+ },
148
+ {
149
+ "epoch": 0.59,
150
+ "learning_rate": 4.016764170886387e-05,
151
+ "loss": 0.5082,
152
+ "step": 12000
153
+ },
154
+ {
155
+ "epoch": 0.61,
156
+ "learning_rate": 3.975796011339987e-05,
157
+ "loss": 0.4986,
158
+ "step": 12500
159
+ },
160
+ {
161
+ "epoch": 0.64,
162
+ "learning_rate": 3.9348278517935864e-05,
163
+ "loss": 0.4907,
164
+ "step": 13000
165
+ },
166
+ {
167
+ "epoch": 0.66,
168
+ "learning_rate": 3.893859692247186e-05,
169
+ "loss": 0.4896,
170
+ "step": 13500
171
+ },
172
+ {
173
+ "epoch": 0.69,
174
+ "learning_rate": 3.8528915327007854e-05,
175
+ "loss": 0.4836,
176
+ "step": 14000
177
+ },
178
+ {
179
+ "epoch": 0.71,
180
+ "learning_rate": 3.811923373154384e-05,
181
+ "loss": 0.4822,
182
+ "step": 14500
183
+ },
184
+ {
185
+ "epoch": 0.74,
186
+ "learning_rate": 3.770955213607984e-05,
187
+ "loss": 0.4734,
188
+ "step": 15000
189
+ },
190
+ {
191
+ "epoch": 0.76,
192
+ "learning_rate": 3.7299870540615834e-05,
193
+ "loss": 0.4733,
194
+ "step": 15500
195
+ },
196
+ {
197
+ "epoch": 0.79,
198
+ "learning_rate": 3.689018894515183e-05,
199
+ "loss": 0.4721,
200
+ "step": 16000
201
+ },
202
+ {
203
+ "epoch": 0.81,
204
+ "learning_rate": 3.6480507349687825e-05,
205
+ "loss": 0.4689,
206
+ "step": 16500
207
+ },
208
+ {
209
+ "epoch": 0.84,
210
+ "learning_rate": 3.607082575422382e-05,
211
+ "loss": 0.4694,
212
+ "step": 17000
213
+ },
214
+ {
215
+ "epoch": 0.86,
216
+ "learning_rate": 3.5661144158759816e-05,
217
+ "loss": 0.4656,
218
+ "step": 17500
219
+ },
220
+ {
221
+ "epoch": 0.88,
222
+ "learning_rate": 3.5251462563295804e-05,
223
+ "loss": 0.4663,
224
+ "step": 18000
225
+ },
226
+ {
227
+ "epoch": 0.91,
228
+ "learning_rate": 3.484178096783181e-05,
229
+ "loss": 0.4671,
230
+ "step": 18500
231
+ },
232
+ {
233
+ "epoch": 0.93,
234
+ "learning_rate": 3.4432099372367795e-05,
235
+ "loss": 0.4623,
236
+ "step": 19000
237
+ },
238
+ {
239
+ "epoch": 0.96,
240
+ "learning_rate": 3.402241777690379e-05,
241
+ "loss": 0.4615,
242
+ "step": 19500
243
+ },
244
+ {
245
+ "epoch": 0.98,
246
+ "learning_rate": 3.3612736181439786e-05,
247
+ "loss": 0.4528,
248
+ "step": 20000
249
+ },
250
+ {
251
+ "epoch": 1.01,
252
+ "learning_rate": 3.320305458597578e-05,
253
+ "loss": 0.4572,
254
+ "step": 20500
255
+ },
256
+ {
257
+ "epoch": 1.03,
258
+ "learning_rate": 3.279337299051178e-05,
259
+ "loss": 0.4524,
260
+ "step": 21000
261
+ },
262
+ {
263
+ "epoch": 1.06,
264
+ "learning_rate": 3.238369139504777e-05,
265
+ "loss": 0.4504,
266
+ "step": 21500
267
+ },
268
+ {
269
+ "epoch": 1.08,
270
+ "learning_rate": 3.197400979958377e-05,
271
+ "loss": 0.4563,
272
+ "step": 22000
273
+ },
274
+ {
275
+ "epoch": 1.11,
276
+ "learning_rate": 3.156432820411976e-05,
277
+ "loss": 0.4513,
278
+ "step": 22500
279
+ },
280
+ {
281
+ "epoch": 1.13,
282
+ "learning_rate": 3.115464660865575e-05,
283
+ "loss": 0.4503,
284
+ "step": 23000
285
+ },
286
+ {
287
+ "epoch": 1.16,
288
+ "learning_rate": 3.0744965013191754e-05,
289
+ "loss": 0.4457,
290
+ "step": 23500
291
+ },
292
+ {
293
+ "epoch": 1.18,
294
+ "learning_rate": 3.0335283417727743e-05,
295
+ "loss": 0.4488,
296
+ "step": 24000
297
+ },
298
+ {
299
+ "epoch": 1.2,
300
+ "learning_rate": 2.992560182226374e-05,
301
+ "loss": 0.4497,
302
+ "step": 24500
303
+ },
304
+ {
305
+ "epoch": 1.23,
306
+ "learning_rate": 2.951592022679973e-05,
307
+ "loss": 0.4481,
308
+ "step": 25000
309
+ },
310
+ {
311
+ "epoch": 1.25,
312
+ "learning_rate": 2.9106238631335726e-05,
313
+ "loss": 0.4439,
314
+ "step": 25500
315
+ },
316
+ {
317
+ "epoch": 1.28,
318
+ "learning_rate": 2.8696557035871725e-05,
319
+ "loss": 0.4393,
320
+ "step": 26000
321
+ },
322
+ {
323
+ "epoch": 1.3,
324
+ "learning_rate": 2.8286875440407717e-05,
325
+ "loss": 0.441,
326
+ "step": 26500
327
+ },
328
+ {
329
+ "epoch": 1.33,
330
+ "learning_rate": 2.7877193844943712e-05,
331
+ "loss": 0.4394,
332
+ "step": 27000
333
+ },
334
+ {
335
+ "epoch": 1.35,
336
+ "learning_rate": 2.7467512249479704e-05,
337
+ "loss": 0.4387,
338
+ "step": 27500
339
+ },
340
+ {
341
+ "epoch": 1.38,
342
+ "learning_rate": 2.7057830654015696e-05,
343
+ "loss": 0.4341,
344
+ "step": 28000
345
+ },
346
+ {
347
+ "epoch": 1.4,
348
+ "learning_rate": 2.6648149058551695e-05,
349
+ "loss": 0.4345,
350
+ "step": 28500
351
+ },
352
+ {
353
+ "epoch": 1.43,
354
+ "learning_rate": 2.623846746308769e-05,
355
+ "loss": 0.4368,
356
+ "step": 29000
357
+ },
358
+ {
359
+ "epoch": 1.45,
360
+ "learning_rate": 2.5828785867623683e-05,
361
+ "loss": 0.4348,
362
+ "step": 29500
363
+ },
364
+ {
365
+ "epoch": 1.47,
366
+ "learning_rate": 2.5419104272159678e-05,
367
+ "loss": 0.4324,
368
+ "step": 30000
369
+ },
370
+ {
371
+ "epoch": 1.5,
372
+ "learning_rate": 2.500942267669567e-05,
373
+ "loss": 0.4321,
374
+ "step": 30500
375
+ },
376
+ {
377
+ "epoch": 1.52,
378
+ "learning_rate": 2.4599741081231666e-05,
379
+ "loss": 0.4313,
380
+ "step": 31000
381
+ },
382
+ {
383
+ "epoch": 1.55,
384
+ "learning_rate": 2.4190059485767664e-05,
385
+ "loss": 0.4266,
386
+ "step": 31500
387
+ },
388
+ {
389
+ "epoch": 1.57,
390
+ "learning_rate": 2.3780377890303656e-05,
391
+ "loss": 0.4287,
392
+ "step": 32000
393
+ },
394
+ {
395
+ "epoch": 1.6,
396
+ "learning_rate": 2.3370696294839652e-05,
397
+ "loss": 0.4215,
398
+ "step": 32500
399
+ },
400
+ {
401
+ "epoch": 1.62,
402
+ "learning_rate": 2.2961014699375647e-05,
403
+ "loss": 0.4255,
404
+ "step": 33000
405
+ },
406
+ {
407
+ "epoch": 1.65,
408
+ "learning_rate": 2.255133310391164e-05,
409
+ "loss": 0.4258,
410
+ "step": 33500
411
+ },
412
+ {
413
+ "epoch": 1.67,
414
+ "learning_rate": 2.2141651508447635e-05,
415
+ "loss": 0.4255,
416
+ "step": 34000
417
+ },
418
+ {
419
+ "epoch": 1.7,
420
+ "learning_rate": 2.173196991298363e-05,
421
+ "loss": 0.4212,
422
+ "step": 34500
423
+ },
424
+ {
425
+ "epoch": 1.72,
426
+ "learning_rate": 2.1322288317519626e-05,
427
+ "loss": 0.4218,
428
+ "step": 35000
429
+ },
430
+ {
431
+ "epoch": 1.75,
432
+ "learning_rate": 2.0912606722055618e-05,
433
+ "loss": 0.4179,
434
+ "step": 35500
435
+ },
436
+ {
437
+ "epoch": 1.77,
438
+ "learning_rate": 2.0502925126591613e-05,
439
+ "loss": 0.4205,
440
+ "step": 36000
441
+ },
442
+ {
443
+ "epoch": 1.79,
444
+ "learning_rate": 2.009324353112761e-05,
445
+ "loss": 0.4183,
446
+ "step": 36500
447
+ },
448
+ {
449
+ "epoch": 1.82,
450
+ "learning_rate": 1.9683561935663604e-05,
451
+ "loss": 0.4175,
452
+ "step": 37000
453
+ },
454
+ {
455
+ "epoch": 1.84,
456
+ "learning_rate": 1.92738803401996e-05,
457
+ "loss": 0.4196,
458
+ "step": 37500
459
+ },
460
+ {
461
+ "epoch": 1.87,
462
+ "learning_rate": 1.886419874473559e-05,
463
+ "loss": 0.4183,
464
+ "step": 38000
465
+ },
466
+ {
467
+ "epoch": 1.89,
468
+ "learning_rate": 1.8454517149271587e-05,
469
+ "loss": 0.4148,
470
+ "step": 38500
471
+ },
472
+ {
473
+ "epoch": 1.92,
474
+ "learning_rate": 1.8044835553807583e-05,
475
+ "loss": 0.4103,
476
+ "step": 39000
477
+ },
478
+ {
479
+ "epoch": 1.94,
480
+ "learning_rate": 1.7635153958343575e-05,
481
+ "loss": 0.4167,
482
+ "step": 39500
483
+ },
484
+ {
485
+ "epoch": 1.97,
486
+ "learning_rate": 1.722547236287957e-05,
487
+ "loss": 0.4092,
488
+ "step": 40000
489
+ },
490
+ {
491
+ "epoch": 1.99,
492
+ "learning_rate": 1.6815790767415565e-05,
493
+ "loss": 0.4124,
494
+ "step": 40500
495
+ },
496
+ {
497
+ "epoch": 2.02,
498
+ "learning_rate": 1.640610917195156e-05,
499
+ "loss": 0.4107,
500
+ "step": 41000
501
+ },
502
+ {
503
+ "epoch": 2.04,
504
+ "learning_rate": 1.5996427576487556e-05,
505
+ "loss": 0.4117,
506
+ "step": 41500
507
+ },
508
+ {
509
+ "epoch": 2.06,
510
+ "learning_rate": 1.558674598102355e-05,
511
+ "loss": 0.4107,
512
+ "step": 42000
513
+ },
514
+ {
515
+ "epoch": 2.09,
516
+ "learning_rate": 1.5177064385559544e-05,
517
+ "loss": 0.4116,
518
+ "step": 42500
519
+ },
520
+ {
521
+ "epoch": 2.11,
522
+ "learning_rate": 1.4767382790095538e-05,
523
+ "loss": 0.4125,
524
+ "step": 43000
525
+ },
526
+ {
527
+ "epoch": 2.14,
528
+ "learning_rate": 1.4357701194631535e-05,
529
+ "loss": 0.4116,
530
+ "step": 43500
531
+ },
532
+ {
533
+ "epoch": 2.16,
534
+ "learning_rate": 1.3948019599167528e-05,
535
+ "loss": 0.4083,
536
+ "step": 44000
537
+ },
538
+ {
539
+ "epoch": 2.19,
540
+ "learning_rate": 1.353833800370352e-05,
541
+ "loss": 0.4103,
542
+ "step": 44500
543
+ },
544
+ {
545
+ "epoch": 2.21,
546
+ "learning_rate": 1.3128656408239518e-05,
547
+ "loss": 0.4111,
548
+ "step": 45000
549
+ },
550
+ {
551
+ "epoch": 2.24,
552
+ "learning_rate": 1.2718974812775511e-05,
553
+ "loss": 0.4023,
554
+ "step": 45500
555
+ },
556
+ {
557
+ "epoch": 2.26,
558
+ "learning_rate": 1.2309293217311505e-05,
559
+ "loss": 0.4069,
560
+ "step": 46000
561
+ },
562
+ {
563
+ "epoch": 2.29,
564
+ "learning_rate": 1.18996116218475e-05,
565
+ "loss": 0.4063,
566
+ "step": 46500
567
+ },
568
+ {
569
+ "epoch": 2.31,
570
+ "learning_rate": 1.1489930026383496e-05,
571
+ "loss": 0.4078,
572
+ "step": 47000
573
+ },
574
+ {
575
+ "epoch": 2.34,
576
+ "learning_rate": 1.108024843091949e-05,
577
+ "loss": 0.4036,
578
+ "step": 47500
579
+ },
580
+ {
581
+ "epoch": 2.36,
582
+ "learning_rate": 1.0670566835455484e-05,
583
+ "loss": 0.4065,
584
+ "step": 48000
585
+ },
586
+ {
587
+ "epoch": 2.38,
588
+ "learning_rate": 1.0260885239991479e-05,
589
+ "loss": 0.403,
590
+ "step": 48500
591
+ },
592
+ {
593
+ "epoch": 2.41,
594
+ "learning_rate": 9.851203644527474e-06,
595
+ "loss": 0.3999,
596
+ "step": 49000
597
+ },
598
+ {
599
+ "epoch": 2.43,
600
+ "learning_rate": 9.441522049063468e-06,
601
+ "loss": 0.4095,
602
+ "step": 49500
603
+ },
604
+ {
605
+ "epoch": 2.46,
606
+ "learning_rate": 9.031840453599464e-06,
607
+ "loss": 0.4048,
608
+ "step": 50000
609
+ },
610
+ {
611
+ "epoch": 2.48,
612
+ "learning_rate": 8.622158858135457e-06,
613
+ "loss": 0.3998,
614
+ "step": 50500
615
+ },
616
+ {
617
+ "epoch": 2.51,
618
+ "learning_rate": 8.212477262671451e-06,
619
+ "loss": 0.4,
620
+ "step": 51000
621
+ },
622
+ {
623
+ "epoch": 2.53,
624
+ "learning_rate": 7.802795667207447e-06,
625
+ "loss": 0.4019,
626
+ "step": 51500
627
+ },
628
+ {
629
+ "epoch": 2.56,
630
+ "learning_rate": 7.393114071743441e-06,
631
+ "loss": 0.3953,
632
+ "step": 52000
633
+ },
634
+ {
635
+ "epoch": 2.58,
636
+ "learning_rate": 6.983432476279437e-06,
637
+ "loss": 0.3952,
638
+ "step": 52500
639
+ },
640
+ {
641
+ "epoch": 2.61,
642
+ "learning_rate": 6.573750880815431e-06,
643
+ "loss": 0.3996,
644
+ "step": 53000
645
+ },
646
+ {
647
+ "epoch": 2.63,
648
+ "learning_rate": 6.164069285351425e-06,
649
+ "loss": 0.4002,
650
+ "step": 53500
651
+ },
652
+ {
653
+ "epoch": 2.65,
654
+ "learning_rate": 5.75438768988742e-06,
655
+ "loss": 0.3983,
656
+ "step": 54000
657
+ },
658
+ {
659
+ "epoch": 2.68,
660
+ "learning_rate": 5.344706094423414e-06,
661
+ "loss": 0.394,
662
+ "step": 54500
663
+ },
664
+ {
665
+ "epoch": 2.7,
666
+ "learning_rate": 4.935024498959409e-06,
667
+ "loss": 0.3937,
668
+ "step": 55000
669
+ },
670
+ {
671
+ "epoch": 2.73,
672
+ "learning_rate": 4.525342903495403e-06,
673
+ "loss": 0.3955,
674
+ "step": 55500
675
+ },
676
+ {
677
+ "epoch": 2.75,
678
+ "learning_rate": 4.115661308031398e-06,
679
+ "loss": 0.4004,
680
+ "step": 56000
681
+ },
682
+ {
683
+ "epoch": 2.78,
684
+ "learning_rate": 3.705979712567393e-06,
685
+ "loss": 0.4024,
686
+ "step": 56500
687
+ },
688
+ {
689
+ "epoch": 2.8,
690
+ "learning_rate": 3.296298117103387e-06,
691
+ "loss": 0.3976,
692
+ "step": 57000
693
+ },
694
+ {
695
+ "epoch": 2.83,
696
+ "learning_rate": 2.886616521639382e-06,
697
+ "loss": 0.3932,
698
+ "step": 57500
699
+ },
700
+ {
701
+ "epoch": 2.85,
702
+ "learning_rate": 2.4769349261753767e-06,
703
+ "loss": 0.3955,
704
+ "step": 58000
705
+ },
706
+ {
707
+ "epoch": 2.88,
708
+ "learning_rate": 2.0672533307113713e-06,
709
+ "loss": 0.4007,
710
+ "step": 58500
711
+ },
712
+ {
713
+ "epoch": 2.9,
714
+ "learning_rate": 1.6575717352473659e-06,
715
+ "loss": 0.4005,
716
+ "step": 59000
717
+ },
718
+ {
719
+ "epoch": 2.93,
720
+ "learning_rate": 1.2478901397833605e-06,
721
+ "loss": 0.3935,
722
+ "step": 59500
723
+ },
724
+ {
725
+ "epoch": 2.95,
726
+ "learning_rate": 8.382085443193551e-07,
727
+ "loss": 0.3957,
728
+ "step": 60000
729
+ },
730
+ {
731
+ "epoch": 2.97,
732
+ "learning_rate": 4.285269488553496e-07,
733
+ "loss": 0.3972,
734
+ "step": 60500
735
+ },
736
+ {
737
+ "epoch": 3.0,
738
+ "learning_rate": 1.8845353391344248e-08,
739
+ "loss": 0.3934,
740
+ "step": 61000
741
+ },
742
+ {
743
+ "epoch": 3.0,
744
+ "step": 61023,
745
+ "total_flos": 3.740919470993169e+17,
746
+ "train_loss": 0.46753893842028726,
747
+ "train_runtime": 20527.2727,
748
+ "train_samples_per_second": 95.125,
749
+ "train_steps_per_second": 2.973
750
+ }
751
+ ],
752
+ "max_steps": 61023,
753
+ "num_train_epochs": 3,
754
+ "total_flos": 3.740919470993169e+17,
755
+ "trial_name": null,
756
+ "trial_params": null
757
+ }
training_args.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ee054504d25a1b8fd6235eb839166f6d9ceb2805c1ce1ebadd1504e257334465
3
+ size 4027
vocab.txt ADDED
@@ -0,0 +1,537 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [UNK]
2
+ [PAD]
3
+ [MASK]
4
+ [CLS]
5
+ [SEP]
6
+ 00___numeric___srcip_00___0
7
+ 00___numeric___srcip_00___1
8
+ 00___numeric___srcip_00___2
9
+ 00___numeric___srcip_00___3
10
+ 00___numeric___srcip_00___4
11
+ 00___numeric___srcip_01___0
12
+ 00___numeric___srcip_01___1
13
+ 00___numeric___srcip_01___2
14
+ 00___numeric___srcip_01___3
15
+ 00___numeric___srcip_01___4
16
+ 00___numeric___srcip_01___5
17
+ 00___numeric___srcip_01___6
18
+ 00___numeric___srcip_01___7
19
+ 00___numeric___srcip_01___8
20
+ 00___numeric___srcip_01___9
21
+ 00___numeric___srcip_02___0
22
+ 00___numeric___srcip_02___1
23
+ 00___numeric___srcip_02___2
24
+ 00___numeric___srcip_02___3
25
+ 00___numeric___srcip_02___4
26
+ 00___numeric___srcip_02___5
27
+ 00___numeric___srcip_02___6
28
+ 00___numeric___srcip_02___7
29
+ 00___numeric___srcip_02___8
30
+ 00___numeric___srcip_02___9
31
+ 00___numeric___srcip_03___0
32
+ 00___numeric___srcip_03___1
33
+ 00___numeric___srcip_03___2
34
+ 00___numeric___srcip_03___3
35
+ 00___numeric___srcip_03___4
36
+ 00___numeric___srcip_03___5
37
+ 00___numeric___srcip_03___6
38
+ 00___numeric___srcip_03___7
39
+ 00___numeric___srcip_03___8
40
+ 00___numeric___srcip_03___9
41
+ 00___numeric___srcip_04___0
42
+ 00___numeric___srcip_04___1
43
+ 00___numeric___srcip_04___2
44
+ 00___numeric___srcip_04___3
45
+ 00___numeric___srcip_04___4
46
+ 00___numeric___srcip_04___5
47
+ 00___numeric___srcip_04___6
48
+ 00___numeric___srcip_04___7
49
+ 00___numeric___srcip_04___8
50
+ 00___numeric___srcip_04___9
51
+ 00___numeric___srcip_05___0
52
+ 00___numeric___srcip_05___1
53
+ 00___numeric___srcip_05___2
54
+ 00___numeric___srcip_05___3
55
+ 00___numeric___srcip_05___4
56
+ 00___numeric___srcip_05___5
57
+ 00___numeric___srcip_05___6
58
+ 00___numeric___srcip_05___7
59
+ 00___numeric___srcip_05___8
60
+ 00___numeric___srcip_05___9
61
+ 00___numeric___srcip_06___0
62
+ 00___numeric___srcip_06___1
63
+ 00___numeric___srcip_06___2
64
+ 00___numeric___srcip_06___3
65
+ 00___numeric___srcip_06___4
66
+ 00___numeric___srcip_06___5
67
+ 00___numeric___srcip_06___6
68
+ 00___numeric___srcip_06___7
69
+ 00___numeric___srcip_06___8
70
+ 00___numeric___srcip_06___9
71
+ 00___numeric___srcip_07___0
72
+ 00___numeric___srcip_07___1
73
+ 00___numeric___srcip_07___2
74
+ 00___numeric___srcip_07___3
75
+ 00___numeric___srcip_07___4
76
+ 00___numeric___srcip_07___5
77
+ 00___numeric___srcip_07___6
78
+ 00___numeric___srcip_07___7
79
+ 00___numeric___srcip_07___8
80
+ 00___numeric___srcip_07___9
81
+ 00___numeric___srcip_08___0
82
+ 00___numeric___srcip_08___1
83
+ 00___numeric___srcip_08___2
84
+ 00___numeric___srcip_08___3
85
+ 00___numeric___srcip_08___4
86
+ 00___numeric___srcip_08___5
87
+ 00___numeric___srcip_08___6
88
+ 00___numeric___srcip_08___7
89
+ 00___numeric___srcip_08___8
90
+ 00___numeric___srcip_08___9
91
+ 00___numeric___srcip_09___0
92
+ 00___numeric___srcip_09___1
93
+ 00___numeric___srcip_09___2
94
+ 00___numeric___srcip_09___3
95
+ 00___numeric___srcip_09___4
96
+ 00___numeric___srcip_09___5
97
+ 00___numeric___srcip_09___6
98
+ 00___numeric___srcip_09___7
99
+ 00___numeric___srcip_09___8
100
+ 00___numeric___srcip_09___9
101
+ 01___numeric___dstip_00___0
102
+ 01___numeric___dstip_00___1
103
+ 01___numeric___dstip_00___2
104
+ 01___numeric___dstip_00___3
105
+ 01___numeric___dstip_00___4
106
+ 01___numeric___dstip_01___0
107
+ 01___numeric___dstip_01___1
108
+ 01___numeric___dstip_01___2
109
+ 01___numeric___dstip_01___3
110
+ 01___numeric___dstip_01___4
111
+ 01___numeric___dstip_01___5
112
+ 01___numeric___dstip_01___6
113
+ 01___numeric___dstip_01___7
114
+ 01___numeric___dstip_01___8
115
+ 01___numeric___dstip_01___9
116
+ 01___numeric___dstip_02___0
117
+ 01___numeric___dstip_02___1
118
+ 01___numeric___dstip_02___2
119
+ 01___numeric___dstip_02___3
120
+ 01___numeric___dstip_02___4
121
+ 01___numeric___dstip_02___5
122
+ 01___numeric___dstip_02___6
123
+ 01___numeric___dstip_02___7
124
+ 01___numeric___dstip_02___8
125
+ 01___numeric___dstip_02___9
126
+ 01___numeric___dstip_03___0
127
+ 01___numeric___dstip_03___1
128
+ 01___numeric___dstip_03___2
129
+ 01___numeric___dstip_03___3
130
+ 01___numeric___dstip_03___4
131
+ 01___numeric___dstip_03___5
132
+ 01___numeric___dstip_03___6
133
+ 01___numeric___dstip_03___7
134
+ 01___numeric___dstip_03___8
135
+ 01___numeric___dstip_03___9
136
+ 01___numeric___dstip_04___0
137
+ 01___numeric___dstip_04___1
138
+ 01___numeric___dstip_04___2
139
+ 01___numeric___dstip_04___3
140
+ 01___numeric___dstip_04___4
141
+ 01___numeric___dstip_04___5
142
+ 01___numeric___dstip_04___6
143
+ 01___numeric___dstip_04___7
144
+ 01___numeric___dstip_04___8
145
+ 01___numeric___dstip_04___9
146
+ 01___numeric___dstip_05___0
147
+ 01___numeric___dstip_05___1
148
+ 01___numeric___dstip_05___2
149
+ 01___numeric___dstip_05___3
150
+ 01___numeric___dstip_05___4
151
+ 01___numeric___dstip_05___5
152
+ 01___numeric___dstip_05___6
153
+ 01___numeric___dstip_05___7
154
+ 01___numeric___dstip_05___8
155
+ 01___numeric___dstip_05___9
156
+ 01___numeric___dstip_06___0
157
+ 01___numeric___dstip_06___1
158
+ 01___numeric___dstip_06___2
159
+ 01___numeric___dstip_06___3
160
+ 01___numeric___dstip_06___4
161
+ 01___numeric___dstip_06___5
162
+ 01___numeric___dstip_06___6
163
+ 01___numeric___dstip_06___7
164
+ 01___numeric___dstip_06___8
165
+ 01___numeric___dstip_06___9
166
+ 01___numeric___dstip_07___0
167
+ 01___numeric___dstip_07___1
168
+ 01___numeric___dstip_07___2
169
+ 01___numeric___dstip_07___3
170
+ 01___numeric___dstip_07___4
171
+ 01___numeric___dstip_07___5
172
+ 01___numeric___dstip_07___6
173
+ 01___numeric___dstip_07___7
174
+ 01___numeric___dstip_07___8
175
+ 01___numeric___dstip_07___9
176
+ 01___numeric___dstip_08___0
177
+ 01___numeric___dstip_08___1
178
+ 01___numeric___dstip_08___2
179
+ 01___numeric___dstip_08___3
180
+ 01___numeric___dstip_08___4
181
+ 01___numeric___dstip_08___5
182
+ 01___numeric___dstip_08___6
183
+ 01___numeric___dstip_08___7
184
+ 01___numeric___dstip_08___8
185
+ 01___numeric___dstip_08___9
186
+ 01___numeric___dstip_09___0
187
+ 01___numeric___dstip_09___1
188
+ 01___numeric___dstip_09___2
189
+ 01___numeric___dstip_09___3
190
+ 01___numeric___dstip_09___4
191
+ 01___numeric___dstip_09___5
192
+ 01___numeric___dstip_09___6
193
+ 01___numeric___dstip_09___7
194
+ 01___numeric___dstip_09___8
195
+ 01___numeric___dstip_09___9
196
+ 02___numeric___srcport_00___0
197
+ 02___numeric___srcport_00___1
198
+ 02___numeric___srcport_00___2
199
+ 02___numeric___srcport_00___3
200
+ 02___numeric___srcport_00___4
201
+ 02___numeric___srcport_00___5
202
+ 02___numeric___srcport_00___6
203
+ 02___numeric___srcport_01___0
204
+ 02___numeric___srcport_01___1
205
+ 02___numeric___srcport_01___2
206
+ 02___numeric___srcport_01___3
207
+ 02___numeric___srcport_01___4
208
+ 02___numeric___srcport_01___5
209
+ 02___numeric___srcport_01___6
210
+ 02___numeric___srcport_01___7
211
+ 02___numeric___srcport_01___8
212
+ 02___numeric___srcport_01___9
213
+ 02___numeric___srcport_02___0
214
+ 02___numeric___srcport_02___1
215
+ 02___numeric___srcport_02___2
216
+ 02___numeric___srcport_02___3
217
+ 02___numeric___srcport_02___4
218
+ 02___numeric___srcport_02___5
219
+ 02___numeric___srcport_02___6
220
+ 02___numeric___srcport_02___7
221
+ 02___numeric___srcport_02___8
222
+ 02___numeric___srcport_02___9
223
+ 02___numeric___srcport_03___0
224
+ 02___numeric___srcport_03___1
225
+ 02___numeric___srcport_03___2
226
+ 02___numeric___srcport_03___3
227
+ 02___numeric___srcport_03___4
228
+ 02___numeric___srcport_03___5
229
+ 02___numeric___srcport_03___6
230
+ 02___numeric___srcport_03___7
231
+ 02___numeric___srcport_03___8
232
+ 02___numeric___srcport_03___9
233
+ 02___numeric___srcport_04___0
234
+ 02___numeric___srcport_04___1
235
+ 02___numeric___srcport_04___2
236
+ 02___numeric___srcport_04___3
237
+ 02___numeric___srcport_04___4
238
+ 02___numeric___srcport_04___5
239
+ 02___numeric___srcport_04___6
240
+ 02___numeric___srcport_04___7
241
+ 02___numeric___srcport_04___8
242
+ 02___numeric___srcport_04___9
243
+ 03___numeric___dstport_00___0
244
+ 03___numeric___dstport_00___1
245
+ 03___numeric___dstport_00___2
246
+ 03___numeric___dstport_00___3
247
+ 03___numeric___dstport_00___4
248
+ 03___numeric___dstport_00___5
249
+ 03___numeric___dstport_00___6
250
+ 03___numeric___dstport_01___0
251
+ 03___numeric___dstport_01___1
252
+ 03___numeric___dstport_01___2
253
+ 03___numeric___dstport_01___3
254
+ 03___numeric___dstport_01___4
255
+ 03___numeric___dstport_01___5
256
+ 03___numeric___dstport_01___6
257
+ 03___numeric___dstport_01___7
258
+ 03___numeric___dstport_01___8
259
+ 03___numeric___dstport_01___9
260
+ 03___numeric___dstport_02___0
261
+ 03___numeric___dstport_02___1
262
+ 03___numeric___dstport_02___2
263
+ 03___numeric___dstport_02___3
264
+ 03___numeric___dstport_02___4
265
+ 03___numeric___dstport_02___5
266
+ 03___numeric___dstport_02___6
267
+ 03___numeric___dstport_02___7
268
+ 03___numeric___dstport_02___8
269
+ 03___numeric___dstport_02___9
270
+ 03___numeric___dstport_03___0
271
+ 03___numeric___dstport_03___1
272
+ 03___numeric___dstport_03___2
273
+ 03___numeric___dstport_03___3
274
+ 03___numeric___dstport_03___4
275
+ 03___numeric___dstport_03___5
276
+ 03___numeric___dstport_03___6
277
+ 03___numeric___dstport_03___7
278
+ 03___numeric___dstport_03___8
279
+ 03___numeric___dstport_03___9
280
+ 03___numeric___dstport_04___0
281
+ 03___numeric___dstport_04___1
282
+ 03___numeric___dstport_04___2
283
+ 03___numeric___dstport_04___3
284
+ 03___numeric___dstport_04___4
285
+ 03___numeric___dstport_04___5
286
+ 03___numeric___dstport_04___6
287
+ 03___numeric___dstport_04___7
288
+ 03___numeric___dstport_04___8
289
+ 03___numeric___dstport_04___9
290
+ 04___categorical___proto___tcp
291
+ 04___categorical___proto___udp
292
+ 05___numeric___time_00___1
293
+ 05___numeric___time_01___2
294
+ 05___numeric___time_01___3
295
+ 05___numeric___time_01___5
296
+ 05___numeric___time_02___2
297
+ 05___numeric___time_02___3
298
+ 05___numeric___time_02___6
299
+ 05___numeric___time_03___1
300
+ 05___numeric___time_04___0
301
+ 05___numeric___time_04___1
302
+ 05___numeric___time_04___9
303
+ 05___numeric___time_05___0
304
+ 05___numeric___time_05___1
305
+ 05___numeric___time_05___7
306
+ 05___numeric___time_06___0
307
+ 05___numeric___time_06___5
308
+ 05___numeric___time_06___8
309
+ 05___numeric___time_07___3
310
+ 05___numeric___time_07___4
311
+ 05___numeric___time_07___5
312
+ 05___numeric___time_07___6
313
+ 05___numeric___time_07___7
314
+ 05___numeric___time_07___8
315
+ 05___numeric___time_07___9
316
+ 05___numeric___time_08___0
317
+ 05___numeric___time_08___1
318
+ 05___numeric___time_08___2
319
+ 05___numeric___time_08___3
320
+ 05___numeric___time_08___4
321
+ 05___numeric___time_08___5
322
+ 05___numeric___time_08___6
323
+ 05___numeric___time_08___7
324
+ 05___numeric___time_08___8
325
+ 05___numeric___time_08___9
326
+ 05___numeric___time_09___0
327
+ 05___numeric___time_09___1
328
+ 05___numeric___time_09___2
329
+ 05___numeric___time_09___3
330
+ 05___numeric___time_09___4
331
+ 05___numeric___time_09___5
332
+ 05___numeric___time_09___6
333
+ 05___numeric___time_09___7
334
+ 05___numeric___time_09___8
335
+ 05___numeric___time_09___9
336
+ 05___numeric___time_10___0
337
+ 05___numeric___time_10___1
338
+ 05___numeric___time_10___2
339
+ 05___numeric___time_10___3
340
+ 05___numeric___time_10___4
341
+ 05___numeric___time_10___5
342
+ 05___numeric___time_10___6
343
+ 05___numeric___time_10___7
344
+ 05___numeric___time_10___8
345
+ 05___numeric___time_10___9
346
+ 05___numeric___time_11___0
347
+ 05___numeric___time_11___1
348
+ 05___numeric___time_11___2
349
+ 05___numeric___time_11___3
350
+ 05___numeric___time_11___4
351
+ 05___numeric___time_11___5
352
+ 05___numeric___time_11___6
353
+ 05___numeric___time_11___7
354
+ 05___numeric___time_11___8
355
+ 05___numeric___time_11___9
356
+ 05___numeric___time_12___0
357
+ 05___numeric___time_12___1
358
+ 05___numeric___time_12___2
359
+ 05___numeric___time_12___3
360
+ 05___numeric___time_12___4
361
+ 05___numeric___time_12___5
362
+ 05___numeric___time_12___6
363
+ 05___numeric___time_12___7
364
+ 05___numeric___time_12___8
365
+ 05___numeric___time_12___9
366
+ 05___numeric___time_13___0
367
+ 05___numeric___time_13___1
368
+ 05___numeric___time_13___2
369
+ 05___numeric___time_13___3
370
+ 05___numeric___time_13___4
371
+ 05___numeric___time_13___5
372
+ 05___numeric___time_13___6
373
+ 05___numeric___time_13___7
374
+ 05___numeric___time_13___8
375
+ 05___numeric___time_13___9
376
+ 05___numeric___time_14___0
377
+ 05___numeric___time_14___1
378
+ 05___numeric___time_14___2
379
+ 05___numeric___time_14___3
380
+ 05___numeric___time_14___4
381
+ 05___numeric___time_14___5
382
+ 05___numeric___time_14___6
383
+ 05___numeric___time_14___7
384
+ 05___numeric___time_14___8
385
+ 05___numeric___time_14___9
386
+ 05___numeric___time_15___0
387
+ 05___numeric___time_15___1
388
+ 05___numeric___time_15___2
389
+ 05___numeric___time_15___3
390
+ 05___numeric___time_15___4
391
+ 05___numeric___time_15___5
392
+ 05___numeric___time_15___6
393
+ 05___numeric___time_15___7
394
+ 05___numeric___time_15___8
395
+ 05___numeric___time_15___9
396
+ 06___numeric___pkt_len_00___0
397
+ 06___numeric___pkt_len_00___1
398
+ 06___numeric___pkt_len_01___0
399
+ 06___numeric___pkt_len_01___1
400
+ 06___numeric___pkt_len_01___2
401
+ 06___numeric___pkt_len_01___3
402
+ 06___numeric___pkt_len_01___4
403
+ 06___numeric___pkt_len_01___5
404
+ 06___numeric___pkt_len_01___6
405
+ 06___numeric___pkt_len_01___7
406
+ 06___numeric___pkt_len_01___8
407
+ 06___numeric___pkt_len_01___9
408
+ 06___numeric___pkt_len_02___0
409
+ 06___numeric___pkt_len_02___1
410
+ 06___numeric___pkt_len_02___2
411
+ 06___numeric___pkt_len_02___3
412
+ 06___numeric___pkt_len_02___4
413
+ 06___numeric___pkt_len_02___5
414
+ 06___numeric___pkt_len_02___6
415
+ 06___numeric___pkt_len_02___7
416
+ 06___numeric___pkt_len_02___8
417
+ 06___numeric___pkt_len_02___9
418
+ 06___numeric___pkt_len_03___0
419
+ 06___numeric___pkt_len_03___1
420
+ 06___numeric___pkt_len_03___2
421
+ 06___numeric___pkt_len_03___3
422
+ 06___numeric___pkt_len_03___4
423
+ 06___numeric___pkt_len_03___5
424
+ 06___numeric___pkt_len_03___6
425
+ 06___numeric___pkt_len_03___7
426
+ 06___numeric___pkt_len_03___8
427
+ 06___numeric___pkt_len_03___9
428
+ 07___numeric___version_00___4
429
+ 08___numeric___ihl_00___5
430
+ 09___numeric___tos_00___0
431
+ 09___numeric___tos_00___1
432
+ 09___numeric___tos_00___2
433
+ 09___numeric___tos_01___0
434
+ 09___numeric___tos_01___1
435
+ 09___numeric___tos_01___2
436
+ 09___numeric___tos_01___3
437
+ 09___numeric___tos_01___4
438
+ 09___numeric___tos_01___5
439
+ 09___numeric___tos_01___6
440
+ 09___numeric___tos_01___7
441
+ 09___numeric___tos_01___8
442
+ 09___numeric___tos_01___9
443
+ 09___numeric___tos_02___0
444
+ 09___numeric___tos_02___1
445
+ 09___numeric___tos_02___2
446
+ 09___numeric___tos_02___3
447
+ 09___numeric___tos_02___4
448
+ 09___numeric___tos_02___5
449
+ 09___numeric___tos_02___6
450
+ 09___numeric___tos_02___7
451
+ 09___numeric___tos_02___8
452
+ 09___numeric___tos_02___9
453
+ 10___numeric___id_00___0
454
+ 10___numeric___id_00___1
455
+ 10___numeric___id_00___2
456
+ 10___numeric___id_00___3
457
+ 10___numeric___id_00___4
458
+ 10___numeric___id_00___5
459
+ 10___numeric___id_00___6
460
+ 10___numeric___id_01___0
461
+ 10___numeric___id_01___1
462
+ 10___numeric___id_01___2
463
+ 10___numeric___id_01___3
464
+ 10___numeric___id_01___4
465
+ 10___numeric___id_01___5
466
+ 10___numeric___id_01___6
467
+ 10___numeric___id_01___7
468
+ 10___numeric___id_01___8
469
+ 10___numeric___id_01___9
470
+ 10___numeric___id_02___0
471
+ 10___numeric___id_02___1
472
+ 10___numeric___id_02___2
473
+ 10___numeric___id_02___3
474
+ 10___numeric___id_02___4
475
+ 10___numeric___id_02___5
476
+ 10___numeric___id_02___6
477
+ 10___numeric___id_02___7
478
+ 10___numeric___id_02___8
479
+ 10___numeric___id_02___9
480
+ 10___numeric___id_03___0
481
+ 10___numeric___id_03___1
482
+ 10___numeric___id_03___2
483
+ 10___numeric___id_03___3
484
+ 10___numeric___id_03___4
485
+ 10___numeric___id_03___5
486
+ 10___numeric___id_03___6
487
+ 10___numeric___id_03___7
488
+ 10___numeric___id_03___8
489
+ 10___numeric___id_03___9
490
+ 10___numeric___id_04___0
491
+ 10___numeric___id_04___1
492
+ 10___numeric___id_04___2
493
+ 10___numeric___id_04___3
494
+ 10___numeric___id_04___4
495
+ 10___numeric___id_04___5
496
+ 10___numeric___id_04___6
497
+ 10___numeric___id_04___7
498
+ 10___numeric___id_04___8
499
+ 10___numeric___id_04___9
500
+ 11___numeric___flag_00___0
501
+ 11___numeric___flag_00___1
502
+ 11___numeric___flag_00___2
503
+ 12___numeric___off_00___0
504
+ 12___numeric___off_00___1
505
+ 12___numeric___off_01___0
506
+ 12___numeric___off_01___7
507
+ 12___numeric___off_01___8
508
+ 12___numeric___off_01___9
509
+ 12___numeric___off_02___0
510
+ 12___numeric___off_02___2
511
+ 12___numeric___off_02___4
512
+ 12___numeric___off_02___5
513
+ 12___numeric___off_02___7
514
+ 12___numeric___off_02___9
515
+ 13___numeric___ttl_00___0
516
+ 13___numeric___ttl_00___1
517
+ 13___numeric___ttl_00___2
518
+ 13___numeric___ttl_01___0
519
+ 13___numeric___ttl_01___1
520
+ 13___numeric___ttl_01___2
521
+ 13___numeric___ttl_01___3
522
+ 13___numeric___ttl_01___4
523
+ 13___numeric___ttl_01___5
524
+ 13___numeric___ttl_01___6
525
+ 13___numeric___ttl_01___7
526
+ 13___numeric___ttl_01___8
527
+ 13___numeric___ttl_01___9
528
+ 13___numeric___ttl_02___0
529
+ 13___numeric___ttl_02___1
530
+ 13___numeric___ttl_02___2
531
+ 13___numeric___ttl_02___3
532
+ 13___numeric___ttl_02___4
533
+ 13___numeric___ttl_02___5
534
+ 13___numeric___ttl_02___6
535
+ 13___numeric___ttl_02___7
536
+ 13___numeric___ttl_02___8
537
+ 13___numeric___ttl_02___9