Vui Seng Chua commited on
Commit ·
06bf9e5
1
Parent(s): 187aad7
Add content
Browse files- .gitattributes +1 -0
- README.md +7 -0
- r0.030-squad-bert-b-mvmt-8bit/all_results.json +5 -0
- r0.030-squad-bert-b-mvmt-8bit/compressed_graph.dot +0 -0
- r0.030-squad-bert-b-mvmt-8bit/eval_nbest_predictions.json +3 -0
- r0.030-squad-bert-b-mvmt-8bit/eval_predictions.json +0 -0
- r0.030-squad-bert-b-mvmt-8bit/eval_results.json +5 -0
- r0.030-squad-bert-b-mvmt-8bit/ir/sparsity_structures.csv +73 -0
- r0.030-squad-bert-b-mvmt-8bit/ir/sparsity_structures.md +74 -0
- r0.030-squad-bert-b-mvmt-8bit/ir/sparsity_structures.pkl +0 -0
- r0.030-squad-bert-b-mvmt-8bit/ir/squad-BertForQuestionAnswering.cropped.8bit.bin +3 -0
- r0.030-squad-bert-b-mvmt-8bit/ir/squad-BertForQuestionAnswering.cropped.8bit.mapping +0 -0
- r0.030-squad-bert-b-mvmt-8bit/ir/squad-BertForQuestionAnswering.cropped.8bit.onnx +3 -0
- r0.030-squad-bert-b-mvmt-8bit/ir/squad-BertForQuestionAnswering.cropped.8bit.xml +0 -0
- r0.030-squad-bert-b-mvmt-8bit/nncf-mvmt-p3.json +67 -0
- r0.030-squad-bert-b-mvmt-8bit/original_graph.dot +0 -0
.gitattributes
CHANGED
|
@@ -25,3 +25,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
|
|
| 25 |
*.zip filter=lfs diff=lfs merge=lfs -text
|
| 26 |
*.zstandard filter=lfs diff=lfs merge=lfs -text
|
| 27 |
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
| 25 |
*.zip filter=lfs diff=lfs merge=lfs -text
|
| 26 |
*.zstandard filter=lfs diff=lfs merge=lfs -text
|
| 27 |
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
| 28 |
+
eval_nbest_predictions.json filter=lfs diff=lfs merge=lfs -text
|
README.md
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
### BERT-base compressed by JPQD with Regularization Factor 0.03
|
| 2 |
+
```
|
| 3 |
+
F1: 87.66
|
| 4 |
+
EM: 80.23
|
| 5 |
+
|
| 6 |
+
```
|
| 7 |
+
|
r0.030-squad-bert-b-mvmt-8bit/all_results.json
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"eval_exact_match": 80.22705771050141,
|
| 3 |
+
"eval_f1": 87.662990199249,
|
| 4 |
+
"eval_samples": 10784
|
| 5 |
+
}
|
r0.030-squad-bert-b-mvmt-8bit/compressed_graph.dot
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
r0.030-squad-bert-b-mvmt-8bit/eval_nbest_predictions.json
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:5c6afe439a0ce8006a7704171e8e4423440f2665f465a315c51647eb53b31595
|
| 3 |
+
size 48932650
|
r0.030-squad-bert-b-mvmt-8bit/eval_predictions.json
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
r0.030-squad-bert-b-mvmt-8bit/eval_results.json
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"eval_exact_match": 80.22705771050141,
|
| 3 |
+
"eval_f1": 87.662990199249,
|
| 4 |
+
"eval_samples": 10784
|
| 5 |
+
}
|
r0.030-squad-bert-b-mvmt-8bit/ir/sparsity_structures.csv
ADDED
|
@@ -0,0 +1,73 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
pt_module_name,block_id,orig_w_shape,final_w_shape,orig_b_shape,final_b_shape,prune_by,id_to_keep,head_id_to_keep,nncf_graph_node
|
| 2 |
+
nncf_module.bert.encoder.layer.0.attention.self.key,0,"(768, 768)","(192, 768)","(768,)","(192,)",group of 64 rows,See pkl,"[3, 8, 10]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0
|
| 3 |
+
nncf_module.bert.encoder.layer.0.attention.output.dense,0,"(768, 768)","(768, 192)","(768,)","(768,)",group of 64 cols,See pkl,"[3, 8, 10]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0
|
| 4 |
+
nncf_module.bert.encoder.layer.0.attention.self.query,0,"(768, 768)","(192, 768)","(768,)","(192,)",group of 64 rows,See pkl,"[3, 8, 10]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0
|
| 5 |
+
nncf_module.bert.encoder.layer.0.attention.self.value,0,"(768, 768)","(192, 768)","(768,)","(192,)",group of 64 rows,See pkl,"[3, 8, 10]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0
|
| 6 |
+
nncf_module.bert.encoder.layer.0.output.dense,1,"(768, 3072)","(768, 612)","(768,)","(768,)",col,See pkl,,BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertOutput[output]/NNCFLinear[dense]/linear_0
|
| 7 |
+
nncf_module.bert.encoder.layer.0.intermediate.dense,1,"(3072, 768)","(612, 768)","(3072,)","(612,)",row,See pkl,,BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0
|
| 8 |
+
nncf_module.bert.encoder.layer.1.attention.self.value,2,"(768, 768)","(192, 768)","(768,)","(192,)",group of 64 rows,See pkl,"[4, 7, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0
|
| 9 |
+
nncf_module.bert.encoder.layer.1.attention.self.query,2,"(768, 768)","(192, 768)","(768,)","(192,)",group of 64 rows,See pkl,"[4, 7, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0
|
| 10 |
+
nncf_module.bert.encoder.layer.1.attention.self.key,2,"(768, 768)","(192, 768)","(768,)","(192,)",group of 64 rows,See pkl,"[4, 7, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0
|
| 11 |
+
nncf_module.bert.encoder.layer.1.attention.output.dense,2,"(768, 768)","(768, 192)","(768,)","(768,)",group of 64 cols,See pkl,"[4, 7, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0
|
| 12 |
+
nncf_module.bert.encoder.layer.1.intermediate.dense,3,"(3072, 768)","(614, 768)","(3072,)","(614,)",row,See pkl,,BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0
|
| 13 |
+
nncf_module.bert.encoder.layer.1.output.dense,3,"(768, 3072)","(768, 614)","(768,)","(768,)",col,See pkl,,BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertOutput[output]/NNCFLinear[dense]/linear_0
|
| 14 |
+
nncf_module.bert.encoder.layer.2.attention.self.key,4,"(768, 768)","(768, 768)","(768,)","(768,)",group of 64 rows,See pkl,"[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0
|
| 15 |
+
nncf_module.bert.encoder.layer.2.attention.output.dense,4,"(768, 768)","(768, 768)","(768,)","(768,)",group of 64 cols,See pkl,"[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0
|
| 16 |
+
nncf_module.bert.encoder.layer.2.attention.self.value,4,"(768, 768)","(768, 768)","(768,)","(768,)",group of 64 rows,See pkl,"[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0
|
| 17 |
+
nncf_module.bert.encoder.layer.2.attention.self.query,4,"(768, 768)","(768, 768)","(768,)","(768,)",group of 64 rows,See pkl,"[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0
|
| 18 |
+
nncf_module.bert.encoder.layer.2.intermediate.dense,5,"(3072, 768)","(742, 768)","(3072,)","(742,)",row,See pkl,,BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0
|
| 19 |
+
nncf_module.bert.encoder.layer.2.output.dense,5,"(768, 3072)","(768, 742)","(768,)","(768,)",col,See pkl,,BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertOutput[output]/NNCFLinear[dense]/linear_0
|
| 20 |
+
nncf_module.bert.encoder.layer.3.attention.self.value,6,"(768, 768)","(384, 768)","(768,)","(384,)",group of 64 rows,See pkl,"[0, 1, 5, 8, 9, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0
|
| 21 |
+
nncf_module.bert.encoder.layer.3.attention.output.dense,6,"(768, 768)","(768, 384)","(768,)","(768,)",group of 64 cols,See pkl,"[0, 1, 5, 8, 9, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0
|
| 22 |
+
nncf_module.bert.encoder.layer.3.attention.self.query,6,"(768, 768)","(384, 768)","(768,)","(384,)",group of 64 rows,See pkl,"[0, 1, 5, 8, 9, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0
|
| 23 |
+
nncf_module.bert.encoder.layer.3.attention.self.key,6,"(768, 768)","(384, 768)","(768,)","(384,)",group of 64 rows,See pkl,"[0, 1, 5, 8, 9, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0
|
| 24 |
+
nncf_module.bert.encoder.layer.3.intermediate.dense,7,"(3072, 768)","(733, 768)","(3072,)","(733,)",row,See pkl,,BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0
|
| 25 |
+
nncf_module.bert.encoder.layer.3.output.dense,7,"(768, 3072)","(768, 733)","(768,)","(768,)",col,See pkl,,BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertOutput[output]/NNCFLinear[dense]/linear_0
|
| 26 |
+
nncf_module.bert.encoder.layer.4.attention.self.value,8,"(768, 768)","(768, 768)","(768,)","(768,)",group of 64 rows,See pkl,"[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0
|
| 27 |
+
nncf_module.bert.encoder.layer.4.attention.self.query,8,"(768, 768)","(768, 768)","(768,)","(768,)",group of 64 rows,See pkl,"[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0
|
| 28 |
+
nncf_module.bert.encoder.layer.4.attention.self.key,8,"(768, 768)","(768, 768)","(768,)","(768,)",group of 64 rows,See pkl,"[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0
|
| 29 |
+
nncf_module.bert.encoder.layer.4.attention.output.dense,8,"(768, 768)","(768, 768)","(768,)","(768,)",group of 64 cols,See pkl,"[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0
|
| 30 |
+
nncf_module.bert.encoder.layer.4.intermediate.dense,9,"(3072, 768)","(660, 768)","(3072,)","(660,)",row,See pkl,,BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0
|
| 31 |
+
nncf_module.bert.encoder.layer.4.output.dense,9,"(768, 3072)","(768, 660)","(768,)","(768,)",col,See pkl,,BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertOutput[output]/NNCFLinear[dense]/linear_0
|
| 32 |
+
nncf_module.bert.encoder.layer.5.attention.self.query,10,"(768, 768)","(256, 768)","(768,)","(256,)",group of 64 rows,See pkl,"[4, 8, 9, 10]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0
|
| 33 |
+
nncf_module.bert.encoder.layer.5.attention.output.dense,10,"(768, 768)","(768, 256)","(768,)","(768,)",group of 64 cols,See pkl,"[4, 8, 9, 10]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0
|
| 34 |
+
nncf_module.bert.encoder.layer.5.attention.self.value,10,"(768, 768)","(256, 768)","(768,)","(256,)",group of 64 rows,See pkl,"[4, 8, 9, 10]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0
|
| 35 |
+
nncf_module.bert.encoder.layer.5.attention.self.key,10,"(768, 768)","(256, 768)","(768,)","(256,)",group of 64 rows,See pkl,"[4, 8, 9, 10]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0
|
| 36 |
+
nncf_module.bert.encoder.layer.5.intermediate.dense,11,"(3072, 768)","(623, 768)","(3072,)","(623,)",row,See pkl,,BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0
|
| 37 |
+
nncf_module.bert.encoder.layer.5.output.dense,11,"(768, 3072)","(768, 623)","(768,)","(768,)",col,See pkl,,BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertOutput[output]/NNCFLinear[dense]/linear_0
|
| 38 |
+
nncf_module.bert.encoder.layer.6.attention.self.value,12,"(768, 768)","(192, 768)","(768,)","(192,)",group of 64 rows,See pkl,"[4, 8, 9]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0
|
| 39 |
+
nncf_module.bert.encoder.layer.6.attention.self.query,12,"(768, 768)","(192, 768)","(768,)","(192,)",group of 64 rows,See pkl,"[4, 8, 9]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0
|
| 40 |
+
nncf_module.bert.encoder.layer.6.attention.self.key,12,"(768, 768)","(192, 768)","(768,)","(192,)",group of 64 rows,See pkl,"[4, 8, 9]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0
|
| 41 |
+
nncf_module.bert.encoder.layer.6.attention.output.dense,12,"(768, 768)","(768, 192)","(768,)","(768,)",group of 64 cols,See pkl,"[4, 8, 9]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0
|
| 42 |
+
nncf_module.bert.encoder.layer.6.intermediate.dense,13,"(3072, 768)","(463, 768)","(3072,)","(463,)",row,See pkl,,BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0
|
| 43 |
+
nncf_module.bert.encoder.layer.6.output.dense,13,"(768, 3072)","(768, 463)","(768,)","(768,)",col,See pkl,,BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertOutput[output]/NNCFLinear[dense]/linear_0
|
| 44 |
+
nncf_module.bert.encoder.layer.7.attention.output.dense,14,"(768, 768)","(768, 768)","(768,)","(768,)",group of 64 cols,See pkl,"[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0
|
| 45 |
+
nncf_module.bert.encoder.layer.7.attention.self.key,14,"(768, 768)","(768, 768)","(768,)","(768,)",group of 64 rows,See pkl,"[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0
|
| 46 |
+
nncf_module.bert.encoder.layer.7.attention.self.value,14,"(768, 768)","(768, 768)","(768,)","(768,)",group of 64 rows,See pkl,"[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0
|
| 47 |
+
nncf_module.bert.encoder.layer.7.attention.self.query,14,"(768, 768)","(768, 768)","(768,)","(768,)",group of 64 rows,See pkl,"[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0
|
| 48 |
+
nncf_module.bert.encoder.layer.7.intermediate.dense,15,"(3072, 768)","(358, 768)","(3072,)","(358,)",row,See pkl,,BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0
|
| 49 |
+
nncf_module.bert.encoder.layer.7.output.dense,15,"(768, 3072)","(768, 358)","(768,)","(768,)",col,See pkl,,BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertOutput[output]/NNCFLinear[dense]/linear_0
|
| 50 |
+
nncf_module.bert.encoder.layer.8.attention.output.dense,16,"(768, 768)","(768, 192)","(768,)","(768,)",group of 64 cols,See pkl,"[9, 10, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0
|
| 51 |
+
nncf_module.bert.encoder.layer.8.attention.self.query,16,"(768, 768)","(192, 768)","(768,)","(192,)",group of 64 rows,See pkl,"[9, 10, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0
|
| 52 |
+
nncf_module.bert.encoder.layer.8.attention.self.key,16,"(768, 768)","(192, 768)","(768,)","(192,)",group of 64 rows,See pkl,"[9, 10, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0
|
| 53 |
+
nncf_module.bert.encoder.layer.8.attention.self.value,16,"(768, 768)","(192, 768)","(768,)","(192,)",group of 64 rows,See pkl,"[9, 10, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0
|
| 54 |
+
nncf_module.bert.encoder.layer.8.output.dense,17,"(768, 3072)","(768, 217)","(768,)","(768,)",col,See pkl,,BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertOutput[output]/NNCFLinear[dense]/linear_0
|
| 55 |
+
nncf_module.bert.encoder.layer.8.intermediate.dense,17,"(3072, 768)","(217, 768)","(3072,)","(217,)",row,See pkl,,BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0
|
| 56 |
+
nncf_module.bert.encoder.layer.9.attention.self.query,18,"(768, 768)","(320, 768)","(768,)","(320,)",group of 64 rows,See pkl,"[0, 2, 6, 8, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0
|
| 57 |
+
nncf_module.bert.encoder.layer.9.attention.output.dense,18,"(768, 768)","(768, 320)","(768,)","(768,)",group of 64 cols,See pkl,"[0, 2, 6, 8, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0
|
| 58 |
+
nncf_module.bert.encoder.layer.9.attention.self.value,18,"(768, 768)","(320, 768)","(768,)","(320,)",group of 64 rows,See pkl,"[0, 2, 6, 8, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0
|
| 59 |
+
nncf_module.bert.encoder.layer.9.attention.self.key,18,"(768, 768)","(320, 768)","(768,)","(320,)",group of 64 rows,See pkl,"[0, 2, 6, 8, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0
|
| 60 |
+
nncf_module.bert.encoder.layer.9.output.dense,19,"(768, 3072)","(768, 102)","(768,)","(768,)",col,See pkl,,BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertOutput[output]/NNCFLinear[dense]/linear_0
|
| 61 |
+
nncf_module.bert.encoder.layer.9.intermediate.dense,19,"(3072, 768)","(102, 768)","(3072,)","(102,)",row,See pkl,,BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0
|
| 62 |
+
nncf_module.bert.encoder.layer.10.attention.self.key,20,"(768, 768)","(192, 768)","(768,)","(192,)",group of 64 rows,See pkl,"[3, 10, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0
|
| 63 |
+
nncf_module.bert.encoder.layer.10.attention.self.query,20,"(768, 768)","(192, 768)","(768,)","(192,)",group of 64 rows,See pkl,"[3, 10, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0
|
| 64 |
+
nncf_module.bert.encoder.layer.10.attention.output.dense,20,"(768, 768)","(768, 192)","(768,)","(768,)",group of 64 cols,See pkl,"[3, 10, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0
|
| 65 |
+
nncf_module.bert.encoder.layer.10.attention.self.value,20,"(768, 768)","(192, 768)","(768,)","(192,)",group of 64 rows,See pkl,"[3, 10, 11]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0
|
| 66 |
+
nncf_module.bert.encoder.layer.10.output.dense,21,"(768, 3072)","(768, 110)","(768,)","(768,)",col,See pkl,,BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertOutput[output]/NNCFLinear[dense]/linear_0
|
| 67 |
+
nncf_module.bert.encoder.layer.10.intermediate.dense,21,"(3072, 768)","(110, 768)","(3072,)","(110,)",row,See pkl,,BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0
|
| 68 |
+
nncf_module.bert.encoder.layer.11.attention.self.query,22,"(768, 768)","(192, 768)","(768,)","(192,)",group of 64 rows,See pkl,"[1, 2, 3]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0
|
| 69 |
+
nncf_module.bert.encoder.layer.11.attention.self.key,22,"(768, 768)","(192, 768)","(768,)","(192,)",group of 64 rows,See pkl,"[1, 2, 3]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0
|
| 70 |
+
nncf_module.bert.encoder.layer.11.attention.output.dense,22,"(768, 768)","(768, 192)","(768,)","(768,)",group of 64 cols,See pkl,"[1, 2, 3]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0
|
| 71 |
+
nncf_module.bert.encoder.layer.11.attention.self.value,22,"(768, 768)","(192, 768)","(768,)","(192,)",group of 64 rows,See pkl,"[1, 2, 3]",BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0
|
| 72 |
+
nncf_module.bert.encoder.layer.11.output.dense,23,"(768, 3072)","(768, 123)","(768,)","(768,)",col,See pkl,,BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertOutput[output]/NNCFLinear[dense]/linear_0
|
| 73 |
+
nncf_module.bert.encoder.layer.11.intermediate.dense,23,"(3072, 768)","(123, 768)","(3072,)","(123,)",row,See pkl,,BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0
|
r0.030-squad-bert-b-mvmt-8bit/ir/sparsity_structures.md
ADDED
|
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
| | pt_module_name | block_id | orig_w_shape | final_w_shape | orig_b_shape | final_b_shape | prune_by | id_to_keep | head_id_to_keep | nncf_graph_node |
|
| 2 |
+
|---:|:---------------------------------------------------------|-----------:|:---------------|:----------------|:---------------|:----------------|:-----------------|:-------------|:---------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
| 3 |
+
| 0 | nncf_module.bert.encoder.layer.0.attention.self.key | 0 | (768, 768) | (192, 768) | (768,) | (192,) | group of 64 rows | See pkl | [3, 8, 10] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0 |
|
| 4 |
+
| 1 | nncf_module.bert.encoder.layer.0.attention.output.dense | 0 | (768, 768) | (768, 192) | (768,) | (768,) | group of 64 cols | See pkl | [3, 8, 10] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0 |
|
| 5 |
+
| 2 | nncf_module.bert.encoder.layer.0.attention.self.query | 0 | (768, 768) | (192, 768) | (768,) | (192,) | group of 64 rows | See pkl | [3, 8, 10] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0 |
|
| 6 |
+
| 3 | nncf_module.bert.encoder.layer.0.attention.self.value | 0 | (768, 768) | (192, 768) | (768,) | (192,) | group of 64 rows | See pkl | [3, 8, 10] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0 |
|
| 7 |
+
| 4 | nncf_module.bert.encoder.layer.0.output.dense | 1 | (768, 3072) | (768, 612) | (768,) | (768,) | col | See pkl | | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertOutput[output]/NNCFLinear[dense]/linear_0 |
|
| 8 |
+
| 5 | nncf_module.bert.encoder.layer.0.intermediate.dense | 1 | (3072, 768) | (612, 768) | (3072,) | (612,) | row | See pkl | | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0 |
|
| 9 |
+
| 6 | nncf_module.bert.encoder.layer.1.attention.self.value | 2 | (768, 768) | (192, 768) | (768,) | (192,) | group of 64 rows | See pkl | [4, 7, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0 |
|
| 10 |
+
| 7 | nncf_module.bert.encoder.layer.1.attention.self.query | 2 | (768, 768) | (192, 768) | (768,) | (192,) | group of 64 rows | See pkl | [4, 7, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0 |
|
| 11 |
+
| 8 | nncf_module.bert.encoder.layer.1.attention.self.key | 2 | (768, 768) | (192, 768) | (768,) | (192,) | group of 64 rows | See pkl | [4, 7, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0 |
|
| 12 |
+
| 9 | nncf_module.bert.encoder.layer.1.attention.output.dense | 2 | (768, 768) | (768, 192) | (768,) | (768,) | group of 64 cols | See pkl | [4, 7, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0 |
|
| 13 |
+
| 10 | nncf_module.bert.encoder.layer.1.intermediate.dense | 3 | (3072, 768) | (614, 768) | (3072,) | (614,) | row | See pkl | | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0 |
|
| 14 |
+
| 11 | nncf_module.bert.encoder.layer.1.output.dense | 3 | (768, 3072) | (768, 614) | (768,) | (768,) | col | See pkl | | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertOutput[output]/NNCFLinear[dense]/linear_0 |
|
| 15 |
+
| 12 | nncf_module.bert.encoder.layer.2.attention.self.key | 4 | (768, 768) | (768, 768) | (768,) | (768,) | group of 64 rows | See pkl | [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0 |
|
| 16 |
+
| 13 | nncf_module.bert.encoder.layer.2.attention.output.dense | 4 | (768, 768) | (768, 768) | (768,) | (768,) | group of 64 cols | See pkl | [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0 |
|
| 17 |
+
| 14 | nncf_module.bert.encoder.layer.2.attention.self.value | 4 | (768, 768) | (768, 768) | (768,) | (768,) | group of 64 rows | See pkl | [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0 |
|
| 18 |
+
| 15 | nncf_module.bert.encoder.layer.2.attention.self.query | 4 | (768, 768) | (768, 768) | (768,) | (768,) | group of 64 rows | See pkl | [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0 |
|
| 19 |
+
| 16 | nncf_module.bert.encoder.layer.2.intermediate.dense | 5 | (3072, 768) | (742, 768) | (3072,) | (742,) | row | See pkl | | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0 |
|
| 20 |
+
| 17 | nncf_module.bert.encoder.layer.2.output.dense | 5 | (768, 3072) | (768, 742) | (768,) | (768,) | col | See pkl | | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertOutput[output]/NNCFLinear[dense]/linear_0 |
|
| 21 |
+
| 18 | nncf_module.bert.encoder.layer.3.attention.self.value | 6 | (768, 768) | (384, 768) | (768,) | (384,) | group of 64 rows | See pkl | [0, 1, 5, 8, 9, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0 |
|
| 22 |
+
| 19 | nncf_module.bert.encoder.layer.3.attention.output.dense | 6 | (768, 768) | (768, 384) | (768,) | (768,) | group of 64 cols | See pkl | [0, 1, 5, 8, 9, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0 |
|
| 23 |
+
| 20 | nncf_module.bert.encoder.layer.3.attention.self.query | 6 | (768, 768) | (384, 768) | (768,) | (384,) | group of 64 rows | See pkl | [0, 1, 5, 8, 9, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0 |
|
| 24 |
+
| 21 | nncf_module.bert.encoder.layer.3.attention.self.key | 6 | (768, 768) | (384, 768) | (768,) | (384,) | group of 64 rows | See pkl | [0, 1, 5, 8, 9, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0 |
|
| 25 |
+
| 22 | nncf_module.bert.encoder.layer.3.intermediate.dense | 7 | (3072, 768) | (733, 768) | (3072,) | (733,) | row | See pkl | | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0 |
|
| 26 |
+
| 23 | nncf_module.bert.encoder.layer.3.output.dense | 7 | (768, 3072) | (768, 733) | (768,) | (768,) | col | See pkl | | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertOutput[output]/NNCFLinear[dense]/linear_0 |
|
| 27 |
+
| 24 | nncf_module.bert.encoder.layer.4.attention.self.value | 8 | (768, 768) | (768, 768) | (768,) | (768,) | group of 64 rows | See pkl | [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0 |
|
| 28 |
+
| 25 | nncf_module.bert.encoder.layer.4.attention.self.query | 8 | (768, 768) | (768, 768) | (768,) | (768,) | group of 64 rows | See pkl | [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0 |
|
| 29 |
+
| 26 | nncf_module.bert.encoder.layer.4.attention.self.key | 8 | (768, 768) | (768, 768) | (768,) | (768,) | group of 64 rows | See pkl | [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0 |
|
| 30 |
+
| 27 | nncf_module.bert.encoder.layer.4.attention.output.dense | 8 | (768, 768) | (768, 768) | (768,) | (768,) | group of 64 cols | See pkl | [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0 |
|
| 31 |
+
| 28 | nncf_module.bert.encoder.layer.4.intermediate.dense | 9 | (3072, 768) | (660, 768) | (3072,) | (660,) | row | See pkl | | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0 |
|
| 32 |
+
| 29 | nncf_module.bert.encoder.layer.4.output.dense | 9 | (768, 3072) | (768, 660) | (768,) | (768,) | col | See pkl | | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertOutput[output]/NNCFLinear[dense]/linear_0 |
|
| 33 |
+
| 30 | nncf_module.bert.encoder.layer.5.attention.self.query | 10 | (768, 768) | (256, 768) | (768,) | (256,) | group of 64 rows | See pkl | [4, 8, 9, 10] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0 |
|
| 34 |
+
| 31 | nncf_module.bert.encoder.layer.5.attention.output.dense | 10 | (768, 768) | (768, 256) | (768,) | (768,) | group of 64 cols | See pkl | [4, 8, 9, 10] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0 |
|
| 35 |
+
| 32 | nncf_module.bert.encoder.layer.5.attention.self.value | 10 | (768, 768) | (256, 768) | (768,) | (256,) | group of 64 rows | See pkl | [4, 8, 9, 10] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0 |
|
| 36 |
+
| 33 | nncf_module.bert.encoder.layer.5.attention.self.key | 10 | (768, 768) | (256, 768) | (768,) | (256,) | group of 64 rows | See pkl | [4, 8, 9, 10] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0 |
|
| 37 |
+
| 34 | nncf_module.bert.encoder.layer.5.intermediate.dense | 11 | (3072, 768) | (623, 768) | (3072,) | (623,) | row | See pkl | | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0 |
|
| 38 |
+
| 35 | nncf_module.bert.encoder.layer.5.output.dense | 11 | (768, 3072) | (768, 623) | (768,) | (768,) | col | See pkl | | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertOutput[output]/NNCFLinear[dense]/linear_0 |
|
| 39 |
+
| 36 | nncf_module.bert.encoder.layer.6.attention.self.value | 12 | (768, 768) | (192, 768) | (768,) | (192,) | group of 64 rows | See pkl | [4, 8, 9] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0 |
|
| 40 |
+
| 37 | nncf_module.bert.encoder.layer.6.attention.self.query | 12 | (768, 768) | (192, 768) | (768,) | (192,) | group of 64 rows | See pkl | [4, 8, 9] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0 |
|
| 41 |
+
| 38 | nncf_module.bert.encoder.layer.6.attention.self.key | 12 | (768, 768) | (192, 768) | (768,) | (192,) | group of 64 rows | See pkl | [4, 8, 9] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0 |
|
| 42 |
+
| 39 | nncf_module.bert.encoder.layer.6.attention.output.dense | 12 | (768, 768) | (768, 192) | (768,) | (768,) | group of 64 cols | See pkl | [4, 8, 9] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0 |
|
| 43 |
+
| 40 | nncf_module.bert.encoder.layer.6.intermediate.dense | 13 | (3072, 768) | (463, 768) | (3072,) | (463,) | row | See pkl | | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0 |
|
| 44 |
+
| 41 | nncf_module.bert.encoder.layer.6.output.dense | 13 | (768, 3072) | (768, 463) | (768,) | (768,) | col | See pkl | | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertOutput[output]/NNCFLinear[dense]/linear_0 |
|
| 45 |
+
| 42 | nncf_module.bert.encoder.layer.7.attention.output.dense | 14 | (768, 768) | (768, 768) | (768,) | (768,) | group of 64 cols | See pkl | [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0 |
|
| 46 |
+
| 43 | nncf_module.bert.encoder.layer.7.attention.self.key | 14 | (768, 768) | (768, 768) | (768,) | (768,) | group of 64 rows | See pkl | [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0 |
|
| 47 |
+
| 44 | nncf_module.bert.encoder.layer.7.attention.self.value | 14 | (768, 768) | (768, 768) | (768,) | (768,) | group of 64 rows | See pkl | [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0 |
|
| 48 |
+
| 45 | nncf_module.bert.encoder.layer.7.attention.self.query | 14 | (768, 768) | (768, 768) | (768,) | (768,) | group of 64 rows | See pkl | [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0 |
|
| 49 |
+
| 46 | nncf_module.bert.encoder.layer.7.intermediate.dense | 15 | (3072, 768) | (358, 768) | (3072,) | (358,) | row | See pkl | | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0 |
|
| 50 |
+
| 47 | nncf_module.bert.encoder.layer.7.output.dense | 15 | (768, 3072) | (768, 358) | (768,) | (768,) | col | See pkl | | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertOutput[output]/NNCFLinear[dense]/linear_0 |
|
| 51 |
+
| 48 | nncf_module.bert.encoder.layer.8.attention.output.dense | 16 | (768, 768) | (768, 192) | (768,) | (768,) | group of 64 cols | See pkl | [9, 10, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0 |
|
| 52 |
+
| 49 | nncf_module.bert.encoder.layer.8.attention.self.query | 16 | (768, 768) | (192, 768) | (768,) | (192,) | group of 64 rows | See pkl | [9, 10, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0 |
|
| 53 |
+
| 50 | nncf_module.bert.encoder.layer.8.attention.self.key | 16 | (768, 768) | (192, 768) | (768,) | (192,) | group of 64 rows | See pkl | [9, 10, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0 |
|
| 54 |
+
| 51 | nncf_module.bert.encoder.layer.8.attention.self.value | 16 | (768, 768) | (192, 768) | (768,) | (192,) | group of 64 rows | See pkl | [9, 10, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0 |
|
| 55 |
+
| 52 | nncf_module.bert.encoder.layer.8.output.dense | 17 | (768, 3072) | (768, 217) | (768,) | (768,) | col | See pkl | | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertOutput[output]/NNCFLinear[dense]/linear_0 |
|
| 56 |
+
| 53 | nncf_module.bert.encoder.layer.8.intermediate.dense | 17 | (3072, 768) | (217, 768) | (3072,) | (217,) | row | See pkl | | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0 |
|
| 57 |
+
| 54 | nncf_module.bert.encoder.layer.9.attention.self.query | 18 | (768, 768) | (320, 768) | (768,) | (320,) | group of 64 rows | See pkl | [0, 2, 6, 8, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0 |
|
| 58 |
+
| 55 | nncf_module.bert.encoder.layer.9.attention.output.dense | 18 | (768, 768) | (768, 320) | (768,) | (768,) | group of 64 cols | See pkl | [0, 2, 6, 8, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0 |
|
| 59 |
+
| 56 | nncf_module.bert.encoder.layer.9.attention.self.value | 18 | (768, 768) | (320, 768) | (768,) | (320,) | group of 64 rows | See pkl | [0, 2, 6, 8, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0 |
|
| 60 |
+
| 57 | nncf_module.bert.encoder.layer.9.attention.self.key | 18 | (768, 768) | (320, 768) | (768,) | (320,) | group of 64 rows | See pkl | [0, 2, 6, 8, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0 |
|
| 61 |
+
| 58 | nncf_module.bert.encoder.layer.9.output.dense | 19 | (768, 3072) | (768, 102) | (768,) | (768,) | col | See pkl | | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertOutput[output]/NNCFLinear[dense]/linear_0 |
|
| 62 |
+
| 59 | nncf_module.bert.encoder.layer.9.intermediate.dense | 19 | (3072, 768) | (102, 768) | (3072,) | (102,) | row | See pkl | | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0 |
|
| 63 |
+
| 60 | nncf_module.bert.encoder.layer.10.attention.self.key | 20 | (768, 768) | (192, 768) | (768,) | (192,) | group of 64 rows | See pkl | [3, 10, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0 |
|
| 64 |
+
| 61 | nncf_module.bert.encoder.layer.10.attention.self.query | 20 | (768, 768) | (192, 768) | (768,) | (192,) | group of 64 rows | See pkl | [3, 10, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0 |
|
| 65 |
+
| 62 | nncf_module.bert.encoder.layer.10.attention.output.dense | 20 | (768, 768) | (768, 192) | (768,) | (768,) | group of 64 cols | See pkl | [3, 10, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0 |
|
| 66 |
+
| 63 | nncf_module.bert.encoder.layer.10.attention.self.value | 20 | (768, 768) | (192, 768) | (768,) | (192,) | group of 64 rows | See pkl | [3, 10, 11] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0 |
|
| 67 |
+
| 64 | nncf_module.bert.encoder.layer.10.output.dense | 21 | (768, 3072) | (768, 110) | (768,) | (768,) | col | See pkl | | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertOutput[output]/NNCFLinear[dense]/linear_0 |
|
| 68 |
+
| 65 | nncf_module.bert.encoder.layer.10.intermediate.dense | 21 | (3072, 768) | (110, 768) | (3072,) | (110,) | row | See pkl | | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0 |
|
| 69 |
+
| 66 | nncf_module.bert.encoder.layer.11.attention.self.query | 22 | (768, 768) | (192, 768) | (768,) | (192,) | group of 64 rows | See pkl | [1, 2, 3] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0 |
|
| 70 |
+
| 67 | nncf_module.bert.encoder.layer.11.attention.self.key | 22 | (768, 768) | (192, 768) | (768,) | (192,) | group of 64 rows | See pkl | [1, 2, 3] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0 |
|
| 71 |
+
| 68 | nncf_module.bert.encoder.layer.11.attention.output.dense | 22 | (768, 768) | (768, 192) | (768,) | (768,) | group of 64 cols | See pkl | [1, 2, 3] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0 |
|
| 72 |
+
| 69 | nncf_module.bert.encoder.layer.11.attention.self.value | 22 | (768, 768) | (192, 768) | (768,) | (192,) | group of 64 rows | See pkl | [1, 2, 3] | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0 |
|
| 73 |
+
| 70 | nncf_module.bert.encoder.layer.11.output.dense | 23 | (768, 3072) | (768, 123) | (768,) | (768,) | col | See pkl | | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertOutput[output]/NNCFLinear[dense]/linear_0 |
|
| 74 |
+
| 71 | nncf_module.bert.encoder.layer.11.intermediate.dense | 23 | (3072, 768) | (123, 768) | (3072,) | (123,) | row | See pkl | | BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0 |
|
r0.030-squad-bert-b-mvmt-8bit/ir/sparsity_structures.pkl
ADDED
|
Binary file (102 kB). View file
|
|
|
r0.030-squad-bert-b-mvmt-8bit/ir/squad-BertForQuestionAnswering.cropped.8bit.bin
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:5ef01c1e2fba1df78b087de1a05ba2afb178f5c006a0f5abcc423d345384106a
|
| 3 |
+
size 45939236
|
r0.030-squad-bert-b-mvmt-8bit/ir/squad-BertForQuestionAnswering.cropped.8bit.mapping
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
r0.030-squad-bert-b-mvmt-8bit/ir/squad-BertForQuestionAnswering.cropped.8bit.onnx
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:4a625e63f394da7cb3fd715fc4d3feaf6f9629b5e479a803b5905f5a1e8c549e
|
| 3 |
+
size 182971746
|
r0.030-squad-bert-b-mvmt-8bit/ir/squad-BertForQuestionAnswering.cropped.8bit.xml
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
r0.030-squad-bert-b-mvmt-8bit/nncf-mvmt-p3.json
ADDED
|
@@ -0,0 +1,67 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"input_info": [
|
| 3 |
+
{
|
| 4 |
+
"sample_size": [1, 384],
|
| 5 |
+
"type": "long"
|
| 6 |
+
},
|
| 7 |
+
{
|
| 8 |
+
"sample_size": [1, 384],
|
| 9 |
+
"type": "long"
|
| 10 |
+
},
|
| 11 |
+
{
|
| 12 |
+
"sample_size": [1, 384],
|
| 13 |
+
"type": "long"
|
| 14 |
+
}
|
| 15 |
+
],
|
| 16 |
+
"compression":
|
| 17 |
+
[
|
| 18 |
+
{
|
| 19 |
+
"algorithm": "movement_sparsity",
|
| 20 |
+
"params": {
|
| 21 |
+
"schedule": "threshold_polynomial_decay",
|
| 22 |
+
"power": 3,
|
| 23 |
+
"init_importance_threshold": 0.0,
|
| 24 |
+
"final_importance_threshold": 0.1,
|
| 25 |
+
"warmup_start_epoch": 1,
|
| 26 |
+
"warmup_end_epoch": 10,
|
| 27 |
+
"steps_per_epoch": 5533,
|
| 28 |
+
"importance_regularization_factor": 0.03,
|
| 29 |
+
"update_per_optimizer_step": true
|
| 30 |
+
},
|
| 31 |
+
"sparse_structure_by_scopes": [
|
| 32 |
+
["block", [32, 32], "{re}.*BertAttention*"],
|
| 33 |
+
["per_dim", [0], "{re}.*BertIntermediate*"],
|
| 34 |
+
["per_dim", [1], "{re}.*BertOutput*"]
|
| 35 |
+
],
|
| 36 |
+
"ignored_scopes": ["{re}.*NNCFEmbedding", "{re}.*qa_outputs*"]
|
| 37 |
+
},
|
| 38 |
+
{
|
| 39 |
+
"algorithm": "quantization",
|
| 40 |
+
"initializer": {
|
| 41 |
+
"range": {
|
| 42 |
+
"num_init_samples": 32,
|
| 43 |
+
"type": "percentile",
|
| 44 |
+
"params":
|
| 45 |
+
{
|
| 46 |
+
"min_percentile": 0.01,
|
| 47 |
+
"max_percentile": 99.99
|
| 48 |
+
}
|
| 49 |
+
},
|
| 50 |
+
|
| 51 |
+
"batchnorm_adaptation": {
|
| 52 |
+
"num_bn_adaptation_samples": 200
|
| 53 |
+
}
|
| 54 |
+
},
|
| 55 |
+
"activations":
|
| 56 |
+
{
|
| 57 |
+
"mode": "symmetric"
|
| 58 |
+
},
|
| 59 |
+
"weights":
|
| 60 |
+
{
|
| 61 |
+
"mode": "symmetric",
|
| 62 |
+
"signed": true,
|
| 63 |
+
"per_channel": false
|
| 64 |
+
}
|
| 65 |
+
}
|
| 66 |
+
]
|
| 67 |
+
}
|
r0.030-squad-bert-b-mvmt-8bit/original_graph.dot
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|