Remove nested directory: BitTransformerLM/tests/test_distil.py
Browse files
BitTransformerLM/tests/test_distil.py
DELETED
|
@@ -1,20 +0,0 @@
|
|
| 1 |
-
import os, sys; sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
|
| 2 |
-
|
| 3 |
-
import torch
|
| 4 |
-
|
| 5 |
-
from bit_transformer import BitTransformerLM, distill_step, TelemetryLog
|
| 6 |
-
|
| 7 |
-
|
| 8 |
-
def test_distill_prunes_weights():
    """Distillation at scale=0.5 must zero at least half of all Linear weights.

    Builds a tiny BitTransformerLM, feeds a synthetic telemetry log carrying
    random attention maps, runs one ``distill_step``, then counts zeroed
    entries across every ``nn.Linear`` weight matrix in the pruned model.
    """
    model = BitTransformerLM(
        d_model=32, nhead=4, num_layers=1, dim_feedforward=64, max_seq_len=8
    )
    # Shape assumed by TelemetryLog: (batch, heads, seq, seq) — matches nhead=4, max_seq_len=8.
    attention = torch.rand(2, 4, 8, 8)
    log = TelemetryLog(attention_maps=attention)

    pruned = distill_step(model, scale=0.5, telemetry=log)

    # Gather every Linear weight once, then tally sizes and exact zeros.
    weights = [
        mod.weight.detach()
        for mod in pruned.modules()
        if isinstance(mod, torch.nn.Linear)
    ]
    total = sum(w.numel() for w in weights)
    zeros = sum((w == 0).sum().item() for w in weights)

    # scale=0.5 promises at least a 50% sparsity across Linear layers.
    assert zeros >= int(total * 0.5)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|