WCNegentropy committed on
Commit
c92ac3a
·
verified ·
1 Parent(s): 1deb983

Remove nested directory: BitTransformerLM/bit_transformer/utils.py

Browse files
BitTransformerLM/bit_transformer/utils.py DELETED
@@ -1,28 +0,0 @@
1
- import os
2
- import gzip
3
- import torch
4
- import torch.nn as nn
5
-
6
-
7
def save_model(model: torch.nn.Module, path: str) -> None:
    """Serialize the entire ``model`` object to ``path`` with gzip compression.

    The whole module (architecture + weights) is pickled via ``torch.save``,
    so it must be read back with ``load_model``.

    Parameters
    ----------
    model:
        The module to serialize.
    path:
        Destination file path; missing parent directories are created.
    """
    parent = os.path.dirname(path)
    # ``os.makedirs("")`` raises FileNotFoundError, so only create the
    # parent directory when ``path`` actually contains one.
    if parent:
        os.makedirs(parent, exist_ok=True)
    with gzip.open(path, 'wb') as f:
        torch.save(model, f)
12
-
13
-
14
def load_model(path: str) -> torch.nn.Module:
    """Read back a model previously written by ``save_model``.

    NOTE(review): ``weights_only=False`` unpickles arbitrary Python objects;
    only load checkpoints from trusted sources.
    """
    with gzip.open(path, 'rb') as f:
        # Map onto CPU so checkpoints saved on GPU load on any machine.
        return torch.load(f, map_location="cpu", weights_only=False)
19
-
20
-
21
def set_dropout(model: torch.nn.Module, p: float) -> None:
    """Assign dropout probability ``p`` to every ``nn.Dropout`` in ``model``."""
    dropout_layers = (m for m in model.modules() if isinstance(m, nn.Dropout))
    for layer in dropout_layers:
        layer.p = p
26
-
27
-
28
# Public API of this utility module.
__all__ = ["save_model", "load_model", "set_dropout"]