🔨 [Add] round_up / chunk function
Browse files- yolo/tools/module_helper.py +26 -2
yolo/tools/module_helper.py
CHANGED
|
@@ -1,6 +1,6 @@
|
|
| 1 |
-
from typing import Tuple
|
| 2 |
|
| 3 |
-
from torch import nn
|
| 4 |
from torch.nn.common_types import _size_2_t
|
| 5 |
|
| 6 |
|
|
@@ -34,3 +34,27 @@ def get_activation(activation: str) -> nn.Module:
|
|
| 34 |
return activation_map[activation.lower()]()
|
| 35 |
else:
|
| 36 |
raise ValueError(f"Activation function '{activation}' is not found in torch.nn")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Tuple, Union
|
| 2 |
|
| 3 |
+
from torch import Tensor, nn
|
| 4 |
from torch.nn.common_types import _size_2_t
|
| 5 |
|
| 6 |
|
|
|
|
| 34 |
return activation_map[activation.lower()]()
|
| 35 |
else:
|
| 36 |
raise ValueError(f"Activation function '{activation}' is not found in torch.nn")
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
def round_up(x: Union[int, Tensor], div: int = 1) -> Union[int, Tensor]:
    """Round ``x`` up to the nearest multiple of ``div``.

    Values already divisible by ``div`` are returned unchanged. For a
    Tensor argument the rounding is applied elementwise (``%`` and ``+``
    broadcast as usual).
    """
    # (-x) % div is the gap to the next multiple (0 when already aligned);
    # this form is correct for negative x as well, since Python's % always
    # yields a result with the sign of the divisor.
    shortfall = (-x) % div
    return x + shortfall
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
def make_chunk(input_list, chunk_num):
|
| 47 |
+
"""
|
| 48 |
+
Args: input_list: [0, 1, 2, 3, 4, 5], chunk: 2
|
| 49 |
+
Return: [[0, 1, 2], [3, 4, 5]]
|
| 50 |
+
"""
|
| 51 |
+
list_size = len(input_list)
|
| 52 |
+
|
| 53 |
+
if list_size % chunk_num != 0:
|
| 54 |
+
raise ValueError(
|
| 55 |
+
f"The length of the input list ({list_size}) must be exactly\
|
| 56 |
+
divisible by the number of chunks ({chunk_num})."
|
| 57 |
+
)
|
| 58 |
+
|
| 59 |
+
chunk_size = list_size // chunk_num
|
| 60 |
+
return [input_list[i : i + chunk_size] for i in range(0, list_size, chunk_size)]
|