|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
from __future__ import annotations |
|
|
|
|
|
import torch.nn as nn |
|
|
|
|
|
|
|
|
class DropPath(nn.Module):
    """Stochastic drop paths per sample for residual blocks.

    During training, zeroes the entire residual branch for a random subset of
    samples in the batch ("stochastic depth"). At inference time it is the
    identity.

    Based on:
    https://github.com/rwightman/pytorch-image-models
    """

    def __init__(self, drop_prob: float = 0.0, scale_by_keep: bool = True) -> None:
        """
        Args:
            drop_prob: drop path probability.
            scale_by_keep: scaling by non-dropped probability.

        Raises:
            ValueError: when ``drop_prob`` is not in the interval ``[0, 1]``.
        """
        super().__init__()

        # Fail fast: validate before storing any state, so an invalid value
        # never leaves a partially configured module behind.
        if not (0 <= drop_prob <= 1):
            raise ValueError("Drop path prob should be between 0 and 1.")

        self.drop_prob = drop_prob
        self.scale_by_keep = scale_by_keep

    def drop_path(self, x, drop_prob: float = 0.0, training: bool = False, scale_by_keep: bool = True):
        """Randomly zero whole samples of ``x`` with probability ``drop_prob``.

        Args:
            x: input tensor; the first dimension is treated as the batch.
            drop_prob: probability of dropping each sample's path.
            training: only drop when True; otherwise return ``x`` unchanged.
            scale_by_keep: if True, scale surviving samples by ``1 / keep_prob``
                so the expected activation magnitude is preserved.

        Returns:
            A tensor of the same shape as ``x``.
        """
        if drop_prob == 0.0 or not training:
            return x
        keep_prob = 1 - drop_prob
        # One Bernoulli draw per sample; trailing singleton dims broadcast the
        # keep/drop decision over all non-batch dimensions.
        shape = (x.shape[0],) + (1,) * (x.ndim - 1)
        random_tensor = x.new_empty(shape).bernoulli_(keep_prob)
        # keep_prob > 0.0 guards against division by zero when drop_prob == 1.0.
        if keep_prob > 0.0 and scale_by_keep:
            random_tensor.div_(keep_prob)
        return x * random_tensor

    def forward(self, x):
        """Apply drop path using the module's configured probability and mode."""
        return self.drop_path(x, self.drop_prob, self.training, self.scale_by_keep)
|
|
|