Upload model
Browse files — modeling.py (+14, −1)
modeling.py
CHANGED
|
@@ -433,7 +433,7 @@ class LSPTransformer(nn.Module):
|
|
| 433 |
def init_weights(self) -> None:
|
| 434 |
prior_prob = 0.01
|
| 435 |
bias_value = -math.log((1 - prior_prob) / prior_prob)
|
| 436 |
-
self.class_head.bias
|
| 437 |
|
| 438 |
# initialize regression layers
|
| 439 |
for head in self.point_head:
|
|
@@ -519,6 +519,19 @@ class LSPTransformer(nn.Module):
|
|
| 519 |
ref_points, self.query_block_size, self.query_block_size
|
| 520 |
).flatten(1, 2),
|
| 521 |
"embeddings": tgt.flatten(1, 2),
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 522 |
}
|
| 523 |
|
| 524 |
|
|
|
|
| 433 |
def init_weights(self) -> None:
|
| 434 |
prior_prob = 0.01
|
| 435 |
bias_value = -math.log((1 - prior_prob) / prior_prob)
|
| 436 |
+
nn.init.constant_(self.class_head.bias, bias_value)
|
| 437 |
|
| 438 |
# initialize regression layers
|
| 439 |
for head in self.point_head:
|
|
|
|
| 519 |
ref_points, self.query_block_size, self.query_block_size
|
| 520 |
).flatten(1, 2),
|
| 521 |
"embeddings": tgt.flatten(1, 2),
|
| 522 |
+
"aux_outputs": [
|
| 523 |
+
{
|
| 524 |
+
"logits": a,
|
| 525 |
+
"points": b,
|
| 526 |
+
"radial_distances": c,
|
| 527 |
+
}
|
| 528 |
+
for a, b, c in zip(
|
| 529 |
+
logits_list[:-1],
|
| 530 |
+
ref_points_list[:-1],
|
| 531 |
+
radial_distances_list[:-1],
|
| 532 |
+
strict=True,
|
| 533 |
+
)
|
| 534 |
+
],
|
| 535 |
}
|
| 536 |
|
| 537 |
|