matejpekar committed on
Commit
b5a1bdb
·
verified ·
1 Parent(s): 4bf8d83

Upload model

Browse files
Files changed (4) hide show
  1. config.json +6 -2
  2. configuration.py +2 -2
  3. model.safetensors +1 -1
  4. modeling.py +5 -3
config.json CHANGED
@@ -1,7 +1,11 @@
1
  {
2
  "architectures": [
3
- "LSPDETR"
4
  ],
 
 
 
 
5
  "backbone": "microsoft/swinv2-tiny-patch4-window16-256",
6
  "depths": [
7
  6,
@@ -16,7 +20,7 @@
16
  192,
17
  96
18
  ],
19
- "model_type": "lspdetr",
20
  "num_classes": 2,
21
  "num_heads": 12,
22
  "num_radial_distances": 64,
 
1
  {
2
  "architectures": [
3
+ "LSPDetrModel"
4
  ],
5
+ "auto_map": {
6
+ "AutoConfig": "configuration.LSPDetrConfig",
7
+ "AutoModelForObjectDetection": "modeling.LSPDetrModel"
8
+ },
9
  "backbone": "microsoft/swinv2-tiny-patch4-window16-256",
10
  "depths": [
11
  6,
 
20
  192,
21
  96
22
  ],
23
+ "model_type": "lsp_detr",
24
  "num_classes": 2,
25
  "num_heads": 12,
26
  "num_radial_distances": 64,
configuration.py CHANGED
@@ -1,8 +1,8 @@
1
  from transformers import PretrainedConfig
2
 
3
 
4
- class LSPDETRConfig(PretrainedConfig):
5
- model_type = "LSP-DETR"
6
 
7
  def __init__(
8
  self,
 
1
  from transformers import PretrainedConfig
2
 
3
 
4
+ class LSPDetrConfig(PretrainedConfig):
5
+ model_type = "lsp_detr"
6
 
7
  def __init__(
8
  self,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:b709eb65d0a8b89794d9472f9ee3b6e2a6217ce7e0a4e68c3370a9183b27485b
3
  size 205650424
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:34df87bc194a31875e6fad557746c2c5e94f027039211df60054807f61107bd0
3
  size 205650424
modeling.py CHANGED
@@ -8,7 +8,7 @@ from torch.nn.utils import parametrize
8
  from transformers import PreTrainedModel, Swinv2Backbone
9
  from transformers.models.swinv2.modeling_swinv2 import window_partition, window_reverse
10
 
11
- from .configuration import LSPDETRConfig
12
 
13
 
14
  def init_freqs(head_dim: int, num_heads: int, pos_dim: int, theta: float) -> Tensor:
@@ -643,8 +643,10 @@ class LSPTransformer(nn.Module):
643
  return ref_points.unsqueeze(-2) + polar
644
 
645
 
646
- class LSPDETR(PreTrainedModel):
647
- def __init__(self, config: LSPDETRConfig) -> None:
 
 
648
  super().__init__(config)
649
 
650
  self.backbone = Swinv2Backbone.from_pretrained(
 
8
  from transformers import PreTrainedModel, Swinv2Backbone
9
  from transformers.models.swinv2.modeling_swinv2 import window_partition, window_reverse
10
 
11
+ from .configuration import LSPDetrConfig
12
 
13
 
14
  def init_freqs(head_dim: int, num_heads: int, pos_dim: int, theta: float) -> Tensor:
 
643
  return ref_points.unsqueeze(-2) + polar
644
 
645
 
646
+ class LSPDetrModel(PreTrainedModel):
647
+ config_class = LSPDetrConfig
648
+
649
+ def __init__(self, config: LSPDetrConfig) -> None:
650
  super().__init__(config)
651
 
652
  self.backbone = Swinv2Backbone.from_pretrained(