oriyonay committed
Commit 547f936 · verified · 1 Parent(s): 04bf858

Upload folder using huggingface_hub

Files changed (2):
  1. config.json +78 -3
  2. myna.py +7 -5
config.json CHANGED
@@ -1,11 +1,86 @@
 {
-    "_name_or_path": "oriyonay/myna-base",
+    "return_dict": true,
+    "output_hidden_states": false,
+    "output_attentions": false,
+    "torchscript": false,
+    "torch_dtype": "float32",
+    "use_bfloat16": false,
+    "tf_legacy_loss": false,
+    "pruned_heads": {},
+    "tie_word_embeddings": true,
+    "chunk_size_feed_forward": 0,
+    "is_encoder_decoder": false,
+    "is_decoder": false,
+    "cross_attention_hidden_size": null,
+    "add_cross_attention": false,
+    "tie_encoder_decoder": false,
+    "max_length": 20,
+    "min_length": 0,
+    "do_sample": false,
+    "early_stopping": false,
+    "num_beams": 1,
+    "num_beam_groups": 1,
+    "diversity_penalty": 0.0,
+    "temperature": 1.0,
+    "top_k": 50,
+    "top_p": 1.0,
+    "typical_p": 1.0,
+    "repetition_penalty": 1.0,
+    "length_penalty": 1.0,
+    "no_repeat_ngram_size": 0,
+    "encoder_no_repeat_ngram_size": 0,
+    "bad_words_ids": null,
+    "num_return_sequences": 1,
+    "output_scores": false,
+    "return_dict_in_generate": false,
+    "forced_bos_token_id": null,
+    "forced_eos_token_id": null,
+    "remove_invalid_values": false,
+    "exponential_decay_length_penalty": null,
+    "suppress_tokens": null,
+    "begin_suppress_tokens": null,
     "architectures": [
         "Myna"
     ],
+    "finetuning_task": null,
+    "id2label": {
+        "0": "LABEL_0",
+        "1": "LABEL_1"
+    },
+    "label2id": {
+        "LABEL_0": 0,
+        "LABEL_1": 1
+    },
+    "tokenizer_class": null,
+    "prefix": null,
+    "bos_token_id": null,
+    "pad_token_id": null,
+    "eos_token_id": null,
+    "sep_token_id": null,
+    "decoder_start_token_id": null,
+    "task_specific_params": null,
+    "problem_type": null,
+    "_name_or_path": "oriyonay/myna-base",
+    "transformers_version": "4.41.2",
+    "spec_size": [
+        128,
+        4096
+    ],
+    "patch_size": 16,
+    "dim": 384,
+    "depth": 12,
+    "heads": 6,
+    "mlp_dim": 1536,
+    "dim_head": 64,
+    "arch": "vit-s-16",
+    "additional_patch_size": null,
+    "hybrid_mode": false,
+    "n_samples": 50000,
+    "sr": 16000,
+    "n_frames": 96,
+    "model_type": "myna",
     "auto_map": {
         "AutoConfig": "myna.MynaConfig",
         "AutoModel": "myna.Myna"
-    },
-    "model_type": "myna"
+    }
 }
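
The expanded config.json serializes every stock PretrainedConfig field alongside the Myna-specific hyperparameters (spec_size, patch_size, dim, depth, heads, mlp_dim, dim_head, arch, n_samples, sr, n_frames), and auto_map still points the Auto classes at the custom code in myna.py. A minimal loading sketch, assuming the repository id matches _name_or_path and that the bundled myna.py defines the mapped classes (an illustrative snippet, not taken from this repo):

from transformers import AutoConfig, AutoModel

# trust_remote_code is needed because auto_map resolves AutoConfig/AutoModel
# to the custom classes shipped in myna.py (myna.MynaConfig / myna.Myna).
config = AutoConfig.from_pretrained('oriyonay/myna-base', trust_remote_code=True)
model = AutoModel.from_pretrained('oriyonay/myna-base', trust_remote_code=True)

print(config.arch, config.dim, config.depth)   # vit-s-16 384 12 (values from the new config.json)
print(config.spec_size, config.n_frames)       # [128, 4096] 96
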
myna.py CHANGED
@@ -20,7 +20,7 @@ import shutil
 
 
 def pair(t):
-    return t if isinstance(t, tuple) else (t, t)
+    return t if isinstance(t, (tuple, list)) else (t, t)
 
 
 def posemb_sincos_2d(h, w, dim, temperature: int = 10000, dtype = torch.float32):
@@ -268,7 +268,7 @@ class Myna(PreTrainedModel, PyTorchModelHubMixin):
 
     def _make_embeddings(self, patch_height, patch_width, patch_dim, dim, image_height, image_width):
         to_patch_embedding = nn.Sequential(
-            Rearrange("b c (h p1) (w p2) -> b (h w) (p1 p2 c)", p1 = patch_height, p2 = patch_width),
+            Rearrange('b c (h p1) (w p2) -> b (h w) (p1 p2 c)', p1 = patch_height, p2 = patch_width),
             nn.LayerNorm(patch_dim),
             nn.Linear(patch_dim, dim),
             nn.LayerNorm(dim),
@@ -286,7 +286,7 @@ class Myna(PreTrainedModel, PyTorchModelHubMixin):
         n_frames = self.config.n_frames
         if n_samples and n_samples != self.config.n_samples:
             n_frames = self.config._get_n_frames(n_samples)
-        spec = self.preprocessor(filename, n_frames)
+        spec = self.preprocessor(filename, n_frames).to(self.device)
         return self(spec)
 
     @property
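
The first three hunks are small robustness fixes: pair() now also accepts lists (config fields such as spec_size come back as JSON lists once the config is round-tripped through config.json, which is presumably the motivation), the Rearrange pattern only changes quote style, and the preprocessed spectrogram is moved onto the model's device before the forward pass. A quick illustrative sketch of the behaviour these changes assume (the tensor shape is hypothetical; the real preprocessor lives elsewhere in myna.py):

import torch

def pair(t):
    # post-change behaviour: a 2-element list such as config.spec_size
    # passes through unchanged instead of being duplicated into (list, list)
    return t if isinstance(t, (tuple, list)) else (t, t)

assert pair(16) == (16, 16)
assert pair([128, 4096]) == [128, 4096]

# Device fix: a CPU spectrogram fed into a CUDA-resident model raises a
# device-mismatch error; .to(device) mirrors the `.to(self.device)` added above.
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
spec = torch.randn(1, 1, 128, 96)   # hypothetical (batch, channels, mel bins, frames)
spec = spec.to(device)
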
@@ -298,7 +298,8 @@ def save_model_and_push(model, repo_name, save_dir='myna-temp', to_hub=False):
     model.save_pretrained(save_dir)
     shutil.copy('myna.py', save_dir)
 
-    config = {
+    config = model.config.to_dict()
+    config.update({
         '_name_or_path': repo_name,
         'architectures': ['Myna'],
         'auto_map': {
@@ -306,7 +307,7 @@ def save_model_and_push(model, repo_name, save_dir='myna-temp', to_hub=False):
             'AutoModel': 'myna.Myna'
         },
         'model_type': 'myna'
-    }
+    })
 
     with open(os.path.join(save_dir, 'config.json'), 'w') as f:
         json.dump(config, f, indent=4)
@@ -323,6 +324,7 @@ def save_model_and_push(model, repo_name, save_dir='myna-temp', to_hub=False):
 if __name__ == '__main__':
     config = MynaConfig(
         arch='vit-s-16',
+        patch_size=16,
         additional_patch_size=None,
         hybrid_mode=False
     )
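
The remaining hunks change how config.json is produced: save_model_and_push now starts from model.config.to_dict() and only overrides the repo-identification keys, which is exactly what grows config.json from 11 to 86 lines in this commit, and the __main__ example now passes patch_size=16 explicitly. A rough sketch of that serialization path, assuming myna.py is importable and that MynaConfig subclasses transformers.PretrainedConfig (as its use with AutoConfig implies):

import json
from myna import MynaConfig   # assumes myna.py is on the import path

config = MynaConfig(
    arch='vit-s-16',
    patch_size=16,             # now passed explicitly, as in the last hunk
    additional_patch_size=None,
    hybrid_mode=False,
)

# to_dict() is what save_model_and_push now serializes, so config.json keeps
# every hyperparameter (dim, depth, heads, spec_size, ...) instead of only the
# four repo-identification keys the old version wrote out.
d = config.to_dict()
d.update({
    '_name_or_path': 'oriyonay/myna-base',
    'architectures': ['Myna'],
    'auto_map': {'AutoConfig': 'myna.MynaConfig', 'AutoModel': 'myna.Myna'},
    'model_type': 'myna',
})
print(json.dumps(d, indent=4))   # approximately the new config.json shown above
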