eniddealla committed on
Commit
acf7c1f
·
1 Parent(s): 9351f61

new model

Browse files
Files changed (3) hide show
  1. config.json +6 -14
  2. model.pth +2 -2
  3. vocab.json +0 -0
config.json CHANGED
@@ -13,7 +13,7 @@
13
  "dashboard_logger": "tensorboard",
14
  "save_on_interrupt": true,
15
  "log_model_step": 100,
16
- "save_step": 50000,
17
  "save_n_checkpoints": 1,
18
  "save_checkpoints": true,
19
  "save_all_best": false,
@@ -27,12 +27,12 @@
27
  "distributed_url": "tcp://localhost:54321",
28
  "mixed_precision": false,
29
  "precision": "fp16",
30
- "epochs": 100,
31
  "batch_size": 8,
32
  "eval_batch_size": 16,
33
  "grad_clip": 0.0,
34
  "scheduler_after_epoch": true,
35
- "lr": 1e-05,
36
  "optimizer": "AdamW",
37
  "optimizer_params": {
38
  "betas": [
@@ -127,7 +127,7 @@
127
  "gpt_layers": 30,
128
  "gpt_n_model_channels": 1024,
129
  "gpt_n_heads": 16,
130
- "gpt_number_text_tokens": 9481,
131
  "gpt_start_text_token": 261,
132
  "gpt_stop_text_token": 0,
133
  "gpt_num_audio_tokens": 1026,
@@ -148,11 +148,11 @@
148
  "gpt_loss_text_ce_weight": 0.01,
149
  "gpt_loss_mel_ce_weight": 1.0,
150
  "debug_loading_failures": false,
151
- "max_wav_length": 330750,
152
  "max_text_length": 400,
153
  "mel_norm_file": "checkpoints/XTTS_v2.0_original_model_files/mel_stats.pth",
154
  "dvae_checkpoint": "checkpoints/XTTS_v2.0_original_model_files/dvae.pth",
155
- "xtts_checkpoint": "checkpoints/GPT_XTTS_FT-November-19-2024_04+25PM-0000000/best_model.pth",
156
  "vocoder": ""
157
  },
158
  "model_dir": null,
@@ -174,14 +174,6 @@
174
  "ko",
175
  "ja",
176
  "hi",
177
- "ar",
178
- "ar",
179
- "ar",
180
- "ar",
181
- "ar",
182
- "ar",
183
- "ar",
184
- "ar",
185
  "ar"
186
  ],
187
  "temperature": 0.75,
 
13
  "dashboard_logger": "tensorboard",
14
  "save_on_interrupt": true,
15
  "log_model_step": 100,
16
+ "save_step": 25000,
17
  "save_n_checkpoints": 1,
18
  "save_checkpoints": true,
19
  "save_all_best": false,
 
27
  "distributed_url": "tcp://localhost:54321",
28
  "mixed_precision": false,
29
  "precision": "fp16",
30
+ "epochs": 50,
31
  "batch_size": 8,
32
  "eval_batch_size": 16,
33
  "grad_clip": 0.0,
34
  "scheduler_after_epoch": true,
35
+ "lr": 5e-06,
36
  "optimizer": "AdamW",
37
  "optimizer_params": {
38
  "betas": [
 
127
  "gpt_layers": 30,
128
  "gpt_n_model_channels": 1024,
129
  "gpt_n_heads": 16,
130
+ "gpt_number_text_tokens": 8448,
131
  "gpt_start_text_token": 261,
132
  "gpt_stop_text_token": 0,
133
  "gpt_num_audio_tokens": 1026,
 
148
  "gpt_loss_text_ce_weight": 0.01,
149
  "gpt_loss_mel_ce_weight": 1.0,
150
  "debug_loading_failures": false,
151
+ "max_wav_length": 264600,
152
  "max_text_length": 400,
153
  "mel_norm_file": "checkpoints/XTTS_v2.0_original_model_files/mel_stats.pth",
154
  "dvae_checkpoint": "checkpoints/XTTS_v2.0_original_model_files/dvae.pth",
155
+ "xtts_checkpoint": "checkpoints/XTTS_v2.0_original_model_files/model.pth",
156
  "vocoder": ""
157
  },
158
  "model_dir": null,
 
174
  "ko",
175
  "ja",
176
  "hi",
 
 
 
 
 
 
 
 
177
  "ar"
178
  ],
179
  "temperature": 0.75,
model.pth CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:dcb281e19fe2a884edd4e0ab7117cd8deabf8a82d0e8967723d3193f382c0e9c
3
- size 5676773553
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7f5839be23bff6d4e5136d933ccbd0e935b7f7d220c39181dbd79c581dcb87bd
3
+ size 5651374121
vocab.json CHANGED
The diff for this file is too large to render. See raw diff