Spaces:
Paused
Paused
Update main.py (#4)
Browse files- Update main.py (1df55422db7d0a17428edcaf3664f1635639a114)
main.py
CHANGED
|
@@ -10,13 +10,16 @@ import torch
|
|
| 10 |
import soundfile as sf
|
| 11 |
import numpy as np
|
| 12 |
from kokoro import KModel, KPipeline
|
|
|
|
|
|
|
|
|
|
| 13 |
|
| 14 |
app = FastAPI()
|
| 15 |
|
| 16 |
CUDA_AVAILABLE = torch.cuda.is_available()
|
| 17 |
models = {gpu: KModel().to('cuda' if gpu else 'cpu').eval() for gpu in [False] + ([True] if CUDA_AVAILABLE else [])}
|
| 18 |
pipelines = {lang_code: KPipeline(lang_code=lang_code, model=False) for lang_code in ['a', 'p', 'e']}
|
| 19 |
-
|
| 20 |
CHOICES = {
|
| 21 |
'🇺🇸 🚺 Heart ❤️': 'af_heart',
|
| 22 |
'🇺🇸 🚺 Alloy': 'af_alloy',
|
|
|
|
| 10 |
import soundfile as sf
|
| 11 |
import numpy as np
|
| 12 |
from kokoro import KModel, KPipeline
|
| 13 |
+
import os
|
| 14 |
+
|
| 15 |
+
os.environ['HUGGINGFACE_HUB_CACHE'] = '/app/models'
|
| 16 |
|
| 17 |
app = FastAPI()
|
| 18 |
|
| 19 |
CUDA_AVAILABLE = torch.cuda.is_available()
|
| 20 |
models = {gpu: KModel().to('cuda' if gpu else 'cpu').eval() for gpu in [False] + ([True] if CUDA_AVAILABLE else [])}
|
| 21 |
pipelines = {lang_code: KPipeline(lang_code=lang_code, model=False) for lang_code in ['a', 'p', 'e']}
|
| 22 |
+
# ... (restante do código inalterado)
|
| 23 |
CHOICES = {
|
| 24 |
'🇺🇸 🚺 Heart ❤️': 'af_heart',
|
| 25 |
'🇺🇸 🚺 Alloy': 'af_alloy',
|