Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -12,6 +12,22 @@ os.makedirs(OUTPUT_DIR, exist_ok=True)
|
|
| 12 |
|
| 13 |
# Run inference on GPU when one is available; otherwise fall back to CPU.
device = "cuda" if torch.cuda.is_available() else "cpu"
|
| 14 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 15 |
# Load model
|
| 16 |
config = XttsConfig()
|
| 17 |
config.load_json(os.path.join(MODEL_DIR, "config.json"))
|
|
|
|
| 12 |
|
| 13 |
device = "cuda" if torch.cuda.is_available() else "cpu"
|
| 14 |
|
| 15 |
+
from huggingface_hub import hf_hub_download

# ------------------------
# Download model files from Hugging Face if not present
# ------------------------
MODEL_REPO = "MariaKaiser/EGTTS_finetuned"  # replace with your repo name

if not os.path.exists(os.path.join(MODEL_DIR, "config.json")):
    os.makedirs(MODEL_DIR, exist_ok=True)
    # BUG FIX: the original used cache_dir=MODEL_DIR, which stores files under
    # MODEL_DIR/models--<org>--<repo>/snapshots/<rev>/..., NOT at
    # MODEL_DIR/<filename>. That meant (a) the existence check above never
    # became true, re-resolving the model on every start, and (b) the
    # MODEL_DIR-relative paths used below pointed at files that were never
    # created. local_dir places the files directly inside MODEL_DIR, so both
    # branches of this if/else yield the same on-disk layout.
    config_file = hf_hub_download(repo_id=MODEL_REPO, filename="config.json", local_dir=MODEL_DIR)
    vocab_file = hf_hub_download(repo_id=MODEL_REPO, filename="vocab.json", local_dir=MODEL_DIR)
    model_file = hf_hub_download(repo_id=MODEL_REPO, filename="model.pth", local_dir=MODEL_DIR)
else:
    # Files already present from a previous run — reuse them.
    config_file = os.path.join(MODEL_DIR, "config.json")
    vocab_file = os.path.join(MODEL_DIR, "vocab.json")
    model_file = os.path.join(MODEL_DIR, "model.pth")

# Load model
config = XttsConfig()
# Load via config_file (set in both branches above) rather than re-joining
# MODEL_DIR, so the path used here is guaranteed to be the one that exists.
config.load_json(config_file)