Spaces:
Sleeping
Sleeping
Upload 6 files
Browse files
- Dockerfile +2 -5
- convert_model.py +29 -2
Dockerfile
CHANGED
|
@@ -14,7 +14,7 @@ ENV PYTHONUNBUFFERED=1 \
|
|
| 14 |
PIP_DISABLE_PIP_VERSION_CHECK=1 \
|
| 15 |
DEBIAN_FRONTEND=noninteractive \
|
| 16 |
MODELS_DIR=/app/models \
|
| 17 |
-
GENIE_DATA_DIR=/app/
|
| 18 |
|
| 19 |
# Install system dependencies
|
| 20 |
RUN apt-get update && apt-get install -y --no-install-recommends \
|
|
@@ -45,7 +45,7 @@ RUN pip install --no-cache-dir genie-tts
|
|
| 45 |
# Create directories
|
| 46 |
RUN mkdir -p /app/models/liang/onnx \
|
| 47 |
&& mkdir -p /app/models/liang/reference \
|
| 48 |
-
&& mkdir -p /app/
|
| 49 |
&& mkdir -p /app/temp
|
| 50 |
|
| 51 |
# Download model files
|
|
@@ -78,9 +78,6 @@ COPY models/liang/config.json /app/models/liang/config.json
|
|
| 78 |
# Copy application code
|
| 79 |
COPY app.py .
|
| 80 |
|
| 81 |
-
# Download Genie base data (uses a script to avoid inline Python issues)
|
| 82 |
-
RUN python -c 'import genie_tts; genie_tts.download_genie_data()'
|
| 83 |
-
|
| 84 |
# Expose port (Hugging Face Spaces uses 7860)
|
| 85 |
EXPOSE 7860
|
| 86 |
|
|
|
|
| 14 |
PIP_DISABLE_PIP_VERSION_CHECK=1 \
|
| 15 |
DEBIAN_FRONTEND=noninteractive \
|
| 16 |
MODELS_DIR=/app/models \
|
| 17 |
+
GENIE_DATA_DIR=/app/GenieData
|
| 18 |
|
| 19 |
# Install system dependencies
|
| 20 |
RUN apt-get update && apt-get install -y --no-install-recommends \
|
|
|
|
| 45 |
# Create directories
|
| 46 |
RUN mkdir -p /app/models/liang/onnx \
|
| 47 |
&& mkdir -p /app/models/liang/reference \
|
| 48 |
+
&& mkdir -p /app/GenieData \
|
| 49 |
&& mkdir -p /app/temp
|
| 50 |
|
| 51 |
# Download model files
|
|
|
|
| 78 |
# Copy application code
|
| 79 |
COPY app.py .
|
| 80 |
|
|
|
|
|
|
|
|
|
|
| 81 |
# Expose port (Hugging Face Spaces uses 7860)
|
| 82 |
EXPOSE 7860
|
| 83 |
|
convert_model.py
CHANGED
|
@@ -2,15 +2,42 @@
|
|
| 2 |
"""
|
| 3 |
Model Conversion Script
|
| 4 |
=======================
|
| 5 |
-
This script downloads and converts PyTorch models to ONNX format.
|
| 6 |
"""
|
| 7 |
|
| 8 |
import os
|
| 9 |
import sys
|
| 10 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 11 |
def main():
|
| 12 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 13 |
|
|
|
|
|
|
|
| 14 |
import genie_tts as genie
|
| 15 |
|
| 16 |
ckpt_path = os.environ.get("CKPT_PATH", "/app/temp/model.ckpt")
|
|
|
|
| 2 |
"""
|
| 3 |
Model Conversion Script
|
| 4 |
=======================
|
| 5 |
+
This script downloads GenieData resources and converts PyTorch models to ONNX format.
|
| 6 |
"""
|
| 7 |
|
| 8 |
import os
|
| 9 |
import sys
|
| 10 |
|
| 11 |
+
def download_genie_data():
|
| 12 |
+
"""Download GenieData resources from HuggingFace"""
|
| 13 |
+
from huggingface_hub import snapshot_download
|
| 14 |
+
|
| 15 |
+
genie_data_dir = os.environ.get("GENIE_DATA_DIR", "./GenieData")
|
| 16 |
+
|
| 17 |
+
if os.path.exists(genie_data_dir) and os.listdir(genie_data_dir):
|
| 18 |
+
print(f"GenieData already exists at {genie_data_dir}")
|
| 19 |
+
return
|
| 20 |
+
|
| 21 |
+
print("🚀 Starting download Genie-TTS resources from HuggingFace...")
|
| 22 |
+
snapshot_download(
|
| 23 |
+
repo_id="High-Logic/Genie",
|
| 24 |
+
repo_type="model",
|
| 25 |
+
allow_patterns="GenieData/*",
|
| 26 |
+
local_dir=".",
|
| 27 |
+
local_dir_use_symlinks=False, # Don't use symlinks in Docker
|
| 28 |
+
)
|
| 29 |
+
print("✅ Genie-TTS resources downloaded successfully.")
|
| 30 |
+
|
| 31 |
def main():
|
| 32 |
+
# Set environment variable for GenieData location BEFORE importing genie_tts
|
| 33 |
+
os.environ["GENIE_DATA_DIR"] = "/app/GenieData"
|
| 34 |
+
|
| 35 |
+
# Step 1: Download GenieData resources
|
| 36 |
+
print("Step 1: Downloading GenieData resources...")
|
| 37 |
+
download_genie_data()
|
| 38 |
|
| 39 |
+
# Step 2: Now import genie_tts (it will check for GenieData)
|
| 40 |
+
print("Step 2: Starting ONNX conversion...")
|
| 41 |
import genie_tts as genie
|
| 42 |
|
| 43 |
ckpt_path = os.environ.get("CKPT_PATH", "/app/temp/model.ckpt")
|