File size: 3,566 Bytes
7bcde8c
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
#!/usr/bin/env python3
"""
╔══════════════════════════════════════╗
β•‘         SAMPLE GENERATOR            β•‘
β•‘   Retro-styled AI music sampler     β•‘
β•‘   Powered by Meta's MusicGen        β•‘
β•šβ•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•

Local usage:
    pip install -r requirements.txt
    python app.py  β†’  http://localhost:7860

HF Spaces:
    Push this folder to a Docker Space β€” the Dockerfile handles everything.
"""

import io
import os
import threading
import traceback
import uuid

from flask import Flask, jsonify, request, send_file, send_from_directory

app = Flask(__name__)

# Lazily-loaded MusicGen model, shared by every request in this process.
_model       = None
_model_name  = None   # model id from first load; reused for all later requests
_model_lock  = threading.Lock()   # guards the one-time model load


# ─── Serve the frontend ────────────────────────────────────────────────────

@app.route('/')
def index():
    """Serve the static frontend page from this file's directory."""
    app_dir = os.path.dirname(__file__)
    return send_from_directory(app_dir, 'index.html')


# ─── Generate endpoint ─────────────────────────────────────────────────────

@app.route('/api/generate', methods=['POST'])
def generate():
    """Generate a music sample from a text description.

    JSON body:
        description: text prompt (default 'ambient electronic music')
        duration:    clip length in seconds, clamped to 1-60 (default 8)
        model:       one of the whitelisted MusicGen model ids

    Returns the generated clip as a WAV response, or a JSON error
    payload with status 400 (bad input) / 500 (generation failure).
    """
    global _model, _model_name

    # silent=True: a missing/wrong content-type falls through to the
    # defaults below instead of producing an HTML error page.
    data        = request.get_json(silent=True) or {}
    description = data.get('description', 'ambient electronic music').strip()
    model_id    = data.get('model', 'facebook/musicgen-small')

    # Non-numeric duration is client error (400), not a 500 traceback.
    try:
        duration = max(1, min(int(data.get('duration', 8)), 60))
    except (TypeError, ValueError):
        return jsonify({'error': 'duration must be a number'}), 400

    # Whitelist allowed model IDs
    allowed = {'facebook/musicgen-small', 'facebook/musicgen-medium'}
    if model_id not in allowed:
        return jsonify({'error': f'Unknown model: {model_id}'}), 400

    try:
        # Hold the lock for the entire load + generate sequence:
        # set_generation_params mutates shared model state, so two
        # concurrent requests could otherwise race on `duration`.
        with _model_lock:
            if _model is None:
                from audiocraft.models import MusicGen
                _model = MusicGen.get_pretrained(model_id)
                _model_name = model_id
            # Already loaded: never call get_pretrained again
            # (later model_id changes are intentionally ignored).

            _model.set_generation_params(duration=duration)
            wav = _model.generate([description])
            sample_rate = _model.sample_rate

        # ── Write to /tmp, then serve the bytes from memory so the
        # temp file can be deleted immediately (no /tmp leak) ────────
        tmp_path = f'/tmp/sample_{uuid.uuid4().hex[:10]}'
        from audiocraft.data.audio import audio_write
        audio_write(tmp_path, wav[0].cpu(), sample_rate,
                    strategy='loudness')

        wav_file = f'{tmp_path}.wav'   # audio_write appends the extension
        with open(wav_file, 'rb') as f:
            payload = io.BytesIO(f.read())
        os.remove(wav_file)

        return send_file(payload, mimetype='audio/wav')

    except Exception as e:
        return jsonify({
            'error':     str(e),
            'traceback': traceback.format_exc()
        }), 500


# ─── Entry point ───────────────────────────────────────────────────────────

if __name__ == '__main__':
    # HF Spaces expects the app on port 7860; use it as the local default too.
    port = int(os.environ.get('PORT', 7860))
    banner = '=' * 50
    print(f'\n{banner}\n  SAMPLE GENERATOR\n{banner}')
    print(f'  β†’ http://localhost:{port}')
    print(f'{banner}\n')
    app.run(host='0.0.0.0', port=port, debug=False)