Curlyblaze committed on
Commit
c650933
·
verified ·
1 Parent(s): c4e9d49

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +58 -18
app.py CHANGED
@@ -1,23 +1,63 @@
1
  import gradio as gr
2
- from huggingface_hub import InferenceClient
3
-
4
- # Logic for your app
5
def master_audio(input_file):
    """Placeholder mastering step: echoes the uploaded file back unchanged.

    A real implementation would invoke a mastering model (e.g. SonicMaster)
    here; returning the input path as-is lets the UI wiring be demonstrated
    without any model in place.
    """
    return input_file
10
-
11
- # Create the UI
12
# Assemble the demo interface: an upload widget, a result widget, and a
# button that routes the upload through master_audio.
with gr.Blocks() as demo:
    gr.Markdown("# 🎵 AI Pro Studio")
    gr.Markdown("Upload your demo and let AI polish the EQ and Dynamics.")

    with gr.Row():
        audio_input = gr.Audio(type="filepath", label="Upload Raw Song")
        audio_output = gr.Audio(label="Mastered Version")

    btn = gr.Button("Master My Track")
    btn.click(fn=master_audio, inputs=audio_input, outputs=audio_output)

demo.launch()
 
1
  import gradio as gr
2
+ from gradio_client import Client, handle_file
3
+ from transformers import pipeline
4
+
5
# 1. Genre classifier, loaded locally at startup (downloads weights on first run).
# NOTE(review): "MIT/ast-finetuned-audioset" is an AudioSet event tagger, so its
# labels are AudioSet classes — confirm they actually line up with the genre
# keys ("Hip Hop", "Rock", ...) used in the prompt map in auto_master; if not,
# the fallback prompt will always be chosen.
classifier = pipeline("audio-classification", model="MIT/ast-finetuned-audioset")

# 2. Remote mastering engine: a gradio_client connection to the
# amaai-lab/SonicMaster Hugging Face Space (network call at import time).
mastering_client = Client("amaai-lab/SonicMaster")
11
+
12
def auto_master(audio_path):
    """Detect the track's genre and master it with a genre-specific prompt.

    Parameters
    ----------
    audio_path : str | None
        Filesystem path to the uploaded audio (gradio ``type="filepath"``),
        or None when nothing was uploaded.

    Returns
    -------
    tuple
        ``(mastered_audio_or_None, status_message)`` — the second element is
        shown in the UI status textbox.
    """
    if audio_path is None:
        return None, "No audio detected."

    # Step 1: Detect genre. The full file is passed to the classifier;
    # trimming to a short snippet first would speed this up, but no
    # trimming is performed here.
    results = classifier(audio_path)
    if not results:
        # Guard against an empty result list — avoids an IndexError below.
        return None, "Genre detection returned no labels."
    top_genre = results[0]['label']

    # Step 2: Map the detected genre to a professional mastering prompt.
    genre_prompts = {
        "Hip Hop": "Master this for heavy bass and crisp vocals. High energy.",
        "Rock": "Give it a warm analog feel with punchy drums and clear guitars.",
        "Pop": "Make it sound like a radio hit. Bright, loud, and very wide.",
        "Classical": "Preserve the dynamic range but add professional clarity.",
        "Electronic": "Focus on sub-bass clarity and sharp high-end transients."
    }
    # Fallback for any genre not in our map.
    final_prompt = genre_prompts.get(top_genre, "Master this song to sound professionally produced.")

    # Step 3: Send to SonicMaster. The remote Space call can fail (network
    # error, Space asleep, API change) — surface that in the status textbox
    # instead of crashing the click handler.
    try:
        result = mastering_client.predict(
            audio=handle_file(audio_path),
            prompt=final_prompt,
            seed=42,
            steps=20,
            cfg_scale=3.5,
            api_name="/predict"
        )
    except Exception as exc:
        return None, f"Mastering failed: {exc}"

    # result[1] is assumed to be the mastered audio output — TODO confirm
    # against the SonicMaster Space's /predict return signature.
    return result[1], f"Detected Genre: {top_genre} | Applied Style: {final_prompt}"
44
+
45
# 3. Assemble the "smart" mastering interface: upload, mastered output,
# an analysis status line, and the button that drives auto_master.
with gr.Blocks(theme=gr.themes.Monochrome()) as demo:
    gr.Markdown("# 🤖 Smart AI Mastering Studio")
    gr.Markdown("Upload a track. We'll identify the genre and apply custom studio settings.")

    with gr.Row():
        source_track = gr.Audio(label="Raw Audio File", type="filepath")
        mastered_track = gr.Audio(label="AI Mastered Output")

    analysis_box = gr.Textbox(label="Analysis Status", interactive=False)
    master_button = gr.Button("✨ Auto-Master My Track", variant="primary")

    master_button.click(fn=auto_master, inputs=source_track, outputs=[mastered_track, analysis_box])

demo.launch()