Sourikta commited on
Commit
a1ea715
·
verified ·
1 Parent(s): 36463f9

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +38 -0
app.py ADDED
@@ -0,0 +1,38 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import gradio as gr
from magenta.models.music_vae import configs
from magenta.models.music_vae.trained_model import TrainedModel
import note_seq

# Load the pre-trained MusicVAE melody model ('cat-mel_2bar_big': categorical
# melody, 2-bar, large variant).  NOTE(review): the checkpoint is fetched from
# Magenta's public GCS bucket at import time, so app startup requires network
# access and can be slow — confirm this is acceptable for the deployment.
CONFIG = configs.CONFIG_MAP['cat-mel_2bar_big']
checkpoint = 'https://storage.googleapis.com/magentadata/models/music_vae/checkpoints/cat-mel_2bar_big.ckpt'
# batch_size=4 is the model's internal batching; the app itself only ever
# requests one sequence per call (see generate_music below — n=1).
music_vae = TrainedModel(CONFIG, batch_size=4, checkpoint_dir_or_path=checkpoint)
11
def generate_music(tempo):
    """Sample a short melody from MusicVAE and save it as a MIDI file.

    Args:
        tempo: Desired tempo in BPM (from the UI slider).  Clamped to the
            slider's own range [40, 200] as a defensive measure.

    Returns:
        Path of the written MIDI file ("generated_music.mid").
    """
    # BUG FIX: TrainedModel.sample() already returns fully decoded
    # NoteSequence objects — it is NOT a latent vector.  The original code
    # passed sample()'s result to decode(), which expects latent z vectors
    # and fails at runtime.  Sample one sequence and use it directly.
    generated_sequence = music_vae.sample(n=1)[0]

    # BUG FIX: the slider is labeled "Tempo (BPM)" but the original code
    # wrote note.velocity (loudness), leaving the tempo untouched.  Apply
    # the requested tempo to the sequence's tempo event instead (qpm == BPM).
    bpm = max(40, min(200, int(tempo)))
    if generated_sequence.tempos:
        generated_sequence.tempos[0].qpm = bpm
    else:
        generated_sequence.tempos.add(qpm=bpm)

    # Serialize the NoteSequence proto to a standard MIDI file.
    output_midi = "generated_music.mid"
    note_seq.sequence_proto_to_midi_file(generated_sequence, output_midi)
    return output_midi
28
# ---------------------------------------------------------------------------
# Gradio UI: a tempo slider drives generate_music; the result is offered as a
# downloadable file.  BUG FIX: the original used gr.Audio for the output, but
# generate_music returns a .mid path and browsers cannot decode MIDI in an
# <audio> element — gr.File delivers the MIDI as a download instead.
# ---------------------------------------------------------------------------
with gr.Blocks() as demo:
    gr.Markdown("# Interactive Music Composer 🎵")
    tempo = gr.Slider(40, 200, step=10, label="Select Tempo (BPM)")
    generate_button = gr.Button("Generate Music")
    output_file = gr.File(label="Generated Music (MIDI)")

    # Wire the button: slider value in, MIDI file path out.
    generate_button.click(fn=generate_music, inputs=tempo, outputs=output_file)

# Launch the app (blocking call; serves the UI).
demo.launch()