"""Gradio app that assembles a short "analog horror" style video from
user-uploaded Happy Tree Friends images and audio clips."""

import gradio as gr
import moviepy.editor as mp


def _path(f):
    """Normalize a Gradio File input to a filesystem path.

    Depending on the Gradio version/config, ``gr.File`` hands the callback
    either a plain ``str`` path or a tempfile wrapper exposing ``.name``.
    """
    return f if isinstance(f, str) else f.name


def create_horror_video(logo, flippy_walk, fliqpy_run, credits, voice, jumpscare_sfx, music):
    """Assemble the analog-horror video and return the output file path.

    Parameters are Gradio ``File`` uploads: four images (``logo``,
    ``flippy_walk``, ``fliqpy_run``, ``credits``) and three audio files
    (``voice``, ``jumpscare_sfx``, ``music``).

    Returns:
        str: path of the rendered MP4 ("htf_analog_horror.mp4").
    """
    # --- still-image video segments --------------------------------------
    logo_clip = mp.ImageClip(_path(logo)).set_duration(10)          # title card
    flippy_clip = mp.ImageClip(_path(flippy_walk)).set_duration(5)  # Flippy walking
    fliqpy_clip = mp.ImageClip(_path(fliqpy_run)).set_duration(3)   # Fliqpy running at the screen
    credits_clip = mp.ImageClip(_path(credits)).set_duration(5)     # credits screen

    # --- audio ------------------------------------------------------------
    voice_clip = mp.AudioFileClip(_path(voice)).set_duration(5)
    # BUG FIX: the original passed the jumpscare *audio* file to ImageClip,
    # which crashes at runtime. The SFX belongs on the jumpscare segment
    # (Fliqpy running at the screen) as its audio track.
    sfx_clip = mp.AudioFileClip(_path(jumpscare_sfx)).set_duration(fliqpy_clip.duration)

    # --- Flippy's dialogue, overlaid on his walking segment ---------------
    flippy_text = mp.TextClip(
        "Man. I don’t like this blood. I’m a killer 😭",
        fontsize=50, color='red', font="Arial",
    ).set_position(('center', 'bottom')).set_duration(3)
    # BUG FIX: composite the caption over the clip instead of concatenating
    # the bare TextClip (transparent, differently sized) as its own segment.
    flippy_segment = mp.CompositeVideoClip([flippy_clip, flippy_text]).set_audio(voice_clip)

    # method="compose" tolerates differing frame sizes between the uploads.
    final_video = mp.concatenate_videoclips(
        [logo_clip, flippy_segment, fliqpy_clip.set_audio(sfx_clip), credits_clip],
        method="compose",
    )

    # --- background music under the whole edit ----------------------------
    music_clip = mp.AudioFileClip(_path(music)).set_duration(final_video.duration)
    # final_video.audio can be None when segments carry no sound of their own.
    tracks = [music_clip] if final_video.audio is None else [final_video.audio, music_clip]
    final_video = final_video.set_audio(mp.CompositeAudioClip(tracks))

    # --- export -----------------------------------------------------------
    output_path = "htf_analog_horror.mp4"
    # audio_codec="aac" is required for audio inside an .mp4 container.
    final_video.write_videofile(output_path, fps=24, codec="libx264", audio_codec="aac")
    return output_path


# --- Gradio interface ------------------------------------------------------
iface = gr.Interface(
    fn=create_horror_video,
    inputs=[
        gr.File(label="Upload Logo (Image)"),
        gr.File(label="Upload Flippy Walking (Image)"),
        gr.File(label="Upload Fliqpy Running (Image)"),
        gr.File(label="Upload Credits Screen (Image)"),
        gr.File(label="Upload Flippy's Voice (MP3/WAV)"),
        gr.File(label="Upload Jumpscare SFX (MP3/WAV)"),
        gr.File(label="Upload Background Music (MP3/WAV)"),
    ],
    outputs=gr.Video(label="Generated Analog Horror Video"),
    title="HTF Analog Horror Video Creator",
    # BUG FIX: the original string literal contained a raw newline (SyntaxError).
    description="Upload your HTF images and sounds, and this AI will generate a scary analog horror video!",
)

if __name__ == "__main__":
    iface.launch()