# Hugging Face Space script (scraped copy; the Space itself was reporting "Runtime error").
import re
from pathlib import Path

import torch
from diffusers import AnimateDiffPipeline, DDIMScheduler, MotionAdapter
from diffusers.utils import export_to_gif
from moviepy.editor import VideoFileClip, concatenate_videoclips
# Load the AnimateDiff motion adapter and attach it to the epiCRealism
# Stable Diffusion checkpoint, everything in fp16.
motion_adapter = MotionAdapter.from_pretrained(
    "guoyww/animatediff-motion-adapter-v1-5-2", torch_dtype=torch.float16
)
pipeline = AnimateDiffPipeline.from_pretrained(
    "emilianJR/epiCRealism", motion_adapter=motion_adapter, torch_dtype=torch.float16
)
# Replace the default scheduler with a DDIM scheduler configured for
# linear betas and "linspace" timestep spacing.
pipeline.scheduler = DDIMScheduler.from_pretrained(
    "emilianJR/epiCRealism",
    subfolder="scheduler",
    clip_sample=False,
    timestep_spacing="linspace",
    beta_schedule="linear",
    steps_offset=1,
)
# Decode the VAE in slices to lower peak memory during frame generation.
pipeline.enable_vae_slicing()
# pipeline.enable_model_cpu_offload()  # uncomment if GPU memory is tight
def gen_movie(frames_desc):
    """Generate one 16-frame animation per prompt and save each as animation{i}.gif.

    Args:
        frames_desc: nested sequence of prompt strings; only the first inner
            sequence is used (presumably a wrapper added by the caller, e.g.
            a Gradio input — TODO confirm against the caller).
    """
    # Unwrap the outer container; assumes frames_desc looks like [[prompt, ...]].
    frames_desc = frames_desc[0]
    for idx, frame_description in enumerate(frames_desc, start=1):
        output = pipeline(
            # BUG FIX: the original passed the literal string "frame_description"
            # instead of the loop variable, so every clip ignored its prompt.
            prompt=frame_description,
            # NOTE(review): "high resolution" as a *negative* prompt actively
            # discourages detail — confirm this is intentional.
            negative_prompt="high resolution",
            num_frames=16,
            guidance_scale=7.5,
            num_inference_steps=50,
            # Fixed seed keeps output reproducible across runs.
            generator=torch.Generator("cpu").manual_seed(0),
        )
        # output.frames is a list of frame lists; take the single batch element.
        export_to_gif(output.frames[0], f'animation{idx}.gif')
# Collect every generated animation GIF, ordered by its numeric suffix so that
# animation10.gif sorts after animation2.gif (plain lexicographic order would not).
# BUG FIX: the original hard-coded range(1, 2), which loaded only animation1.gif
# and silently dropped every other clip produced by gen_movie().
gif_paths = sorted(
    Path(".").glob("animation*.gif"),
    key=lambda p: int(re.search(r"\d+", p.stem).group()),
)
# Load each input GIF as a moviepy clip.
video_clips = [VideoFileClip(str(p)) for p in gif_paths]
# Concatenate the clips into a single video and export it as H.264 MP4.
final_clip = concatenate_videoclips(video_clips)
final_clip.write_videofile("combined_video.mp4", codec="libx264", fps=24)