File size: 6,503 Bytes
b89b569 96fb70e 57c34dd 0160fe0 26a3c72 28e48f7 4f181ef 0160fe0 26a3c72 28e48f7 0160fe0 4f181ef 26a3c72 4f181ef 26a3c72 4f181ef 26a3c72 4f181ef 26a3c72 4f181ef 26a3c72 4f181ef 26a3c72 4f181ef 26a3c72 28e48f7 0160fe0 4f181ef 26a3c72 4f181ef 26a3c72 4f181ef 0160fe0 4f181ef 0160fe0 b89b569 57c34dd b89b569 96fb70e 57c34dd 56d6fa1 0160fe0 4f181ef 0160fe0 b89b569 57c34dd af6ade4 dc0af82 b89b569 0160fe0 b89b569 57c34dd |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 |
import gradio as gr
import os
import shutil
import uuid
import subprocess
from threading import Timer
from functools import partial
import time
from smolagents import ToolCollection, ToolCallingAgent, InferenceClientModel, EMPTY_PROMPT_TEMPLATES
# Model and agent configuration.
MODEL = "deepseek-ai/DeepSeek-R1-0528"
MAX_STEPS = 10
# Remote MCP servers: one that builds Motion Canvas projects and one that
# serves the Motion Canvas docs/examples over SSE.
BUILD_SERVER_MCP_CONFIG = {"url": "https://prathje-gradio-motioncanvas-mcp-server.hf.space/gradio_api/mcp/sse", "transport": "sse"}
DOCS_SERVER_MCP_CONFIG = {"url": "https://prathje-gradio-motioncanvas-docs-mcp-server.hf.space/gradio_api/mcp/sse", "transport": "sse"}
SYSTEM_PROMPT = "You are a helpful assistant that generates motion canvas scenes. The user prompts his ideas. You should use the recursive_list with path '.' to check for available classes and examples. You should generate the code for a single standalone motion canvas scene.tsx and build it using the build tool. Please fix any errors. But if the build succeeds, you are done. You do not need to return the code or the logs. You can use all tools provided to you to help you with your task."
from smolagents import tool
# Expose the local "public" directory so built project bundles can be served
# by Gradio's static file route (see get_public_path below).
gr.set_static_paths(paths=[os.path.join(os.path.dirname(__file__), "public")])
def get_local_path(project_id):
    """Filesystem location of the built bundle for *project_id*.

    The bundle lives at ``public/project-<project_id>.js`` next to this
    module, inside the directory registered with gr.set_static_paths.
    """
    bundle_name = f"project-{project_id}.js"
    return os.path.join(os.path.dirname(__file__), "public", bundle_name)

def get_public_path(project_id):
    """URL (under Gradio's static-file route) for the built bundle."""
    return f"/gradio_api/file={get_local_path(project_id)}"
# Seconds before a hung `npm run build` is killed.
BUILD_TIMEOUT=30
# Cached result of the most recent successful build: (code, public_path, logs).
LAST_BUILT_PROJECT_RESULT = "", "", ""
# In theory, we should be using the build server MCP, but it is not working
# right now and we are running out of time ;)
# Note that we added a Success/Failed status to the response here.
@tool
def build_project(code: str) -> str:
    """Build a Motion Canvas project.

    Args:
        code: TypeScript code for the scene to build
    """
    # NOTE(review): despite the `-> str` annotation this returns a 3-tuple
    # (status, public_path, logs). The annotation is left as-is because the
    # @tool decorator derives its schema from it — confirm before changing.
    global LAST_BUILT_PROJECT_RESULT
    # Cache: the agent may retry this tool with identical code after success.
    if code == LAST_BUILT_PROJECT_RESULT[0]:
        return "Success", LAST_BUILT_PROJECT_RESULT[1], LAST_BUILT_PROJECT_RESULT[2]
    LAST_BUILT_PROJECT_RESULT = "", "", ""
    # Build in a throwaway copy of the template project under /tmp.
    project_id = str(uuid.uuid4())
    tmp_dir = os.path.join("/tmp/", project_id)
    shutil.copytree(os.environ['MC_PROJECT_DIR'], tmp_dir, dirs_exist_ok=False, symlinks=True)
    acc_logs = ""
    try:
        # Write the generated scene into the copied project.
        with open(os.path.join(tmp_dir, "src", "scenes", "example.tsx"), "w") as f:
            f.write(code)
        process = subprocess.Popen(
            "npm run build",
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            text=True,
            shell=True,
            cwd=tmp_dir
        )
        try:
            # communicate() drains stdout and stderr concurrently. The old
            # readline loop only read stdout until exit and could deadlock if
            # npm filled the stderr pipe first; the timeout also replaces the
            # hand-rolled Timer(process.kill) and closes both pipes.
            stdout_output, stderr_output = process.communicate(timeout=BUILD_TIMEOUT)
        except subprocess.TimeoutExpired:
            process.kill()
            stdout_output, stderr_output = process.communicate()
        if stdout_output:
            acc_logs += stdout_output
        if stderr_output:
            acc_logs += "\n" + stderr_output
        # Non-zero exit (including a timeout kill) means the build failed.
        if process.returncode != 0:
            return "Failed", "", acc_logs
        # Publish the bundle into the static "public" directory and cache it.
        shutil.copy(os.path.join(tmp_dir, "dist", "project.js"), get_local_path(project_id))
        LAST_BUILT_PROJECT_RESULT = code, get_public_path(project_id), acc_logs
        return "Success", get_public_path(project_id), acc_logs
    except Exception as e:
        return "Failed", "", acc_logs + "\n" + "Error building project: " + str(e)
    finally:
        # Best-effort cleanup: a failing rmtree must not mask the real
        # return value or exception above.
        shutil.rmtree(tmp_dir, ignore_errors=True)
all_tools = []
def generate(message, history, code, logs):
    """Chat handler for the Gradio ChatInterface.

    Runs a tool-calling agent that writes and builds a Motion Canvas scene.
    Returns (chat_reply, code, project_path, logs); the last three come from
    the module-level LAST_BUILT_PROJECT_RESULT side channel that
    build_project() fills in on a successful build.
    """
    global LAST_BUILT_PROJECT_RESULT
    # Reset the side channel so stale results from a previous turn cannot leak.
    LAST_BUILT_PROJECT_RESULT = "", "", ""
    output = "No response"
    # NOTE(review): build_tool_collection is opened but never used below (the
    # hosted build MCP server is broken — see comment near build_project);
    # the connection is still established, which adds latency and can fail.
    with ToolCollection.from_mcp(BUILD_SERVER_MCP_CONFIG, trust_remote_code=True) as build_tool_collection:
        with ToolCollection.from_mcp(DOCS_SERVER_MCP_CONFIG, trust_remote_code=True) as docs_tool_collection:
            # Local name — shadows the module-level all_tools (no `global`).
            all_tools = docs_tool_collection.tools + [build_project]
            model = InferenceClientModel(model_id=MODEL)
            agent = ToolCallingAgent(tools=[*all_tools], model=model, return_full_result=True, max_steps=MAX_STEPS)
            agent.prompt_templates["system_prompt"] = SYSTEM_PROMPT
            try:
                res = agent.run(
                    message,
                    additional_args={'history': history, 'code': code, 'logs': logs}
                )
                output = res.output
            except Exception as e:
                # Swallow agent failures so the chat UI still gets a reply.
                print(e)
                output = "An error occurred while generating the code"
    return output, LAST_BUILT_PROJECT_RESULT[0], LAST_BUILT_PROJECT_RESULT[1], LAST_BUILT_PROJECT_RESULT[2]
from gradio_motioncanvasplayer import MotionCanvasPlayer
# Just some example project that serves as a placeholder in the beginning
# (a pre-built bundle hosted on the player component's demo Space).
example_project_path = "https://prathje-gradio-motioncanvasplayer.hf.space/gradio_api/file=/home/user/app/public/project-3.17.2.js"
def load_example(example):
    """Map an example record onto the (project_path, code, logs) outputs.

    Logs are reset to the empty string when an example is loaded.
    """
    project_path = example["project_path"]
    scene_code = example["code"]
    return project_path, scene_code, ""
# --- UI layout -------------------------------------------------------------
with gr.Blocks(theme=gr.themes.Monochrome()) as app:
    gr.Markdown("# Motion Canvas Agent")
    gr.Markdown("Leverage the power of AI and Motion Canvas to create animations using TypeScript.")
    gr.Markdown("You can find a demo video here: https://youtu.be/cw6GxBicU4o")
    # render=False: components are declared up front so the chat handler can
    # target them; the player is placed into the layout via player.render().
    player = MotionCanvasPlayer(example_project_path, auto=True, quality=1.0, width=1920, height=1080, variables="{}", render=False)
    # NOTE(review): code and logs are never .render()ed anywhere, so they act
    # as hidden state carried between chat turns — confirm this is intended.
    code = gr.Code(value="", language="typescript", render=False)
    logs = gr.Textbox(value="", label="Build Logs", interactive=False, render=False)
    with gr.Row():
        with gr.Column():
            gr.Markdown("## Chat")
            # additional_outputs lets generate() update the code editor, the
            # player, and the build logs alongside its chat reply.
            chat = gr.ChatInterface(fn=generate, type="messages", additional_inputs=[code, logs], additional_outputs=[code, player, logs])
        with gr.Column():
            gr.Markdown("## Preview")
            player.render()
if __name__ == "__main__":
    # TODO: in the future we could serve this as an MCP server too, but right
    # now the preview must stay available, so MCP serving is disabled.
    # Fix: removed a stray trailing "|" after the launch() call that would
    # have been a syntax error.
    app.launch(mcp_server=False, strict_cors=False)