| import os |
| import gradio as gr |
| from huggingface_hub import HfApi, SpaceHardware |
|
|
| |
# Hugging Face credentials and target Space configuration.
HF_TOKEN = os.getenv("HF_TOKEN")

# Generalized: the training Space id can now be supplied via the
# TRAINING_SPACE_ID environment variable; falls back to the original
# hard-coded placeholder so existing behavior is unchanged.
TRAINING_SPACE_ID = os.getenv("TRAINING_SPACE_ID", "your_space_id_here")

# Single shared client used for all hardware/runtime requests below.
api = HfApi(token=HF_TOKEN)
|
|
| |
def get_task():
    """Return the next scheduled task, or None when nothing is pending.

    Placeholder implementation: no task store is wired up yet, so it
    always reports that there is no pending task.
    """
    pending_task = None
    return pending_task
|
|
| |
def add_task(task):
    """Record *task* as scheduled and return a confirmation message."""
    confirmation = f"Task '{task}' added!"
    return confirmation
|
|
| |
def mark_as_done(task):
    """Mark *task* as finished and return a confirmation message."""
    confirmation = f"Task '{task}' completed!"
    return confirmation
|
|
| |
def train_and_upload(task):
    """Kick off model training for *task* and return a status message.

    Placeholder implementation: no actual training is performed yet.
    """
    status = f"Training model with task: {task}"
    return status
|
|
| |
def gradio_fn(task_input, history):
    """Chat handler: schedule a task or run training depending on state.

    Returns a tuple of (cleared textbox value, updated chat history).
    Side effects: may request a hardware change on the training Space.
    """
    scheduled = get_task()

    if scheduled is None:
        # Nothing queued yet: record the new task, then ask for a GPU so a
        # later invocation can actually run the training job.
        reply = add_task(task_input)
        api.request_space_hardware(repo_id=TRAINING_SPACE_ID, hardware=SpaceHardware.T4_MEDIUM)
        history.append(("Bot", reply))
        return "", history

    # A task is pending; only train once the Space is really on the GPU.
    runtime = api.get_space_runtime(repo_id=TRAINING_SPACE_ID)
    if runtime.hardware == SpaceHardware.T4_MEDIUM:
        training_reply = train_and_upload(scheduled)
        done_reply = mark_as_done(scheduled)

        history.append(("Bot", training_reply))
        history.append(("Bot", done_reply))

        # Training finished: scale back down to the free CPU tier.
        api.request_space_hardware(repo_id=TRAINING_SPACE_ID, hardware=SpaceHardware.CPU_BASIC)
    else:
        # GPU not attached yet; (re-)request it and tell the user to wait.
        api.request_space_hardware(repo_id=TRAINING_SPACE_ID, hardware=SpaceHardware.T4_MEDIUM)
        history.append(("Bot", "Requesting GPU hardware..."))

    return "", history
|
|
| |
# BUG FIX: gradio_fn takes two arguments (task_input, history) and returns two
# values ("", history), but the original Interface declared only ONE input
# component and ONE output component, which makes Gradio fail when the
# function is invoked. Declare matching components: a Textbox plus a State
# holding the chat history on the input side, and the cleared Textbox value
# plus the Chatbot transcript on the output side.
chat_interface = gr.Interface(
    fn=gradio_fn,
    inputs=[
        gr.Textbox(label="Enter task name", placeholder="Type your task here...", lines=1),
        gr.State([]),  # chat history carried across calls; starts empty
    ],
    outputs=[
        gr.Textbox(visible=False),  # receives the cleared textbox value ("")
        gr.Chatbot(),               # receives the updated history
    ],
    live=True,
    title="Task Manager Bot",
    description="Interact with the bot to manage tasks and trigger model training.",
)

# Launch the Gradio app (blocking call; serves the UI).
chat_interface.launch()
|
|