/*
 * HFJobsWorkflow — multi-step wizard for submitting a local training job to
 * Hugging Face Jobs (cloud training).
 *
 * Steps (see `type Step`): 'validate' -> 'upload' -> 'submit' -> 'complete'.
 *
 * Props:
 *   jobConfig  — the local training JobConfig to run in the cloud.
 *   onComplete — invoked after a successful submission with the HF job id and,
 *                when the best-effort local save succeeded, the local job id.
 *
 * Handlers (all read the token as `authToken || settings.HF_TOKEN` and throw
 * if neither is set):
 *   validateToken — POST /api/hf-hub {action:'whoami'}; on success stores the
 *                   user, seeds `defaultNamespace` (and `namespace` if unset)
 *                   from user.name, advances to 'upload'.
 *   uploadDataset — for 'existing' source: POST {action:'validateDataset'} and
 *                   reuse the given repo id. For 'upload' source: POST
 *                   {action:'createDataset'} then {action:'uploadDataset'}
 *                   using folder_path from jobConfig.config.process[0]
 *                   .datasets[0]. Either path sets `uploadResult` and advances
 *                   to 'submit'.
 *   submitJob     — POST /api/hf-jobs {action:'submitJob'}; best-effort
 *                   persists the job locally via upsertJob (failure is only
 *                   console.warn'ed, not fatal), sets `jobResult`, advances to
 *                   'complete', and calls onComplete(hfJobId, localJobId).
 *
 * NOTE(review) — defects/oddities visible in this code, to confirm/fix:
 *   - `useState('validate')` widens currentStep to `string`; should be
 *     `useState<Step>('validate')` so the switch in renderStepContent is
 *     checked against the Step union.
 *   - `useState(null)` for error/validationResult/uploadResult/jobResult
 *     infers type `null` under strict TS, so the later setters (e.g.
 *     `setError(string)`, `setUploadResult({...})`) will not type-check —
 *     annotate, e.g. `useState<string | null>(null)`.
 *   - uploadDataset checks `!resolvedNamespace` twice (once up front, again in
 *     the 'upload' branch) — the second check is unreachable/dead.
 *   - The guard `datasetPath === datasetName` (rejecting a folder path that
 *     merely equals the dataset name) looks wrong — confirm the intent; the
 *     error message only talks about the folder existing on the host.
 *   - `autoUpload` state and the imported Button/Card/TextInput/Checkbox are
 *     not referenced in the visible code — presumably used in the stripped
 *     JSX below; verify against the full file.
 *   - Extraction damage in this copy: lines were collapsed (so inline `//`
 *     comments swallow trailing code in this flattened form) and one thrown
 *     error string is split across a line break inside the literal. Repair
 *     against the original file, not this flattened text.
 */
'use client'; import { useState } from 'react'; import { Button } from '@headlessui/react'; import { SelectInput, TextInput, Checkbox } from '@/components/formInputs'; import Card from '@/components/Card'; import { apiClient } from '@/utils/api'; import { JobConfig } from '@/types'; import useSettings from '@/hooks/useSettings'; import { upsertJob } from '@/utils/storage/jobStorage'; import { useAuth } from '@/contexts/AuthContext'; interface HFJobsWorkflowProps { jobConfig: JobConfig; onComplete: (jobId: string, localJobId?: string) => void; } type Step = 'validate' | 'upload' | 'submit' | 'complete'; export default function HFJobsWorkflow({ jobConfig, onComplete }: HFJobsWorkflowProps) { const { settings } = useSettings(); const { token: authToken } = useAuth(); const [defaultNamespace, setDefaultNamespace] = useState(''); const [currentStep, setCurrentStep] = useState('validate'); const [loading, setLoading] = useState(false); const [error, setError] = useState(null); // Form state const [datasetSource, setDatasetSource] = useState<'upload' | 'existing'>('upload'); const [datasetName, setDatasetName] = useState(`${jobConfig.config.name}-dataset`); const [existingDatasetId, setExistingDatasetId] = useState(''); const [hardware, setHardware] = useState(settings.HF_JOBS_DEFAULT_HARDWARE || 'a100-large'); const [namespace, setNamespace] = useState(settings.HF_JOBS_NAMESPACE || ''); const [autoUpload, setAutoUpload] = useState(true); // Progress state const [validationResult, setValidationResult] = useState(null); const [uploadResult, setUploadResult] = useState(null); const [jobResult, setJobResult] = useState(null); const validateToken = async () => { setLoading(true); setError(null); const effectiveToken = authToken || settings.HF_TOKEN; try { if (!effectiveToken) { throw new Error('A valid Hugging Face token is required to continue.'); } const response = await apiClient.post('/api/hf-hub', { action: 'whoami', token: effectiveToken, }); if (response.data.user) { 
// Token accepted: record the HF user, derive the default namespace from
// user.name (seeding `namespace` only if the user hasn't set one), and move on.
setValidationResult(response.data.user); const resolvedName = response.data.user.name || ''; setDefaultNamespace(resolvedName); if (!namespace) { setNamespace(resolvedName); } setCurrentStep('upload'); } } catch (err: any) { setError(err.response?.data?.error || 'Failed to validate token'); } finally { setLoading(false); } }; const uploadDataset = async () => { setLoading(true); setError(null); const effectiveToken = authToken || settings.HF_TOKEN; try { if (!effectiveToken) { throw new Error('A valid Hugging Face token is required to continue.'); } const resolvedNamespace = namespace || defaultNamespace; if (!resolvedNamespace) { throw new Error('Unable to determine a namespace. Validate your HF token or set a namespace in Settings.'); } if (datasetSource === 'existing') { // Use existing dataset - just validate it exists if (!existingDatasetId) { throw new Error('Please enter a dataset ID'); } // Validate dataset exists const validateResponse = await apiClient.post('/api/hf-hub', { action: 'validateDataset', token: effectiveToken, datasetId: existingDatasetId, }); if (validateResponse.data.exists) { setUploadResult({ repoId: existingDatasetId, url: `https://huggingface.co/datasets/${existingDatasetId}`, existing: true, }); setCurrentStep('submit'); } else { throw new Error(`Dataset ${existingDatasetId} not found or not accessible`); } } else { if (!resolvedNamespace) { throw new Error('Unable to determine a namespace. 
Validate your HF token or set a namespace in Settings.'); } // Upload new dataset // First, create the dataset repository const createResponse = await apiClient.post('/api/hf-hub', { action: 'createDataset', token: effectiveToken, namespace: resolvedNamespace, datasetName, }); if (!createResponse.data.success) { throw new Error('Failed to create dataset repository'); } // Get dataset path from first dataset in config const datasetPath = jobConfig.config.process[0].datasets[0]?.folder_path; if (!datasetPath || datasetPath.trim() === '' || datasetPath === datasetName) { throw new Error('Dataset path could not be resolved. Please ensure the dataset folder exists on the host.'); } // Upload dataset files const uploadResponse = await apiClient.post('/api/hf-hub', { action: 'uploadDataset', token: effectiveToken, namespace: resolvedNamespace, datasetName, datasetPath, }); if (uploadResponse.data.success) { setUploadResult({ repoId: uploadResponse.data.repoId, url: `https://huggingface.co/datasets/${uploadResponse.data.repoId}`, existing: false, }); setCurrentStep('submit'); } } } catch (err: any) { setError(err.response?.data?.error || 'Failed to process dataset'); } finally { setLoading(false); } }; const submitJob = async () => { setLoading(true); setError(null); const effectiveToken = authToken || settings.HF_TOKEN; try { const resolvedNamespace = namespace || defaultNamespace; if (!resolvedNamespace) { throw new Error('Unable to determine a namespace. Validate your HF token or set a namespace in Settings.'); } if (!effectiveToken) { throw new Error('A valid Hugging Face token is required to continue.'); } const datasetRepo = uploadResult?.repoId || (datasetSource === 'existing' ? 
existingDatasetId : `${resolvedNamespace}/${datasetName}`); const response = await apiClient.post('/api/hf-jobs', { action: 'submitJob', token: effectiveToken, hardware, namespace: resolvedNamespace, jobConfig, datasetRepo, }); if (response.data.success) { const hfJobId = response.data.jobId; // Save job to local database for tracking let localJobId = undefined; try { const savedJob = await upsertJob({ name: `${jobConfig.config.name}-hf-cloud`, gpu_ids: hardware, job_config: { ...jobConfig, hf_job_id: hfJobId, hf_job_url: hfJobId !== 'unknown' && resolvedNamespace ? `https://huggingface.co/jobs/${resolvedNamespace}/${hfJobId}` : null, dataset_repo: datasetRepo, hardware, is_hf_job: true, training_backend: 'hf-jobs', hf_job_submitted: true, }, info: response.data.message || 'HF Job submitted', status: 'submitted', }); localJobId = savedJob.id; console.log('Saved HF Job to local storage:', savedJob); } catch (localSaveError: any) { console.warn('Failed to save HF Job locally:', localSaveError); // Don't fail the whole process if local save fails } setJobResult({ jobId: hfJobId, message: response.data.message, localJobId: localJobId, }); setCurrentStep('complete'); onComplete(hfJobId, localJobId); } } catch (err: any) { setError(err.response?.data?.error || 'Failed to submit job'); } finally { setLoading(false); } }; const renderStepContent = () => { switch (currentStep) { case 'validate': return (
// NOTE(review): from here to the end of the component the JSX element markup
// appears to have been stripped by extraction; only the rendered text content
// and embedded {expressions} remain. Do NOT treat the lines below as complete
// render code — restore them from the original file.

First, let's validate your Hugging Face token and get your username for dataset uploads.

{validationResult && (

✓ Token valid! Logged in as: {validationResult.name}

)}
); case 'upload': return (

Choose whether to upload a new dataset or use an existing one from HF Hub.

setDatasetSource(value as 'upload' | 'existing')} options={[ { value: 'upload', label: 'Upload New Dataset' }, { value: 'existing', label: 'Use Existing HF Dataset' } ]} /> {datasetSource === 'upload' ? ( <> ) : ( <>

Enter the full dataset ID (namespace/name) from HuggingFace Hub

)} {uploadResult && (

✓ Dataset {uploadResult.existing ? 'validated' : 'uploaded'} successfully!

{uploadResult.existing ? 'Using dataset:' : 'View at:'} {uploadResult.repoId}

)}
); case 'submit': return (

Configure and submit your training job to HF Jobs.

{jobResult && (

✓ Job submitted successfully!

Job ID: {jobResult.jobId}

)}
); case 'complete': return (

🎉 Training job submitted!

Your training job has been submitted to Hugging Face Jobs and is now running in the cloud.

Job ID: {jobResult?.jobId}

{jobResult?.jobId && jobResult.jobId !== 'unknown' && (

Monitor Job: View on HF Jobs →

)}

Dataset: {uploadResult?.repoId}

Hardware: {hardware}

Next steps:

  • Monitor your job progress using: hf jobs logs {jobResult?.jobId}
  • The trained model will be uploaded to: {namespace}/{jobConfig.config.name}-lora
  • You'll receive notifications when training completes
); default: return null; } }; return (
{/* Progress indicator */}
{(['validate', 'upload', 'submit', 'complete'] as Step[]).map((step, index) => (
{index + 1}
{index < 3 && (
)}
))}
{/* Error display */} {error && (

{error}

)} {/* Current step content */} {renderStepContent()}
); }