multimodalart HF Staff commited on
Commit
4ff2db9
·
verified ·
1 Parent(s): 9cc172c

Apply fixes to the labels and error management

Browse files
ui/src/app/api/hf-hub/route.ts CHANGED
@@ -74,8 +74,8 @@ export async function POST(request: NextRequest) {
74
 
75
  const addUploadFile = async (absolutePath: string, repoFilePath: string) => {
76
  const buffer = await readFile(absolutePath);
77
- const file = new File([buffer], path.basename(absolutePath));
78
- addUploadContent(repoFilePath, file);
79
  };
80
 
81
  const walkDirectory = async (basePath: string, repoPrefix: string) => {
 
74
 
75
  const addUploadFile = async (absolutePath: string, repoFilePath: string) => {
76
  const buffer = await readFile(absolutePath);
77
+ const blob = new Blob([buffer]);
78
+ addUploadContent(repoFilePath, blob);
79
  };
80
 
81
  const walkDirectory = async (basePath: string, repoPrefix: string) => {
ui/src/app/api/hf-jobs/route.ts CHANGED
@@ -594,17 +594,41 @@ def generate_model_card_readme(repo_id: str, config: dict, model_name: str, cura
594
  license_info = {"license": "creativeml-openrail-m"}
595
 
596
  # Generate tags based on model architecture
597
- tags = ["text-to-image"]
598
-
599
- if "xl" in arch.lower():
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
600
  tags.append("stable-diffusion-xl")
601
- if "flux" in arch.lower():
602
  tags.append("flux")
603
- if "lumina" in arch.lower():
604
  tags.append("lumina2")
605
- if "sd3" in arch.lower() or "v3" in arch.lower():
606
  tags.append("sd3")
607
-
608
  # Add LoRA-specific tags
609
  tags.extend(["lora", "diffusers", "template:sd-lora", "ai-toolkit"])
610
 
@@ -797,7 +821,7 @@ async function submitHFJobUV(token: string, hardware: string, scriptPath: string
797
 
798
  console.log('Setting up environment with HF_TOKEN for job submission');
799
  const namespaceArgs = namespaceOverride ? ` --namespace ${namespaceOverride}` : '';
800
- console.log(`Command: hf jobs uv run --flavor ${hardware} --timeout 7h --secrets HF_TOKEN --detach${namespaceArgs} ${scriptPath}`);
801
 
802
  // Use hf jobs uv run command with timeout and detach to get job ID
803
  const args = [
 
594
  license_info = {"license": "creativeml-openrail-m"}
595
 
596
  # Generate tags based on model architecture
597
+ tags = []
598
+ lower_arch = (arch or "").lower()
599
+ lower_model_name = (model_config.get("name_or_path", "") or "").lower()
600
+
601
+ instruction_arches = {'flux_kontext', 'hidream_e1', 'qwen_image_edit'}
602
+ is_instruction = lower_arch in instruction_arches or 'kontext' in lower_model_name
603
+
604
+ datasets_config = config.get('config', {}).get('process', [{}])[0].get('datasets', [])
605
+ is_video = (
606
+ 'video' in lower_arch
607
+ or process_config.get('type') == 'video'
608
+ or any(isinstance(dataset, dict) and dataset.get('do_i2v') for dataset in datasets_config)
609
+ or lower_arch.startswith('wan21')
610
+ or 'wan21' in lower_model_name
611
+ )
612
+
613
+ if is_instruction:
614
+ tags.append("image-to-image")
615
+ elif is_video:
616
+ if 'i2v' in model_name.lower() or 'i2v' in lower_arch or 'i2v' in lower_model_name:
617
+ tags.append("image-to-video")
618
+ else:
619
+ tags.append("text-to-video")
620
+ else:
621
+ tags.append("text-to-image")
622
+
623
+ if "xl" in lower_arch:
624
  tags.append("stable-diffusion-xl")
625
+ if "flux" in lower_arch:
626
  tags.append("flux")
627
+ if "lumina" in lower_arch:
628
  tags.append("lumina2")
629
+ if "sd3" in lower_arch or "v3" in lower_arch:
630
  tags.append("sd3")
631
+
632
  # Add LoRA-specific tags
633
  tags.extend(["lora", "diffusers", "template:sd-lora", "ai-toolkit"])
634
 
 
821
 
822
  console.log('Setting up environment with HF_TOKEN for job submission');
823
  const namespaceArgs = namespaceOverride ? ` --namespace ${namespaceOverride}` : '';
824
+ console.log(`Command: hf jobs uv run --flavor ${hardware} --timeout 5h --secrets HF_TOKEN --detach${namespaceArgs} ${scriptPath}`);
825
 
826
  // Use hf jobs uv run command with timeout and detach to get job ID
827
  const args = [
ui/src/components/HFJobsWorkflow.tsx CHANGED
@@ -24,6 +24,8 @@ type DatasetUploadPlan = {
24
 
25
  const ensurePosixPath = (value: string) => value.replace(/\\/g, '/').replace(/^\/+/, '');
26
 
 
 
27
  const buildDatasetUploadPlan = (jobConfig: JobConfig): DatasetUploadPlan => {
28
  const datasetEntries = jobConfig?.config?.process?.[0]?.datasets ?? [];
29
  const sampleEntries = jobConfig?.config?.process?.[0]?.sample?.samples ?? [];
@@ -152,6 +154,30 @@ export default function HFJobsWorkflow({ jobConfig, onComplete, hackathonEligibl
152
  const [autoUpload, setAutoUpload] = useState(true);
153
  const [participateHackathon, setParticipateHackathon] = useState(false);
154
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
155
  useEffect(() => {
156
  if (!hackathonEligible && participateHackathon) {
157
  setParticipateHackathon(false);
@@ -296,6 +322,23 @@ export default function HFJobsWorkflow({ jobConfig, onComplete, hackathonEligibl
296
  if (!effectiveToken) {
297
  throw new Error('A valid Hugging Face token is required to continue.');
298
  }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
299
  const datasetRepo =
300
  uploadResult?.repoId ||
301
  (datasetSource === 'existing'
 
24
 
25
  const ensurePosixPath = (value: string) => value.replace(/\\/g, '/').replace(/^\/+/, '');
26
 
27
+ const INSTRUCTION_ARCHES = new Set(['flux_kontext', 'hidream_e1', 'qwen_image_edit']);
28
+
29
  const buildDatasetUploadPlan = (jobConfig: JobConfig): DatasetUploadPlan => {
30
  const datasetEntries = jobConfig?.config?.process?.[0]?.datasets ?? [];
31
  const sampleEntries = jobConfig?.config?.process?.[0]?.sample?.samples ?? [];
 
154
  const [autoUpload, setAutoUpload] = useState(true);
155
  const [participateHackathon, setParticipateHackathon] = useState(false);
156
 
157
+ const requiresControlImages = (() => {
158
+ try {
159
+ const arch = jobConfig?.config?.process?.[0]?.model?.arch;
160
+ return typeof arch === 'string' && INSTRUCTION_ARCHES.has(arch.toLowerCase());
161
+ } catch (error) {
162
+ return false;
163
+ }
164
+ })();
165
+
166
+ const hasControlDataset = (() => {
167
+ try {
168
+ const datasets = jobConfig?.config?.process?.[0]?.datasets ?? [];
169
+ return datasets.some((dataset: any) => {
170
+ const controlPath = dataset?.control_path;
171
+ if (Array.isArray(controlPath)) {
172
+ return controlPath.some(path => typeof path === 'string' && path.trim() !== '');
173
+ }
174
+ return typeof controlPath === 'string' && controlPath.trim() !== '';
175
+ });
176
+ } catch (error) {
177
+ return false;
178
+ }
179
+ })();
180
+
181
  useEffect(() => {
182
  if (!hackathonEligible && participateHackathon) {
183
  setParticipateHackathon(false);
 
322
  if (!effectiveToken) {
323
  throw new Error('A valid Hugging Face token is required to continue.');
324
  }
325
+
326
+ if (requiresControlImages) {
327
+ if (!hasControlDataset) {
328
+ setError('Instruction models require a control dataset. Please select one before submitting.');
329
+ setLoading(false);
330
+ return;
331
+ }
332
+
333
+ const samples = jobConfig?.config?.process?.[0]?.sample?.samples ?? [];
334
+ const missingCtrl = samples.filter((sample: any) => !sample?.ctrl_img || !String(sample.ctrl_img).trim());
335
+ if (missingCtrl.length > 0) {
336
+ setError('Instruction models require a control image for every sample prompt. Please add control images before submitting.');
337
+ setLoading(false);
338
+ return;
339
+ }
340
+ }
341
+
342
  const datasetRepo =
343
  uploadResult?.repoId ||
344
  (datasetSource === 'existing'