notaneimu committed on
Commit
44f343d
·
1 Parent(s): 9634fe9

match colors, exposure, grain

Browse files
dist/assets/{index-1AzwDbQ5.js → index-D3DuJDE3.js} RENAMED
The diff for this file is too large to render. See raw diff
 
dist/index.html CHANGED
@@ -4,7 +4,7 @@
4
  <meta charset="UTF-8" />
5
  <meta name="viewport" content="width=device-width, initial-scale=1.0" />
6
  <title>ONNX in Browser</title>
7
- <script type="module" crossorigin src="/assets/index-1AzwDbQ5.js"></script>
8
  <link rel="stylesheet" crossorigin href="/assets/index-T6D1olwS.css">
9
  </head>
10
  <body>
 
4
  <meta charset="UTF-8" />
5
  <meta name="viewport" content="width=device-width, initial-scale=1.0" />
6
  <title>ONNX in Browser</title>
7
+ <script type="module" crossorigin src="/assets/index-D3DuJDE3.js"></script>
8
  <link rel="stylesheet" crossorigin href="/assets/index-T6D1olwS.css">
9
  </head>
10
  <body>
src/components/ControlPanel.vue CHANGED
@@ -391,6 +391,94 @@ function handleImageChange(event: Event) {
391
  <span class="hidden text-xs text-stone-500 group-open:inline">▼</span>
392
  </summary>
393
  <div class="flex flex-col gap-2.5 pt-2">
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
394
  <label class="flex flex-col gap-1">
395
  <span class="text-xs font-medium text-stone-600">Seam blend width (input px)</span>
396
  <input
 
391
  <span class="hidden text-xs text-stone-500 group-open:inline">▼</span>
392
  </summary>
393
  <div class="flex flex-col gap-2.5 pt-2">
394
+ <label class="flex items-center justify-between rounded-lg border border-stone-200 bg-stone-50 px-3 py-2">
395
+ <span class="text-xs font-medium text-stone-600">Color correct final image</span>
396
+ <input
397
+ v-model="props.controls.colorCorrectionEnabled"
398
+ type="checkbox"
399
+ :disabled="controlsDisabled"
400
+ class="h-4 w-4 accent-stone-700 disabled:opacity-60"
401
+ />
402
+ </label>
403
+
404
+ <label class="flex flex-col gap-1">
405
+ <span class="text-xs font-medium text-stone-600">Color correction strength (%)</span>
406
+ <input
407
+ v-model="props.controls.colorCorrectionStrength"
408
+ type="number"
409
+ min="0"
410
+ max="100"
411
+ step="5"
412
+ placeholder="100"
413
+ :disabled="controlsDisabled || !props.controls.colorCorrectionEnabled"
414
+ class="w-full rounded-lg border border-stone-300 bg-white px-3 py-2 text-sm text-stone-800 outline-none transition focus:border-stone-500 disabled:cursor-not-allowed disabled:opacity-60"
415
+ />
416
+ <p class="m-0 text-xs italic leading-snug text-stone-500">Matches RGB color balance and black/white levels back toward the source image.</p>
417
+ </label>
418
+
419
+ <label class="flex flex-col gap-1">
420
+ <span class="text-xs font-medium text-stone-600">Black/white clip (%)</span>
421
+ <input
422
+ v-model="props.controls.colorCorrectionClip"
423
+ type="number"
424
+ min="0"
425
+ max="10"
426
+ step="0.1"
427
+ placeholder="0.6"
428
+ :disabled="controlsDisabled || !props.controls.colorCorrectionEnabled"
429
+ class="w-full rounded-lg border border-stone-300 bg-white px-3 py-2 text-sm text-stone-800 outline-none transition focus:border-stone-500 disabled:cursor-not-allowed disabled:opacity-60"
430
+ />
431
+ <p class="m-0 text-xs italic leading-snug text-stone-500">Ignores tiny outliers when matching source black and white points.</p>
432
+ </label>
433
+
434
+ <label class="flex items-center justify-between rounded-lg border border-stone-200 bg-stone-50 px-3 py-2">
435
+ <span class="text-xs font-medium text-stone-600">Add film grain</span>
436
+ <input
437
+ v-model="props.controls.filmGrainEnabled"
438
+ type="checkbox"
439
+ :disabled="controlsDisabled"
440
+ class="h-4 w-4 accent-stone-700 disabled:opacity-60"
441
+ />
442
+ </label>
443
+
444
+ <label class="flex flex-col gap-1">
445
+ <span class="text-xs font-medium text-stone-600">Film grain amount (%)</span>
446
+ <input
447
+ v-model="props.controls.filmGrainAmount"
448
+ type="number"
449
+ min="0"
450
+ max="100"
451
+ step="1"
452
+ placeholder="6"
453
+ :disabled="controlsDisabled || !props.controls.filmGrainEnabled"
454
+ class="w-full rounded-lg border border-stone-300 bg-white px-3 py-2 text-sm text-stone-800 outline-none transition focus:border-stone-500 disabled:cursor-not-allowed disabled:opacity-60"
455
+ />
456
+ </label>
457
+
458
+ <label class="flex flex-col gap-1">
459
+ <span class="text-xs font-medium text-stone-600">Film grain size (px)</span>
460
+ <input
461
+ v-model="props.controls.filmGrainSize"
462
+ type="number"
463
+ min="0.5"
464
+ max="8"
465
+ step="0.1"
466
+ placeholder="1.2"
467
+ :disabled="controlsDisabled || !props.controls.filmGrainEnabled"
468
+ class="w-full rounded-lg border border-stone-300 bg-white px-3 py-2 text-sm text-stone-800 outline-none transition focus:border-stone-500 disabled:cursor-not-allowed disabled:opacity-60"
469
+ />
470
+ </label>
471
+
472
+ <label class="flex items-center justify-between rounded-lg border border-stone-200 bg-stone-50 px-3 py-2">
473
+ <span class="text-xs font-medium text-stone-600">Monochrome grain</span>
474
+ <input
475
+ v-model="props.controls.filmGrainMonochrome"
476
+ type="checkbox"
477
+ :disabled="controlsDisabled || !props.controls.filmGrainEnabled"
478
+ class="h-4 w-4 accent-stone-700 disabled:opacity-60"
479
+ />
480
+ </label>
481
+
482
  <label class="flex flex-col gap-1">
483
  <span class="text-xs font-medium text-stone-600">Seam blend width (input px)</span>
484
  <input
src/composables/useOnnxInspector.ts CHANGED
@@ -6,9 +6,12 @@ import ortWasmThreadedJsepWasmUrl from "/node_modules/onnxruntime-web/dist/ort-w
6
  import { computed, onUnmounted, reactive, ref, shallowRef, watch } from "vue";
7
  import configData from "../../config.json";
8
  import {
 
 
9
  basicStats,
10
  blobToImage,
11
  canvasToBlobUrl,
 
12
  downscaleCanvas,
13
  describeSession,
14
  flushUi,
@@ -64,7 +67,13 @@ const MAX_TILE_COLOR_OFFSET = 18;
64
  const DEFAULT_TILE_CORRECTION_DAMPENING = 0.75;
65
  const MAX_SEAM_BLEND_WIDTH = 64;
66
  const DEFAULT_SEAM_CORRECTION_STRENGTH = "100";
 
 
 
 
67
  const MAX_SEAM_CORRECTION_STRENGTH = 300;
 
 
68
  const PREVIEW_REFRESH_DEBOUNCE_MS = 180;
69
  const DOWNSCALE_ALGORITHMS: DownscaleAlgorithm[] = ["lanczos", "area", "bicubic", "nearest"];
70
  const ALGORITHM_LABEL: Record<DownscaleAlgorithm, string> = {
@@ -86,6 +95,13 @@ const DEFAULT_CONTROLS: InspectorControls = {
86
  outputScale: "",
87
  preResizeAlgorithm: "lanczos",
88
  outputScaleAlgorithm: "lanczos",
 
 
 
 
 
 
 
89
  compareWithClassicUpscale: false,
90
  classicCompareAlgorithm: "lanczos",
91
  optLevel: "all",
@@ -113,6 +129,15 @@ interface PreviewRenderSource {
113
  sourceHeight: number;
114
  }
115
 
 
 
 
 
 
 
 
 
 
116
  interface ClassicCompareInputCache {
117
  blob: Blob;
118
  targetWidth: number;
@@ -214,6 +239,34 @@ function resolveSeamCorrectionStrength(rawValue: string | number | null | undefi
214
  return Math.max(0, Math.min(MAX_SEAM_CORRECTION_STRENGTH, percent)) / 100;
215
  }
216
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
217
  function getAutomaticTileBlendSize(overlapSize: number): number {
218
  if (overlapSize < 1) {
219
  return 0;
@@ -521,6 +574,7 @@ export function useOnnxInspector() {
521
  let webgpuListenerAttached = false;
522
  let abortController: AbortController | null = null;
523
  let previewRenderSource: PreviewRenderSource | null = null;
 
524
  let classicCompareInputCache: ClassicCompareInputCache | null = null;
525
  let classicCompareContext: ClassicCompareContext | null = null;
526
 
@@ -618,6 +672,7 @@ export function useOnnxInspector() {
618
  sourceWidth = 0,
619
  sourceHeight = 0,
620
  ) {
 
621
  if (!canvas || sourceWidth <= 0 || sourceHeight <= 0) {
622
  previewRenderSource = null;
623
  return;
@@ -630,6 +685,68 @@ export function useOnnxInspector() {
630
  };
631
  }
632
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
633
  function setCurrentRun(run: CurrentRun | null) {
634
  releaseObjectUrl(currentRun.value?.preview?.url ?? null);
635
  currentRun.value = run;
@@ -640,14 +757,25 @@ export function useOnnxInspector() {
640
  }
641
 
642
  async function releaseCurrentSession() {
643
- if (!session.value) {
 
644
  return;
645
  }
646
 
647
  session.value = null;
648
  sessionProvider.value = null;
649
  modelMetadata.value = null;
650
- log("Released previous model session.");
 
 
 
 
 
 
 
 
 
 
651
  }
652
 
653
  function resetModelInputPreview() {
@@ -733,6 +861,16 @@ export function useOnnxInspector() {
733
  outputScale: toStoredText(controls.outputScale, DEFAULT_CONTROLS.outputScale),
734
  preResizeAlgorithm: controls.preResizeAlgorithm,
735
  outputScaleAlgorithm: controls.outputScaleAlgorithm,
 
 
 
 
 
 
 
 
 
 
736
  compareWithClassicUpscale: Boolean(controls.compareWithClassicUpscale),
737
  classicCompareAlgorithm: controls.classicCompareAlgorithm,
738
  optLevel: controls.optLevel,
@@ -790,6 +928,25 @@ export function useOnnxInspector() {
790
  controls.width = toStoredText(parsed.width, DEFAULT_CONTROLS.width);
791
  controls.height = toStoredText(parsed.height, DEFAULT_CONTROLS.height);
792
  controls.outputScale = toStoredText(parsed.outputScale, DEFAULT_CONTROLS.outputScale);
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
793
 
794
  if (
795
  parsed.preResizeAlgorithm &&
@@ -967,6 +1124,13 @@ export function useOnnxInspector() {
967
  executionProviders: providers,
968
  graphOptimizationLevel: controls.optLevel,
969
  });
 
 
 
 
 
 
 
970
 
971
  session.value = createdSession;
972
  sessionProvider.value = provider;
@@ -1025,7 +1189,7 @@ export function useOnnxInspector() {
1025
  executionProvider: getSessionExecutionProvider(session.value),
1026
  });
1027
  } catch (error) {
1028
- session.value = null;
1029
  updateStatus("Model load failed", "error");
1030
  log("Model load failed.", {
1031
  error: error instanceof Error ? error.message : String(error),
@@ -1091,7 +1255,7 @@ export function useOnnxInspector() {
1091
 
1092
  await loadModelSession();
1093
  } catch (error) {
1094
- session.value = null;
1095
  modelAsset.value = null;
1096
  updateStatus("Model load failed", "error");
1097
  log("Model load failed.", {
@@ -1460,35 +1624,64 @@ export function useOnnxInspector() {
1460
  sourceWidth: number,
1461
  sourceHeight: number,
1462
  ) {
1463
- const requestedScale = getOutputScale(controls.outputScale);
1464
- const requestedWidth = requestedScale ? Math.max(1, Math.round(sourceWidth * requestedScale)) : null;
1465
- const requestedHeight = requestedScale ? Math.max(1, Math.round(sourceHeight * requestedScale)) : null;
1466
- const targetWidth = requestedWidth ? Math.min(renderedCanvas.width, requestedWidth) : renderedCanvas.width;
1467
- const targetHeight = requestedHeight ? Math.min(renderedCanvas.height, requestedHeight) : renderedCanvas.height;
1468
- const willDownscale = targetWidth < renderedCanvas.width || targetHeight < renderedCanvas.height;
1469
- let previewCanvas = renderedCanvas;
1470
 
1471
- if (requestedScale && !willDownscale) {
1472
- log("Skipped post-upscale downscale because the requested scale is not smaller than the model output.", {
1473
- requestedScale,
1474
- requestedSize: `${requestedWidth}x${requestedHeight}`,
1475
- outputSize: `${renderedCanvas.width}x${renderedCanvas.height}`,
1476
- });
1477
- } else if (willDownscale) {
1478
- updateStatus("Downscaling…", "busy");
1479
- resultInfo.progress = "100% (downscaling…)";
1480
  await flushUi();
1481
- previewCanvas = await downscaleCanvas(
1482
- renderedCanvas,
1483
- targetWidth,
1484
- targetHeight,
1485
- controls.outputScaleAlgorithm,
 
 
 
1486
  );
1487
- log("Downscaled output after inference.", {
1488
- requestedScale,
1489
- algorithm: controls.outputScaleAlgorithm,
1490
- from: `${renderedCanvas.width}x${renderedCanvas.height}`,
1491
- to: `${previewCanvas.width}x${previewCanvas.height}`,
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1492
  });
1493
  }
1494
 
@@ -1634,7 +1827,7 @@ export function useOnnxInspector() {
1634
  }, PREVIEW_REFRESH_DEBOUNCE_MS);
1635
  }
1636
 
1637
- async function refreshPreviewFromCachedRender(trigger: "outputScale" | "outputScaleAlgorithm") {
1638
  const source = previewRenderSource;
1639
  const existingRun = currentRun.value;
1640
  if (!source || !existingRun || isRunning.value || isModelLoading.value) {
@@ -1681,7 +1874,7 @@ export function useOnnxInspector() {
1681
  });
1682
  }
1683
 
1684
- function schedulePreviewRefresh(trigger: "outputScale" | "outputScaleAlgorithm") {
1685
  if (!initialized.value || !currentRun.value || !previewRenderSource || isRunning.value || isModelLoading.value) {
1686
  return;
1687
  }
@@ -2249,6 +2442,15 @@ export function useOnnxInspector() {
2249
  return;
2250
  }
2251
 
 
 
 
 
 
 
 
 
 
2252
  if (isRunning.value) {
2253
  log("Run already in progress.");
2254
  return;
@@ -2487,21 +2689,47 @@ export function useOnnxInspector() {
2487
  );
2488
 
2489
  watch(
2490
- () => [controls.outputScale, controls.outputScaleAlgorithm] as const,
 
 
 
 
 
 
 
 
 
 
 
2491
  (next, previous) => {
2492
  if (!initialized.value) {
2493
  return;
2494
  }
2495
 
2496
- if (next[0] === previous[0] && next[1] === previous[1]) {
2497
  return;
2498
  }
2499
 
2500
- if (next[1] !== previous[1]) {
2501
- schedulePreviewRefresh("outputScaleAlgorithm");
2502
- } else {
2503
- schedulePreviewRefresh("outputScale");
2504
- }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2505
  },
2506
  );
2507
 
 
6
  import { computed, onUnmounted, reactive, ref, shallowRef, watch } from "vue";
7
  import configData from "../../config.json";
8
  import {
9
+ applyColorLevelCorrection,
10
+ applyFilmGrain,
11
  basicStats,
12
  blobToImage,
13
  canvasToBlobUrl,
14
+ cloneCanvas,
15
  downscaleCanvas,
16
  describeSession,
17
  flushUi,
 
67
  const DEFAULT_TILE_CORRECTION_DAMPENING = 0.75;
68
  const MAX_SEAM_BLEND_WIDTH = 64;
69
  const DEFAULT_SEAM_CORRECTION_STRENGTH = "100";
70
+ const DEFAULT_COLOR_CORRECTION_STRENGTH = "100";
71
+ const DEFAULT_COLOR_CORRECTION_CLIP = "0.6";
72
+ const DEFAULT_FILM_GRAIN_AMOUNT = "6";
73
+ const DEFAULT_FILM_GRAIN_SIZE = "1.2";
74
  const MAX_SEAM_CORRECTION_STRENGTH = 300;
75
+ const MAX_COLOR_CORRECTION_CLIP = 10;
76
+ const MAX_FILM_GRAIN_SIZE = 8;
77
  const PREVIEW_REFRESH_DEBOUNCE_MS = 180;
78
  const DOWNSCALE_ALGORITHMS: DownscaleAlgorithm[] = ["lanczos", "area", "bicubic", "nearest"];
79
  const ALGORITHM_LABEL: Record<DownscaleAlgorithm, string> = {
 
95
  outputScale: "",
96
  preResizeAlgorithm: "lanczos",
97
  outputScaleAlgorithm: "lanczos",
98
+ colorCorrectionEnabled: false,
99
+ colorCorrectionStrength: DEFAULT_COLOR_CORRECTION_STRENGTH,
100
+ colorCorrectionClip: DEFAULT_COLOR_CORRECTION_CLIP,
101
+ filmGrainEnabled: false,
102
+ filmGrainAmount: DEFAULT_FILM_GRAIN_AMOUNT,
103
+ filmGrainSize: DEFAULT_FILM_GRAIN_SIZE,
104
+ filmGrainMonochrome: true,
105
  compareWithClassicUpscale: false,
106
  classicCompareAlgorithm: "lanczos",
107
  optLevel: "all",
 
129
  sourceHeight: number;
130
  }
131
 
132
+ interface PreviewBaseCache {
133
+ renderCanvas: HTMLCanvasElement;
134
+ sourceWidth: number;
135
+ sourceHeight: number;
136
+ outputScale: string;
137
+ outputScaleAlgorithm: DownscaleAlgorithm;
138
+ canvas: HTMLCanvasElement;
139
+ }
140
+
141
  interface ClassicCompareInputCache {
142
  blob: Blob;
143
  targetWidth: number;
 
239
  return Math.max(0, Math.min(MAX_SEAM_CORRECTION_STRENGTH, percent)) / 100;
240
  }
241
 
242
+ function resolveColorCorrectionStrength(rawValue: string | number | null | undefined): number {
243
+ const parsed = parseNumericControl(rawValue);
244
+ const defaultStrength = Number.parseFloat(DEFAULT_COLOR_CORRECTION_STRENGTH);
245
+ const percent = parsed === null ? defaultStrength : parsed;
246
+ return Math.max(0, Math.min(100, percent)) / 100;
247
+ }
248
+
249
+ function resolveColorCorrectionClip(rawValue: string | number | null | undefined): number {
250
+ const parsed = parseNumericControl(rawValue);
251
+ const defaultClip = Number.parseFloat(DEFAULT_COLOR_CORRECTION_CLIP);
252
+ const percent = parsed === null ? defaultClip : parsed;
253
+ return Math.max(0, Math.min(MAX_COLOR_CORRECTION_CLIP, percent)) / 100;
254
+ }
255
+
256
+ function resolveFilmGrainAmount(rawValue: string | number | null | undefined): number {
257
+ const parsed = parseNumericControl(rawValue);
258
+ const defaultAmount = Number.parseFloat(DEFAULT_FILM_GRAIN_AMOUNT);
259
+ const percent = parsed === null ? defaultAmount : parsed;
260
+ return Math.max(0, Math.min(100, percent)) / 100;
261
+ }
262
+
263
+ function resolveFilmGrainSize(rawValue: string | number | null | undefined): number {
264
+ const parsed = parseNumericControl(rawValue);
265
+ const defaultSize = Number.parseFloat(DEFAULT_FILM_GRAIN_SIZE);
266
+ const size = parsed === null ? defaultSize : parsed;
267
+ return Math.max(0.5, Math.min(MAX_FILM_GRAIN_SIZE, size));
268
+ }
269
+
270
  function getAutomaticTileBlendSize(overlapSize: number): number {
271
  if (overlapSize < 1) {
272
  return 0;
 
574
  let webgpuListenerAttached = false;
575
  let abortController: AbortController | null = null;
576
  let previewRenderSource: PreviewRenderSource | null = null;
577
+ let previewBaseCache: PreviewBaseCache | null = null;
578
  let classicCompareInputCache: ClassicCompareInputCache | null = null;
579
  let classicCompareContext: ClassicCompareContext | null = null;
580
 
 
672
  sourceWidth = 0,
673
  sourceHeight = 0,
674
  ) {
675
+ previewBaseCache = null;
676
  if (!canvas || sourceWidth <= 0 || sourceHeight <= 0) {
677
  previewRenderSource = null;
678
  return;
 
685
  };
686
  }
687
 
688
+ async function buildPreviewBase(
689
+ renderedCanvas: HTMLCanvasElement,
690
+ sourceWidth: number,
691
+ sourceHeight: number,
692
+ ): Promise<HTMLCanvasElement> {
693
+ const outputScale = getControlText(controls.outputScale);
694
+ const outputScaleAlgorithm = controls.outputScaleAlgorithm;
695
+ if (
696
+ previewBaseCache &&
697
+ previewBaseCache.renderCanvas === renderedCanvas &&
698
+ previewBaseCache.sourceWidth === sourceWidth &&
699
+ previewBaseCache.sourceHeight === sourceHeight &&
700
+ previewBaseCache.outputScale === outputScale &&
701
+ previewBaseCache.outputScaleAlgorithm === outputScaleAlgorithm
702
+ ) {
703
+ return previewBaseCache.canvas;
704
+ }
705
+
706
+ const requestedScale = getOutputScale(controls.outputScale);
707
+ const requestedWidth = requestedScale ? Math.max(1, Math.round(sourceWidth * requestedScale)) : null;
708
+ const requestedHeight = requestedScale ? Math.max(1, Math.round(sourceHeight * requestedScale)) : null;
709
+ const targetWidth = requestedWidth ? Math.min(renderedCanvas.width, requestedWidth) : renderedCanvas.width;
710
+ const targetHeight = requestedHeight ? Math.min(renderedCanvas.height, requestedHeight) : renderedCanvas.height;
711
+ const willDownscale = targetWidth < renderedCanvas.width || targetHeight < renderedCanvas.height;
712
+ let baseCanvas = renderedCanvas;
713
+
714
+ if (requestedScale && !willDownscale) {
715
+ log("Skipped post-upscale downscale because the requested scale is not smaller than the model output.", {
716
+ requestedScale,
717
+ requestedSize: `${requestedWidth}x${requestedHeight}`,
718
+ outputSize: `${renderedCanvas.width}x${renderedCanvas.height}`,
719
+ });
720
+ } else if (willDownscale) {
721
+ updateStatus("Downscaling…", "busy");
722
+ resultInfo.progress = "100% (downscaling…)";
723
+ await flushUi();
724
+ baseCanvas = await downscaleCanvas(
725
+ renderedCanvas,
726
+ targetWidth,
727
+ targetHeight,
728
+ controls.outputScaleAlgorithm,
729
+ );
730
+ log("Downscaled output after inference.", {
731
+ requestedScale,
732
+ algorithm: controls.outputScaleAlgorithm,
733
+ from: `${renderedCanvas.width}x${renderedCanvas.height}`,
734
+ to: `${baseCanvas.width}x${baseCanvas.height}`,
735
+ });
736
+ }
737
+
738
+ previewBaseCache = {
739
+ renderCanvas: renderedCanvas,
740
+ sourceWidth,
741
+ sourceHeight,
742
+ outputScale,
743
+ outputScaleAlgorithm,
744
+ canvas: baseCanvas,
745
+ };
746
+
747
+ return baseCanvas;
748
+ }
749
+
750
  function setCurrentRun(run: CurrentRun | null) {
751
  releaseObjectUrl(currentRun.value?.preview?.url ?? null);
752
  currentRun.value = run;
 
757
  }
758
 
759
  async function releaseCurrentSession() {
760
+ const currentSession = session.value;
761
+ if (!currentSession) {
762
  return;
763
  }
764
 
765
  session.value = null;
766
  sessionProvider.value = null;
767
  modelMetadata.value = null;
768
+
769
+ try {
770
+ if (typeof currentSession.release === "function") {
771
+ await currentSession.release();
772
+ }
773
+ log("Released previous model session.");
774
+ } catch (error) {
775
+ log("Failed to release previous model session cleanly.", {
776
+ error: error instanceof Error ? error.message : String(error),
777
+ });
778
+ }
779
  }
780
 
781
  function resetModelInputPreview() {
 
861
  outputScale: toStoredText(controls.outputScale, DEFAULT_CONTROLS.outputScale),
862
  preResizeAlgorithm: controls.preResizeAlgorithm,
863
  outputScaleAlgorithm: controls.outputScaleAlgorithm,
864
+ colorCorrectionEnabled: Boolean(controls.colorCorrectionEnabled),
865
+ colorCorrectionStrength: toStoredText(
866
+ controls.colorCorrectionStrength,
867
+ DEFAULT_CONTROLS.colorCorrectionStrength,
868
+ ),
869
+ colorCorrectionClip: toStoredText(controls.colorCorrectionClip, DEFAULT_CONTROLS.colorCorrectionClip),
870
+ filmGrainEnabled: Boolean(controls.filmGrainEnabled),
871
+ filmGrainAmount: toStoredText(controls.filmGrainAmount, DEFAULT_CONTROLS.filmGrainAmount),
872
+ filmGrainSize: toStoredText(controls.filmGrainSize, DEFAULT_CONTROLS.filmGrainSize),
873
+ filmGrainMonochrome: Boolean(controls.filmGrainMonochrome),
874
  compareWithClassicUpscale: Boolean(controls.compareWithClassicUpscale),
875
  classicCompareAlgorithm: controls.classicCompareAlgorithm,
876
  optLevel: controls.optLevel,
 
928
  controls.width = toStoredText(parsed.width, DEFAULT_CONTROLS.width);
929
  controls.height = toStoredText(parsed.height, DEFAULT_CONTROLS.height);
930
  controls.outputScale = toStoredText(parsed.outputScale, DEFAULT_CONTROLS.outputScale);
931
+ if (typeof parsed.colorCorrectionEnabled === "boolean") {
932
+ controls.colorCorrectionEnabled = parsed.colorCorrectionEnabled;
933
+ }
934
+ controls.colorCorrectionStrength = toStoredText(
935
+ parsed.colorCorrectionStrength,
936
+ DEFAULT_CONTROLS.colorCorrectionStrength,
937
+ );
938
+ controls.colorCorrectionClip = toStoredText(
939
+ parsed.colorCorrectionClip,
940
+ DEFAULT_CONTROLS.colorCorrectionClip,
941
+ );
942
+ if (typeof parsed.filmGrainEnabled === "boolean") {
943
+ controls.filmGrainEnabled = parsed.filmGrainEnabled;
944
+ }
945
+ controls.filmGrainAmount = toStoredText(parsed.filmGrainAmount, DEFAULT_CONTROLS.filmGrainAmount);
946
+ controls.filmGrainSize = toStoredText(parsed.filmGrainSize, DEFAULT_CONTROLS.filmGrainSize);
947
+ if (typeof parsed.filmGrainMonochrome === "boolean") {
948
+ controls.filmGrainMonochrome = parsed.filmGrainMonochrome;
949
+ }
950
 
951
  if (
952
  parsed.preResizeAlgorithm &&
 
1124
  executionProviders: providers,
1125
  graphOptimizationLevel: controls.optLevel,
1126
  });
1127
+ const actualProvider = getSessionExecutionProvider(createdSession);
1128
+ if (actualProvider && actualProvider !== provider) {
1129
+ await createdSession.release();
1130
+ throw new Error(
1131
+ `Requested ${provider} session, but ONNX Runtime prepared ${actualProvider}. Refusing provider fallback.`,
1132
+ );
1133
+ }
1134
 
1135
  session.value = createdSession;
1136
  sessionProvider.value = provider;
 
1189
  executionProvider: getSessionExecutionProvider(session.value),
1190
  });
1191
  } catch (error) {
1192
+ await releaseCurrentSession();
1193
  updateStatus("Model load failed", "error");
1194
  log("Model load failed.", {
1195
  error: error instanceof Error ? error.message : String(error),
 
1255
 
1256
  await loadModelSession();
1257
  } catch (error) {
1258
+ await releaseCurrentSession();
1259
  modelAsset.value = null;
1260
  updateStatus("Model load failed", "error");
1261
  log("Model load failed.", {
 
1624
  sourceWidth: number,
1625
  sourceHeight: number,
1626
  ) {
1627
+ const shouldColorCorrect = controls.colorCorrectionEnabled && !!imageAsset.value;
1628
+ const shouldApplyFilmGrain = controls.filmGrainEnabled;
1629
+ const previewBase = await buildPreviewBase(renderedCanvas, sourceWidth, sourceHeight);
1630
+ let previewCanvas = previewBase;
 
 
 
1631
 
1632
+ if (shouldColorCorrect || shouldApplyFilmGrain) {
1633
+ previewCanvas = cloneCanvas(previewCanvas);
1634
+ }
1635
+
1636
+ if (shouldColorCorrect || shouldApplyFilmGrain) {
1637
+ updateStatus("Finishing…", "busy");
1638
+ resultInfo.progress = "100% (finishing…)";
 
 
1639
  await flushUi();
1640
+ }
1641
+
1642
+ if (shouldColorCorrect && imageAsset.value) {
1643
+ const sourceImage = await blobToImage(imageAsset.value.blob);
1644
+ const sourceCanvasAtInputSize = await buildClassicCompareInputCanvas(
1645
+ sourceImage,
1646
+ sourceWidth,
1647
+ sourceHeight,
1648
  );
1649
+ const referenceCanvas =
1650
+ sourceCanvasAtInputSize.width === previewCanvas.width &&
1651
+ sourceCanvasAtInputSize.height === previewCanvas.height
1652
+ ? sourceCanvasAtInputSize
1653
+ : await downscaleCanvas(
1654
+ sourceCanvasAtInputSize,
1655
+ previewCanvas.width,
1656
+ previewCanvas.height,
1657
+ controls.outputScaleAlgorithm,
1658
+ );
1659
+
1660
+ const strength = resolveColorCorrectionStrength(controls.colorCorrectionStrength);
1661
+ const clipFraction = resolveColorCorrectionClip(controls.colorCorrectionClip);
1662
+ await applyColorLevelCorrection(previewCanvas, referenceCanvas, {
1663
+ strength,
1664
+ clipFraction,
1665
+ });
1666
+ log("Applied final-image color correction.", {
1667
+ strengthPercent: Math.round(strength * 100),
1668
+ clipPercent: round(clipFraction * 100),
1669
+ referenceSize: `${referenceCanvas.width}x${referenceCanvas.height}`,
1670
+ });
1671
+ }
1672
+
1673
+ if (shouldApplyFilmGrain) {
1674
+ const amount = resolveFilmGrainAmount(controls.filmGrainAmount);
1675
+ const size = resolveFilmGrainSize(controls.filmGrainSize);
1676
+ await applyFilmGrain(previewCanvas, {
1677
+ amount,
1678
+ size,
1679
+ monochrome: controls.filmGrainMonochrome,
1680
+ });
1681
+ log("Applied film grain.", {
1682
+ amountPercent: Math.round(amount * 100),
1683
+ size,
1684
+ monochrome: controls.filmGrainMonochrome,
1685
  });
1686
  }
1687
 
 
1827
  }, PREVIEW_REFRESH_DEBOUNCE_MS);
1828
  }
1829
 
1830
+ async function refreshPreviewFromCachedRender(trigger: string) {
1831
  const source = previewRenderSource;
1832
  const existingRun = currentRun.value;
1833
  if (!source || !existingRun || isRunning.value || isModelLoading.value) {
 
1874
  });
1875
  }
1876
 
1877
+ function schedulePreviewRefresh(trigger: string) {
1878
  if (!initialized.value || !currentRun.value || !previewRenderSource || isRunning.value || isModelLoading.value) {
1879
  return;
1880
  }
 
2442
  return;
2443
  }
2444
 
2445
+ if (sessionProvider.value && sessionProvider.value !== controls.provider) {
2446
+ updateStatus("Load model", "error");
2447
+ log("Run skipped because the loaded session provider does not match the selected provider.", {
2448
+ loadedProvider: sessionProvider.value,
2449
+ selectedProvider: controls.provider,
2450
+ });
2451
+ return;
2452
+ }
2453
+
2454
  if (isRunning.value) {
2455
  log("Run already in progress.");
2456
  return;
 
2689
  );
2690
 
2691
  watch(
2692
+ () =>
2693
+ [
2694
+ controls.outputScale,
2695
+ controls.outputScaleAlgorithm,
2696
+ controls.colorCorrectionEnabled,
2697
+ controls.colorCorrectionStrength,
2698
+ controls.colorCorrectionClip,
2699
+ controls.filmGrainEnabled,
2700
+ controls.filmGrainAmount,
2701
+ controls.filmGrainSize,
2702
+ controls.filmGrainMonochrome,
2703
+ ] as const,
2704
  (next, previous) => {
2705
  if (!initialized.value) {
2706
  return;
2707
  }
2708
 
2709
+ if (next.every((value, index) => value === previous[index])) {
2710
  return;
2711
  }
2712
 
2713
+ const trigger =
2714
+ next[1] !== previous[1]
2715
+ ? "outputScaleAlgorithm"
2716
+ : next[0] !== previous[0]
2717
+ ? "outputScale"
2718
+ : next[2] !== previous[2]
2719
+ ? "colorCorrectionToggle"
2720
+ : next[3] !== previous[3]
2721
+ ? "colorCorrectionStrength"
2722
+ : next[4] !== previous[4]
2723
+ ? "colorCorrectionClip"
2724
+ : next[5] !== previous[5]
2725
+ ? "filmGrainToggle"
2726
+ : next[6] !== previous[6]
2727
+ ? "filmGrainAmount"
2728
+ : next[7] !== previous[7]
2729
+ ? "filmGrainSize"
2730
+ : "filmGrainMonochrome";
2731
+
2732
+ schedulePreviewRefresh(trigger);
2733
  },
2734
  );
2735
 
src/lib/onnxHelpers.ts CHANGED
@@ -840,6 +840,317 @@ export function canvasToBlobUrl(canvas: HTMLCanvasElement, mimeType = "image/png
840
  });
841
  }
842
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
843
  interface ResampleContributor {
844
  offsets: number[];
845
  weights: number[];
 
840
  });
841
  }
842
 
843
+ export function cloneCanvas(sourceCanvas: HTMLCanvasElement): HTMLCanvasElement {
844
+ const clone = document.createElement("canvas");
845
+ clone.width = sourceCanvas.width;
846
+ clone.height = sourceCanvas.height;
847
+ const ctx = clone.getContext("2d");
848
+ if (!ctx) {
849
+ throw new Error("2D canvas context is unavailable.");
850
+ }
851
+
852
+ ctx.drawImage(sourceCanvas, 0, 0);
853
+ return clone;
854
+ }
855
+
856
+ interface ChannelLevelStats {
857
+ low: number;
858
+ mid: number;
859
+ high: number;
860
+ }
861
+
862
+ function getLevelStatsFromHistogram(histogram: Uint32Array, total: number, clipFraction: number): ChannelLevelStats {
863
+ const clampedClip = clamp(clipFraction, 0, 0.2);
864
+ const lowIndex = Math.max(0, Math.floor(total * clampedClip));
865
+ const midIndex = Math.max(0, Math.floor(total * 0.5));
866
+ const highIndex = Math.max(0, Math.ceil(total * (1 - clampedClip)) - 1);
867
+ const low = getPercentileFromHistogram(histogram, lowIndex);
868
+ const mid = getPercentileFromHistogram(histogram, midIndex);
869
+ const high = getPercentileFromHistogram(histogram, highIndex);
870
+
871
+ return {
872
+ low,
873
+ mid,
874
+ high: Math.max(low + 1, high),
875
+ };
876
+ }
877
+
878
+ function getPercentileFromHistogram(histogram: Uint32Array, targetIndex: number): number {
879
+ let cumulative = 0;
880
+ for (let value = 0; value < histogram.length; value += 1) {
881
+ cumulative += histogram[value];
882
+ if (cumulative > targetIndex) {
883
+ return value;
884
+ }
885
+ }
886
+
887
+ return histogram.length - 1;
888
+ }
889
+
890
+ function buildChannelLevelStats(data: Uint8ClampedArray, clipFraction: number): ChannelLevelStats[] {
891
+ const histograms = [new Uint32Array(256), new Uint32Array(256), new Uint32Array(256)];
892
+ let total = 0;
893
+
894
+ for (let offset = 0; offset < data.length; offset += 4) {
895
+ if (data[offset + 3] === 0) {
896
+ continue;
897
+ }
898
+
899
+ histograms[0][data[offset]] += 1;
900
+ histograms[1][data[offset + 1]] += 1;
901
+ histograms[2][data[offset + 2]] += 1;
902
+ total += 1;
903
+ }
904
+
905
+ if (total < 2) {
906
+ return [
907
+ { low: 0, mid: 128, high: 255 },
908
+ { low: 0, mid: 128, high: 255 },
909
+ { low: 0, mid: 128, high: 255 },
910
+ ];
911
+ }
912
+
913
+ return histograms.map((histogram) => getLevelStatsFromHistogram(histogram, total, clipFraction));
914
+ }
915
+
916
+ function buildLuminanceLevelStats(data: Uint8ClampedArray, clipFraction: number): ChannelLevelStats {
917
+ const histogram = new Uint32Array(256);
918
+ let total = 0;
919
+
920
+ for (let offset = 0; offset < data.length; offset += 4) {
921
+ if (data[offset + 3] === 0) {
922
+ continue;
923
+ }
924
+
925
+ const luma = Math.round(data[offset] * 0.2126 + data[offset + 1] * 0.7152 + data[offset + 2] * 0.0722);
926
+ histogram[clamp(luma, 0, 255)] += 1;
927
+ total += 1;
928
+ }
929
+
930
+ if (total < 2) {
931
+ return { low: 0, mid: 128, high: 255 };
932
+ }
933
+
934
+ return getLevelStatsFromHistogram(histogram, total, clipFraction);
935
+ }
936
+
937
+ function normalizeLevel(value: number, levels: ChannelLevelStats): number {
938
+ return clamp((value - levels.low) / Math.max(1, levels.high - levels.low), 0, 1);
939
+ }
940
+
941
+ function resolveCurveGamma(sourceLevels: ChannelLevelStats, desiredLevels: ChannelLevelStats): number {
942
+ const sourceMid = normalizeLevel(sourceLevels.mid, sourceLevels);
943
+ const desiredMid = normalizeLevel(desiredLevels.mid, desiredLevels);
944
+ if (sourceMid <= 0.001 || sourceMid >= 0.999 || desiredMid <= 0.001 || desiredMid >= 0.999) {
945
+ return 1;
946
+ }
947
+
948
+ return clamp(Math.log(desiredMid) / Math.log(sourceMid), 0.35, 2.5);
949
+ }
950
+
951
/**
 * Matches the tone of `targetCanvas` to `referenceCanvas` in place, using
 * per-channel level remapping plus a luminance gamma curve, then blends the
 * corrected pixels back with `options.strength` (0 = no change, 1 = full).
 *
 * Fully transparent pixels are left untouched. Large images (>= 1 MP) yield
 * to the event loop every 48 rows via flushUi() to keep the UI responsive.
 *
 * @param targetCanvas canvas to correct (mutated and also returned).
 * @param referenceCanvas canvas whose levels are the correction target.
 * @param options.strength blend amount, clamped to [0, 1]; <= 0 is a no-op.
 * @param options.clipFraction histogram clip fraction passed to the level builders.
 * @throws Error when a 2D context cannot be obtained from either canvas.
 */
export async function applyColorLevelCorrection(
  targetCanvas: HTMLCanvasElement,
  referenceCanvas: HTMLCanvasElement,
  options: {
    strength: number;
    clipFraction: number;
  },
): Promise<HTMLCanvasElement> {
  const strength = clamp(options.strength, 0, 1);
  if (strength <= 0) {
    return targetCanvas;
  }

  // willReadFrequently hints the browser to keep the backing store CPU-readable.
  const targetCtx = targetCanvas.getContext("2d", { willReadFrequently: true });
  const referenceCtx = referenceCanvas.getContext("2d", { willReadFrequently: true });
  if (!targetCtx || !referenceCtx) {
    throw new Error("2D canvas context is unavailable.");
  }

  const targetImage = targetCtx.getImageData(0, 0, targetCanvas.width, targetCanvas.height);
  const referenceImage = referenceCtx.getImageData(0, 0, referenceCanvas.width, referenceCanvas.height);
  // Per-channel RGB levels and combined luminance levels for both images.
  const targetLevels = buildChannelLevelStats(targetImage.data, options.clipFraction);
  const referenceLevels = buildChannelLevelStats(referenceImage.data, options.clipFraction);
  const targetLumaLevels = buildLuminanceLevelStats(targetImage.data, options.clipFraction);
  const referenceLumaLevels = buildLuminanceLevelStats(referenceImage.data, options.clipFraction);
  // Gamma that maps the target's midtone onto the reference's midtone.
  const curveGamma = resolveCurveGamma(targetLumaLevels, referenceLumaLevels);
  const shouldYield = targetCanvas.width * targetCanvas.height >= 1_000_000;

  for (let y = 0; y < targetCanvas.height; y += 1) {
    for (let x = 0; x < targetCanvas.width; x += 1) {
      const offset = (y * targetCanvas.width + x) * 4;
      // Skip fully transparent pixels — they were excluded from the stats too.
      if (targetImage.data[offset + 3] === 0) {
        continue;
      }

      const originalRed = targetImage.data[offset];
      const originalGreen = targetImage.data[offset + 1];
      const originalBlue = targetImage.data[offset + 2];
      // Rec. 709 luma of the original pixel, pushed through the gamma curve
      // and rescaled into the reference's luminance range.
      const originalLuma = originalRed * 0.2126 + originalGreen * 0.7152 + originalBlue * 0.0722;
      const normalizedLuma = normalizeLevel(originalLuma, targetLumaLevels);
      const curvedLuma =
        referenceLumaLevels.low +
        Math.pow(normalizedLuma, curveGamma) * Math.max(1, referenceLumaLevels.high - referenceLumaLevels.low);
      let correctedRed = originalRed;
      let correctedGreen = originalGreen;
      let correctedBlue = originalBlue;

      // Linear level remap per channel: target range -> reference range.
      for (let channel = 0; channel < 3; channel += 1) {
        const original = targetImage.data[offset + channel];
        const sourceLevels = targetLevels[channel];
        const desiredLevels = referenceLevels[channel];
        const normalized = normalizeLevel(original, sourceLevels);
        const corrected = clamp(
          Math.round(desiredLevels.low + normalized * (desiredLevels.high - desiredLevels.low)),
          0,
          255,
        );

        if (channel === 0) {
          correctedRed = corrected;
        } else if (channel === 1) {
          correctedGreen = corrected;
        } else {
          correctedBlue = corrected;
        }
      }

      // Rescale the remapped RGB so its luma matches the gamma-curved luma;
      // the scale is clamped to [0, 4] to avoid blowing out near-black pixels.
      const correctedLuma = correctedRed * 0.2126 + correctedGreen * 0.7152 + correctedBlue * 0.0722;
      const lumaScale = clamp(curvedLuma / Math.max(1, correctedLuma), 0, 4);
      const curvedRed = clamp(Math.round(correctedRed * lumaScale), 0, 255);
      const curvedGreen = clamp(Math.round(correctedGreen * lumaScale), 0, 255);
      const curvedBlue = clamp(Math.round(correctedBlue * lumaScale), 0, 255);

      // Blend original -> corrected by `strength`.
      targetImage.data[offset] = clamp(
        Math.round(originalRed + (curvedRed - originalRed) * strength),
        0,
        255,
      );
      targetImage.data[offset + 1] = clamp(
        Math.round(originalGreen + (curvedGreen - originalGreen) * strength),
        0,
        255,
      );
      targetImage.data[offset + 2] = clamp(
        Math.round(originalBlue + (curvedBlue - originalBlue) * strength),
        0,
        255,
      );
    }

    if (shouldYield && y > 0 && y % 48 === 0) {
      await flushUi();
    }
  }

  targetCtx.putImageData(targetImage, 0, 0);
  return targetCanvas;
}
1049
+
1050
+ function smoothstep(value: number): number {
1051
+ const clamped = clamp(value, 0, 1);
1052
+ return clamped * clamped * (3 - 2 * clamped);
1053
+ }
1054
+
1055
+ function fract(value: number): number {
1056
+ return value - Math.floor(value);
1057
+ }
1058
+
1059
+ function lerp(from: number, to: number, amount: number): number {
1060
+ return from + (to - from) * amount;
1061
+ }
1062
+
1063
+ function hashNoise(x: number, y: number, seed: number): number {
1064
+ return fract(Math.sin(x * 127.1 + y * 311.7 + seed * 74.7) * 43758.5453123) * 2 - 1;
1065
+ }
1066
+
1067
+ function sampleFilmNoise(x: number, y: number, size: number, seed: number): number {
1068
+ const scaledX = x / Math.max(0.001, size);
1069
+ const scaledY = y / Math.max(0.001, size);
1070
+ const cellX = Math.floor(scaledX);
1071
+ const cellY = Math.floor(scaledY);
1072
+ const fracX = smoothstep(scaledX - cellX);
1073
+ const fracY = smoothstep(scaledY - cellY);
1074
+
1075
+ const n00 = hashNoise(cellX, cellY, seed);
1076
+ const n10 = hashNoise(cellX + 1, cellY, seed);
1077
+ const n01 = hashNoise(cellX, cellY + 1, seed);
1078
+ const n11 = hashNoise(cellX + 1, cellY + 1, seed);
1079
+ const top = lerp(n00, n10, fracX);
1080
+ const bottom = lerp(n01, n11, fracX);
1081
+ return lerp(top, bottom, fracY);
1082
+ }
1083
+
1084
+ export async function applyFilmGrain(
1085
+ targetCanvas: HTMLCanvasElement,
1086
+ options: {
1087
+ amount: number;
1088
+ size: number;
1089
+ monochrome: boolean;
1090
+ },
1091
+ ): Promise<HTMLCanvasElement> {
1092
+ const amount = clamp(options.amount, 0, 1);
1093
+ if (amount <= 0) {
1094
+ return targetCanvas;
1095
+ }
1096
+
1097
+ const ctx = targetCanvas.getContext("2d", { willReadFrequently: true });
1098
+ if (!ctx) {
1099
+ throw new Error("2D canvas context is unavailable.");
1100
+ }
1101
+
1102
+ const image = ctx.getImageData(0, 0, targetCanvas.width, targetCanvas.height);
1103
+ const shouldYield = targetCanvas.width * targetCanvas.height >= 1_000_000;
1104
+ const noiseSize = Math.max(0.5, options.size);
1105
+ const amplitude = amount * 52;
1106
+ const seed =
1107
+ targetCanvas.width * 0.173 +
1108
+ targetCanvas.height * 0.117 +
1109
+ noiseSize * 4.91 +
1110
+ amplitude * 0.29 +
1111
+ (options.monochrome ? 1.7 : 8.3);
1112
+
1113
+ for (let y = 0; y < targetCanvas.height; y += 1) {
1114
+ for (let x = 0; x < targetCanvas.width; x += 1) {
1115
+ const offset = (y * targetCanvas.width + x) * 4;
1116
+ if (image.data[offset + 3] === 0) {
1117
+ continue;
1118
+ }
1119
+
1120
+ const red = image.data[offset];
1121
+ const green = image.data[offset + 1];
1122
+ const blue = image.data[offset + 2];
1123
+ const luma = (red * 0.2126 + green * 0.7152 + blue * 0.0722) / 255;
1124
+ const toneWeight = 0.7 + (1 - Math.min(1, Math.abs(luma - 0.5) * 2)) * 0.3;
1125
+ const scaledAmplitude = amplitude * toneWeight;
1126
+ const mono = sampleFilmNoise(x + 0.5, y + 0.5, noiseSize, seed) * scaledAmplitude;
1127
+
1128
+ image.data[offset] = clamp(
1129
+ Math.round(red + (options.monochrome ? mono : sampleFilmNoise(x + 17.3, y + 5.1, noiseSize, seed + 13) * scaledAmplitude)),
1130
+ 0,
1131
+ 255,
1132
+ );
1133
+ image.data[offset + 1] = clamp(
1134
+ Math.round(green + (options.monochrome ? mono : sampleFilmNoise(x + 41.7, y + 19.9, noiseSize, seed + 29) * scaledAmplitude)),
1135
+ 0,
1136
+ 255,
1137
+ );
1138
+ image.data[offset + 2] = clamp(
1139
+ Math.round(blue + (options.monochrome ? mono : sampleFilmNoise(x + 73.1, y + 31.4, noiseSize, seed + 47) * scaledAmplitude)),
1140
+ 0,
1141
+ 255,
1142
+ );
1143
+ }
1144
+
1145
+ if (shouldYield && y > 0 && y % 48 === 0) {
1146
+ await flushUi();
1147
+ }
1148
+ }
1149
+
1150
+ ctx.putImageData(image, 0, 0);
1151
+ return targetCanvas;
1152
+ }
1153
+
1154
  interface ResampleContributor {
1155
  offsets: number[];
1156
  weights: number[];
src/lib/types.ts CHANGED
@@ -84,6 +84,13 @@ export interface InspectorControls {
84
  outputScale: string;
85
  preResizeAlgorithm: DownscaleAlgorithm;
86
  outputScaleAlgorithm: DownscaleAlgorithm;
 
 
 
 
 
 
 
87
  compareWithClassicUpscale: boolean;
88
  classicCompareAlgorithm: DownscaleAlgorithm;
89
  optLevel: "all" | "extended" | "basic" | "disabled";
 
84
  outputScale: string;
85
  preResizeAlgorithm: DownscaleAlgorithm;
86
  outputScaleAlgorithm: DownscaleAlgorithm;
87
+ colorCorrectionEnabled: boolean;
88
+ colorCorrectionStrength: string;
89
+ colorCorrectionClip: string;
90
+ filmGrainEnabled: boolean;
91
+ filmGrainAmount: string;
92
+ filmGrainSize: string;
93
+ filmGrainMonochrome: boolean;
94
  compareWithClassicUpscale: boolean;
95
  classicCompareAlgorithm: DownscaleAlgorithm;
96
  optLevel: "all" | "extended" | "basic" | "disabled";