Spaces:
Running
Running
| <!doctype html> | |
| <html lang="en"> | |
| <head> | |
| <meta charset="UTF-8"> | |
| <!-- Allow pinch-zoom: user-scalable=no blocks zooming and fails WCAG 1.4.4 --> | |
| <meta name="viewport" content="width=device-width, initial-scale=1.0"> | |
| <title>post ready? ✨</title> | |
| <meta name="description" content="Your on-device AI bestie that tells you if your photo slaps. 100% private."> | |
| <meta name="theme-color" content="#f1faee"> | |
| <meta name="apple-mobile-web-app-capable" content="yes"> | |
| <meta name="mobile-web-app-capable" content="yes"> | |
| <link rel="icon" | |
| href="data:image/svg+xml,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 100 100'><text y='.9em' font-size='90'>✨</text></svg>"> | |
| <link rel="preconnect" href="https://fonts.googleapis.com"> | |
| <link rel="preconnect" href="https://fonts.gstatic.com" crossorigin> | |
| <!-- Google Fonts CSS2 API: each semicolon-separated tuple must supply a value for EVERY declared axis; bare "700;800" tuples are rejected by the API --> | |
| <link | |
| href="https://fonts.googleapis.com/css2?family=Bricolage+Grotesque:opsz,wdth,wght@12..96,75..100,400;12..96,75..100,700;12..96,75..100,800&family=DM+Sans:ital,opsz,wght@0,9..40,300;0,9..40,400;0,9..40,500;1,9..40,400&display=swap" | |
| rel="stylesheet"> | |
| <style> | |
| :root { | |
| --red: #e63946; | |
| --dew: #f1faee; | |
| --frost: #a8dadc; | |
| --steel: #457b9d; | |
| --deep: #1d3557; | |
| --white: #fff; | |
| --g100: #f8f9fa; | |
| --g200: #e9ecef; | |
| --g400: #adb5bd; | |
| --g600: #6c757d; | |
| --amber: #f4a261; | |
| --green: #52b788; | |
| --r-sm: 10px; | |
| --r-md: 18px; | |
| --r-lg: 26px; | |
| --sh: 0 4px 24px rgba(29, 53, 87, .10); | |
| --fd: 'Bricolage Grotesque', sans-serif; | |
| --fb: 'DM Sans', sans-serif; | |
| --ease: .28s cubic-bezier(.4, 0, .2, 1); | |
| } | |
| *, | |
| *::before, | |
| *::after { | |
| box-sizing: border-box; | |
| margin: 0; | |
| padding: 0 | |
| } | |
| html { | |
| -webkit-tap-highlight-color: transparent | |
| } | |
| /* Smooth scrolling only for users who have not asked for reduced motion (WCAG 2.3.3) */ | |
| @media (prefers-reduced-motion: no-preference) { | |
| html { | |
| scroll-behavior: smooth | |
| } | |
| } | |
| body { | |
| font-family: var(--fb); | |
| background: var(--white); | |
| color: var(--deep); | |
| min-height: 100dvh; | |
| overflow-x: hidden; | |
| -webkit-font-smoothing: antialiased | |
| } | |
| img { | |
| max-width: 100%; | |
| display: block | |
| } | |
| button { | |
| font-family: var(--fb); | |
| cursor: pointer; | |
| border: none; | |
| -webkit-appearance: none; | |
| background: none | |
| } | |
| /* Keep keyboard focus visible: never strip outlines without a :focus-visible replacement */ | |
| button:focus-visible { | |
| outline: 2px solid var(--steel); | |
| outline-offset: 2px | |
| } | |
| input[type=file] { | |
| display: none | |
| } | |
| #app-shell { | |
| max-width: 520px; | |
| margin: 0 auto; | |
| width: 100%; | |
| border-left: 1px solid var(--g200); | |
| border-right: 1px solid var(--g200); | |
| min-height: 100dvh; | |
| background: var(--white) | |
| } | |
| @media(max-width:540px) { | |
| #app-shell { | |
| border: none | |
| } | |
| } | |
| /* ββ SPLASH ββ */ | |
| #splash { | |
| position: fixed; | |
| inset: 0; | |
| z-index: 999; | |
| background: var(--white); | |
| display: flex; | |
| justify-content: center; | |
| overflow: hidden; | |
| transition: opacity .5s ease, transform .5s ease | |
| } | |
| #splash.hide { | |
| opacity: 0; | |
| transform: scale(1.02); | |
| pointer-events: none | |
| } | |
| .sp-inner { | |
| width: 100%; | |
| max-width: 520px; | |
| display: flex; | |
| flex-direction: column; | |
| padding: 0 20px; | |
| overflow-y: auto; | |
| border-left: 1px solid var(--g200); | |
| border-right: 1px solid var(--g200) | |
| } | |
| @media(max-width:540px) { | |
| .sp-inner { | |
| border: none | |
| } | |
| } | |
| .sp-hero { | |
| text-align: center; | |
| padding: 36px 0 20px; | |
| flex-shrink: 0 | |
| } | |
| .sp-wordmark { | |
| font-family: var(--fd); | |
| font-weight: 800; | |
| font-size: clamp(32px, 8.5vw, 50px); | |
| letter-spacing: -.03em; | |
| color: var(--deep); | |
| line-height: 1 | |
| } | |
| .sp-wordmark .dot { | |
| color: var(--red) | |
| } | |
| .sp-tag { | |
| margin-top: 8px; | |
| font-size: clamp(13px, 3.5vw, 15px); | |
| color: var(--steel) | |
| } | |
| .sp-tag b { | |
| color: var(--red); | |
| font-weight: 600 | |
| } | |
| .sp-pills { | |
| display: flex; | |
| gap: 7px; | |
| justify-content: center; | |
| flex-wrap: wrap; | |
| margin: 14px 0 0 | |
| } | |
| .sp-pill { | |
| display: flex; | |
| align-items: center; | |
| gap: 5px; | |
| padding: 5px 11px; | |
| background: var(--dew); | |
| border-radius: 999px; | |
| font-size: 11.5px; | |
| font-weight: 600; | |
| color: var(--steel) | |
| } | |
| .sp-warn { | |
| background: #fffbec; | |
| border: 1.5px solid var(--amber); | |
| border-radius: var(--r-md); | |
| padding: 10px 14px; | |
| font-size: 11.5px; | |
| color: #7c5800; | |
| line-height: 1.5; | |
| margin: 14px 0 10px; | |
| flex-shrink: 0 | |
| } | |
| .load-box { | |
| background: var(--white); | |
| border: 1.5px solid var(--g200); | |
| border-radius: var(--r-lg); | |
| padding: 16px 18px; | |
| margin-bottom: 10px; | |
| box-shadow: var(--sh); | |
| flex-shrink: 0 | |
| } | |
| .load-row { | |
| display: flex; | |
| align-items: center; | |
| gap: 10px; | |
| margin-bottom: 12px | |
| } | |
| .load-icon { | |
| font-size: 20px; | |
| width: 26px; | |
| text-align: center; | |
| flex-shrink: 0 | |
| } | |
| .load-title { | |
| font-family: var(--fd); | |
| font-size: 13px; | |
| font-weight: 700; | |
| color: var(--deep) | |
| } | |
| .load-sub { | |
| font-size: 10.5px; | |
| color: var(--g600); | |
| margin-top: 2px; | |
| line-height: 1.45 | |
| } | |
| .pbar-track { | |
| background: var(--g200); | |
| border-radius: 999px; | |
| height: 8px; | |
| overflow: hidden | |
| } | |
| .pbar-fill { | |
| height: 100%; | |
| border-radius: 999px; | |
| background: linear-gradient(90deg, var(--steel), var(--frost)); | |
| transition: width .6s ease; | |
| width: 1% | |
| } | |
| .pbar-foot { | |
| display: flex; | |
| justify-content: space-between; | |
| margin-top: 5px; | |
| font-size: 10px; | |
| color: var(--g600) | |
| } | |
| .file-tick { | |
| margin-top: 7px; | |
| font-size: 10px; | |
| font-family: monospace; | |
| color: var(--g400); | |
| white-space: nowrap; | |
| overflow: hidden; | |
| text-overflow: ellipsis; | |
| min-height: 13px | |
| } | |
| .file-tick .ok { | |
| color: var(--green) | |
| } | |
| .file-tick .dl { | |
| color: var(--steel) | |
| } | |
| .sp-facts { | |
| flex: 1; | |
| overflow-y: auto; | |
| padding-bottom: 20px; | |
| scrollbar-width: thin; | |
| scrollbar-color: var(--g200) transparent | |
| } | |
| .sp-facts::-webkit-scrollbar { | |
| width: 3px | |
| } | |
| .sp-facts::-webkit-scrollbar-thumb { | |
| background: var(--g200); | |
| border-radius: 2px | |
| } | |
| .facts-label { | |
| text-align: center; | |
| font-size: 10px; | |
| font-weight: 700; | |
| letter-spacing: .12em; | |
| text-transform: uppercase; | |
| color: var(--steel); | |
| margin: 10px 0 9px | |
| } | |
| .fact { | |
| display: flex; | |
| gap: 11px; | |
| padding: 11px 13px; | |
| background: var(--g100); | |
| border-radius: var(--r-md); | |
| margin-bottom: 7px | |
| } | |
| .fact-em { | |
| font-size: 22px; | |
| flex-shrink: 0; | |
| line-height: 1.2; | |
| margin-top: 1px | |
| } | |
| .fact-t { | |
| font-family: var(--fd); | |
| font-size: 13px; | |
| font-weight: 700; | |
| color: var(--deep); | |
| margin-bottom: 3px | |
| } | |
| .fact-b { | |
| font-size: 11.5px; | |
| color: var(--g600); | |
| line-height: 1.55 | |
| } | |
| .fact-b b { | |
| color: var(--red); | |
| font-weight: 600 | |
| } | |
| /* ββ APP ββ */ | |
| #app { | |
| display: none; | |
| flex-direction: column; | |
| min-height: 100dvh | |
| } | |
| #app.on { | |
| display: flex | |
| } | |
| .app-bar { | |
| display: flex; | |
| align-items: center; | |
| justify-content: space-between; | |
| padding: 13px 18px; | |
| background: var(--white); | |
| border-bottom: 1.5px solid var(--g200); | |
| position: sticky; | |
| top: 0; | |
| z-index: 100 | |
| } | |
| .logo { | |
| font-family: var(--fd); | |
| font-size: 20px; | |
| font-weight: 800; | |
| letter-spacing: -.02em; | |
| color: var(--deep) | |
| } | |
| .logo .dot { | |
| color: var(--red) | |
| } | |
| .status-badge { | |
| display: flex; | |
| align-items: center; | |
| gap: 5px; | |
| font-size: 11px; | |
| font-weight: 600; | |
| padding: 4px 10px; | |
| border-radius: 999px; | |
| background: var(--g100); | |
| color: var(--g600); | |
| transition: all var(--ease) | |
| } | |
| .status-badge.rdy { | |
| background: #d8f3dc; | |
| color: #1b4332 | |
| } | |
| .status-badge.ld { | |
| background: #fff3cd; | |
| color: #7c4f0a | |
| } | |
| .sdot { | |
| width: 6px; | |
| height: 6px; | |
| border-radius: 50%; | |
| background: currentColor; | |
| flex-shrink: 0 | |
| } | |
| .sdot.pulse { | |
| animation: pulse 1.4s ease infinite | |
| } | |
| @keyframes pulse { | |
| 0%, | |
| 100% { | |
| opacity: 1; | |
| transform: scale(1) | |
| } | |
| 50% { | |
| opacity: .25; | |
| transform: scale(.55) | |
| } | |
| } | |
| #sec-upload { | |
| padding: 16px; | |
| display: flex; | |
| flex-direction: column; | |
| gap: 11px | |
| } | |
| .drop-zone { | |
| border: 2.5px dashed var(--frost); | |
| border-radius: var(--r-lg); | |
| padding: 38px 20px; | |
| text-align: center; | |
| background: var(--dew); | |
| cursor: pointer; | |
| transition: all var(--ease) | |
| } | |
| .drop-zone:hover, | |
| .drop-zone.over { | |
| border-color: var(--steel); | |
| background: #e5f4f6 | |
| } | |
| .drop-zone.gone { | |
| display: none | |
| } | |
| .dz-icon { | |
| font-size: 44px; | |
| margin-bottom: 11px | |
| } | |
| .dz-h { | |
| font-family: var(--fd); | |
| font-size: 18px; | |
| font-weight: 700; | |
| color: var(--deep); | |
| margin-bottom: 5px | |
| } | |
| .dz-s { | |
| font-size: 13px; | |
| color: var(--g600) | |
| } | |
| .dz-s u { | |
| color: var(--steel); | |
| font-weight: 600; | |
| text-decoration-color: var(--frost) | |
| } | |
| .cam-row { | |
| display: flex; | |
| gap: 8px | |
| } | |
| .cam-btn { | |
| flex: 1; | |
| padding: 11px 8px; | |
| border: 1.5px solid var(--g200); | |
| border-radius: var(--r-md); | |
| font-size: 13px; | |
| font-weight: 600; | |
| color: var(--deep); | |
| background: var(--g100); | |
| display: flex; | |
| align-items: center; | |
| justify-content: center; | |
| gap: 6px; | |
| transition: all var(--ease); | |
| cursor: pointer | |
| } | |
| .cam-btn:hover { | |
| background: var(--g200) | |
| } | |
| .cam-btn.primary { | |
| background: var(--deep); | |
| color: var(--white); | |
| border-color: var(--deep) | |
| } | |
| .cam-btn.primary:hover { | |
| background: #253f6a | |
| } | |
| .cam-row.gone { | |
| display: none | |
| } | |
| .prep-hint { | |
| display: none; | |
| align-items: center; | |
| gap: 7px; | |
| padding: 9px 12px; | |
| background: var(--g100); | |
| border-radius: var(--r-sm); | |
| font-size: 11.5px; | |
| color: var(--g600) | |
| } | |
| .prep-hint.on { | |
| display: flex | |
| } | |
| .prep-spin { | |
| width: 12px; | |
| height: 12px; | |
| border-radius: 50%; | |
| border: 2px solid var(--g200); | |
| border-top-color: var(--steel); | |
| animation: spin .7s linear infinite; | |
| flex-shrink: 0 | |
| } | |
| @keyframes spin { | |
| to { | |
| transform: rotate(360deg) | |
| } | |
| } | |
| .preview { | |
| border-radius: var(--r-lg); | |
| overflow: hidden; | |
| border: 1.5px solid var(--g200); | |
| display: none | |
| } | |
| .preview img { | |
| width: 100%; | |
| max-height: 300px; | |
| object-fit: cover; | |
| display: block | |
| } | |
| .preview-bar { | |
| display: flex; | |
| align-items: center; | |
| justify-content: space-between; | |
| padding: 9px 13px; | |
| background: var(--white) | |
| } | |
| .preview-name { | |
| font-size: 11.5px; | |
| color: var(--g600); | |
| font-weight: 500; | |
| white-space: nowrap; | |
| overflow: hidden; | |
| text-overflow: ellipsis; | |
| max-width: 60% | |
| } | |
| .change-btn { | |
| font-size: 11.5px; | |
| font-weight: 700; | |
| color: var(--steel); | |
| padding: 4px 9px; | |
| border-radius: 999px; | |
| background: var(--g100); | |
| cursor: pointer | |
| } | |
| .change-btn:hover { | |
| background: var(--g200) | |
| } | |
| .analyze-btn { | |
| padding: 15px; | |
| background: var(--red); | |
| color: var(--white); | |
| border-radius: var(--r-md); | |
| font-family: var(--fd); | |
| font-size: 17px; | |
| font-weight: 700; | |
| width: 100%; | |
| text-align: center; | |
| box-shadow: 0 4px 20px rgba(230, 57, 70, .28); | |
| transition: all var(--ease); | |
| cursor: pointer | |
| } | |
| .analyze-btn:hover:not(:disabled) { | |
| transform: translateY(-2px); | |
| box-shadow: 0 6px 28px rgba(230, 57, 70, .38) | |
| } | |
| .analyze-btn:active:not(:disabled) { | |
| transform: translateY(0) | |
| } | |
| .analyze-btn:disabled { | |
| background: var(--g400); | |
| box-shadow: none; | |
| cursor: not-allowed | |
| } | |
| .an-box { | |
| background: var(--deep); | |
| border-radius: var(--r-lg); | |
| overflow: hidden; | |
| display: none | |
| } | |
| .an-box.on { | |
| display: block | |
| } | |
| .an-top { | |
| padding: 18px 18px 14px; | |
| text-align: center | |
| } | |
| .an-h { | |
| font-family: var(--fd); | |
| font-size: 14.5px; | |
| font-weight: 700; | |
| color: var(--white); | |
| margin-bottom: 4px | |
| } | |
| .an-s { | |
| font-size: 11px; | |
| color: rgba(255, 255, 255, .45); | |
| margin-bottom: 12px | |
| } | |
| .dots { | |
| display: flex; | |
| justify-content: center; | |
| gap: 7px | |
| } | |
| .d { | |
| width: 8px; | |
| height: 8px; | |
| border-radius: 50%; | |
| background: var(--frost); | |
| animation: dp 1.4s ease infinite | |
| } | |
| .d:nth-child(2) { | |
| animation-delay: .2s | |
| } | |
| .d:nth-child(3) { | |
| animation-delay: .4s | |
| } | |
| @keyframes dp { | |
| 0%, | |
| 100% { | |
| transform: scale(1); | |
| opacity: .3 | |
| } | |
| 50% { | |
| transform: scale(1.5); | |
| opacity: 1 | |
| } | |
| } | |
| .tok-section { | |
| border-top: 1px solid rgba(255, 255, 255, .08); | |
| background: rgba(0, 0, 0, .22) | |
| } | |
| .tok-label-bar { | |
| display: flex; | |
| align-items: center; | |
| justify-content: space-between; | |
| padding: 7px 14px 4px | |
| } | |
| .tok-label { | |
| font-size: 9px; | |
| font-weight: 700; | |
| letter-spacing: .11em; | |
| text-transform: uppercase; | |
| color: rgba(168, 218, 220, .45) | |
| } | |
| .tok-stats { | |
| font-size: 9px; | |
| color: rgba(255, 255, 255, .28); | |
| font-family: monospace | |
| } | |
| .tok-scroll { | |
| height: 112px; | |
| overflow-y: auto; | |
| padding: 4px 14px 10px; | |
| scrollbar-width: thin; | |
| scrollbar-color: rgba(168, 218, 220, .15) transparent | |
| } | |
| .tok-scroll::-webkit-scrollbar { | |
| width: 3px | |
| } | |
| .tok-scroll::-webkit-scrollbar-thumb { | |
| background: rgba(168, 218, 220, .15); | |
| border-radius: 2px | |
| } | |
| .tok { | |
| font-size: 11px; | |
| color: rgba(168, 218, 220, .88); | |
| font-family: monospace; | |
| line-height: 1.65; | |
| word-break: break-word; | |
| white-space: pre-wrap | |
| } | |
| .tok-wrap { | |
| transition: opacity .7s ease | |
| } | |
| .tok-wrap.fade { | |
| opacity: 0 | |
| } | |
| .history-strip { | |
| display: flex; | |
| gap: 8px; | |
| overflow-x: auto; | |
| padding-bottom: 2px; | |
| scrollbar-width: none | |
| } | |
| .history-strip::-webkit-scrollbar { | |
| display: none | |
| } | |
| .hist-item { | |
| flex-shrink: 0; | |
| background: var(--g100); | |
| border-radius: var(--r-sm); | |
| padding: 8px 10px; | |
| text-align: center; | |
| border: 1.5px solid var(--g200); | |
| cursor: pointer; | |
| transition: all var(--ease); | |
| min-width: 64px | |
| } | |
| .hist-item:hover { | |
| border-color: var(--frost); | |
| background: var(--white) | |
| } | |
| .hist-item.current { | |
| border-color: var(--steel); | |
| background: #e8f4f8 | |
| } | |
| .hist-n { | |
| font-family: var(--fd); | |
| font-size: 18px; | |
| font-weight: 800; | |
| color: var(--deep); | |
| line-height: 1 | |
| } | |
| .hist-v { | |
| font-size: 9px; | |
| color: var(--g600); | |
| margin-top: 2px; | |
| white-space: nowrap; | |
| overflow: hidden; | |
| text-overflow: ellipsis; | |
| max-width: 70px | |
| } | |
| .compare-card { | |
| background: var(--g100); | |
| border-radius: var(--r-lg); | |
| padding: 15px; | |
| border: 1.5px solid var(--g200) | |
| } | |
| .compare-h { | |
| font-family: var(--fd); | |
| font-size: 13px; | |
| font-weight: 700; | |
| color: var(--deep); | |
| margin-bottom: 11px | |
| } | |
| .compare-row { | |
| display: flex; | |
| align-items: center; | |
| gap: 10px | |
| } | |
| .cmp-side { | |
| flex: 1; | |
| text-align: center; | |
| padding: 10px; | |
| background: var(--white); | |
| border-radius: var(--r-sm); | |
| border: 1.5px solid var(--g200) | |
| } | |
| .cmp-side.now { | |
| border-color: var(--steel); | |
| background: #e8f4f8 | |
| } | |
| .cmp-lbl { | |
| font-size: 9px; | |
| font-weight: 700; | |
| text-transform: uppercase; | |
| letter-spacing: .08em; | |
| color: var(--g600); | |
| margin-bottom: 4px | |
| } | |
| .cmp-score { | |
| font-family: var(--fd); | |
| font-size: 22px; | |
| font-weight: 800; | |
| color: var(--deep); | |
| line-height: 1 | |
| } | |
| .cmp-vibe { | |
| font-size: 10px; | |
| color: var(--g600); | |
| margin-top: 3px; | |
| white-space: nowrap; | |
| overflow: hidden; | |
| text-overflow: ellipsis | |
| } | |
| .cmp-vs { | |
| font-family: var(--fd); | |
| font-size: 13px; | |
| font-weight: 800; | |
| color: var(--g400) | |
| } | |
| .delta { | |
| display: inline-flex; | |
| align-items: center; | |
| gap: 3px; | |
| padding: 2px 8px; | |
| border-radius: 999px; | |
| font-size: 11px; | |
| font-weight: 700; | |
| margin-top: 8px | |
| } | |
| .delta.up { | |
| background: #d8f3dc; | |
| color: #1b4332 | |
| } | |
| .delta.dn { | |
| background: #ffe0e3; | |
| color: #7b0010 | |
| } | |
| .delta.eq { | |
| background: var(--g200); | |
| color: var(--g600) | |
| } | |
| .ct-warn { | |
| background: #fff8ec; | |
| border: 1.5px solid var(--amber); | |
| border-radius: var(--r-md); | |
| padding: 11px 14px; | |
| font-size: 12px; | |
| color: #7c5800; | |
| line-height: 1.55 | |
| } | |
| .ct-warn b { | |
| font-weight: 700 | |
| } | |
| #cam-modal { | |
| position: fixed; | |
| inset: 0; | |
| z-index: 500; | |
| background: rgba(29, 53, 87, .9); | |
| display: none; | |
| align-items: flex-end; | |
| justify-content: center; | |
| padding: 0 | |
| } | |
| #cam-modal.on { | |
| display: flex | |
| } | |
| @media(min-width:540px) { | |
| #cam-modal { | |
| align-items: center; | |
| padding: 20px | |
| } | |
| } | |
| .cam-inner { | |
| width: 100%; | |
| max-width: 520px; | |
| background: var(--white); | |
| border-radius: var(--r-lg) var(--r-lg) 0 0; | |
| overflow: hidden | |
| } | |
| @media(min-width:540px) { | |
| .cam-inner { | |
| border-radius: var(--r-lg) | |
| } | |
| } | |
| .cam-vid-wrap { | |
| background: #000; | |
| aspect-ratio: 4/3; | |
| position: relative | |
| } | |
| #camVid { | |
| width: 100%; | |
| height: 100%; | |
| object-fit: cover; | |
| display: block | |
| } | |
| #camCanvas { | |
| display: none | |
| } | |
| .cam-bar { | |
| display: flex; | |
| align-items: center; | |
| gap: 8px; | |
| padding: 13px 16px | |
| } | |
| .cam-act { | |
| flex: 1; | |
| padding: 12px; | |
| background: var(--red); | |
| color: var(--white); | |
| border-radius: var(--r-md); | |
| font-family: var(--fd); | |
| font-size: 15px; | |
| font-weight: 700; | |
| cursor: pointer; | |
| transition: background var(--ease) | |
| } | |
| .cam-act:hover { | |
| background: #c9313d | |
| } | |
| .cam-sq { | |
| width: 44px; | |
| height: 44px; | |
| background: var(--g100); | |
| border-radius: var(--r-sm); | |
| font-size: 19px; | |
| display: flex; | |
| align-items: center; | |
| justify-content: center; | |
| cursor: pointer; | |
| flex-shrink: 0; | |
| transition: background var(--ease) | |
| } | |
| .cam-sq:hover { | |
| background: var(--g200) | |
| } | |
| #sec-results { | |
| padding: 16px; | |
| display: none; | |
| flex-direction: column; | |
| gap: 11px; | |
| padding-bottom: 60px | |
| } | |
| #sec-results.on { | |
| display: flex | |
| } | |
| .verdict-banner { | |
| border-radius: var(--r-lg); | |
| padding: 22px 20px; | |
| text-align: center; | |
| position: relative; | |
| overflow: hidden | |
| } | |
| .vb-bg { | |
| position: absolute; | |
| inset: 0 | |
| } | |
| .vb-inner { | |
| position: relative; | |
| z-index: 1 | |
| } | |
| .v-emoji { | |
| font-size: 44px; | |
| display: block; | |
| margin-bottom: 7px | |
| } | |
| .v-text { | |
| font-family: var(--fd); | |
| font-size: clamp(21px, 6vw, 29px); | |
| font-weight: 800; | |
| letter-spacing: -.02em; | |
| color: var(--white) | |
| } | |
| .v-score { | |
| font-size: 12px; | |
| color: rgba(255, 255, 255, .65); | |
| margin-top: 4px | |
| } | |
| .v-vibe { | |
| display: inline-block; | |
| margin-top: 8px; | |
| padding: 4px 12px; | |
| background: rgba(255, 255, 255, .18); | |
| border-radius: 999px; | |
| font-size: 12px; | |
| color: var(--white); | |
| font-weight: 500 | |
| } | |
| .card { | |
| background: var(--white); | |
| border-radius: var(--r-lg); | |
| padding: 17px; | |
| box-shadow: var(--sh); | |
| border: 1.5px solid var(--g200) | |
| } | |
| .ring-row { | |
| display: flex; | |
| align-items: center; | |
| gap: 16px | |
| } | |
| .ring-wrap { | |
| position: relative; | |
| width: 88px; | |
| height: 88px; | |
| flex-shrink: 0 | |
| } | |
| .ring-svg { | |
| transform: rotate(-90deg) | |
| } | |
| .ring-bg { | |
| fill: none; | |
| stroke: var(--g200); | |
| stroke-width: 9 | |
| } | |
| .ring-arc { | |
| fill: none; | |
| stroke-width: 9; | |
| stroke-linecap: round; | |
| transition: stroke-dashoffset 1.6s cubic-bezier(.4, 0, .2, 1) | |
| } | |
| .ring-lbl { | |
| position: absolute; | |
| inset: 0; | |
| display: flex; | |
| flex-direction: column; | |
| align-items: center; | |
| justify-content: center | |
| } | |
| .ring-n { | |
| font-family: var(--fd); | |
| font-size: 23px; | |
| font-weight: 800; | |
| color: var(--deep); | |
| line-height: 1; | |
| letter-spacing: -.03em | |
| } | |
| .ring-t { | |
| font-size: 8px; | |
| color: var(--g600); | |
| font-weight: 700; | |
| text-transform: uppercase; | |
| letter-spacing: .06em | |
| } | |
| .ring-info { | |
| flex: 1 | |
| } | |
| .ri-h { | |
| font-family: var(--fd); | |
| font-size: 14.5px; | |
| font-weight: 700; | |
| color: var(--deep); | |
| margin-bottom: 4px | |
| } | |
| .ri-s { | |
| font-size: 12px; | |
| color: var(--g600); | |
| line-height: 1.5 | |
| } | |
| .tag { | |
| display: inline-flex; | |
| align-items: center; | |
| gap: 3px; | |
| padding: 3px 9px; | |
| border-radius: 999px; | |
| font-size: 11px; | |
| font-weight: 600; | |
| margin-top: 6px | |
| } | |
| .tag-d { | |
| background: var(--deep); | |
| color: var(--white) | |
| } | |
| .tag-f { | |
| background: var(--frost); | |
| color: var(--deep); | |
| margin-left: 4px | |
| } | |
| .bars-h { | |
| font-family: var(--fd); | |
| font-size: 14px; | |
| font-weight: 700; | |
| margin-bottom: 13px; | |
| color: var(--deep) | |
| } | |
| .sbar { | |
| margin-bottom: 11px | |
| } | |
| .sbar:last-child { | |
| margin-bottom: 0 | |
| } | |
| .sbar-row { | |
| display: flex; | |
| justify-content: space-between; | |
| align-items: center; | |
| margin-bottom: 5px | |
| } | |
| .sbar-n { | |
| font-size: 11.5px; | |
| font-weight: 600; | |
| color: var(--deep) | |
| } | |
| .sbar-v { | |
| font-family: var(--fd); | |
| font-size: 12.5px; | |
| font-weight: 800 | |
| } | |
| .sbar-track { | |
| height: 6px; | |
| background: var(--g200); | |
| border-radius: 999px; | |
| overflow: hidden | |
| } | |
| .sbar-fill { | |
| height: 100%; | |
| border-radius: 999px; | |
| width: 0; | |
| transition: width 1.2s cubic-bezier(.4, 0, .2, 1) | |
| } | |
| .c-low { | |
| color: var(--red) | |
| } | |
| .c-mid { | |
| color: var(--amber) | |
| } | |
| .c-hi { | |
| color: var(--steel) | |
| } | |
| .c-epic { | |
| color: var(--green) | |
| } | |
| .f-low { | |
| background: var(--red) | |
| } | |
| .f-mid { | |
| background: var(--amber) | |
| } | |
| .f-hi { | |
| background: var(--steel) | |
| } | |
| .f-epic { | |
| background: var(--green) | |
| } | |
| .cap-h { | |
| font-family: var(--fd); | |
| font-size: 14px; | |
| font-weight: 700; | |
| margin-bottom: 11px; | |
| color: var(--deep) | |
| } | |
| .cap-item { | |
| display: flex; | |
| align-items: flex-start; | |
| gap: 8px; | |
| padding: 9px 11px; | |
| background: var(--g100); | |
| border-radius: var(--r-sm); | |
| margin-bottom: 6px; | |
| cursor: pointer; | |
| transition: all var(--ease); | |
| border: 1.5px solid transparent | |
| } | |
| .cap-item:last-child { | |
| margin-bottom: 0 | |
| } | |
| .cap-item:hover { | |
| border-color: var(--frost); | |
| background: var(--white) | |
| } | |
| .cap-item.copied { | |
| border-color: var(--green); | |
| background: #d8f3dc | |
| } | |
| .cap-num { | |
| min-width: 19px; | |
| height: 19px; | |
| border-radius: 50%; | |
| background: var(--deep); | |
| color: var(--white); | |
| font-size: 10px; | |
| font-weight: 700; | |
| display: flex; | |
| align-items: center; | |
| justify-content: center; | |
| flex-shrink: 0; | |
| margin-top: 1px | |
| } | |
| .cap-txt { | |
| font-size: 12.5px; | |
| color: var(--deep); | |
| line-height: 1.5 | |
| } | |
| .cap-hint { | |
| font-size: 10px; | |
| color: var(--g400); | |
| margin-top: 2px | |
| } | |
| .glow-card { | |
| background: linear-gradient(140deg, var(--deep), var(--steel)); | |
| border-radius: var(--r-lg); | |
| padding: 17px; | |
| color: var(--white) | |
| } | |
| .glow-h { | |
| font-family: var(--fd); | |
| font-size: 14px; | |
| font-weight: 700; | |
| margin-bottom: 11px | |
| } | |
| .glow-item { | |
| display: flex; | |
| gap: 9px; | |
| margin-bottom: 10px; | |
| font-size: 13px; | |
| line-height: 1.55 | |
| } | |
| .glow-item:last-child { | |
| margin-bottom: 0 | |
| } | |
| .glow-ic { | |
| font-size: 17px; | |
| flex-shrink: 0; | |
| margin-top: 1px | |
| } | |
| #toast { | |
| position: fixed; | |
| bottom: 20px; | |
| left: 50%; | |
| transform: translateX(-50%) translateY(70px); | |
| background: var(--deep); | |
| color: var(--white); | |
| padding: 10px 18px; | |
| border-radius: 999px; | |
| font-size: 13px; | |
| font-weight: 500; | |
| z-index: 9999; | |
| transition: transform .35s cubic-bezier(.4, 0, .2, 1); | |
| max-width: calc(100vw - 40px); | |
| text-align: center; | |
| box-shadow: var(--sh) | |
| } | |
| #toast.show { | |
| transform: translateX(-50%) translateY(0) | |
| } | |
| .conf { | |
| position: fixed; | |
| width: 7px; | |
| height: 7px; | |
| border-radius: 2px; | |
| pointer-events: none; | |
| z-index: 9998 | |
| } | |
| @keyframes cfell { | |
| 0% { | |
| transform: translateY(-10px) rotate(0); | |
| opacity: 1 | |
| } | |
| 100% { | |
| transform: translateY(100vh) rotate(720deg); | |
| opacity: 0 | |
| } | |
| } | |
| </style> | |
| </head> | |
| <body> | |
| <!-- ──────────── SPLASH ──────────── --> | |
| <div id="splash"> | |
| <div class="sp-inner"> | |
| <div class="sp-hero"> | |
| <div class="sp-wordmark">post ready<span class="dot">?</span> ✨</div> | |
| <p class="sp-tag">the on-device AI that tells you if your pic <b>actually slaps</b></p> | |
| <div class="sp-pills"> | |
| <div class="sp-pill">🔒 100% private</div> | |
| <div class="sp-pill">⚡ runs on your GPU</div> | |
| <div class="sp-pill">📸 no uploads ever</div> | |
| </div> | |
| </div> | |
| <div class="sp-warn"> | |
| <strong>⚠️ First launch only:</strong> downloading ~500MB–1GB AI model + warming up GPU shaders (~30s). | |
| WiFi recommended. After that — instant! Your pics <strong>never leave your device.</strong> | |
| </div> | |
| <div class="load-box"> | |
| <div class="load-row"> | |
| <div class="load-icon" id="phIcon">⏳</div> | |
| <div> | |
| <div class="load-title" id="phTitle">Getting ready…</div> | |
| <div class="load-sub" id="phSub">Starting AI runtime</div> | |
| </div> | |
| </div> | |
| <!-- No aria-valuenow yet, so this reads as an indeterminate progress bar until the script sets widths --> | |
| <div class="pbar-track" role="progressbar" aria-label="Model loading progress"> | |
| <div class="pbar-fill" id="pbar"></div> | |
| </div> | |
| <div class="pbar-foot"><span id="pbarPct">0%</span><span id="pbarPhase">Initializing</span></div> | |
| <div class="file-tick" id="fileTick">Connecting…</div> | |
| </div> | |
| <div class="sp-facts"> | |
| <div class="facts-label">📚 fun facts while u wait</div> | |
| <div class="fact"> | |
| <div class="fact-em">🔒</div> | |
| <div> | |
| <div class="fact-t">Fully private, no kidding</div> | |
| <div class="fact-b">The AI model runs <b>entirely in your browser</b> via WebGPU. No server, no | |
| upload, no data collection. Ever.</div> | |
| </div> | |
| </div> | |
| <div class="fact"> | |
| <div class="fact-em">⚡</div> | |
| <div> | |
| <div class="fact-t">The 0.4 second rule</div> | |
| <div class="fact-b">You have <b>less than half a second</b> before someone scrolls past. Your | |
| thumbnail does all the heavy lifting.</div> | |
| </div> | |
| </div> | |
| <div class="fact"> | |
| <div class="fact-em">🌿</div> | |
| <div> | |
| <div class="fact-t">Vibe > perfection</div> | |
| <div class="fact-b">Gen Z engages <b>2× more</b> with authentic posts than over-edited ones. | |
| Candid + slightly imperfect? That's the aesthetic.</div> | |
| </div> | |
| </div> | |
| <div class="fact"> | |
| <div class="fact-em">💡</div> | |
| <div> | |
| <div class="fact-t">Lighting is literally everything</div> | |
| <div class="fact-b">Golden hour + soft window light <b>add depth ring lights can't | |
| replicate.</b></div> | |
| </div> | |
| </div> | |
| <div class="fact"> | |
| <div class="fact-em">🎨</div> | |
| <div> | |
| <div class="fact-t">Color story check</div> | |
| <div class="fact-b">Posts with a <b>cohesive color palette</b> get up to 40% more saves.</div> | |
| </div> | |
| </div> | |
| <div class="fact"> | |
| <div class="fact-em">💬</div> | |
| <div> | |
| <div class="fact-t">Caption hack</div> | |
| <div class="fact-b">Captions ending in a <b>question</b> get ~3× more comments. Give people a | |
| reason to type.</div> | |
| </div> | |
| </div> | |
| <div class="fact"> | |
| <div class="fact-em">🔥</div> | |
| <div> | |
| <div class="fact-t">GPU warmup is a one-time thing</div> | |
| <div class="fact-b">WebGPU compiles shaders on first load — <b>takes ~20-40s once</b>, then | |
| every analysis after that is fast.</div> | |
| </div> | |
| </div> | |
| </div> | |
| </div> | |
| </div> | |
| <!-- ──────────── APP ──────────── --> | |
| <div id="app"> | |
| <div id="app-shell"> | |
| <div class="app-bar"> | |
| <div class="logo">post ready<span class="dot">?</span></div> | |
| <!-- role="status" politely announces load-state text swaps to assistive tech --> | |
| <div class="status-badge ld" id="statusBadge" role="status"> | |
| <div class="sdot pulse" id="sdot" aria-hidden="true"></div> | |
| <span id="statusTxt">Loading AI…</span> | |
| </div> | |
| </div> | |
| <div id="sec-upload"> | |
| <label class="drop-zone" id="dropZone"> | |
| <div class="dz-icon">🖼️</div> | |
| <div class="dz-h">Drop your pic here</div> | |
| <div class="dz-s">or <u>tap to browse</u> · jpg, png, webp, heic</div> | |
| <input type="file" id="fileInput" accept="image/*"> | |
| </label> | |
| <div class="cam-row" id="camRow"> | |
| <!-- Explicit type="button": a bare <button> defaults to submit if this markup ever lands inside a form --> | |
| <button class="cam-btn" id="cameraBtn" type="button">📷 Take a photo</button> | |
| <button class="cam-btn primary" id="galleryBtn" type="button">🖼️ Pick from gallery</button> | |
| </div> | |
| <div class="prep-hint" id="prepHint"> | |
| <div class="prep-spin"></div> | |
| <span>Preparing image…</span> | |
| </div> | |
| <div class="preview" id="preview"> | |
| <img id="previewImg" alt="your photo"> | |
| <div class="preview-bar"> | |
| <span class="preview-name" id="previewName">photo.jpg</span> | |
| <button class="change-btn" id="changeBtn" type="button">✏️ change</button> | |
| </div> | |
| </div> | |
| <button class="analyze-btn" id="analyzeBtn" type="button" disabled>✨ check post-worthiness</button> | |
| <div class="an-box" id="anBox"> | |
| <div class="an-top"> | |
| <div class="an-h" id="anH">🧠 AI is judging your pic…</div> | |
| <div class="an-s" id="anSub">~10–20s · runs on your GPU · no internet used</div> | |
| <div class="dots"> | |
| <div class="d"></div> | |
| <div class="d"></div> | |
| <div class="d"></div> | |
| </div> | |
| </div> | |
| <div class="tok-section"> | |
| <div class="tok-wrap" id="tokWrap"> | |
| <div class="tok-label-bar"> | |
| <span class="tok-label">⚡ live model output</span> | |
| <span class="tok-stats" id="tokStats"></span> | |
| </div> | |
| <div class="tok-scroll" id="tokScroll"> | |
| <div class="tok" id="tokStream"></div> | |
| </div> | |
| </div> | |
| </div> | |
| </div> | |
| </div> | |
| <div id="sec-results"></div> | |
| </div> | |
| </div> | |
| <!-- Camera modal --> | |
<div id="cam-modal">
  <div class="cam-inner">
    <div class="cam-vid-wrap">
      <video id="camVid" autoplay muted playsinline></video>
      <canvas id="camCanvas"></canvas>
    </div>
    <div class="cam-bar">
      <!-- explicit type="button" (default is submit); icon-only buttons get accessible names -->
      <button class="cam-sq" id="camFlipBtn" type="button" aria-label="Flip camera">π</button>
      <button class="cam-act" id="camSnapBtn" type="button">πΈ Snap it</button>
      <button class="cam-sq" id="camCloseBtn" type="button" aria-label="Close camera">β</button>
    </div>
  </div>
</div>
<!-- Toast notifications: role=status so injected messages are announced politely -->
<div id="toast" role="status" aria-live="polite"></div>
| <script type="module"> | |
| /* βββββββββββ LOGGER βββββββββββ */ | |
/* Console logger with colour-coded, bracketed level tags. */
const L = (() => {
  // Build one log method: fixed %c tag + CSS, passthrough for extra args.
  const paint = (print, label, css) => (msg, ...rest) => print(`%c[${label}] ${msg}`, css, ...rest);
  return {
    p: paint(console.log, 'PHASE', 'color:#457b9d;font-weight:700;font-size:12px'),
    i: paint(console.log, 'INFO', 'color:#1d3557;font-size:11px'),
    ok: paint(console.log, 'OK', 'color:#52b788;font-weight:700;font-size:12px'),
    w: paint(console.warn, 'WARN', 'color:#f4a261;font-weight:700'),
    e: paint(console.error, 'ERR', 'color:#e63946;font-weight:700'),
    ll: paint(console.log, 'LLM', 'color:#a8dadc;background:#1d3557;padding:2px 6px;border-radius:3px;font-size:11px'),
  };
})();
/* βββββββββββ STATE βββββββββββ */
// Model id + the CDN build of Transformers.js that loads it.
// NOTE(review): `@next` is an unpinned dist-tag β consider pinning a
// version for reproducible loads.
const MODEL = 'onnx-community/Qwen3.5-0.8B-ONNX';
const CDN = 'https://cdn.jsdelivr.net/npm/@huggingface/transformers@next';
/*
βββββββββββββββββββββββββββββββββββββββββββββββββββ
IMAGE SIZE: 224Γ224 = 8Γ8 patches = 64 vision tokens
βββββββββββββββββββββββββββββββββββββββββββββββββββ
WHY 224 not 448?
Old code used 448Γ448 β 256 vision tokens (16Γ16 patches).
Combined with a ~300-token prompt = 556+ context tokens.
For a 0.8B model, this exhausted the KV cache budget,
causing the model to hit EOS after just 13 tokens.
224Γ224 β 64 vision tokens. Prompt shrinks too.
Total input drops from ~560 to ~160 tokens.
This gives the model ROOM to generate the full JSON (150+ tokens).
Trade-off: slightly less visual detail. For social media scoring
(exposure, lighting, composition, vibe) 224px is more than enough β
these are global image properties, not pixel-level details.
*/
const IMG_SIZE = 224;
// Mutable app-wide state bag (shared by load, prep, camera and analyze flows).
const S = {
  model: null, proc: null, Streamer: null, RawImg: null, // instances/classes from the dynamic CDN import
  ready: false, busy: false, // ready = model loaded+warmed; busy = inference in flight
  rawImg: null, // prepped 224Γ224 RawImage awaiting analysis
  prepPromise: null, // in-flight prep; resolves to a RawImage, or undefined on prep failure
  _pendingDataUrl: null, _pendingName: null, // image picked before the library finished importing
  cam: null, camFace: 'environment', // active MediaStream + current camera facing mode
  history: [], // rolling window of recent results for the session strip
};
/* βββββββββββ DOM βββββββββββ */
// Tiny getElementById helper; all UI nodes are grabbed once at startup.
const $ = id => document.getElementById(id);
const splash = $('splash'), app = $('app');
// Splash-screen phase display (icon / title / subtitle).
const phIcon = $('phIcon'), phTitle = $('phTitle'), phSub = $('phSub');
// Download progress bar + per-file ticker.
const pbar = $('pbar'), pbarPct = $('pbarPct'), pbarPhase = $('pbarPhase'), fileTick = $('fileTick');
// Header status badge ("Loading AIβ¦" / "AI Ready").
const statusBadge = $('statusBadge'), sdot = $('sdot'), statusTxt = $('statusTxt');
// Upload + camera entry points.
const dropZone = $('dropZone'), fileInput = $('fileInput'), camRow = $('camRow');
const cameraBtn = $('cameraBtn'), galleryBtn = $('galleryBtn'), changeBtn = $('changeBtn');
const preview = $('preview'), previewImg = $('previewImg'), previewName = $('previewName');
// Analysis box + live token stream.
const analyzeBtn = $('analyzeBtn'), anBox = $('anBox'), anH = $('anH'), anSub = $('anSub');
const prepHint = $('prepHint');
const tokWrap = $('tokWrap'), tokStream = $('tokStream'), tokScroll = $('tokScroll'), tokStats = $('tokStats');
const secResults = $('sec-results');
// Camera modal.
const camModal = $('cam-modal'), camVid = $('camVid'), camCanvas = $('camCanvas');
const camSnapBtn = $('camSnapBtn'), camCloseBtn = $('camCloseBtn'), camFlipBtn = $('camFlipBtn');
const toastEl = $('toast');
| /* βββββββββββ MODEL LOAD βββββββββββ | |
| FIX: Added GPU warmup after model load. | |
| WebGPU shader compilation (JIT) happens on the FIRST generate() call. | |
| Without warmup, this was causing 60-70 second TTFT during real analysis. | |
| The warmup runs one tiny dummy inference (text-only, 1 token) which: | |
| - Compiles all GPU shaders (30-45s one-time cost) | |
| - Shows a progress bar so the user knows what's happening | |
| - Makes subsequent REAL inferences fast (~2-5s TTFT instead of 67s) | |
| After warmup, shaders are cached in the browser's GPU driver cache. | |
| Subsequent page loads don't need to recompile β warmup stays fast (<5s). | |
| βββββββββββββββββββββββββββββββββββ */ | |
const fileState = {}; // per-file download state: 'dl' (in flight) or 'ok' (done)
/* Update the splash-screen phase display. pct and phase are optional:
   pct moves the progress bar, phase relabels it. */
function setPhase(icon, title, sub, pct, phase) {
  phIcon.textContent = icon;
  phTitle.textContent = title;
  phSub.textContent = sub;
  if (pct != null) {
    pbar.style.width = `${pct}%`;
    pbarPct.textContent = `${Math.round(pct)}%`;
  }
  if (phase) pbarPhase.textContent = phase;
}
/* Record the latest state for one model file, then refresh the ticker line
   under the progress bar (shows active download or completed count). */
function tickFile(name, st) {
  fileState[name] = st;
  const states = Object.values(fileState);
  const downloading = states.includes('dl');
  const done = states.filter(v => v === 'ok').length;
  const total = states.length;
  fileTick.innerHTML = downloading
    ? `<span class="dl">β¬ ${name.split('/').pop()}</span> Β· ${done}/${total} files`
    : `<span class="ok">β ${done}/${total} files downloaded</span>`;
}
/* Full model bootstrap: import library β processor β hardware probe β
   weight download (with progress UI) β GPU warmup β reveal the app.
   Runs once at startup; sets S.ready when everything is usable. */
async function loadModel() {
  const t0 = performance.now();
  L.p('Load start');
  setPhase('π¦', 'Importing AI librariesβ¦', 'Fetching Transformers.js from CDN', 3, 'Importing');
  const { AutoProcessor, Qwen3_5ForConditionalGeneration, RawImage, TextStreamer, env } = await import(CDN);
  S.RawImg = RawImage; S.Streamer = TextStreamer;
  env.useBrowserCache = true; // cache weights in the browser so reloads skip the download
  L.ok(`Library ready (${((performance.now() - t0) / 1000).toFixed(2)}s)`);
  setPhase('βοΈ', 'Loading processorβ¦', 'Tokenizer + vision config', 9, 'Processor');
  S.proc = await AutoProcessor.from_pretrained(MODEL);
  L.ok('Processor ready');
  setPhase('π', 'Detecting hardwareβ¦', 'WebGPU + fp16 check', 14, 'Hardware');
  let device = 'wasm', vDt = 'fp32';
  if (navigator.gpu) {
    device = 'webgpu';
    try {
      const a = await navigator.gpu.requestAdapter();
      if (a?.features?.has('shader-f16')) vDt = 'fp16';
      // FIX: GPUAdapter has no `name` property (it never shipped in the
      // WebGPU spec), so the old `a?.name` always logged '?'. Adapter
      // identification lives on GPUAdapter.info (GPUAdapterInfo).
      L.i(`GPU: ${a?.info?.description || a?.info?.vendor || '?'} | f16: ${vDt === 'fp16'}`);
    } catch (e) { L.w('GPU query:', e); }
  }
  L.ok(`device=${device} dtype=${vDt}`);
  setPhase('π₯', 'Downloading modelβ¦', '~500MBβ1GB Β· cached after first run', 19, 'Downloading');
  const seen = new Set(); // files whose download has started (progress bookkeeping)
  S.model = await Qwen3_5ForConditionalGeneration.from_pretrained(MODEL, {
    dtype: { embed_tokens: 'q4', vision_encoder: vDt, decoder_model_merged: 'q4' },
    device,
    progress_callback: ({ status, file, loaded, total }) => {
      if (!file) return;
      if (status === 'initiate' && !seen.has(file)) { seen.add(file); tickFile(file, 'dl'); }
      if (status === 'progress' && total) {
        // Blend per-file progress with the completed-file ratio into the 19β91% band.
        const done = Object.values(fileState).filter(v => v === 'ok').length;
        const t = Math.max(seen.size, 1);
        const pct = 19 + Math.min(72, ((loaded / total) + (done / t)) * 36);
        pbar.style.width = pct + '%'; pbarPct.textContent = Math.round(pct) + '%';
      }
      if (status === 'done') {
        tickFile(file, 'ok');
        const done = Object.values(fileState).filter(v => v === 'ok').length;
        const t = Object.keys(fileState).length || 6;
        const pct = 19 + Math.round((done / t) * 72);
        pbar.style.width = pct + '%'; pbarPct.textContent = pct + '%';
        L.ok(`β ${file.split('/').pop()} (${done}/${t})`);
      }
    }
  });
  L.ok(`Model weights downloaded (${((performance.now() - t0) / 1000).toFixed(1)}s)`);
  /* GPU warmup: WebGPU JIT-compiles shaders on the FIRST generate() call.
     Warming up with a real image compiles the vision-encoder shaders too β
     a text-only warmup left them cold and the first real analysis still
     paid ~42s TTFT. One-time cost ~20-40s; the driver caches shaders so
     later page loads warm up in seconds and real analyses are fast. */
  if (device === 'webgpu') {
    setPhase('π₯', 'Warming up GPUβ¦', 'Compiling vision + decoder shaders β one-time ~20-40s', 92, 'Warming up');
    fileTick.innerHTML = `<span class="dl">β compiling GPU shaders (cached after this)β¦</span>`;
    const wt = performance.now();
    try {
      // Build a real 224Γ224 image so vision encoder shaders compile too
      const wCanvas = document.createElement('canvas');
      wCanvas.width = IMG_SIZE; wCanvas.height = IMG_SIZE;
      const wCtx = wCanvas.getContext('2d');
      // Simple gradient so it's not all-black (avoids trivial GPU skips)
      const g = wCtx.createLinearGradient(0, 0, IMG_SIZE, IMG_SIZE);
      g.addColorStop(0, '#4a90d9'); g.addColorStop(1, '#a8dadc');
      wCtx.fillStyle = g; wCtx.fillRect(0, 0, IMG_SIZE, IMG_SIZE);
      const wDataUrl = wCanvas.toDataURL('image/jpeg', 0.5);
      const wRawImg = await S.RawImg.read(wDataUrl);
      // Full VL warmup: image + text β 1 token decode
      const wConv = [{ role: 'user', content: [{ type: 'image' }, { type: 'text', text: 'rate 1-10' }] }];
      const wTxt = S.proc.apply_chat_template(wConv, { add_generation_prompt: true, enable_thinking: false });
      const wInputs = await S.proc(wTxt, wRawImg);
      await S.model.generate({ ...wInputs, max_new_tokens: 2, do_sample: false });
      const wSecs = ((performance.now() - wt) / 1000).toFixed(1);
      L.ok(`GPU warmup (vision+decoder) done in ${wSecs}s`);
      fileTick.innerHTML = `<span class="ok">β Vision + decoder shaders compiled (${wSecs}s) β analyses now fast!</span>`;
    } catch (e) {
      // Non-fatal by design: the app still works, first analysis just pays the JIT cost.
      L.w('Warmup failed (non-fatal):', e.message);
      fileTick.innerHTML = `<span class="ok">β Warmup skipped β first analysis may be slower</span>`;
    }
  }
  const totalSecs = ((performance.now() - t0) / 1000).toFixed(1);
  setPhase('β ', 'AI is ready!', `${device.toUpperCase()} Β· ${vDt.toUpperCase()} Β· cached for instant future loads`, 100, 'Ready β');
  pbar.style.width = '100%'; pbarPct.textContent = '100%';
  fileTick.innerHTML = `<span class="ok">β ${device.toUpperCase()} Β· ${vDt.toUpperCase()} Β· ${totalSecs}s total</span>`;
  S.ready = true;
  L.ok(`ββ Model ready in ${totalSecs}s ββ`);
  // Cross-fade splash β app.
  await sleep(800);
  splash.classList.add('hide');
  await sleep(480);
  splash.style.display = 'none';
  app.classList.add('on');
  setStatus(true);
  setTimeout(() => showToast('β‘ GPU warmed up β analysis is now fast!'), 1200);
}
/* Toggle the header badge between its "loading" and "ready" looks. */
function setStatus(ok) {
  const rdy = Boolean(ok);
  statusBadge.className = rdy ? 'status-badge rdy' : 'status-badge ld';
  sdot.className = rdy ? 'sdot' : 'sdot pulse';
  statusTxt.textContent = rdy ? 'AI Ready β' : 'Loading AIβ¦';
}
| /* βββββββββββ IMAGE PREP βββββββββββ | |
| FIX #2: Image resized to 224Γ224 instead of 448Γ448 | |
| Old: 448Γ448 β 256 vision tokens (patch=28px, 16Γ16 patches) | |
| New: 224Γ224 β 64 vision tokens (patch=28px, 8Γ 8 patches) | |
| Impact: Context drops from ~560 tokens to ~170 tokens. | |
| The model now has PLENTY of budget to generate the full JSON response | |
| instead of hitting EOS at 13 tokens due to context exhaustion. | |
| For social media scoring criteria (lighting, composition, vibe, | |
| color story, authenticity) β global image properties that don't | |
| need pixel-level precision β 224px is more than sufficient. | |
| βββββββββββββββββββββββββββββββββββ */ | |
/* Kick off async image prep: decode the data URL into a RawImage and
   resize it to IMG_SIZEΒ². Stores the result in S.rawImg and re-enables
   the analyze button when the model is also ready. Errors are swallowed
   (logged + hint hidden) so the promise resolves to undefined on failure. */
function startPrep(dataUrl, name) {
  S.rawImg = null;
  prepHint.classList.add('on');
  S.prepPromise = (async () => {
    try {
      const decoded = await S.RawImg.read(dataUrl);
      const resized = await decoded.resize(IMG_SIZE, IMG_SIZE);
      S.rawImg = resized;
      prepHint.classList.remove('on');
      L.ok(`Prep done: "${name}" β ${IMG_SIZE}Γ${IMG_SIZE} RawImage (64 vision tokens)`);
      if (S.ready && !S.busy) analyzeBtn.disabled = false;
      return resized;
    } catch (err) {
      prepHint.classList.remove('on');
      L.w('Prep failed:', err.message);
      S.prepPromise = null;
    }
  })();
}
/* Accept a picked/dropped image file: show the preview, hide the pickers,
   and start RawImage prep (or queue it if the library isn't imported yet). */
function loadFile(file) {
  L.i(`File: ${file.name} ${file.type} ${(file.size / 1024).toFixed(1)}KB`);
  fileToDataUrl(file).then(dataUrl => {
    // FIX: revoke the preview's object URL once the <img> has consumed it.
    // The old code created one URL per picked file and never released it.
    const objUrl = URL.createObjectURL(file);
    previewImg.onload = () => { URL.revokeObjectURL(objUrl); previewImg.onload = null; };
    previewImg.src = objUrl;
    previewName.textContent = file.name;
    preview.style.display = 'block';
    dropZone.classList.add('gone');
    camRow.classList.add('gone');
    analyzeBtn.disabled = true; // re-enabled once prep finishes
    secResults.classList.remove('on'); secResults.innerHTML = '';
    if (S.RawImg) startPrep(dataUrl, file.name);
    else { S._pendingDataUrl = dataUrl; S._pendingName = file.name; }
  });
}
// Wire up the three ways to supply an image: file input, drag & drop, gallery button.
fileInput.addEventListener('change', e => { const f = e.target.files?.[0]; if (f) loadFile(f); });
dropZone.addEventListener('dragover', e => { e.preventDefault(); dropZone.classList.add('over'); });
dropZone.addEventListener('dragleave', () => dropZone.classList.remove('over'));
dropZone.addEventListener('drop', e => {
  e.preventDefault(); dropZone.classList.remove('over');
  const f = e.dataTransfer.files?.[0];
  if (f && f.type.startsWith('image/')) loadFile(f); // silently ignore non-image drops
});
// Gallery button proxies to the hidden <input type="file">.
galleryBtn.addEventListener('click', () => fileInput.click());
changeBtn.addEventListener('click', reset);
/* Return the upload UI to its initial "pick a photo" state and clear
   any prepped image, pending prep, and previous results. */
function reset() {
  Object.assign(S, { rawImg: null, prepPromise: null, _pendingDataUrl: null });
  preview.style.display = 'none';
  dropZone.classList.remove('gone');
  camRow.classList.remove('gone');
  prepHint.classList.remove('on');
  analyzeBtn.disabled = true;
  anBox.classList.remove('on');
  tokWrap.classList.remove('fade');
  secResults.classList.remove('on');
  secResults.innerHTML = '';
  fileInput.value = '';
}
/* Camera */
cameraBtn.addEventListener('click', openCamera);
camCloseBtn.addEventListener('click', closeCamera);
// Flip toggles front/back facing mode, then reopens the stream with the new constraint.
camFlipBtn.addEventListener('click', () => { S.camFace = S.camFace === 'environment' ? 'user' : 'environment'; openCamera(); });
/* Request a camera stream for the current facing mode and show the modal.
   Any existing stream is stopped first (this is how "flip" swaps cameras). */
async function openCamera() {
  if (S.cam) {
    S.cam.getTracks().forEach(t => t.stop());
    S.cam = null;
  }
  const constraints = { video: { facingMode: S.camFace, width: { ideal: 1280 }, height: { ideal: 960 } } };
  try {
    S.cam = await navigator.mediaDevices.getUserMedia(constraints);
    camVid.srcObject = S.cam;
    camModal.classList.add('on');
  } catch {
    showToast('Camera access denied π');
  }
}
/* Stop the active stream (releases the camera light) and hide the modal. */
function closeCamera() {
  if (S.cam) {
    S.cam.getTracks().forEach(t => t.stop());
    S.cam = null;
  }
  camVid.srcObject = null;
  camModal.classList.remove('on');
}
/* Snap: freeze the current video frame onto the hidden canvas, then feed
   the resulting JPEG data URL through the same preview/prep pipeline as
   an uploaded file. */
camSnapBtn.addEventListener('click', () => {
  const ctx = camCanvas.getContext('2d');
  const w = camVid.videoWidth || 640;
  const h = camVid.videoHeight || 480;
  camCanvas.width = w;
  camCanvas.height = h;
  ctx.drawImage(camVid, 0, 0);
  const dataUrl = camCanvas.toDataURL('image/jpeg', .88);
  L.i(`Camera snap ${w}Γ${h}`);
  previewImg.src = dataUrl;
  previewName.textContent = 'camera-snap.jpg';
  preview.style.display = 'block';
  dropZone.classList.add('gone');
  camRow.classList.add('gone');
  analyzeBtn.disabled = true;
  secResults.classList.remove('on');
  secResults.innerHTML = '';
  if (S.RawImg) startPrep(dataUrl, 'camera-snap.jpg');
  else {
    S._pendingDataUrl = dataUrl;
    S._pendingName = 'camera-snap.jpg';
  }
  closeCamera();
});
/* βββββββββββ SCORE KEYS βββββββββββ */
// Order matters: index i here maps to the i-th integer in the model's
// SCORES line (see the "Score order" line inside PROMPT). `key` is the
// internal score-object field; `label`/`emoji` are for rendering.
const KEYS = [
  { key: 'main_character_energy', label: 'Main Character Energy', emoji: 'π' },
  { key: 'vibe_check', label: 'Vibe Check', emoji: 'β¨' },
  { key: 'lighting_slay', label: 'Lighting Slay', emoji: 'π‘' },
  { key: 'color_story', label: 'Color Story', emoji: 'π¨' },
  { key: 'composition_drip', label: 'Composition Drip', emoji: 'π' },
  { key: 'authenticity_meter', label: 'Authenticity Meter', emoji: 'πΏ' },
  { key: 'caption_worthy', label: 'Caption Worthy', emoji: 'π¬' },
  { key: 'scroll_stop_factor', label: 'Scroll-Stop Factor', emoji: 'π' },
];
/* βββββββββββ PROMPT βββββββββββ
ROOT CAUSE of identical scores across all images:
Previous prompt contained literal example numbers:
SCORES:65,70,55,72,68,74,80,60
OVERALL:68
VIBE:clean aesthetic
With do_sample:false (greedy), the model picks the highest-probability
token at every step. After seeing those example numbers in the prompt,
copying them IS the highest-probability completion β so EVERY image
returned identical scores regardless of content.
Fix:
1. Remove ALL example values from the prompt. Use format descriptors only.
2. Switch back to low-temperature sampling (0.3) so per-image variation
can emerge. Temperature 0.3 = mostly structured but not robotically
deterministic. The format lines act as anchors; sampling provides
image-specific variation in the actual numbers.
3. The assistant turn is "prefilled" with "TYPE:" to force the model
to start outputting the key:value format immediately rather than
preambling with explanation text.
βββββββββββββββββββββββββββββββββββ */
// NOTE: the bracketed segments below are format DESCRIPTORS, not literal
// examples β keep it that way (see the rationale above). parseKV() expects
// exactly these line keys.
const PROMPT = `Carefully analyze this specific image for social media potential. Look at the actual lighting, composition, colors, and mood visible in the image. Then reply with ONLY these 8 lines:
TYPE:[photo or screenshot or meme or artwork or other]
SCORES:[eight integers 0-100 separated by commas]
OVERALL:[one integer 0-100, the average of your 8 scores]
VIBE:[2-3 words describing the aesthetic you actually see]
PLATFORM:[instagram or tiktok or pinterest or twitter or linkedin]
TIP1:[one specific actionable improvement for this image]
TIP2:[one specific actionable improvement for this image]
TIP3:[one specific actionable improvement for this image]
Score order: energy,vibe,lighting,colors,composition,authenticity,caption_potential,scroll_stop
Scoring guide: 30-45=weak, 46-60=average, 61-74=good, 75-84=great, 85+=exceptional
Be honest and vary your scores based on what you actually see. No preamble, no explanation.`;
/* βββββββββββ ANALYZE βββββββββββ */
/* Main entry point: waits out any in-flight image prep, runs inference,
   records session history, and renders results. Guarded by S.ready/S.busy. */
analyzeBtn.addEventListener('click', async () => {
  if (!S.ready || S.busy) return;
  let rawImg = S.rawImg;
  if (!rawImg && S.prepPromise) {
    // Image still decoding/resizing β show a hint and wait for it.
    analyzeBtn.disabled = true;
    anH.textContent = 'βοΈ Finishing image prepβ¦';
    anBox.classList.add('on');
    try { rawImg = await S.prepPromise; }
    catch (e) { anBox.classList.remove('on'); showToast('β οΈ Image prep failed β try again?'); return; }
  }
  if (!rawImg) {
    // FIX: startPrep() swallows prep errors, so a failed prep RESOLVES its
    // promise to undefined instead of rejecting β we land here with the
    // "Finishing image prepβ¦" box still visible. Hide it before bailing.
    anBox.classList.remove('on');
    showToast('β οΈ No image ready β pick a photo first');
    return;
  }
  S.busy = true;
  analyzeBtn.disabled = true;
  anH.textContent = 'π§ AI is judging your picβ¦';
  anSub.textContent = '~5β15s Β· runs on your GPU Β· no internet used';
  anBox.classList.add('on');
  tokStream.textContent = '';
  tokStats.textContent = '';
  tokWrap.classList.remove('fade');
  secResults.classList.remove('on'); secResults.innerHTML = '';
  L.p('Analysis start');
  const t0 = performance.now();
  try {
    const data = await runInference(rawImg);
    L.ok(`Done in ${((performance.now() - t0) / 1000).toFixed(1)}s`);
    tokWrap.classList.add('fade');
    await sleep(480);
    anBox.classList.remove('on');
    // Keep a rolling window of the last 5 results for the session strip.
    S.history.push({ overall: data.overall, verdict: data.verdict, vibe_label: data.vibe_label, content_type: data.content_type, ts: Date.now() });
    if (S.history.length > 5) S.history.shift();
    render(data);
    secResults.classList.add('on');
    setTimeout(() => secResults.scrollIntoView({ behavior: 'smooth', block: 'start' }), 80);
  } catch (err) {
    anBox.classList.remove('on');
    showToast('β οΈ Analysis failed β try again?');
    L.e('Analysis error:', err);
    console.error(err);
    analyzeBtn.disabled = false;
  }
  S.busy = false;
  if (S.rawImg) analyzeBtn.disabled = false;
});
| /* βββββββββββ INFERENCE βββββββββββ | |
| KEY CHANGE: do_sample:true, temperature:0.3 | |
| Greedy (do_sample:false) was causing identical output every run | |
| because copying the example values was always the highest-probability | |
| completion. Now that example values are removed from the prompt, | |
| we add low-temperature sampling so the model's assessment of the | |
| actual image content can be expressed in the output. | |
| temperature:0.3 = low enough to maintain format structure, | |
| high enough to vary based on what the model sees in the image. | |
| top_k:50 added as a soft cap β prevents low-probability garbage | |
| tokens while still allowing numeric variation across runs. | |
| βββββββββββββββββββββββββββββββββββ */ | |
/* Run one full vision+text generation pass over a prepped RawImage.
   Streams tokens into the UI as they decode, logs timing, and returns the
   parsed score object (parse() itself always returns something usable).
   Throws only if the processor/generate call fails. */
async function runInference(rawImage) {
  const t0 = performance.now();
  L.ll('Chat template (enable_thinking=false)β¦');
  const conv = [{ role: 'user', content: [{ type: 'image' }, { type: 'text', text: PROMPT }] }];
  const txt = S.proc.apply_chat_template(conv, { add_generation_prompt: true, enable_thinking: false });
  L.ll(`Prompt: ${txt.length} chars`);
  L.ll('Tokenize + vision encodeβ¦');
  const tp = performance.now();
  const inputs = await S.proc(txt, rawImage);
  const inputLen = inputs.input_ids?.dims?.[1] || 0;
  L.ll(`Inputs ready (${((performance.now() - tp) / 1000).toFixed(2)}s) | tokens:${inputLen} (${inputLen - 64} text + 64 vision)`);
  let out = '', tokCount = 0, ttft = null, lastStats = 0;
  const tGen = performance.now();
  const streamer = new S.Streamer(S.proc.tokenizer, {
    skip_prompt: true,
    skip_special_tokens: true,
    callback_function: tok => {
      // First token marks time-to-first-token (prefill finished).
      if (ttft === null) {
        ttft = performance.now() - tGen;
        const label = ttft < 5000 ? 'β fast' : 'β slow';
        L.ll(`TTFT: ${ttft.toFixed(0)}ms ${label}`);
      }
      out += tok; tokCount++;
      tokStream.textContent = out;
      tokScroll.scrollTop = tokScroll.scrollHeight;
      const now = performance.now();
      // Throttle the tok/s readout to roughly one repaint per 350ms.
      if (now - lastStats > 350) {
        lastStats = now;
        const e = (now - tGen) / 1000;
        tokStats.textContent = `${tokCount} tok Β· ${(tokCount / e).toFixed(1)}/s Β· ${e.toFixed(0)}s`;
      }
    }
  });
  L.ll('Generatingβ¦');
  await S.model.generate({
    ...inputs,
    max_new_tokens: 150,
    do_sample: true,
    temperature: 0.3, // low = structured output; >0 = image-specific variation
    top_k: 50, // caps garbage tokens without killing numeric variance
    streamer,
  });
  const genSecs = (performance.now() - tGen) / 1000;
  const tps = (tokCount / genSecs).toFixed(1);
  L.ll(`Done: ${tokCount} tok Β· ${tps}/s Β· ${genSecs.toFixed(1)}s`);
  tokStats.textContent = `β ${tokCount} tok Β· ${tps}/s Β· ${genSecs.toFixed(1)}s`;
  // Strip any <think>β¦</think> block in case thinking mode leaks through.
  const cleaned = out.replace(/<think>[\s\S]*?<\/think>/gi, '').trim();
  console.groupCollapsed('π Raw LLM output'); console.log(out); console.groupEnd();
  const result = parse(cleaned || out);
  console.groupCollapsed('β Parsed'); console.log(result); console.groupEnd();
  L.ok(`runInference total: ${((performance.now() - t0) / 1000).toFixed(1)}s`);
  return result;
}
| /* βββββββββββ PARSE / VALIDATE βββββββββββ | |
| New dual-parser strategy: | |
| PRIMARY: key:value line format (matches the new prompt) | |
| TYPE:photo | |
| SCORES:65,70,55,72,68,74,80,60 | |
| OVERALL:68 | |
| VIBE:clean aesthetic | |
| PLATFORM:instagram | |
| TIP1:... TIP2:... TIP3:... | |
| FALLBACK: JSON parser (for model that ignores format) | |
| If the output contains { ... } try JSON parse + partial recovery | |
| Captions generated CLIENT-SIDE from vibe+score β reliable, instant, | |
| no LLM token budget wasted. Templates are human-written and better | |
| than what a 0.8B model produces anyway. | |
| βββββββββββββββββββββββββββββββββββ */ | |
| /* Client-side caption templates based on score tier + vibe */ | |
/* Client-side caption generator. Picks a template set from the overall
   score tier; LinkedIn always gets its own corporate-flavoured set.
   No LLM token budget is spent here. */
function makeCaptions(overall, vibeLabel, platform) {
  const v = vibeLabel || 'this look';
  // Platform override first β tier is irrelevant for LinkedIn.
  if (platform === 'linkedin') {
    return [
      `Excited to share this milestone! ${v} in action. π`,
      `Growth mindset: ${v} perspective.`,
      `Grateful for moments like these. #grateful`,
    ];
  }
  let tier;
  if (overall >= 80) tier = 'epic';
  else if (overall >= 65) tier = 'fire';
  else if (overall >= 50) tier = 'good';
  else tier = 'raw';
  const templates = {
    epic: [
      `not taking any questions, just ${v} energy β¨`,
      `the algorithm will NOT be ready for this ${v} era π`,
      `studied the assignment and then rewrote it π₯`,
    ],
    fire: [
      `${v} and we are NOT apologizing for it π₯`,
      `main character behavior, ${v} edition β`,
      `for anyone who needed to see this today π `,
    ],
    good: [
      `${v} hitting different today β¨`,
      `not every pic needs a caption but this one does π¬`,
      `the vibe has been set, proceed accordingly π`,
    ],
    raw: [
      `real ones know what this is about π`,
      `showing up, no filter needed πΏ`,
      `this is us now, take it or leave it πΈ`,
    ],
  };
  return templates[tier] || templates.good;
}
/* Primary parser: the KEY:value line format requested by PROMPT.
   Returns the validated result object, or null when the text doesn't
   look like our format (no SCORES and no OVERALL line). */
function parseKV(text) {
  const kv = {};
  for (const rawLine of text.split('\n')) {
    const line = rawLine.trim();
    if (!line) continue;
    const ci = line.indexOf(':');
    if (ci < 1) continue; // no key before the colon
    kv[line.slice(0, ci).trim().toUpperCase()] = line.slice(ci + 1).trim();
  }
  if (!kv.SCORES && !kv.OVERALL) return null; // not our format
  const nums = (kv.SCORES || '').split(',').map(n => parseInt(n)).filter(n => !isNaN(n));
  const scores = {};
  KEYS.forEach(({ key }, i) => { scores[key] = clamp(nums[i] ?? 65, 0, 100); });
  const avg = Math.round(Object.values(scores).reduce((a, b) => a + b, 0) / KEYS.length);
  const overall = clamp(parseInt(kv.OVERALL) || avg, 0, 100);
  const vibe = (kv.VIBE || 'undefined aesthetic').toLowerCase().trim();
  const platform = (kv.PLATFORM || 'instagram').toLowerCase().trim();
  return validate({
    scores,
    overall,
    vibe_label: vibe,
    best_platform: platform,
    glow_ups: [kv.TIP1, kv.TIP2, kv.TIP3].filter(Boolean),
    captions: makeCaptions(overall, vibe, platform),
    content_type: (kv.TYPE || 'photo').toLowerCase().trim(),
    posting_time: 'TueβThu 7β9pm',
    content_note: '',
  });
}
/* Fallback parser: try strict JSON on the outermost {...} span, then
   regex-based partial recovery of known keys. Returns null if nothing
   recoverable is found. */
function parseJSON(text) {
  const start = text.indexOf('{');
  const end = text.lastIndexOf('}');
  if (start === -1 || end <= start) return null;
  const candidate = text.slice(start, end + 1).replace(/,(\s*[}\]])/g, '$1'); // strip trailing commas
  try {
    return validate(JSON.parse(candidate));
  } catch { /* fall through to partial recovery */ }
  const partial = { scores: {} };
  const ov = text.match(/"overall"\s*:\s*(\d+)/);
  if (ov) partial.overall = +ov[1];
  for (const { key } of KEYS) {
    const m = text.match(new RegExp(`"${key}"\\s*:\\s*(\\d+)`));
    if (m) partial.scores[key] = +m[1];
  }
  const vibe = text.match(/"vibe_label"\s*:\s*"([^"]+)"/);
  if (vibe) partial.vibe_label = vibe[1];
  const plat = text.match(/"best_platform"\s*:\s*"([^"]+)"/);
  if (plat) partial.best_platform = plat[1];
  return (Object.keys(partial.scores).length > 0 || partial.overall) ? validate(partial) : null;
}
/* Top-level parser: prefer the key:value format, fall back to JSON,
   and never fail β a graceful default is returned as a last resort. */
function parse(text) {
  const viaKV = parseKV(text);
  if (viaKV) {
    L.ok('Parsed via key:value format');
    return viaKV;
  }
  L.w('key:value parse failed β trying JSON fallback');
  const viaJSON = parseJSON(text);
  if (viaJSON) {
    L.ok('Parsed via JSON fallback');
    return viaJSON;
  }
  L.w('Both parsers failed β using graceful fallback');
  return fallback();
}
/* Normalize a (possibly partial) parse result into the full shape that
   render() expects: every score present and clamped to 0-100, overall
   defaulting to the score average, captions filled in client-side. */
function validate(d) {
  const incoming = d.scores || {};
  const scores = {};
  for (const { key } of KEYS) {
    const n = parseInt(incoming[key]);
    scores[key] = Number.isNaN(n) ? 65 : clamp(n, 0, 100); // 65 = neutral default
  }
  const avg = Math.round(Object.values(scores).reduce((sum, v) => sum + v, 0) / KEYS.length);
  const overall = clamp(parseInt(d.overall) || avg, 0, 100);
  const vibe = d.vibe_label || 'undefined aesthetic';
  const platform = d.best_platform || 'instagram';
  const captions = Array.isArray(d.captions) && d.captions.length
    ? d.captions.slice(0, 3)
    : makeCaptions(overall, vibe, platform);
  return {
    scores,
    overall,
    verdict: verdictOf(overall),
    vibe_label: vibe,
    captions,
    best_platform: platform,
    glow_ups: (Array.isArray(d.glow_ups) ? d.glow_ups : []).filter(Boolean).slice(0, 3),
    posting_time: d.posting_time || 'TueβThu 7β9pm',
    content_note: d.content_note || '',
    content_type: d.content_type || 'photo',
  };
}
/* Last-resort result when nothing could be parsed: flat 62s everywhere. */
function fallback() {
  const scores = Object.fromEntries(KEYS.map(({ key }) => [key, 62]));
  return validate({ scores, overall: 62 });
}
/* Map an overall 0-100 score onto a playful verdict string. */
function verdictOf(n) {
  if (n >= 86) return 'iconic era π';
  if (n >= 71) return 'main character approved β';
  if (n >= 56) return 'lowkey fire π₯';
  if (n >= 41) return 'has potential β¨';
  if (n >= 26) return 'giving nothing π';
  return 'not it π';
}
| /* βββββββββββ RENDER βββββββββββ */ | |
/* Score β {colour class, fill class} pair used by the score bars. */
function cls(n) {
  const tier = n >= 80 ? 'epic' : n >= 60 ? 'hi' : n >= 40 ? 'mid' : 'low';
  return { c: `c-${tier}`, f: `f-${tier}` };
}
/* Verdict-emoji β hero gradient colour pair; checked in priority order. */
function vGrad(v) {
  const table = [
    ['π', ['#1b4332', '#40916c']],
    ['β', ['#023e8a', '#48cae4']],
    ['π₯', ['#7b2d00', '#e63946']],
    ['β¨', ['#457b9d', '#a8dadc']],
    ['π', ['#555', '#888']],
  ];
  for (const [emoji, pair] of table) {
    if (v.includes(emoji)) return pair;
  }
  return ['#333', '#555'];
}
/* Score β single accent colour (inline styling of numeric badges). */
function sColor(n) {
  if (n >= 80) return '#52b788';
  if (n >= 60) return '#457b9d';
  if (n >= 40) return '#f4a261';
  return '#e63946';
}
function render(d) {
    // Render the full analysis result `d` into secResults, then kick off
    // the ring/bar/number animations and (for high scores) confetti.
    // `d` is assumed to be the shape produced by validate() -- TODO confirm
    // all call sites route through it.
    //
    // Fix: model-sourced strings (captions, vibe_label, posting_time,
    // best_platform, verdict text) were interpolated into markup without
    // escaping; in particular a `"` inside a caption terminated the
    // double-quoted onclick attribute early (markup injection). They are
    // now escaped at every interpolation point.
    L.p(`Render: ${d.overall} "${d.verdict}" ${d.content_type}`);
    const [c1, c2] = vGrad(d.verdict);
    const { c: rc } = cls(d.overall);
    // Lookup tables for platform emoji and content-type badge (with fallbacks).
    const pe = { instagram: 'πΈ', tiktok: 'π΅', pinterest: 'π', twitter: 'π¦', bereal: 'π‘', linkedin: 'πΌ', reddit: 'π΄', tumblr: 'π' }[d.best_platform] || 'π±';
    const ctIcon = { photo: 'π·', screenshot: 'π±', diagram: 'π', meme: 'π', artwork: 'π¨', other: 'πΌοΈ' }[d.content_type] || 'πΌοΈ';
    const ctLabel = { photo: 'Photo', screenshot: 'Screenshot', diagram: 'Diagram/Chart', meme: 'Meme', artwork: 'Artwork', other: 'Image' }[d.content_type] || 'Image';
    const ctNote = d.content_note ? `<div class="ct-warn">${ctIcon} <b>${ctLabel} detected</b> β ${esc(d.content_note)}</div>` : '';
    const caps = d.captions.map((c, i) => {
        // The caption travels as a JS string literal inside a double-quoted
        // HTML onclick attribute: escape for the JS string first (backslash,
        // quote, newline), then for the attribute context (&, ", <).
        const jsArg = c.replace(/\\/g, '\\\\').replace(/'/g, "\\'").replace(/\n/g, ' ')
            .replace(/&/g, '&amp;').replace(/"/g, '&quot;').replace(/</g, '&lt;');
        return `
<div class="cap-item" onclick="copyCap(this,'${jsArg}')">
<div class="cap-num">${i + 1}</div>
<div><div class="cap-txt">${esc(c)}</div><div class="cap-hint">tap to copy</div></div>
</div>`;
    }).join('');
    const glows = d.glow_ups.map((g, i) => `<div class="glow-item"><div class="glow-ic">${['π', 'π', 'π'][i] || 'π‘'}</div><div>${esc(g)}</div></div>`).join('');
    const bars = KEYS.map(({ key, label, emoji }) => {
        const n = d.scores[key]; const { c, f } = cls(n);
        // Bars render at zero; the rAF block below animates width to data-t
        // and the numeric label to data-sc.
        return `<div class="sbar">
<div class="sbar-row"><div class="sbar-n">${emoji} ${label}</div><div class="sbar-v ${c}" data-sc="${n}">0</div></div>
<div class="sbar-track"><div class="sbar-fill ${f}" data-t="${n}"></div></div>
</div>`;
    }).join('');
    // Previous result (if any) drives the "vs last pic" comparison card.
    const prev = S.history.length > 1 ? S.history[S.history.length - 2] : null;
    const histHTML = S.history.length > 1 ? `
<div class="card">
<div class="compare-h">π Session (${S.history.length} pics)</div>
<div class="history-strip">
${S.history.map((h, i) => { const isCurr = i === S.history.length - 1; const { c } = cls(h.overall); return `<div class="hist-item${isCurr ? ' current' : ''}"><div class="hist-n ${c}">${h.overall}</div><div class="hist-v">${esc(h.vibe_label)}</div></div>`; }).join('')}
</div>
</div>` : '';
    const cmpHTML = prev ? (() => {
        const diff = d.overall - prev.overall;
        const dc = diff > 0 ? 'up' : diff < 0 ? 'dn' : 'eq';
        const dl = diff > 0 ? `+${diff} β` : diff < 0 ? `${diff} β` : '= tied';
        return `<div class="compare-card">
<div class="compare-h">π vs last pic</div>
<div class="compare-row">
<div class="cmp-side"><div class="cmp-lbl">Previous</div><div class="cmp-score">${prev.overall}</div><div class="cmp-vibe">${esc(prev.vibe_label)}</div></div>
<div class="cmp-vs">vs</div>
<div class="cmp-side now"><div class="cmp-lbl">This one β¨</div><div class="cmp-score">${d.overall}</div><div class="cmp-vibe">${esc(d.vibe_label)}</div></div>
</div>
<div style="text-align:center;margin-top:10px"><span class="delta ${dc}">${dl}</span></div>
</div>`;
    })() : '';
    secResults.innerHTML = `
${ctNote}
<div class="verdict-banner">
<div class="vb-bg" style="background:linear-gradient(140deg,${c1},${c2})"></div>
<div class="vb-inner">
<span class="v-emoji">${d.verdict.match(/\p{Emoji}/u)?.[0] || 'β¨'}</span>
<div class="v-text">${esc(d.verdict.replace(/\p{Emoji}/gu, '').trim())}</div>
<div class="v-score">Post Score: ${d.overall}/100</div>
<div class="v-vibe">${esc(d.vibe_label)}</div>
</div>
</div>
<div class="card">
<div class="ring-row">
<div class="ring-wrap">
<svg class="ring-svg" width="88" height="88" viewBox="0 0 88 88">
<circle class="ring-bg" cx="44" cy="44" r="38"/>
<circle class="ring-arc" cx="44" cy="44" r="38" stroke="${sColor(d.overall)}" stroke-dasharray="239" stroke-dashoffset="239" id="rArc"/>
</svg>
<div class="ring-lbl"><div class="ring-n ${rc}" id="rNum">0</div><div class="ring-t">score</div></div>
</div>
<div class="ring-info">
<div class="ri-h">Best for ${esc(d.best_platform.charAt(0).toUpperCase() + d.best_platform.slice(1))}</div>
<div class="ri-s">Post on <strong>${esc(d.posting_time)}</strong></div>
<span class="tag tag-d">${pe} ${esc(d.best_platform)}</span>
<span class="tag tag-f">π ${esc(d.posting_time)}</span>
</div>
</div>
</div>
${cmpHTML}
<div class="card"><div class="bars-h">π¬ The breakdown</div>${bars}</div>
${d.captions.length ? `<div class="card"><div class="cap-h">π¬ Caption inspo (tap to copy)</div>${caps}</div>` : ''}
<div class="glow-card"><div class="glow-h">π Glow-up tips</div>${glows}</div>
${histHTML}
<div class="card" style="text-align:center;padding:20px">
<div style="font-size:28px;margin-bottom:8px">πΈ</div>
<div style="font-family:var(--fd);font-size:16px;font-weight:700;color:var(--deep);margin-bottom:5px">Try another pic?</div>
<div style="font-size:12px;color:var(--g600);margin-bottom:14px">Model stays loaded β instant analysis β‘</div>
<button class="analyze-btn" onclick="doRetry()" style="font-size:14px;padding:12px">β¨ Check another pic</button>
</div>
`;
    // Animate once the new DOM is committed: ring arc + big number first,
    // then each breakdown bar's width and numeric label.
    requestAnimationFrame(() => {
        const arc = document.getElementById('rArc');
        // 239 is the ring circumference (~2*pi*38); shrinking the dash
        // offset sweeps the arc toward the score percentage.
        if (arc) setTimeout(() => { arc.style.strokeDashoffset = 239 - (d.overall / 100) * 239; animNum(document.getElementById('rNum'), 0, d.overall, 1400); }, 80);
        document.querySelectorAll('.sbar-fill').forEach(el => setTimeout(() => { el.style.width = el.dataset.t + '%'; }, 150));
        document.querySelectorAll('[data-sc]').forEach(el => setTimeout(() => animNum(el, 0, +el.dataset.sc, 1100), 150));
    });
    if (d.overall >= 70) confetti();
}
| /* βββββββββββ UTILS βββββββββββ */ | |
window.copyCap = async (el, text) => {
    // Copy a caption to the clipboard and flash a "copied" state on the tile.
    try {
        await navigator.clipboard.writeText(text);
        el.classList.add('copied');
        showToast('Caption copied! β');
        setTimeout(() => el.classList.remove('copied'), 2000);
    } catch {
        // Clipboard API unavailable or denied -- tell the user the manual route.
        showToast('Long-press to copy');
    }
};
window.doRetry = () => {
    // Reset app state, then scroll back to the upload section after the
    // DOM has had a beat to settle.
    reset();
    setTimeout(() => {
        $('sec-upload')?.scrollIntoView({ behavior: 'smooth', block: 'start' });
    }, 80);
};
function animNum(el, from, to, dur) {
    // Tween el.textContent from `from` to `to` over `dur` ms, using a
    // cubic ease-out so the count decelerates as it lands.
    const startedAt = performance.now();
    const step = (now) => {
        const t = Math.min(1, (now - startedAt) / dur);
        const eased = 1 - Math.pow(1 - t, 3);
        el.textContent = Math.round(from + (to - from) * eased);
        if (t < 1) requestAnimationFrame(step);
    };
    step(performance.now());
}
function confetti() {
    // Drop 36 confetti pieces from above the viewport. Each gets a random
    // horizontal position, palette color, fall duration/delay, rotation,
    // size and shape, and removes itself from the DOM after 3.2s.
    const palette = ['#e63946', '#457b9d', '#a8dadc', '#52b788', '#f4a261', '#1d3557'];
    for (let i = 0; i < 36; i++) {
        const piece = document.createElement('div');
        piece.className = 'conf';
        piece.style.cssText = [
            `left:${Math.random() * 100}vw`,
            'top:-10px',
            `background:${palette[i % 6]}`,
            `animation:cfell ${1.4 + Math.random() * 1.4}s ease-in forwards`,
            `animation-delay:${Math.random() * .5}s`,
            `transform:rotate(${Math.random() * 360}deg)`,
            `width:${5 + Math.random() * 6}px`,
            `height:${5 + Math.random() * 6}px`,
            `border-radius:${Math.random() > .5 ? '50%' : '2px'}`,
        ].join(';');
        document.body.appendChild(piece);
        setTimeout(() => piece.remove(), 3200);
    }
}
// Single shared hide-timer so rapid successive toasts restart the delay
// instead of stacking hides.
let _tt;
function showToast(msg) {
    // Show `msg` in the toast element for 2.8s; calling again resets the clock.
    toastEl.textContent = msg;
    toastEl.classList.add('show');
    clearTimeout(_tt);
    _tt = setTimeout(() => toastEl.classList.remove('show'), 2800);
}
function fileToDataUrl(f) {
    // Read a File/Blob into a base64 `data:` URL; rejects on reader error.
    return new Promise((resolve, reject) => {
        const reader = new FileReader();
        reader.onload = () => resolve(reader.result);
        reader.onerror = reject;
        reader.readAsDataURL(f);
    });
}
function esc(s) {
    // HTML-escape a string for safe interpolation into markup/attributes.
    // Fix: as written, every .replace() substituted a character with itself
    // (& -> &, < -> <, ...), making esc() a no-op -- the entity replacement
    // text had evidently been decoded back to literal characters. Restore
    // real entities; ampersand must be escaped first so the others aren't
    // double-escaped. Also escape ' for single-quoted attribute contexts.
    return (s || '')
        .replace(/&/g, '&amp;')
        .replace(/</g, '&lt;')
        .replace(/>/g, '&gt;')
        .replace(/"/g, '&quot;')
        .replace(/'/g, '&#39;');
}
function sleep(ms) {
    // Promise that resolves (with undefined) after `ms` milliseconds.
    return new Promise((resolve) => setTimeout(resolve, ms));
}
function clamp(v, mn, mx) {
    // Constrain v to [mn, mx]: raise to the floor first, then cap at the
    // ceiling (same composition as Math.min(mx, Math.max(mn, v))).
    const floored = Math.max(mn, v);
    return Math.min(mx, floored);
}
| /* βββββββββββ BOOT βββββββββββ */ | |
// Startup banner + changelog note in the devtools console.
console.log('%c post ready? β¨ ', 'background:#1d3557;color:#a8dadc;font-size:16px;font-weight:800;padding:4px 10px;border-radius:4px');
console.log('%c fix: removed example numbers from prompt (model was copying them) + temp=0.3 sampling ', 'color:#457b9d;font-size:11px');
// Log WebGPU availability -- presumably used by loadModel() to pick a backend;
// TODO confirm against loadModel's implementation.
L.i(`WebGPU: ${!!navigator.gpu}`);
// Start the model load. If the user dropped an image while the model was
// still loading, a pending data URL was parked on S; process it now and
// clear the slot so it only runs once.
loadModel()
.then(() => {
if (S._pendingDataUrl) {
startPrep(S._pendingDataUrl, S._pendingName || 'image');
S._pendingDataUrl = null;
}
})
.catch(err => {
// Load failed: surface the error on the splash phase display, then after
// 3s fade the splash out and reveal the app shell anyway (status = offline).
L.e(`loadModel threw: ${err.message}`); console.error('[FATAL]', err);
setPhase('β', 'Load failed', err.message?.slice(0, 60) || 'Unknown error', null, 'Error');
setTimeout(async () => { splash.classList.add('hide'); await sleep(480); splash.style.display = 'none'; app.classList.add('on'); setStatus(false); }, 3000);
});
// Watchdog: every 400ms, re-enable the analyze button if state says it
// should be clickable but the DOM still has it disabled.
setInterval(() => { if (S.ready && S.rawImg && !S.busy && analyzeBtn.disabled) analyzeBtn.disabled = false; }, 400);
| </script> | |
| </body> | |
| </html> |