Add IndexedDB logging and timeout detection for hung model loading
Browse files- source/dist/assets/index-BlYoBjIf.js → assets/index-ZEJvQeY9.js +0 -0
- assets/worker-BTGDtQEO.js +1 -0
- index.html +1 -1
- source/dist/assets/index-ZEJvQeY9.js +0 -0
- source/dist/assets/worker-BTGDtQEO.js +1 -0
- source/dist/assets/worker-jPcDQKd3.js +0 -1
- source/dist/index.html +1 -1
- source/src/worker.js +37 -8
source/dist/assets/index-BlYoBjIf.js → assets/index-ZEJvQeY9.js
RENAMED
|
The diff for this file is too large to render.
See raw diff
|
|
|
assets/worker-BTGDtQEO.js
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
// Minified Vite build artifact of source/src/worker.js — do not edit by hand; regenerate via the build.
// Visible contents of this bundle:
//   h(t, s)   : dynamically imports the hub / parakeet / models chunks, resolves t (model version key)
//               to a repoId via MODELS, fetches model URLs, and builds a ParakeetModel via fromUrls.
//   fetch     : self.fetch is wrapped to log every request URL, response status/ok, and duration in ms.
//   IndexedDB : availability probe plus a best-effort indexedDB.databases() listing — logging only.
//   b(t, s)   : one-shot model loader guarded by `f` (isLoading flag) and `i` (model cache). Posts
//               "initiate" / "progress" / "done" download messages, races the load against a 3e5 ms
//               (5 min) timeout Promise, then warms up with 1 s of silence (Float32Array(16e3) @ 16 kHz).
//               NOTE(review): the timeout's setTimeout is never cleared after a successful race, so a
//               stray timer stays pending for up to 5 min — harmless (its reject is ignored) but untidy.
//   y(t, s)   : transcribes a Float32Array at 16 kHz with timestamps/confidences; returns text, words,
//               sentence groups, and latency/RTF metadata. Throws if the model is not loaded yet.
//   M(t)      : folds word timestamps into sentences, splitting after tokens ending in . ! or ?.
//   onmessage : dispatches "load" / "transcribe" / "ping"; unknown types and thrown errors are posted
//               back as {status:"error"} messages.
async function h(t,s={}){const{getParakeetModel:e}=await import("./hub-BlMT648A.js"),{ParakeetModel:o}=await import("./parakeet-xcg-VHSn.js"),{MODELS:r}=await import("./models-Dq2DCePq.js"),a=r[t]?.repoId||t,n=await e(a,s);return o.fromUrls({...n.urls,filenames:n.filenames,preprocessorBackend:n.preprocessorBackend,...s})}const w=self.fetch;self.fetch=async(...t)=>{const s=typeof t[0]=="string"?t[0]:t[0]?.url||t[0];console.log("[Worker] Fetch request:",s);try{const e=performance.now(),o=await w(...t),r=performance.now()-e;return console.log(`[Worker] Fetch response: ${s} - ${o.status} ${o.ok} (${r.toFixed(0)}ms)`),o}catch(e){throw console.error("[Worker] Fetch error:",s,e),e}};console.log("[Worker] Checking IndexedDB availability...");self.indexedDB?(console.log("[Worker] IndexedDB is available"),self.indexedDB.databases().then(t=>{console.log("[Worker] Existing IndexedDB databases:",t.map(s=>s.name))}).catch(t=>{console.log("[Worker] Could not list databases:",t)})):console.log("[Worker] IndexedDB is NOT available");let i=null,f=!1;async function b(t="parakeet-tdt-0.6b-v3",s={}){if(f)return{status:"loading",message:"Model is already loading..."};if(i)return{status:"ready",message:"Model already loaded"};try{f=!0;const e=s.device==="webgpu"?"webgpu-hybrid":"wasm";self.postMessage({status:"loading",message:`Downloading Parakeet ${t}... 
(~2.5GB, this may take 1-2 minutes)`}),console.log(`[Worker] Loading model with backend: ${e}`);const o=e==="wasm"?{encoderQuant:"int8",decoderQuant:"int8",preprocessor:"nemo128"}:{encoderQuant:"fp32",decoderQuant:"int8",preprocessor:"nemo128"},r=new Set,a=c=>{console.log("[Worker] Progress callback received:",c);const{loaded:d,total:u,file:l}=c,k=u>0?Math.round(d/u*100):0;r.has(l)||(r.add(l),console.log("[Worker] Initiating download:",l),self.postMessage({status:"initiate",file:l,progress:0,total:u})),self.postMessage({status:"progress",file:l,progress:k,total:u,loaded:d}),d>=u&&(console.log("[Worker] Download complete:",l),self.postMessage({status:"done",file:l}))};console.log("[Worker] Calling fromHub with modelVersion:",t),console.log("[Worker] Options:",{backend:e,...o});const n=3e5,g=new Promise((c,d)=>{setTimeout(()=>{d(new Error(`Model loading timed out after ${n/1e3}s. This may indicate a CORS issue or network problem.`))},n)});try{i=await Promise.race([h(t,{backend:e,...o,progress:a}),g]),console.log("[Worker] fromHub returned, model loaded")}catch(c){throw console.error("[Worker] fromHub failed:",c),c}const m=i.session?.executionProviders?.[0]||e;console.log(`[Worker] Model loaded. Requested: ${e}, Actual provider: ${m}`),self.postMessage({status:"loading",message:"Model downloaded, warming up..."});const p=new Float32Array(16e3);return await i.transcribe(p,16e3),self.postMessage({status:"ready",message:`Parakeet ${t} loaded successfully!`,device:e,modelVersion:t}),{status:"ready",device:e}}catch(e){return console.error("Failed to load model:",e),self.postMessage({status:"error",message:`Failed to load model: ${e.message}`,error:e.toString()}),{status:"error",error:e.toString()}}finally{f=!1}}async function y(t,s=null){if(!i)throw new Error("Model not loaded. 
Call load() first.");try{const e=performance.now(),o=await i.transcribe(t,16e3,{returnTimestamps:!0,returnConfidences:!0,temperature:1}),a=(performance.now()-e)/1e3,n=t.length/16e3,g=n/a,m=M(o.words||[]);return{text:o.utterance_text||"",sentences:m,words:o.words||[],chunks:o.words||[],metadata:{latency:a,audioDuration:n,rtf:g,language:s,confidence:o.confidence_scores,metrics:o.metrics}}}catch(e){throw console.error("Transcription error:",e),e}}function M(t){if(!t||t.length===0)return[];const s=[];let e=[],o=t[0].start_time||0;for(let r=0;r<t.length;r++){const a=t[r];e.push(a.text),(/[.!?]$/.test(a.text)||r===t.length-1)&&(s.push({text:e.join(" ").trim(),start:o,end:a.end_time||a.start_time||0}),r<t.length-1&&(e=[],o=t[r+1].start_time||a.end_time||0))}return s}self.onmessage=async t=>{const{type:s,data:e}=t.data;try{switch(s){case"load":await b(e?.modelVersion,e?.options||{});break;case"transcribe":const o=await y(e.audio,e.language);self.postMessage({status:"transcription",result:o});break;case"ping":self.postMessage({status:"pong"});break;default:self.postMessage({status:"error",message:`Unknown message type: ${s}`})}}catch(o){self.postMessage({status:"error",message:o.message,error:o.toString()})}};
|
index.html
CHANGED
|
@@ -6,7 +6,7 @@
|
|
| 6 |
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
| 7 |
<meta name="description" content="Real-time speech recognition with Parakeet STT and WebGPU acceleration. Progressive transcription demo." />
|
| 8 |
<title>Parakeet STT Progressive Transcription | WebGPU Demo</title>
|
| 9 |
-
<script type="module" crossorigin src="/assets/index-BlYoBjIf.js"></script>
|
| 10 |
<link rel="stylesheet" crossorigin href="/assets/index-4ud1a0so.css">
|
| 11 |
</head>
|
| 12 |
<body>
|
|
|
|
| 6 |
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
| 7 |
<meta name="description" content="Real-time speech recognition with Parakeet STT and WebGPU acceleration. Progressive transcription demo." />
|
| 8 |
<title>Parakeet STT Progressive Transcription | WebGPU Demo</title>
|
| 9 |
+
<script type="module" crossorigin src="/assets/index-ZEJvQeY9.js"></script>
|
| 10 |
<link rel="stylesheet" crossorigin href="/assets/index-4ud1a0so.css">
|
| 11 |
</head>
|
| 12 |
<body>
|
source/dist/assets/index-ZEJvQeY9.js
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
source/dist/assets/worker-BTGDtQEO.js
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
// Minified Vite build artifact of source/src/worker.js — do not edit by hand; regenerate via the build.
// (This copy under source/dist/assets/ is byte-identical to the deployed assets/worker-BTGDtQEO.js.)
// Visible contents of this bundle:
//   h(t, s)   : dynamically imports the hub / parakeet / models chunks, resolves t to a repoId via
//               MODELS, fetches model URLs, and builds a ParakeetModel via fromUrls.
//   fetch     : self.fetch is wrapped to log every request URL, response status/ok, and duration in ms.
//   IndexedDB : availability probe plus a best-effort indexedDB.databases() listing — logging only.
//   b(t, s)   : one-shot model loader guarded by `f` (isLoading) and `i` (model cache). Posts
//               "initiate" / "progress" / "done" messages, races the load against a 3e5 ms (5 min)
//               timeout Promise, then warms up with 1 s of silence (Float32Array(16e3) @ 16 kHz).
//               NOTE(review): the timeout's setTimeout is never cleared after a successful race, so a
//               stray timer stays pending for up to 5 min — harmless (its reject is ignored) but untidy.
//   y(t, s)   : transcribes a Float32Array at 16 kHz with timestamps/confidences; returns text, words,
//               sentence groups, and latency/RTF metadata. Throws if the model is not loaded yet.
//   M(t)      : folds word timestamps into sentences, splitting after tokens ending in . ! or ?.
//   onmessage : dispatches "load" / "transcribe" / "ping"; unknown types and thrown errors are posted
//               back as {status:"error"} messages.
async function h(t,s={}){const{getParakeetModel:e}=await import("./hub-BlMT648A.js"),{ParakeetModel:o}=await import("./parakeet-xcg-VHSn.js"),{MODELS:r}=await import("./models-Dq2DCePq.js"),a=r[t]?.repoId||t,n=await e(a,s);return o.fromUrls({...n.urls,filenames:n.filenames,preprocessorBackend:n.preprocessorBackend,...s})}const w=self.fetch;self.fetch=async(...t)=>{const s=typeof t[0]=="string"?t[0]:t[0]?.url||t[0];console.log("[Worker] Fetch request:",s);try{const e=performance.now(),o=await w(...t),r=performance.now()-e;return console.log(`[Worker] Fetch response: ${s} - ${o.status} ${o.ok} (${r.toFixed(0)}ms)`),o}catch(e){throw console.error("[Worker] Fetch error:",s,e),e}};console.log("[Worker] Checking IndexedDB availability...");self.indexedDB?(console.log("[Worker] IndexedDB is available"),self.indexedDB.databases().then(t=>{console.log("[Worker] Existing IndexedDB databases:",t.map(s=>s.name))}).catch(t=>{console.log("[Worker] Could not list databases:",t)})):console.log("[Worker] IndexedDB is NOT available");let i=null,f=!1;async function b(t="parakeet-tdt-0.6b-v3",s={}){if(f)return{status:"loading",message:"Model is already loading..."};if(i)return{status:"ready",message:"Model already loaded"};try{f=!0;const e=s.device==="webgpu"?"webgpu-hybrid":"wasm";self.postMessage({status:"loading",message:`Downloading Parakeet ${t}... 
(~2.5GB, this may take 1-2 minutes)`}),console.log(`[Worker] Loading model with backend: ${e}`);const o=e==="wasm"?{encoderQuant:"int8",decoderQuant:"int8",preprocessor:"nemo128"}:{encoderQuant:"fp32",decoderQuant:"int8",preprocessor:"nemo128"},r=new Set,a=c=>{console.log("[Worker] Progress callback received:",c);const{loaded:d,total:u,file:l}=c,k=u>0?Math.round(d/u*100):0;r.has(l)||(r.add(l),console.log("[Worker] Initiating download:",l),self.postMessage({status:"initiate",file:l,progress:0,total:u})),self.postMessage({status:"progress",file:l,progress:k,total:u,loaded:d}),d>=u&&(console.log("[Worker] Download complete:",l),self.postMessage({status:"done",file:l}))};console.log("[Worker] Calling fromHub with modelVersion:",t),console.log("[Worker] Options:",{backend:e,...o});const n=3e5,g=new Promise((c,d)=>{setTimeout(()=>{d(new Error(`Model loading timed out after ${n/1e3}s. This may indicate a CORS issue or network problem.`))},n)});try{i=await Promise.race([h(t,{backend:e,...o,progress:a}),g]),console.log("[Worker] fromHub returned, model loaded")}catch(c){throw console.error("[Worker] fromHub failed:",c),c}const m=i.session?.executionProviders?.[0]||e;console.log(`[Worker] Model loaded. Requested: ${e}, Actual provider: ${m}`),self.postMessage({status:"loading",message:"Model downloaded, warming up..."});const p=new Float32Array(16e3);return await i.transcribe(p,16e3),self.postMessage({status:"ready",message:`Parakeet ${t} loaded successfully!`,device:e,modelVersion:t}),{status:"ready",device:e}}catch(e){return console.error("Failed to load model:",e),self.postMessage({status:"error",message:`Failed to load model: ${e.message}`,error:e.toString()}),{status:"error",error:e.toString()}}finally{f=!1}}async function y(t,s=null){if(!i)throw new Error("Model not loaded. 
Call load() first.");try{const e=performance.now(),o=await i.transcribe(t,16e3,{returnTimestamps:!0,returnConfidences:!0,temperature:1}),a=(performance.now()-e)/1e3,n=t.length/16e3,g=n/a,m=M(o.words||[]);return{text:o.utterance_text||"",sentences:m,words:o.words||[],chunks:o.words||[],metadata:{latency:a,audioDuration:n,rtf:g,language:s,confidence:o.confidence_scores,metrics:o.metrics}}}catch(e){throw console.error("Transcription error:",e),e}}function M(t){if(!t||t.length===0)return[];const s=[];let e=[],o=t[0].start_time||0;for(let r=0;r<t.length;r++){const a=t[r];e.push(a.text),(/[.!?]$/.test(a.text)||r===t.length-1)&&(s.push({text:e.join(" ").trim(),start:o,end:a.end_time||a.start_time||0}),r<t.length-1&&(e=[],o=t[r+1].start_time||a.end_time||0))}return s}self.onmessage=async t=>{const{type:s,data:e}=t.data;try{switch(s){case"load":await b(e?.modelVersion,e?.options||{});break;case"transcribe":const o=await y(e.audio,e.language);self.postMessage({status:"transcription",result:o});break;case"ping":self.postMessage({status:"pong"});break;default:self.postMessage({status:"error",message:`Unknown message type: ${s}`})}}catch(o){self.postMessage({status:"error",message:o.message,error:o.toString()})}};
|
source/dist/assets/worker-jPcDQKd3.js
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
// Minified Vite build artifact (PREVIOUS build, deleted by this commit) of source/src/worker.js.
// Differences from its replacement, as far as visible here: the fetch wrapper logs only URL and
// status (no duration), there is no IndexedDB probe, and the model load has no timeout race.
// Visible contents:
//   p(t, s)   : dynamically imports the hub / parakeet / models chunks, resolves t to a repoId via
//               MODELS, fetches model URLs, and builds a ParakeetModel via fromUrls.
//   fetch     : self.fetch wrapped to log request, response status/ok, and errors.
//   k(t, s)   : one-shot model loader guarded by `m` (isLoading) and `i` (model cache). Posts
//               "initiate" / "progress" / "done" messages, awaits p() directly (no timeout), then
//               warms up with 1 s of silence (Float32Array(16e3) @ 16 kHz).
//   w(t, s)   : transcribes a Float32Array at 16 kHz with timestamps/confidences; returns text, words,
//               sentence groups, and latency/RTF metadata. Throws if the model is not loaded yet.
//   y(t)      : folds word timestamps into sentences, splitting after tokens ending in . ! or ?.
//   onmessage : dispatches "load" / "transcribe" / "ping"; unknown types and thrown errors are posted
//               back as {status:"error"} messages.
async function p(t,s={}){const{getParakeetModel:e}=await import("./hub-BlMT648A.js"),{ParakeetModel:r}=await import("./parakeet-xcg-VHSn.js"),{MODELS:o}=await import("./models-Dq2DCePq.js"),a=o[t]?.repoId||t,n=await e(a,s);return r.fromUrls({...n.urls,filenames:n.filenames,preprocessorBackend:n.preprocessorBackend,...s})}const h=self.fetch;self.fetch=async(...t)=>{console.log("[Worker] Fetch request:",t[0]);try{const s=await h(...t);return console.log("[Worker] Fetch response:",t[0],s.status,s.ok),s}catch(s){throw console.error("[Worker] Fetch error:",t[0],s),s}};let i=null,m=!1;async function k(t="parakeet-tdt-0.6b-v3",s={}){if(m)return{status:"loading",message:"Model is already loading..."};if(i)return{status:"ready",message:"Model already loaded"};try{m=!0;const e=s.device==="webgpu"?"webgpu-hybrid":"wasm";self.postMessage({status:"loading",message:`Downloading Parakeet ${t}... (~2.5GB, this may take 1-2 minutes)`}),console.log(`[Worker] Loading model with backend: ${e}`);const r=e==="wasm"?{encoderQuant:"int8",decoderQuant:"int8",preprocessor:"nemo128"}:{encoderQuant:"fp32",decoderQuant:"int8",preprocessor:"nemo128"},o=new Set,a=c=>{console.log("[Worker] Progress callback received:",c);const{loaded:g,total:d,file:l}=c,f=d>0?Math.round(g/d*100):0;o.has(l)||(o.add(l),console.log("[Worker] Initiating download:",l),self.postMessage({status:"initiate",file:l,progress:0,total:d})),self.postMessage({status:"progress",file:l,progress:f,total:d,loaded:g}),g>=d&&(console.log("[Worker] Download complete:",l),self.postMessage({status:"done",file:l}))};console.log("[Worker] Calling fromHub with modelVersion:",t),console.log("[Worker] Options:",{backend:e,...r});try{i=await p(t,{backend:e,...r,progress:a}),console.log("[Worker] fromHub returned, model loaded")}catch(c){throw console.error("[Worker] fromHub failed:",c),c}const n=i.session?.executionProviders?.[0]||e;console.log(`[Worker] Model loaded. 
Requested: ${e}, Actual provider: ${n}`),self.postMessage({status:"loading",message:"Model downloaded, warming up..."});const u=new Float32Array(16e3);return await i.transcribe(u,16e3),self.postMessage({status:"ready",message:`Parakeet ${t} loaded successfully!`,device:e,modelVersion:t}),{status:"ready",device:e}}catch(e){return console.error("Failed to load model:",e),self.postMessage({status:"error",message:`Failed to load model: ${e.message}`,error:e.toString()}),{status:"error",error:e.toString()}}finally{m=!1}}async function w(t,s=null){if(!i)throw new Error("Model not loaded. Call load() first.");try{const e=performance.now(),r=await i.transcribe(t,16e3,{returnTimestamps:!0,returnConfidences:!0,temperature:1}),a=(performance.now()-e)/1e3,n=t.length/16e3,u=n/a,c=y(r.words||[]);return{text:r.utterance_text||"",sentences:c,words:r.words||[],chunks:r.words||[],metadata:{latency:a,audioDuration:n,rtf:u,language:s,confidence:r.confidence_scores,metrics:r.metrics}}}catch(e){throw console.error("Transcription error:",e),e}}function y(t){if(!t||t.length===0)return[];const s=[];let e=[],r=t[0].start_time||0;for(let o=0;o<t.length;o++){const a=t[o];e.push(a.text),(/[.!?]$/.test(a.text)||o===t.length-1)&&(s.push({text:e.join(" ").trim(),start:r,end:a.end_time||a.start_time||0}),o<t.length-1&&(e=[],r=t[o+1].start_time||a.end_time||0))}return s}self.onmessage=async t=>{const{type:s,data:e}=t.data;try{switch(s){case"load":await k(e?.modelVersion,e?.options||{});break;case"transcribe":const r=await w(e.audio,e.language);self.postMessage({status:"transcription",result:r});break;case"ping":self.postMessage({status:"pong"});break;default:self.postMessage({status:"error",message:`Unknown message type: ${s}`})}}catch(r){self.postMessage({status:"error",message:r.message,error:r.toString()})}};
|
|
|
|
|
|
source/dist/index.html
CHANGED
|
@@ -6,7 +6,7 @@
|
|
| 6 |
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
| 7 |
<meta name="description" content="Real-time speech recognition with Parakeet STT and WebGPU acceleration. Progressive transcription demo." />
|
| 8 |
<title>Parakeet STT Progressive Transcription | WebGPU Demo</title>
|
| 9 |
-
<script type="module" crossorigin src="/assets/index-BlYoBjIf.js"></script>
|
| 10 |
<link rel="stylesheet" crossorigin href="/assets/index-4ud1a0so.css">
|
| 11 |
</head>
|
| 12 |
<body>
|
|
|
|
| 6 |
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
| 7 |
<meta name="description" content="Real-time speech recognition with Parakeet STT and WebGPU acceleration. Progressive transcription demo." />
|
| 8 |
<title>Parakeet STT Progressive Transcription | WebGPU Demo</title>
|
| 9 |
+
<script type="module" crossorigin src="/assets/index-ZEJvQeY9.js"></script>
|
| 10 |
<link rel="stylesheet" crossorigin href="/assets/index-4ud1a0so.css">
|
| 11 |
</head>
|
| 12 |
<body>
|
source/src/worker.js
CHANGED
|
@@ -10,17 +10,35 @@ import { fromHub } from 'parakeet.js';
|
|
| 10 |
// Intercept fetch to log all network requests in worker
|
| 11 |
const originalFetch = self.fetch;
|
| 12 |
self.fetch = async (...args) => {
|
| 13 |
-
|
|
|
|
| 14 |
try {
|
|
|
|
| 15 |
const response = await originalFetch(...args);
|
| 16 |
-
|
|
|
|
| 17 |
return response;
|
| 18 |
} catch (error) {
|
| 19 |
-
console.error('[Worker] Fetch error:', args[0], error);
|
| 20 |
throw error;
|
| 21 |
}
|
| 22 |
};
|
| 23 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 24 |
let model = null;
|
| 25 |
let isLoading = false;
|
| 26 |
|
|
@@ -100,12 +118,23 @@ async function loadModel(modelVersion = 'parakeet-tdt-0.6b-v3', options = {}) {
|
|
| 100 |
console.log('[Worker] Calling fromHub with modelVersion:', modelVersion);
|
| 101 |
console.log('[Worker] Options:', { backend, ...quantization });
|
| 102 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 103 |
try {
|
| 104 |
-
model = await
|
| 105 |
-
|
| 106 |
-
|
| 107 |
-
|
| 108 |
-
|
|
|
|
|
|
|
|
|
|
| 109 |
console.log('[Worker] fromHub returned, model loaded');
|
| 110 |
} catch (loadError) {
|
| 111 |
console.error('[Worker] fromHub failed:', loadError);
|
|
|
|
| 10 |
// Intercept fetch to log all network requests in worker
|
| 11 |
const originalFetch = self.fetch;
|
| 12 |
self.fetch = async (...args) => {
|
| 13 |
+
const url = typeof args[0] === 'string' ? args[0] : args[0]?.url || args[0];
|
| 14 |
+
console.log('[Worker] Fetch request:', url);
|
| 15 |
try {
|
| 16 |
+
const startTime = performance.now();
|
| 17 |
const response = await originalFetch(...args);
|
| 18 |
+
const duration = performance.now() - startTime;
|
| 19 |
+
console.log(`[Worker] Fetch response: ${url} - ${response.status} ${response.ok} (${duration.toFixed(0)}ms)`);
|
| 20 |
return response;
|
| 21 |
} catch (error) {
|
| 22 |
+
console.error('[Worker] Fetch error:', url, error);
|
| 23 |
throw error;
|
| 24 |
}
|
| 25 |
};
|
| 26 |
|
| 27 |
+
// Log IndexedDB operations
|
| 28 |
+
console.log('[Worker] Checking IndexedDB availability...');
|
| 29 |
+
if (self.indexedDB) {
|
| 30 |
+
console.log('[Worker] IndexedDB is available');
|
| 31 |
+
|
| 32 |
+
// Try to list databases
|
| 33 |
+
self.indexedDB.databases().then(dbs => {
|
| 34 |
+
console.log('[Worker] Existing IndexedDB databases:', dbs.map(db => db.name));
|
| 35 |
+
}).catch(err => {
|
| 36 |
+
console.log('[Worker] Could not list databases:', err);
|
| 37 |
+
});
|
| 38 |
+
} else {
|
| 39 |
+
console.log('[Worker] IndexedDB is NOT available');
|
| 40 |
+
}
|
| 41 |
+
|
| 42 |
let model = null;
|
| 43 |
let isLoading = false;
|
| 44 |
|
|
|
|
| 118 |
console.log('[Worker] Calling fromHub with modelVersion:', modelVersion);
|
| 119 |
console.log('[Worker] Options:', { backend, ...quantization });
|
| 120 |
|
| 121 |
+
// Add a timeout to detect if loading hangs
|
| 122 |
+
const timeoutMs = 300000; // 5 minutes
|
| 123 |
+
const timeoutPromise = new Promise((_, reject) => {
|
| 124 |
+
setTimeout(() => {
|
| 125 |
+
reject(new Error(`Model loading timed out after ${timeoutMs / 1000}s. This may indicate a CORS issue or network problem.`));
|
| 126 |
+
}, timeoutMs);
|
| 127 |
+
});
|
| 128 |
+
|
| 129 |
try {
|
| 130 |
+
model = await Promise.race([
|
| 131 |
+
fromHub(modelVersion, {
|
| 132 |
+
backend,
|
| 133 |
+
...quantization,
|
| 134 |
+
progress: progressCallback
|
| 135 |
+
}),
|
| 136 |
+
timeoutPromise
|
| 137 |
+
]);
|
| 138 |
console.log('[Worker] fromHub returned, model loaded');
|
| 139 |
} catch (loadError) {
|
| 140 |
console.error('[Worker] fromHub failed:', loadError);
|