Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -78,7 +78,7 @@ def _to_np_audio(x) -> np.ndarray:
|
|
| 78 |
if isinstance(x, torch.Tensor):
|
| 79 |
if x.dtype != torch.float32: x = x.float()
|
| 80 |
return x.detach().cpu().contiguous().view(-1).numpy()
|
| 81 |
-
x = np.asarray(x)
|
| 82 |
if x.ndim > 1: x = x.reshape(-1)
|
| 83 |
return x.astype(np.float32, copy=False) if x.dtype != np.float32 else x
|
| 84 |
|
|
@@ -174,7 +174,7 @@ def _latents_disk_path(key: str) -> pathlib.Path: return PERSIST_LATENTS_DIR / f
|
|
| 174 |
|
| 175 |
def _save_latents_to_disk(key: str, gpt, spk): torch.save({"gpt_cond_latent": gpt.cpu(), "speaker_embedding": spk.cpu()}, _latents_disk_path(key))
|
| 176 |
|
| 177 |
-
def _load_latents_from_disk(key: str):
|
| 178 |
p = _latents_disk_path(key)
|
| 179 |
if not p.exists(): return None
|
| 180 |
obj = torch.load(p, map_location="cpu"); return obj["gpt_cond_latent"], obj["speaker_embedding"]
|
|
@@ -227,7 +227,7 @@ def _pcm_f32_to_b64(x: np.ndarray) -> str:
|
|
| 227 |
_SENT_END = re.compile(r"([\.!\?…]+[»\")\]]*\s+)")
|
| 228 |
_WS = re.compile(r"\s+")
|
| 229 |
def _fast_split(text: str, limit: int) -> List[str]:
|
| 230 |
-
text = text.strip()
|
| 231 |
if not text: return []
|
| 232 |
parts=[]; start=0
|
| 233 |
for m in _SENT_END.finditer(text): parts.append(text[start:m.end()].strip()); start=m.end()
|
|
@@ -242,16 +242,16 @@ def _fast_split(text: str, limit: int) -> List[str]:
|
|
| 242 |
w=_WS.split(s); acc=""
|
| 243 |
for tok in w:
|
| 244 |
if len(acc)+1+len(tok)<=limit: acc=(acc+" "+tok).strip() if acc else tok
|
| 245 |
-
else:
|
| 246 |
if acc: chunks.append(acc); acc=tok
|
| 247 |
cur=acc
|
| 248 |
if cur: chunks.append(cur)
|
| 249 |
return [c for c in chunks if c]
|
| 250 |
|
| 251 |
def _split_text_smart(text_in: str, lang_short: str, chunk_limit: int, first_segment_limit: int) -> List[str]:
|
| 252 |
-
text_in = text_in.strip()
|
| 253 |
if not text_in: return []
|
| 254 |
-
parts=[]
|
| 255 |
if len(text_in)>first_segment_limit:
|
| 256 |
head=text_in[:first_segment_limit]; m=re.search(r".*[\.!\?…»)]", head)
|
| 257 |
if m and len(m.group(0))>30: head=m.group(0)
|
|
@@ -420,7 +420,6 @@ class StreamBufferProcessor extends AudioWorkletProcessor {
|
|
| 420 |
} else if (msg.type === 'set_thresholds') {
|
| 421 |
this.thresholdSamples = msg.thresholdSamples|0;
|
| 422 |
this.lowWatermarkSamples = msg.lowWatermarkSamples|0;
|
| 423 |
-
// handshake back to main
|
| 424 |
this.port.postMessage({ type: 'thresholds_ready',
|
| 425 |
thresholdSamples: this.thresholdSamples,
|
| 426 |
lowWatermarkSamples: this.lowWatermarkSamples,
|
|
@@ -468,54 +467,54 @@ class StreamBufferProcessor extends AudioWorkletProcessor {
|
|
| 468 |
registerProcessor('stream-buffer', StreamBufferProcessor);
|
| 469 |
"""
|
| 470 |
|
| 471 |
-
# ---------- INIT + player (
|
| 472 |
-
INIT_RESET_AND_PLAY_JS =
|
| 473 |
-
() => {
|
| 474 |
const AC = window.AudioContext || window.webkitAudioContext;
|
| 475 |
if (!AC) return;
|
| 476 |
|
| 477 |
-
function getLocalFloat(key, defVal) {
|
| 478 |
-
try {
|
| 479 |
return defVal;
|
| 480 |
-
}
|
| 481 |
|
| 482 |
-
const DEFAULT_PREROLL =
|
| 483 |
-
const MAX_PREROLL =
|
| 484 |
-
const STEP_PREROLL =
|
| 485 |
-
const DEFAULT_LOWWM =
|
| 486 |
|
| 487 |
let PREROLL_S = getLocalFloat("tts_preroll_s", DEFAULT_PREROLL);
|
| 488 |
let LOW_WM_S = getLocalFloat("tts_lowwm_s", DEFAULT_LOWWM);
|
| 489 |
|
| 490 |
-
const blob = new Blob([`
|
| 491 |
const url = URL.createObjectURL(blob);
|
| 492 |
|
| 493 |
-
const ctx = new AC({
|
| 494 |
-
const meta = {
|
| 495 |
t_click_ms: performance.now(), t_first_push_ms: null, t_first_audio_ms: null,
|
| 496 |
server: null, click_to_first_chunk_s: null, ctx_sr: ctx.sampleRate,
|
| 497 |
thresholds: null
|
| 498 |
-
}
|
| 499 |
|
| 500 |
let workletNode = null, gate = null, connected = false;
|
| 501 |
let ready = false; // WAIT for thresholds_ready
|
| 502 |
const pending = []; // queue chunks before ready
|
| 503 |
let underrunSeen = false;
|
| 504 |
|
| 505 |
-
function toSec(ms) {
|
| 506 |
-
function p3(x) {
|
| 507 |
-
function logUpdate() {
|
| 508 |
const el = document.getElementById('wa-log'); if (!el) return;
|
| 509 |
-
const s = meta.server || {
|
| 510 |
const lines = [];
|
| 511 |
lines.push("Клік (Згенераваць): 0.000 s");
|
| 512 |
-
if (meta.t_first_push_ms) {
|
| 513 |
lines.push("Першы чанк прыйшоў: " + (toSec(meta.t_first_push_ms - meta.t_click_ms)).toFixed(3) + " s");
|
| 514 |
-
if (meta.t_first_audio_ms) {
|
| 515 |
lines.push("Пачатак прайгравання: " + (toSec(meta.t_first_audio_ms - meta.t_click_ms)).toFixed(3) + " s");
|
| 516 |
lines.push("Затрымка (чанк→аўдыя): " + (toSec(meta.t_first_audio_ms - meta.t_first_push_ms)).toFixed(3) + " s");
|
| 517 |
-
}
|
| 518 |
-
}
|
| 519 |
lines.push("");
|
| 520 |
lines.push("— Серверныя метрыкі —");
|
| 521 |
lines.push("Latents (умоўны голас): " + p3(s.latents_s));
|
|
@@ -524,101 +523,97 @@ registerProcessor('stream-buffer', StreamBufferProcessor);
|
|
| 524 |
lines.push("Усё да 1-га чанка: " + p3(s.until_first_chunk_total_s));
|
| 525 |
lines.push("Іншая серверная апрац.: " + p3(s.server_unaccounted_before_first_chunk_s));
|
| 526 |
lines.push("Запіс WAV: " + p3(s.file_write_s));
|
| 527 |
-
if (meta.click_to_first_chunk_s !== null && s.until_first_chunk_total_s !== null) {
|
| 528 |
const est = Math.max(0, meta.click_to_first_chunk_s - s.until_first_chunk_total_s);
|
| 529 |
lines.push(""); lines.push("Ацэнка чаргі ZeroGPU + сеткі: " + est.toFixed(3) + " s");
|
| 530 |
-
}
|
| 531 |
lines.push("");
|
| 532 |
lines.push("Статус стриму: " + (connected ? "playing" : "stopped"));
|
| 533 |
lines.push("PREROLL: " + PREROLL_S.toFixed(3) + " s | LOW WM: " + LOW_WM_S.toFixed(3) + " s");
|
| 534 |
lines.push("ctx.sampleRate: " + meta.ctx_sr + " Hz");
|
| 535 |
-
if (meta.thresholds) {
|
| 536 |
lines.push("thresholdSamples: " + meta.thresholds.thresholdSamples + " | lowWM: " + meta.thresholds.lowWatermarkSamples);
|
| 537 |
-
}
|
| 538 |
el.textContent = lines.join("\\n");
|
| 539 |
-
}
|
| 540 |
|
| 541 |
-
(async () => {
|
| 542 |
await ctx.audioWorklet.addModule(url);
|
| 543 |
workletNode = new AudioWorkletNode(ctx, 'stream-buffer');
|
| 544 |
gate = ctx.createGain(); gate.gain.value = 1.0;
|
| 545 |
workletNode.connect(gate);
|
| 546 |
|
| 547 |
-
workletNode.port.onmessage = (e) => {
|
| 548 |
-
const msg = e.data || {
|
| 549 |
-
if (msg.type === 'thresholds_ready') {
|
| 550 |
-
ready = true; meta.thresholds = {
|
| 551 |
-
|
| 552 |
-
for (const f32 of pending) {{
|
| 553 |
-
workletNode.port.postMessage({{ type:'push', buffer:f32.buffer }}, [f32.buffer]);
|
| 554 |
-
}}
|
| 555 |
pending.length = 0;
|
| 556 |
logUpdate();
|
| 557 |
-
}
|
| 558 |
-
if (meta.t_first_audio_ms === null) {
|
| 559 |
-
}
|
| 560 |
underrunSeen = true;
|
| 561 |
-
}
|
| 562 |
-
}
|
| 563 |
|
| 564 |
-
// send thresholds using
|
| 565 |
-
workletNode.port.postMessage({
|
| 566 |
type: 'set_thresholds',
|
| 567 |
thresholdSamples: Math.floor(PREROLL_S * ctx.sampleRate),
|
| 568 |
lowWatermarkSamples: Math.floor(LOW_WM_S * ctx.sampleRate),
|
| 569 |
-
}
|
| 570 |
|
| 571 |
-
window.__wa = {
|
| 572 |
ctx, workletNode, gate,
|
| 573 |
-
get playing() {
|
| 574 |
meta,
|
| 575 |
-
push: async (f32) => {
|
| 576 |
-
try {
|
| 577 |
-
if (!meta.t_first_push_ms) {
|
| 578 |
-
|
| 579 |
-
|
| 580 |
-
|
| 581 |
-
if (!connected) {{ try {{ gate.connect(ctx.destination); connected = true; }} catch(e) {{}} }}
|
| 582 |
logUpdate();
|
| 583 |
-
}
|
| 584 |
-
stop: () => {
|
| 585 |
-
reset: () => {
|
| 586 |
-
try {
|
| 587 |
-
if (underrunSeen) {
|
| 588 |
-
const cur = Math.min(
|
| 589 |
localStorage.setItem("tts_preroll_s", String(cur));
|
| 590 |
-
}
|
| 591 |
-
}
|
| 592 |
-
if (workletNode) {
|
| 593 |
-
workletNode.port.postMessage({
|
| 594 |
ready = false; pending.length = 0;
|
| 595 |
-
workletNode.port.postMessage({
|
| 596 |
type:'set_thresholds',
|
| 597 |
thresholdSamples: Math.floor(PREROLL_S * ctx.sampleRate),
|
| 598 |
lowWatermarkSamples: Math.floor(LOW_WM_S * ctx.sampleRate),
|
| 599 |
-
}
|
| 600 |
-
}
|
| 601 |
-
if (connected) {
|
| 602 |
meta.t_first_push_ms = null; meta.t_first_audio_ms = null; meta.click_to_first_chunk_s = null; logUpdate();
|
| 603 |
-
}
|
| 604 |
updateLog: logUpdate,
|
| 605 |
-
applyClient: (pr, lw) => {
|
| 606 |
PREROLL_S = pr; LOW_WM_S = lw;
|
| 607 |
-
try {
|
| 608 |
-
if (workletNode) {
|
| 609 |
-
workletNode.port.postMessage({
|
| 610 |
type:'set_thresholds',
|
| 611 |
thresholdSamples: Math.floor(PREROLL_S * ctx.sampleRate),
|
| 612 |
lowWatermarkSamples: Math.floor(LOW_WM_S * ctx.sampleRate),
|
| 613 |
-
}
|
| 614 |
-
}
|
| 615 |
logUpdate();
|
| 616 |
-
}
|
| 617 |
-
}
|
| 618 |
logUpdate();
|
| 619 |
-
}
|
| 620 |
-
}
|
| 621 |
-
"""
|
| 622 |
|
| 623 |
STOP_JS = "() => { if (window.__wa) window.__wa.stop(); }"
|
| 624 |
PLAY_JS = "() => { if (window.__wa) { try { window.__wa.ctx.resume(); } catch(e){}; if (!window.__wa.playing) { try { window.__wa.gate.connect(window.__wa.ctx.destination); } catch(e){} } window.__wa.updateLog && window.__wa.updateLog(); } }"
|
|
@@ -655,6 +650,17 @@ registerProcessor('stream-buffer', StreamBufferProcessor);
|
|
| 655 |
"""
|
| 656 |
PLAY_FINAL_JS = "(() => { const el=document.getElementById('final-audio'); const a=el?.querySelector('audio'); if (a) { try{a.play();}catch(e){} } })()"
|
| 657 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 658 |
# wiring
|
| 659 |
apply_btn.click(fn=None, inputs=[], outputs=[], js=APPLY_JS)
|
| 660 |
reset_btn.click(fn=None, inputs=[], outputs=[], js=RESET_JS)
|
|
|
|
| 78 |
if isinstance(x, torch.Tensor):
|
| 79 |
if x.dtype != torch.float32: x = x.float()
|
| 80 |
return x.detach().cpu().contiguous().view(-1).numpy()
|
| 81 |
+
x = np.asarray(x)
|
| 82 |
if x.ndim > 1: x = x.reshape(-1)
|
| 83 |
return x.astype(np.float32, copy=False) if x.dtype != np.float32 else x
|
| 84 |
|
|
|
|
| 174 |
|
| 175 |
def _save_latents_to_disk(key: str, gpt, spk):
    """Persist the conditioning latents for *key* as a CPU-side torch checkpoint.

    Tensors are moved to CPU first so the cache file is device-independent.
    """
    payload = {
        "gpt_cond_latent": gpt.cpu(),
        "speaker_embedding": spk.cpu(),
    }
    torch.save(payload, _latents_disk_path(key))
|
| 176 |
|
| 177 |
+
def _load_latents_from_disk(key: str):
    """Load previously cached conditioning latents for *key*.

    Returns a ``(gpt_cond_latent, speaker_embedding)`` tuple, or ``None``
    when no checkpoint file exists for this key.
    """
    path = _latents_disk_path(key)
    if not path.exists():
        return None
    # NOTE(review): assumes the file was written by _save_latents_to_disk,
    # i.e. a dict with these two keys; torch.load runs pickle, so the cache
    # directory is trusted by construction — confirm it is app-local.
    state = torch.load(path, map_location="cpu")
    return state["gpt_cond_latent"], state["speaker_embedding"]
|
|
|
|
| 227 |
_SENT_END = re.compile(r"([\.!\?…]+[»\")\]]*\s+)")
|
| 228 |
_WS = re.compile(r"\s+")
|
| 229 |
def _fast_split(text: str, limit: int) -> List[str]:
|
| 230 |
+
text = text.strip()
|
| 231 |
if not text: return []
|
| 232 |
parts=[]; start=0
|
| 233 |
for m in _SENT_END.finditer(text): parts.append(text[start:m.end()].strip()); start=m.end()
|
|
|
|
| 242 |
w=_WS.split(s); acc=""
|
| 243 |
for tok in w:
|
| 244 |
if len(acc)+1+len(tok)<=limit: acc=(acc+" "+tok).strip() if acc else tok
|
| 245 |
+
else:
|
| 246 |
if acc: chunks.append(acc); acc=tok
|
| 247 |
cur=acc
|
| 248 |
if cur: chunks.append(cur)
|
| 249 |
return [c for c in chunks if c]
|
| 250 |
|
| 251 |
def _split_text_smart(text_in: str, lang_short: str, chunk_limit: int, first_segment_limit: int) -> List[str]:
|
| 252 |
+
text_in = text_in.strip()
|
| 253 |
if not text_in: return []
|
| 254 |
+
parts=[]
|
| 255 |
if len(text_in)>first_segment_limit:
|
| 256 |
head=text_in[:first_segment_limit]; m=re.search(r".*[\.!\?…»)]", head)
|
| 257 |
if m and len(m.group(0))>30: head=m.group(0)
|
|
|
|
| 420 |
} else if (msg.type === 'set_thresholds') {
|
| 421 |
this.thresholdSamples = msg.thresholdSamples|0;
|
| 422 |
this.lowWatermarkSamples = msg.lowWatermarkSamples|0;
|
|
|
|
| 423 |
this.port.postMessage({ type: 'thresholds_ready',
|
| 424 |
thresholdSamples: this.thresholdSamples,
|
| 425 |
lowWatermarkSamples: this.lowWatermarkSamples,
|
|
|
|
| 467 |
registerProcessor('stream-buffer', StreamBufferProcessor);
|
| 468 |
"""
|
| 469 |
|
| 470 |
+
# ---------- INIT + player (no f-string; placeholders replaced) ----------
|
| 471 |
+
INIT_RESET_AND_PLAY_JS = """
|
| 472 |
+
() => {
|
| 473 |
const AC = window.AudioContext || window.webkitAudioContext;
|
| 474 |
if (!AC) return;
|
| 475 |
|
| 476 |
+
function getLocalFloat(key, defVal) {
|
| 477 |
+
try { const v = parseFloat(localStorage.getItem(key)); if (isFinite(v) && v > 0) return v; } catch(e) {}
|
| 478 |
return defVal;
|
| 479 |
+
}
|
| 480 |
|
| 481 |
+
const DEFAULT_PREROLL = __DEF_PR__;
|
| 482 |
+
const MAX_PREROLL = __MAX_PR__;
|
| 483 |
+
const STEP_PREROLL = __STEP_PR__;
|
| 484 |
+
const DEFAULT_LOWWM = __DEF_LW__;
|
| 485 |
|
| 486 |
let PREROLL_S = getLocalFloat("tts_preroll_s", DEFAULT_PREROLL);
|
| 487 |
let LOW_WM_S = getLocalFloat("tts_lowwm_s", DEFAULT_LOWWM);
|
| 488 |
|
| 489 |
+
const blob = new Blob([`__AW_CODE__`], { type: 'application/javascript' });
|
| 490 |
const url = URL.createObjectURL(blob);
|
| 491 |
|
| 492 |
+
const ctx = new AC({ sampleRate: __SR__ });
|
| 493 |
+
const meta = {
|
| 494 |
t_click_ms: performance.now(), t_first_push_ms: null, t_first_audio_ms: null,
|
| 495 |
server: null, click_to_first_chunk_s: null, ctx_sr: ctx.sampleRate,
|
| 496 |
thresholds: null
|
| 497 |
+
};
|
| 498 |
|
| 499 |
let workletNode = null, gate = null, connected = false;
|
| 500 |
let ready = false; // WAIT for thresholds_ready
|
| 501 |
const pending = []; // queue chunks before ready
|
| 502 |
let underrunSeen = false;
|
| 503 |
|
| 504 |
+
function toSec(ms) { return (ms/1000); }
|
| 505 |
+
function p3(x) { return (x==null)?'n/a':x.toFixed(3)+' s'; }
|
| 506 |
+
function logUpdate() {
|
| 507 |
const el = document.getElementById('wa-log'); if (!el) return;
|
| 508 |
+
const s = meta.server || {};
|
| 509 |
const lines = [];
|
| 510 |
lines.push("Клік (Згенераваць): 0.000 s");
|
| 511 |
+
if (meta.t_first_push_ms) {
|
| 512 |
lines.push("Першы чанк прыйшоў: " + (toSec(meta.t_first_push_ms - meta.t_click_ms)).toFixed(3) + " s");
|
| 513 |
+
if (meta.t_first_audio_ms) {
|
| 514 |
lines.push("Пачатак прайгравання: " + (toSec(meta.t_first_audio_ms - meta.t_click_ms)).toFixed(3) + " s");
|
| 515 |
lines.push("Затрымка (чанк→аўдыя): " + (toSec(meta.t_first_audio_ms - meta.t_first_push_ms)).toFixed(3) + " s");
|
| 516 |
+
}
|
| 517 |
+
}
|
| 518 |
lines.push("");
|
| 519 |
lines.push("— Серверныя метрыкі —");
|
| 520 |
lines.push("Latents (умоўны голас): " + p3(s.latents_s));
|
|
|
|
| 523 |
lines.push("Усё да 1-га чанка: " + p3(s.until_first_chunk_total_s));
|
| 524 |
lines.push("Іншая серверная апрац.: " + p3(s.server_unaccounted_before_first_chunk_s));
|
| 525 |
lines.push("Запіс WAV: " + p3(s.file_write_s));
|
| 526 |
+
if (meta.click_to_first_chunk_s !== null && s.until_first_chunk_total_s !== null) {
|
| 527 |
const est = Math.max(0, meta.click_to_first_chunk_s - s.until_first_chunk_total_s);
|
| 528 |
lines.push(""); lines.push("Ацэнка чаргі ZeroGPU + сеткі: " + est.toFixed(3) + " s");
|
| 529 |
+
}
|
| 530 |
lines.push("");
|
| 531 |
lines.push("Статус стриму: " + (connected ? "playing" : "stopped"));
|
| 532 |
lines.push("PREROLL: " + PREROLL_S.toFixed(3) + " s | LOW WM: " + LOW_WM_S.toFixed(3) + " s");
|
| 533 |
lines.push("ctx.sampleRate: " + meta.ctx_sr + " Hz");
|
| 534 |
+
if (meta.thresholds) {
|
| 535 |
lines.push("thresholdSamples: " + meta.thresholds.thresholdSamples + " | lowWM: " + meta.thresholds.lowWatermarkSamples);
|
| 536 |
+
}
|
| 537 |
el.textContent = lines.join("\\n");
|
| 538 |
+
}
|
| 539 |
|
| 540 |
+
(async () => {
|
| 541 |
await ctx.audioWorklet.addModule(url);
|
| 542 |
workletNode = new AudioWorkletNode(ctx, 'stream-buffer');
|
| 543 |
gate = ctx.createGain(); gate.gain.value = 1.0;
|
| 544 |
workletNode.connect(gate);
|
| 545 |
|
| 546 |
+
workletNode.port.onmessage = (e) => {
|
| 547 |
+
const msg = e.data || {};
|
| 548 |
+
if (msg.type === 'thresholds_ready') {
|
| 549 |
+
ready = true; meta.thresholds = { thresholdSamples: msg.thresholdSamples, lowWatermarkSamples: msg.lowWatermarkSamples };
|
| 550 |
+
for (const f32 of pending) { workletNode.port.postMessage({ type:'push', buffer:f32.buffer }, [f32.buffer]); }
|
|
|
|
|
|
|
|
|
|
| 551 |
pending.length = 0;
|
| 552 |
logUpdate();
|
| 553 |
+
} else if (msg.type === 'first_audio') {
|
| 554 |
+
if (meta.t_first_audio_ms === null) { meta.t_first_audio_ms = performance.now(); logUpdate(); }
|
| 555 |
+
} else if (msg.type === 'underrun') {
|
| 556 |
underrunSeen = true;
|
| 557 |
+
}
|
| 558 |
+
};
|
| 559 |
|
| 560 |
+
// send thresholds using ctx.sampleRate
|
| 561 |
+
workletNode.port.postMessage({
|
| 562 |
type: 'set_thresholds',
|
| 563 |
thresholdSamples: Math.floor(PREROLL_S * ctx.sampleRate),
|
| 564 |
lowWatermarkSamples: Math.floor(LOW_WM_S * ctx.sampleRate),
|
| 565 |
+
});
|
| 566 |
|
| 567 |
+
window.__wa = {
|
| 568 |
ctx, workletNode, gate,
|
| 569 |
+
get playing() { return connected; },
|
| 570 |
meta,
|
| 571 |
+
push: async (f32) => {
|
| 572 |
+
try { await ctx.resume(); } catch(e) {}
|
| 573 |
+
if (!meta.t_first_push_ms) { meta.t_first_push_ms = performance.now(); meta.click_to_first_chunk_s = (meta.t_first_push_ms - meta.t_click_ms)/1000; }
|
| 574 |
+
if (!ready) { pending.push(f32); }
|
| 575 |
+
else { workletNode.port.postMessage({ type:'push', buffer:f32.buffer }, [f32.buffer]); }
|
| 576 |
+
if (!connected) { try { gate.connect(ctx.destination); connected = true; } catch(e) {} }
|
|
|
|
| 577 |
logUpdate();
|
| 578 |
+
},
|
| 579 |
+
stop: () => { if (connected) { try { gate.disconnect(); } catch(e) {} connected=false; logUpdate(); } },
|
| 580 |
+
reset: () => {
|
| 581 |
+
try {
|
| 582 |
+
if (underrunSeen) {
|
| 583 |
+
const cur = Math.min(MAX_PREROLL, PREROLL_S + STEP_PREROLL);
|
| 584 |
localStorage.setItem("tts_preroll_s", String(cur));
|
| 585 |
+
}
|
| 586 |
+
} catch(e) {}
|
| 587 |
+
if (workletNode) {
|
| 588 |
+
workletNode.port.postMessage({ type:'reset' });
|
| 589 |
ready = false; pending.length = 0;
|
| 590 |
+
workletNode.port.postMessage({
|
| 591 |
type:'set_thresholds',
|
| 592 |
thresholdSamples: Math.floor(PREROLL_S * ctx.sampleRate),
|
| 593 |
lowWatermarkSamples: Math.floor(LOW_WM_S * ctx.sampleRate),
|
| 594 |
+
});
|
| 595 |
+
}
|
| 596 |
+
if (connected) { try { gate.disconnect(); } catch(e) {} connected=false; }
|
| 597 |
meta.t_first_push_ms = null; meta.t_first_audio_ms = null; meta.click_to_first_chunk_s = null; logUpdate();
|
| 598 |
+
},
|
| 599 |
updateLog: logUpdate,
|
| 600 |
+
applyClient: (pr, lw) => {
|
| 601 |
PREROLL_S = pr; LOW_WM_S = lw;
|
| 602 |
+
try { localStorage.setItem("tts_preroll_s", String(pr)); localStorage.setItem("tts_lowwm_s", String(lw)); } catch(e) {}
|
| 603 |
+
if (workletNode) {
|
| 604 |
+
workletNode.port.postMessage({
|
| 605 |
type:'set_thresholds',
|
| 606 |
thresholdSamples: Math.floor(PREROLL_S * ctx.sampleRate),
|
| 607 |
lowWatermarkSamples: Math.floor(LOW_WM_S * ctx.sampleRate),
|
| 608 |
+
});
|
| 609 |
+
}
|
| 610 |
logUpdate();
|
| 611 |
+
}
|
| 612 |
+
};
|
| 613 |
logUpdate();
|
| 614 |
+
})();
|
| 615 |
+
}
|
| 616 |
+
"""
|
| 617 |
|
| 618 |
# Gradio `js=` callback: stop streamed playback via the page-global
# `window.__wa` handle (its `stop()` disconnects the gain gate).
STOP_JS = "() => { if (window.__wa) window.__wa.stop(); }"
|
| 619 |
# Gradio `js=` callback: resume the AudioContext and, if not already
# playing, reconnect the gate node to the destination, then refresh the log.
PLAY_JS = "() => { if (window.__wa) { try { window.__wa.ctx.resume(); } catch(e){}; if (!window.__wa.playing) { try { window.__wa.gate.connect(window.__wa.ctx.destination); } catch(e){} } window.__wa.updateLog && window.__wa.updateLog(); } }"
|
|
|
|
| 650 |
"""
|
| 651 |
# Gradio `js=` callback: auto-play the <audio> element inside the
# '#final-audio' component once the final WAV has been rendered.
PLAY_FINAL_JS = "(() => { const el=document.getElementById('final-audio'); const a=el?.querySelector('audio'); if (a) { try{a.play();}catch(e){} } })()"
|
| 652 |
|
| 653 |
+
# ---- replace placeholders to avoid f-strings ----
# The player JS contains literal `{}` braces, so the worklet source and the
# numeric tuning constants are spliced in via plain string substitution
# rather than an f-string.  Substitutions are applied in declaration order,
# matching the original chained .replace() calls.
for _placeholder, _value in (
    ("__AW_CODE__", AUDIO_WORKLET_PROCESSOR),
    ("__SR__", str(sampling_rate)),
    ("__DEF_PR__", str(DEF_CLIENT_PREROLL)),
    ("__MAX_PR__", str(MAX_CLIENT_PREROLL)),
    ("__STEP_PR__", str(STEP_CLIENT_PREROLL)),
    ("__DEF_LW__", str(DEF_CLIENT_LOWWM)),
):
    INIT_RESET_AND_PLAY_JS = INIT_RESET_AND_PLAY_JS.replace(_placeholder, _value)
|
| 663 |
+
|
| 664 |
# wiring
|
| 665 |
apply_btn.click(fn=None, inputs=[], outputs=[], js=APPLY_JS)
|
| 666 |
reset_btn.click(fn=None, inputs=[], outputs=[], js=RESET_JS)
|