import { Component, OnDestroy, OnInit, signal, ViewChild, ElementRef, ChangeDetectorRef } from '@angular/core';
import { CommonModule } from '@angular/common';
import { Router, NavigationStart } from '@angular/router';
import { Subscription } from 'rxjs';

// Vendor-prefixed speech recognition constructors are not in the standard DOM typings.
declare global {
  interface Window {
    webkitSpeechRecognition?: any;
    SpeechRecognition?: any;
  }
}

/** One question/answer round captured during an automated session. */
type QAResult = {
  question: string;
  transcript: string;
  language: string;
  avgPitchHz: number | null;
  avgVolume: number | null;
  audioUrl: string;
  startedAt: number;
  endedAt: number;
};

@Component({
  standalone: true,
  selector: 'app-py-detect',
  imports: [CommonModule],
  templateUrl: './py-detect.component.html',
  styleUrls: ['./py-detect.component.css']
})
export class PyDetectComponent implements OnInit, OnDestroy {

  // High-level session state driving the template.
  status = signal<'idle' | 'asking' | 'idle-wait' | 'recording' | 'processing'>('idle');
  autoMode = signal(true);
  micOn = signal(false);
  ttsEnabled = signal(true);
  recognizerReady = signal(false);

  // Guards async callbacks that may fire after the component is destroyed.
  private isActive = false;

  currentQuestion = signal<string>('');
  questionIndex = signal<number>(0);
  log: QAResult[] = [];

  private routerSubscription?: Subscription;

  constructor(private router: Router, private cdr: ChangeDetectorRef) {
    // Stop any in-progress speech synthesis when the user navigates away.
    this.routerSubscription = this.router.events.subscribe(event => {
      if (event instanceof NavigationStart) {
        if (window.speechSynthesis) {
          window.speechSynthesis.cancel();
        }
      }
    });
  }

  // Audio capture.
  private mediaStream?: MediaStream;
  private mediaRecorder?: MediaRecorder;
  private audioChunks: Blob[] = [];

  // Web Audio analysis graph.
  private audioCtx?: AudioContext;
  private analyser?: AnalyserNode;
  private sourceNode?: MediaStreamAudioSourceNode;

  // Rolling pitch/volume samples collected while an answer is recorded.
  private pitchSamples: number[] = [];
  private volumeSamples: number[] = [];
  private analyserBuffer?: Float32Array;
  private analyserTimer?: any;

  // Speech recognition state.
  private recognition?: any;
  private transcriptSoFar = '';
  private detectedLang = 'auto';

  // Maximum time allowed for one answer.
  private maxAnswerMs = 10_000;
  private silenceTimeout?: any;

  // Interval between analyser samples.
  private analyserWindowMs = 100;

  private seedQuestions = [
    'Please introduce yourself in two sentences.',
    'What motivates you to take on challenging tasks?',
    'Describe a situation where you solved a tough problem.',
    'How do you handle disagreements in a team?',
    'What is a recent technology you learned and why?'
  ];

  // Button enabled/disabled state.
  startDisabled = signal(false);
  stopDisabled = signal(true);
  resumeDisabled = signal(true);
  submitDisabled = signal(true);

  // Video capture.
  public videoStatus: string = '';
  public videoStream?: MediaStream;
  public videoRecorder?: MediaRecorder;
  @ViewChild('videoElement', { static: false }) videoElement?: ElementRef<HTMLVideoElement>;
  public videoChunks: Blob[] = [];
  public videoAnswers: Blob[] = [];

  // Interview UI state.
  progress: number = 0;
  caseId: string = 'CASE-007';
  officer: string = 'Ganesh';
  currentQuestionText: string = '';
  isRecording: boolean = false;
  isProcessing: boolean = false;
  transcriptLines: string[] = [];
  showSummary: boolean = false;
  summaryData: { question: string; answer: string; duration: number }[] = [];

  navigateHome() {
    if (window.speechSynthesis) {
      window.speechSynthesis.cancel();
    }
    this.router.navigate(['/']);
  }

  goToInfoPage() {
    if (window.speechSynthesis) {
      window.speechSynthesis.cancel();
    }
    this.router.navigate(['/infopage']);
  }

  ngOnInit(): void {
    this.isActive = true;
  }

  ngOnDestroy(): void {
    this.isActive = false;
    if (this.routerSubscription) {
      this.routerSubscription.unsubscribe();
    }
    this.cleanupAll();
    this.stopVideoRecording();
    this.micOn.set(false);
    this.recognizerReady.set(false);
    this.videoStatus = '';
    if (this.videoStream) {
      this.videoStream.getTracks().forEach(t => t.stop());
      this.videoStream = undefined;
    }
    this.videoRecorder = undefined;
    if (window.speechSynthesis) {
      window.speechSynthesis.cancel();
    }
  }

  async start(): Promise<void> {
    if (this.status() !== 'idle') return;
    this.status.set('asking');
    this.setupRecognition();
    this.recognizerReady.set(!!this.recognition);
    await this.startCamera();
    // A session is now running: disable Start, enable Stop.
    this.startDisabled.set(true);
    this.stopDisabled.set(false);
    this.resumeDisabled.set(true);
    this.submitDisabled.set(true);
    this.autoMode.set(true);
    this.nextQuestionLoopRunning = true;
    this.nextQuestionLoop();
  }

  stopAll(): void {
    this.autoMode.set(false);
    this.nextQuestionLoopRunning = false;
    if (this.videoRecorder && this.videoRecorder.state === 'recording') {
      this.videoRecorder.stop();
    }
    if (this.mediaRecorder && this.mediaRecorder.state !== 'inactive') {
      this.cleanupRecording();
    }
    this.stopRecognition();
    if (window.speechSynthesis) {
      window.speechSynthesis.cancel();
    }

    if (this.videoStream) {
      this.videoStream.getTracks().forEach(t => t.stop());
      this.videoStream = undefined;
      if (this.videoElement?.nativeElement) {
        this.videoElement.nativeElement.srcObject = null;
      }
    }
    this.videoRecorder = undefined;
    this.videoStatus = '';
    this.micOn.set(false);
    this.recognizerReady.set(false);
    this.status.set('idle');
    // Session stopped: Stop no longer applies, Resume and Submit become available.
    this.startDisabled.set(false);
    this.stopDisabled.set(true);
    this.resumeDisabled.set(false);
    this.submitDisabled.set(false);
  }

  public async resume() {
    // Await the camera so the recorder checks below see a live stream.
    await this.startCamera();

    if (this.videoRecorder && this.videoRecorder.state === 'paused') {
      this.videoRecorder.resume();
      this.videoStatus = 'Recording...';
      this.stopDisabled.set(false);
      this.resumeDisabled.set(true);
      this.submitDisabled.set(true);
    }

    // If the question loop is not running, restart it.
    if (this.status() === 'idle') {
      this.autoMode.set(true);
      this.nextQuestionLoopRunning = true;
      this.startDisabled.set(true);
      this.stopDisabled.set(false);
      this.resumeDisabled.set(true);
      this.submitDisabled.set(true);
      this.nextQuestionLoop();
    }
  }

  public submitAll() {
    this.stopAll();
    this.stopVideoRecording();
    this.videoStatus = 'Submitted!';
    this.startDisabled.set(true);
    this.stopDisabled.set(true);
    this.resumeDisabled.set(true);
    this.submitDisabled.set(true);

    // Placeholder scores until real analysis is wired in.
    const truePercentage = 70;
    const falsePercentage = 30;
    this.router.navigate(['/validationpage'], { state: { truePercentage, falsePercentage } });
  }

  private nextQuestionLoopRunning = false;

  // Drives the per-question cycle: asking -> idle-wait -> recording -> processing.
  private async nextQuestionLoop(): Promise<void> {
    while (this.autoMode() && this.nextQuestionLoopRunning) {
      const q = await this.fetchNextQuestion();
      this.currentQuestion.set(q);
      this.status.set('asking');
      if (this.ttsEnabled()) {
        await this.speak(q);
      }

      // Give the respondent a moment to think before recording begins.
      this.status.set('idle-wait');
      await this.sleep(5000);

      this.status.set('recording');
      let userSpoke = false;
      this.transcriptSoFar = '';
      this.startRecognition('en-IN');

      // Poll until the recognizer hears anything, then start the video capture.
      // The detected words serve only as a voice-activity trigger.
      await new Promise<void>((resolve) => {
        const checkSpeech = () => {
          if (this.transcriptSoFar.trim().length > 0 && !userSpoke) {
            userSpoke = true;
            this.startVideoRecording();
            resolve();
          } else {
            setTimeout(checkSpeech, 200);
          }
        };
        checkSpeech();
      });

      // Stop the probe recognizer; captureAnswerWithAnalysis starts its own.
      // Capturing here, rather than after an extra sleep, keeps the logged
      // timestamps and transcript aligned with the answer actually given.
      this.stopRecognition();

      const startedAt = Date.now();
      const { audioUrl, avgPitchHz, avgVolume, transcript, language } =
        await this.captureAnswerWithAnalysis(this.maxAnswerMs);
      const endedAt = Date.now();
      this.stopVideoRecording();

      this.log.push({
        question: q,
        transcript,
        language,
        avgPitchHz,
        avgVolume,
        audioUrl,
        startedAt,
        endedAt
      });
      this.questionIndex.set(this.questionIndex() + 1);
      this.status.set('processing');
      await this.sleep(700);
    }
    this.status.set('idle');
    this.stopVideoRecording();
    this.startDisabled.set(false);
    this.stopDisabled.set(true);
    this.resumeDisabled.set(false);
    this.submitDisabled.set(false);
  }

  // Stub question source: cycles through the seed list.
  private async fetchNextQuestion(): Promise<string> {
    const i = this.questionIndex() % this.seedQuestions.length;
    return this.seedQuestions[i];
  }

  private speak(text: string): Promise<void> {
    return new Promise<void>((resolve) => {
      if (!this.isActive) return resolve();
      const synth = window.speechSynthesis;
      if (!synth) return resolve();

      const utter = new SpeechSynthesisUtterance(text);

      // Honor the preference order first, then fall back to any English voice.
      const prefer = ['en-IN', 'en-GB', 'en-US'];
      const voices = synth.getVoices();
      let v: SpeechSynthesisVoice | undefined;
      for (const lang of prefer) {
        v = voices.find(voice => voice.lang === lang);
        if (v) break;
      }
      v = v ?? voices.find(voice => voice.lang.toLowerCase().startsWith('en'));
      if (v) utter.voice = v;

      utter.rate = 1.0;
      utter.pitch = 1.0;
      utter.onend = () => resolve();
      utter.onerror = () => resolve();
      synth.cancel();
      synth.speak(utter);
    });
  }
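
  // Caveat: in several browsers speechSynthesis.getVoices() returns an empty
  // array until the 'voiceschanged' event fires, so the first utterance may
  // fall back to the default voice. A minimal guard, sketched as a
  // hypothetical helper that is not part of this component:
  //
  //   function voicesReady(synth: SpeechSynthesis): Promise<SpeechSynthesisVoice[]> {
  //     const now = synth.getVoices();
  //     if (now.length) return Promise.resolve(now);
  //     return new Promise(res => synth.addEventListener(
  //       'voiceschanged', () => res(synth.getVoices()), { once: true }));
  //   }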

  // Records one answer window: captures audio while running the analyser and
  // the recognizer in parallel, then returns the aggregated results.
  private async captureAnswerWithAnalysis(ms: number): Promise<{
    audioUrl: string; avgPitchHz: number | null; avgVolume: number | null; transcript: string; language: string;
  }> {
    this.audioChunks = [];
    this.pitchSamples = [];
    this.volumeSamples = [];
    this.transcriptSoFar = '';
    this.detectedLang = 'auto';

    this.mediaStream = await navigator.mediaDevices.getUserMedia({
      audio: { channelCount: 1, echoCancellation: true, noiseSuppression: true },
      video: false
    });
    this.micOn.set(true);

    // An empty mime string means "no supported candidate"; in that case let
    // the browser pick its default rather than passing an invalid option.
    const mime = this.chooseMimeType();
    this.mediaRecorder = mime
      ? new MediaRecorder(this.mediaStream, { mimeType: mime })
      : new MediaRecorder(this.mediaStream);
    this.mediaRecorder.ondataavailable = (e) => {
      if (e.data && e.data.size > 0) this.audioChunks.push(e.data);
    };

    this.startRecognition('en-IN');

    await this.startAnalyser(this.mediaStream);

    // Record in 200 ms chunks and stop once the answer window elapses.
    const recordPromise = new Promise<void>((resolve) => {
      this.mediaRecorder!.onstop = () => resolve();
      this.mediaRecorder!.start(200);
      setTimeout(() => {
        if (this.mediaRecorder && this.mediaRecorder.state !== 'inactive') {
          this.mediaRecorder.stop();
        }
      }, ms);
    });

    await recordPromise;

    this.stopAnalyser();
    this.stopRecognition();
    this.cleanupMediaStream();
    this.micOn.set(false);

    const blob = new Blob(this.audioChunks, { type: mime });
    const audioUrl = URL.createObjectURL(blob);

    const avgPitchHz = this.averageNonZero(this.pitchSamples) ?? null;
    const avgVolume = this.averageNonZero(this.volumeSamples) ?? null;

    const transcript = this.transcriptSoFar.trim();
    const language = this.detectedLang;

    return { audioUrl, avgPitchHz, avgVolume, transcript, language };
  }
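
  // Note: each object URL created above keeps its blob alive until
  // URL.revokeObjectURL(audioUrl) is called, so long sessions that accumulate
  // many answers may want to revoke URLs once the audio is no longer needed.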

  // Stops the entire session if nothing happens within five seconds.
  private waitForSilenceOrContinue() {
    if (this.silenceTimeout) clearTimeout(this.silenceTimeout);
    this.silenceTimeout = setTimeout(() => {
      this.stopAll();
    }, 5000);
  }

  // Returns the first recorder mime type this browser supports, or '' to let
  // the browser choose its own default.
  private chooseMimeType(): string {
    const candidates = [
      'audio/webm;codecs=opus',
      'audio/webm',
      'audio/mp4',
      'audio/mpeg'
    ];
    for (const c of candidates) {
      if (MediaRecorder.isTypeSupported(c)) return c;
    }
    return '';
  }
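
  // In practice Chromium-based browsers and Firefox usually take a webm/opus
  // branch while Safari typically reports audio/mp4; exact support varies by
  // version, which is why this probes at runtime instead of hardcoding a type.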

  private async startAnalyser(stream: MediaStream) {
    this.audioCtx = new (window.AudioContext || (window as any).webkitAudioContext)();
    this.sourceNode = this.audioCtx.createMediaStreamSource(stream);
    this.analyser = this.audioCtx.createAnalyser();
    this.analyser.fftSize = 2048;
    this.sourceNode.connect(this.analyser);

    this.analyserBuffer = new Float32Array(this.analyser.fftSize);

    // Sample pitch and volume once per analyser window.
    const tick = () => {
      if (!this.analyser || !this.analyserBuffer) return;
      this.analyser.getFloatTimeDomainData(this.analyserBuffer);

      const pitch = this.estimatePitchFromAutocorrelation(
        this.analyserBuffer, this.audioCtx!.sampleRate
      );
      const vol = this.rootMeanSquare(this.analyserBuffer);

      if (pitch) this.pitchSamples.push(pitch);
      this.volumeSamples.push(vol);

      this.analyserTimer = setTimeout(tick, this.analyserWindowMs);
    };
    tick();
  }
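
  // Sizing note: with fftSize = 2048 each snapshot covers 2048 samples; at a
  // common 48 kHz sample rate that is 2048 / 48000, roughly 43 ms of audio per
  // 100 ms window, so the averages are spot checks rather than full coverage.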

  private stopAnalyser() {
    if (this.analyserTimer) clearTimeout(this.analyserTimer);
    this.analyserTimer = null;
    if (this.sourceNode) { try { this.sourceNode.disconnect(); } catch { } }
    if (this.analyser) { try { this.analyser.disconnect(); } catch { } }
    if (this.audioCtx) { try { this.audioCtx.close(); } catch { } }
    this.sourceNode = undefined;
    this.analyser = undefined;
    this.audioCtx = undefined;
  }

  // Estimates the fundamental frequency of a time-domain frame using a
  // normalized average-magnitude-difference search over candidate lags.
  private estimatePitchFromAutocorrelation(buf: Float32Array, sampleRate: number): number | null {
    const size = buf.length;

    // Skip frames that are effectively silence.
    let rms = 0;
    for (let i = 0; i < size; i++) rms += buf[i] * buf[i];
    rms = Math.sqrt(rms / size);
    if (rms < 0.01) return null;

    const MAX_SAMPLES = Math.floor(size / 2);
    let bestOffset = -1;
    let bestCorr = 0;

    for (let offset = 1; offset < MAX_SAMPLES; offset++) {
      let corr = 0;
      for (let i = 0; i < MAX_SAMPLES; i++) {
        corr += Math.abs(buf[i] - buf[i + offset]);
      }
      corr = 1 - (corr / MAX_SAMPLES);
      // Keep the lag with the strongest similarity above the 0.9 threshold
      // (comparing against bestCorr, not the previous lag, so the best match
      // wins rather than the last qualifying one).
      if (corr > 0.9 && corr > bestCorr) {
        bestCorr = corr;
        bestOffset = offset;
      }
    }
    if (bestOffset > 0) {
      const freq = sampleRate / bestOffset;
      // Constrain to the typical range of human speech.
      if (freq >= 50 && freq <= 400) return Math.round(freq);
    }
    return null;
  }
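
  // Worked example: at a 48 kHz sample rate, a best lag of 320 samples maps to
  // 48000 / 320 = 150 Hz, inside the accepted 50-400 Hz speech band.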

  // RMS volume of a frame: sqrt((1/N) * sum(x_i^2)).
  private rootMeanSquare(buf: Float32Array): number {
    let sum = 0;
    for (let i = 0; i < buf.length; i++) sum += buf[i] * buf[i];
    return Math.sqrt(sum / buf.length);
  }

  // Mean of the finite, non-zero entries, rounded to two decimal places.
  private averageNonZero(arr: number[]): number | undefined {
    const f = arr.filter(x => x && isFinite(x));
    if (!f.length) return undefined;
    return Math.round((f.reduce((a, b) => a + b, 0) / f.length) * 100) / 100;
  }
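
  // e.g. averageNonZero([0, 120, 140.5]) drops the zero and returns
  // (120 + 140.5) / 2 = 130.25.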

  private setupRecognition() {
    const Ctor = window.webkitSpeechRecognition || window.SpeechRecognition;
    if (!Ctor) return;

    this.recognition = new Ctor();
    this.recognition.continuous = true;
    this.recognition.interimResults = false;

    this.recognition.onresult = (event: any) => {
      let finalText = '';

      // Collect only the finalized results from this batch of events.
      for (let i = event.resultIndex; i < event.results.length; i++) {
        const result = event.results[i];
        if (result.isFinal) {
          finalText += result[0].transcript.trim();
        }
      }

      // Append rather than overwrite: resultIndex points at the new batch, so
      // assignment here would drop phrases finalized by earlier events.
      if (finalText) {
        this.transcriptSoFar =
          (this.transcriptSoFar + ' ' + this.removeFillerWords(finalText.trim())).trim();
      }
    };

    this.recognition.onerror = (error: any) => {
      console.error('Speech recognition error', error);
    };

    this.recognition.onend = () => {
      console.log('Speech recognition has ended');
    };
  }
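
  // Caveat: SpeechRecognition is unevenly supported; Chromium exposes it as
  // webkitSpeechRecognition and typically performs recognition server-side.
  // Where no constructor exists this returns early, recognizerReady stays
  // false, and the voice-activity poll in nextQuestionLoop() can never fire,
  // so a production build would want a fallback trigger there.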

  // Strips common filler words and collapses the leftover whitespace.
  private removeFillerWords(text: string): string {
    const fillerWords = ['um', 'ah', 'like', 'you know', 'so', 'actually', 'basically'];
    const regex = new RegExp(`\\b(${fillerWords.join('|')})\\b`, 'gi');
    return text.replace(regex, '').replace(/\s+/g, ' ').trim();
  }
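
  // e.g. removeFillerWords('um I was like basically done') -> 'I was done'.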

  private startRecognition(lang: string) {
    if (!this.recognition) return;
    try {
      this.recognition.lang = lang;
      // start() throws if the recognizer is already running; swallow that case.
      this.recognition.start();
    } catch { }
  }

  private stopRecognition() {
    if (!this.recognition) return;
    try { this.recognition.stop(); } catch { }
  }

  private cleanupMediaStream() {
    if (this.mediaStream) {
      this.mediaStream.getTracks().forEach(t => t.stop());
    }
    this.mediaStream = undefined;
  }

  private cleanupRecording() {
    try { if (this.mediaRecorder && this.mediaRecorder.state !== 'inactive') this.mediaRecorder.stop(); } catch { }
    this.mediaRecorder = undefined;
    this.audioChunks = [];
    this.stopAnalyser();
    this.cleanupMediaStream();
  }

  private cleanupAll() {
    this.stopRecognition();
    this.cleanupRecording();
  }

  private sleep(ms: number) { return new Promise(res => setTimeout(res, ms)); }

  public async startCamera() {
    if (!this.videoStream) {
      this.videoStream = await navigator.mediaDevices.getUserMedia({ video: true, audio: false });
      if (this.videoElement?.nativeElement) {
        this.videoElement.nativeElement.srcObject = this.videoStream;
      }
    }
  }
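
  // getUserMedia() resolves only in secure contexts and after the user grants
  // permission; a rejection (e.g. NotAllowedError) propagates to callers such
  // as start() and resume(), so a try/catch there would keep the UI responsive.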

  public async startVideoRecording() {
    if (!this.videoStream) return;
    this.videoChunks = [];
    this.videoRecorder = new MediaRecorder(this.videoStream, { mimeType: 'video/webm' });
    this.videoRecorder.ondataavailable = (e) => {
      if (e.data && e.data.size > 0) this.videoChunks.push(e.data);
    };
    this.videoRecorder.onstart = () => {
      this.videoStatus = 'Recording...';
    };
    this.videoRecorder.onstop = () => {
      this.videoStatus = 'Stopped';
      // Keep the finished clip so each answer's video can be submitted later.
      const videoBlob = new Blob(this.videoChunks, { type: 'video/webm' });
      this.videoAnswers.push(videoBlob);
    };
    this.videoRecorder.start();
  }
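
  // Note: 'video/webm' is hardcoded here; browsers that do not support it
  // (notably Safari) throw from the MediaRecorder constructor. A runtime probe
  // in the style of chooseMimeType() over, say, ['video/webm;codecs=vp9',
  // 'video/webm', 'video/mp4'] would make this path portable.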

  public stopVideoRecording() {
    if (this.videoRecorder && this.videoRecorder.state !== 'inactive') {
      this.videoRecorder.stop();
    }
  }

  onStartInterview() {
    this.currentQuestionText = this.seedQuestions[0];
    this.progress = 0;
    this.isRecording = false;
    this.isProcessing = false;
  }

  pauseRecording() {
    this.isRecording = false;
  }

  skipQuestion() {
    const idx = this.seedQuestions.indexOf(this.currentQuestionText);
    if (idx >= 0 && idx < this.seedQuestions.length - 1) {
      this.currentQuestionText = this.seedQuestions[idx + 1];
      this.progress = Math.round(((idx + 2) / this.seedQuestions.length) * 100);
    } else {
      this.currentQuestionText = '';
      this.progress = 100;
      this.showSummary = true;
    }
  }

  closeSummary() {
    this.showSummary = false;
  }

  onStartRecording() {
    this.progress = 0;
    this.transcriptLines = [];
    this.showSummary = false;
    this.currentQuestionText = this.seedQuestions[0];
    this.status.set('asking');

    // Speak the first question, pause briefly, then listen. Running
    // startQuestionFlow() in parallel here would start a competing recognizer
    // while the question is still being spoken.
    this.speakQuestion(this.seedQuestions[0], () => {
      setTimeout(() => {
        this.status.set('recording');
        this.isRecording = true;
        this.startRecognitionWithRecording(0);
      }, 2000);
    });
  }

  speakQuestion(text: string, onEnd?: () => void) {
    const synth = window.speechSynthesis;
    if (!synth) {
      if (onEnd) onEnd();
      return;
    }
    const utter = new SpeechSynthesisUtterance(text);
    utter.lang = 'en-IN';
    synth.cancel();
    utter.onend = () => { if (onEnd) onEnd(); };
    synth.speak(utter);
  }

  startRecognitionWithRecording(idx: number) {
    const Ctor = window.webkitSpeechRecognition || window.SpeechRecognition;
    if (!Ctor) return;
    this.recognition = new Ctor();
    this.recognition.lang = 'en-IN';
    this.recognition.continuous = false;
    this.recognition.interimResults = false;
    let recordingStarted = false;

    // If nothing is heard within five seconds, stop listening; stopping fires
    // onend, which advances to the next question (scheduling it here as well
    // would advance twice).
    const silenceTimeout = setTimeout(() => {
      if (!recordingStarted) {
        this.recognition.stop();
      }
    }, 5000);

    this.recognition.onresult = (event: any) => {
      let finalText = '';
      for (let i = event.resultIndex; i < event.results.length; i++) {
        const result = event.results[i];
        if (result.isFinal) {
          finalText += result[0].transcript.trim();
        }
      }
      if (!recordingStarted) {
        recordingStarted = true;
        clearTimeout(silenceTimeout);
      }
      this.transcriptLines.push(finalText);
    };
    this.recognition.onend = () => {
      this.isRecording = false;
      this.status.set('processing');
      setTimeout(() => this.playNextQuestion(idx + 1), 5000);
    };
    this.recognition.onerror = () => {
      this.isRecording = false;
      this.status.set('idle');
    };
    this.recognition.start();
  }

  playNextQuestion(idx: number) {
    if (idx >= this.seedQuestions.length) {
      // All questions done: show the summary.
      this.currentQuestionText = '';
      this.progress = 100;
      this.showSummary = true;
      this.status.set('idle');
      this.cdr.detectChanges();
      this.isRecording = false;
      return;
    }
    this.currentQuestionText = this.seedQuestions[idx];
    this.cdr.detectChanges();
    this.status.set('asking');
    this.isRecording = false;
    this.speakQuestion(this.seedQuestions[idx], () => {
      // Give the respondent a pause after the question before listening.
      setTimeout(() => {
        this.status.set('recording');
        this.isRecording = true;
        this.startRecognitionWithRecording(idx);
      }, 5000);
    });
  }

  startQuestionFlow(idx: number) {
    if (idx >= this.seedQuestions.length) {
      this.currentQuestionText = '';
      this.progress = 100;
      this.showSummary = true;
      this.status.set('idle');
      return;
    }
    this.currentQuestionText = this.seedQuestions[idx];
    this.status.set('asking');
    setTimeout(() => {
      this.status.set('recording');
      this.isRecording = true;
      this.isProcessing = false;
      this.transcriptLines = [];
      this.startRecognition('en-IN');
    }, 1200);
  }

  get currentQuestionIndex(): number {
    return this.seedQuestions.indexOf(this.currentQuestionText) + 1;
  }

  get totalQuestions(): number {
    return this.seedQuestions.length;
  }

  startVoice() {
    this.status.set('recording');
  }
}