// Py-detect — src/app/view-details-page/view-details-page.component.ts
import { Component } from '@angular/core';
import { ActivatedRoute, Router } from '@angular/router';
import { QuestionDataService } from '../question-data.service';
import { trigger, transition, style, animate } from '@angular/animations';
import { CASE_DATA } from '../data/case-data';
@Component({
  selector: 'app-view-details-page',
  templateUrl: './view-details-page.component.html',
  styleUrls: ['./view-details-page.component.css'],
  animations: [
    // Fade/slide-in applied to a tab panel when it enters the DOM.
    trigger('fadeInTab', [
      transition(':enter', [
        style({ opacity: 0, transform: 'translateY(16px)' }),
        animate('500ms cubic-bezier(.4,0,.2,1)', style({ opacity: 1, transform: 'translateY(0)' }))
      ])
    ]),
    // cardFade used for individual metric cards when they enter/leave.
    trigger('cardFade', [
      transition(':enter', [
        style({ opacity: 0, transform: 'translateY(8px) scale(0.98)' }),
        // 300ms duration with a 60ms stagger delay.
        animate('300ms 60ms cubic-bezier(.2,.8,.2,1)', style({ opacity: 1, transform: 'translateY(0) scale(1)' }))
      ]),
      transition(':leave', [
        animate('180ms cubic-bezier(.4,0,.2,1)', style({ opacity: 0, transform: 'translateY(8px) scale(0.98)' }))
      ])
    ])
  ]
})
/**
 * Detail view for a single interview question (or all questions of a case).
 *
 * Navigation sources:
 *  - `/view-details/:id`      -> show question at index `id` across the full list;
 *  - `/view-details?caseId=…` (route param `caseId`) -> show all questions of one case;
 *  - neither param            -> default to the first question of the full list.
 *
 * Question data comes from QuestionDataService when populated, otherwise from
 * the static CASE_DATA fallback. Metric values are read dynamically off the
 * (untyped) question objects via the `key` fields of the metric group arrays.
 */
export class ViewDetailsPageComponent {
  /** Metric keys that match the percent-looking regex but are NOT percentages
   *  (WPM, blinks/min, disfluencies/min, dB) — never suffix these with '%'. */
  private static readonly NON_PERCENT_KEYS = new Set([
    'speechRate', 'blinkRate', 'disfluencyRate', 'energyLevel'
  ]);

  /** Currently visible tab. */
  activeTab: 'audio' | 'video' | 'validation' = 'audio';
  // When navigated from question summary with an index -> selectedQuestion is shown.
  selectedQuestion: any = null;
  // When navigated by caseId -> questions contains all questions for the case.
  caseId: string = '';
  caseDetails: any = null;
  questions: any[] = [];
  // Prefer service data; fallback to static CASE_DATA.
  private sourceData: any[] = CASE_DATA;
  // Layout mode persistence: 'single' or 'multiple' (stored in localStorage).
  layoutMode: 'single' | 'multiple' = 'single';
  // Key of the metric whose tooltip is currently open, or null.
  shownTooltip: string | null = null;
  // Transient animation state for nav buttons ('prev' | 'next' | null).
  navAnimating: 'prev' | 'next' | null = null;

  // ---- Audio metric groups (key = property on the question object, label = display text) ----
  coreMetrics = [
    { key: 'truthProbability', label: 'Truth Probability (%)', desc: 'AI-estimated likelihood the spoken response is truthful.' },
    { key: 'dominantEmotion', label: 'Dominant Emotion', desc: 'Primary emotion (Calm, Nervous, Defensive, Angry, Sad).' },
    { key: 'emotion', label: 'Emotion', desc: 'Detected emotion labels or values for the utterance.' },
    { key: 'duration', label: 'Duration', desc: 'Length of the spoken response or recording.' },
    { key: 'confidence', label: 'Confidence Level', desc: 'High / Moderate / Low (based on tone steadiness).' },
    { key: 'speechRate', label: 'Speech Rate (WPM)', desc: 'Words per minute. Faster or slower speech under stress.' },
    { key: 'sentiment', label: 'Sentiment Score', desc: 'Positive / Negative / Neutral tone.' }
  ];
  stressToneMetrics = [
    { key: 'pitchStability', label: 'Pitch Stability (Hz variation)', desc: 'Measures vocal frequency fluctuation.' },
    { key: 'stressLevel', label: 'Stress Level (%)', desc: 'Based on amplitude variation & tone sharpness.' },
    { key: 'blinkRate', label: 'Blink Rate', desc: 'Blinks per minute — often rises under stress.' },
    { key: 'energyLevel', label: 'Energy Level (dB)', desc: 'Average vocal energy / loudness.' },
    { key: 'voiceTremor', label: 'Voice Tremor Index', desc: 'Detects micro-shakes in tone.' }
  ];
  speechBehaviourMetrics = [
    { key: 'responseDelay', label: 'Response Delay (sec)', desc: 'Time between question end and answer start.' },
    { key: 'pausesPerMinute', label: 'Pauses per Minute', desc: 'Number of noticeable silences.' },
    { key: 'disfluencyRate', label: 'Disfluency Rate', desc: '“Uh”, “um”, or stuttering frequency.' },
    { key: 'articulationClarity', label: 'Articulation Clarity', desc: 'Pronunciation sharpness.' },
    { key: 'eyeContact', label: 'Eye Contact', desc: 'Estimate of eye contact during response (if available).' }
  ];
  advancedMetrics = [
    { key: 'spectralTilt', label: 'Spectral Tilt', desc: 'Balance between low/high frequency energy.' },
    { key: 'formantShifts', label: 'Formant Shifts (F1, F2)', desc: 'Resonance changes in vocal tract.' },
    { key: 'prosodyScore', label: 'Prosody Score', desc: 'Rhythm + intonation smoothness.' },
    { key: 'emotionStability', label: 'Emotion Stability Index', desc: 'Consistency of emotion across phrases.' }
  ];

  // ---- Video metric groups (Core, Behavioural, Advanced) — only these metrics per user request ----
  videoCoreMetrics = [
    { key: 'facialEmotion', label: 'Facial Emotion Detection', desc: 'Classifies visible emotions (Calm, Angry, Nervous, Sad, Confused, Fearful).' },
    { key: 'eyeContactConsistency', label: 'Eye Contact Consistency (%)', desc: 'Percentage of time the subject maintains eye contact.' },
    { key: 'blinkRate', label: 'Blink Rate (per minute)', desc: 'Blink frequency; increased blinking may indicate nervousness.' },
    { key: 'headMovement', label: 'Head Movement Analysis', desc: 'Detects nods, shakes, or tilts.' },
    { key: 'bodyMovementIndex', label: 'Body Movement Index', desc: 'Tracks posture shifts, fidgeting, or restlessness.' },
    { key: 'handMovementFreq', label: 'Hand Movement Frequency', desc: 'Detects gesturing or hiding hands.' },
    { key: 'microExpressionScore', label: 'Facial Micro-Expression Score', desc: 'AI-based confidence in identifying suppressed emotions.' }
  ];
  videoBehaviourMetrics = [
    { key: 'confidenceLevel', label: 'Confidence Level (%)', desc: 'Derived from posture, facial stability, and gestures.' },
    { key: 'stressLevel', label: 'Stress Level (%)', desc: 'Combines facial tension + movement instability.' },
    { key: 'emotionShiftTimeline', label: 'Emotion Shift Timeline', desc: 'Tracks emotion changes throughout questioning.' }
  ];
  videoAdvancedMetrics = [
    { key: 'gazeDeviation', label: 'Gaze Deviation Angle', desc: 'Measures deviation of eye direction from interviewer.' },
    { key: 'facialTempMap', label: 'Facial Temperature Map (IR)', desc: 'Detects heat changes around nose/forehead (IR optional).' },
    { key: 'postureStability', label: 'Posture Stability Index', desc: 'Monitors torso movement variance.' }
  ];
  // Final verified metrics used in Validation tab (only these metrics).
  videoFinalMetrics = [
    { key: 'physicalExpression', label: 'Physical Expression', desc: 'Summary of visible cues: posture, gestures, micro-expressions.' },
    { key: 'physicalScore', label: 'Physical Score (%)', desc: 'Overall body-language consistency and stability score.' },
    { key: 'voiceExpression', label: 'Voice Expression', desc: 'Combined emotional tone summary.' },
    { key: 'voiceScore', label: 'Voice Score (%)', desc: 'Confidence and emotional steadiness derived from tone and speech.' },
    { key: 'truthProbability', label: 'Truth Probability (%)', desc: 'Weighted average score combining both voice and video indicators.' },
  ];

  constructor(private route: ActivatedRoute, private router: Router, private questionDataService: QuestionDataService) {
    // Load persisted layout. localStorage can throw (SSR, private browsing,
    // disabled storage) — a failure must not prevent component construction.
    let saved: string | null = null;
    try {
      saved = localStorage.getItem('metricsLayoutMode');
    } catch {
      // Storage unavailable; keep the default layout.
    }
    if (saved === 'single' || saved === 'multiple') this.layoutMode = saved;

    // ActivatedRoute observables are completed by the router when the route is
    // destroyed, so no manual unsubscribe is required here.
    this.route.paramMap.subscribe(params => {
      const idParam = params.get('id');
      const caseParam = params.get('caseId');
      const svcQuestions = this.questionDataService.getQuestions() || [];
      const rawData = svcQuestions.length ? svcQuestions : this.sourceData;
      // Normalize so each item has `question` (fallback to `text`).
      const data = rawData.map(q => ({ ...(q || {}), question: q?.question || q?.text || '' }));
      this.caseDetails = this.questionDataService.getCaseDetails() || null;
      if (idParam !== null) {
        // Guard against non-numeric / out-of-range route params: show nothing
        // selected rather than an undefined entry.
        const idx = Number(idParam);
        this.selectedQuestion = (Number.isInteger(idx) && idx >= 0 ? data[idx] : null) ?? null;
        this.caseId = this.selectedQuestion?.caseId || this.caseDetails?.caseId || '';
        // Keep full data list so Previous/Next navigate across all questions.
        this.questions = data;
        this.activeTab = 'audio';
      } else if (caseParam) {
        this.caseId = caseParam;
        this.questions = data.filter(q => q.caseId === this.caseId);
        this.selectedQuestion = this.questions[0] ?? null;
      } else {
        this.questions = data;
        this.selectedQuestion = this.questions[0] ?? null;
        this.caseId = this.selectedQuestion?.caseId || '';
      }
    });
  }

  /** Persist and apply the metric-card layout ('single' column vs 'multiple'). */
  toggleLayout(mode: 'single' | 'multiple') {
    this.layoutMode = mode;
    try {
      localStorage.setItem('metricsLayoutMode', mode);
    } catch {
      // Storage unavailable; the layout still switches for this session.
    }
  }

  /** Toggle the tooltip for a metric key; clicking the open one closes it. */
  toggleTooltip(key: string) {
    this.shownTooltip = this.shownTooltip === key ? null : key;
  }

  /** True when a value is meaningfully present (0 counts; null/undefined/'' do not). */
  private static isPresent(v: any): boolean {
    return v !== undefined && v !== null && v !== '';
  }

  /**
   * Read a metric value off the question object and format it for display.
   * Numeric values for percent-style keys get a '%' suffix — except keys whose
   * unit is not a percentage (WPM, per-minute, dB); missing values render '—'.
   */
  getMetricValue(q: any, key: string): string {
    if (!q) return '—';
    // truthProbability uses the dedicated formatter.
    if (key === 'truthProbability') return this.formatTruthProbability(q);
    const val = q[key];
    if (val === undefined || val === null) return '—';
    if (typeof val === 'number') {
      const percentLike = /Level|Score|Probability|Rate|Tremor|Stability/i.test(key);
      if (percentLike && !ViewDetailsPageComponent.NON_PERCENT_KEYS.has(key)) return val + '%';
      return String(val);
    }
    return String(val);
  }

  /** Switch the active detail tab. */
  setTab(tab: 'audio' | 'video' | 'validation') {
    this.activeTab = tab;
  }

  /** Navigate back to the question summary list. */
  goBack() {
    this.router.navigate(['/question-summary']);
  }

  /** Navigate to the home page via the router (no full page reload). */
  navigateHome() {
    this.router.navigate(['/home']);
  }

  // ---- Helper methods for validation calculations ----

  /** Human-readable summary of body-language cues, or '—' when none available. */
  getPhysicalExpressionSummary(q: any): string {
    const parts: string[] = [];
    if (!q) return '—';
    if (q.posture) parts.push(q.posture);
    if (q.handMovement) parts.push(q.handMovement + ' hand');
    if (q.legMovement) parts.push(q.legMovement + ' leg');
    if (q.microExpressions) parts.push(q.microExpressions);
    return parts.length ? parts.join(', ') : '—';
  }

  /** Average of the available numeric body-language sub-scores, as 'NN%' or '—'. */
  getPhysicalScore(q: any): string {
    if (!q) return '—';
    const scores: number[] = [];
    if (typeof q.handMovement === 'number') scores.push(q.handMovement);
    if (typeof q.legMovement === 'number') scores.push(q.legMovement);
    // microExpressions may be a descriptive string containing a numeric score.
    const match = (q.microExpressions || '').match(/(\d+)/);
    if (match) scores.push(Number(match[1]));
    if (!scores.length) return '—';
    return Math.round(scores.reduce((a, b) => a + b, 0) / scores.length) + '%';
  }

  /** Compact one-line summary of vocal indicators, or '—' when none available. */
  getVoiceExpressionSummary(q: any): string {
    if (!q) return '—';
    const parts: string[] = [];
    if (q.stressLevel !== undefined) parts.push('Stress ' + q.stressLevel);
    if (q.confidence) parts.push('Conf ' + q.confidence);
    if (q.sentiment) parts.push('Sent ' + this.getSentimentPercent(q.sentiment));
    if (q.responseDelay) parts.push('Delay ' + q.responseDelay);
    return parts.length ? parts.join(', ') : '—';
  }

  /**
   * Average of vocal sub-scores, as 'NN%' or '—'. A categorical confidence is
   * mapped to a number (High=90, Moderate=60, Low=30) before averaging.
   */
  getVoiceScore(q: any): string {
    if (!q) return '—';
    const scores: number[] = [];
    if (typeof q.stressLevel === 'number') scores.push(q.stressLevel);
    if (typeof q.confidence === 'number') scores.push(q.confidence);
    else if (q.confidence === 'High') scores.push(90);
    else if (q.confidence === 'Moderate') scores.push(60);
    else if (q.confidence === 'Low') scores.push(30);
    if (!scores.length) return '—';
    return Math.round(scores.reduce((a, b) => a + b, 0) / scores.length) + '%';
  }

  /** Mean of physical and voice scores; falls back to whichever is available. */
  getOverallScore(q: any): string {
    const phys = this.getPhysicalScore(q);
    const voice = this.getVoiceScore(q);
    // parseInt on '—' yields NaN, which signals "score unavailable".
    const physNum = Number.parseInt(phys, 10);
    const voiceNum = Number.parseInt(voice, 10);
    if (isNaN(physNum) && isNaN(voiceNum)) return '—';
    if (isNaN(physNum)) return voice;
    if (isNaN(voiceNum)) return phys;
    return Math.round((physNum + voiceNum) / 2) + '%';
  }

  /**
   * Convert a sentiment value embedded in a string (e.g. '0.42') to a signed
   * percent like '+42%'. Returns the input unchanged when no number is found.
   */
  getSentimentPercent(sentiment: string): string {
    if (!sentiment) return '';
    const match = sentiment.match(/([+-]?\d*\.?\d+)/);
    if (match) {
      const value = parseFloat(match[1]);
      const percent = Math.round(value * 100);
      return (percent > 0 ? '+' : '') + percent + '%';
    }
    return sentiment;
  }

  /**
   * Compute the lie percentage as (100 - truthProbability), clamped at 0 and
   * rounded. Accepts a number or a string such as '78%' or '78.5'.
   */
  getLiePercent(q: any): string {
    if (!q) return '—';
    const tp = q.truthProbability;
    if (tp === undefined || tp === null) return '—';
    let num = NaN;
    if (typeof tp === 'number') num = tp;
    else if (typeof tp === 'string') {
      // Capture decimals too, so '78.5' is not truncated to 78.
      const m = tp.match(/(\d+(?:\.\d+)?)/);
      if (m) num = Number(m[1]);
    }
    if (Number.isNaN(num)) return '—';
    return Math.round(Math.max(0, 100 - num)) + '%';
  }

  /** Format truthProbability for display: numbers and bare numeric strings get a '%' suffix. */
  formatTruthProbability(q: any): string {
    if (!q) return '—';
    const tp = q.truthProbability;
    if (tp === undefined || tp === null) return '—';
    if (typeof tp === 'number') return tp + '%';
    if (typeof tp === 'string') {
      // Already contains % -> return as-is; otherwise extract the number (incl. decimals).
      if (tp.includes('%')) return tp;
      const m = tp.match(/(\d+(?:\.\d+)?)/);
      return m ? m[1] + '%' : tp;
    }
    return String(tp);
  }

  /**
   * Display value for a final-verified metric: prefer a precomputed field on
   * the question (0 is a valid value and is NOT skipped), otherwise derive it
   * from the raw sub-metrics.
   */
  getFinalMetricValue(q: any, key: string): string {
    if (!q) return '—';
    switch (key) {
      case 'physicalExpression':
        return ViewDetailsPageComponent.isPresent(q.physicalExpression)
          ? String(q.physicalExpression) : this.getPhysicalExpressionSummary(q);
      case 'physicalScore':
        return ViewDetailsPageComponent.isPresent(q.physicalScore)
          ? String(q.physicalScore) : this.getPhysicalScore(q);
      case 'voiceExpression':
        return ViewDetailsPageComponent.isPresent(q.voiceExpression)
          ? String(q.voiceExpression) : this.getVoiceExpressionSummary(q);
      case 'voiceScore':
        return ViewDetailsPageComponent.isPresent(q.voiceScore)
          ? String(q.voiceScore) : this.getVoiceScore(q);
      case 'truthProbability':
        return this.formatTruthProbability(q);
      case 'overallScore':
        return ViewDetailsPageComponent.isPresent(q.overallScore)
          ? String(q.overallScore) : this.getOverallScore(q);
      default:
        return this.getMetricValue(q, key);
    }
  }

  // ---- Navigation helpers for Previous / Next buttons ----

  /** Index of the selected question within `questions`, or -1 when unknown. */
  public getCurrentIndex(): number {
    if (!this.selectedQuestion || !this.questions || !this.questions.length) return -1;
    // Try reference match first.
    let idx = this.questions.indexOf(this.selectedQuestion);
    if (idx >= 0) return idx;
    // Fallback to matching by caseId + question text.
    idx = this.questions.findIndex(q =>
      q && this.selectedQuestion && q.caseId === this.selectedQuestion.caseId &&
      (q.question === this.selectedQuestion.question || q.text === this.selectedQuestion.question));
    return idx;
  }

  /** True when a previous question exists. */
  hasPrev(): boolean {
    return this.getCurrentIndex() > 0;
  }

  /** True when a next question exists. */
  hasNext(): boolean {
    const idx = this.getCurrentIndex();
    return idx >= 0 && idx < this.questions.length - 1;
  }

  /** Navigate to the previous question (with a brief button-click animation). */
  prevQuestion() {
    const idx = this.getCurrentIndex();
    if (idx > 0) {
      const newIdx = idx - 1;
      // Play click animation briefly before navigating.
      this.navAnimating = 'prev';
      this.shownTooltip = null;
      setTimeout(() => {
        this.router.navigate(['/view-details', newIdx]);
      }, 140);
      // Clear animation state after it finishes.
      setTimeout(() => { this.navAnimating = null; }, 420);
    }
  }

  /** Navigate to the next question (with a brief button-click animation). */
  nextQuestion() {
    const idx = this.getCurrentIndex();
    if (idx >= 0 && idx < this.questions.length - 1) {
      const newIdx = idx + 1;
      this.navAnimating = 'next';
      this.shownTooltip = null;
      setTimeout(() => {
        this.router.navigate(['/view-details', newIdx]);
      }, 140);
      setTimeout(() => { this.navAnimating = null; }, 420);
    }
  }
}