pykara committed
Commit 3cd5de7 · Parent: c0a26b6
src/app/chat/api.service.ts CHANGED
@@ -48,15 +48,20 @@ export class ApiService {
     return this.http.post<any>(`${this.baseUrl}/generate-questions`, body, { headers });
   }
 
-  explainGrammar(question: string): Observable<any> {
+  // Accept either a string (legacy) or an object { question, synthesize_audio, synthesize_video }
+  explainGrammar(payload: string | any): Observable<any> {
     const grade = this.getGrade();
     const headers = this.makeHeaders(grade);
-    const body = {
-      question,
-      model: 'gpt-4o-mini',
-      db_level: this.toDbLevel(grade),
-    };
-    return this.http.post<any>(`${this.baseUrl}/explain-grammar`, body, { headers });
+
+    // Normalize payload into an object
+    const body = typeof payload === 'string' ? { question: payload } : { ...payload };
+
+    // Ensure backend gets db_level and model so it selects the right vectorstore/LLM
+    if (!body.db_level) body.db_level = this.toDbLevel(grade);
+    if (!body.model) body.model = 'gpt-4o-mini';
+
+    const url = `${this.baseUrl}/explain-grammar`;
+    return this.http.post(url, body, { headers });
   }
 
   // ✅ Updated to carry source_ids so follow-ups stay in the same textbook pages
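For context, the change above keeps the legacy string call shape working while letting new callers pass an options object, filling in db_level and model only when the caller omitted them. A minimal standalone sketch of that normalization, assuming illustrative names (ExplainPayload and normalizeExplainPayload are not part of this commit):

// Standalone sketch of the payload normalization explainGrammar() now performs.
interface ExplainPayload {
  question: string;
  model?: string;
  db_level?: string;
  synthesize_audio?: boolean;
  synthesize_video?: boolean;
}

function normalizeExplainPayload(
  payload: string | ExplainPayload,
  dbLevel: string
): ExplainPayload {
  // A bare string is the legacy call shape: wrap it in an object.
  const body: ExplainPayload =
    typeof payload === 'string' ? { question: payload } : { ...payload };

  // Fill defaults without clobbering caller-supplied values.
  if (!body.db_level) body.db_level = dbLevel;
  if (!body.model) body.model = 'gpt-4o-mini';
  return body;
}

// Legacy and new call shapes produce equivalent request bodies:
console.log(normalizeExplainPayload('What is a gerund?', 'grade-6'));
console.log(normalizeExplainPayload(
  { question: 'What is a gerund?', synthesize_audio: true },
  'grade-6'
));

Defaulting with `if (!body.model)` rather than always assigning means a caller can still pin a different model or db_level per request.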
src/app/chat/chat.component.ts CHANGED
@@ -68,13 +68,13 @@ export class ChatComponent implements OnDestroy {
   isSynthesizing?: boolean;
   isVideoSynthesizing?: boolean;
   }> = [];
-  isTyping: boolean = false;
+  isTyping: boolean = false;
   @ViewChild('chatBox') chatBox!: ElementRef;
 
   /** Speech / mic state */
   isLoadingSpeech: boolean = false;
   selectedVoice: SpeechSynthesisVoice | null = null;
-
+
   speechSynthesisInstance: SpeechSynthesisUtterance | null = null;
   isListening: boolean = false;
   isProcessingSpeech: boolean = false;
@@ -85,7 +85,7 @@
   suggestions: string[] = [];
   isInputValid = false;
   /** Popup */
-
+
 
   /** Subscriptions */
   private responseSub?: Subscription;
@@ -134,7 +134,7 @@
 
 
   currentFollowups: string[] = [];
-
+
   /*private shouldAutoScroll = true;*/
   videoUrl = '';
   aiResponseInterval: any = null;
@@ -284,7 +284,7 @@
     }
   }
 
-
+
 
   ngOnInit(): void {
     this.ensureGradeLevel();
@@ -296,7 +296,7 @@
     this.loadVoices();
   }
 
-
+
 
   ngOnDestroy(): void {
     if (this.currentExplainSub) { this.currentExplainSub.unsubscribe(); this.currentExplainSub = null; }
@@ -324,7 +324,7 @@
     });
   }
 
-
+
 
   scrollToBottom(): void {
     if (this.shouldAutoScroll) {
@@ -564,49 +564,54 @@
     this.shouldAutoScroll = true;
     this.scrollToBottom();
 
-    this.responseSub = this.apiService.explainGrammar(message)
-      .pipe(finalize(() => {
-        this.isSubmitting = false;
-      }))
-      .subscribe({
-        next: (response) => {
-          this.isTyping = false;
-
-          const explanation =
-            (response?.answer || response?.response || response?.text || 'No explanation available.').trim();
-
-          const sourceIds: string[] = Array.isArray(response?.source_ids)
-            ? response.source_ids.filter((s: any) => typeof s === 'string' && s.trim().length > 0)
-            : [];
-
-          // Store question + source ids *now*.
-          this.lastQuestion = message;
-          this.lastSourceIds = sourceIds;
-
-          const notFound = /No information available in the provided textbook content/i.test(explanation);
-          const hasContext = !!sourceIds.length && !notFound;
-
-          // IMPORTANT:
-          // Do NOT set this.lastAnswer here.
-          // We will set it only after the streaming animation has finished.
-          this.streamAiAnswer(explanation, sourceIds, hasContext);
-        },
-        error: (err) => {
-          console.error('API Error:', err);
-          this.isTyping = false;
-          const errorMessage = 'Error: Could not get a response from the server.';
+    this.responseSub = this.apiService.explainGrammar({
+      question: message,
+      // ask backend to synthesize audio/video only when user toggles are ON
+      synthesize_audio: Boolean(this.isVoiceEnabled),
+      synthesize_video: Boolean(this.isTutorEnabled)
+    })
+      .pipe(finalize(() => {
+        this.isSubmitting = false;
+      }))
+      .subscribe({
+        next: (response) => {
+          this.isTyping = false;
+
+          const explanation =
+            (response?.answer || response?.response || response?.text || 'No explanation available.').trim();
+
+          const sourceIds: string[] = Array.isArray(response?.source_ids)
+            ? response.source_ids.filter((s: any) => typeof s === 'string' && s.trim().length > 0)
+            : [];
+
+          const audioUrl = (response?.audio_url || response?.audioUrl || '') as string;
+          const videoUrl = (response?.video_url || response?.videoUrl || '') as string;
+
+          // Store question + source ids *now*.
+          this.lastQuestion = message;
+          this.lastSourceIds = sourceIds;
+
+          const notFound = /No information available in the provided textbook content/i.test(explanation);
+          const hasContext = !!sourceIds.length && !notFound;
+
+          // Pass URLs along to the stream/attach to message
+          this.streamAiAnswer(explanation, sourceIds, hasContext, audioUrl || undefined, videoUrl || undefined);
+        },
+        error: (err) => {
+          console.error('API Error:', err);
+          this.isTyping = false;
+          const errorMessage = 'Error: Could not get a response from the server.';
 
-          // We can still stream the error text (or you can show it directly if you prefer)
-          this.streamAiAnswer(errorMessage, [], false);
-        }
-      });
+          this.streamAiAnswer(errorMessage, [], false);
+        }
+      });
   }
 
 
 
   /** Show AI answer word-by-word and start audio */
   /** Show AI answer word-by-word and start audio */
-  private streamAiAnswer(explanation: string, sourceIds: string[], hasContext: boolean): void {
+  private streamAiAnswer(explanation: string, sourceIds: string[], hasContext: boolean, audioUrl?: string, videoUrl?: string): void {
     const text = (explanation || '').trim() || 'No explanation available.';
     const timestamp = new Date().toLocaleTimeString();
 
@@ -616,9 +621,17 @@
       text: '',
      timestamp,
       source_ids: sourceIds,
-      pending: true
+      pending: true,
+      audioUrl: audioUrl || '',
+      videoUrl: videoUrl || '',
+      playingVideoUrl: ''
     } as any) - 1;
 
+    // ensure video-enabled index has an entry for this message
+    if (this.isVideoEnabledIndex.length <= aiIndex) {
+      this.isVideoEnabledIndex[aiIndex] = false;
+    }
+
     this.isAiResponding = true;
     this.shouldAutoScroll = true;
     this.cdr.detectChanges();
@@ -628,16 +641,56 @@
       // When streaming is finished, we finally store lastAnswer
       this.lastAnswer = text;
       this.lastAnswerHasContext = hasContext;
+
+      // After the AI text finishes streaming, automatically play server media when toggles are enabled.
+      // If both audio and video are present and both toggles are enabled, video takes precedence.
+      this.autoPlayMediaForMessage(aiIndex);
     });
 
-    // Play audio at the same time (only if voice is enabled)
-    this.speakResponse(text);
+    // Only run client-side TTS when there is no server-provided audio URL.
+    if (!audioUrl && this.isVoiceEnabled) {
+      this.speakResponse(text);
+    }
   }
 
+  /**
+   * Decide and start playback for a message that already has `audioUrl` and/or `videoUrl`.
+   * Behavior:
+   * - If both videoUrl && audioUrl && both video and audio toggles are ON -> play video (video wins)
+   * - Else if videoUrl && video toggle ON -> play video
+   * - Else if audioUrl && voice toggle ON -> play audio
+   */
+  private autoPlayMediaForMessage(index: number): void {
+    const msg = this.messages[index] as any;
+    if (!msg) return;
 
+    const hasVideo = !!(msg.videoUrl && msg.videoUrl.trim());
+    const hasAudio = !!(msg.audioUrl && msg.audioUrl.trim());
 
+    // If both present and user enabled both, prefer video
+    if (hasVideo && this.isTutorEnabled) {
+      try { this.stopServerAudio(); } catch { /* noop */ }
+      try { window.speechSynthesis?.cancel(); } catch { /* noop */ }
 
-
+      // request autoplay (will mute to allow autoplay)
+      this.openMessageVideo(index, true);
+      return;
+    }
+
+    // If video present but tutor (video) is not enabled, do not auto-play it.
+    // Only auto-play audio if user enabled voice
+    if (hasAudio && this.isVoiceEnabled) {
+      // play server audio (this will stop videos if any)
+      // ensure inline videos are stopped
+      try { this.stopAllVideo(); } catch { /* noop */ }
+
+      // If message already has audioUrl, play it
+      this.playServerAudioForMessage(index);
+      return;
+    }
+
+    // If no server media played and no TTS, do nothing (user may manually click synth).
+  }
 
   displaySource(tag: string): string {
     if (!tag) return '';
@@ -822,7 +875,7 @@
   }
 
 
-
+
 
   resumeAudio(): void {
     if (this.serverAudio && this.serverAudio.paused) {
@@ -978,7 +1031,7 @@
     });
   }
 
-  openMessageVideo(i: number): void {
+  openMessageVideo(i: number, autoPlay: boolean = false): void {
     const msg = this.messages[i] as any;
     if (!msg?.videoUrl) return;
 
@@ -1002,6 +1055,13 @@
     const vid = document.getElementById(`inline-video-${i}`) as HTMLVideoElement | null;
     if (!vid) { this.isVideoPlayingIndex = null; this.cdr.detectChanges(); return; }
 
+    // If autoplay requested, mute to satisfy browser autoplay policies.
+    if (autoPlay) {
+      try { vid.muted = true; } catch { /* noop */ }
+    } else {
+      try { vid.muted = false; } catch { /* noop */ }
+    }
+
     vid.onplay = () => { this.isVideoPlayingIndex = i; this.cdr.detectChanges(); };
     vid.onpause = () => { this.cdr.detectChanges(); };
     vid.onended = () => { this.onMessageVideoEnded(i); };
@@ -1083,8 +1143,8 @@
     }
   }
 
-
-
+
+
 
   stopListening(): void {
     this.isListening = false;
@@ -1308,7 +1368,7 @@
     }
   }
 
-
+
 
   addNewLine(event: KeyboardEvent): void {
    if (event.key === 'Enter' && event.shiftKey) {
@@ -1353,7 +1413,7 @@
 
     //if (this.serverAudio) {
     //  try { this.serverAudio.volume = this.isMuted ? 0 : 1; } catch { /* noop */ }
-    //}
+    // }
     //this.saveToggleStates();
   }
 
@@ -1452,7 +1512,7 @@
 
   goToHomePageShortcut(): void { this.router.navigate(['/home']); }
 
-
+
 
   openUserGuide(): void { this.showUserGuide = true; }
   closeUserGuide(): void { this.showUserGuide = false; }
@@ -1501,7 +1561,7 @@
       alert("Please check your browser's settings to enable the microphone.");
     }
   } catch (err) {
-    console.error('openMicrophoneSettings error:', err);
+    console.error('openMicrophoneSettings error', err);
     alert("Unable to open settings automatically. Please check your browser's microphone/privacy settings.");
   }
   this.cdr.detectChanges();
@@ -1751,6 +1811,7 @@
     analyser.smoothingTimeConstant = 0.85;
     source.connect(analyser);
 
+    // assign to instance field
     this.analyser = analyser;
     const bufferLength = analyser.fftSize;
     this.dataArray = new Uint8Array(bufferLength);
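For reference, the toggle handling in autoPlayMediaForMessage reduces to a small precedence rule: video wins when both media are available and both toggles are on. A standalone sketch of that rule (pickAutoplayMedia and MediaChoice are illustrative names, not part of this commit):

// Pure-function sketch of the precedence rule autoPlayMediaForMessage() applies.
type MediaChoice = 'video' | 'audio' | 'none';

function pickAutoplayMedia(opts: {
  hasVideo: boolean;
  hasAudio: boolean;
  tutorEnabled: boolean;  // the video toggle
  voiceEnabled: boolean;  // the audio toggle
}): MediaChoice {
  // Video wins whenever it exists and the tutor toggle is ON,
  // even if audio is also available.
  if (opts.hasVideo && opts.tutorEnabled) return 'video';
  // Otherwise fall back to server audio when the voice toggle is ON.
  if (opts.hasAudio && opts.voiceEnabled) return 'audio';
  // Neither: leave playback to a manual click.
  return 'none';
}

console.log(pickAutoplayMedia({ hasVideo: true, hasAudio: true, tutorEnabled: true, voiceEnabled: true }));  // 'video'
console.log(pickAutoplayMedia({ hasVideo: true, hasAudio: true, tutorEnabled: false, voiceEnabled: true })); // 'audio'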
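The muting added to openMessageVideo matters because browsers generally block unmuted programmatic playback that is not triggered by a user gesture, while muted playback is typically allowed. A minimal sketch of that mute-then-play pattern, assuming a hypothetical inline-video-0 element in the DOM:

// Mute before calling play() so browser autoplay policies permit playback;
// catch the rejection in case the browser still refuses.
const vid = document.getElementById('inline-video-0') as HTMLVideoElement | null;
if (vid) {
  vid.muted = true;
  vid.play().catch(() => {
    // Playback was blocked anyway; leave the manual play control as fallback.
  });
}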