Oviya committed on
Commit
b366f52
·
1 Parent(s): e5dcd83
src/app/auth/auth.component.ts CHANGED
@@ -1,7 +1,7 @@
1
  import { Component } from '@angular/core';
2
  import { AuthService } from './auth.service';
3
  import { Router } from '@angular/router';
4
-
5
  @Component({
6
  selector: 'app-auth',
7
  templateUrl: './auth.component.html',
@@ -23,23 +23,40 @@ export class AuthComponent {
23
  login(): void {
24
  this.authService.login(this.username, this.password).subscribe(
25
  (response) => {
26
- console.log('✅ Login success. Redirecting to /home...');
 
 
 
 
 
 
 
 
 
 
 
 
27
  this.authService.setLoggedIn(true);
28
- this.authService.startAutoRefresh();
29
- /*this.router.navigate(['/home']); // Redirect to dashboard after login*/
30
- // 🔁 Redirect to stored URL or default to /home
31
  const redirectUrl = localStorage.getItem('redirectAfterLogin') || '/home';
32
  localStorage.removeItem('redirectAfterLogin');
33
  this.router.navigate([redirectUrl]);
34
  },
35
- (error) => {
36
  this.errorMessage = 'Invalid username or password';
37
  }
38
  );
39
  }
40
 
41
 
42
-
 
 
 
 
 
 
43
 
44
  // ✅ Check if user is logged in
45
  isLoggedIn(): boolean {
 
1
  import { Component } from '@angular/core';
2
  import { AuthService } from './auth.service';
3
  import { Router } from '@angular/router';
4
+ type GradeLevel = 'lowergrade' | 'midgrade' | 'highergrade';
5
  @Component({
6
  selector: 'app-auth',
7
  templateUrl: './auth.component.html',
 
23
  login(): void {
24
  this.authService.login(this.username, this.password).subscribe(
25
  (response) => {
26
+ // 1) Prefer grade coming from backend if it exists
27
+ let level = this.normalizeGrade(
28
+ (response?.gradeLevel ?? response?.grade ?? response?.role ?? '')
29
+ );
30
+
31
+ // 2) Otherwise derive it from the USERNAME exactly as you requested
32
+ if (!level) level = this.normalizeGrade(this.username);
33
+
34
+ // 3) Persist for ApiService -> sets X-User per request
35
+ if (!level) level = 'highergrade'; // final fallback (optional)
36
+ localStorage.setItem('gradeLevel', level);
37
+
38
+ // 4) Proceed as before
39
  this.authService.setLoggedIn(true);
40
+ this.authService.startAutoRefresh();
41
+
 
42
  const redirectUrl = localStorage.getItem('redirectAfterLogin') || '/home';
43
  localStorage.removeItem('redirectAfterLogin');
44
  this.router.navigate([redirectUrl]);
45
  },
46
+ () => {
47
  this.errorMessage = 'Invalid username or password';
48
  }
49
  );
50
  }
51
 
52
 
53
+ private normalizeGrade(v: string): GradeLevel | '' {
54
+ const s = (v || '').trim().toLowerCase();
55
+ if (s === 'lowergrade' || s === 'lower' || s === 'low' || s === 'l') return 'lowergrade';
56
+ if (s === 'midgrade' || s === 'mid' || s === 'm') return 'midgrade';
57
+ if (s === 'highergrade' || s === 'higher' || s === 'high' || s === 'h') return 'highergrade';
58
+ return '';
59
+ }
60
 
61
  // ✅ Check if user is logged in
62
  isLoggedIn(): boolean {
src/app/chat/api.service.ts CHANGED
@@ -1,33 +1,81 @@
1
-
2
  import { Injectable } from '@angular/core';
3
  import { HttpClient, HttpHeaders } from '@angular/common/http';
4
  import { Observable } from 'rxjs';
5
 
6
- @Injectable({
7
- providedIn: 'root'
8
- })
 
 
 
 
 
 
 
 
 
 
9
  export class ApiService {
10
-
11
- private baseUrl = location.hostname.endsWith('hf.space')
12
- ? 'https://pykara-py-learn-backend.hf.space/media'
13
- : 'http://localhost:5000/media';
14
- //private baseUrl = 'http://localhost:5012';
15
- //private apiUrl = 'http://localhost:5012/explain-grammar';
16
 
17
- constructor(private http: HttpClient) { }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
18
 
 
19
 
20
-
 
 
 
 
 
 
 
 
 
 
 
21
 
22
- askQuestion(userInput: string, sessionId: string | null): Observable<any> {
23
- const headers = { 'Content-Type': 'application/json' };
24
- const data = sessionId ? { topic: userInput, session_id: sessionId } : { topic: userInput };
 
 
 
 
 
 
 
25
 
26
- return this.http.post<any>(`${this.baseUrl}/explain-grammar`, data, { headers });
 
 
 
 
 
 
 
 
 
 
27
  }
28
 
29
- getGrammarSuggestions(input: string): Observable<any> {
30
- const headers = { 'Content-Type': 'application/json' };
31
- return this.http.post<any>(`${this.baseUrl}/suggest-grammar-questions`, { input }, { headers });
32
  }
33
  }
 
 
1
  import { Injectable } from '@angular/core';
2
  import { HttpClient, HttpHeaders } from '@angular/common/http';
3
  import { Observable } from 'rxjs';
4
 
5
+ type Grade = 'lowergrade' | 'midgrade' | 'highergrade';
6
+ type DbLevel = 'low' | 'mid' | 'high';
7
+
8
+ function resolveBaseUrl(): string {
9
+ const isHF = location.hostname.endsWith('hf.space');
10
+ if (isHF) return 'https://pykara-py-learn-backend.hf.space/rag';
11
+ if (location.hostname === 'localhost' || location.hostname === '127.0.0.1') {
12
+ return 'http://localhost:5000/rag';
13
+ }
14
+ return '/rag';
15
+ }
16
+
17
+ @Injectable({ providedIn: 'root' })
18
  export class ApiService {
19
+ private baseUrl = resolveBaseUrl();
 
 
 
 
 
20
 
21
+ // ---- NEW: grade/db-level helpers ----
22
+ private getGrade(): Grade {
23
+ const g = (localStorage.getItem('gradeLevel') || 'highergrade').toLowerCase();
24
+ if (g === 'lowergrade' || g === 'midgrade' || g === 'highergrade') return g as Grade;
25
+ return 'highergrade';
26
+ }
27
+ private toDbLevel(g: Grade): DbLevel {
28
+ return g === 'lowergrade' ? 'low' : g === 'midgrade' ? 'mid' : 'high';
29
+ }
30
+ private makeHeaders(g: Grade): HttpHeaders {
31
+ return new HttpHeaders({
32
+ 'Content-Type': 'application/json',
33
+ 'X-User': g, // this is what your Postman test sets
34
+ });
35
+ }
36
+ // -------------------------------------
37
 
38
+ constructor(private http: HttpClient) { }
39
 
40
+ generateOpenQuestions(payload: { qtype: 'OPEN'; n?: number; topic?: string }): Observable<any> {
41
+ const grade = this.getGrade();
42
+ const headers = this.makeHeaders(grade);
43
+ const body = {
44
+ qtype: 'OPEN',
45
+ n: payload.n ?? 5,
46
+ topic: payload.topic ?? '',
47
+ model: 'gpt-4o-mini',
48
+ db_level: this.toDbLevel(grade) // also tell the backend in the body
49
+ };
50
+ return this.http.post<any>(`${this.baseUrl}/generate-questions`, body, { headers });
51
+ }
52
 
53
+ explainGrammar(question: string): Observable<any> {
54
+ const grade = this.getGrade();
55
+ const headers = this.makeHeaders(grade);
56
+ const body = {
57
+ question,
58
+ model: 'gpt-4o-mini',
59
+ db_level: this.toDbLevel(grade)
60
+ };
61
+ return this.http.post<any>(`${this.baseUrl}/explain-grammar`, body, { headers });
62
+ }
63
 
64
+ suggestFollowups(payload: { last_question: string; last_answer: string; n?: number }): Observable<any> {
65
+ const grade = this.getGrade();
66
+ const headers = this.makeHeaders(grade);
67
+ const body = {
68
+ last_question: payload.last_question,
69
+ last_answer: payload.last_answer,
70
+ n: payload.n ?? 5,
71
+ model: 'gpt-4o-mini',
72
+ db_level: this.toDbLevel(grade)
73
+ };
74
+ return this.http.post<any>(`${this.baseUrl}/suggest-followups`, body, { headers });
75
  }
76
 
77
+ // Back-compat
78
+ askQuestion(userInput: string, _sessionId: string | null): Observable<any> {
79
+ return this.explainGrammar(userInput);
80
  }
81
  }
src/app/chat/chat.component.html CHANGED
@@ -1,99 +1,120 @@
1
- <div class="chat-container">
2
- <header class="header-container">
3
- <div class="logo">
4
- <a (click)="goToHome()" routerLink="/home" class="brand-link">
5
- <img src="assets/images/pykara-logo.png" alt="Pykara Logo" />
6
- </a>
7
- <span class="product-name">Py-Learn</span>
8
- </div>
9
- <div class="header-title">
10
- <h1>Grammar Chat</h1>
11
- </div>
12
- <div class="home-btn">
13
- <a (click)="goToHome()" routerLink="/home">
14
- <img src="assets/images/home.png" alt="Home" class="home-icon" />
15
- </a>
16
- </div>
17
- </header>
18
 
19
- <main class="chat-box" #chatBox>
20
- <img src="assets/images/chat/chatbg.png" alt="Chat Background" class="chat-bg" />
21
- <ng-container *ngFor="let message of messages">
22
- <div *ngIf="message.from === 'user'" class="message-wrapper user">
23
- <div class="profile-pic">
24
- <img src="assets/images/chat/rabbit.png" alt="User Profile Picture" />
 
 
 
 
 
25
  </div>
26
- <div class="message">
27
- {{ message.text }}
28
- <div class="message-timestamp">{{ message.timestamp }}</div>
 
 
 
 
 
 
 
 
 
 
 
 
 
29
  </div>
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
30
  </div>
31
- <div *ngIf="message.from === 'ai'" class="message-wrapper ai">
32
- <div class="profile-pic">
33
- <img src="assets/images/chat/lion.png" alt="AI Profile Picture" />
 
 
 
 
 
 
 
 
 
 
34
  </div>
35
- <div class="message structured-response">
36
- <div [innerHTML]="formatStructuredResponse(message.text)"></div>
37
- <div class="message-timestamp">{{ message.timestamp }}</div>
 
38
  </div>
39
  </div>
40
- </ng-container>
41
- <div *ngIf="isTyping" class="typing-indicator" role="status" aria-live="polite">
42
- AI is typing
43
- <span></span>
44
- <span></span>
45
- <span></span>
46
- </div>
47
- </main>
48
-
49
- <section class="input-container">
50
- <div class="input-box">
51
- <textarea [(ngModel)]="userInput"
52
- (focus)="showHardcodedQuestions()"
53
- (blur)="hideHardcodedQuestions()"
54
- (input)="adjustTextareaHeight($event); getSuggestions()"
55
- (keydown)="handleEnterPress($event)"
56
- placeholder="Type your message here..."
57
- [disabled]="isSpeaking"
58
- aria-label="Message input"></textarea>
59
- <button (click)="isSpeaking ? stopSpeaking() : handleButtonClick()"
60
- [disabled]="isSubmitting"
61
- aria-label="Send or voice">
62
- <img [src]="isSpeaking ? 'assets/images/chat/stop.png' : getButtonIcon()"
63
- alt="Action"
64
- class="button-icon" />
65
- </button>
66
- </div>
67
- <div class="hardcoded-questions-container" *ngIf="showQuestions">
68
- <div class="hardcoded-question" (click)="selectHardcodedQuestion('What is grammar?')">What is grammar?</div>
69
- <div class="hardcoded-question" (click)="selectHardcodedQuestion('What are the rules to be followed in grammar?')">What are the rules to be followed in grammar?</div>
70
- <div class="hardcoded-question" (click)="selectHardcodedQuestion('What are the types of tenses?')">What are the types of tenses?</div>
71
- <div class="hardcoded-question" (click)="selectHardcodedQuestion('Why do we need to follow grammar rules while writing and speaking?')">Why do we need to follow grammar rules while writing and speaking?</div>
72
- <div class="hardcoded-question" (click)="selectHardcodedQuestion('How do you identify a subject and a predicate in a sentence?')">How do you identify a subject and a predicate in a sentence?</div>
73
- </div>
74
- </section>
75
 
76
- <div class="listening-box" *ngIf="isListening" role="dialog" aria-modal="true">
77
- <div class="listening-content">
78
- <img src="assets/images/chat/microphone-icon.png" alt="Microphone" class="microphone-image" />
79
- <p *ngIf="!errorMessage">Listening...</p>
80
- <div class="listening-actions">
81
- <button class="mute-btn" (click)="muteMicrophone()">
82
- <img src="assets/images/chat/mic.png" alt="Mute" />
83
- </button>
84
- <button class="close-btn" (click)="stopListening()">
85
- <img src="assets/images/chat/cross.png" alt="Close" />
86
- </button>
 
 
87
  </div>
88
- <p *ngIf="errorMessage" class="error-text" (click)="openMicrophoneSettings()">{{ errorMessage }}</p>
89
  </div>
90
- </div>
91
 
92
- <div class="popup-overlay" *ngIf="showMicPopup">
93
- <div class="popup-box">
94
- <h3>Microphone access required</h3>
95
- <p>To use voice mode, you'll need to enable your microphone and try again.</p>
96
- <button class="popup-button" (click)="closeMicrophonePopup()">OK</button>
 
97
  </div>
98
  </div>
99
- </div>
 
1
<!-- Grammar chat page: header, message list, input + generated questions, mic overlays -->
<div class="chat-container">
  <!-- App header: logo, page title, home shortcut -->
  <header class="header-container">
    <div class="logo">
      <a (click)="goToHome()" routerLink="/home" class="brand-link">
        <img src="assets/images/pykara-logo.png" alt="Pykara Logo" />
      </a>
      <span class="product-name">Py-Learn</span>
    </div>
    <div class="header-title">
      <h1>Grammar Chat</h1>
    </div>
    <div class="home-btn">
      <a (click)="goToHome()" routerLink="/home">
        <img src="assets/images/home.png" alt="Home" class="home-icon" />
      </a>
    </div>
  </header>

  <!-- Scrollable message area (#chatBox is used by the component to auto-scroll) -->
  <main class="chat-box" #chatBox>
    <img src="assets/images/chat/chatbg.png" alt="Chat Background" class="chat-bg" />
    <ng-container *ngFor="let message of messages">
      <!-- User bubble -->
      <div *ngIf="message.from === 'user'" class="message-wrapper user">
        <div class="profile-pic">
          <img src="assets/images/chat/rabbit.png" alt="User Profile Picture" />
        </div>
        <div class="message">
          {{ message.text }}
          <div class="message-timestamp">{{ message.timestamp }}</div>
        </div>
      </div>
      <!-- AI bubble: formatted answer plus optional retrieval sources -->
      <div *ngIf="message.from === 'ai'" class="message-wrapper ai">
        <div class="profile-pic">
          <img src="assets/images/chat/lion.png" alt="AI Profile Picture" />
        </div>
        <div class="message structured-response">
          <div [innerHTML]="formatStructuredResponse(message.text)"></div>

          <!-- Source chips, shown only when the message carries source_ids;
               displaySource presumably maps an id to a readable label — confirm in component -->
          <div class="sources" *ngIf="message.source_ids?.length">
            <span class="sources-label">Sources:</span>
            <span class="source-chip" *ngFor="let s of message.source_ids">
              {{ displaySource(s) }}
            </span>
          </div>

          <div class="message-timestamp">{{ message.timestamp }}</div>
        </div>
      </div>
    </ng-container>
    <!-- Typing indicator while waiting for the backend -->
    <div *ngIf="isTyping" class="typing-indicator" role="status" aria-live="polite">
      AI is typing
      <span></span>
      <span></span>
      <span></span>
    </div>
  </main>

  <!-- Input row: textarea (disabled while the AI is speaking) and send/voice button -->
  <section class="input-container">
    <div class="input-box">
      <textarea [(ngModel)]="userInput"
                (focus)="showHardcodedQuestions()"
                (blur)="hideHardcodedQuestions()"
                (input)="adjustTextareaHeight($event); getSuggestions()"
                (keydown)="handleEnterPress($event)"
                placeholder="Type your message here..."
                [disabled]="isSpeaking"
                aria-label="Message input"></textarea>
      <button (click)="isSpeaking ? stopSpeaking() : handleButtonClick()"
              [disabled]="isSubmitting"
              aria-label="Send or voice">
        <img [src]="isSpeaking ? 'assets/images/chat/stop.png' : getButtonIcon()"
             alt="Action"
             class="button-icon" />
      </button>
    </div>
    <!-- Suggested questions panel, toggled on textarea focus/blur -->
    <div class="hardcoded-questions-container" *ngIf="showQuestions">
      <!-- Loading -->
      <div class="hardcoded-question" *ngIf="pdfLoading">Loading questions…</div>

      <!-- Generated questions list -->
      <div class="hardcoded-question"
           *ngFor="let q of pdfQuestions; let i = index"
           (click)="selectGeneratedQuestion(q)"
           role="button"
           tabindex="0"
           (keydown.enter)="selectGeneratedQuestion(q)"
           aria-label="Suggested question">
        {{ q }}
      </div>

      <!-- Empty state -->
      <div class="hardcoded-question" *ngIf="!pdfLoading && !pdfQuestions.length">
        No questions available from the textbook.
      </div>
    </div>
  </section>

  <!-- Voice-capture overlay, visible while listening -->
  <div class="listening-box" *ngIf="isListening" role="dialog" aria-modal="true">
    <div class="listening-content">
      <img src="assets/images/chat/microphone-icon.png" alt="Microphone" class="microphone-image" />
      <p *ngIf="!errorMessage">Listening...</p>
      <div class="listening-actions">
        <button class="mute-btn" (click)="muteMicrophone()">
          <img src="assets/images/chat/mic.png" alt="Mute" />
        </button>
        <button class="close-btn" (click)="stopListening()">
          <img src="assets/images/chat/cross.png" alt="Close" />
        </button>
      </div>
      <!-- Clicking the error opens browser microphone settings -->
      <p *ngIf="errorMessage" class="error-text" (click)="openMicrophoneSettings()">{{ errorMessage }}</p>
    </div>
  </div>

  <!-- Modal shown when microphone permission is missing -->
  <div class="popup-overlay" *ngIf="showMicPopup">
    <div class="popup-box">
      <h3>Microphone access required</h3>
      <p>To use voice mode, you'll need to enable your microphone and try again.</p>
      <button class="popup-button" (click)="closeMicrophonePopup()">OK</button>
    </div>
  </div>
</div>
 
src/app/chat/chat.component.ts CHANGED
@@ -7,7 +7,8 @@ import { isPlatformBrowser } from '@angular/common';
7
  import { ViewChild, ElementRef } from '@angular/core';
8
  import { Renderer2 } from '@angular/core';
9
  import { Subscription } from 'rxjs';
10
-
 
11
  @Component({
12
  selector: 'app-chat',
13
  standalone: true,
@@ -17,11 +18,18 @@ import { Subscription } from 'rxjs';
17
  })
18
  export class ChatComponent implements OnDestroy {
19
  showQuestions: boolean = false;
 
 
 
 
 
 
20
  userInput: string = '';
21
- messages: { from: string, text: string, timestamp: string; isPlaying?: boolean }[] = [];
22
  isTyping: boolean = false;
23
  @ViewChild('chatBox') chatBox!: ElementRef;
24
 
 
25
  isLoadingSpeech: boolean = false;
26
  selectedVoice: SpeechSynthesisVoice | null = null;
27
  errorMessage: string = "";
@@ -31,25 +39,22 @@ export class ChatComponent implements OnDestroy {
31
  isProcessingSpeech: boolean = false;
32
  isSpeaking: boolean = false;
33
  isAudioPaused: boolean = false;
34
- isInputValid: boolean = false;
 
35
  suggestions: string[] = [];
36
- showMicPopup: boolean = false;
37
- isSubmitting: boolean = false;
38
- private responseSub?: Subscription;
39
- private lastFullAiText: string = '';
40
 
41
- ngAfterViewChecked() {
42
- setTimeout(() => {
43
- this.scrollToBottom();
44
- }, 100);
45
- }
46
 
47
- private scrollToBottom(): void {
48
- try {
49
- this.chatBox.nativeElement.scrollTop = this.chatBox.nativeElement.scrollHeight;
50
- } catch (err) { }
51
- }
52
 
 
 
 
 
 
 
53
  constructor(
54
  private apiService: ApiService,
55
  private cdr: ChangeDetectorRef,
@@ -57,6 +62,7 @@ export class ChatComponent implements OnDestroy {
57
  private router: Router,
58
  private renderer: Renderer2
59
  ) {
 
60
  window.speechSynthesis.onvoiceschanged = () => {
61
  console.log("Available Voices:", window.speechSynthesis.getVoices());
62
  };
@@ -72,7 +78,6 @@ export class ChatComponent implements OnDestroy {
72
  this.recognition.onresult = (event: any) => {
73
  if (event.results && event.results[0]) {
74
  const transcript = event.results[0][0].transcript.trim();
75
- console.log('Recognized speech:', transcript);
76
  this.userInput = transcript;
77
  this.sendMessage();
78
  this.recognition.stop();
@@ -84,18 +89,36 @@ export class ChatComponent implements OnDestroy {
84
  console.error('Speech Recognition Error:', event.error);
85
  this.isProcessingSpeech = false;
86
  };
87
- } else {
88
- console.warn('Speech Recognition is not supported in this browser.');
89
  }
90
  window.addEventListener('beforeunload', this.handleUnload);
91
  }
92
  }
93
 
94
- private handleUnload = (): void => {
95
- if (window.speechSynthesis) {
96
- window.speechSynthesis.cancel();
 
 
 
 
97
  }
98
- };
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
99
 
100
  ngOnDestroy(): void {
101
  if (isPlatformBrowser(this.platformId)) {
@@ -104,19 +127,36 @@ export class ChatComponent implements OnDestroy {
104
  }
105
  window.removeEventListener('beforeunload', this.handleUnload);
106
  }
 
 
 
107
  }
108
 
109
- openMicrophonePopup(): void {
110
- this.showMicPopup = true;
111
- }
 
 
112
 
113
- closeMicrophonePopup(): void {
114
- this.showMicPopup = false;
 
 
115
  }
116
 
 
 
 
 
 
117
  showHardcodedQuestions(): void {
118
- setTimeout(() => {
119
  this.showQuestions = true;
 
 
 
 
 
120
  }, 100);
121
  }
122
 
@@ -126,37 +166,80 @@ export class ChatComponent implements OnDestroy {
126
  }, 200);
127
  }
128
 
129
- selectHardcodedQuestion(question: string): void {
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
130
  this.userInput = question;
131
  this.showQuestions = false;
132
  setTimeout(() => {
133
  this.sendMessage();
134
  this.userInput = '';
135
- }, 100);
136
  }
137
 
 
138
  getSuggestions(): void {
139
  if (!this.userInput || this.userInput.trim().length < 1 || this.isSpeaking) {
140
  this.suggestions = [];
141
  return;
142
  }
143
-
144
- this.apiService.getGrammarSuggestions(this.userInput).subscribe(
145
- (response) => {
146
- console.log("API Response:", response);
147
- if (response.suggestions) {
148
- this.suggestions = response.suggestions
149
- .filter((s: string) => s && s.trim().length > 0)
150
- .map((s: string) => s.replace(/^\d+\.\s*/, ""));
151
- } else {
152
- this.suggestions = [];
153
- }
154
- },
155
- (error) => {
156
- console.error("Error fetching suggestions:", error);
157
- this.suggestions = [];
158
- }
159
- );
160
  }
161
 
162
  selectSuggestion(suggestion: string): void {
@@ -165,90 +248,99 @@ export class ChatComponent implements OnDestroy {
165
  this.sendMessage();
166
  }
167
 
 
168
  sendMessage(inputText?: string): void {
169
  const message = inputText ? inputText.trim() : this.userInput.trim();
170
- if (!message) {
171
- return;
172
- }
173
 
174
- let sessionId = localStorage.getItem('session_id');
175
 
176
- this.messages.push({ from: 'user', text: message, timestamp: new Date().toLocaleTimeString() });
 
177
  this.userInput = '';
178
  this.isTyping = true;
179
  this.cdr.detectChanges();
180
  this.scrollToBottom();
181
 
182
- this.responseSub = this.apiService.askQuestion(message, sessionId).subscribe(
183
- (response) => {
184
- this.isTyping = false;
185
-
186
- const explanation = (response?.response || 'No explanation available.').trim();
187
-
188
- if (response.session_id && !sessionId) {
189
- localStorage.setItem('session_id', response.session_id);
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
190
  }
 
 
191
 
192
- const lines: string[] = String(explanation).split('\n');
193
- const formatted: string = lines.map((line: string) => line.trim()).join('\n');
194
- this.messages.push({
195
- from: 'ai',
196
- text: formatted,
197
- timestamp: new Date().toLocaleTimeString(),
198
- });
199
- this.cdr.detectChanges();
200
- this.scrollToBottom();
201
-
202
- this.lastFullAiText = formatted;
203
-
204
- this.speakResponse(explanation);
205
- },
206
- (error) => {
207
- this.isTyping = false;
208
- const errorMessage = 'Error: Could not get a response from the server.';
209
- console.error('API Error:', error);
210
-
211
- this.messages.push({
212
- from: 'ai',
213
- text: errorMessage,
214
- timestamp: new Date().toLocaleTimeString(),
215
- });
216
- this.cdr.detectChanges();
217
- this.scrollToBottom();
218
-
219
- this.lastFullAiText = errorMessage;
220
- this.speakResponse(errorMessage);
221
- }
222
- );
223
  }
224
 
 
 
225
  formatStructuredResponse(text: string): string {
226
- let formattedText = text
227
  .replace(/\n/g, '<br>')
228
  .replace(/(\d+)\.\s/g, '<b>$1.</b> ')
229
  .replace(/\•\s/g, '✔️ ')
230
  .replace(/\-\s/g, '🔹 ')
231
  .replace(/(\*\*)(.*?)\1/g, '<b>$2</b>');
232
- return formattedText;
233
  }
234
 
 
235
  speakResponse(responseText: string): void {
236
- if (!responseText) {
237
- console.warn('No response text provided for speech.');
238
- return;
239
- }
240
-
241
- console.log('Initiating text-to-speech with response:', responseText);
242
 
243
  let lastAiMessage = this.messages.slice().reverse().find((msg) => msg.from === 'ai');
244
-
245
  if (!lastAiMessage) {
246
  lastAiMessage = { from: 'ai', text: '', timestamp: new Date().toLocaleTimeString() };
247
  this.messages.push(lastAiMessage);
248
  } else {
249
  lastAiMessage.text = '';
250
  }
251
-
252
  this.cdr.detectChanges();
253
 
254
  const words = responseText.split(' ');
@@ -262,17 +354,10 @@ export class ChatComponent implements OnDestroy {
262
  this.isSpeaking = true;
263
 
264
  const voices = window.speechSynthesis.getVoices();
 
 
265
 
266
- let femaleVoice = voices.find(voice => voice.name === "Microsoft Zira - English (United States)");
267
-
268
- if (femaleVoice) {
269
- speech.voice = femaleVoice;
270
- console.log("Using voice:", femaleVoice.name);
271
- } else {
272
- console.warn("Microsoft Zira not found, using default.");
273
- }
274
-
275
- speech.onboundary = (event) => {
276
  if (event.name === 'word' && currentWordIndex < words.length) {
277
  lastAiMessage!.text = words.slice(0, currentWordIndex + 1).join(' ');
278
  currentWordIndex++;
@@ -281,38 +366,21 @@ export class ChatComponent implements OnDestroy {
281
  };
282
 
283
  speech.onend = () => {
284
- console.log('Speech ended.');
285
  this.isSpeaking = false;
286
  lastAiMessage!.text = responseText;
287
  this.cdr.detectChanges();
288
  };
289
 
290
- console.log('Starting speech synthesis...');
291
  window.speechSynthesis.speak(speech);
292
  }
293
 
294
- ngOnInit(): void {
295
- if (window.speechSynthesis.onvoiceschanged !== undefined) {
296
- window.speechSynthesis.onvoiceschanged = () => {
297
- this.loadVoices();
298
- };
299
- }
300
-
301
- this.loadVoices();
302
- }
303
-
304
  loadVoices(): void {
305
  const voices = window.speechSynthesis.getVoices();
306
-
307
  if (!voices.length) {
308
- console.warn("No voices available yet, retrying...");
309
  setTimeout(() => this.loadVoices(), 500);
310
  return;
311
  }
312
-
313
- console.log("Available Voices:", voices.map(v => v.name));
314
-
315
- const preferredVoices = [
316
  "Google UK English Female",
317
  "Google US English Female",
318
  "Microsoft Zira - English (United States)",
@@ -320,27 +388,20 @@ export class ChatComponent implements OnDestroy {
320
  "Google en-GB Female",
321
  "Google en-US Female"
322
  ];
323
-
324
- for (let voiceName of preferredVoices) {
325
- const foundVoice = voices.find(voice => voice.name === voiceName);
326
- if (foundVoice) {
327
- this.selectedVoice = foundVoice;
328
- break;
329
- }
330
  }
331
-
332
  if (!this.selectedVoice) {
333
  this.selectedVoice = voices.find(voice => voice.name.toLowerCase().includes("female")) || voices[0];
334
  }
335
-
336
- console.log("Selected AI Voice:", this.selectedVoice?.name);
337
  }
338
 
 
339
  pauseAudio(): void {
340
  if (window.speechSynthesis.speaking && !window.speechSynthesis.paused) {
341
  window.speechSynthesis.pause();
342
  this.isAudioPaused = true;
343
- console.log('AI Speech Paused');
344
  this.cdr.detectChanges();
345
  }
346
  }
@@ -349,63 +410,28 @@ export class ChatComponent implements OnDestroy {
349
  if (window.speechSynthesis.paused) {
350
  window.speechSynthesis.resume();
351
  this.isAudioPaused = false;
352
- console.log('AI Speech Resumed');
353
  this.cdr.detectChanges();
354
  }
355
  }
356
 
357
- muteMicrophone(): void {
358
- console.log("Microphone muted");
359
- }
360
 
361
  startListening(): void {
362
  this.isListening = true;
363
  this.isProcessingSpeech = false;
364
 
365
  if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
366
- navigator.mediaDevices
367
- .getUserMedia({ audio: true })
368
  .then(() => {
369
  if (this.recognition) {
370
- console.log('Starting speech recognition...');
371
  this.recognition.start();
372
-
373
- this.recognition.onaudiostart = () => console.log('Audio capturing started.');
374
- this.recognition.onspeechstart = () => console.log('Speech has been detected.');
375
- this.recognition.onspeechend = () => console.log('Speech ended, processing...');
376
- this.recognition.onaudioend = () => console.log('Audio capturing ended.');
377
-
378
- this.recognition.onresult = (event: any) => {
379
- if (event.results && event.results[0]) {
380
- const transcript = event.results[0][0].transcript.trim();
381
- console.log('Recognized speech:', transcript);
382
-
383
- this.userInput = transcript;
384
-
385
- if (this.userInput.trim()) {
386
- console.log('Sending question automatically:', this.userInput);
387
- this.sendMessage();
388
- }
389
-
390
- this.recognition.stop();
391
- this.isListening = false;
392
- }
393
- };
394
-
395
- this.recognition.onnomatch = () =>
396
- alert('No speech detected. Please try again.');
397
- this.recognition.onend = () => {
398
- console.log('Speech recognition service disconnected.');
399
- this.isListening = false;
400
- };
401
  this.recognition.onerror = (error: any) => {
402
  console.error('Speech Recognition Error:', error);
403
  this.isListening = false;
404
- if (error.error === 'not-allowed') {
405
- alert('Microphone permission denied.');
406
- } else if (error.error === 'no-speech') {
407
- alert('No speech detected. Please try speaking clearly.');
408
- }
409
  };
410
  } else {
411
  alert('Speech Recognition is not supported in this browser.');
@@ -416,7 +442,6 @@ export class ChatComponent implements OnDestroy {
416
  this.errorMessage = 'Please enable microphone access to use this feature.';
417
  this.isListening = true;
418
  });
419
-
420
  } else {
421
  alert('Microphone access is not supported in this browser.');
422
  }
@@ -424,11 +449,10 @@ export class ChatComponent implements OnDestroy {
424
 
425
  stopListening(): void {
426
  this.isListening = false;
427
- if (this.recognition) {
428
- this.recognition.stop();
429
- }
430
  }
431
 
 
432
  toggleAudio(message: { text: string, isPlaying?: boolean }): void {
433
  if (this.speechSynthesisInstance && this.speechSynthesisInstance.text === message.text) {
434
  if (message.isPlaying) {
@@ -439,10 +463,8 @@ export class ChatComponent implements OnDestroy {
439
  message.isPlaying = true;
440
  }
441
  } else {
442
- if (this.speechSynthesisInstance) {
443
- window.speechSynthesis.cancel();
444
- }
445
- this.messages.forEach((msg) => (msg.isPlaying = false));
446
 
447
  message.isPlaying = true;
448
  this.speechSynthesisInstance = new SpeechSynthesisUtterance(message.text);
@@ -454,30 +476,19 @@ export class ChatComponent implements OnDestroy {
454
  message.isPlaying = false;
455
  this.speechSynthesisInstance = null;
456
  };
457
-
458
  window.speechSynthesis.speak(this.speechSynthesisInstance);
459
  }
460
  }
461
 
462
- goToHome() {
463
- this.router.navigate(['/home']);
464
- }
465
 
466
  copySuccessIndex: number | null = null;
467
-
468
  copyToClipboard(text: string, index: number): void {
469
  navigator.clipboard.writeText(text).then(() => {
470
  this.copySuccessIndex = index;
471
- setTimeout(() => {
472
- this.copySuccessIndex = null;
473
- }, 2000);
474
- }).catch(err => {
475
- console.error('Failed to copy: ', err);
476
- });
477
- }
478
-
479
- checkInput() {
480
- this.isInputValid = this.userInput.trim().length > 0;
481
  }
482
 
483
  handleButtonClick(): void {
@@ -496,15 +507,10 @@ export class ChatComponent implements OnDestroy {
496
  }
497
 
498
  getButtonIcon(): string {
499
- if (this.userInput.trim().length > 0) {
500
- return 'assets/images/chat/send-icon.png';
501
- } else if (this.isSpeaking) {
502
- return 'assets/images/chat/pause-icon.png';
503
- } else if (this.isAudioPaused) {
504
- return 'assets/images/chat/resume-icon.png';
505
- } else {
506
- return 'assets/images/chat/microphone-icon.png';
507
- }
508
  }
509
 
510
  addNewLine(event: KeyboardEvent): void {
@@ -520,28 +526,12 @@ export class ChatComponent implements OnDestroy {
520
  textarea.style.height = `${textarea.scrollHeight}px`;
521
  }
522
 
523
- getButtonIconClass(): string {
524
- return this.userInput.trim().length > 0
525
- ? 'send-icon'
526
- : this.isSpeaking
527
- ? 'pause-icon'
528
- : this.isAudioPaused
529
- ? 'resume-icon'
530
- : 'microphone-icon';
531
- }
532
-
533
  openMicrophoneSettings(): void {
534
- const userAgent = navigator.userAgent;
535
-
536
- if (userAgent.includes("Chrome")) {
537
- window.open("chrome://settings/content/microphone", "_blank");
538
- } else if (userAgent.includes("Firefox")) {
539
- window.open("about:preferences#privacy", "_blank");
540
- } else if (userAgent.includes("Edge")) {
541
- window.open("edge://settings/content/microphone", "_blank");
542
- } else {
543
- alert("Please check your browser's settings to enable the microphone.");
544
- }
545
  }
546
 
547
  stopSpeaking(): void {
@@ -550,29 +540,19 @@ export class ChatComponent implements OnDestroy {
550
  window.speechSynthesis.cancel();
551
  }
552
  } catch { }
553
-
554
  (this as any).speechSynthesisInstance = null;
555
-
556
- if (this.responseSub && !this.responseSub.closed) {
557
- this.responseSub.unsubscribe();
558
- }
559
-
560
  this.isSpeaking = false;
561
  this.isAudioPaused = false;
562
  this.isTyping = false;
563
  }
564
 
565
  handleEnterPress(event: KeyboardEvent): void {
566
- if (this.isSpeaking) {
567
- event.preventDefault();
568
- return;
569
- }
570
  if (event.key === 'Enter' && !event.shiftKey) {
571
  event.preventDefault();
572
  const text = (this.userInput || '').trim();
573
  if (text) this.sendMessage();
574
  }
575
  }
576
-
577
-
578
  }
 
7
  import { ViewChild, ElementRef } from '@angular/core';
8
  import { Renderer2 } from '@angular/core';
9
  import { Subscription } from 'rxjs';
10
+ import { finalize } from 'rxjs/operators';
11
+ type Grade = 'lowergrade' | 'midgrade' | 'highergrade';
12
  @Component({
13
  selector: 'app-chat',
14
  standalone: true,
 
18
  })
19
  export class ChatComponent implements OnDestroy {
20
  showQuestions: boolean = false;
21
+ isSubmitting: boolean = false;
22
+ /** Generated (initial) or follow-up questions shown on focus */
23
+ pdfQuestions: string[] = [];
24
+ pdfLoading: boolean = false;
25
+
26
+ /** Chat state */
27
  userInput: string = '';
28
+ messages: { from: string, text: string, timestamp: string; isPlaying?: boolean, source_ids?: string[]; }[] = [];
29
  isTyping: boolean = false;
30
  @ViewChild('chatBox') chatBox!: ElementRef;
31
 
32
+ /** Speech / mic state */
33
  isLoadingSpeech: boolean = false;
34
  selectedVoice: SpeechSynthesisVoice | null = null;
35
  errorMessage: string = "";
 
39
  isProcessingSpeech: boolean = false;
40
  isSpeaking: boolean = false;
41
  isAudioPaused: boolean = false;
42
+
43
+ /** Suggestions for typed input (powered by PDF too) */
44
  suggestions: string[] = [];
 
 
 
 
45
 
46
+ /** Popup */
47
+ showMicPopup: boolean = false;
 
 
 
48
 
49
+ /** Subscriptions */
50
+ private responseSub?: Subscription;
 
 
 
51
 
52
+ /** Store last Q/A to drive follow-ups */
53
+ private lastQuestion: string | null = null;
54
+ private lastAnswer: string | null = null;
55
+ private lastSourceIds: string[] = [];
56
+ // lowergrade | midgrade | highergrade
57
+
58
  constructor(
59
  private apiService: ApiService,
60
  private cdr: ChangeDetectorRef,
 
62
  private router: Router,
63
  private renderer: Renderer2
64
  ) {
65
+ // Log voices
66
  window.speechSynthesis.onvoiceschanged = () => {
67
  console.log("Available Voices:", window.speechSynthesis.getVoices());
68
  };
 
78
  this.recognition.onresult = (event: any) => {
79
  if (event.results && event.results[0]) {
80
  const transcript = event.results[0][0].transcript.trim();
 
81
  this.userInput = transcript;
82
  this.sendMessage();
83
  this.recognition.stop();
 
89
  console.error('Speech Recognition Error:', event.error);
90
  this.isProcessingSpeech = false;
91
  };
 
 
92
  }
93
  window.addEventListener('beforeunload', this.handleUnload);
94
  }
95
  }
96
 
97
+ private ensureGradeLevel(defaultGrade: Grade = 'lowergrade'): void {
98
+ const g = (localStorage.getItem('gradeLevel') || '').toLowerCase();
99
+ if (g !== 'lowergrade' && g !== 'midgrade' && g !== 'highergrade') {
100
+ localStorage.setItem('gradeLevel', defaultGrade);
101
+ console.info('[Chat] gradeLevel not set; defaulted to', defaultGrade);
102
+ } else {
103
+ console.info('[Chat] gradeLevel =', g);
104
  }
105
+ }
106
+
107
+ ngOnInit(): void {
108
+ this.ensureGradeLevel();
109
+ if (window.speechSynthesis.onvoiceschanged !== undefined) {
110
+ window.speechSynthesis.onvoiceschanged = () => {
111
+ this.loadVoices();
112
+ };
113
+ }
114
+ this.loadVoices();
115
+ }
116
+
117
+ ngAfterViewChecked() {
118
+ setTimeout(() => {
119
+ this.scrollToBottom();
120
+ }, 100);
121
+ }
122
 
123
  ngOnDestroy(): void {
124
  if (isPlatformBrowser(this.platformId)) {
 
127
  }
128
  window.removeEventListener('beforeunload', this.handleUnload);
129
  }
130
+ if (this.responseSub && !this.responseSub.closed) {
131
+ this.responseSub.unsubscribe();
132
+ }
133
  }
134
 
135
+ private handleUnload = (): void => {
136
+ if (window.speechSynthesis) {
137
+ window.speechSynthesis.cancel();
138
+ }
139
+ };
140
 
141
+ private scrollToBottom(): void {
142
+ try {
143
+ this.chatBox.nativeElement.scrollTop = this.chatBox.nativeElement.scrollHeight;
144
+ } catch (err) { }
145
  }
146
 
147
+ /** Popup controls */
148
+ openMicrophonePopup(): void { this.showMicPopup = true; }
149
+ closeMicrophonePopup(): void { this.showMicPopup = false; }
150
+
151
+ /** Show questions on focus: initial (no answer yet) or follow-ups (after an answer) */
152
  showHardcodedQuestions(): void {
153
+ setTimeout(async () => {
154
  this.showQuestions = true;
155
+ if (this.lastAnswer) {
156
+ this.fetchFollowupQuestions();
157
+ } else {
158
+ this.fetchInitialQuestions();
159
+ }
160
  }, 100);
161
  }
162
 
 
166
  }, 200);
167
  }
168
 
169
+ /** Initial questions generated from PDFs (topicless OPEN) */
170
+ private fetchInitialQuestions(n: number = 5): void {
171
+ this.pdfLoading = true;
172
+ this.pdfQuestions = [];
173
+ this.apiService.generateOpenQuestions({ qtype: 'OPEN', n, topic: '' })
174
+ .subscribe({
175
+ next: (resp) => {
176
+ const items = Array.isArray(resp?.questions) ? resp.questions : [];
177
+ this.pdfQuestions = items.map((q: any) => typeof q === 'string' ? q : (q?.question || '')).filter(Boolean);
178
+
179
+ if (!this.pdfQuestions.length && resp?.note) {
180
+ console.warn('Question generator note:', resp.note);
181
+ }
182
+
183
+ this.pdfLoading = false;
184
+ this.cdr.detectChanges();
185
+ },
186
+ error: () => { this.pdfLoading = false; this.pdfQuestions = []; this.cdr.detectChanges(); }
187
+ });
188
+ }
189
+
190
+ /** Follow-ups after an answer */
191
+ private fetchFollowupQuestions(n: number = 5): void {
192
+ if (!this.lastQuestion || !this.lastAnswer) {
193
+ this.fetchInitialQuestions(n);
194
+ return;
195
+ }
196
+ this.pdfLoading = true;
197
+ this.pdfQuestions = [];
198
+ this.apiService.suggestFollowups({
199
+ last_question: this.lastQuestion,
200
+ last_answer: this.lastAnswer,
201
+ n
202
+ }).subscribe({
203
+ next: (resp) => {
204
+ const list = Array.isArray(resp?.suggestions) ? resp.suggestions : [];
205
+ this.pdfQuestions = list.filter((s: string) => !!s);
206
+ this.pdfLoading = false;
207
+ this.cdr.detectChanges();
208
+ },
209
+ error: () => {
210
+ this.pdfLoading = false;
211
+ this.pdfQuestions = [];
212
+ this.cdr.detectChanges();
213
+ }
214
+ });
215
+ }
216
+
217
+ /** Click on one generated question */
218
+ selectGeneratedQuestion(question: string): void {
219
  this.userInput = question;
220
  this.showQuestions = false;
221
  setTimeout(() => {
222
  this.sendMessage();
223
  this.userInput = '';
224
+ }, 80);
225
  }
226
 
227
+ /** Type-ahead suggestions sourced from PDFs (OPEN on the typed topic) */
228
  getSuggestions(): void {
229
  if (!this.userInput || this.userInput.trim().length < 1 || this.isSpeaking) {
230
  this.suggestions = [];
231
  return;
232
  }
233
+ this.apiService.generateOpenQuestions({ qtype: 'OPEN', n: 5, topic: this.userInput })
234
+ .subscribe({
235
+ next: (resp) => {
236
+ const items = Array.isArray(resp?.questions) ? resp.questions : [];
237
+ this.suggestions = items
238
+ .map((q: any) => (typeof q === 'string' ? q : (q?.question || '')))
239
+ .filter((s: string) => !!s);
240
+ },
241
+ error: () => { this.suggestions = []; }
242
+ });
 
 
 
 
 
 
 
243
  }
244
 
245
  selectSuggestion(suggestion: string): void {
 
248
  this.sendMessage();
249
  }
250
 
251
+ /** Send question to backend for an answer */
252
  sendMessage(inputText?: string): void {
253
  const message = inputText ? inputText.trim() : this.userInput.trim();
254
+ if (!message) return;
 
 
255
 
256
+ this.isSubmitting = true; // <— start submitting
257
 
258
+ const timestamp = new Date().toLocaleTimeString();
259
+ this.messages.push({ from: 'user', text: message, timestamp });
260
  this.userInput = '';
261
  this.isTyping = true;
262
  this.cdr.detectChanges();
263
  this.scrollToBottom();
264
 
265
+ this.responseSub = this.apiService.explainGrammar(message)
266
+ .pipe(finalize(() => {
267
+ this.isSubmitting = false;
268
+ }))
269
+ .subscribe({
270
+ next: (response) => {
271
+ this.isTyping = false;
272
+
273
+ const explanation =
274
+ (response?.answer || response?.response || response?.text || 'No explanation available.').trim();
275
+
276
+ // NEW: collect source IDs safely
277
+ const sourceIds: string[] = Array.isArray(response?.source_ids)
278
+ ? response.source_ids.filter((s: any) => typeof s === 'string' && s.trim().length > 0)
279
+ : [];
280
+
281
+ this.messages.push({
282
+ from: 'ai',
283
+ text: explanation,
284
+ timestamp: new Date().toLocaleTimeString(),
285
+ source_ids: sourceIds
286
+ });
287
+ this.cdr.detectChanges();
288
+ this.scrollToBottom();
289
+
290
+ this.lastQuestion = message;
291
+ this.lastAnswer = explanation;
292
+ this.lastSourceIds = sourceIds;
293
+
294
+ this.speakResponse(explanation);
295
+ },
296
+ error: (err) => {
297
+ console.error('API Error:', err);
298
+ this.isTyping = false;
299
+ const errorMessage = 'Error: Could not get a response from the server.';
300
+
301
+ // Use an empty array here (no undefined variable)
302
+ this.messages.push({
303
+ from: 'ai',
304
+ text: errorMessage,
305
+ timestamp: new Date().toLocaleTimeString(),
306
+ source_ids: []
307
+ });
308
+ this.cdr.detectChanges();
309
+ this.scrollToBottom();
310
+ this.speakResponse(errorMessage);
311
  }
312
+ });
313
+ }
314
 
315
+ displaySource(tag: string): string {
316
+ if (!tag) return '';
317
+ const [path, pagePart] = tag.split('#p');
318
+ const file = path.split(/[/\\]/).pop() || path; // works for Windows/Unix paths
319
+ return pagePart ? `${file} p${pagePart}` : file;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
320
  }
321
 
322
+
323
+ /** Render helpers */
324
  formatStructuredResponse(text: string): string {
325
+ return text
326
  .replace(/\n/g, '<br>')
327
  .replace(/(\d+)\.\s/g, '<b>$1.</b> ')
328
  .replace(/\•\s/g, '✔️ ')
329
  .replace(/\-\s/g, '🔹 ')
330
  .replace(/(\*\*)(.*?)\1/g, '<b>$2</b>');
 
331
  }
332
 
333
+ /** TTS helpers */
334
  speakResponse(responseText: string): void {
335
+ if (!responseText) return;
 
 
 
 
 
336
 
337
  let lastAiMessage = this.messages.slice().reverse().find((msg) => msg.from === 'ai');
 
338
  if (!lastAiMessage) {
339
  lastAiMessage = { from: 'ai', text: '', timestamp: new Date().toLocaleTimeString() };
340
  this.messages.push(lastAiMessage);
341
  } else {
342
  lastAiMessage.text = '';
343
  }
 
344
  this.cdr.detectChanges();
345
 
346
  const words = responseText.split(' ');
 
354
  this.isSpeaking = true;
355
 
356
  const voices = window.speechSynthesis.getVoices();
357
+ const femaleVoice = voices.find(v => v.name === "Microsoft Zira - English (United States)");
358
+ if (femaleVoice) speech.voice = femaleVoice;
359
 
360
+ speech.onboundary = (event: any) => {
 
 
 
 
 
 
 
 
 
361
  if (event.name === 'word' && currentWordIndex < words.length) {
362
  lastAiMessage!.text = words.slice(0, currentWordIndex + 1).join(' ');
363
  currentWordIndex++;
 
366
  };
367
 
368
  speech.onend = () => {
 
369
  this.isSpeaking = false;
370
  lastAiMessage!.text = responseText;
371
  this.cdr.detectChanges();
372
  };
373
 
 
374
  window.speechSynthesis.speak(speech);
375
  }
376
 
 
 
 
 
 
 
 
 
 
 
377
  loadVoices(): void {
378
  const voices = window.speechSynthesis.getVoices();
 
379
  if (!voices.length) {
 
380
  setTimeout(() => this.loadVoices(), 500);
381
  return;
382
  }
383
+ const preferred = [
 
 
 
384
  "Google UK English Female",
385
  "Google US English Female",
386
  "Microsoft Zira - English (United States)",
 
388
  "Google en-GB Female",
389
  "Google en-US Female"
390
  ];
391
+ for (let name of preferred) {
392
+ const v = voices.find(voice => voice.name === name);
393
+ if (v) { this.selectedVoice = v; break; }
 
 
 
 
394
  }
 
395
  if (!this.selectedVoice) {
396
  this.selectedVoice = voices.find(voice => voice.name.toLowerCase().includes("female")) || voices[0];
397
  }
 
 
398
  }
399
 
400
+ /** Global audio controls */
401
  pauseAudio(): void {
402
  if (window.speechSynthesis.speaking && !window.speechSynthesis.paused) {
403
  window.speechSynthesis.pause();
404
  this.isAudioPaused = true;
 
405
  this.cdr.detectChanges();
406
  }
407
  }
 
410
  if (window.speechSynthesis.paused) {
411
  window.speechSynthesis.resume();
412
  this.isAudioPaused = false;
 
413
  this.cdr.detectChanges();
414
  }
415
  }
416
 
417
+ /** Mic controls */
418
+ muteMicrophone(): void { console.log("Microphone muted"); }
 
419
 
420
  startListening(): void {
421
  this.isListening = true;
422
  this.isProcessingSpeech = false;
423
 
424
  if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
425
+ navigator.mediaDevices.getUserMedia({ audio: true })
 
426
  .then(() => {
427
  if (this.recognition) {
 
428
  this.recognition.start();
429
+ this.recognition.onend = () => { this.isListening = false; };
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
430
  this.recognition.onerror = (error: any) => {
431
  console.error('Speech Recognition Error:', error);
432
  this.isListening = false;
433
+ if (error.error === 'not-allowed') alert('Microphone permission denied.');
434
+ else if (error.error === 'no-speech') alert('No speech detected. Please try again.');
 
 
 
435
  };
436
  } else {
437
  alert('Speech Recognition is not supported in this browser.');
 
442
  this.errorMessage = 'Please enable microphone access to use this feature.';
443
  this.isListening = true;
444
  });
 
445
  } else {
446
  alert('Microphone access is not supported in this browser.');
447
  }
 
449
 
450
  stopListening(): void {
451
  this.isListening = false;
452
+ if (this.recognition) this.recognition.stop();
 
 
453
  }
454
 
455
+ /** Per-message read toggle (kept from your code) */
456
  toggleAudio(message: { text: string, isPlaying?: boolean }): void {
457
  if (this.speechSynthesisInstance && this.speechSynthesisInstance.text === message.text) {
458
  if (message.isPlaying) {
 
463
  message.isPlaying = true;
464
  }
465
  } else {
466
+ if (this.speechSynthesisInstance) window.speechSynthesis.cancel();
467
+ this.messages.forEach((m) => (m.isPlaying = false));
 
 
468
 
469
  message.isPlaying = true;
470
  this.speechSynthesisInstance = new SpeechSynthesisUtterance(message.text);
 
476
  message.isPlaying = false;
477
  this.speechSynthesisInstance = null;
478
  };
 
479
  window.speechSynthesis.speak(this.speechSynthesisInstance);
480
  }
481
  }
482
 
483
+ /** UI helpers */
484
+ goToHome() { this.router.navigate(['/home']); }
 
485
 
486
  copySuccessIndex: number | null = null;
 
487
  copyToClipboard(text: string, index: number): void {
488
  navigator.clipboard.writeText(text).then(() => {
489
  this.copySuccessIndex = index;
490
+ setTimeout(() => { this.copySuccessIndex = null; }, 2000);
491
+ }).catch(err => { console.error('Failed to copy: ', err); });
 
 
 
 
 
 
 
 
492
  }
493
 
494
  handleButtonClick(): void {
 
507
  }
508
 
509
  getButtonIcon(): string {
510
+ if (this.userInput.trim().length > 0) return 'assets/images/chat/send-icon.png';
511
+ if (this.isSpeaking) return 'assets/images/chat/pause-icon.png';
512
+ if (this.isAudioPaused) return 'assets/images/chat/resume-icon.png';
513
+ return 'assets/images/chat/microphone-icon.png';
 
 
 
 
 
514
  }
515
 
516
  addNewLine(event: KeyboardEvent): void {
 
526
  textarea.style.height = `${textarea.scrollHeight}px`;
527
  }
528
 
 
 
 
 
 
 
 
 
 
 
529
  openMicrophoneSettings(): void {
530
+ const ua = navigator.userAgent;
531
+ if (ua.includes("Chrome")) window.open("chrome://settings/content/microphone", "_blank");
532
+ else if (ua.includes("Firefox")) window.open("about:preferences#privacy", "_blank");
533
+ else if (ua.includes("Edge")) window.open("edge://settings/content/microphone", "_blank");
534
+ else alert("Please check your browser's settings to enable the microphone.");
 
 
 
 
 
 
535
  }
536
 
537
  stopSpeaking(): void {
 
540
  window.speechSynthesis.cancel();
541
  }
542
  } catch { }
 
543
  (this as any).speechSynthesisInstance = null;
544
+ if (this.responseSub && !this.responseSub.closed) this.responseSub.unsubscribe();
 
 
 
 
545
  this.isSpeaking = false;
546
  this.isAudioPaused = false;
547
  this.isTyping = false;
548
  }
549
 
550
  handleEnterPress(event: KeyboardEvent): void {
551
+ if (this.isSpeaking) { event.preventDefault(); return; }
 
 
 
552
  if (event.key === 'Enter' && !event.shiftKey) {
553
  event.preventDefault();
554
  const text = (this.userInput || '').trim();
555
  if (text) this.sendMessage();
556
  }
557
  }
 
 
558
  }