pykara commited on
Commit
26dd9d1
·
1 Parent(s): 07c54b5
src/app/chat/api.service.ts CHANGED
@@ -82,4 +82,28 @@ export class ApiService {
82
  askQuestion(userInput: string, _sessionId: string | null): Observable<any> {
83
  return this.explainGrammar(userInput);
84
  }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
85
  }
 
82
  askQuestion(userInput: string, _sessionId: string | null): Observable<any> {
83
  return this.explainGrammar(userInput);
84
  }
85
+
86
+
87
+
88
+ // add method in ApiService
89
+ synthesizeAudio(text: string, language = 'en', referenceFiles?: string[]) {
90
+ const payload: any = { text, language };
91
+ if (referenceFiles) payload.reference_files = referenceFiles;
92
+ return this.http.post<{ audio_url: string }>(`${this.baseUrl}/synthesize-audio`, payload);
93
+ }
94
+
95
+ // New: synthesize video (calls backend D-ID flow)
96
+ synthesizeVideo(text: string, language = 'en') {
97
+ const payload: any = { text, language };
98
+ return this.http.post<{ video_url: string }>(`${this.baseUrl}/synthesize-video`, payload);
99
+ }
100
+
101
+ punctuate(text: string): Observable<any> {
102
+ const headers = {
103
+ 'Content-Type': 'application/json',
104
+ 'X-User': localStorage.getItem('username') || ''
105
+ };
106
+ // Use the port your backend runs on (your local runner uses 7000)
107
+ return this.http.post<any>(`${this.baseUrl}/punctuate`, { text }, { headers });
108
+ }
109
  }
src/app/chat/chat.component.css CHANGED
@@ -1,9 +1,10 @@
 
1
  .header-container {
2
  display: flex;
3
  justify-content: space-between;
4
  align-items: center;
5
  padding: 0vw 1vw;
6
- background-color: #03182d;
7
  box-shadow: 0 0.4vw 0.8vw rgba(0, 0, 0, 0.2);
8
  width: 100%;
9
  position: sticky;
@@ -15,7 +16,7 @@
15
  max-width: 5vw;
16
  height: auto;
17
  background: #e5e7eb;
18
- border-radius: 50%;
19
  margin: 0.5vw;
20
  }
21
 
@@ -45,6 +46,7 @@ h1 {
45
  font-display: swap;
46
  }
47
 
 
48
  .chat-container {
49
  display: flex;
50
  flex-direction: column;
@@ -64,17 +66,6 @@ h1 {
64
  opacity: 0.2;
65
  }
66
 
67
- .chat-header {
68
- text-align: center;
69
- background: linear-gradient(to right, #4ca1af, #6ac5cb);
70
- color: white;
71
- padding: 0px 0;
72
- font-size: 2.2rem;
73
- font-weight: bold;
74
- box-shadow: 0 4px 8px rgba(0, 0, 0, 0.2);
75
- flex-shrink: 0;
76
- }
77
-
78
  .chat-box {
79
  display: flex;
80
  flex-direction: column;
@@ -84,14 +75,25 @@ h1 {
84
  background-color: rgba(35, 34, 32, 0.43);
85
  scroll-behavior: smooth;
86
  flex-grow: 1;
87
- padding-bottom: 90px;
 
88
  }
89
 
 
 
 
 
 
 
 
 
 
 
90
  .input-box {
91
  display: flex;
92
  gap: 10px;
93
  padding: 8px;
94
- background: #03182d;
95
  box-shadow: 0 -2px 4px rgba(0, 0, 0, 0.1);
96
  flex-shrink: 0;
97
  width: 100%;
@@ -111,8 +113,8 @@ h1 {
111
  line-height: 1.2;
112
  border: 1px solid rgb(93, 145, 195);
113
  border-radius: 8px;
114
- background: rgba(255, 255, 255, 0.04);
115
- color: white;
116
  outline: none;
117
  transition: border-color 0.3s ease, box-shadow 0.3s ease;
118
  width: 100%;
@@ -122,19 +124,9 @@ h1 {
122
  .input-box textarea:focus {
123
  border-color: rgb(135, 185, 235);
124
  box-shadow: 0 0 5px rgba(93, 145, 195, 0.6);
125
- text-align: left;
126
- vertical-align: middle;
127
- line-height: normal;
128
- font-size: 1rem;
129
- color: white;
130
- font-weight: bold;
131
  }
132
 
133
  .input-box textarea::placeholder {
134
- text-align: left;
135
- vertical-align: middle;
136
- line-height: normal;
137
- font-size: 1rem;
138
  color: rgba(255, 255, 255, 0.5);
139
  }
140
 
@@ -144,7 +136,7 @@ h1 {
144
  display: flex;
145
  align-items: center;
146
  justify-content: center;
147
- background: #5d91c3;
148
  border: none;
149
  border-radius: 8px;
150
  cursor: pointer;
@@ -161,69 +153,7 @@ h1 {
161
  object-fit: contain;
162
  }
163
 
164
- .resume-icon {
165
- width: 40px;
166
- height: 40px;
167
- object-fit: contain;
168
- }
169
-
170
- .pause-icon {
171
- width: 31px;
172
- height: 31px;
173
- object-fit: contain;
174
- }
175
-
176
- .suggestion-box {
177
- position: absolute;
178
- bottom: 100%;
179
- width: 100%;
180
- background: #d1dae3;
181
- border: 1px solid rgba(0, 0, 0, 0.2);
182
- border-radius: 8px;
183
- box-shadow: 0 4px 8px rgba(0, 0, 0, 0.2);
184
- color: black;
185
- z-index: 100;
186
- padding: 8px 0;
187
- }
188
-
189
- .suggestion-box ul {
190
- list-style: none;
191
- margin: 0;
192
- padding: 0;
193
- }
194
-
195
- .suggestion-box li {
196
- padding: 8px 12px;
197
- cursor: pointer;
198
- font-size: 1rem;
199
- transition: background-color 0.3s ease;
200
- color: rgb(67 65 65);
201
- }
202
-
203
- .suggestion-box li:hover {
204
- background-color: rgba(0, 0, 0, 0.1);
205
- border-radius: 4px;
206
- }
207
-
208
- .input-container {
209
- display: flex;
210
- flex-direction: column-reverse;
211
- position: relative;
212
- width: 100%;
213
- }
214
-
215
- .input-box {
216
- display: flex;
217
- gap: 10px;
218
- padding: 8px;
219
- background: #03182d;
220
- box-shadow: 0 -2px 4px rgba(0, 0, 0, 0.1);
221
- flex-shrink: 0;
222
- width: 100%;
223
- height: auto;
224
- min-height: 40px;
225
- }
226
-
227
  .listening-box {
228
  position: fixed;
229
  top: 50%;
@@ -233,7 +163,6 @@ h1 {
233
  padding: 45px;
234
  border-radius: 20px;
235
  width: 30vw;
236
- height: auto;
237
  min-height: 40vh;
238
  box-shadow: 0px 8px 20px rgba(0, 0, 0, 0.15);
239
  backdrop-filter: blur(20px);
@@ -250,7 +179,6 @@ h1 {
250
  .microphone-image {
251
  width: 100px;
252
  height: 100px;
253
- display: block;
254
  margin: 0 auto 20px;
255
  animation: pulse 1.5s infinite alternate ease-in-out;
256
  }
@@ -258,12 +186,12 @@ h1 {
258
  @keyframes pulse {
259
  from {
260
  transform: scale(1);
261
- filter: drop-shadow(0px 0px 8px rgba(0, 0, 0, 0.4));
262
  }
263
 
264
  to {
265
  transform: scale(1.2);
266
- filter: drop-shadow(0px 0px 14px rgba(0, 0, 0, 0.7));
267
  }
268
  }
269
 
@@ -273,7 +201,7 @@ h1 {
273
  font-weight: bold;
274
  text-align: center;
275
  margin: 10px 0;
276
- text-shadow: 0px 0px 8px rgba(0, 0, 0, 0.3);
277
  margin-top: 3vw;
278
  }
279
 
@@ -318,134 +246,6 @@ h1 {
318
  transform: scale(1.15);
319
  }
320
 
321
- @keyframes fadeInScale {
322
- from {
323
- opacity: 0;
324
- transform: translate(-50%, -50%) scale(0.85);
325
- }
326
-
327
- to {
328
- opacity: 1;
329
- transform: translate(-50%, -50%) scale(1);
330
- }
331
- }
332
-
333
- .error-text {
334
- color: black;
335
- background: #ffccccad;
336
- font-size: 2vw;
337
- font-weight: bold;
338
- text-align: center;
339
- padding: 8px 12px;
340
- border-radius: 5px;
341
- cursor: pointer;
342
- width: auto;
343
- margin-top: 10px;
344
- display: inline-block;
345
- white-space: nowrap;
346
- transition: background 0.3s;
347
- }
348
-
349
- .error-text:hover {
350
- background: #ffaaaa;
351
- }
352
-
353
- .popup-overlay {
354
- position: fixed;
355
- top: 0;
356
- left: 0;
357
- width: 100%;
358
- height: 100%;
359
- background: rgba(0, 0, 0, 0.5);
360
- display: flex;
361
- justify-content: center;
362
- align-items: center;
363
- z-index: 1000;
364
- }
365
-
366
- .popup-box {
367
- background: white;
368
- padding: 20px;
369
- border-radius: 8px;
370
- width: 300px;
371
- text-align: center;
372
- box-shadow: 0px 4px 10px rgba(0, 0, 0, 0.2);
373
- }
374
-
375
- .popup-box h3 {
376
- font-size: 18px;
377
- margin-bottom: 10px;
378
- }
379
-
380
- .popup-box p {
381
- font-size: 14px;
382
- color: #555;
383
- }
384
-
385
- .popup-button {
386
- background: #007bff;
387
- color: white;
388
- border: none;
389
- padding: 10px 20px;
390
- font-size: 14px;
391
- border-radius: 5px;
392
- cursor: pointer;
393
- margin-top: 10px;
394
- }
395
-
396
- .popup-button:hover {
397
- background: #0056b3;
398
- }
399
-
400
- .typing-indicator {
401
- display: flex;
402
- align-items: center;
403
- gap: 8px;
404
- font-style: italic;
405
- color: #ffffff;
406
- font-size: 1.2vw;
407
- margin-left: 4vw;
408
- margin-top: 1vw;
409
- background: rgba(255, 255, 255, 0.2);
410
- padding: 0.8vw 1.5vw;
411
- border-radius: 2vw;
412
- width: fit-content;
413
- animation: fadeIn 0.3s ease-in-out;
414
- }
415
-
416
- .typing-indicator span {
417
- width: 10px;
418
- height: 10px;
419
- background-color: #ffffff;
420
- border-radius: 50%;
421
- display: inline-block;
422
- animation: typingDots 1.5s infinite ease-in-out;
423
- }
424
-
425
- .typing-indicator span:nth-child(1) {
426
- animation-delay: 0s;
427
- }
428
-
429
- .typing-indicator span:nth-child(2) {
430
- animation-delay: 0.2s;
431
- }
432
-
433
- .typing-indicator span:nth-child(3) {
434
- animation-delay: 0.4s;
435
- }
436
-
437
- @keyframes typingDots {
438
- 0%, 100% {
439
- transform: scale(0.8);
440
- opacity: 0.3;
441
- }
442
-
443
- 50% {
444
- transform: scale(1);
445
- opacity: 1;
446
- }
447
- }
448
-
449
  @keyframes fadeIn {
450
  from {
451
  opacity: 0;
@@ -458,6 +258,7 @@ h1 {
458
  }
459
  }
460
 
 
461
  .hardcoded-questions-container {
462
  display: flex;
463
  justify-content: flex-start;
@@ -503,6 +304,7 @@ h1 {
503
  }
504
  }
505
 
 
506
  .message-wrapper {
507
  display: flex;
508
  align-items: flex-start;
@@ -536,16 +338,30 @@ h1 {
536
 
537
  .ai .message {
538
  align-self: flex-start;
539
- background: #2b6296;
540
  color: white;
541
  }
542
 
543
  .user .message {
544
  align-self: flex-end;
545
- background: #2b6296;
546
  color: white;
547
  }
548
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
549
  .message-timestamp {
550
  font-size: 0.8vw;
551
  color: rgba(255, 255, 255, 0.8);
@@ -554,28 +370,23 @@ h1 {
554
  text-align: right;
555
  }
556
 
557
- .paragraph-block {
558
- background: #2b6296;
559
- color: white;
560
- padding: 1vw;
561
- border-radius: 1vw;
562
- margin-bottom: 0.5vw;
563
- box-shadow: 0 0.3vw 0.8vw rgba(0, 0, 0, 0.1);
564
- line-height: 1.5;
565
- max-width: 48vw;
566
- }
567
-
568
  .message {
569
  max-width: 50vw;
570
  padding: 1vw 2vw;
571
  border-radius: 2vw;
572
- font-size: 1.2vw;
 
573
  line-height: 1.5;
574
  box-shadow: 0 0.3vw 0.8vw rgba(0, 0, 0, 0.1);
575
  background: #2b6296;
576
  color: white;
577
  }
578
 
 
 
 
 
 
579
  .structured-response {
580
  background: #2b6296;
581
  color: white;
@@ -610,3 +421,839 @@ h1 {
610
  margin-bottom: 0.2vw;
611
  line-height: 1.4;
612
  }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ /* Header Container */
2
  .header-container {
3
  display: flex;
4
  justify-content: space-between;
5
  align-items: center;
6
  padding: 0vw 1vw;
7
+ background-color: #009688;
8
  box-shadow: 0 0.4vw 0.8vw rgba(0, 0, 0, 0.2);
9
  width: 100%;
10
  position: sticky;
 
16
  max-width: 5vw;
17
  height: auto;
18
  background: #e5e7eb;
19
+ border-radius: 1vw;
20
  margin: 0.5vw;
21
  }
22
 
 
46
  font-display: swap;
47
  }
48
 
49
+ /* Layout */
50
  .chat-container {
51
  display: flex;
52
  flex-direction: column;
 
66
  opacity: 0.2;
67
  }
68
 
 
 
 
 
 
 
 
 
 
 
 
69
  .chat-box {
70
  display: flex;
71
  flex-direction: column;
 
75
  background-color: rgba(35, 34, 32, 0.43);
76
  scroll-behavior: smooth;
77
  flex-grow: 1;
78
+ padding-bottom: var(--input-gap, 120px);
79
+ scroll-padding-bottom: var(--input-gap, 120px);
80
  }
81
 
82
+ .input-container {
83
+ display: flex;
84
+ flex-direction: column-reverse;
85
+ width: 100%;
86
+ position: sticky;
87
+ bottom: 0;
88
+ z-index: 10;
89
+ }
90
+
91
+ /* Input */
92
  .input-box {
93
  display: flex;
94
  gap: 10px;
95
  padding: 8px;
96
+ background: #009688;
97
  box-shadow: 0 -2px 4px rgba(0, 0, 0, 0.1);
98
  flex-shrink: 0;
99
  width: 100%;
 
113
  line-height: 1.2;
114
  border: 1px solid rgb(93, 145, 195);
115
  border-radius: 8px;
116
+ background: #fff;
117
+ color: black;
118
  outline: none;
119
  transition: border-color 0.3s ease, box-shadow 0.3s ease;
120
  width: 100%;
 
124
  .input-box textarea:focus {
125
  border-color: rgb(135, 185, 235);
126
  box-shadow: 0 0 5px rgba(93, 145, 195, 0.6);
 
 
 
 
 
 
127
  }
128
 
129
  .input-box textarea::placeholder {
 
 
 
 
130
  color: rgba(255, 255, 255, 0.5);
131
  }
132
 
 
136
  display: flex;
137
  align-items: center;
138
  justify-content: center;
139
+ background: white;
140
  border: none;
141
  border-radius: 8px;
142
  cursor: pointer;
 
153
  object-fit: contain;
154
  }
155
 
156
+ /* Listening modal */
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
157
  .listening-box {
158
  position: fixed;
159
  top: 50%;
 
163
  padding: 45px;
164
  border-radius: 20px;
165
  width: 30vw;
 
166
  min-height: 40vh;
167
  box-shadow: 0px 8px 20px rgba(0, 0, 0, 0.15);
168
  backdrop-filter: blur(20px);
 
179
  .microphone-image {
180
  width: 100px;
181
  height: 100px;
 
182
  margin: 0 auto 20px;
183
  animation: pulse 1.5s infinite alternate ease-in-out;
184
  }
 
186
  @keyframes pulse {
187
  from {
188
  transform: scale(1);
189
+ filter: drop-shadow(0 0 8px rgba(0, 0, 0, 0.4));
190
  }
191
 
192
  to {
193
  transform: scale(1.2);
194
+ filter: drop-shadow(0 0 14px rgba(0, 0, 0, 0.7));
195
  }
196
  }
197
 
 
201
  font-weight: bold;
202
  text-align: center;
203
  margin: 10px 0;
204
+ text-shadow: 0 0 8px rgba(0, 0, 0, 0.3);
205
  margin-top: 3vw;
206
  }
207
 
 
246
  transform: scale(1.15);
247
  }
248
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
249
  @keyframes fadeIn {
250
  from {
251
  opacity: 0;
 
258
  }
259
  }
260
 
261
+ /* Quick suggestions */
262
  .hardcoded-questions-container {
263
  display: flex;
264
  justify-content: flex-start;
 
304
  }
305
  }
306
 
307
+ /* Messages */
308
  .message-wrapper {
309
  display: flex;
310
  align-items: flex-start;
 
338
 
339
  .ai .message {
340
  align-self: flex-start;
341
+ background: #56cdc273;
342
  color: white;
343
  }
344
 
345
  .user .message {
346
  align-self: flex-end;
347
+ background: #56cdc273;
348
  color: white;
349
  }
350
 
351
+ .message-meta {
352
+ display: flex;
353
+ align-items: center;
354
+ justify-content: flex-end;
355
+ gap: 8px;
356
+ margin-top: 8px;
357
+ }
358
+
359
+ /* Keep timestamp inline inside the meta row (next to the copy button) */
360
+ .message-meta .message-timestamp {
361
+ display: inline;
362
+ margin: 0;
363
+ }
364
+
365
  .message-timestamp {
366
  font-size: 0.8vw;
367
  color: rgba(255, 255, 255, 0.8);
 
370
  text-align: right;
371
  }
372
 
 
 
 
 
 
 
 
 
 
 
 
373
  .message {
374
  max-width: 50vw;
375
  padding: 1vw 2vw;
376
  border-radius: 2vw;
377
+ font-size: 1vw;
378
+ font-weight: 600;
379
  line-height: 1.5;
380
  box-shadow: 0 0.3vw 0.8vw rgba(0, 0, 0, 0.1);
381
  background: #2b6296;
382
  color: white;
383
  }
384
 
385
+ .message .message-content {
386
+ font-size: inherit;
387
+ font-weight: inherit;
388
+ }
389
+
390
  .structured-response {
391
  background: #2b6296;
392
  color: white;
 
421
  margin-bottom: 0.2vw;
422
  line-height: 1.4;
423
  }
424
+
425
+ .message,
426
+ .structured-response {
427
+ scroll-margin-bottom: var(--input-gap, 120px);
428
+ }
429
+
430
+ /* Speaking indicator */
431
+ .profile-pic-stack {
432
+ display: flex;
433
+ flex-direction: column;
434
+ align-items: center;
435
+ gap: 6px;
436
+ }
437
+
438
+ .speaking-indicator {
439
+ display: flex;
440
+ gap: 6px;
441
+ align-items: flex-end;
442
+ height: 18px;
443
+ }
444
+
445
+ .speaking-indicator span {
446
+ width: 3px;
447
+ height: 8px;
448
+ background: rgba(255, 255, 255, 0.85);
449
+ border-radius: 2px;
450
+ animation: speakPulse 800ms infinite ease-in-out;
451
+ }
452
+
453
+ .speaking-indicator span:nth-child(2) {
454
+ animation-delay: 100ms;
455
+ }
456
+
457
+ .speaking-indicator span:nth-child(3) {
458
+ animation-delay: 200ms;
459
+ }
460
+
461
+ .speaking-indicator span:nth-child(4) {
462
+ animation-delay: 300ms;
463
+ }
464
+
465
+ @keyframes speakPulse {
466
+ 0%, 100% {
467
+ height: 8px;
468
+ opacity: .6;
469
+ }
470
+
471
+ 50% {
472
+ height: 18px;
473
+ opacity: 1;
474
+ }
475
+ }
476
+
477
+ /* Spacer + floating button */
478
+ .scroll-spacer {
479
+ height: var(--input-gap, 120px);
480
+ flex: 0 0 auto;
481
+ }
482
+
483
+ .scroll-to-bottom-btn {
484
+ position: fixed;
485
+ right: 16px;
486
+ bottom: calc(var(--input-gap, 120px) + 16px);
487
+ width: 51px;
488
+ height: 51px;
489
+ border-radius: 50%;
490
+ border: none;
491
+ background: #56cdc2;
492
+ color: #fff;
493
+ font-size: 31px;
494
+ box-shadow: 0 6px 16px rgba(0, 0, 0, 0.25);
495
+ cursor: pointer;
496
+ transition: transform 0.15s ease, box-shadow 0.2s ease, background 0.2s ease;
497
+ z-index: 20;
498
+ }
499
+
500
+ .scroll-to-bottom-btn:hover {
501
+ transform: translateY(-2px);
502
+ box-shadow: 0 10px 22px rgba(0, 0, 0, 0.3);
503
+ background: #56cdc2;
504
+ }
505
+
506
+
507
+
508
+ /* Toggle Buttons Container */
509
+ .toggle-buttons-container {
510
+ display: flex;
511
+ gap: 10px;
512
+ align-items: center;
513
+ margin-right: 15px;
514
+ position: relative;
515
+ right: -16vw;
516
+ }
517
+
518
+ /* Disabled state for toggle buttons */
519
+ .toggle-btn.modern:disabled {
520
+ opacity: 0.5;
521
+ cursor: not-allowed;
522
+ box-shadow: none;
523
+ }
524
+
525
+ .toggle-btn {
526
+ padding: 8px 12px;
527
+ border: 2px solid #5d91c3;
528
+ background: transparent;
529
+ color: white;
530
+ border-radius: 20px;
531
+ cursor: pointer;
532
+ font-size: 0.9vw;
533
+ font-weight: bold;
534
+ transition: all 0.3s ease;
535
+ min-width: 49px;
536
+ white-space: nowrap;
537
+ }
538
+
539
+ .toggle-btn:hover {
540
+ background: rgba(93, 145, 195, 0.2);
541
+ transform: scale(1.05);
542
+ }
543
+
544
+ .toggle-btn.active {
545
+ background: #5d91c3;
546
+ color: white;
547
+ box-shadow: 0 0 10px rgba(93, 145, 195, 0.5);
548
+ }
549
+
550
+ .toggle-btn.voice-toggle {
551
+ background-color: #e5e7eb;
552
+ border: none;
553
+ padding: 8px 12px;
554
+ border-radius: 6px;
555
+ transition: background 0.2s;
556
+ }
557
+
558
+ .toggle-btn.voice-toggle.active {
559
+ background-color: #7ed957; /* Soft green for enabled */
560
+ }
561
+
562
+ .toggle-btn.voice-toggle.muted {
563
+ background-color: #ff7b7b; /* Soft red for disabled */
564
+ }
565
+
566
+ .voice-icon {
567
+ width: 24px;
568
+ height: 24px;
569
+ vertical-align: middle;
570
+ }
571
+
572
+ /* Modern Toggle Buttons */
573
+ .toggle-buttons-container.modern-toggle {
574
+ display: flex;
575
+ gap: 18px;
576
+ align-items: center;
577
+ margin-right: 15px;
578
+ position: relative;
579
+ right: -16vw;
580
+ }
581
+
582
+ .toggle-btn.modern {
583
+ background: #f5f7fa;
584
+ border: none;
585
+ border-radius: 24px;
586
+ box-shadow: 0 2px 8px rgba(93,145,195,0.08);
587
+ padding: 8px 16px;
588
+ display: flex;
589
+ align-items: center;
590
+ transition: background 0.2s, box-shadow 0.2s, transform 0.2s;
591
+ cursor: pointer;
592
+ min-width: 48px;
593
+ height: 44px;
594
+ }
595
+
596
+ .toggle-btn.modern .toggle-icon {
597
+ width: 24px;
598
+ height: 24px;
599
+ margin: 0 auto;
600
+ display: block;
601
+ }
602
+
603
+ .toggle-btn.modern.active {
604
+ background: #e3fcec;
605
+ box-shadow: 0 4px 16px rgba(93,145,195,0.18);
606
+ transform: scale(1.08);
607
+ }
608
+
609
+ .toggle-btn.modern.muted {
610
+ background: #ffeaea;
611
+ }
612
+
613
+ .toggle-btn.modern:hover {
614
+ background: #eaf1fb;
615
+ box-shadow: 0 6px 18px rgba(93,145,195,0.18);
616
+ transform: scale(1.05);
617
+ }
618
+
619
+ /* Responsive design for toggle buttons */
620
+ @media (max-width: 1200px) {
621
+ .toggle-btn {
622
+ font-size: 0.8vw;
623
+ min-width: 70px;
624
+ padding: 6px 10px;
625
+ }
626
+ }
627
+
628
+ @media (max-width: 768px) {
629
+ .toggle-buttons-container {
630
+ gap: 5px;
631
+ margin-right: 10px;
632
+ }
633
+
634
+ .toggle-btn {
635
+ font-size: 0.7vw;
636
+ min-width: 60px;
637
+ padding: 4px 8px;
638
+ }
639
+ }
640
+
641
+ .icon-btn {
642
+ background: none;
643
+ border: none;
644
+ padding: 4px;
645
+ cursor: pointer;
646
+ border-radius: 4px;
647
+ transition: background 0.2s;
648
+ margin-left: 0.5vw;
649
+ }
650
+
651
+ .icon-btn:hover {
652
+ background: rgba(93, 145, 195, 0.15);
653
+ }
654
+
655
+ .meta-icon {
656
+ width: 20px;
657
+ height: 20px;
658
+ display: inline-flex;
659
+ align-items: center;
660
+ justify-content: center;
661
+ vertical-align: middle;
662
+ }
663
+
664
+ .dropdown {
665
+ position: relative;
666
+ display: inline-block;
667
+ }
668
+
669
+ .dropdown-menu {
670
+ position: absolute;
671
+ right: 0;
672
+ top: 28px;
673
+ background: #fff;
674
+ border: 1px solid #e5e7eb;
675
+ border-radius: 8px;
676
+ box-shadow: 0 2px 8px rgba(0,0,0,0.12);
677
+ min-width: 140px;
678
+ z-index: 100;
679
+ padding: 4px 0;
680
+ }
681
+
682
+ .dropdown-item {
683
+ display: flex;
684
+ align-items: center;
685
+ gap: 8px;
686
+ background: none;
687
+ border: none;
688
+ width: 100%;
689
+ padding: 8px 16px;
690
+ font-size: 15px;
691
+ color: #222;
692
+ cursor: pointer;
693
+ transition: background 0.2s;
694
+ }
695
+
696
+ .dropdown-item:hover {
697
+ background: #f5f5f5;
698
+ }
699
+
700
+ .copy-tick {
701
+ color: #fff;
702
+ font-size: 20px;
703
+ font-weight: bold;
704
+ width: 20px;
705
+ height: 20px;
706
+ display: inline-flex;
707
+ align-items: center;
708
+ justify-content: center;
709
+ vertical-align: middle;
710
+ transition: color 0.2s;
711
+ }
712
+
713
+ .message-meta .icon-btn {
714
+ margin-left: 6px;
715
+ }
716
+
717
+ .tutor-video {
718
+ position: fixed;
719
+ right: 24px;
720
+ bottom: 80px;
721
+ width: 320px;
722
+ height: 180px;
723
+ border-radius: 12px;
724
+ box-shadow: 0 4px 16px rgba(0,0,0,0.25);
725
+ z-index: 2000;
726
+ background: #000;
727
+ }
728
+
729
+ @media (max-width: 600px) {
730
+ .tutor-video {
731
+ width: 90vw;
732
+ height: 28vw;
733
+ right: 5vw;
734
+ bottom: 5vw;
735
+ }
736
+ }
737
+
738
+ .blinking-cursor {
739
+ font-weight: bold;
740
+ color: white;
741
+ animation: blink 1s steps(1) infinite;
742
+ }
743
+
744
+ @keyframes blink {
745
+ 0%, 50% {
746
+ opacity: 1;
747
+ }
748
+
749
+ 51%, 100% {
750
+ opacity: 0;
751
+ }
752
+ }
753
+
754
+
755
+
756
+ /* User Guide Popup Centered, Four-Side Border, Previous Font/Color, No Animation */
757
+ .user-guide-modal {
758
+ position: fixed;
759
+ top: 52%;
760
+ left: 50%;
761
+ transform: translate(-50%, -50%);
762
+ height: 38vw;
763
+ width: 50vw;
764
+ background: linear-gradient(135deg, #fff 80%, #e3fcec 100%);
765
+ color: #222;
766
+ box-shadow: 0 12px 40px rgba(93, 145, 195, .22);
767
+ border-radius: 18px;
768
+ padding: 1vw;
769
+ z-index: 2001;
770
+ overflow: visible; /* Allow button to overflow outside modal */
771
+ border: 10px solid #009688; /* Four-side border */
772
+ box-sizing: border-box;
773
+ }
774
+
775
+ .close-icon {
776
+ position: absolute;
777
+ top: 1vw;
778
+ right: 1vw;
779
+ background: none;
780
+ border: none;
781
+ font-size: 2vw;
782
+ color: #2b6296;
783
+ cursor: pointer;
784
+ z-index: 2010;
785
+ transition: color 0.2s;
786
+ }
787
+
788
+ .user-guide-close-icon:hover {
789
+ color: #009688;
790
+ }
791
+
792
+ /* Overlay for closing on outside click */
793
+ .user-guide-overlay {
794
+ position: fixed;
795
+ inset: 0;
796
+ background: rgba(0,0,0,0.35);
797
+ z-index: 2000;
798
+ }
799
+
800
+ @media (max-width: 600px) {
801
+ .user-guide-modal {
802
+ width: 90vw;
803
+ height: 60vw;
804
+ border-radius: 12px;
805
+ padding: 4vw 2vw 2vw 2vw;
806
+ }
807
+
808
+ .user-guide-modal .close-icon {
809
+ font-size: 6vw;
810
+ width: 6vw;
811
+ height: 6vw;
812
+ }
813
+
814
+ .close-icon {
815
+ font-size: 6vw;
816
+ top: 2vw;
817
+ right: 2vw;
818
+ }
819
+ }
820
+
821
+
822
+ .user-guide-modal li {
823
+ line-height: 1.7;
824
+ font-size: 1.1vw;
825
+ background: rgba(93, 145, 195, .07);
826
+ padding: .5vw .5vw;
827
+ box-shadow: 0 2px 8px rgba(93, 145, 195, .06);
828
+ }
829
+
830
+ .user-guide-modal li b {
831
+ color: #2b6296;
832
+ font-size: 1.15vw;
833
+ }
834
+
835
+ .user-guide-modal ol {
836
+ overflow-y: auto;
837
+ max-height: 34.6vw; /* Adjust as needed for your modal size */
838
+ }
839
+
840
+ .user-guide-modal h2 {
841
+ font-size: 2.2vw;
842
+ margin-bottom: 1.5vw;
843
+ color: #2b6296;
844
+ text-align: center;
845
+ font-weight: bold;
846
+ letter-spacing: 1px;
847
+ }
848
+
849
+
850
+
851
+
852
+
853
+
854
+
855
+ /* Close button styles */
856
+ .user-guide-close-icon {
857
+ display: block;
858
+ position: absolute;
859
+ top: 1vw;
860
+ right: 1vw;
861
+ background: none;
862
+ border: none;
863
+ font-size: 2vw;
864
+ color: #2b6296;
865
+ cursor: pointer;
866
+ z-index: 2010;
867
+ transition: color 0.2s;
868
+ }
869
+
870
+ .user-guide-close-icon:hover {
871
+ color: #009688;
872
+ }
873
+
874
+ .user-guide-close-icon {
875
+ font-size: 6vw;
876
+ top: 2vw;
877
+ right: 2vw;
878
+ }
879
+
880
+
881
+
882
+ /* Close button styles */
883
+ .user-guide-close-icon {
884
+ display: block;
885
+ position: absolute;
886
+ top: 1vw;
887
+ right: 1vw;
888
+ background: none;
889
+ border: none;
890
+ font-size: 2vw;
891
+ color: #2b6296;
892
+ cursor: pointer;
893
+ z-index: 2010;
894
+ transition: color 0.2s;
895
+ }
896
+
897
+ .user-guide-close-icon:hover {
898
+ color: #009688;
899
+ }
900
+
901
+ /* Overlay for closing on outside click */
902
+ .user-guide-overlay {
903
+ position: fixed;
904
+ inset: 0;
905
+ background: rgba(0,0,0,0.35);
906
+ z-index: 2000;
907
+ }
908
+
909
+ @media (max-width: 600px) {
910
+ .user-guide-modal {
911
+ width: 90vw;
912
+ height: 60vw;
913
+ border-radius: 12px;
914
+ padding: 4vw 2vw 2vw 2vw;
915
+ }
916
+
917
+ .user-guide-modal .close-icon {
918
+ font-size: 6vw;
919
+ width: 6vw;
920
+ height: 6vw;
921
+ }
922
+
923
+ .user-guide-close-icon {
924
+ font-size: 6vw;
925
+ top: 2vw;
926
+ right: 2vw;
927
+ }
928
+ }
929
+
930
+
931
+ .user-guide-close-icon {
932
+ position: absolute;
933
+ top: -22px; /* Move above modal, adjust as needed */
934
+ right: -22px; /* Move to the right outside modal, adjust as needed */
935
+ background: #009688; /* Match border color */
936
+ border: none;
937
+ width: 44px;
938
+ height: 44px;
939
+ border-radius: 50%;
940
+ display: flex;
941
+ align-items: center;
942
+ justify-content: center;
943
+ font-size: 2vw;
944
+ color: black;
945
+ cursor: pointer;
946
+ z-index: 2010;
947
+ box-shadow: 0 2px 8px rgba(93,145,195,0.18);
948
+ transition: background 0.2s, color 0.2s;
949
+ ;
950
+ }
951
+
952
+ .user-guide-close-icon:hover {
953
+ background: white;
954
+ color: black;
955
+ }
956
+
957
+
958
+ @keyframes fadeInScale {
959
+
960
+ from {
961
+ opacity: 0;
962
+ transform: translate(-50%, -50%) scale(0.85);
963
+ }
964
+
965
+
966
+
967
+ to {
968
+ opacity: 1;
969
+ transform: translate(-50%, -50%) scale(1);
970
+ }
971
+ }
972
+
973
+
974
+
975
+ .error-text {
976
+ color: black;
977
+ background: #ffccccad;
978
+ font-size: 2vw;
979
+ font-weight: bold;
980
+ text-align: center;
981
+ padding: 8px 12px;
982
+ border-radius: 5px;
983
+ cursor: pointer;
984
+ width: auto;
985
+ margin-top: 10px;
986
+ display: inline-block;
987
+ white-space: nowrap;
988
+ transition: background 0.3s;
989
+ }
990
+
991
+
992
+
993
+ .error-text:hover {
994
+ background: #ffaaaa;
995
+ }
996
+
997
+
998
+
999
+ .popup-overlay {
1000
+ position: fixed;
1001
+ inset: 0;
1002
+ background: rgba(0, 0, 0, 0.5);
1003
+ display: flex;
1004
+ justify-content: center;
1005
+ align-items: center;
1006
+ z-index: 1000;
1007
+ }
1008
+
1009
+
1010
+
1011
+ .popup-box {
1012
+ background: white;
1013
+ padding: 20px;
1014
+ border-radius: 8px;
1015
+ width: 300px;
1016
+ text-align: center;
1017
+ box-shadow: 0px 4px 10px rgba(0, 0, 0, 0.2);
1018
+ }
1019
+
1020
+
1021
+
1022
+ .popup-box h3 {
1023
+ font-size: 18px;
1024
+ margin-bottom: 10px;
1025
+ }
1026
+
1027
+
1028
+
1029
+ .popup-box p {
1030
+ font-size: 14px;
1031
+ color: #555;
1032
+ }
1033
+
1034
+
1035
+
1036
+ .popup-button {
1037
+ background: #007bff;
1038
+ color: white;
1039
+ border: none;
1040
+ padding: 10px 20px;
1041
+ font-size: 14px;
1042
+ border-radius: 5px;
1043
+ cursor: pointer;
1044
+ margin-top: 10px;
1045
+ }
1046
+
1047
+
1048
+
1049
+ .popup-button:hover {
1050
+ background: #0056b3;
1051
+ }
1052
+
1053
+
1054
+
1055
+ /* Typing indicator */
1056
+
1057
+ .typing-indicator {
1058
+ display: flex;
1059
+ align-items: center;
1060
+ gap: 8px;
1061
+ color: #ffffff;
1062
+ font-size: 1.2vw;
1063
+ margin-left: 4vw;
1064
+ margin-top: 1vw;
1065
+ background: rgba(255, 255, 255, 0.2);
1066
+ padding: 0.8vw 1.5vw;
1067
+ border-radius: 2vw;
1068
+ width: fit-content;
1069
+ animation: fadeIn 0.3s ease-in-out;
1070
+ }
1071
+
1072
+
1073
+
1074
+ .typing-indicator span {
1075
+ width: 10px;
1076
+ height: 10px;
1077
+ background-color: #ffffff;
1078
+ border-radius: 50%;
1079
+ display: inline-block;
1080
+ animation: typingDots 1.5s infinite ease-in-out;
1081
+ }
1082
+
1083
+
1084
+
1085
+ .typing-indicator span:nth-child(1) {
1086
+ animation-delay: 0s;
1087
+ }
1088
+
1089
+
1090
+
1091
+ .typing-indicator span:nth-child(2) {
1092
+ animation-delay: 0.2s;
1093
+ }
1094
+
1095
+
1096
+
1097
+ .typing-indicator span:nth-child(3) {
1098
+ animation-delay: 0.4s;
1099
+ }
1100
+
1101
+
1102
+
1103
+ @keyframes typingDots {
1104
+
1105
+ 0%, 100% {
1106
+ transform: scale(0.8);
1107
+ opacity: 0.3;
1108
+ }
1109
+
1110
+
1111
+
1112
+ 50% {
1113
+ transform: scale(1);
1114
+ opacity: 1;
1115
+ }
1116
+ }
1117
+
1118
+ .voice-chat-container {
1119
+ padding: 16px;
1120
+ font-family: Arial, sans-serif;
1121
+ }
1122
+
1123
+ .controls button {
1124
+ padding: 8px 12px;
1125
+ }
1126
+
1127
+ .voice-root {
1128
+ position: relative;
1129
+ display: inline-block;
1130
+ }
1131
+
1132
+ .mic-toggle {
1133
+ background: transparent;
1134
+ border: none;
1135
+ font-size: 20px;
1136
+ cursor: pointer;
1137
+ }
1138
+
1139
+ .mic-popup {
1140
+ position: fixed;
1141
+ left: 50%;
1142
+ top: 18%;
1143
+ transform: translateX(-50%);
1144
+ width: 420px;
1145
+ background: #fff;
1146
+ border-radius: 10px;
1147
+ box-shadow: 0 10px 30px rgba(0,0,0,0.15);
1148
+ z-index: 1200;
1149
+ overflow: hidden;
1150
+ font-family: system-ui, -apple-system, "Segoe UI", Roboto, "Helvetica Neue", Arial;
1151
+ }
1152
+
1153
+ .mic-header {
1154
+ display: flex;
1155
+ justify-content: space-between;
1156
+ align-items: center;
1157
+ padding: 10px 14px;
1158
+ border-bottom: 1px solid #eee;
1159
+ }
1160
+
1161
+ .mic-header .close {
1162
+ background: transparent;
1163
+ border: none;
1164
+ font-size: 16px;
1165
+ cursor: pointer;
1166
+ }
1167
+
1168
+ .mic-body {
1169
+ padding: 12px;
1170
+ display: flex;
1171
+ flex-direction: column;
1172
+ gap: 10px;
1173
+ }
1174
+
1175
+ .waveform {
1176
+ width: 100%;
1177
+ height: 90px;
1178
+ background: #fafafa;
1179
+ border-radius: 6px;
1180
+ display: block;
1181
+ }
1182
+
1183
+ .transcript-area {
1184
+ display: flex;
1185
+ flex-direction: column;
1186
+ gap: 8px;
1187
+ }
1188
+
1189
+ .status {
1190
+ display: flex;
1191
+ align-items: center;
1192
+ gap: 8px;
1193
+ color: #444;
1194
+ font-size: 13px;
1195
+ }
1196
+
1197
+ .status .dot {
1198
+ width: 10px;
1199
+ height: 10px;
1200
+ border-radius: 50%;
1201
+ background: #bbb;
1202
+ display: inline-block;
1203
+ }
1204
+
1205
+ .status .dot.recording {
1206
+ background: #d23;
1207
+ box-shadow: 0 0 8px rgba(210,35,35,0.35);
1208
+ }
1209
+
1210
+ .popup-transcript {
1211
+ min-height: 52px;
1212
+ background: #f7f8fa;
1213
+ padding: 8px;
1214
+ border-radius: 6px;
1215
+ white-space: pre-wrap;
1216
+ font-size: 14px;
1217
+ color: #222;
1218
+ }
1219
+
1220
+ .popup-transcript .hint {
1221
+ color: #666;
1222
+ font-size: 13px;
1223
+ }
1224
+
1225
+ .popup-transcript pre {
1226
+ margin: 0;
1227
+ font-family: inherit;
1228
+ white-space: pre-wrap;
1229
+ }
1230
+
1231
+ .mic-actions {
1232
+ display: flex;
1233
+ gap: 8px;
1234
+ justify-content: flex-end;
1235
+ margin-top: 6px;
1236
+ }
1237
+
1238
+ .mic-actions button {
1239
+ padding: 6px 10px;
1240
+ border-radius: 6px;
1241
+ border: 1px solid #ddd;
1242
+ background: #fff;
1243
+ cursor: pointer;
1244
+ }
1245
+
1246
+ .mic-actions button[disabled] {
1247
+ opacity: 0.5;
1248
+ cursor: not-allowed;
1249
+ }
1250
+
1251
+ .status .on {
1252
+ color: green;
1253
+ font-weight: 600;
1254
+ }
1255
+
1256
+ .error {
1257
+ color: #b00020;
1258
+ font-size: 13px;
1259
+ }
src/app/chat/chat.component.html CHANGED
@@ -1,120 +1,386 @@
1
- <div class="chat-container">
2
- <header class="header-container">
3
- <div class="logo">
4
- <a (click)="goToHome()" routerLink="/home" class="brand-link">
5
- <img src="assets/images/pykara-logo.png" alt="Pykara Logo" />
6
- </a>
7
- <span class="product-name">Py-Learn</span>
8
- </div>
9
- <div class="header-title">
10
- <h1>Grammar Chat</h1>
11
- </div>
12
- <div class="home-btn">
13
- <a (click)="goToHome()" routerLink="/home">
14
- <img src="assets/images/home.png" alt="Home" class="home-icon" />
15
- </a>
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
16
  </div>
17
- </header>
18
-
19
- <main class="chat-box" #chatBox>
20
- <img src="assets/images/chat/chatbg.png" alt="Chat Background" class="chat-bg" />
21
- <ng-container *ngFor="let message of messages">
22
- <div *ngIf="message.from === 'user'" class="message-wrapper user">
23
- <div class="profile-pic">
24
- <img src="assets/images/chat/rabbit.png" alt="User Profile Picture" />
25
- </div>
26
- <div class="message">
27
- {{ message.text }}
28
- <div class="message-timestamp">{{ message.timestamp }}</div>
29
- </div>
30
  </div>
31
- <div *ngIf="message.from === 'ai'" class="message-wrapper ai">
32
- <div class="profile-pic">
33
- <img src="assets/images/chat/lion.png" alt="AI Profile Picture" />
 
 
 
 
 
 
 
 
 
 
34
  </div>
35
- <div class="message structured-response">
36
- <div [innerHTML]="formatStructuredResponse(message.text)"></div>
37
 
38
- <div class="sources" *ngIf="message.source_ids?.length">
39
- <span class="sources-label">Sources:</span>
40
- <span class="source-chip" *ngFor="let s of message.source_ids">
41
- {{ displaySource(s) }}
42
- </span>
43
- </div>
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
44
 
45
- <div class="message-timestamp">{{ message.timestamp }}</div>
46
  </div>
 
 
 
 
 
 
 
 
 
47
  </div>
48
- </ng-container>
49
- <div *ngIf="isTyping" class="typing-indicator" role="status" aria-live="polite">
50
- AI is typing
51
- <span></span>
52
- <span></span>
53
- <span></span>
54
- </div>
55
- </main>
56
-
57
- <section class="input-container">
58
- <div class="input-box">
59
- <textarea [(ngModel)]="userInput"
60
- (focus)="showHardcodedQuestions()"
61
- (blur)="hideHardcodedQuestions()"
62
- (input)="adjustTextareaHeight($event); getSuggestions()"
63
- (keydown)="handleEnterPress($event)"
64
- placeholder="Type your message here..."
65
- [disabled]="isSpeaking"
66
- aria-label="Message input"></textarea>
67
- <button (click)="isSpeaking ? stopSpeaking() : handleButtonClick()"
68
- [disabled]="isSubmitting"
69
- aria-label="Send or voice">
70
- <img [src]="isSpeaking ? 'assets/images/chat/stop.png' : getButtonIcon()"
71
- alt="Action"
72
- class="button-icon" />
73
- </button>
74
  </div>
75
- <div class="hardcoded-questions-container" *ngIf="showQuestions">
76
- <!-- Loading -->
77
- <div class="hardcoded-question" *ngIf="pdfLoading">Loading questions…</div>
78
-
79
- <!-- Generated questions list -->
80
- <div class="hardcoded-question"
81
- *ngFor="let q of pdfQuestions; let i = index"
82
- (click)="selectGeneratedQuestion(q)"
83
- role="button"
84
- tabindex="0"
85
- (keydown.enter)="selectGeneratedQuestion(q)"
86
- aria-label="Suggested question">
87
- {{ q }}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
88
  </div>
89
 
90
- <!-- Empty state -->
91
- <div class="hardcoded-question" *ngIf="!pdfLoading && !pdfQuestions.length">
92
- No questions available from the textbook.
 
93
  </div>
 
 
94
  </div>
95
- </section>
96
-
97
- <div class="listening-box" *ngIf="isListening" role="dialog" aria-modal="true">
98
- <div class="listening-content">
99
- <img src="assets/images/chat/microphone-icon.png" alt="Microphone" class="microphone-image" />
100
- <p *ngIf="!errorMessage">Listening...</p>
101
- <div class="listening-actions">
102
- <button class="mute-btn" (click)="muteMicrophone()">
103
- <img src="assets/images/chat/mic.png" alt="Mute" />
104
- </button>
105
- <button class="close-btn" (click)="stopListening()">
106
- <img src="assets/images/chat/cross.png" alt="Close" />
107
- </button>
108
- </div>
109
- <p *ngIf="errorMessage" class="error-text" (click)="openMicrophoneSettings()">{{ errorMessage }}</p>
110
  </div>
111
  </div>
 
112
 
113
- <div class="popup-overlay" *ngIf="showMicPopup">
114
- <div class="popup-box">
115
- <h3>Microphone access required</h3>
116
- <p>To use voice mode, you'll need to enable your microphone and try again.</p>
117
- <button class="popup-button" (click)="closeMicrophonePopup()">OK</button>
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
118
  </div>
119
  </div>
120
  </div>
 
 
 
 
 
 
 
 
 
 
 
1
+ <!-- Full chat.component.html matching chat.component.ts -->
2
+ <div class="chat-container">
3
+ <div class="header-container">
4
+ <div class="logo">
5
+ <a (click)="goToHome()" routerLink="/home" class="brand-link">
6
+ <img src="assets/images/pykara-logo.png" alt="Pykara Logo" />
7
+ </a>
8
+ <span class="product-name">Py-Learn</span>
9
+ </div>
10
+
11
+ <div class="header-title">
12
+ <h1>Grammar Chat</h1>
13
+ </div>
14
+
15
+ <div class="toggle-buttons-container modern-toggle">
16
+ <button class="toggle-btn modern"
17
+ [class.active]="isVoiceEnabled"
18
+ [class.muted]="!isVoiceEnabled"
19
+ (click)="toggleVoice()"
20
+ title="Voice On/Off">
21
+ <img [src]="isVoiceEnabled ? 'assets/images/chat/volume.png' : 'assets/images/chat/volume-mute.png'"
22
+ alt="Voice Toggle" class="toggle-icon" />
23
+ </button>
24
+
25
+ <button class="toggle-btn modern"
26
+ [class.active]="isTutorEnabled"
27
+ (click)="toggleTutor()"
28
+ title="Tutor On/Off">
29
+ <img [src]="isTutorEnabled ? 'assets/images/chat/video.png' : 'assets/images/chat/no-video.png'"
30
+ alt="Tutor" class="toggle-icon" />
31
+ </button>
32
+
33
+ <button class="toggle-btn modern" disabled
34
+ [class.active]="isSyllabusEnabled"
35
+ (click)="toggleSyllabus()"
36
+ title="Syllabus On/Off">
37
+ <img [src]="isSyllabusEnabled ? 'assets/images/chat/syllabus.png' : 'assets/images/chat/internet.png'"
38
+ alt="Syllabus" class="toggle-icon" />
39
+ </button>
40
+
41
+ <button class="toggle-btn modern" disabled
42
+ [class.active]="isBreadcrumbEnabled"
43
+ (click)="toggleBreadcrumb()"
44
+ title="Breadcrumb On/Off">
45
+ <img src="assets/images/chat/breadcrumbs.png" alt="Breadcrumb" class="toggle-icon" />
46
+ </button>
47
+
48
+ <button class="toggle-btn modern" (click)="openUserGuide()" title="Open User Guide">
49
+ <img src="assets/images/chat/info.png" alt="User Guide" class="toggle-icon" />
50
+ </button>
51
+ </div>
52
+
53
+ <div class="home-btn">
54
+ <a (click)="goToHome()" routerLink="/home">
55
+ <img src="assets/images/home.png" alt="Home" class="home-icon" />
56
+ </a>
57
+ </div>
58
+ </div>
59
+
60
+ <div class="chat-box" #chatBox>
61
+ <img src="assets/images/chat/chatbg.png" alt="Chat Background" class="chat-bg" />
62
+
63
+ <div *ngFor="let message of messages; let i = index">
64
+ <!-- User message -->
65
+ <div *ngIf="message.from === 'user'" class="message-wrapper user">
66
+ <div class="profile-pic">
67
+ <img src="assets/images/chat/rabbit.png" alt="User" />
68
+ </div>
69
+ <div class="message">
70
+ {{ message.text }}
71
+ <div class="message-timestamp">{{ message.timestamp }}</div>
72
+ </div>
73
  </div>
74
+
75
+ <!-- AI message -->
76
+ <div *ngIf="message.from === 'ai' && !message.pending" class="message-wrapper ai">
77
+ <div class="profile-pic">
78
+ <img src="assets/images/chat/natasha.png" alt="AI" />
 
 
 
 
 
 
 
 
79
  </div>
80
+
81
+ <div class="message structured-response">
82
+ <div [innerHTML]="formatStructuredResponse(message.text)"></div>
83
+
84
+ <!-- Follow-ups -->
85
+ <div class="followups" *ngIf="message.suggestions?.length">
86
+ <div class="followups-title">Follow-up suggestions</div>
87
+ <button class="followup-chip"
88
+ *ngFor="let s of message.suggestions"
89
+ (click)="selectHardcodedQuestion(s)"
90
+ title="Ask this next">
91
+ {{ s }}
92
+ </button>
93
  </div>
 
 
94
 
95
+ <div class="message-timestamp">
96
+ {{ message.timestamp }}
97
+
98
+ <!-- Copy -->
99
+ <button class="icon-btn" (click)="copyToClipboard(message.text, i)"
100
+ [attr.aria-label]="copySuccessIndex === i ? 'Copied' : 'Copy message'"
101
+ title="Copy message">
102
+ <ng-container *ngIf="copySuccessIndex === i; else showCopy">
103
+ <span class="copy-tick">&#10003;</span>
104
+ </ng-container>
105
+ <ng-template #showCopy>
106
+ <img src="assets/images/chat/copy.png" alt="Copy" class="meta-icon" />
107
+ </ng-template>
108
+ </button>
109
+
110
+ <!-- Audio: play / stop -->
111
+ <button class="icon-btn"
112
+ *ngIf="message.audioUrl && isReadingIndex !== i"
113
+ (click)="playServerAudioForMessage(i)"
114
+ aria-label="Play audio" title="Play audio">
115
+ <img src="assets/images/chat/speaker.png" alt="Play audio" class="meta-icon" />
116
+ </button>
117
+
118
+ <button class="icon-btn"
119
+ *ngIf="message.audioUrl && isReadingIndex === i"
120
+ (click)="stopReadAloud()"
121
+ aria-label="Stop audio" title="Stop audio">
122
+ <img src="assets/images/chat/stop-button.png" alt="Stop audio" class="meta-icon" />
123
+ </button>
124
+
125
+ <!-- Generate audio on demand -->
126
+ <button class="icon-btn"
127
+ *ngIf="!message.audioUrl"
128
+ (click)="synthesizeAudioAndPlay(i)"
129
+ [disabled]="message.isSynthesizing"
130
+ aria-label="Generate audio"
131
+ title="Generate audio">
132
+ <ng-container *ngIf="!message.isSynthesizing; else audioSpinner">
133
+ <img src="assets/images/chat/speaker.png" alt="Generate audio" class="meta-icon" />
134
+ </ng-container>
135
+ <ng-template #audioSpinner>
136
+ <img src="assets/images/chat/loading-spinner.gif" alt="Generating audio" class="meta-icon" />
137
+ </ng-template>
138
+ </button>
139
+
140
+ <!-- Video: generate, play, stop -->
141
+ <!-- Show Generate when no cached video -->
142
+ <button class="icon-btn"
143
+ *ngIf="!message.videoUrl && !message.isVideoSynthesizing"
144
+ (click)="synthesizeVideoAndPlay(i)"
145
+ aria-label="Generate video"
146
+ title="Generate video">
147
+ <img src="assets/images/chat/video.png" alt="Generate video" class="meta-icon" />
148
+ </button>
149
+
150
+ <!-- Spinner while generating video -->
151
+ <button class="icon-btn" *ngIf="!message.videoUrl && message.isVideoSynthesizing" disabled>
152
+ <img src="assets/images/chat/loading-spinner.gif" alt="Generating video" class="meta-icon" />
153
+ </button>
154
+
155
+ <!-- Toggle inline video (single button). Show video.png initially, switches to no-video.png when enabled -->
156
+ <button class="icon-btn"
157
+ *ngIf="message.videoUrl"
158
+ (click)="toggleMessageVideo(i)"
159
+ [class.active]="isVideoEnabledIndex[i]"
160
+ aria-label="Toggle video"
161
+ title="Toggle video">
162
+ <img [src]="isVideoEnabledIndex[i] ? 'assets/images/chat/no-video.png' : 'assets/images/chat/video.png'"
163
+ alt="Video" class="meta-icon" />
164
+ </button>
165
 
 
166
  </div>
167
+
168
+ <!-- Inline video player -->
169
+ <video *ngIf="isVideoEnabledIndex[i] && message.playingVideoUrl"
170
+ id="inline-video-{{i}}"
171
+ class="tutor-video"
172
+ [src]="message.playingVideoUrl"
173
+ (ended)="onMessageVideoEnded(i)"
174
+ controls autoplay>
175
+ </video>
176
  </div>
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
177
  </div>
178
+ </div>
179
+
180
+ <!-- Typing indicator -->
181
+ <div *ngIf="isTyping" class="typing-indicator" aria-live="polite">
182
+ Tutor's Response
183
+ <span></span><span></span><span></span>
184
+ </div>
185
+ </div>
186
+
187
+ <!-- Input -->
188
+ <div class="input-container">
189
+ <div class="input-box">
190
+ <textarea [(ngModel)]="userInput"
191
+ (focus)="showHardcodedQuestions()"
192
+ (blur)="hideHardcodedQuestions()"
193
+ (input)="adjustTextareaHeight($event)"
194
+ (keydown)="handleEnterPress($event)"
195
+ placeholder="Type your message here..."
196
+ [disabled]="isAiResponding">
197
+ </textarea>
198
+
199
+ <button (click)="handleButtonClick()"
200
+ [disabled]="isListening && !showMicPopup"
201
+ aria-label="Send or voice">
202
+ <img [src]="getButtonIcon()" alt="Action" class="button-icon" />
203
+ </button>
204
+
205
+ <!--<button class="mic-toggle" (click)="openMicrophonePopup()" [disabled]="isListening && !showMicPopup" title="Open microphone">🎤</button>-->
206
+
207
+ </div>
208
+
209
+ <button class="scroll-to-bottom-btn" (click)="scrollToBottom()" aria-label="Jump to latest">&#8595;</button>
210
+
211
+ <div class="hardcoded-questions-container" *ngIf="showQuestions">
212
+ <div *ngIf="pdfLoading" class="loading-row">Loading…</div>
213
+ <ng-container *ngIf="!pdfLoading">
214
+ <ng-container *ngIf="currentFollowups.length; else showPdfQs">
215
+ <div class="hint-row">Follow-up suggestions</div>
216
+ <button class="hardcoded-question"
217
+ *ngFor="let q of currentFollowups"
218
+ (mousedown)="selectHardcodedQuestion(q)"
219
+ [disabled]="isAiResponding">
220
+ {{ q }}
221
+ </button>
222
+ </ng-container>
223
+
224
+ <ng-template #showPdfQs>
225
+ <div class="hint-row">Questions from your textbook</div>
226
+ <ng-container *ngIf="pdfQuestions?.length; else noPdfQs">
227
+ <button class="hardcoded-question"
228
+ *ngFor="let q of pdfQuestions"
229
+ (mousedown)="selectHardcodedQuestion(q)"
230
+ [disabled]="isAiResponding">
231
+ {{ q }}
232
+ </button>
233
+ </ng-container>
234
+ <ng-template #noPdfQs>
235
+ <div class="hardcoded-question disabled">No grammar questions available.</div>
236
+ </ng-template>
237
+ </ng-template>
238
+ </ng-container>
239
+ </div>
240
+ </div>
241
+
242
+
243
+ <div class="mic-popup" *ngIf="showMicPopup">
244
+ <div class="mic-header">
245
+ <strong>Microphone</strong>
246
+ <button class="close" (click)="closeMicrophonePopup()">✕</button>
247
+ </div>
248
+
249
+ <div class="mic-body">
250
+ <canvas #waveformCanvas class="waveform" aria-hidden="true"></canvas>
251
+
252
+ <div class="transcript-area">
253
+ <div class="status">
254
+ <span class="dot" [class.recording]="isRecording"></span>
255
+ <span>{{ isRecording ? 'Recording…' : 'Ready' }}</span>
256
  </div>
257
 
258
+ <div #popupTranscriptEl class="popup-transcript">
259
+ <div class="hint" *ngIf="popupTranscript === 'Processing…'">Processing… please wait</div>
260
+ <pre *ngIf="popupTranscript && popupTranscript !== 'Processing…'">{{ popupTranscript }}</pre>
261
+ <div class="empty" *ngIf="!popupTranscript">Speak and click Done when finished</div>
262
  </div>
263
+
264
+ <div class="error" *ngIf="errorMessage">{{ errorMessage }}</div>
265
  </div>
266
+
267
+ <div class="mic-actions">
268
+ <button (click)="stopRecording()" [disabled]="!isRecording">Done</button>
269
+ <button (click)="confirmAndSendTranscript()" [disabled]="!popupTranscript || popupTranscript === 'Processing…'">Send</button>
270
+ <button (click)="closeMicrophonePopup()">Cancel</button>
 
 
 
 
 
 
 
 
 
 
271
  </div>
272
  </div>
273
+ </div>
274
 
275
+ <!-- User guide modal -->
276
+ <div class="user-guide-overlay" *ngIf="showUserGuide" (click)="closeUserGuide()">
277
+ <div class="user-guide-modal" (click)="$event.stopPropagation()">
278
+ <button class="user-guide-close-icon" (click)="closeUserGuide()" aria-label="Close">&times;</button>
279
+ <div class="user-guide-content">
280
+ <!-- Insert guide content here -->
281
+ <ol style="text-align: justify; font-size:15px;">
282
+ <li>
283
+ <b>Introduction</b><br>
284
+ <ul style="list-style: unset; margin-left:1.25rem;">
285
+ <li>The Chat Module is an interactive learning environment designed to assist learners through text and voice-based communication.</li>
286
+ <li>Users can type their question or select from follow-up suggestions, using either the text input or the microphone.</li>
287
+ <li>Predefined questions appear above the input field for quick access.</li>
288
+ <li>This creates an engaging and personalised learning experience similar to interacting with a real tutor.</li>
289
+ </ul>
290
+ </li>
291
+ <li>
292
+ <b>Starting the Chat</b><br>
293
+ <ul style="list-style: unset; margin-left:1.25rem;">
294
+ <li>When learners open the module, they will see a text input box and a microphone icon. They can type a question or activate the microphone to speak.</li>
295
+ <li>While the microphone is active, a listening popup appears with options to mute or stop recording. Once the learner finishes speaking or typing, their question is displayed in the chat area along with a timestamp.</li>
296
+ <li>This simple interface ensures that both typing and speaking interactions are smooth and user-friendly.</li>
297
+ </ul>
298
+ </li>
299
+ <li>
300
+ <b>Accessing the Syllabus</b><br>
301
+ <ul style="list-style: unset; margin-left:1.25rem;">
302
+ <li>Before learners begin, an administrator uploads the syllabus or textbook in digital format.</li>
303
+ <li>The system analyses the document and automatically generates a list of predefined questions based on the uploaded syllabus.</li>
304
+ <li>These predefined questions are displayed above the input field, allowing learners to choose any topic without needing to type.</li>
305
+ <li>When a learner selects a question, the system locates the relevant section from the syllabus and prepares an answer. The response appears instantly in the chat area in a clear and readable format.</li>
306
+ </ul>
307
+ </li>
308
+ <li>
309
+ <b>Receiving the Response</b><br>
310
+ <ul style="list-style: unset; margin-left:1.25rem;">
311
+ <li>
312
+ After a question is sent, the system generates an immediate response that includes:
313
+ <ul>
314
+ <li>A text-based explanation</li>
315
+ <li>An audio narration in the tutor’s real voice</li>
316
+ <li>A derived video explanation, when applicable</li>
317
+ </ul>
318
+ </li>
319
+ <li>The response is first produced as text. If the learner chooses to listen, the system plays an audio narration that has been synthetically generated using the real voice of the teacher.</li>
320
+ <li>The voice is not a generic computer voice; it has been trained and modelled on the actual tutor’s speech patterns, ensuring that the tone, pronunciation, and expression closely resemble the teacher’s natural way of speaking.</li>
321
+ <li>Similarly, when a video explanation is requested, the system displays a derived video of the teacher. This video is not a pre-recorded clip or animation, but is generated to resemble the real teacher’s voice and reactions.</li>
322
+ <li>All audio and video responses are created dynamically for each question, providing unique, real-time explanations. Learners can replay or stop the narration at any time, copy text responses, and follow the conversation naturally with the speaking indicator showing when the tutor’s voice is active.</li>
323
+ <li>By default, audio is muted; you can enable it as needed.</li>
324
+ </ul>
325
+ </li>
326
+ <li>
327
+ <b>Handling Out-of-Syllabus Questions</b><br>
328
+ <ul style="list-style: unset; margin-left:1.25rem;">
329
+ <li>If a learner asks a question that is not part of the uploaded syllabus or textbook, the system responds with the message: “This topic is out of syllabus.”</li>
330
+ <li>Only administrators can configure whether such questions can be answered using external information sources.</li>
331
+ <li>This ensures that all discussions remain within the approved syllabus unless authorised otherwise.</li>
332
+ </ul>
333
+ </li>
334
+ <li>
335
+ <b>Follow-Up and Progressive Learning</b><br>
336
+ <ul style="list-style: unset; margin-left:1.25rem;">
337
+ <li>After each response, the system displays related or next-level questions below the chat. This feature helps learners progress through topics in a logical sequence.</li>
338
+ <li>A breadcrumb trail is also displayed, showing the topic flow and subtopics covered during the conversation.</li>
339
+ <li>Learners can easily revisit previous points and continue from where they left off.</li>
340
+ </ul>
341
+ </li>
342
+ <li>
343
+ <b>Audio, Video, and Mode Controls</b><br>
344
+ <ul style="list-style: unset; margin-left:1.25rem;">
345
+ <li>
346
+ At the top of the chat interface, four control buttons provide flexibility and accessibility:
347
+ <ul>
348
+ <li>Audio Control – Enable or disable narration.</li>
349
+ <li>Video Control – Show or hide derived video explanations.</li>
350
+ <li>Syllabus Mode Control – Keep learning limited to syllabus topics.</li>
351
+ <li>Breadcrumb Control – Display or hide the topic trail.</li>
352
+ </ul>
353
+ </li>
354
+ <li>Only administrators can modify the syllabus mode to include out-of-syllabus responses.</li>
355
+ </ul>
356
+ </li>
357
+ <li>
358
+ <b>Interface and Usability</b><br>
359
+ <ul style="list-style: unset; margin-left:1.25rem;">
360
+ <li>The chat interface presents a clear, conversational layout between the learner and the tutor. Each message includes a profile icon and timestamp for a natural reading flow.</li>
361
+ <li>Typing indicators appear while the system prepares responses, and a scroll button allows quick access to the most recent messages.</li>
362
+ <li>The design is responsive and adapts to different devices such as desktops, tablets, etc.</li>
363
+ </ul>
364
+ </li>
365
+ <li>
366
+ <b>Summary</b><br>
367
+ <ul style="list-style: unset; margin-left:1.25rem;">
368
+ <li>The Chat Module provides an engaging, syllabus-focused learning experience where learners can type or speak their questions and receive immediate answers through text, real teacher voice, and derived video.</li>
369
+ <li>With predefined questions, real-time explanations, structured progression, and easy-to-use controls, this module offers a complete and intelligent conversational learning environment—all within a single platform.</li>
370
+ <li>Use this feature for summary-guided training.</li>
371
+ </ul>
372
+ </li>
373
+ </ol>
374
  </div>
375
  </div>
376
  </div>
377
+
378
+ <!-- Global Tutor Video (bottom-right) -->
379
+ <video *ngIf="isTutorEnabled && videoUrl"
380
+ class="tutor-video"
381
+ [src]="videoUrl"
382
+ (ended)="clearVideoUrl()"
383
+ (error)="clearVideoUrl()"
384
+ controls autoplay>
385
+ </video>
386
+ </div>
src/app/chat/chat.component.ts CHANGED
@@ -1,13 +1,13 @@
1
- import { Component, Inject, OnDestroy, PLATFORM_ID, ChangeDetectorRef } from '@angular/core';
2
  import { ApiService } from './api.service';
3
  import { FormsModule } from '@angular/forms';
4
- import { CommonModule } from '@angular/common';
5
  import { Router, RouterModule } from '@angular/router';
6
- import { isPlatformBrowser } from '@angular/common';
7
- import { ViewChild, ElementRef } from '@angular/core';
8
- import { Renderer2 } from '@angular/core';
9
  import { Subscription } from 'rxjs';
10
  import { finalize } from 'rxjs/operators';
 
 
 
11
  type Grade = 'lowergrade' | 'midgrade' | 'highergrade';
12
 
13
  @Component({
@@ -15,9 +15,35 @@ type Grade = 'lowergrade' | 'midgrade' | 'highergrade';
15
  standalone: true,
16
  imports: [FormsModule, CommonModule, RouterModule],
17
  templateUrl: './chat.component.html',
18
- styleUrl: './chat.component.css'
19
  })
20
  export class ChatComponent implements OnDestroy {
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
21
  showQuestions: boolean = false;
22
  isSubmitting: boolean = false;
23
 
@@ -27,15 +53,28 @@ export class ChatComponent implements OnDestroy {
27
 
28
  /** Chat state */
29
  userInput: string = '';
30
- messages: { from: string, text: string, timestamp: string; isPlaying?: boolean, source_ids?: string[]; }[] = [];
31
- isTyping: boolean = false;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
32
  @ViewChild('chatBox') chatBox!: ElementRef;
33
 
34
  /** Speech / mic state */
35
  isLoadingSpeech: boolean = false;
36
  selectedVoice: SpeechSynthesisVoice | null = null;
37
- errorMessage: string = "";
38
- recognition: any;
39
  speechSynthesisInstance: SpeechSynthesisUtterance | null = null;
40
  isListening: boolean = false;
41
  isProcessingSpeech: boolean = false;
@@ -44,9 +83,9 @@ export class ChatComponent implements OnDestroy {
44
 
45
  /** Suggestions for typed input (powered by PDF too) */
46
  suggestions: string[] = [];
47
-
48
  /** Popup */
49
- showMicPopup: boolean = false;
50
 
51
  /** Subscriptions */
52
  private responseSub?: Subscription;
@@ -59,43 +98,180 @@ export class ChatComponent implements OnDestroy {
59
  /** NEW: only allow follow-ups when last answer was grounded in textbook pages */
60
  private lastAnswerHasContext: boolean = false;
61
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
62
  constructor(
63
  private apiService: ApiService,
64
  private cdr: ChangeDetectorRef,
65
- @Inject(PLATFORM_ID,) private platformId: object,
 
66
  private router: Router,
67
  private renderer: Renderer2
68
  ) {
69
- // Log voices
70
- window.speechSynthesis.onvoiceschanged = () => {
71
- console.log("Available Voices:", window.speechSynthesis.getVoices());
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
72
  };
73
 
74
- if (isPlatformBrowser(this.platformId)) {
75
- const SpeechRecognition = (window as any).SpeechRecognition || (window as any).webkitSpeechRecognition;
76
- if (SpeechRecognition) {
77
- this.recognition = new SpeechRecognition();
78
- this.recognition.continuous = false;
79
- this.recognition.lang = 'en-US';
80
- this.recognition.interimResults = false;
81
-
82
- this.recognition.onresult = (event: any) => {
83
- if (event.results && event.results[0]) {
84
- const transcript = event.results[0][0].transcript.trim();
85
- this.userInput = transcript;
86
- this.sendMessage();
87
- this.recognition.stop();
88
- this.isListening = false;
89
- }
90
- };
91
 
92
- this.recognition.onerror = (event: any) => {
93
- console.error('Speech Recognition Error:', event.error);
94
- this.isProcessingSpeech = false;
95
- };
 
 
 
 
96
  }
97
- window.addEventListener('beforeunload', this.handleUnload);
98
- }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
99
  }
100
 
101
  private ensureGradeLevel(defaultGrade: Grade = 'lowergrade'): void {
@@ -108,6 +284,15 @@ export class ChatComponent implements OnDestroy {
108
  }
109
  }
110
 
 
 
 
 
 
 
 
 
 
111
  ngOnInit(): void {
112
  this.ensureGradeLevel();
113
  if (window.speechSynthesis.onvoiceschanged !== undefined) {
@@ -118,22 +303,22 @@ export class ChatComponent implements OnDestroy {
118
  this.loadVoices();
119
  }
120
 
121
- ngAfterViewChecked() {
122
- setTimeout(() => {
123
- this.scrollToBottom();
124
- }, 100);
125
- }
126
 
127
  ngOnDestroy(): void {
 
 
128
  if (isPlatformBrowser(this.platformId)) {
129
- if (window.speechSynthesis) {
130
- window.speechSynthesis.cancel();
131
- }
132
  window.removeEventListener('beforeunload', this.handleUnload);
 
133
  }
134
- if (this.responseSub && !this.responseSub.closed) {
135
- this.responseSub.unsubscribe();
136
- }
137
  }
138
 
139
  private handleUnload = (): void => {
@@ -142,15 +327,135 @@ export class ChatComponent implements OnDestroy {
142
  }
143
  };
144
 
145
- private scrollToBottom(): void {
146
- try {
147
- this.chatBox.nativeElement.scrollTop = this.chatBox.nativeElement.scrollHeight;
148
- } catch (err) { }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
149
  }
150
 
151
- /** Popup controls */
152
- openMicrophonePopup(): void { this.showMicPopup = true; }
153
- closeMicrophonePopup(): void { this.showMicPopup = false; }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
154
 
155
  /** Show questions on focus: initial (no answer yet) or follow-ups (after an answer) */
156
  showHardcodedQuestions(): void {
@@ -272,6 +577,7 @@ export class ChatComponent implements OnDestroy {
272
  this.userInput = '';
273
  this.isTyping = true;
274
  this.cdr.detectChanges();
 
275
  this.scrollToBottom();
276
 
277
  this.responseSub = this.apiService.explainGrammar(message)
@@ -296,6 +602,7 @@ export class ChatComponent implements OnDestroy {
296
  source_ids: sourceIds
297
  });
298
  this.cdr.detectChanges();
 
299
  this.scrollToBottom();
300
 
301
  this.lastQuestion = message;
@@ -320,6 +627,7 @@ export class ChatComponent implements OnDestroy {
320
  source_ids: []
321
  });
322
  this.cdr.detectChanges();
 
323
  this.scrollToBottom();
324
 
325
  // NEW: ensure follow-ups are not attempted after an error
@@ -330,6 +638,8 @@ export class ChatComponent implements OnDestroy {
330
  });
331
  }
332
 
 
 
333
  displaySource(tag: string): string {
334
  if (!tag) return '';
335
  const [path, pagePart] = tag.split('#p');
@@ -348,49 +658,363 @@ export class ChatComponent implements OnDestroy {
348
  }
349
 
350
  /** TTS helpers */
351
- speakResponse(responseText: string): void {
352
- if (!responseText) return;
353
 
354
- let lastAiMessage = this.messages.slice().reverse().find((msg) => msg.from === 'ai');
355
- if (!lastAiMessage) {
356
- lastAiMessage = { from: 'ai', text: '', timestamp: new Date().toLocaleTimeString() };
357
- this.messages.push(lastAiMessage);
358
  } else {
359
- lastAiMessage.text = '';
 
 
 
 
 
 
 
 
360
  }
 
 
 
 
 
 
361
  this.cdr.detectChanges();
362
 
363
  const words = responseText.split(' ');
364
- let currentWordIndex = 0;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
365
 
 
 
 
366
  const speech = new SpeechSynthesisUtterance();
367
  speech.text = responseText;
368
  speech.lang = 'en-US';
369
  speech.pitch = 1;
370
  speech.rate = 1;
371
  this.isSpeaking = true;
372
-
373
  const voices = window.speechSynthesis.getVoices();
374
- const femaleVoice = voices.find(v => v.name === "Microsoft Zira - English (United States)");
375
- if (femaleVoice) speech.voice = femaleVoice;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
376
 
377
- speech.onboundary = (event: any) => {
378
- if (event.name === 'word' && currentWordIndex < words.length) {
379
- lastAiMessage!.text = words.slice(0, currentWordIndex + 1).join(' ');
380
- currentWordIndex++;
381
- this.cdr.detectChanges();
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
382
  }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
383
  };
384
 
385
- speech.onend = () => {
 
 
 
 
 
386
  this.isSpeaking = false;
387
- lastAiMessage!.text = responseText;
388
  this.cdr.detectChanges();
389
  };
390
 
391
- window.speechSynthesis.speak(speech);
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
392
  }
393
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
394
  loadVoices(): void {
395
  const voices = window.speechSynthesis.getVoices();
396
  if (!voices.length) {
@@ -423,46 +1047,8 @@ export class ChatComponent implements OnDestroy {
423
  }
424
  }
425
 
426
- resumeAudio(): void {
427
- if (window.speechSynthesis.paused) {
428
- window.speechSynthesis.resume();
429
- this.isAudioPaused = false;
430
- this.cdr.detectChanges();
431
- }
432
- }
433
-
434
- /** Mic controls */
435
- muteMicrophone(): void { console.log("Microphone muted"); }
436
-
437
- startListening(): void {
438
- this.isListening = true;
439
- this.isProcessingSpeech = false;
440
-
441
- if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
442
- navigator.mediaDevices.getUserMedia({ audio: true })
443
- .then(() => {
444
- if (this.recognition) {
445
- this.recognition.start();
446
- this.recognition.onend = () => { this.isListening = false; };
447
- this.recognition.onerror = (error: any) => {
448
- console.error('Speech Recognition Error:', error);
449
- this.isListening = false;
450
- if (error.error === 'not-allowed') alert('Microphone permission denied.');
451
- else if (error.error === 'no-speech') alert('No speech detected. Please try again.');
452
- };
453
- } else {
454
- alert('Speech Recognition is not supported in this browser.');
455
- }
456
- })
457
- .catch((error) => {
458
- console.error('Microphone access denied:', error);
459
- this.errorMessage = 'Please enable microphone access to use this feature.';
460
- this.isListening = true;
461
- });
462
- } else {
463
- alert('Microphone access is not supported in this browser.');
464
- }
465
- }
466
 
467
  stopListening(): void {
468
  this.isListening = false;
@@ -497,39 +1083,197 @@ export class ChatComponent implements OnDestroy {
497
  }
498
  }
499
 
500
- /** UI helpers */
501
- goToHome() { this.router.navigate(['/home']); }
502
 
503
  copySuccessIndex: number | null = null;
504
  copyToClipboard(text: string, index: number): void {
505
  navigator.clipboard.writeText(text).then(() => {
506
  this.copySuccessIndex = index;
507
  setTimeout(() => { this.copySuccessIndex = null; }, 2000);
508
- }).catch(err => { console.error('Failed to copy: ', err); });
509
  }
510
 
 
 
511
  handleButtonClick(): void {
 
 
 
 
 
 
512
  if (this.userInput.trim().length > 0) {
513
  this.showQuestions = false;
514
- const messageToSend = this.userInput;
515
  this.userInput = '';
516
- this.sendMessage(messageToSend);
517
- } else if (this.isSpeaking) {
518
  this.pauseAudio();
519
  } else if (this.isAudioPaused) {
520
  this.resumeAudio();
521
  } else {
522
- this.startListening();
523
  }
524
  }
525
 
526
- getButtonIcon(): string {
527
- if (this.userInput.trim().length > 0) return 'assets/images/chat/send-icon.png';
528
- if (this.isSpeaking) return 'assets/images/chat/pause-icon.png';
529
- if (this.isAudioPaused) return 'assets/images/chat/resume-icon.png';
530
- return 'assets/images/chat/microphone-icon.png';
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
531
  }
532
 
 
 
533
  addNewLine(event: KeyboardEvent): void {
534
  if (event.key === 'Enter' && event.shiftKey) {
535
  event.preventDefault();
@@ -543,33 +1287,595 @@ export class ChatComponent implements OnDestroy {
543
  textarea.style.height = `${textarea.scrollHeight}px`;
544
  }
545
 
546
- openMicrophoneSettings(): void {
547
- const ua = navigator.userAgent;
548
- if (ua.includes("Chrome")) window.open("chrome://settings/content/microphone", "_blank");
549
- else if (ua.includes("Firefox")) window.open("about:preferences#privacy", "_blank");
550
- else if (ua.includes("Edge")) window.open("edge://settings/content/microphone", "_blank");
551
- else alert("Please check your browser's settings to enable the microphone.");
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
552
  }
553
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
554
  stopSpeaking(): void {
555
  try {
556
- if (window.speechSynthesis.speaking || window.speechSynthesis.paused) {
557
  window.speechSynthesis.cancel();
558
  }
559
- } catch { }
560
- (this as any).speechSynthesisInstance = null;
561
- if (this.responseSub && !this.responseSub.closed) this.responseSub.unsubscribe();
562
  this.isSpeaking = false;
563
  this.isAudioPaused = false;
564
- this.isTyping = false;
 
 
565
  }
566
 
567
- handleEnterPress(event: KeyboardEvent): void {
568
- if (this.isSpeaking) { event.preventDefault(); return; }
569
- if (event.key === 'Enter' && !event.shiftKey) {
570
- event.preventDefault();
571
- const text = (this.userInput || '').trim();
572
- if (text) this.sendMessage();
 
573
  }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
574
  }
 
 
 
 
 
 
 
575
  }
 
1
+ import { Component, Inject, OnDestroy, PLATFORM_ID, ChangeDetectorRef, ViewChild, ElementRef, Renderer2, AfterViewChecked, Output, EventEmitter } from '@angular/core';
2
  import { ApiService } from './api.service';
3
  import { FormsModule } from '@angular/forms';
4
+ import { CommonModule, isPlatformBrowser } from '@angular/common';
5
  import { Router, RouterModule } from '@angular/router';
 
 
 
6
  import { Subscription } from 'rxjs';
7
  import { finalize } from 'rxjs/operators';
8
+ import { HttpClient } from '@angular/common/http';
9
+ import { lastValueFrom } from 'rxjs';
10
+
11
  type Grade = 'lowergrade' | 'midgrade' | 'highergrade';
12
 
13
  @Component({
 
15
  standalone: true,
16
  imports: [FormsModule, CommonModule, RouterModule],
17
  templateUrl: './chat.component.html',
18
+ styleUrls: ['./chat.component.css']
19
  })
20
  export class ChatComponent implements OnDestroy {
21
+
22
+ @ViewChild('waveformCanvas') waveformCanvas!: ElementRef<HTMLCanvasElement>;
23
+ @ViewChild('popupTranscriptEl') popupTranscriptEl!: ElementRef<HTMLDivElement>;
24
+ @Output() transcriptConfirmed = new EventEmitter<string>();
25
+
26
+ isRecording = false;
27
+ showMicPopup = false;
28
+ popupTranscript = '';
29
+ errorMessage = '';
30
+
31
+ private recognition: any = null;
32
+ private _recordingFinalBuffer = '';
33
+ private _recordingInterimBuffer = '';
34
+
35
+ private audioContext: AudioContext | null = null;
36
+ private analyser: AnalyserNode | null = null;
37
+ private dataArray: Uint8Array | null = null;
38
+ private mediaStream: MediaStream | null = null;
39
+ private animationFrameId: number | null = null;
40
+
41
+ // restart helpers
42
+ private _recognitionActive = false;
43
+ private _restartTimer: any = null;
44
+ private _restartAttempts = 0;
45
+ private _maxRestartDelay = 1500;
46
+
47
  showQuestions: boolean = false;
48
  isSubmitting: boolean = false;
49
 
 
53
 
54
  /** Chat state */
55
  userInput: string = '';
56
+ messages: Array<{
57
+ from: string;
58
+ text: string;
59
+ timestamp: string;
60
+ isPlaying?: boolean;
61
+ isMuted?: boolean;
62
+ suggestions?: string[];
63
+ source_ids?: string[];
64
+ videoUrl?: string;
65
+ audioUrl?: string;
66
+ playingVideoUrl?: string;
67
+ pending?: boolean;
68
+ isSynthesizing?: boolean;
69
+ isVideoSynthesizing?: boolean;
70
+ }> = [];
71
+ isTyping: boolean = false;
72
  @ViewChild('chatBox') chatBox!: ElementRef;
73
 
74
  /** Speech / mic state */
75
  isLoadingSpeech: boolean = false;
76
  selectedVoice: SpeechSynthesisVoice | null = null;
77
+
 
78
  speechSynthesisInstance: SpeechSynthesisUtterance | null = null;
79
  isListening: boolean = false;
80
  isProcessingSpeech: boolean = false;
 
83
 
84
  /** Suggestions for typed input (powered by PDF too) */
85
  suggestions: string[] = [];
86
+ isInputValid = false;
87
  /** Popup */
88
+
89
 
90
  /** Subscriptions */
91
  private responseSub?: Subscription;
 
98
  /** NEW: only allow follow-ups when last answer was grounded in textbook pages */
99
  private lastAnswerHasContext: boolean = false;
100
 
101
+
102
+ // --- Multi-chat state: keep active `this.messages` pointing to the active chat
103
+ private activeChatIndex = 0; // 0 = primary, 1 = secondary
104
+
105
+ // Per-chat storage (persist when switching)
106
+ private primaryMessages: typeof this.messages = [];
107
+ private secondaryMessages: typeof this.messages = [];
108
+
109
+ private primaryIsVideoEnabledIndex: boolean[] = [];
110
+ private secondaryIsVideoEnabledIndex: boolean[] = [];
111
+
112
+ private primaryLastQuestionContext = '';
113
+ private secondaryLastQuestionContext = '';
114
+ private primaryLastAnswerContext = '';
115
+ private secondaryLastAnswerContext = '';
116
+ private primaryLastSourceIdsContext: string[] = [];
117
+ private secondaryLastSourceIdsContext: string[] = [];
118
+
119
+ private primaryCurrentFollowups: string[] = [];
120
+ private secondaryCurrentFollowups: string[] = [];
121
+
122
+ private primaryPendingAiIndex: number | null = null;
123
+ private secondaryPendingAiIndex: number | null = null;
124
+
125
+ private primaryServerAudioMessageIndex: number | null = null;
126
+ private secondaryServerAudioMessageIndex: number | null = null;
127
+
128
+ private primaryIsReadingIndex: number | null = null;
129
+ private secondaryIsReadingIndex: number | null = null;
130
+
131
+ private primaryIsVideoPlayingIndex: number | null = null;
132
+ private secondaryIsVideoPlayingIndex: number | null = null;
133
+ // --- end multi-chat state
134
+
135
+
136
+ currentFollowups: string[] = [];
137
+
138
+ /*private shouldAutoScroll = true;*/
139
+ videoUrl = '';
140
+ aiResponseInterval: any = null;
141
+ isAiResponding = false;
142
+ isVideoEnabledIndex: boolean[] = [];
143
+
144
+ private currentExplainSub: Subscription | null = null;
145
+ private currentFollowupsSub: Subscription | null = null;
146
+
147
+ serverAudio: HTMLAudioElement | null = null;
148
+ serverAudioMessageIndex: number | null = null;
149
+
150
+ private pendingAiIndex: number | null = null;
151
+
152
+ isMuted = false;
153
+ showUserGuide = false;
154
+ openDropdownIndex: number | null = null;
155
+ isReadingIndex: number | null = null;
156
+ isVideoPlayingIndex: number | null = null;
157
+ private readAloudUtterance: SpeechSynthesisUtterance | null = null;
158
+
159
+ private lastQuestionContext: string = '';
160
+ private lastAnswerContext: string = '';
161
+ private lastSourceIdsContext: string[] = [];
162
+
163
+ private popupListeningMode = false;
164
+ private _savedRecognitionOnResult: any = null;
165
+ private _savedInterimResults = false;
166
+ private _suppressDefaultOnResult = false;
167
+ private shouldAutoScroll = true;
168
  constructor(
169
  private apiService: ApiService,
170
  private cdr: ChangeDetectorRef,
171
+ @Inject(PLATFORM_ID) private platformId: object,
172
+ private http: HttpClient,
173
  private router: Router,
174
  private renderer: Renderer2
175
  ) {
176
+ // Initialize primary/secondary storage with the current defaults
177
+ this.primaryMessages = this.messages;
178
+ this.secondaryMessages = [];
179
+
180
+ this.primaryIsVideoEnabledIndex = this.isVideoEnabledIndex;
181
+ this.secondaryIsVideoEnabledIndex = [];
182
+
183
+ this.primaryLastQuestionContext = this.lastQuestionContext;
184
+ this.primaryLastAnswerContext = this.lastAnswerContext;
185
+ this.primaryLastSourceIdsContext = this.lastSourceIdsContext.slice();
186
+ this.primaryCurrentFollowups = this.currentFollowups.slice();
187
+ this.primaryPendingAiIndex = this.pendingAiIndex;
188
+ this.primaryServerAudioMessageIndex = this.serverAudioMessageIndex;
189
+ this.primaryIsReadingIndex = this.isReadingIndex;
190
+ this.primaryIsVideoPlayingIndex = this.isVideoPlayingIndex;
191
+
192
+ // secondary fields are already empty/default
193
+
194
+ if (!isPlatformBrowser(this.platformId)) return;
195
+
196
+ const SR = (window as any).SpeechRecognition || (window as any).webkitSpeechRecognition;
197
+ if (!SR) {
198
+ console.warn('SpeechRecognition not supported');
199
+ return;
200
+ }
201
+
202
+ this.recognition = new SR();
203
+ this.recognition.continuous = true;
204
+ this.recognition.interimResults = true;
205
+ this.recognition.lang = 'en-US';
206
+ try { this.recognition.maxAlternatives = 1; } catch { /* ignore */ }
207
+
208
+ this.recognition.onstart = () => { this._recognitionActive = true; this._restartAttempts = 0; };
209
+ this.recognition.onspeechstart = () => { this._recognitionActive = true; };
210
+ this.recognition.onspeechend = () => { this._recognitionActive = false; };
211
+
212
+ this.recognition.onresult = (event: any) => {
213
+ if (!this.isRecording) return;
214
+ let interim = '';
215
+ let final = '';
216
+ for (let i = event.resultIndex; i < event.results.length; i++) {
217
+ const res = event.results[i];
218
+ const t = (res && res[0] && res[0].transcript) ? res[0].transcript : '';
219
+ if (res.isFinal) final += t + ' ';
220
+ else interim += t + ' ';
221
+ }
222
+
223
+ if (final) {
224
+ this._recordingFinalBuffer += final;
225
+ this._recordingInterimBuffer = '';
226
+ } else {
227
+ this._recordingInterimBuffer = interim;
228
+ }
229
+
230
+ // keep UI quiet while recording; show only after Done pressed
231
+ this.cdr.detectChanges();
232
  };
233
 
234
+ this.recognition.onerror = (e: any) => {
235
+ console.error('Recognition error', e);
236
+ if (e?.error === 'not-allowed') {
237
+ this.errorMessage = 'Microphone access denied';
238
+ this.isRecording = false;
239
+ } else {
240
+ this.errorMessage = `Error: ${e?.error || 'unknown'}`;
241
+ }
 
 
 
 
 
 
 
 
 
242
 
243
+ if (this.isRecording && (e?.error === 'no-speech' || e?.error === 'aborted' || e?.error === 'network')) {
244
+ if (this._restartTimer) clearTimeout(this._restartTimer);
245
+ const delay = Math.min(400 * (this._restartAttempts + 1), this._maxRestartDelay);
246
+ this._restartTimer = setTimeout(() => {
247
+ try { if (this.recognition && !this._recognitionActive) this.recognition.start(); } catch { this._restartAttempts++; }
248
+ }, delay);
249
+ } else {
250
+ if (e?.error !== 'not-allowed') this._recognitionActive = false;
251
  }
252
+
253
+ this.cdr.detectChanges();
254
+ };
255
+
256
+ this.recognition.onend = () => {
257
+ this._recognitionActive = false;
258
+ if (this.isRecording && this.showMicPopup) {
259
+ if (this._restartTimer) clearTimeout(this._restartTimer);
260
+ const delay = Math.min(250 + (this._restartAttempts * 200), this._maxRestartDelay);
261
+ this._restartTimer = setTimeout(() => {
262
+ try { if (this.recognition && !this._recognitionActive) this.recognition.start(); } catch { this._restartAttempts++; }
263
+ }, delay);
264
+ }
265
+ };
266
+
267
+ this.recognition.onnomatch = () => { /* noop */ };
268
+ }
269
+
270
+ selectHardcodedQuestion(question: string): void {
271
+ try { console.log('[CHAT] default question selected:', question); } catch { }
272
+ this.showQuestions = false;
273
+ this.sendMessage(question);
274
+ this.userInput = '';
275
  }
276
 
277
  private ensureGradeLevel(defaultGrade: Grade = 'lowergrade'): void {
 
284
  }
285
  }
286
 
287
+ //private storageHandler = (e: StorageEvent) => {
288
+ // if (e.key === 'hasPDF') this.showRagModeAlert();
289
+ //};
290
+
291
+ //private showRagModeAlert(): void {
292
+ // const ragEnabled = localStorage.getItem('rag_enabled') === '1' || localStorage.getItem('hasPDF') === 'true';
293
+ // // No-op placeholder
294
+ //}
295
+
296
  ngOnInit(): void {
297
  this.ensureGradeLevel();
298
  if (window.speechSynthesis.onvoiceschanged !== undefined) {
 
303
  this.loadVoices();
304
  }
305
 
306
+ //ngAfterViewChecked() {
307
+ // setTimeout(() => {
308
+ // this.scrollToBottom();
309
+ // }, 100);
310
+ //}
311
 
312
  ngOnDestroy(): void {
313
+ if (this.currentExplainSub) { this.currentExplainSub.unsubscribe(); this.currentExplainSub = null; }
314
+ if (this.currentFollowupsSub) { this.currentFollowupsSub.unsubscribe(); this.currentFollowupsSub = null; }
315
  if (isPlatformBrowser(this.platformId)) {
316
+ try { window.speechSynthesis?.cancel(); } catch { /* noop */ }
 
 
317
  window.removeEventListener('beforeunload', this.handleUnload);
318
+ //window.removeEventListener('storage', this.storageHandler);
319
  }
320
+ this.stopServerAudio();
321
+ try { window.speechSynthesis?.cancel(); } catch { /* noop */ }
 
322
  }
323
 
324
  private handleUnload = (): void => {
 
327
  }
328
  };
329
 
330
+ ngAfterViewInit() {
331
+ this.chatBox.nativeElement.addEventListener('scroll', () => {
332
+ const el = this.chatBox.nativeElement;
333
+ const atBottom = el.scrollHeight - el.clientHeight - el.scrollTop < 50;
334
+ this.shouldAutoScroll = atBottom;
335
+ });
336
+ }
337
+
338
+ //scrollToBottom(): void {
339
+ // try {
340
+ // this.chatBox.nativeElement.scrollTo({ top: this.chatBox.nativeElement.scrollHeight, behavior: 'smooth' });
341
+ // } catch (err) {
342
+ // console.error('Scroll error:', err);
343
+ // }
344
+ //}
345
+
346
+ scrollToBottom(): void {
347
+ if (this.shouldAutoScroll) {
348
+ try {
349
+ this.chatBox.nativeElement.scrollTo({
350
+ top: this.chatBox.nativeElement.scrollHeight,
351
+ behavior: 'smooth'
352
+ });
353
+ } catch { }
354
+ }
355
+ }
356
+
357
+ // --- Public API: toggle between chat A/B
358
+ toggleChat(): void {
359
+ const target = 1 - this.activeChatIndex;
360
+ this.switchToChat(target);
361
  }
362
 
363
+ switchToChat(index: number): void {
364
+ if (index === this.activeChatIndex) return;
365
+
366
+ // Stop any playing media on the current chat first so saved state doesn't preserve "playing" flags
367
+ this.stopServerAudio();
368
+ try { window.speechSynthesis?.cancel(); } catch { /* noop */ }
369
+ this.stopAllVideo();
370
+
371
+ // Save current active chat state (now that media is stopped)
372
+ this.saveCurrentChatState(this.activeChatIndex);
373
+
374
+ // Restore target chat state
375
+ this.restoreChatState(index);
376
+
377
+ this.activeChatIndex = index;
378
+ this.cdr.detectChanges();
379
+ }
380
+
381
+ private saveCurrentChatState(idx: number): void {
382
+ // Make a shallow copy of messages and clear transient playback flags so the saved chat never
383
+ // preserves an "isPlaying" or inline-playing video state.
384
+ const sanitizedMessages = (this.messages || []).map(m => ({
385
+ ...m,
386
+ isPlaying: false,
387
+ playingVideoUrl: '',
388
+ // keep pending / audio/video urls etc. — only clear playback flags
389
+ }));
390
+
391
+ if (idx === 0) {
392
+ this.primaryMessages = sanitizedMessages;
393
+ this.primaryIsVideoEnabledIndex = (this.isVideoEnabledIndex || []).slice();
394
+ this.primaryLastQuestionContext = this.lastQuestionContext;
395
+ this.primaryLastAnswerContext = this.lastAnswerContext;
396
+ this.primaryLastSourceIdsContext = this.lastSourceIdsContext.slice();
397
+ this.primaryCurrentFollowups = this.currentFollowups.slice();
398
+ this.primaryPendingAiIndex = this.pendingAiIndex;
399
+ // serverAudioMessageIndex should be null because we stopped audio before saving
400
+ this.primaryServerAudioMessageIndex = null;
401
+ this.primaryIsReadingIndex = null;
402
+ this.primaryIsVideoPlayingIndex = null;
403
+ } else {
404
+ this.secondaryMessages = sanitizedMessages;
405
+ this.secondaryIsVideoEnabledIndex = (this.isVideoEnabledIndex || []).slice();
406
+ this.secondaryLastQuestionContext = this.lastQuestionContext;
407
+ this.secondaryLastAnswerContext = this.lastAnswerContext;
408
+ this.secondaryLastSourceIdsContext = this.lastSourceIdsContext.slice();
409
+ this.secondaryCurrentFollowups = this.currentFollowups.slice();
410
+ this.secondaryPendingAiIndex = this.pendingAiIndex;
411
+ this.secondaryServerAudioMessageIndex = null;
412
+ this.secondaryIsReadingIndex = null;
413
+ this.secondaryIsVideoPlayingIndex = null;
414
+ }
415
+ }
416
+
417
+ private restoreChatState(idx: number): void {
418
+ if (idx === 0) {
419
+ this.messages = this.primaryMessages || [];
420
+ this.isVideoEnabledIndex = this.primaryIsVideoEnabledIndex || [];
421
+ this.lastQuestionContext = this.primaryLastQuestionContext || '';
422
+ this.lastAnswerContext = this.primaryLastAnswerContext || '';
423
+ this.lastSourceIdsContext = (this.primaryLastSourceIdsContext || []).slice();
424
+ this.currentFollowups = (this.primaryCurrentFollowups || []).slice();
425
+ this.pendingAiIndex = this.primaryPendingAiIndex;
426
+ this.serverAudioMessageIndex = this.primaryServerAudioMessageIndex;
427
+ this.isReadingIndex = this.primaryIsReadingIndex;
428
+ this.isVideoPlayingIndex = this.primaryIsVideoPlayingIndex;
429
+ } else {
430
+ this.messages = this.secondaryMessages || [];
431
+ this.isVideoEnabledIndex = this.secondaryIsVideoEnabledIndex || [];
432
+ this.lastQuestionContext = this.secondaryLastQuestionContext || '';
433
+ this.lastAnswerContext = this.secondaryLastAnswerContext || '';
434
+ this.lastSourceIdsContext = (this.secondaryLastSourceIdsContext || []).slice();
435
+ this.currentFollowups = (this.secondaryCurrentFollowups || []).slice();
436
+ this.pendingAiIndex = this.secondaryPendingAiIndex;
437
+ this.serverAudioMessageIndex = this.secondaryServerAudioMessageIndex;
438
+ this.isReadingIndex = this.secondaryIsReadingIndex;
439
+ this.isVideoPlayingIndex = this.secondaryIsVideoPlayingIndex;
440
+ }
441
+
442
+ // Ensure arrays exist to avoid undefined errors elsewhere
443
+ if (!this.messages) this.messages = [];
444
+ if (!this.isVideoEnabledIndex) this.isVideoEnabledIndex = [];
445
+ if (!this.currentFollowups) this.currentFollowups = [];
446
+ }
447
+
448
+ startFromPopup(): void {
449
+ this._suppressDefaultOnResult = true;
450
+ try { if (this.recognition && typeof this.recognition.stop === 'function') this.recognition.stop(); } catch { }
451
+ this._restoreRecognitionHandlers();
452
+ this.isListening = false;
453
+ this.showMicPopup = false;
454
+ const message = (this.popupTranscript || '').trim();
455
+ this.popupTranscript = '';
456
+ if (!message) { this.errorMessage = 'No speech captured. Please try again.'; this.cdr.detectChanges(); return; }
457
+ this.sendMessage(message);
458
+ }
459
 
460
  /** Show questions on focus: initial (no answer yet) or follow-ups (after an answer) */
461
  showHardcodedQuestions(): void {
 
577
  this.userInput = '';
578
  this.isTyping = true;
579
  this.cdr.detectChanges();
580
+ this.shouldAutoScroll = true;
581
  this.scrollToBottom();
582
 
583
  this.responseSub = this.apiService.explainGrammar(message)
 
602
  source_ids: sourceIds
603
  });
604
  this.cdr.detectChanges();
605
+ this.shouldAutoScroll = true;
606
  this.scrollToBottom();
607
 
608
  this.lastQuestion = message;
 
627
  source_ids: []
628
  });
629
  this.cdr.detectChanges();
630
+ this.shouldAutoScroll = true;
631
  this.scrollToBottom();
632
 
633
  // NEW: ensure follow-ups are not attempted after an error
 
638
  });
639
  }
640
 
641
+
642
+
643
  displaySource(tag: string): string {
644
  if (!tag) return '';
645
  const [path, pagePart] = tag.split('#p');
 
658
  }
659
 
660
  /** TTS helpers */
661
+ animateAiResponse(responseText: string, targetIndex?: number): void {
662
+ if (!responseText) { this.isAiResponding = false; return; }
663
 
664
+ let aiIndex: number | null = null;
665
+ if (typeof targetIndex === 'number' && this.messages[targetIndex] && this.messages[targetIndex].from === 'ai') {
666
+ aiIndex = targetIndex;
 
667
  } else {
668
+ for (let i = this.messages.length - 1; i >= 0; i--) {
669
+ if (this.messages[i].from === 'ai') { aiIndex = i; break; }
670
+ }
671
+ }
672
+
673
+ if (aiIndex === null || aiIndex < 0 || !this.messages[aiIndex]) {
674
+ this.messages.push({ from: 'ai', text: '', timestamp: new Date().toLocaleTimeString() } as any);
675
+ aiIndex = this.messages.length - 1;
676
+ this.isVideoEnabledIndex.push(false);
677
  }
678
+
679
+ const aiMsg = this.messages[aiIndex] as any;
680
+
681
+ if (this.aiResponseInterval) { clearInterval(this.aiResponseInterval); this.aiResponseInterval = null; }
682
+
683
+ aiMsg.text = '';
684
  this.cdr.detectChanges();
685
 
686
  const words = responseText.split(' ');
687
+ let idx = 0;
688
+ this.aiResponseInterval = setInterval(() => {
689
+ if (idx < words.length) {
690
+ aiMsg.text = words.slice(0, idx + 1).join(' ');
691
+ idx++;
692
+ this.cdr.detectChanges();
693
+ } else {
694
+ clearInterval(this.aiResponseInterval);
695
+ this.aiResponseInterval = null;
696
+ aiMsg.text = responseText;
697
+ aiMsg.pending = false;
698
+ this.cdr.detectChanges();
699
+ this.isAiResponding = false;
700
+ }
701
+ }, 280);
702
+ }
703
+
704
+ stopAiResponse(): void {
705
+ if (this.currentExplainSub) { this.currentExplainSub.unsubscribe(); this.currentExplainSub = null; }
706
+ if (this.currentFollowupsSub) { this.currentFollowupsSub.unsubscribe(); this.currentFollowupsSub = null; }
707
+ if (this.aiResponseInterval) { clearInterval(this.aiResponseInterval); this.aiResponseInterval = null; }
708
+
709
+ this.stopServerAudio();
710
+ try { window.speechSynthesis?.cancel(); } catch { /* noop */ }
711
+
712
+ if (this.pendingAiIndex !== null && this.messages[this.pendingAiIndex] && this.messages[this.pendingAiIndex].from === 'ai') {
713
+ const msg = this.messages[this.pendingAiIndex];
714
+ msg.text = 'Response cancelled.';
715
+ msg.timestamp = new Date().toLocaleTimeString();
716
+ msg.suggestions = [];
717
+ msg.audioUrl = '';
718
+ msg.videoUrl = '';
719
+ msg.playingVideoUrl = '';
720
+ msg.pending = false;
721
+ if (this.isVideoEnabledIndex.length > this.pendingAiIndex) this.isVideoEnabledIndex[this.pendingAiIndex] = false;
722
+ this.pendingAiIndex = null;
723
+ } else {
724
+ const revIndex = [...this.messages].reverse().findIndex(m => m.from === 'ai');
725
+ if (revIndex !== -1) {
726
+ const actualIndex = this.messages.length - 1 - revIndex;
727
+ const msg = this.messages[actualIndex];
728
+ msg.text = 'Response cancelled.';
729
+ msg.timestamp = new Date().toLocaleTimeString();
730
+ msg.suggestions = [];
731
+ msg.audioUrl = '';
732
+ msg.videoUrl = '';
733
+ msg.playingVideoUrl = '';
734
+ msg.pending = false;
735
+ if (this.isVideoEnabledIndex.length > actualIndex) this.isVideoEnabledIndex[actualIndex] = false;
736
+ } else {
737
+ this.messages.push({ from: 'ai', text: 'Response cancelled.', timestamp: new Date().toLocaleTimeString() });
738
+ this.isVideoEnabledIndex.push(false);
739
+ }
740
+ }
741
+
742
+ this.isAiResponding = false;
743
+ this.isTyping = false;
744
+ this.isSpeaking = false;
745
+ this.isReadingIndex = null;
746
+ this.cdr.detectChanges();
747
+ }
748
 
749
+ speakResponse(responseText: string): void {
750
+ if (!responseText) return;
751
+ this.stopAllVideo();
752
  const speech = new SpeechSynthesisUtterance();
753
  speech.text = responseText;
754
  speech.lang = 'en-US';
755
  speech.pitch = 1;
756
  speech.rate = 1;
757
  this.isSpeaking = true;
 
758
  const voices = window.speechSynthesis.getVoices();
759
+ const preferred = [
760
+ 'Google UK English Female',
761
+ 'Google US English Female',
762
+ 'Microsoft Zira - English (United States)',
763
+ 'Microsoft Hazel - English (United Kingdom)',
764
+ 'Google en-GB Female',
765
+ 'Google en-US Female'
766
+ ];
767
+ for (const n of preferred) {
768
+ const found = voices.find(v => v.name === n);
769
+ if (found) { speech.voice = found; break; }
770
+ }
771
+ if (!speech.voice && voices.length) speech.voice = voices[0];
772
+ speech.onend = () => { this.isSpeaking = false; this.cdr.detectChanges(); };
773
+ window.speechSynthesis.speak(speech);
774
+ }
775
 
776
+ //pauseAudio(): void {
777
+ // if (this.serverAudio && !this.serverAudio.paused) {
778
+ // this.serverAudio.pause();
779
+ // this.isAudioPaused = true;
780
+ // if (this.serverAudioMessageIndex !== null) this.messages[this.serverAudioMessageIndex].isPlaying = false;
781
+ // this.cdr.detectChanges();
782
+ // return;
783
+ // }
784
+ // if (window.speechSynthesis && window.speechSynthesis.speaking && !window.speechSynthesis.paused) {
785
+ // window.speechSynthesis.pause();
786
+ // this.isAudioPaused = true;
787
+ // this.cdr.detectChanges();
788
+ // }
789
+ //}
790
+
791
+ resumeAudio(): void {
792
+ if (this.serverAudio && this.serverAudio.paused) {
793
+ this.serverAudio.play();
794
+ this.isAudioPaused = false;
795
+ if (this.serverAudioMessageIndex !== null) this.messages[this.serverAudioMessageIndex].isPlaying = true;
796
+ this.cdr.detectChanges();
797
+ return;
798
+ }
799
+ if (window.speechSynthesis && window.speechSynthesis.paused) {
800
+ window.speechSynthesis.resume();
801
+ this.isAudioPaused = false;
802
+ this.cdr.detectChanges();
803
+ }
804
+ }
805
+
806
+ playServerAudioForMessage(index: number): void {
807
+ const msg = this.messages[index] as any;
808
+ if (!msg || !msg.audioUrl) return;
809
+
810
+ // If same message is clicked while its audio element exists: toggle play/pause
811
+ if (this.serverAudio && this.serverAudioMessageIndex === index) {
812
+ if (!this.serverAudio.paused) {
813
+ this.serverAudio.pause();
814
+ this.isAudioPaused = true;
815
+ msg.isPlaying = false;
816
+ } else {
817
+ this.serverAudio.play().catch(err => {
818
+ console.error('Audio resume failed:', err);
819
+ });
820
+ this.isAudioPaused = false;
821
+ msg.isPlaying = true;
822
  }
823
+ this.cdr.detectChanges();
824
+ return;
825
+ }
826
+
827
+ // Stop any inline videos (pauses DOM video elements) before starting audio
828
+ this.stopAllVideo();
829
+
830
+ // Stop any existing audio and speech synthesis
831
+ this.stopServerAudio();
832
+ try { window.speechSynthesis?.cancel(); } catch { /* noop */ }
833
+
834
+ // NOTE: Do NOT change global navbar toggles here.
835
+ // Previously this method forced `isMuted = false` and `isVoiceEnabled = true`
836
+ // which caused the navbar audio icon to flip when playing per-message audio.
837
+ // We intentionally avoid modifying those global flags so per-message playback
838
+ // doesn't affect the navbar state.
839
+
840
+ // Create and play new audio
841
+ this.serverAudio = new Audio(msg.audioUrl);
842
+ this.serverAudioMessageIndex = index;
843
+ try { this.serverAudio.volume = this.isMuted ? 0 : 1; } catch { /* noop */ }
844
+
845
+ this.isReadingIndex = index;
846
+ this.isAudioPaused = false;
847
+
848
+ msg.isPlaying = true;
849
+ this.isSpeaking = true;
850
+ this.cdr.detectChanges();
851
+
852
+ this.serverAudio.onended = () => {
853
+ try { msg.isPlaying = false; } catch { /* noop */ }
854
+ this.isReadingIndex = null;
855
+ this.serverAudio = null;
856
+ this.serverAudioMessageIndex = null;
857
+ this.isSpeaking = false;
858
+ this.cdr.detectChanges();
859
  };
860
 
861
+ this.serverAudio.onerror = (e) => {
862
+ console.error('Server audio playback error', e);
863
+ try { msg.isPlaying = false; } catch { /* noop */ }
864
+ this.isReadingIndex = null;
865
+ this.serverAudio = null;
866
+ this.serverAudioMessageIndex = null;
867
  this.isSpeaking = false;
 
868
  this.cdr.detectChanges();
869
  };
870
 
871
+ this.serverAudio.play().catch(err => {
872
+ console.error('Audio play failed:', err);
873
+ try { msg.isPlaying = false; } catch { /* noop */ }
874
+ this.isReadingIndex = null;
875
+ this.serverAudio = null;
876
+ this.serverAudioMessageIndex = null;
877
+ this.isSpeaking = false;
878
+ this.cdr.detectChanges();
879
+ });
880
+ }
881
+
882
  /**
   * Stops and discards the current server-side audio element and clears the
   * `isPlaying` flag on the message that owned it.
   */
  private stopServerAudio(): void {
    if (this.serverAudio) {
      try { this.serverAudio.pause(); this.serverAudio.currentTime = 0; } catch { }
      this.serverAudio = null;
      // Reset the per-message playing indicator, if the message still exists.
      if (this.serverAudioMessageIndex !== null && this.messages[this.serverAudioMessageIndex]) {
        this.messages[this.serverAudioMessageIndex].isPlaying = false;
      }
      this.serverAudioMessageIndex = null;
    }
    this.isReadingIndex = null;
  }
893
+
894
+ synthesizeAudioAndPlay(index: number): void {
895
+ const msg = this.messages[index] as any;
896
+ if (!msg || !msg.text) return;
897
+ if (msg.audioUrl) { this.playServerAudioForMessage(index); return; }
898
+ if (msg.isSynthesizing) return;
899
+ msg.isSynthesizing = true; this.cdr.detectChanges();
900
+ this.apiService.synthesizeAudio(msg.text).subscribe({
901
+ next: (res: any) => {
902
+ msg.isSynthesizing = false;
903
+ if (res?.audio_url) { msg.audioUrl = res.audio_url; this.playServerAudioForMessage(index); }
904
+ else { this.errorMessage = 'Audio generation failed.'; }
905
+ this.cdr.detectChanges();
906
+ },
907
+ error: (err) => { msg.isSynthesizing = false; this.errorMessage = 'Audio generation failed.'; this.cdr.detectChanges(); }
908
+ });
909
+ }
910
+
911
  /**
   * Ensures a talking-head video exists for the message at `index`, then opens
   * it inline. Reuses a cached `videoUrl` when present; otherwise requests
   * synthesis from the backend (D-ID flow). Guarded by `isVideoSynthesizing`.
   */
  synthesizeVideoAndPlay(index: number): void {
    const msg = this.messages[index] as any;
    if (!msg || !msg.text) return;

    // Already synthesized — open the cached video instead of re-requesting.
    if (msg.videoUrl) {
      this.openMessageVideo(index);
      return;
    }

    if (msg.isVideoSynthesizing) return; // request already in flight
    msg.isVideoSynthesizing = true;
    this.cdr.detectChanges();

    this.apiService.synthesizeVideo(msg.text).subscribe({
      next: (res: any) => {
        msg.isVideoSynthesizing = false;
        if (res?.video_url) {
          msg.videoUrl = res.video_url;
          this.openMessageVideo(index);
        } else {
          console.error('No video_url returned from synth API:', res);
          this.errorMessage = 'Video generation failed.';
        }
        this.cdr.detectChanges();
      },
      error: (err) => {
        msg.isVideoSynthesizing = false;
        console.error('Video synth API error', err);
        this.errorMessage = 'Video generation failed. Try again.';
        this.cdr.detectChanges();
      }
    });
  }
944
 
945
  /**
   * Starts inline playback of message `i`'s video. Acts as a toggle: if the
   * same message's video is already playing it is stopped instead. All other
   * audio/TTS/video output is halted before playback begins.
   */
  openMessageVideo(i: number): void {
    const msg = this.messages[i] as any;
    if (!msg?.videoUrl) return;

    // Toggle semantics: clicking the currently-playing video stops it.
    if (this.isVideoPlayingIndex === i) {
      this.stopInlineVideo(i);
      return;
    }

    // Stop any server audio and spoken TTS before playing video
    this.stopServerAudio();
    try { window.speechSynthesis?.cancel(); } catch { /* noop */ }

    // Stop other videos (will also pause DOM video elements)
    this.stopAllVideo();

    msg.playingVideoUrl = msg.videoUrl;
    this.isVideoEnabledIndex[i] = true;
    this.cdr.detectChanges();

    // Defer until the <video> element has been rendered by the change above.
    setTimeout(() => {
      const vid = document.getElementById(`inline-video-${i}`) as HTMLVideoElement | null;
      if (!vid) { this.isVideoPlayingIndex = null; this.cdr.detectChanges(); return; }

      vid.onplay = () => { this.isVideoPlayingIndex = i; this.cdr.detectChanges(); };
      vid.onpause = () => { this.cdr.detectChanges(); };
      vid.onended = () => { this.onMessageVideoEnded(i); };

      // Attempt to play; if blocked (e.g. autoplay policy), clear playing state
      vid.play().catch(err => {
        console.warn('Inline video play blocked/failed:', err);
        this.isVideoPlayingIndex = null;
        msg.playingVideoUrl = '';
        this.isVideoEnabledIndex[i] = false;
        this.cdr.detectChanges();
      });
    }, 50);
  }
983
+
984
+ // New toggle helper called from the template button. It uses existing open/stop helpers.
985
+ toggleMessageVideo(i: number): void {
986
+ const msg = this.messages[i] as any;
987
+ if (!msg || !msg.videoUrl) return;
988
+ if (!this.isVideoEnabledIndex[i]) {
989
+ this.openMessageVideo(i);
990
+ } else {
991
+ this.stopInlineVideo(i);
992
+ }
993
+ }
994
+
995
  /**
   * Stops and rewinds the inline video for `index`, then clears all related
   * UI state (playing URL, enabled flag, global playing index).
   */
  stopInlineVideo(index: number): void {
    const vid = document.getElementById(`inline-video-${index}`) as HTMLVideoElement | null;
    if (vid) {
      try { vid.pause(); } catch { /* noop */ }
      try { vid.currentTime = 0; } catch { /* noop */ }
    }

    const msg = this.messages[index] as any;
    if (msg) msg.playingVideoUrl = '';
    this.isVideoEnabledIndex[index] = false;
    if (this.isVideoPlayingIndex === index) this.isVideoPlayingIndex = null;
    this.cdr.detectChanges();
  }
1008
+
1009
+ onMessageVideoEnded(i: number): void {
1010
+ const msg = this.messages[i] as any;
1011
+ if (msg) msg.playingVideoUrl = '';
1012
+ this.isVideoEnabledIndex[i] = false;
1013
+ if (this.isVideoPlayingIndex === i) this.isVideoPlayingIndex = null;
1014
+ this.cdr.detectChanges();
1015
+ }
1016
+
1017
+
1018
  loadVoices(): void {
1019
  const voices = window.speechSynthesis.getVoices();
1020
  if (!voices.length) {
 
1047
  }
1048
  }
1049
 
1050
+
1051
+
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1052
 
1053
  stopListening(): void {
1054
  this.isListening = false;
 
1083
  }
1084
  }
1085
 
1086
+ // UI / input helpers
1087
+ goToHome(): void { this.router.navigate(['/home']); }
1088
 
1089
  // Index of the message whose "copied!" indicator is currently shown.
  copySuccessIndex: number | null = null;

  /**
   * Copies `text` to the system clipboard and flashes the success indicator
   * on message `index` for two seconds.
   */
  copyToClipboard(text: string, index: number): void {
    navigator.clipboard.writeText(text).then(() => {
      this.copySuccessIndex = index;
      setTimeout(() => { this.copySuccessIndex = null; }, 2000);
    }).catch(err => console.error('Failed to copy:', err));
  }
1096
 
1097
+ checkInput = (): void => { this.isInputValid = this.userInput.trim().length > 0; }
1098
+
1099
  /**
   * Dispatches the multi-purpose send/mic/pause button. Priority order:
   *  1. AI currently responding  → cancel the response;
   *  2. typed text present       → send it as a message;
   *  3. TTS speaking (no server audio) → pause;
   *  4. audio paused             → resume;
   *  5. otherwise                → open the microphone popup.
   */
  handleButtonClick(): void {
    // If AI is currently streaming/responding, clicking the button should cancel that response.
    if (this.isAiResponding) {
      this.stopAiResponse();
      return;
    }

    if (this.userInput.trim().length > 0) {
      this.showQuestions = false;
      // Capture and clear the box before sending so the UI resets immediately.
      const msg = this.userInput;
      this.userInput = '';
      this.sendMessage(msg);
    } else if (this.isSpeaking && !this.serverAudio) {
      this.pauseAudio();
    } else if (this.isAudioPaused) {
      this.resumeAudio();
    } else {
      this.openMicrophonePopup();
    }
  }
1119
 
1120
+ startListening(): void {
1121
+ this.isListening = true;
1122
+ this.isProcessingSpeech = false;
1123
+
1124
+ if (navigator.mediaDevices?.getUserMedia) {
1125
+ navigator.mediaDevices.getUserMedia({ audio: true })
1126
+ .then(() => {
1127
+ if (this.recognition) {
1128
+ this.recognition.start();
1129
+ this.recognition.onresult = (event: any) => {
1130
+ if (event.results && event.results[0]) {
1131
+ const transcript = event.results[0][0].transcript.trim();
1132
+ this.userInput = transcript;
1133
+ if (this.userInput.trim()) this.sendMessage();
1134
+ this.recognition.stop();
1135
+ this.isListening = false;
1136
+ }
1137
+ };
1138
+ this.recognition.onnomatch = () => alert('No speech detected. Please try again.');
1139
+ this.recognition.onend = () => { this.isListening = false; };
1140
+ this.recognition.onerror = (error: any) => {
1141
+ console.error('Speech Recognition Error:', error);
1142
+ this.isListening = false;
1143
+ if (error.error === 'not-allowed') alert('Microphone permission denied.');
1144
+ };
1145
+ } else {
1146
+ alert('Speech Recognition not supported in this browser.');
1147
+ }
1148
+ }).catch((error) => {
1149
+ console.error('Microphone access denied:', error);
1150
+ this.errorMessage = 'Please enable microphone access to use this feature.';
1151
+ this.isListening = true;
1152
+ });
1153
+ } else {
1154
+ alert('Microphone access not supported in this browser.');
1155
+ }
1156
+ }
1157
+
1158
+ private stopPopupListening(): void {
1159
+ try {
1160
+ if (this.recognition && typeof this.recognition.stop === 'function') {
1161
+ this.recognition.stop();
1162
+ }
1163
+ } catch (e) {
1164
+ console.warn('Error stopping popup recognition:', e);
1165
+ }
1166
+ this._restoreRecognitionHandlers();
1167
+ this.isListening = false;
1168
+ this.cdr.detectChanges();
1169
+ }
1170
+
1171
  /**
   * Starts live (interim-result) recognition for the microphone popup.
   * Temporarily replaces the shared recognizer's `onresult` handler and
   * `interimResults` setting (saved first, restored via
   * `_restoreRecognitionHandlers` on end/error). Streams partial text into
   * `popupTranscript` and stops automatically after the first final result.
   */
  private startPopupListening(): void {
    this.isListening = true;
    this.isProcessingSpeech = false;
    this.popupTranscript = '';
    this.errorMessage = '';

    if (!this.recognition) {
      this.errorMessage = 'Speech Recognition not available in this browser.';
      this.isListening = false;
      this.cdr.detectChanges();
      return;
    }

    // Save the shared recognizer's state so it can be restored afterwards.
    this._savedRecognitionOnResult = this.recognition.onresult;
    this._savedInterimResults = Boolean(this.recognition.interimResults);

    this.recognition.interimResults = true;
    this.popupListeningMode = true;

    if (navigator.mediaDevices?.getUserMedia) {
      navigator.mediaDevices.getUserMedia({ audio: true })
        .then(() => {
          try {
            this.recognition.onresult = (event: any) => {
              // Accumulate final vs. interim segments separately.
              let interim = '';
              let final = '';
              for (let i = event.resultIndex; i < event.results.length; i++) {
                const res = event.results[i];
                if (res.isFinal) final += res[0].transcript + ' ';
                else interim += res[0].transcript + ' ';
              }
              const display = (final + interim).trim();
              this.popupTranscript = display;
              this.cdr.detectChanges();
              // Stop listening once a final segment has arrived.
              if (final && final.trim()) {
                try { this.recognition.stop(); } catch { /* noop */ }
                this.isListening = false;
                this.cdr.detectChanges();
              }
            };

            this.recognition.onnomatch = () => {
              this.errorMessage = 'No speech detected. Please try again.';
              this.isListening = false;
              this.cdr.detectChanges();
            };

            this.recognition.onend = () => {
              this.isListening = false;
              this.popupListeningMode = false;
              this._restoreRecognitionHandlers();
              this.cdr.detectChanges();
            };

            this.recognition.onerror = (error: any) => {
              console.error('Popup Speech Recognition Error:', error);
              this.errorMessage = 'Speech recognition error.';
              this.isListening = false;
              this.popupListeningMode = false;
              this._restoreRecognitionHandlers();
              this.cdr.detectChanges();
            };

            this.recognition.start();
          } catch (err) {
            console.error('startPopupListening start error:', err);
            this.errorMessage = 'Unable to start speech recognition.';
            this.isListening = false;
            this.popupListeningMode = false;
            this._restoreRecognitionHandlers();
            this.cdr.detectChanges();
          }
        }).catch((error) => {
          console.error('Microphone access denied for popup:', error);
          this.errorMessage = 'Please enable microphone access to use this feature.';
          this.isListening = false;
          this.popupListeningMode = false;
          this._restoreRecognitionHandlers();
          this.cdr.detectChanges();
        });
    } else {
      this.errorMessage = 'Microphone access not supported in this browser.';
      this.isListening = false;
      this.popupListeningMode = false;
      this._restoreRecognitionHandlers();
      this.cdr.detectChanges();
    }
  }
1259
+
1260
  /**
   * Restores the shared recognizer's `onresult` handler and `interimResults`
   * flag saved by `startPopupListening`, and leaves popup mode.
   */
  private _restoreRecognitionHandlers(): void {
    try {
      if (!this.recognition) return;
      if (this._savedRecognitionOnResult) {
        this.recognition.onresult = this._savedRecognitionOnResult;
        this._savedRecognitionOnResult = null;
      }
      this.recognition.interimResults = Boolean(this._savedInterimResults);
      this._savedInterimResults = false;
      this.popupListeningMode = false;
    } catch (err) {
      console.warn('Error restoring recognition handlers:', err);
    }
  }
1274
 
1275
+
1276
+
1277
  addNewLine(event: KeyboardEvent): void {
1278
  if (event.key === 'Enter' && event.shiftKey) {
1279
  event.preventDefault();
 
1287
  textarea.style.height = `${textarea.scrollHeight}px`;
1288
  }
1289
 
1290
+ handleEnterPress(event: KeyboardEvent): void {
1291
+ if (this.isSpeaking && !this.serverAudio) { event.preventDefault(); return; }
1292
+ if (event.key === 'Enter') {
1293
+ if (!event.shiftKey) {
1294
+ event.preventDefault();
1295
+ this.handleButtonClick();
1296
+ } else {
1297
+ event.preventDefault();
1298
+ this.userInput += '\n';
1299
+ }
1300
+ }
1301
+ }
1302
+
1303
  // Navbar toggle states (persisted by saveToggleStates / loadToggleStates).
  isVoiceEnabled = false;      // global voice/TTS toggle
  isTutorEnabled = false;      // tutor/video mode toggle
  isSyllabusEnabled = true;    // syllabus panel shown by default
  isBreadcrumbEnabled = false; // breadcrumb trail toggle
1307
+
1308
  /**
   * Flips the global voice toggle only. The mute/stop side effects and the
   * persistence call below were deliberately disabled — see inline notes.
   */
  toggleVoice(): void {
    // Toggle voice state and mute flag independently of tutor/video.
    this.isVoiceEnabled = !this.isVoiceEnabled;
    //this.isMuted = !this.isVoiceEnabled;

    //// When turning voice OFF, stop voice TTS only — do not stop videos.
    //if (!this.isVoiceEnabled) {
    //  this.stopVoiceOnly();
    //}

    //if (this.serverAudio) {
    //  try { this.serverAudio.volume = this.isMuted ? 0 : 1; } catch { /* noop */ }
    //}
    //this.saveToggleStates();
    // NOTE(review): unlike toggleSyllabus/toggleBreadcrumb this no longer
    // persists its state — confirm that is intended.
  }
1323
+
1324
  /**
   * Flips the global tutor/video mode flag.
   * NOTE(review): the body only toggles the flag — the "appropriate actions"
   * the comment mentions appear to have been removed, and nothing is
   * persisted. Confirm this is intentional.
   */
  toggleTutor(): void {
    // Toggle the global tutor/video mode and perform the appropriate actions.
    this.isTutorEnabled = !this.isTutorEnabled;
  }
1330
+
1331
+ toggleSyllabus(): void { this.isSyllabusEnabled = !this.isSyllabusEnabled; this.saveToggleStates(); }
1332
+ toggleBreadcrumb(): void { this.isBreadcrumbEnabled = !this.isBreadcrumbEnabled; this.saveToggleStates(); }
1333
+
1334
  /**
   * Points the global video player at the most recent AI message that has a
   * `videoUrl`; clears the player when none exists.
   */
  private playGlobalVideoFromLatest(): void {
    // Search from the end by reversing a copy (the original order is untouched).
    const idx = [...this.messages].reverse().findIndex(m => m.from === 'ai' && m.videoUrl);
    if (idx === -1) { this.videoUrl = ''; return; }
    // Map the reversed index back onto the real array.
    const actualIndex = this.messages.length - 1 - idx;
    const msg = this.messages[actualIndex];
    this.videoUrl = msg.videoUrl || '';
    this.cdr.detectChanges();
  }
1342
+
1343
+ private saveToggleStates(): void {
1344
+ if (isPlatformBrowser(this.platformId)) {
1345
+ localStorage.setItem('voiceEnabled', String(this.isVoiceEnabled));
1346
+ localStorage.setItem('tutorEnabled', String(this.isTutorEnabled));
1347
+ localStorage.setItem('syllabusEnabled', String(this.isSyllabusEnabled));
1348
+ localStorage.setItem('breadcrumbEnabled', String(this.isBreadcrumbEnabled));
1349
+ }
1350
  }
1351
 
1352
+ private loadToggleStates(): void {
1353
+ if (isPlatformBrowser(this.platformId)) {
1354
+ this.isVoiceEnabled = localStorage.getItem('voiceEnabled') === 'true';
1355
+ this.isTutorEnabled = localStorage.getItem('tutorEnabled') === 'true';
1356
+ this.isSyllabusEnabled = localStorage.getItem('syllabusEnabled') !== 'false';
1357
+ this.isBreadcrumbEnabled = localStorage.getItem('breadcrumbEnabled') === 'true';
1358
+ }
1359
+ }
1360
+
1361
+ private stopVoiceOnly(): void {
1362
+ try { if (window.speechSynthesis.speaking || window.speechSynthesis.paused) window.speechSynthesis.cancel(); } catch { }
1363
+ this.speechSynthesisInstance = null;
1364
+ this.isSpeaking = false;
1365
+ }
1366
+
1367
  /**
   * Hard-stops every audio source (server audio + browser TTS), turns the
   * global voice toggle off, mutes, and resets all per-message playing flags.
   */
  private stopAllAudioAndMute(): void {
    this.stopServerAudio();
    try { if (window.speechSynthesis.speaking || window.speechSynthesis.paused) window.speechSynthesis.cancel(); } catch { }
    this.isVoiceEnabled = false;
    this.isMuted = true;
    this.messages.forEach(m => { m.isPlaying = false; });
    this.isReadingIndex = null;
    this.isSpeaking = false;
    this.isAudioPaused = false;
    this.cdr.detectChanges();
  }
1378
+
1379
+ clearVideoUrl(): void {
1380
+ this.videoUrl = '';
1381
+ if (this.isTutorEnabled) {
1382
+ this.isTutorEnabled = false;
1383
+ this.saveToggleStates();
1384
+ }
1385
+ this.cdr.detectChanges();
1386
+ }
1387
+
1388
  /**
   * Stops every video: clears the global player URL, pauses and rewinds all
   * inline <video> elements in the DOM, and resets per-message video state.
   */
  private stopAllVideo(): void {
    this.videoUrl = '';

    // Pause any inline video elements in the DOM (prevents a video from continuing to play)
    try {
      const vids = Array.from(document.querySelectorAll<HTMLVideoElement>('[id^="inline-video-"]'));
      vids.forEach(v => {
        try { v.pause(); v.currentTime = 0; } catch { /* noop */ }
      });
    } catch (err) {
      /* noop */
    }

    // Clear per-message playback flags so the template hides all players.
    this.messages.forEach((m, idx) => {
      m.playingVideoUrl = '';
      this.isVideoEnabledIndex[idx] = false;
    });
    this.cdr.detectChanges();
  }
1407
+
1408
  /**
   * Chooses the icon for the multi-purpose button. The priority order here
   * mirrors the branch order in handleButtonClick — keep them in sync.
   */
  getButtonIcon(): string {
    if (this.isAiResponding) return 'assets/images/chat/stop.png';
    if (this.serverAudio && !this.serverAudio.paused) return 'assets/images/chat/microphone-icon.png';
    if (this.userInput.trim().length > 0) return 'assets/images/chat/send-icon.png';
    if (this.isSpeaking && !this.serverAudio) return 'assets/images/chat/pause-icon.png';
    if (this.isAudioPaused) return 'assets/images/chat/resume-icon.png';
    return 'assets/images/chat/microphone-icon.png';
  }
1416
+
1417
+ goToHomePageShortcut(): void { this.router.navigate(['/home']); }
1418
+
1419
+
1420
+
1421
+ openUserGuide(): void { this.showUserGuide = true; }
1422
+ closeUserGuide(): void { this.showUserGuide = false; }
1423
+
1424
  /**
   * Full stop of all spoken output: cancels browser TTS, stops server audio,
   * cancels any in-flight simulated AI response interval, and resets flags.
   */
  stopSpeaking(): void {
    try {
      if (window.speechSynthesis?.speaking || window.speechSynthesis?.paused) {
        window.speechSynthesis.cancel();
      }
    } catch { /* noop */ }
    this.speechSynthesisInstance = null;
    try { this.stopServerAudio(); } catch { /* noop */ }
    this.isSpeaking = false;
    this.isAudioPaused = false;
    // Also cancel the streaming-response timer, if one is running.
    if (this.aiResponseInterval) { clearInterval(this.aiResponseInterval); this.aiResponseInterval = null; }
    this.isAiResponding = false;
    this.cdr.detectChanges();
  }
1438
 
1439
+ muteMicrophone(): void {
1440
+ try {
1441
+ if (this.recognition && typeof this.recognition.stop === 'function') {
1442
+ this.recognition.stop();
1443
+ }
1444
+ } catch (e) {
1445
+ console.warn('Error stopping recognition:', e);
1446
  }
1447
+ this.isListening = false;
1448
+ this.isProcessingSpeech = false;
1449
+ this.errorMessage = '';
1450
+ this.cdr.detectChanges();
1451
+ }
1452
+
1453
+ openMicrophoneSettings(): void {
1454
+ const ua = navigator.userAgent || '';
1455
+ try {
1456
+ if (ua.includes('Edg') || ua.includes('Edge')) {
1457
+ window.open('edge://settings/content/microphone', '_blank');
1458
+ } else if (ua.includes('Chrome') && !ua.includes('Chromium')) {
1459
+ window.open('chrome://settings/content/microphone', '_blank');
1460
+ } else if (ua.includes('Firefox')) {
1461
+ window.open('about:preferences#privacy', '_blank');
1462
+ } else if (/Safari/.test(ua) && /Macintosh/.test(navigator.platform)) {
1463
+ alert('Open Safari → Settings (or Preferences) → Websites → Microphone to enable access.');
1464
+ } else {
1465
+ alert("Please check your browser's settings to enable the microphone.");
1466
+ }
1467
+ } catch (err) {
1468
+ console.error('openMicrophoneSettings error:', err);
1469
+ alert("Unable to open settings automatically. Please check your browser's microphone/privacy settings.");
1470
+ }
1471
+ this.cdr.detectChanges();
1472
+ }
1473
+
1474
+ private _isShortFollowup(text: string | undefined): boolean {
1475
+ if (!text) return false;
1476
+ const t = text.toLowerCase().replace(/[^\w\s?]/g, "").trim();
1477
+ if (!t) return false;
1478
+ if (/\b(example|examples|more examples|another example|give example|more)\b/.test(t)) {
1479
+ return t.split(/\s+/).length <= 8;
1480
+ }
1481
+ return ["more", "more?", "another?", "another example?"].includes(t);
1482
+ }
1483
+
1484
+ // Add this method inside the ChatComponent class (near other audio helpers)
1485
+ stopReadAloud(): void {
1486
+ // Stop any server audio and reset UI playback state
1487
+ try { this.stopServerAudio(); } catch { /* noop */ }
1488
+ this.isReadingIndex = null;
1489
+ this.isSpeaking = false;
1490
+ this.cdr.detectChanges();
1491
+ }
1492
+
1493
+ openMicrophonePopup(): void {
1494
+ this._recordingFinalBuffer = '';
1495
+ this._recordingInterimBuffer = '';
1496
+ this.popupTranscript = '';
1497
+ this.errorMessage = '';
1498
+ this.showMicPopup = true;
1499
+ setTimeout(() => this.startRecording(), 200);
1500
+ }
1501
+
1502
+ closeMicrophonePopup(): void {
1503
+ this.stopRecording();
1504
+ this.showMicPopup = false;
1505
+ this.popupTranscript = '';
1506
+ this._recordingFinalBuffer = '';
1507
+ this._recordingInterimBuffer = '';
1508
+ this.errorMessage = '';
1509
+ this.cdr.detectChanges();
1510
+ }
1511
+
1512
+ async startRecording(): Promise<void> {
1513
+ if (!this.recognition) { this.errorMessage = 'Speech recognition not supported.'; return; }
1514
+
1515
+ this._recordingFinalBuffer = '';
1516
+ this._recordingInterimBuffer = '';
1517
+ this.popupTranscript = '';
1518
+ this.errorMessage = '';
1519
+ this.isRecording = true;
1520
+
1521
+ try { this.recognition.interimResults = true; } catch { }
1522
+ try { this.recognition.start(); } catch (e) {
1523
+ console.warn('Could not start recognition', e);
1524
+ if (!this._recognitionActive) {
1525
+ setTimeout(() => { try { this.recognition.start(); } catch { } }, 300);
1526
+ }
1527
+ }
1528
+
1529
+ try { await this.startAnalyzer(); } catch (err) { console.warn('Analyzer failed to start', err); }
1530
+ this.cdr.detectChanges();
1531
+ }
1532
+
1533
+ async stopRecording(): Promise<void> {
1534
+ if (this._restartTimer) { clearTimeout(this._restartTimer); this._restartTimer = null; }
1535
+ if (this.recognition && this.isRecording) { try { this.recognition.stop(); } catch { } }
1536
+ try { this.stopAnalyzer(); } catch { /* noop */ }
1537
+ this.isRecording = false;
1538
+
1539
+ const finalText = (this._recordingFinalBuffer || '').trim();
1540
+ const interimText = (this._recordingInterimBuffer || '').trim();
1541
+ const combinedRaw = (finalText + ' ' + interimText).trim();
1542
+
1543
+ if (!combinedRaw) {
1544
+ this.popupTranscript = '';
1545
+ this.cdr.detectChanges();
1546
+ return;
1547
+ }
1548
+
1549
+ this.popupTranscript = 'Processing…';
1550
+ this.cdr.detectChanges();
1551
+
1552
+ let punctuated = combinedRaw;
1553
+ try {
1554
+ console.log('[VOICE] raw transcript:', combinedRaw);
1555
+ // call backend at /rag/punctuate
1556
+ punctuated = await this.punctuateText(combinedRaw);
1557
+ console.log('[VOICE] punctuated result:', punctuated);
1558
+ } catch (err) {
1559
+ console.warn('Punctuation API failed', err);
1560
+ punctuated = combinedRaw;
1561
+ }
1562
+
1563
+ let normalized = this.normalizeTranscript(punctuated);
1564
+
1565
+ const hasTerminalPunctuation = /[.?!]$/.test(normalized);
1566
+ const questionPattern = /^(who|what|when|where|why|how|which|whom|whose|is|are|am|was|were|do|does|did|can|could|would|will|shall|should|have|has|had)\b/i;
1567
+ if (!hasTerminalPunctuation && questionPattern.test(combinedRaw)) {
1568
+ normalized = normalized + '?';
1569
+ }
1570
+
1571
+ this.popupTranscript = normalized;
1572
+ this.cdr.detectChanges();
1573
+ }
1574
+
1575
+ private extractAssistantContent(raw: string): string {
1576
+ if (!raw) return raw;
1577
+ try {
1578
+ // Look for ChatCompletionMessage(content='...') or content="..."
1579
+ const re1 = /message=ChatCompletionMessage\(\s*content=(['"])((?:\\.|(?!\1).)*)\1/;
1580
+ const m1 = raw.match(re1);
1581
+ if (m1 && m1[2]) return m1[2].replace(/\\'/g, "'").replace(/\\"/g, '"').trim();
1582
+
1583
+ const re2 = /ChatCompletionMessage\(\s*content=(['"])((?:\\.|(?!\1).)*)\1/;
1584
+ const m2 = raw.match(re2);
1585
+ if (m2 && m2[2]) return m2[2].replace(/\\'/g, "'").replace(/\\"/g, '"').trim();
1586
+
1587
+ // Fallback: any content='...' anywhere
1588
+ const re3 = /content=(['"])((?:\\.|(?!\1).)*)\1/;
1589
+ const m3 = raw.match(re3);
1590
+ if (m3 && m3[2]) return m3[2].replace(/\\'/g, "'").replace(/\\"/g, '"').trim();
1591
+ } catch (e) {
1592
+ console.warn('extractAssistantContent error', e);
1593
+ }
1594
+ return raw.trim();
1595
+ }
1596
+
1597
+ private async punctuateText(raw: string): Promise<string> {
1598
+ if (!raw) return raw;
1599
+ try {
1600
+ // Use ApiService so backend URL is centralized
1601
+ const resp$ = this.apiService.punctuate(raw);
1602
+ const res = await lastValueFrom(resp$);
1603
+ if (res && typeof res.punctuated === 'string' && res.punctuated.trim().length) {
1604
+ const punctuated = res.punctuated.trim();
1605
+ const extracted = this.extractAssistantContent(punctuated);
1606
+ return extracted || punctuated;
1607
+ }
1608
+ } catch (err) {
1609
+ console.warn('punctuateText error', err);
1610
+ }
1611
+ return raw;
1612
+ }
1613
+
1614
+ confirmAndSendTranscript(): void {
1615
+ let text = (this.popupTranscript || '').trim();
1616
+ if (!text) { this.errorMessage = 'No speech captured'; return; }
1617
+
1618
+ text = this.normalizeTranscript(text);
1619
+ // still emit in case other components listen
1620
+ this.transcriptConfirmed.emit(text);
1621
+
1622
+ // Send the extracted/normalized text as a user message to the chat
1623
+ try {
1624
+ this.sendMessage(text);
1625
+ } catch (e) {
1626
+ console.warn('Failed to send transcript as message', e);
1627
+ }
1628
+
1629
+ // reset popup state
1630
+ this._recordingFinalBuffer = '';
1631
+ this._recordingInterimBuffer = '';
1632
+ this.showMicPopup = false;
1633
+ this.isRecording = false;
1634
+ if (this._restartTimer) { clearTimeout(this._restartTimer); this._restartTimer = null; }
1635
+ this.popupTranscript = '';
1636
+ this.cdr.detectChanges();
1637
+ }
1638
+
1639
+ private normalizeTranscript(text: string): string {
1640
+ if (!text) return text;
1641
+ let t = text.trim();
1642
+
1643
+ const mappings: Array<[RegExp, string]> = [
1644
+ [/\b(full stop|period|dot)\b/gi, '.'],
1645
+ [/\b(question mark|question)\b/gi, '?'],
1646
+ [/\b(exclamation mark|exclamation|exclaim)\b/gi, '!'],
1647
+ [/\b(comma)\b/gi, ','],
1648
+ [/\b(colon)\b/gi, ':'],
1649
+ [/\b(semicolon)\b/gi, ';'],
1650
+ [/\b(ellipsis|dot dot dot|three dots)\b/gi, '...'],
1651
+ [/\b(new line|newline|new paragraph|line break)\b/gi, '\n'],
1652
+ [/\b(open parenthesis|open bracket)\b/gi, '('],
1653
+ [/\b(close parenthesis|close bracket)\b/gi, ')'],
1654
+ [/\b(double quote|quote|quotation)\b/gi, '"'],
1655
+ [/\b(single quote|apostrophe)\b/gi, "'"],
1656
+ [/\b(dash|hyphen)\b/gi, '-'],
1657
+ [/\b(percent|percent sign)\b/gi, '%'],
1658
+ [/\b(and sign|ampersand)\b/gi, '&'],
1659
+ [/\b(at sign)\b/gi, '@'],
1660
+ [/\b(forward slash|slash)\b/gi, '/'],
1661
+ [/\b(backslash)\b/gi, '\\\\']
1662
+ ];
1663
+
1664
+ for (const [re, rep] of mappings) t = t.replace(re, rep);
1665
+
1666
+ t = t.replace(/\s+([,.:;?!%'\)\]\}])/g, '$1');
1667
+ t = t.replace(/\s+([\(\[\{"'`])/g, '$1');
1668
+ t = t.replace(/([.?!:;,%\)\]'"-]{1,3})(?!\s|\n|$)/g, '$1 ');
1669
+ t = t.replace(/[ \t]{2,}/g, ' ');
1670
+ t = t.split('\n').map(line => line.trim()).join('\n');
1671
+ t = t.replace(/(^|[\n\.!\?]\s+)([a-z])/g, (m, p1, p2) => p1 + p2.toUpperCase());
1672
+
1673
+ return t.trim();
1674
+ }
1675
+
1676
+ // WebAudio analyzer (unchanged) — methods startAnalyzer, stopAnalyzer, drawWaveform, ngOnDestroy...
1677
+ private async startAnalyzer(): Promise<void> {
1678
+ if (!isPlatformBrowser(this.platformId)) return;
1679
+
1680
+ // Ensure canvas is available (rare race when popup just opened)
1681
+ if (!this.waveformCanvas || !this.waveformCanvas.nativeElement) {
1682
+ await new Promise(r => setTimeout(r, 80));
1683
+ if (!this.waveformCanvas || !this.waveformCanvas.nativeElement) {
1684
+ console.warn('Waveform canvas not available');
1685
+ return;
1686
+ }
1687
+ }
1688
+
1689
+ try {
1690
+ // Ensure we have a running AudioContext
1691
+ if (!this.audioContext || (this.audioContext && this.audioContext.state === 'closed')) {
1692
+ this.audioContext = new (window.AudioContext || (window as any).webkitAudioContext)();
1693
+ } else if (this.audioContext.state === 'suspended') {
1694
+ // resume if previously suspended
1695
+ try { await this.audioContext.resume(); } catch { /* noop */ }
1696
+ }
1697
+
1698
+ // Always request a fresh MediaStream for analyzer — it's cheap after permission is granted
1699
+ if (this.mediaStream) {
1700
+ try { this.mediaStream.getTracks().forEach(t => t.stop()); } catch { /* noop */ }
1701
+ this.mediaStream = null;
1702
+ }
1703
+
1704
+ const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
1705
+ this.mediaStream = stream;
1706
+
1707
+ // Create source and analyser
1708
+ const source = this.audioContext.createMediaStreamSource(stream);
1709
+
1710
+ // Safely disconnect previous analyser if any
1711
+ try { if (this.analyser) { try { this.analyser.disconnect(); } catch { } } } catch { /* noop */ }
1712
+
1713
+ const analyser = this.audioContext.createAnalyser();
1714
+ analyser.fftSize = 2048;
1715
+ analyser.smoothingTimeConstant = 0.85;
1716
+ source.connect(analyser);
1717
+
1718
+ this.analyser = analyser;
1719
+ const bufferLength = analyser.fftSize;
1720
+ this.dataArray = new Uint8Array(bufferLength);
1721
+
1722
+ // Start drawing loop (will cancel any previous RAF)
1723
+ this.drawWaveform();
1724
+ } catch (err) {
1725
+ console.warn('startAnalyzer error', err);
1726
+ // clean partial state on failure
1727
+ try { if (this.mediaStream) { this.mediaStream.getTracks().forEach(t => t.stop()); this.mediaStream = null; } } catch { /* noop */ }
1728
+ try { if (this.analyser) { this.analyser.disconnect(); this.analyser = null; } } catch { /* noop */ }
1729
+ this.dataArray = null;
1730
+ // do not swallow error — caller can decide how to handle
1731
+ throw err;
1732
+ }
1733
+ }
1734
+
1735
+ private stopAnalyzer(): void {
1736
+ try {
1737
+ if (this.animationFrameId) { cancelAnimationFrame(this.animationFrameId); this.animationFrameId = null; }
1738
+
1739
+ if (this.analyser) {
1740
+ try { this.analyser.disconnect(); } catch { /* noop */ }
1741
+ this.analyser = null;
1742
+ }
1743
+
1744
+ if (this.mediaStream) {
1745
+ try { this.mediaStream.getTracks().forEach(t => t.stop()); } catch { /* noop */ }
1746
+ this.mediaStream = null;
1747
+ }
1748
+
1749
+ // Close the AudioContext to fully reset state (async). Do not await here to avoid blocking UI.
1750
+ try {
1751
+ if (this.audioContext && typeof this.audioContext.close === 'function') {
1752
+ this.audioContext.close().catch(() => { /* noop */ }).finally(() => { this.audioContext = null; });
1753
+ } else {
1754
+ this.audioContext = null;
1755
+ }
1756
+ } catch { this.audioContext = null; }
1757
+
1758
+ this.dataArray = null;
1759
+
1760
+ // Clear canvas
1761
+ if (this.waveformCanvas && this.waveformCanvas.nativeElement) {
1762
+ const c = this.waveformCanvas.nativeElement;
1763
+ const ctx = c.getContext('2d');
1764
+ if (ctx) ctx.clearRect(0, 0, c.width, c.height);
1765
+ }
1766
+ } catch (err) {
1767
+ console.warn('stopAnalyzer error', err);
1768
+ }
1769
+ }
1770
+
1771
+ private drawWaveform(): void {
1772
+ if (!this.waveformCanvas || !this.waveformCanvas.nativeElement || !this.analyser || !this.dataArray) return;
1773
+ const canvas = this.waveformCanvas.nativeElement;
1774
+ const ctx = canvas.getContext('2d');
1775
+ if (!ctx) return;
1776
+ const dpr = window.devicePixelRatio || 1;
1777
+
1778
+ const resize = () => {
1779
+ const rect = canvas.getBoundingClientRect();
1780
+ const w = Math.max(1, Math.floor(rect.width * dpr));
1781
+ const h = Math.max(1, Math.floor(rect.height * dpr));
1782
+ if (canvas.width !== w || canvas.height !== h) { canvas.width = w; canvas.height = h; }
1783
+ };
1784
+
1785
+ const render = () => {
1786
+ // If popup was closed or analyzer removed, stop rendering
1787
+ if (!this.waveformCanvas || !this.waveformCanvas.nativeElement || !this.analyser || !this.dataArray) {
1788
+ if (this.animationFrameId) { cancelAnimationFrame(this.animationFrameId); this.animationFrameId = null; }
1789
+ return;
1790
+ }
1791
+
1792
+ // If canvas is not visible (e.g. popup hidden), stop loop to avoid wasted CPU and potential silent failures
1793
+ // offsetParent is null for display:none, also check bounding rect sanity
1794
+ const rect = canvas.getBoundingClientRect();
1795
+ if (rect.width === 0 || rect.height === 0 || !canvas.offsetParent) {
1796
+ if (this.animationFrameId) { cancelAnimationFrame(this.animationFrameId); this.animationFrameId = null; }
1797
+ return;
1798
+ }
1799
+
1800
+ resize();
1801
+
1802
+ try {
1803
+ this.analyser.getByteTimeDomainData(this.dataArray);
1804
+ } catch (e) {
1805
+ // analyser may have been disconnected / audioContext closed mid-frame
1806
+ console.warn('analyser.getByteTimeDomainData failed', e);
1807
+ if (this.animationFrameId) { cancelAnimationFrame(this.animationFrameId); this.animationFrameId = null; }
1808
+ return;
1809
+ }
1810
+
1811
+ let sum = 0;
1812
+ for (let i = 0; i < this.dataArray.length; i++) {
1813
+ const v = this.dataArray[i] - 128;
1814
+ sum += v * v;
1815
+ }
1816
+ const rms = Math.sqrt(sum / this.dataArray.length) / 128;
1817
+ const level = Math.min(1, Math.max(0, rms));
1818
+
1819
+ ctx.clearRect(0, 0, canvas.width, canvas.height);
1820
+
1821
+ const baselineY = canvas.height / 2;
1822
+
1823
+ // faint baseline
1824
+ ctx.save();
1825
+ ctx.globalAlpha = 0.25;
1826
+ ctx.strokeStyle = '#666';
1827
+ ctx.lineWidth = Math.max(1, 1 * dpr);
1828
+ ctx.setLineDash([2 * dpr, 3 * dpr]);
1829
+ ctx.beginPath();
1830
+ ctx.moveTo(0, baselineY);
1831
+ ctx.lineTo(canvas.width, baselineY);
1832
+ ctx.stroke();
1833
+ ctx.setLineDash([]);
1834
+ ctx.restore();
1835
+
1836
+ // waveform
1837
+ ctx.lineWidth = Math.max(1, 1 * dpr);
1838
+ ctx.strokeStyle = 'rgba(37,168,90,0.95)';
1839
+ ctx.beginPath();
1840
+ const slice = canvas.width / this.dataArray.length;
1841
+ let x = 0;
1842
+ for (let i = 0; i < this.dataArray.length; i++) {
1843
+ const v = this.dataArray[i] / 128.0;
1844
+ const y = (v * canvas.height) / 2;
1845
+ const drawY = baselineY - (y - canvas.height / 2) * 0.6;
1846
+ if (i === 0) ctx.moveTo(x, drawY); else ctx.lineTo(x, drawY);
1847
+ x += slice;
1848
+ }
1849
+ ctx.stroke();
1850
+
1851
+ // highlight center bar that responds to level
1852
+ const highlightMaxW = canvas.width * 0.7;
1853
+ const highlightW = Math.max(2 * dpr, highlightMaxW * (0.05 + level * 0.95));
1854
+ const hh = Math.max(4 * dpr, 6 * dpr);
1855
+ const hx = (canvas.width - highlightW) / 2;
1856
+ const hy = baselineY - hh / 2;
1857
+ ctx.save();
1858
+ ctx.globalAlpha = 0.18 + level * 0.3;
1859
+ ctx.fillStyle = '#25a85a';
1860
+ ctx.fillRect(hx - 6 * dpr, hy - 6 * dpr, highlightW + 12 * dpr, hh + 12 * dpr);
1861
+ ctx.restore();
1862
+
1863
+ ctx.fillStyle = '#25a85a';
1864
+ ctx.globalAlpha = 1;
1865
+ ctx.fillRect(hx, hy, highlightW, hh);
1866
+
1867
+ this.animationFrameId = requestAnimationFrame(render);
1868
+ };
1869
+
1870
+ // Start the loop (ensure any previous RAF is cancelled first)
1871
+ if (this.animationFrameId) cancelAnimationFrame(this.animationFrameId);
1872
+ this.animationFrameId = requestAnimationFrame(render);
1873
  }
1874
+ //ngOnDestroy(): void {
1875
+ // if (this._restartTimer) { clearTimeout(this._restartTimer); this._restartTimer = null; }
1876
+ // try { this.stopAnalyzer(); } catch { }
1877
+ // if (this.recognition) {
1878
+ // try { this.recognition.stop(); } catch { }
1879
+ // }
1880
+ //}
1881
  }
src/assets/images/chat/chatbg.png DELETED

Git LFS Details

  • SHA256: 9d0d8dcc2946d0203c57342145c90b74a5a7188a30a4d01c191784b2cfe73cff
  • Pointer size: 131 Bytes
  • Size of remote file: 521 kB
src/assets/images/chat/control.png DELETED

Git LFS Details

  • SHA256: 2d37166a1708f4d6e27babc1b9bc408c505e5b79e27a7a81ef0ef29f1680ac65
  • Pointer size: 129 Bytes
  • Size of remote file: 5.89 kB
src/assets/images/chat/cross.png DELETED

Git LFS Details

  • SHA256: 14fe82ba4882f3914c1c27f056184306e9d00b977e41cdc6787358323575d9af
  • Pointer size: 129 Bytes
  • Size of remote file: 7.86 kB
src/assets/images/chat/lion.png DELETED

Git LFS Details

  • SHA256: dd8aa4627ea6c6b89d20f5ccb87f0431f61ac71faef8ab71a2c17d7e19bffe58
  • Pointer size: 131 Bytes
  • Size of remote file: 142 kB
src/assets/images/chat/mic.png DELETED

Git LFS Details

  • SHA256: 9508a2126b6376388877e634f853716ddaa88ec3f5911ca3445a9163ad7ea2e3
  • Pointer size: 130 Bytes
  • Size of remote file: 34.4 kB
src/assets/images/chat/microphone-icon.png DELETED

Git LFS Details

  • SHA256: a38d8a8216a3c391fd1bdc983ec0e2bee95d9dc230e7e531262b5f0a95ebd161
  • Pointer size: 130 Bytes
  • Size of remote file: 21.8 kB
src/assets/images/chat/pause-icon.png DELETED

Git LFS Details

  • SHA256: c97de4a30ed36a3e9fe5185cdcf5ffaa02c00effbcaa2c1569432c21e10addfc
  • Pointer size: 129 Bytes
  • Size of remote file: 4.59 kB
src/assets/images/chat/rabbit.png DELETED

Git LFS Details

  • SHA256: c2f6c2a5907c2fc15e8b807b793d1e6222d2b305c4d7b754933cb923de827da7
  • Pointer size: 131 Bytes
  • Size of remote file: 189 kB
src/assets/images/chat/resume-icon.png DELETED

Git LFS Details

  • SHA256: 9d4edfeac774bb9881fd1c02b145465d240a04666ec2fd282a3cee4c889dd730
  • Pointer size: 129 Bytes
  • Size of remote file: 6.37 kB
src/assets/images/chat/send-icon.png DELETED

Git LFS Details

  • SHA256: ae307ee408cf741b95f48c3527472fdabab691d932d00da8f7e4f736a4573527
  • Pointer size: 130 Bytes
  • Size of remote file: 10.6 kB
src/assets/images/chat/stop-button.png DELETED

Git LFS Details

  • SHA256: a01d5cf4aa76de7da282c98decad20b19a4fe37076ec9f195e850966cdae8389
  • Pointer size: 129 Bytes
  • Size of remote file: 4.08 kB
src/assets/images/chat/stop.png DELETED

Git LFS Details

  • SHA256: 5c10275efe01a0421881c8a70652110158155578243833c1434472f6cbad805f
  • Pointer size: 130 Bytes
  • Size of remote file: 31.7 kB
src/assets/images/chat/teacher.png DELETED

Git LFS Details

  • SHA256: 15980253b89516a136cfee9a11e368dd80d1af34b8001818e87a5a129eef1a04
  • Pointer size: 131 Bytes
  • Size of remote file: 124 kB
src/assets/images/chat/y.png DELETED

Git LFS Details

  • SHA256: 9f6300a89d2f783f878f877256590ab5f51ed2f7c55d07ff324e7813e4e05006
  • Pointer size: 129 Bytes
  • Size of remote file: 1.96 kB