npc0 committed on
Commit
c9ad506
·
verified ·
1 Parent(s): 08cbbb7

Upload voice_interview.html

Browse files
Files changed (1) hide show
  1. voice_interview.html +536 -0
voice_interview.html ADDED
@@ -0,0 +1,536 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
<!DOCTYPE html>
<html lang="en">
<head>
  <meta charset="UTF-8">
  <meta name="viewport" content="width=device-width, initial-scale=1.0">
  <title>Voice Interview - Project Story</title>

  <!-- Tailwind CDN build supplies the utility classes used throughout the page. -->
  <script src="https://cdn.tailwindcss.com"></script>
  <!-- driver.js powers the step-by-step onboarding tour. -->
  <script src="https://cdn.jsdelivr.net/npm/driver.js@latest/dist/driver.js.iife.js"></script>
  <link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/driver.js@latest/dist/driver.css">

  <style>
    body { font-family: system-ui, -apple-system, sans-serif; }

    /* Shared purple gradient used by the header and primary buttons. */
    .gradient-bg { background: linear-gradient(135deg, #667eea 0%, #764ba2 100%); }

    /* Pulsing highlight while the microphone is live. */
    .recording { animation: pulse 1.5s infinite; }
    @keyframes pulse {
      0%, 100% { opacity: 1; transform: scale(1); }
      50%      { opacity: 0.8; transform: scale(1.05); }
    }

    /* Animated gradient sweep while the AI voice is playing. */
    .speaking { animation: speaking 0.8s infinite; }
    @keyframes speaking {
      0%, 100% { background-position: 0% 50%; }
      50%      { background-position: 100% 50%; }
    }

    /* Each transcript bubble fades/slides in as it is appended. */
    .transcript-msg { animation: fadeIn 0.3s ease-in; }
    @keyframes fadeIn {
      from { opacity: 0; transform: translateY(10px); }
      to   { opacity: 1; transform: translateY(0); }
    }
  </style>
</head>
47
<body class="bg-gray-50 min-h-screen flex flex-col items-center p-4">

  <div class="max-w-4xl w-full">
    <!-- Header -->
    <div class="gradient-bg text-white p-6 rounded-t-xl shadow-lg">
      <div class="flex items-center justify-between">
        <div>
          <h1 class="text-3xl font-bold mb-2">🎀 Voice Interview</h1>
          <p class="text-sm opacity-90">Tell your project story - just speak naturally!</p>
        </div>
        <!-- Explicit type="button" everywhere: <button> defaults to type="submit". -->
        <button
          type="button"
          id="help-btn"
          onclick="startTutorial()"
          class="bg-white bg-opacity-20 hover:bg-opacity-30 text-white px-4 py-2 rounded-lg text-sm font-medium transition"
          title="Show tutorial">
          ❓ Help
        </button>
      </div>
    </div>

    <!-- Main Content -->
    <div class="bg-white shadow-lg rounded-b-xl p-6">

      <!-- Instructions -->
      <div id="instructions-box" class="bg-blue-50 border border-blue-200 rounded-lg p-4 mb-6">
        <h3 class="font-bold text-blue-900 mb-2">How it works:</h3>
        <ol class="text-sm text-blue-800 space-y-1 list-decimal list-inside">
          <li>Click "Start Interview" to begin</li>
          <li>Speak naturally - the AI will ask you questions</li>
          <li>When done, click "Generate Article"</li>
          <li>Review and submit your story to GitHub</li>
        </ol>
      </div>

      <!-- WebRTC Audio Container -->
      <div id="audio-container" class="mb-6">
        <!-- FastRTC will inject audio elements here -->
      </div>

      <!-- Status Display -->
      <div class="flex flex-col items-center mb-6">
        <button
          type="button"
          id="start-btn"
          onclick="startInterview()"
          class="gradient-bg text-white px-8 py-4 rounded-full text-lg font-bold shadow-lg hover:shadow-xl transition-all mb-4">
          πŸŽ™οΈ Start Interview
        </button>

        <!-- aria-live so connection-state changes are announced to assistive tech -->
        <div id="status" class="text-center" aria-live="polite">
          <p class="text-gray-600 text-sm">Click above to begin</p>
        </div>

        <div id="speaking-indicator" class="hidden mt-4" role="status">
          <div class="flex items-center gap-2 bg-gradient-to-r from-purple-500 to-indigo-500 text-white px-4 py-2 rounded-full speaking">
            <div class="w-3 h-3 bg-white rounded-full"></div>
            <span class="text-sm font-medium">AI is speaking...</span>
          </div>
        </div>

        <div id="listening-indicator" class="hidden mt-4" role="status">
          <div class="flex items-center gap-2 bg-red-500 text-white px-4 py-2 rounded-full recording">
            <div class="w-3 h-3 bg-white rounded-full"></div>
            <span class="text-sm font-medium">Listening...</span>
          </div>
        </div>
      </div>

      <!-- Transcript Display -->
      <div id="transcript-section" class="mb-6">
        <h3 class="font-bold text-gray-800 mb-3">πŸ“ Transcript:</h3>
        <div id="transcript" class="bg-gray-50 border border-gray-200 rounded-lg p-4 min-h-[300px] max-h-[500px] overflow-y-auto space-y-3">
          <p class="text-gray-400 italic text-sm">Your conversation will appear here...</p>
        </div>
      </div>

      <!-- Actions -->
      <div id="action-buttons" class="flex gap-3">
        <button
          type="button"
          id="end-btn"
          onclick="endInterview()"
          disabled
          class="flex-1 bg-orange-600 text-white px-6 py-3 rounded-lg font-medium hover:bg-orange-700 transition disabled:opacity-50 disabled:cursor-not-allowed">
          ⏹️ End Interview
        </button>
        <button
          type="button"
          id="generate-btn"
          onclick="generateArticle()"
          disabled
          class="flex-1 gradient-bg text-white px-6 py-3 rounded-lg font-medium hover:opacity-90 transition disabled:opacity-50 disabled:cursor-not-allowed">
          πŸ“„ Generate Article
        </button>
      </div>
    </div>
  </div>

  <!-- Article Modal: role/aria-modal expose it as a dialog to assistive tech -->
  <div id="article-modal"
       class="fixed inset-0 bg-black bg-opacity-50 hidden items-center justify-center p-4 z-50"
       role="dialog" aria-modal="true" aria-labelledby="article-modal-title">
    <div class="bg-white rounded-xl max-w-4xl w-full max-h-[90vh] flex flex-col shadow-2xl">
      <div class="p-4 border-b gradient-bg text-white rounded-t-xl">
        <h2 id="article-modal-title" class="text-xl font-bold">πŸ“„ Your Article</h2>
      </div>
      <div class="p-6 overflow-y-auto flex-1">
        <div id="article-content" class="prose max-w-none">
          <p class="text-gray-500 italic">Generating...</p>
        </div>
      </div>
      <div id="article-actions" class="p-4 border-t flex gap-3">
        <button type="button" onclick="closeArticle()" class="px-6 py-2 text-gray-600 hover:bg-gray-100 rounded-lg font-medium">
          Close
        </button>
        <button type="button" onclick="downloadArticle()" class="px-6 py-2 bg-gray-600 text-white rounded-lg hover:bg-gray-700 font-medium">
          πŸ’Ύ Download
        </button>
        <button type="button" onclick="submitArticle()" id="submit-btn" class="px-6 py-2 bg-green-600 text-white rounded-lg hover:bg-green-700 font-medium">
          πŸš€ Submit to GitHub
        </button>
      </div>
    </div>
  </div>
167
+ <script type="module">
168
+ const RTC_CONFIGURATION = __RTC_CONFIGURATION__;
169
+
170
+ let pc = null;
171
+ let audioContext = null;
172
+ let webrtcId = null;
173
+ let eventSource = null;
174
+ let currentArticle = "";
175
+ let driverObj = null;
176
+
177
// First-visit detection: auto-launch the tutorial once, then remember it in
// localStorage so returning users are not interrupted again.
function checkFirstVisit() {
  if (localStorage.getItem('voiceInterviewTutorialSeen')) {
    return;
  }
  // Brief delay so the page has settled before the tour overlay appears.
  setTimeout(() => {
    startTutorial();
    localStorage.setItem('voiceInterviewTutorialSeen', 'true');
  }, 800);
}
188
+
189
// Launch (or relaunch) the driver.js guided tour over the main UI elements.
function startTutorial() {
  const createDriver = window.driver.js.driver;

  // Tear down any tour that is already running before starting a new one.
  if (driverObj) {
    driverObj.destroy();
  }

  // One step per UI region, in reading order.
  const tourSteps = [
    {
      element: '#instructions-box',
      popover: {
        title: 'πŸ‘‹ Welcome to Voice Interview!',
        description: 'This tool helps you create project stories through natural conversation. Let me show you around!',
        side: "bottom",
        align: 'start'
      }
    },
    {
      element: '#start-btn',
      popover: {
        title: 'πŸŽ™οΈ Start Your Interview',
        description: "Click this button to begin. You'll need to allow microphone access when prompted. The AI interviewer will ask you questions about your project.",
        side: "bottom",
        align: 'center'
      }
    },
    {
      element: '#transcript-section',
      popover: {
        title: 'πŸ“ Live Transcript',
        description: "Watch your conversation unfold here in real-time. You'll see both your responses and the interviewer's questions.",
        side: "top",
        align: 'start'
      }
    },
    {
      element: '#speaking-indicator',
      popover: {
        title: 'πŸ”Š Status Indicators',
        description: 'Purple indicator = AI is speaking. Red indicator = AI is listening to you. These help you know when to talk.',
        side: "top",
        align: 'center'
      }
    },
    {
      element: '#end-btn',
      popover: {
        title: '⏹️ End Interview',
        description: "When you're done sharing your story, click here to stop the interview. You can then generate your article.",
        side: "top",
        align: 'start'
      }
    },
    {
      element: '#generate-btn',
      popover: {
        title: 'πŸ“„ Generate Article',
        description: 'This transforms your conversation into a polished article. The AI will structure your responses into a compelling project story.',
        side: "top",
        align: 'end'
      }
    },
    {
      element: '#help-btn',
      popover: {
        title: '❓ Need Help?',
        description: "Click this anytime to see this tutorial again. Now you're ready to start your interview!",
        side: "bottom",
        align: 'end'
      }
    }
  ];

  driverObj = createDriver({
    showProgress: true,
    steps: tourSteps,
    // Only allow the tour to close once the final step has been reached.
    onDestroyStarted: () => {
      if (!driverObj.hasNextStep()) {
        driverObj.destroy();
      }
    }
  });

  driverObj.drive();
}
272
+
273
// Open the WebRTC session: capture the microphone, run the SDP offer/answer
// exchange with the backend, wire up remote audio playback, and start the
// transcript stream. Enables the End/Generate buttons on success.
async function startInterview() {
  const startBtn = document.getElementById('start-btn');
  const endBtn = document.getElementById('end-btn');
  const status = document.getElementById('status');

  startBtn.disabled = true;
  status.innerHTML = '<p class="text-blue-600">πŸ”„ Connecting...</p>';

  let stream = null;
  try {
    pc = new RTCPeerConnection(RTC_CONFIGURATION);

    // Random-ish session id; only needs to be unique per page session.
    // (String.prototype.substr is deprecated — slice is the supported form.)
    webrtcId = 'session_' + Date.now() + '_' + Math.random().toString(36).slice(2, 11);

    // Capture the microphone and attach every track to the connection.
    stream = await navigator.mediaDevices.getUserMedia({ audio: true });
    stream.getTracks().forEach(track => pc.addTrack(track, stream));

    // Play any audio the server sends back (the AI interviewer's voice).
    pc.ontrack = (event) => {
      const audio = new Audio();
      audio.srcObject = event.streams[0];
      audio.autoplay = true;

      // Toggle the visual indicators with playback state.
      // NOTE(review): 'ended' may never fire for a live MediaStream source in
      // some browsers — confirm, or switch to track mute/unmute events if the
      // "AI is speaking" indicator gets stuck.
      audio.onplay = () => {
        document.getElementById('speaking-indicator').classList.remove('hidden');
        document.getElementById('listening-indicator').classList.add('hidden');
      };

      audio.onended = () => {
        document.getElementById('speaking-indicator').classList.add('hidden');
        document.getElementById('listening-indicator').classList.remove('hidden');
      };
    };

    // Standard SDP offer/answer signalling with the backend.
    const offer = await pc.createOffer();
    await pc.setLocalDescription(offer);

    const response = await fetch('/webrtc/start', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({
        webrtc_id: webrtcId,
        sdp_offer: pc.localDescription.sdp
      })
    });
    // Fail fast on HTTP errors instead of choking on a non-JSON error body.
    if (!response.ok) {
      throw new Error(`Signalling failed: HTTP ${response.status}`);
    }

    const data = await response.json();
    await pc.setRemoteDescription({
      type: 'answer',
      sdp: data.sdp_answer
    });

    status.innerHTML = '<p class="text-green-600">βœ… Connected! Speak naturally.</p>';
    endBtn.disabled = false;
    document.getElementById('generate-btn').disabled = false;
    document.getElementById('listening-indicator').classList.remove('hidden');

    // Begin streaming transcript updates for this session.
    listenForTranscript();

  } catch (error) {
    console.error('Failed to start:', error);
    // Release the mic and the half-open connection so a retry starts clean —
    // closing the RTCPeerConnection alone does not stop local capture.
    if (stream) {
      stream.getTracks().forEach(track => track.stop());
    }
    if (pc) {
      pc.close();
      pc = null;
    }
    status.innerHTML = '<p class="text-red-600">❌ Failed to connect. Please check microphone permissions.</p>';
    startBtn.disabled = false;
  }
}
344
+
345
// Subscribe to the server-sent transcript stream for the current session and
// append each user/assistant message to the transcript panel as a chat bubble.
// Message text is inserted via textContent (never innerHTML interpolation) so
// transcribed or model-generated content cannot inject markup or scripts (XSS).
function listenForTranscript() {
  eventSource = new EventSource(`/outputs?webrtc_id=${webrtcId}`);

  eventSource.addEventListener('output', (event) => {
    const data = JSON.parse(event.data);
    const msg = data.message;

    // Only user/assistant messages are rendered; anything else is ignored.
    if (msg.role !== 'user' && msg.role !== 'assistant') return;

    const isUser = msg.role === 'user';

    const msgDiv = document.createElement('div');
    msgDiv.className = 'transcript-msg';

    // User bubbles are right-aligned purple; assistant bubbles left-aligned gray.
    const row = document.createElement('div');
    row.className = isUser ? 'flex justify-end' : 'flex justify-start';

    const bubble = document.createElement('div');
    bubble.className = isUser
      ? 'bg-purple-100 text-purple-900 px-4 py-2 rounded-lg max-w-[80%]'
      : 'bg-gray-100 text-gray-900 px-4 py-2 rounded-lg max-w-[80%]';

    const label = document.createElement('p');
    label.className = 'text-xs font-semibold mb-1';
    label.textContent = isUser ? 'You:' : 'Interviewer:';

    const body = document.createElement('p');
    body.textContent = msg.content; // safe: rendered as text, never parsed as HTML

    bubble.appendChild(label);
    bubble.appendChild(body);
    row.appendChild(bubble);
    msgDiv.appendChild(row);

    const transcript = document.getElementById('transcript');

    // Drop the "will appear here" placeholder on the first real message.
    const placeholder = transcript.querySelector('.text-gray-400');
    if (placeholder) placeholder.remove();

    transcript.appendChild(msgDiv);
    // Keep the newest message in view.
    transcript.scrollTop = transcript.scrollHeight;
  });
}
384
+
385
// Tear down the live session: release the microphone, close the peer
// connection and the transcript stream, and reset the UI controls.
function endInterview() {
  if (pc) {
    // Closing the RTCPeerConnection does NOT stop local capture — stop the
    // outgoing sender tracks explicitly so the browser's mic indicator
    // turns off and the microphone is actually released.
    pc.getSenders().forEach((sender) => {
      if (sender.track) sender.track.stop();
    });
    pc.close();
    pc = null;
  }
  if (eventSource) {
    eventSource.close();
    eventSource = null;
  }

  document.getElementById('start-btn').disabled = false;
  document.getElementById('end-btn').disabled = true;
  document.getElementById('status').innerHTML = '<p class="text-gray-600">Interview ended. Click "Generate Article" when ready.</p>';
  document.getElementById('speaking-indicator').classList.add('hidden');
  document.getElementById('listening-indicator').classList.add('hidden');
}
401
+
402
// Ask the backend to turn the interview transcript into a markdown article,
// then render it (via marked) inside the article modal. Stores the raw
// markdown in currentArticle for download/submit.
async function generateArticle() {
  if (!webrtcId) {
    alert('No interview session found!');
    return;
  }

  const modal = document.getElementById('article-modal');
  modal.classList.remove('hidden');
  modal.classList.add('flex');

  const content = document.getElementById('article-content');
  content.innerHTML = '<p class="text-gray-500 italic">⏳ Generating article... This may take 10-30 seconds...</p>';

  try {
    const response = await fetch(`/generate_article?webrtc_id=${webrtcId}`, {
      method: 'POST'
    });
    // Surface HTTP-level failures instead of failing inside response.json().
    if (!response.ok) {
      throw new Error(`HTTP ${response.status}`);
    }

    const data = await response.json();

    if (data.status === 'success') {
      currentArticle = data.article;
      // NOTE(review): marked does not sanitize HTML embedded in markdown;
      // this trusts the backend/model output — confirm that is acceptable.
      content.innerHTML =
        `<div class="prose max-w-none">${marked.parse(currentArticle)}</div>`;

      // One-time spotlight on the modal's action buttons.
      if (!localStorage.getItem('articleActionsSeen')) {
        setTimeout(() => {
          highlightArticleActions();
          localStorage.setItem('articleActionsSeen', 'true');
        }, 500);
      }
    } else {
      content.innerHTML =
        `<p class="text-red-600">❌ Error: ${data.error}</p>`;
    }
  } catch (error) {
    content.innerHTML =
      `<p class="text-red-600">❌ Failed to generate: ${error.message}</p>`;
  }
}
443
+
444
// One-off mini tour shown after the first article is generated, pointing out
// the review area and the Download/Submit/Close actions.
function highlightArticleActions() {
  const createDriver = window.driver.js.driver;

  const steps = [
    {
      element: '#article-content',
      popover: {
        title: '✨ Your Article is Ready!',
        description: 'Review your AI-generated article. You can edit it if needed before downloading or submitting.',
        side: "top",
        align: 'center'
      }
    },
    {
      element: '#article-actions',
      popover: {
        title: "πŸ“€ What's Next?",
        description: 'Download as markdown, submit to GitHub, or close and generate again if needed.',
        side: "top",
        align: 'center'
      }
    }
  ];

  createDriver({ showProgress: false, steps }).drive();
}
472
+
473
// POST the generated article to the backend, which commits it to GitHub.
// Disables the submit button for the duration and always restores it.
async function submitArticle() {
  if (!currentArticle) return;

  const btn = document.getElementById('submit-btn');
  btn.disabled = true;
  btn.textContent = '⏳ Submitting...';

  try {
    const response = await fetch('/submit_article', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ article: currentArticle })
    });
    // Report HTTP-level failures clearly instead of a JSON parse error.
    if (!response.ok) {
      throw new Error(`HTTP ${response.status}`);
    }

    const data = await response.json();

    if (data.status === 'success') {
      alert(`βœ… Submitted!\n\nπŸ“ ${data.filename}\n\nπŸ”— ${data.url}`);
      closeArticle();
    } else {
      alert(`❌ Failed: ${data.error}`);
    }
  } catch (error) {
    alert(`❌ Failed: ${error.message}`);
  } finally {
    // Restore the button whether the submission succeeded or not.
    btn.disabled = false;
    btn.textContent = 'πŸš€ Submit to GitHub';
  }
}
502
+
503
// Save the current article locally as a date-stamped markdown file.
function downloadArticle() {
  if (!currentArticle) return;

  const today = new Date().toISOString().split('T')[0];
  const blob = new Blob([currentArticle], { type: 'text/markdown' });
  const url = URL.createObjectURL(blob);

  // A detached anchor is enough to trigger the browser download dialog.
  const link = document.createElement('a');
  link.href = url;
  link.download = 'voice-interview-' + today + '.md';
  link.click();

  URL.revokeObjectURL(url);
}
514
+
515
// Hide the article modal ('flex' provides the centering while visible).
function closeArticle() {
  const modal = document.getElementById('article-modal');
  modal.classList.add('hidden');
  modal.classList.remove('flex');
}
519
+
520
+ // Initialize on page load
521
+ window.addEventListener('DOMContentLoaded', () => {
522
+ checkFirstVisit();
523
+ });
524
+
525
+ window.startInterview = startInterview;
526
+ window.endInterview = endInterview;
527
+ window.generateArticle = generateArticle;
528
+ window.submitArticle = submitArticle;
529
+ window.downloadArticle = downloadArticle;
530
+ window.closeArticle = closeArticle;
531
+ window.startTutorial = startTutorial;
532
+ </script>
533
+
534
+ <script src="https://cdn.jsdelivr.net/npm/marked/marked.min.js"></script>
535
+ </body>
536
+ </html>