MeysamSh commited on
Commit
4fcb7f8
·
1 Parent(s): 6a0c23b

change interface to visualize the wavform

Browse files
Files changed (4) hide show
  1. Web/index.html +29 -106
  2. Web/index_org.html +162 -0
  3. Web/script.js +345 -410
  4. Web/script_org.js +498 -0
Web/index.html CHANGED
@@ -1,93 +1,30 @@
1
  <!DOCTYPE html>
2
  <html lang="en">
3
-
4
  <head>
5
- <meta charset="UTF-8">
6
- <meta name="viewport" content="width=device-width, initial-scale=1.0">
7
  <title>Audio Analysis API</title>
8
- <link rel="stylesheet" href="/static/styles.css">
9
 
10
  <!-- Bootstrap CSS -->
11
  <link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0/dist/css/bootstrap.min.css" rel="stylesheet">
12
- <style>
13
- body {
14
- background-color: #f8f9fa;
15
- padding: 20px;
16
- }
17
-
18
- .container {
19
- max-width: 800px;
20
- margin: 0 auto;
21
- background: #fff;
22
- padding: 30px;
23
- border-radius: 10px;
24
- box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);
25
- }
26
-
27
- h1 {
28
- text-align: center;
29
- margin-bottom: 20px;
30
- color: #333;
31
- font-weight: bold;
32
- }
33
-
34
- h2 {
35
- color: #555;
36
- margin-bottom: 20px;
37
- font-size: 1.5rem;
38
- }
39
-
40
- .btn {
41
- margin: 5px;
42
- font-weight: 500;
43
- }
44
-
45
- #recordingsList {
46
- margin-top: 20px;
47
- }
48
-
49
- .response {
50
- margin-top: 20px;
51
- padding: 15px;
52
- background-color: #e9ecef;
53
- border-radius: 5px;
54
- color: #333;
55
- font-size: 1.1rem;
56
- }
57
-
58
- .metadata {
59
- margin-top: 20px;
60
- padding: 15px;
61
- background-color: #f1f3f4;
62
- border-radius: 5px;
63
- color: #333;
64
- font-size: 1.1rem;
65
- }
66
-
67
- .list-group-item {
68
- display: flex;
69
- justify-content: space-between;
70
- align-items: center;
71
- }
72
 
73
- .list-group-item a {
74
- text-decoration: none;
75
- color: #0d6efd;
76
- }
77
 
78
- .list-group-item a:hover {
79
- text-decoration: underline;
80
- }
81
-
82
- #controls {
83
- margin-bottom: 20px;
84
- }
85
-
86
- #formats {
87
- font-size: 0.9rem;
88
- color: #666;
89
- margin-bottom: 10px;
90
- }
91
  </style>
92
  </head>
93
 
@@ -96,7 +33,6 @@
96
  <h1>Audio Analysis API</h1>
97
  <h2>Upload or Record Audio Files</h2>
98
 
99
- <!-- Form for Uploading Files -->
100
  <form id="upload-form" class="mb-4">
101
  <div class="mb-3">
102
  <input type="file" id="audio-file" class="form-control" accept="audio/*" multiple />
@@ -106,57 +42,44 @@
106
 
107
  <hr>
108
 
109
- <!-- Buttons for Recording Audio -->
110
  <div id="controls" class="mb-4 text-center">
111
  <button id="recordButton" class="btn btn-success">Record</button>
112
  <button id="pauseButton" class="btn btn-warning" disabled>Pause</button>
113
  <button id="stopButton" class="btn btn-danger" disabled>Stop</button>
114
  </div>
 
115
  <div id="formats" class="mb-3 text-center">Format: Start recording to see sample rate</div>
116
- <p class="text-center"><strong>Recordings:</strong></p>
 
117
  <ol id="recordingsList" class="list-group"></ol>
118
 
119
- <!-- Metadata Display -->
120
  <div class="metadata mt-4">
121
  <h3>File Metadata</h3>
122
-
123
- <!-- Dropdown Filters -->
124
  <div class="mb-3 d-flex flex-wrap gap-3">
125
  <i>Choisir un Label</i>
126
- <select id="filter-label" class="form-select">
127
- <option value="">All Labels</option>
128
- </select>
129
  <i>Choisir un System</i>
130
- <select id="filter-system" class="form-select">
131
- <option value="">All Systems</option>
132
- </select>
133
  <i>Choisir un Codec</i>
134
- <select id="filter-codec" class="form-select">
135
- <option value="">All Codecs</option>
136
- </select>
137
  <i>Choisir un Genre</i>
138
- <select id="filter-genre" class="form-select">
139
- <option value="">All Genres</option>
140
- </select>
141
  <i>Choisir une Année</i>
142
- <select id="filter-year" class="form-select">
143
- <option value="">All Years</option>
144
- </select>
145
  </div>
146
-
147
  <div id="metadata-display"></div>
148
  </div>
149
 
150
- <!-- Response Display -->
151
  <div class="response mt-4">
152
  <h3>Analysis Results</h3>
153
  <div id="response"></div>
154
  </div>
155
  </div>
156
 
157
- <!-- Load Recorder.js and your script.js -->
158
  <script src="/static/recorder.js"></script>
 
 
159
  <script src="/static/script.js"></script>
160
  </body>
161
-
162
  </html>
 
1
  <!DOCTYPE html>
2
  <html lang="en">
 
3
  <head>
4
+ <meta charset="UTF-8" />
5
+ <meta name="viewport" content="width=device-width, initial-scale=1.0"/>
6
  <title>Audio Analysis API</title>
7
+ <link rel="stylesheet" href="/static/styles.css" />
8
 
9
  <!-- Bootstrap CSS -->
10
  <link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0/dist/css/bootstrap.min.css" rel="stylesheet">
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
11
 
12
+ <!-- Wavesurfer -->
13
+ <script src="https://unpkg.com/wavesurfer.js"></script>
 
 
14
 
15
+ <style>
16
+ body { background-color: #f8f9fa; padding: 20px; }
17
+ .container { max-width: 900px; margin: 0 auto; background:#fff; padding:30px; border-radius:10px; box-shadow:0 4px 6px rgba(0,0,0,0.1); }
18
+ h1 { text-align:center; margin-bottom:20px; color:#333; font-weight:bold; }
19
+ .wave-item { margin-bottom: 18px; padding: 12px; border-radius: 8px; background: #fbfbfb; border: 1px solid #eee; }
20
+ .wave-controls { display:flex; gap:8px; align-items:center; margin-top:8px; flex-wrap:wrap; }
21
+ .wave-canvas { width:100%; height:80px; }
22
+ .file-title { font-weight:600; margin-bottom:6px; color:#222; }
23
+ .btn-small { padding: .25rem .5rem; font-size:.85rem; }
24
+ #recordingsList .list-group-item { display:block; padding:0; border:none; }
25
+ .metadata, .response { margin-top:20px; padding:15px; border-radius:5px; }
26
+ .metadata { background:#f1f3f4; }
27
+ .response { background:#e9ecef; }
28
  </style>
29
  </head>
30
 
 
33
  <h1>Audio Analysis API</h1>
34
  <h2>Upload or Record Audio Files</h2>
35
 
 
36
  <form id="upload-form" class="mb-4">
37
  <div class="mb-3">
38
  <input type="file" id="audio-file" class="form-control" accept="audio/*" multiple />
 
42
 
43
  <hr>
44
 
 
45
  <div id="controls" class="mb-4 text-center">
46
  <button id="recordButton" class="btn btn-success">Record</button>
47
  <button id="pauseButton" class="btn btn-warning" disabled>Pause</button>
48
  <button id="stopButton" class="btn btn-danger" disabled>Stop</button>
49
  </div>
50
+
51
  <div id="formats" class="mb-3 text-center">Format: Start recording to see sample rate</div>
52
+
53
+ <p class="text-center"><strong>Recordings & Uploads:</strong></p>
54
  <ol id="recordingsList" class="list-group"></ol>
55
 
 
56
  <div class="metadata mt-4">
57
  <h3>File Metadata</h3>
 
 
58
  <div class="mb-3 d-flex flex-wrap gap-3">
59
  <i>Choisir un Label</i>
60
+ <select id="filter-label" class="form-select"><option value="">All Labels</option></select>
 
 
61
  <i>Choisir un System</i>
62
+ <select id="filter-system" class="form-select"><option value="">All Systems</option></select>
 
 
63
  <i>Choisir un Codec</i>
64
+ <select id="filter-codec" class="form-select"><option value="">All Codecs</option></select>
 
 
65
  <i>Choisir un Genre</i>
66
+ <select id="filter-genre" class="form-select"><option value="">All Genres</option></select>
 
 
67
  <i>Choisir une Année</i>
68
+ <select id="filter-year" class="form-select"><option value="">All Years</option></select>
 
 
69
  </div>
 
70
  <div id="metadata-display"></div>
71
  </div>
72
 
 
73
  <div class="response mt-4">
74
  <h3>Analysis Results</h3>
75
  <div id="response"></div>
76
  </div>
77
  </div>
78
 
79
+ <!-- Recorder.js (must be present in /Web/recorder.js) -->
80
  <script src="/static/recorder.js"></script>
81
+
82
+ <!-- Your main script -->
83
  <script src="/static/script.js"></script>
84
  </body>
 
85
  </html>
Web/index_org.html ADDED
@@ -0,0 +1,162 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <!DOCTYPE html>
2
+ <html lang="en">
3
+
4
+ <head>
5
+ <meta charset="UTF-8">
6
+ <meta name="viewport" content="width=device-width, initial-scale=1.0">
7
+ <title>Audio Analysis API</title>
8
+ <link rel="stylesheet" href="/static/styles.css">
9
+
10
+ <!-- Bootstrap CSS -->
11
+ <link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0/dist/css/bootstrap.min.css" rel="stylesheet">
12
+ <style>
13
+ body {
14
+ background-color: #f8f9fa;
15
+ padding: 20px;
16
+ }
17
+
18
+ .container {
19
+ max-width: 800px;
20
+ margin: 0 auto;
21
+ background: #fff;
22
+ padding: 30px;
23
+ border-radius: 10px;
24
+ box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);
25
+ }
26
+
27
+ h1 {
28
+ text-align: center;
29
+ margin-bottom: 20px;
30
+ color: #333;
31
+ font-weight: bold;
32
+ }
33
+
34
+ h2 {
35
+ color: #555;
36
+ margin-bottom: 20px;
37
+ font-size: 1.5rem;
38
+ }
39
+
40
+ .btn {
41
+ margin: 5px;
42
+ font-weight: 500;
43
+ }
44
+
45
+ #recordingsList {
46
+ margin-top: 20px;
47
+ }
48
+
49
+ .response {
50
+ margin-top: 20px;
51
+ padding: 15px;
52
+ background-color: #e9ecef;
53
+ border-radius: 5px;
54
+ color: #333;
55
+ font-size: 1.1rem;
56
+ }
57
+
58
+ .metadata {
59
+ margin-top: 20px;
60
+ padding: 15px;
61
+ background-color: #f1f3f4;
62
+ border-radius: 5px;
63
+ color: #333;
64
+ font-size: 1.1rem;
65
+ }
66
+
67
+ .list-group-item {
68
+ display: flex;
69
+ justify-content: space-between;
70
+ align-items: center;
71
+ }
72
+
73
+ .list-group-item a {
74
+ text-decoration: none;
75
+ color: #0d6efd;
76
+ }
77
+
78
+ .list-group-item a:hover {
79
+ text-decoration: underline;
80
+ }
81
+
82
+ #controls {
83
+ margin-bottom: 20px;
84
+ }
85
+
86
+ #formats {
87
+ font-size: 0.9rem;
88
+ color: #666;
89
+ margin-bottom: 10px;
90
+ }
91
+ </style>
92
+ </head>
93
+
94
+ <body>
95
+ <div class="container">
96
+ <h1>Audio Analysis API</h1>
97
+ <h2>Upload or Record Audio Files</h2>
98
+
99
+ <!-- Form for Uploading Files -->
100
+ <form id="upload-form" class="mb-4">
101
+ <div class="mb-3">
102
+ <input type="file" id="audio-file" class="form-control" accept="audio/*" multiple />
103
+ </div>
104
+ <button type="button" id="upload-button" class="btn btn-primary w-100">Upload & Analyze</button>
105
+ </form>
106
+
107
+ <hr>
108
+
109
+ <!-- Buttons for Recording Audio -->
110
+ <div id="controls" class="mb-4 text-center">
111
+ <button id="recordButton" class="btn btn-success">Record</button>
112
+ <button id="pauseButton" class="btn btn-warning" disabled>Pause</button>
113
+ <button id="stopButton" class="btn btn-danger" disabled>Stop</button>
114
+ </div>
115
+ <div id="formats" class="mb-3 text-center">Format: Start recording to see sample rate</div>
116
+ <p class="text-center"><strong>Recordings:</strong></p>
117
+ <ol id="recordingsList" class="list-group"></ol>
118
+
119
+ <!-- Metadata Display -->
120
+ <div class="metadata mt-4">
121
+ <h3>File Metadata</h3>
122
+
123
+ <!-- Dropdown Filters -->
124
+ <div class="mb-3 d-flex flex-wrap gap-3">
125
+ <i>Choisir un Label</i>
126
+ <select id="filter-label" class="form-select">
127
+ <option value="">All Labels</option>
128
+ </select>
129
+ <i>Choisir un System</i>
130
+ <select id="filter-system" class="form-select">
131
+ <option value="">All Systems</option>
132
+ </select>
133
+ <i>Choisir un Codec</i>
134
+ <select id="filter-codec" class="form-select">
135
+ <option value="">All Codecs</option>
136
+ </select>
137
+ <i>Choisir un Genre</i>
138
+ <select id="filter-genre" class="form-select">
139
+ <option value="">All Genres</option>
140
+ </select>
141
+ <i>Choisir une Année</i>
142
+ <select id="filter-year" class="form-select">
143
+ <option value="">All Years</option>
144
+ </select>
145
+ </div>
146
+
147
+ <div id="metadata-display"></div>
148
+ </div>
149
+
150
+ <!-- Response Display -->
151
+ <div class="response mt-4">
152
+ <h3>Analysis Results</h3>
153
+ <div id="response"></div>
154
+ </div>
155
+ </div>
156
+
157
+ <!-- Load Recorder.js and your script.js -->
158
+ <script src="/static/recorder.js"></script>
159
+ <script src="/static/script.js"></script>
160
+ </body>
161
+
162
+ </html>
Web/script.js CHANGED
@@ -1,3 +1,6 @@
 
 
 
1
  const uploadButton = document.getElementById('upload-button');
2
  const audioFileInput = document.getElementById('audio-file');
3
  const recordButton = document.getElementById('recordButton');
@@ -5,494 +8,426 @@ const stopButton = document.getElementById('stopButton');
5
  const pauseButton = document.getElementById('pauseButton');
6
  const responseDiv = document.getElementById('response');
7
  const metadataDisplay = document.getElementById('metadata-display');
 
8
 
9
- let gumStream;
10
- let rec;
11
- let input;
12
- let audioContext;
13
 
 
14
  function startAudioContext() {
15
- if (!audioContext) {
16
- audioContext = new (window.AudioContext || window.webkitAudioContext)();
17
- } else if (audioContext.state === 'suspended') {
18
- audioContext.resume().then(() => {
19
- console.log('AudioContext repris');
20
- });
21
- }
22
  }
 
23
 
24
- // Fonction pour rééchantillonner l'audio à 16 kHz
25
  async function resampleAudio(blob, targetSampleRate = 16000) {
26
- return new Promise((resolve, reject) => {
27
- const reader = new FileReader();
28
- reader.onload = async () => {
29
- const audioContext = new (window.AudioContext || window.webkitAudioContext)();
30
- const buffer = await audioContext.decodeAudioData(reader.result);
31
-
32
- // Créer un nouvel AudioContext avec le taux d'échantillonnage cible
33
- const offlineContext = new OfflineAudioContext(
34
- buffer.numberOfChannels,
35
- buffer.length * (targetSampleRate / buffer.sampleRate),
36
- targetSampleRate
37
- );
38
-
39
- // Créer une source audio avec le buffer original
40
- const source = offlineContext.createBufferSource();
41
- source.buffer = buffer;
42
-
43
- // Connecter la source au contexte offline
44
- source.connect(offlineContext.destination);
45
- source.start();
46
-
47
- // Rendre l'audio
48
- const resampledBuffer = await offlineContext.startRendering();
49
-
50
- // Convertir le buffer rééchantillonné en WAV
51
- const wavBlob = bufferToWav(resampledBuffer);
52
  resolve(wavBlob);
53
- };
54
- reader.onerror = reject;
55
- reader.readAsArrayBuffer(blob);
56
- });
 
 
 
57
  }
58
 
59
- // Fonction pour convertir un AudioBuffer en WAV
60
  function bufferToWav(buffer) {
61
  const numChannels = buffer.numberOfChannels;
62
  const sampleRate = buffer.sampleRate;
63
- const length = buffer.length * numChannels * 2; // 2 bytes par échantillon
64
- const data = new Float32Array(length);
65
-
66
- // Interleave les canaux
67
- for (let channel = 0; channel < numChannels; channel++) {
68
- const channelData = buffer.getChannelData(channel);
69
  for (let i = 0; i < channelData.length; i++) {
70
- data[i * numChannels + channel] = channelData[i];
71
  }
72
  }
73
-
74
- // Encoder en WAV
75
- const wavBlob = encodeWAV(data, sampleRate, numChannels);
76
- return wavBlob;
77
  }
78
 
79
- // Fonction pour encoder des données audio en WAV
80
  function encodeWAV(samples, sampleRate, numChannels) {
81
  const buffer = new ArrayBuffer(44 + samples.length * 2);
82
  const view = new DataView(buffer);
83
-
84
- // Écrire l'en-tête WAV
 
 
 
85
  writeString(view, 0, 'RIFF');
86
  view.setUint32(4, 36 + samples.length * 2, true);
87
  writeString(view, 8, 'WAVE');
88
  writeString(view, 12, 'fmt ');
89
  view.setUint32(16, 16, true);
90
- view.setUint16(20, 1, true); // Format PCM
91
  view.setUint16(22, numChannels, true);
92
  view.setUint32(24, sampleRate, true);
93
  view.setUint32(28, sampleRate * numChannels * 2, true);
94
  view.setUint16(32, numChannels * 2, true);
95
- view.setUint16(34, 16, true); // Bits par échantillon
96
  writeString(view, 36, 'data');
97
  view.setUint32(40, samples.length * 2, true);
98
-
99
- // Écrire les échantillons audio
100
  floatTo16BitPCM(view, 44, samples);
101
-
102
  return new Blob([view], { type: 'audio/wav' });
103
  }
104
 
105
- // Fonction utilitaire pour écrire une chaîne dans un DataView
106
- function writeString(view, offset, string) {
107
- for (let i = 0; i < string.length; i++) {
108
- view.setUint8(offset + i, string.charCodeAt(i));
109
- }
110
- }
111
-
112
- // Fonction utilitaire pour convertir des échantillons flottants en PCM 16 bits
113
  function floatTo16BitPCM(view, offset, input) {
114
  for (let i = 0; i < input.length; i++, offset += 2) {
115
- const s = Math.max(-1, Math.min(1, input[i]));
116
  view.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7FFF, true);
117
  }
118
  }
119
 
120
- // Function to fetch metadata from the text file
121
  async function fetchMetadata() {
122
- try {
123
- const response = await fetch('../metadata.txt'); // Assurez-vous que le fichier est accessible
124
- if (!response.ok) {
125
- throw new Error('Failed to fetch metadata');
126
- }
127
- const text = await response.text();
128
- console.log('Metadata file content:', text); // Debugging
129
-
130
- // Split text into lines
131
- const lines = text.split('\n').map(line => line.trim()).filter(line => line !== '');
132
-
133
- if (lines.length < 2) {
134
- throw new Error('Metadata file is empty or malformed');
135
- }
136
-
137
- // Extract headers
138
- const headers = lines[0].split(';').map(h => h.trim().toLowerCase());
139
-
140
- // Extract data
141
- const metadata = lines.slice(1).map(line => {
142
- const values = line.split(';').map(value => value.trim());
143
- let entry = {};
144
- headers.forEach((header, index) => {
145
- entry[header] = values[index] || 'N/A'; // Default to 'N/A' if missing data
146
- });
147
- return entry;
148
- });
149
-
150
- console.log('Parsed Metadata:', metadata); // Debugging
151
- return metadata;
152
- } catch (error) {
153
- console.error('Error fetching metadata:', error);
154
- return [];
155
- }
156
  }
157
 
158
  function populateFilters() {
159
- const predefinedValues = {
160
- label: ["spoof", "genuine"],
161
- system: ["bonafide"].concat(Array.from({ length: 19 }, (_, i) => `A${String(i + 1).padStart(2, '0')}`)),
162
- codec: ["FLAC", "WAV", "MP3"],
163
- genre: ["male", "female"],
164
- year: ["2020", "2021", "2022", "2023", "2024", "2025"]
165
- };
166
-
167
- Object.keys(predefinedValues).forEach(key => {
168
- populateDropdown(`filter-${key}`, predefinedValues[key]);
169
- });
170
  }
171
 
172
  function populateDropdown(id, values) {
173
- const select = document.getElementById(id);
174
- select.innerHTML = '<option value="">All</option>'; // Ajouter l'option "All" par défaut
175
-
176
- values.forEach(value => {
177
- const option = document.createElement("option");
178
- option.value = value;
179
- option.textContent = value.charAt(0).toUpperCase() + value.slice(1); // Majuscule initiale
180
- select.appendChild(option);
181
- });
182
-
183
- select.addEventListener("change", filterMetadata);
184
- }
185
-
186
- function filterMetadata() {
187
- const selectedLabel = document.getElementById("filter-label").value.toLowerCase();
188
- const selectedSystem = document.getElementById("filter-system").value.toLowerCase();
189
- const selectedCodec = document.getElementById("filter-codec").value.toLowerCase();
190
- const selectedGenre = document.getElementById("filter-genre").value.toLowerCase();
191
- const selectedYear = document.getElementById("filter-year").value.toLowerCase();
192
-
193
- fetchMetadata().then(metadata => {
194
- const filteredMetadata = metadata.filter(entry =>
195
- (selectedLabel === "" || entry.label.toLowerCase() === selectedLabel) &&
196
- (selectedSystem === "" || entry.system.toLowerCase() === selectedSystem) &&
197
- (selectedCodec === "" || entry.codec.toLowerCase() === selectedCodec) &&
198
- (selectedGenre === "" || entry.genre.toLowerCase() === selectedGenre) &&
199
- (selectedYear === "" || entry.year.toLowerCase() === selectedYear)
200
- );
201
-
202
- displayMetadata(null, metadata, true); // Mode filtrage
203
- });
204
  }
205
 
206
-
207
  function displayMetadata(files, metadata, filteredOnly = false) {
208
- metadataDisplay.innerHTML = ''; // Nettoyer l'affichage avant de remplir
209
-
210
- // Si on ne filtre pas et qu'aucun fichier n'est sélectionné, afficher tout
211
- if (!filteredOnly && (!files || files.length === 0)) {
212
- metadataDisplay.innerHTML = '<p>No files selected.</p>';
213
- return;
214
- }
215
-
216
- let filteredMetadata;
217
-
218
- if (filteredOnly) {
219
- // Appliquer les filtres des drop-downs
220
- const selectedLabel = document.getElementById("filter-label").value.toLowerCase();
221
- const selectedSystem = document.getElementById("filter-system").value.toLowerCase();
222
- const selectedCodec = document.getElementById("filter-codec").value.toLowerCase();
223
- const selectedGenre = document.getElementById("filter-genre").value.toLowerCase();
224
- const selectedYear = document.getElementById("filter-year").value.toLowerCase();
225
-
226
- filteredMetadata = metadata.filter(entry =>
227
- (selectedLabel === "" || entry.label.toLowerCase() === selectedLabel) &&
228
- (selectedSystem === "" || entry.system.toLowerCase() === selectedSystem) &&
229
- (selectedCodec === "" || entry.codec.toLowerCase() === selectedCodec) &&
230
- (selectedGenre === "" || entry.genre.toLowerCase() === selectedGenre) &&
231
- (selectedYear === "" || entry.year.toLowerCase() === selectedYear)
232
- );
233
- } else {
234
- // Obtenir la liste des fichiers sélectionnés
235
- const selectedFiles = Array.from(files).map(file => file.name.trim().toLowerCase());
236
-
237
- // Filtrer les métadonnées pour ne garder que celles des fichiers sélectionnés
238
- filteredMetadata = metadata.filter(entry => selectedFiles.includes(entry.filedir.trim().toLowerCase()));
239
- }
240
-
241
- // Vérifier si aucun résultat après filtrage
242
- if (filteredMetadata.length === 0) {
243
- metadataDisplay.innerHTML = '<p>No metadata found.</p>';
244
- return;
245
- }
246
-
247
- // Création du tableau Bootstrap
248
- const table = document.createElement('table');
249
- table.classList.add('table', 'table-striped', 'table-bordered');
250
-
251
- // Création de l'en-tête du tableau
252
- const headerRow = document.createElement('tr');
253
- Object.keys(filteredMetadata[0]).forEach(headerText => {
254
- const header = document.createElement('th');
255
- header.textContent = headerText.charAt(0).toUpperCase() + headerText.slice(1);
256
- headerRow.appendChild(header);
257
- });
258
- table.appendChild(headerRow);
259
-
260
- // Remplir le tableau avec les métadonnées filtrées
261
- filteredMetadata.forEach(entry => {
262
- const row = document.createElement('tr');
263
- Object.values(entry).forEach(value => {
264
- const cell = document.createElement('td');
265
- cell.textContent = value;
266
- row.appendChild(cell);
267
- });
268
- table.appendChild(row);
269
- });
270
-
271
- // Ajouter le tableau à la section d'affichage des métadonnées
272
- metadataDisplay.appendChild(table);
273
  }
274
 
275
-
276
  document.addEventListener('DOMContentLoaded', async () => {
277
- populateFilters(); // Charger les valeurs fixes dans les drop-downs
278
- const metadata = await fetchMetadata();
279
- displayMetadata(metadata);
280
  });
281
 
282
- async function uploadAudio(files) {
283
- if (!files || files.length === 0) {
284
- alert('Please select or record files first!');
285
- return;
286
- }
287
-
288
- const formData = new FormData();
289
- const filesArray = Array.from(files);
290
- for (let i = 0; i < filesArray.length; i++) {
291
- formData.append('files', filesArray[i]);
292
- }
293
 
294
- responseDiv.textContent = 'Uploading and analyzing audio...';
295
-
296
- try {
297
- const metadataObj = await fetchMetadata();
298
- displayMetadata(filesArray, metadataObj); // Afficher uniquement les métadonnées des fichiers sélectionnés
299
 
300
- const response = await fetch('http://127.0.0.1:8000/predict/', {
301
- method: 'POST',
302
- body: formData,
303
- });
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
304
 
305
- if (!response.ok) {
306
- const errorData = await response.json();
307
- throw new Error(`Server error: ${errorData.message || response.statusText}`);
308
- }
309
 
310
- const data = await response.json();
311
- responseDiv.innerHTML = '';
 
 
 
 
312
 
313
- data.forEach((result, index) => {
314
- const resultDiv = document.createElement('div');
315
- resultDiv.innerHTML = `File: <b>${result.filename}</b>, Label: <b>${result.label}</b>, Confidence: <b>${result.confidence}</b>`;
316
- responseDiv.appendChild(resultDiv);
 
 
 
 
 
 
 
 
 
 
317
 
318
-
319
- });
320
 
321
- } catch (error) {
322
- console.error('Error:', error);
323
- responseDiv.textContent = 'Error: ' + error.message;
324
- }
 
 
 
 
 
 
325
  }
326
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
327
 
 
 
 
 
 
 
 
 
328
 
329
- uploadButton.addEventListener('click', () => {
330
- const files = audioFileInput.files;
331
- if (!files || files.length === 0) {
332
- alert('Please select files first!');
333
- return;
 
 
334
  }
335
- uploadAudio(files);
 
 
 
 
 
 
 
 
 
 
 
 
336
  });
337
 
338
- // Start Recording
339
  recordButton.addEventListener('click', async () => {
340
- startAudioContext(); // Initialiser ou reprendre l'AudioContext
341
-
342
- console.log('Recording started');
343
-
344
- const constraints = { audio: true, video: false };
345
-
346
- try {
347
- gumStream = await navigator.mediaDevices.getUserMedia(constraints);
348
- console.log('Microphone access granted');
349
- input = audioContext.createMediaStreamSource(gumStream);
350
- console.log('Audio source created');
351
-
352
- // Initialize Recorder.js
353
- rec = new Recorder(input, { numChannels: 1 });
354
- console.log('Recorder initialized');
355
-
356
- // Start recording
357
- rec.record();
358
- console.log('Recording started');
359
-
360
- // Update button states
361
- recordButton.disabled = true;
362
- stopButton.disabled = false;
363
- pauseButton.disabled = false;
364
- } catch (error) {
365
- console.error('Error accessing microphone:', error);
366
- alert('Error accessing microphone: ' + error.message);
367
- }
368
- });
369
-
370
-
371
- function stopRecording() {
372
- console.log('stopRecording called');
373
-
374
- // Désactiver les boutons
375
- stopButton.disabled = true;
376
- recordButton.disabled = false;
377
- pauseButton.disabled = true;
378
- pauseButton.innerHTML = 'Pause';
379
-
380
- // Arrêter l'enregistrement
381
  rec.stop();
382
- console.log('Recording stopped');
383
-
384
- // Arrêter l'accès au microphone
 
 
 
 
 
 
 
 
 
 
 
 
385
  gumStream.getAudioTracks()[0].stop();
386
- console.log('Microphone access stopped');
387
-
388
- // Exporter l'audio en WAV
389
- rec.exportWAV(async (blob) => {
390
- console.log('Audio exported as WAV');
391
-
392
- // Vérifier la taille du fichier audio
393
- if (blob.size === 0) {
394
- console.error('Le fichier audio est vide.');
395
- responseDiv.textContent = 'Erreur : Le fichier audio est vide.';
396
- return;
397
- }
398
-
399
- // Rééchantillonner l'audio à 16 kHz
400
- try {
401
- const resampledBlob = await resampleAudio(blob, 16000);
402
- console.log('Audio rééchantillonné à 16 kHz');
403
-
404
-
405
- // Envoyer l'audio rééchantillonné à l'API pour analyse
406
- await sendAudioToAPI(resampledBlob); // Ajouter await ici
407
- } catch (error) {
408
- console.error('Erreur lors du rééchantillonnage :', error);
409
- responseDiv.textContent = 'Erreur : ' + error.message;
410
- }
411
- });
412
- }
413
 
414
- async function sendAudioToAPI(blob) {
415
- console.log('Sending audio to API');
416
-
417
- const formData = new FormData();
418
- const filename = 'recorded-audio.wav'; // Nom du fichier
419
- formData.append('files', blob, filename); // Utiliser 'files' comme nom de champ
420
-
 
421
  try {
422
- const response = await fetch('http://127.0.0.1:8000/predict/', {
423
- method: 'POST',
424
- body: formData,
425
- });
426
-
427
- console.log('API response status:', response.status);
428
-
429
- if (!response.ok) {
430
- throw new Error(`HTTP error! status: ${response.status}`);
 
 
 
 
 
 
 
 
431
  }
432
-
433
- const data = await response.json();
434
- console.log('API response data:', data);
435
-
436
- // Afficher le résultat de l'API
437
- if (data.length > 0) {
438
- responseDiv.innerHTML = `Label: <b>${data[0].label}</b>, Confidence: <b>${data[0].confidence}</b>`;
439
- } else {
440
- responseDiv.textContent = 'Error: No data returned from the API.';
441
- }
442
- } catch (error) {
443
- console.error('Error sending audio to API:', error);
444
- responseDiv.textContent = 'Error: ' + error.message;
445
- }
446
- }
447
-
448
- // Pause Recording
449
- pauseButton.addEventListener('click', () => {
450
- if (rec.recording) {
451
- // Pause recording
452
- rec.stop();
453
- pauseButton.textContent = 'Resume';
454
- } else {
455
- // Resume recording
456
- rec.record();
457
- pauseButton.textContent = 'Pause';
458
  }
459
  });
460
-
461
-
462
- stopButton.addEventListener('click', () => {
463
- stopRecording();
464
  });
465
 
466
- // Ajouter un écouteur d'événement pour un clic utilisateur sur le bouton d'enregistrement
467
- recordButton.addEventListener('click', async () => {
468
- startAudioContext(); // Initialiser ou reprendre l'AudioContext
469
-
470
- console.log('Recording started');
471
-
472
- const constraints = { audio: true, video: false };
473
-
474
- try {
475
- gumStream = await navigator.mediaDevices.getUserMedia(constraints);
476
- console.log('Microphone access granted');
477
- input = audioContext.createMediaStreamSource(gumStream);
478
- console.log('Audio source created');
479
-
480
- // Initialize Recorder.js
481
- rec = new Recorder(input, { numChannels: 1 });
482
- console.log('Recorder initialized');
483
-
484
- // Start recording
485
- rec.record();
486
- console.log('Recording started');
487
-
488
- // Update button states
489
- recordButton.disabled = true;
490
- stopButton.disabled = false;
491
- pauseButton.disabled = false;
492
- } catch (error) {
493
- console.error('Error accessing microphone:', error);
494
- alert('Error accessing microphone: ' + error.message);
495
- }
496
- });
497
-
498
 
 
 
1
+ // Web/script.js
2
+ // Requires: recorder.js included in page and wavesurfer.js (CDN in index.html)
3
+
4
  const uploadButton = document.getElementById('upload-button');
5
  const audioFileInput = document.getElementById('audio-file');
6
  const recordButton = document.getElementById('recordButton');
 
8
  const pauseButton = document.getElementById('pauseButton');
9
  const responseDiv = document.getElementById('response');
10
  const metadataDisplay = document.getElementById('metadata-display');
11
+ const recordingsList = document.getElementById('recordingsList');
12
 
13
+ let gumStream = null;
14
+ let rec = null;
15
+ let input = null;
16
+ let audioContext = null;
17
 
18
+ // initialize audio context
19
  function startAudioContext() {
20
+ if (!audioContext) {
21
+ audioContext = new (window.AudioContext || window.webkitAudioContext)();
22
+ } else if (audioContext.state === 'suspended') {
23
+ audioContext.resume();
 
 
 
24
  }
25
+ }
26
 
27
+ // ---- Resampling helpers (kept from your original code) ----
28
// Decode an audio Blob and resample it to targetSampleRate (default 16 kHz),
// resolving with a 16-bit PCM WAV Blob. Runs entirely client-side.
// Rejects on read or decode failure.
async function resampleAudio(blob, targetSampleRate = 16000) {
    return new Promise((resolve, reject) => {
        const reader = new FileReader();
        reader.onload = async () => {
            // A regular AudioContext is needed only for decoding; it is closed
            // afterwards so repeated recordings don't exhaust the browser's
            // AudioContext quota. (The previous version also allocated an
            // unused OfflineAudioContext `aCtx` per call — removed.)
            const baseCtx = new (window.AudioContext || window.webkitAudioContext)();
            try {
                const decoded = await baseCtx.decodeAudioData(reader.result.slice(0));
                const offline = new OfflineAudioContext(
                    decoded.numberOfChannels,
                    Math.ceil(decoded.length * targetSampleRate / decoded.sampleRate),
                    targetSampleRate
                );
                const src = offline.createBufferSource();
                src.buffer = decoded;
                src.connect(offline.destination);
                src.start(0);
                const rendered = await offline.startRendering();
                resolve(bufferToWav(rendered));
            } catch (err) {
                reject(err);
            } finally {
                if (typeof baseCtx.close === 'function') baseCtx.close();
            }
        };
        reader.onerror = reject;
        reader.readAsArrayBuffer(blob);
    });
}
52
 
 
53
// Interleave an AudioBuffer's channels into one Float32Array and encode the
// result as a 16-bit PCM WAV Blob via encodeWAV().
function bufferToWav(buffer) {
    const numChannels = buffer.numberOfChannels;
    const sampleRate = buffer.sampleRate;
    const interleaved = new Float32Array(buffer.length * numChannels);
    for (let ch = 0; ch < numChannels; ch++) {
        const channelData = buffer.getChannelData(ch);
        channelData.forEach((sample, i) => {
            interleaved[i * numChannels + ch] = sample;
        });
    }
    return encodeWAV(interleaved, sampleRate, numChannels);
}
66
 
 
67
// Build a complete RIFF/WAVE (PCM, 16-bit little-endian) file around the
// given interleaved float samples and return it as a Blob.
function encodeWAV(samples, sampleRate, numChannels) {
    const bytesPerSample = 2;
    const dataSize = samples.length * bytesPerSample;
    const view = new DataView(new ArrayBuffer(44 + dataSize));

    const putAscii = (offset, text) => {
        for (let i = 0; i < text.length; i++) {
            view.setUint8(offset + i, text.charCodeAt(i));
        }
    };

    putAscii(0, 'RIFF');
    view.setUint32(4, 36 + dataSize, true);                              // RIFF chunk size
    putAscii(8, 'WAVE');
    putAscii(12, 'fmt ');
    view.setUint32(16, 16, true);                                        // fmt chunk size
    view.setUint16(20, 1, true);                                         // audio format: PCM
    view.setUint16(22, numChannels, true);
    view.setUint32(24, sampleRate, true);
    view.setUint32(28, sampleRate * numChannels * bytesPerSample, true); // byte rate
    view.setUint16(32, numChannels * bytesPerSample, true);              // block align
    view.setUint16(34, 16, true);                                        // bits per sample
    putAscii(36, 'data');
    view.setUint32(40, dataSize, true);

    // Sample payload: clamp to [-1, 1] and scale to signed 16-bit
    // (inlined equivalent of floatTo16BitPCM).
    let offset = 44;
    for (let i = 0; i < samples.length; i++, offset += 2) {
        const s = Math.max(-1, Math.min(1, samples[i]));
        view.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7FFF, true);
    }

    return new Blob([view], { type: 'audio/wav' });
}
91
 
 
 
 
 
 
 
 
 
92
// Clamp each float sample to [-1, 1] and store it as little-endian signed
// 16-bit PCM into `view`, starting at byte `offset`.
function floatTo16BitPCM(view, offset, input) {
    input.forEach((sample, i) => {
        const clamped = Math.min(1, Math.max(-1, sample));
        const scaled = clamped < 0 ? clamped * 0x8000 : clamped * 0x7FFF;
        view.setInt16(offset + i * 2, scaled, true);
    });
}
98
 
99
+ // ---------------- Metadata functions (kept with minimal refactor) ----------------
100
// Load /static/metadata.txt (semicolon-separated; first line = headers) and
// parse it into an array of {header: value} records. Missing cells become
// 'N/A'. Resolves to [] on any network or format problem.
async function fetchMetadata() {
    try {
        const response = await fetch('/static/metadata.txt');
        if (!response.ok) throw new Error('Failed to fetch metadata');
        const text = await response.text();

        const rows = text
            .split('\n')
            .map(row => row.trim())
            .filter(row => row.length > 0);
        if (rows.length < 2) return [];

        const headers = rows[0].split(';').map(h => h.trim().toLowerCase());
        return rows.slice(1).map(row => {
            const cells = row.split(';').map(c => c.trim());
            const record = {};
            headers.forEach((header, i) => { record[header] = cells[i] || 'N/A'; });
            return record;
        });
    } catch (err) {
        console.warn('fetchMetadata failed:', err);
        return [];
    }
}
120
 
121
// Fill every metadata filter dropdown with its fixed set of allowed values.
function populateFilters() {
    const predefinedValues = {
        label: ["spoof", "genuine"],
        system: ["bonafide"].concat(Array.from({ length: 19 }, (_, i) => `A${String(i + 1).padStart(2, '0')}`)),
        codec: ["FLAC", "WAV", "MP3"],
        genre: ["male", "female"],
        year: ["2020", "2021", "2022", "2023", "2024", "2025"]
    };
    for (const [key, values] of Object.entries(predefinedValues)) {
        populateDropdown(`filter-${key}`, values);
    }
}
133
 
134
// Populate one <select> with an "All" option plus the given values, and
// re-render the filtered metadata table whenever the selection changes.
function populateDropdown(id, values) {
    const select = document.getElementById(id);
    if (!select) return;
    select.innerHTML = '<option value="">All</option>';
    values.forEach(v => {
        const option = document.createElement('option');
        option.value = v;
        option.textContent = v.charAt(0).toUpperCase() + v.slice(1);
        select.appendChild(option);
    });
    // BUG FIX: the previous code registered `filterMetadata`, a function that
    // no longer exists in this file (it lived in script_org.js), which threw a
    // ReferenceError as soon as a dropdown was populated. Re-render the
    // filtered table directly instead — this is exactly what the
    // DOMContentLoaded handler does for the initial render.
    select.addEventListener('change', async () => {
        const metadata = await fetchMetadata();
        displayMetadata(null, metadata, true);
    });
}
146
 
 
147
// Render a metadata table into #metadata-display.
//   filteredOnly=true  -> filter ALL metadata rows by the dropdown selections.
//   filteredOnly=false -> show only rows whose `filedir` matches one of the
//                         selected files' names.
function displayMetadata(files, metadata, filteredOnly = false) {
    metadataDisplay.innerHTML = '';
    if (!filteredOnly && (!files || files.length === 0)) {
        metadataDisplay.innerHTML = '<p>No files selected.</p>';
        return;
    }

    let filteredMetadata;
    if (filteredOnly) {
        // Read the current dropdown selections ('' means "All").
        const selected = {};
        for (const key of ['label', 'system', 'codec', 'genre', 'year']) {
            const el = document.getElementById(`filter-${key}`);
            selected[key] = el ? el.value.toLowerCase() : '';
        }
        filteredMetadata = metadata.filter(entry =>
            Object.keys(selected).every(key =>
                selected[key] === '' ||
                (entry[key] && entry[key].toLowerCase() === selected[key])
            )
        );
    } else {
        const selectedFiles = Array.from(files).map(f => f.name.trim().toLowerCase());
        // Guard entry.filedir: a malformed metadata row without a `filedir`
        // column previously crashed rendering on `.trim()` of undefined.
        filteredMetadata = metadata.filter(entry =>
            entry.filedir && selectedFiles.includes(entry.filedir.trim().toLowerCase())
        );
    }

    if (!filteredMetadata || filteredMetadata.length === 0) {
        metadataDisplay.innerHTML = '<p>No metadata found.</p>';
        return;
    }

    // Build a Bootstrap-styled table: one header row from the first record's
    // keys, then one row per record.
    const table = document.createElement('table');
    table.className = 'table table-striped table-bordered';

    const headerRow = document.createElement('tr');
    Object.keys(filteredMetadata[0]).forEach(h => {
        const th = document.createElement('th');
        th.textContent = h.charAt(0).toUpperCase() + h.slice(1);
        headerRow.appendChild(th);
    });
    table.appendChild(headerRow);

    filteredMetadata.forEach(entry => {
        const row = document.createElement('tr');
        Object.values(entry).forEach(val => {
            const td = document.createElement('td');
            td.textContent = val;
            row.appendChild(td);
        });
        table.appendChild(row);
    });

    metadataDisplay.appendChild(table);
}
189
 
 
190
// Initial page setup: build the filter dropdowns, then show the full
// (unfiltered) metadata table.
document.addEventListener('DOMContentLoaded', async () => {
    populateFilters();
    displayMetadata(null, await fetchMetadata(), true);
});
195
 
196
+ // ---------------- Waveform / UI helpers ----------------
 
 
 
 
 
 
 
 
 
 
197
 
198
+ // create a unique id
199
// Generate a short pseudo-random element id such as "wave_k3j9x2a".
function uid(prefix = 'id') {
    const suffix = Math.random().toString(36).slice(2, 9);
    return `${prefix}_${suffix}`;
}
 
202
 
203
+ // create waveform card for a blob or file
204
// Build a playable waveform card for an audio Blob, insert it at the top of
// the recordings list, and return { listItem, wavesurfer }.
// `filetype` and `origin` are accepted from callers but not used here.
function createWaveformItem({ blob, filename, filetype, origin = 'upload' }) {
    const id = uid('wave');

    // small factory for styled elements
    const makeEl = (tag, className, text) => {
        const el = document.createElement(tag);
        if (className) el.className = className;
        if (text !== undefined) el.textContent = text;
        return el;
    };

    const li = makeEl('li', 'list-group-item');
    const wrapper = makeEl('div', 'wave-item');
    const title = makeEl('div', 'file-title', filename || 'Audio');

    const canvasDiv = makeEl('div', 'wave-canvas');
    canvasDiv.id = id;

    const controls = makeEl('div', 'wave-controls');
    const playBtn = makeEl('button', 'btn btn-sm btn-primary btn-small', 'Play');
    const pauseBtn = makeEl('button', 'btn btn-sm btn-secondary btn-small', 'Pause');
    const analyzeBtn = makeEl('button', 'btn btn-sm btn-info btn-small', 'Analyze');

    const downloadBtn = makeEl('a', 'btn btn-sm btn-outline-success btn-small', 'Download');
    // NOTE(review): this object URL is never revoked — it is kept alive so the
    // Download link works for the lifetime of the card.
    downloadBtn.href = URL.createObjectURL(blob);
    downloadBtn.download = filename || 'audio.wav';

    [playBtn, pauseBtn, downloadBtn, analyzeBtn].forEach(btn => controls.appendChild(btn));
    [title, canvasDiv, controls].forEach(el => wrapper.appendChild(el));
    li.appendChild(wrapper);
    recordingsList.prepend(li); // newest first

    // Render the waveform into the card's container.
    const ws = WaveSurfer.create({
        container: `#${id}`,
        waveColor: '#8ab4f8',
        progressColor: '#0d6efd',
        height: 80,
        responsive: true,
        normalize: true,
        backend: 'WebAudio'
    });
    ws.loadBlob(blob);

    // Transport controls + play-state label updates.
    playBtn.addEventListener('click', () => ws.play());
    pauseBtn.addEventListener('click', () => ws.pause());
    ws.on('finish', () => { playBtn.textContent = 'Play'; });
    ws.on('play', () => { playBtn.textContent = 'Playing'; });
    ws.on('pause', () => { playBtn.textContent = 'Play'; });

    // Analyze sends THIS card's blob to the prediction API.
    analyzeBtn.addEventListener('click', async () => {
        responseDiv.textContent = 'Analyzing ' + (filename || 'file') + ' ...';
        try {
            const res = await sendBlobToAPI(blob, filename);
            if (Array.isArray(res) && res.length > 0) {
                responseDiv.innerHTML = `File: <b>${res[0].filename || filename}</b>, Label: <b>${res[0].label}</b>, Confidence: <b>${res[0].confidence}</b>`;
            } else {
                responseDiv.textContent = 'No response from API';
            }
        } catch (err) {
            responseDiv.textContent = 'Error: ' + err.message;
        }
    });

    return { listItem: li, wavesurfer: ws };
}
295
 
296
+ // send a Blob to backend predict endpoint
297
// POST a single audio Blob to the prediction endpoint as multipart form data
// (field name 'files') and return the parsed JSON response.
// Throws with the server's status and body text on a non-2xx response.
async function sendBlobToAPI(blob, filename = 'file.wav') {
    const formData = new FormData();
    formData.append('files', blob, filename);
    const resp = await fetch('http://127.0.0.1:8000/predict/', { method: 'POST', body: formData });
    if (resp.ok) {
        return resp.json();
    }
    const txt = await resp.text();
    throw new Error('Server error: ' + resp.status + ' - ' + txt);
}
307
 
308
+ // handle file uploads -> create waveform items (and optionally auto-analyze)
309
// When the user selects files, show their metadata rows and create one
// waveform card per file.
audioFileInput.addEventListener('change', async (e) => {
    const files = Array.from(e.target.files || []);
    if (files.length === 0) return;

    displayMetadata(files, await fetchMetadata(), false);

    files.forEach(file => {
        createWaveformItem({ blob: file, filename: file.name, filetype: file.type, origin: 'upload' });
    });
});
322
 
323
+ // upload & analyze button (sends all current files displayed in list)
324
// "Upload & analyze": analyze every card currently in the list by triggering
// each card's own Analyze button (which already holds the right Blob in its
// closure), spacing requests out to avoid flooding the API.
// (The previous version also contained a dead `blobs`-gathering loop that did
// nothing — its own comments admitted as much — and queried the list twice;
// both removed.)
uploadButton.addEventListener('click', async () => {
    const items = recordingsList.querySelectorAll('.wave-item');
    if (items.length === 0) {
        alert('No files to upload/analyze. Please upload or record first.');
        return;
    }
    responseDiv.textContent = 'Uploading and analyzing audio...';
    for (const item of items) {
        const analyzeBtn = item.querySelector('button.btn-info');
        if (analyzeBtn) analyzeBtn.click();
        // slight delay between requests to avoid flooding
        await new Promise(r => setTimeout(r, 500));
    }
});
354
 
355
+ // ---------------- Recording logic (cleaned, single set of handlers) ----------------
356
// Start recording from the microphone with Recorder.js (mono), then flip the
// transport buttons into the "recording" state.
recordButton.addEventListener('click', async () => {
    startAudioContext();
    try {
        gumStream = await navigator.mediaDevices.getUserMedia({ audio: true });
        input = audioContext.createMediaStreamSource(gumStream);
        rec = new Recorder(input, { numChannels: 1 });
        rec.record();

        recordButton.disabled = true;
        stopButton.disabled = false;
        pauseButton.disabled = false;
        pauseButton.textContent = 'Pause';
    } catch (err) {
        alert('Microphone access denied: ' + (err.message || err));
    }
});
372
+
373
// Toggle between paused and recording; the button label reflects the action
// a click would perform next.
pauseButton.addEventListener('click', () => {
    if (!rec) return;
    const currentlyRecording = rec.recording;
    if (currentlyRecording) {
        rec.stop();
        pauseButton.textContent = 'Resume';
    } else {
        rec.record();
        pauseButton.textContent = 'Pause';
    }
});
383
+
384
// Stop recording: release the microphone, export the take as WAV, show its
// waveform card, and wire its Analyze button to send a 16 kHz resampled copy
// to the API.
stopButton.addEventListener('click', () => {
    if (!rec) return;

    stopButton.disabled = true;
    pauseButton.disabled = true;
    recordButton.disabled = false;

    rec.stop();

    // Release the microphone track.
    if (gumStream && gumStream.getAudioTracks && gumStream.getAudioTracks()[0]) {
        gumStream.getAudioTracks()[0].stop();
    }

    rec.exportWAV(async (blob) => {
        if (!blob || blob.size === 0) {
            responseDiv.textContent = 'Error: recorded file is empty.';
            return;
        }

        const recordingName = `recording_${new Date().toISOString()}.wav`;
        try {
            // Show the original take's waveform.
            const item = createWaveformItem({ blob, filename: recordingName, origin: 'record' });
            // The model expects 16 kHz input, so analyze a resampled copy.
            const resampledBlob = await resampleAudio(blob, 16000);

            // BUG FIX: the old code assigned `.onclick` while the card's
            // Analyze button still had its original click listener attached,
            // so a single click sent BOTH the raw and the resampled blob.
            // cloneNode() drops listeners added via addEventListener; swap in
            // the clone before wiring the resampled handler.
            const oldBtn = item.listItem.querySelector('button.btn-info');
            const analyzeBtn = oldBtn.cloneNode(true);
            oldBtn.replaceWith(analyzeBtn);

            analyzeBtn.addEventListener('click', async () => {
                responseDiv.textContent = 'Analyzing recorded audio...';
                try {
                    // BUG FIX: previously read `analyzeBtn.dataset?.filename`,
                    // which was never set; use the card's real filename.
                    const res = await sendBlobToAPI(resampledBlob, recordingName);
                    if (Array.isArray(res) && res.length > 0) {
                        responseDiv.innerHTML = `File: <b>${res[0].filename || 'recording'}</b>, Label: <b>${res[0].label}</b>, Confidence: <b>${res[0].confidence}</b>`;
                    } else {
                        responseDiv.textContent = 'No response from API';
                    }
                } catch (err) {
                    responseDiv.textContent = 'Error: ' + err.message;
                }
            });
        } catch (err) {
            console.error('Resample or create waveform failed:', err);
            responseDiv.textContent = 'Error processing recording: ' + err.message;
        }
    });
});
429
 
430
+ // ---------------- Utility: attach analyze to uploaded file items so they use original blob ----------------
431
+ // Note: createWaveformItem already wires analyze to send the provided blob; so uploaded files will analyze original file
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
432
 
433
+ // End of script
Web/script_org.js ADDED
@@ -0,0 +1,498 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ const uploadButton = document.getElementById('upload-button');
2
+ const audioFileInput = document.getElementById('audio-file');
3
+ const recordButton = document.getElementById('recordButton');
4
+ const stopButton = document.getElementById('stopButton');
5
+ const pauseButton = document.getElementById('pauseButton');
6
+ const responseDiv = document.getElementById('response');
7
+ const metadataDisplay = document.getElementById('metadata-display');
8
+
9
+ let gumStream;
10
+ let rec;
11
+ let input;
12
+ let audioContext;
13
+
14
+ function startAudioContext() {
15
+ if (!audioContext) {
16
+ audioContext = new (window.AudioContext || window.webkitAudioContext)();
17
+ } else if (audioContext.state === 'suspended') {
18
+ audioContext.resume().then(() => {
19
+ console.log('AudioContext repris');
20
+ });
21
+ }
22
+ }
23
+
24
+ // Fonction pour rééchantillonner l'audio à 16 kHz
25
+ async function resampleAudio(blob, targetSampleRate = 16000) {
26
+ return new Promise((resolve, reject) => {
27
+ const reader = new FileReader();
28
+ reader.onload = async () => {
29
+ const audioContext = new (window.AudioContext || window.webkitAudioContext)();
30
+ const buffer = await audioContext.decodeAudioData(reader.result);
31
+
32
+ // Créer un nouvel AudioContext avec le taux d'échantillonnage cible
33
+ const offlineContext = new OfflineAudioContext(
34
+ buffer.numberOfChannels,
35
+ buffer.length * (targetSampleRate / buffer.sampleRate),
36
+ targetSampleRate
37
+ );
38
+
39
+ // Créer une source audio avec le buffer original
40
+ const source = offlineContext.createBufferSource();
41
+ source.buffer = buffer;
42
+
43
+ // Connecter la source au contexte offline
44
+ source.connect(offlineContext.destination);
45
+ source.start();
46
+
47
+ // Rendre l'audio
48
+ const resampledBuffer = await offlineContext.startRendering();
49
+
50
+ // Convertir le buffer rééchantillonné en WAV
51
+ const wavBlob = bufferToWav(resampledBuffer);
52
+ resolve(wavBlob);
53
+ };
54
+ reader.onerror = reject;
55
+ reader.readAsArrayBuffer(blob);
56
+ });
57
+ }
58
+
59
+ // Fonction pour convertir un AudioBuffer en WAV
60
+ function bufferToWav(buffer) {
61
+ const numChannels = buffer.numberOfChannels;
62
+ const sampleRate = buffer.sampleRate;
63
+ const length = buffer.length * numChannels * 2; // 2 bytes par échantillon
64
+ const data = new Float32Array(length);
65
+
66
+ // Interleave les canaux
67
+ for (let channel = 0; channel < numChannels; channel++) {
68
+ const channelData = buffer.getChannelData(channel);
69
+ for (let i = 0; i < channelData.length; i++) {
70
+ data[i * numChannels + channel] = channelData[i];
71
+ }
72
+ }
73
+
74
+ // Encoder en WAV
75
+ const wavBlob = encodeWAV(data, sampleRate, numChannels);
76
+ return wavBlob;
77
+ }
78
+
79
+ // Fonction pour encoder des données audio en WAV
80
+ function encodeWAV(samples, sampleRate, numChannels) {
81
+ const buffer = new ArrayBuffer(44 + samples.length * 2);
82
+ const view = new DataView(buffer);
83
+
84
+ // Écrire l'en-tête WAV
85
+ writeString(view, 0, 'RIFF');
86
+ view.setUint32(4, 36 + samples.length * 2, true);
87
+ writeString(view, 8, 'WAVE');
88
+ writeString(view, 12, 'fmt ');
89
+ view.setUint32(16, 16, true);
90
+ view.setUint16(20, 1, true); // Format PCM
91
+ view.setUint16(22, numChannels, true);
92
+ view.setUint32(24, sampleRate, true);
93
+ view.setUint32(28, sampleRate * numChannels * 2, true);
94
+ view.setUint16(32, numChannels * 2, true);
95
+ view.setUint16(34, 16, true); // Bits par échantillon
96
+ writeString(view, 36, 'data');
97
+ view.setUint32(40, samples.length * 2, true);
98
+
99
+ // Écrire les échantillons audio
100
+ floatTo16BitPCM(view, 44, samples);
101
+
102
+ return new Blob([view], { type: 'audio/wav' });
103
+ }
104
+
105
+ // Fonction utilitaire pour écrire une chaîne dans un DataView
106
+ function writeString(view, offset, string) {
107
+ for (let i = 0; i < string.length; i++) {
108
+ view.setUint8(offset + i, string.charCodeAt(i));
109
+ }
110
+ }
111
+
112
+ // Fonction utilitaire pour convertir des échantillons flottants en PCM 16 bits
113
+ function floatTo16BitPCM(view, offset, input) {
114
+ for (let i = 0; i < input.length; i++, offset += 2) {
115
+ const s = Math.max(-1, Math.min(1, input[i]));
116
+ view.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7FFF, true);
117
+ }
118
+ }
119
+
120
+ // Function to fetch metadata from the text file
121
+ async function fetchMetadata() {
122
+ try {
123
+ const response = await fetch('../metadata.txt'); // Assurez-vous que le fichier est accessible
124
+ if (!response.ok) {
125
+ throw new Error('Failed to fetch metadata');
126
+ }
127
+ const text = await response.text();
128
+ console.log('Metadata file content:', text); // Debugging
129
+
130
+ // Split text into lines
131
+ const lines = text.split('\n').map(line => line.trim()).filter(line => line !== '');
132
+
133
+ if (lines.length < 2) {
134
+ throw new Error('Metadata file is empty or malformed');
135
+ }
136
+
137
+ // Extract headers
138
+ const headers = lines[0].split(';').map(h => h.trim().toLowerCase());
139
+
140
+ // Extract data
141
+ const metadata = lines.slice(1).map(line => {
142
+ const values = line.split(';').map(value => value.trim());
143
+ let entry = {};
144
+ headers.forEach((header, index) => {
145
+ entry[header] = values[index] || 'N/A'; // Default to 'N/A' if missing data
146
+ });
147
+ return entry;
148
+ });
149
+
150
+ console.log('Parsed Metadata:', metadata); // Debugging
151
+ return metadata;
152
+ } catch (error) {
153
+ console.error('Error fetching metadata:', error);
154
+ return [];
155
+ }
156
+ }
157
+
158
+ function populateFilters() {
159
+ const predefinedValues = {
160
+ label: ["spoof", "genuine"],
161
+ system: ["bonafide"].concat(Array.from({ length: 19 }, (_, i) => `A${String(i + 1).padStart(2, '0')}`)),
162
+ codec: ["FLAC", "WAV", "MP3"],
163
+ genre: ["male", "female"],
164
+ year: ["2020", "2021", "2022", "2023", "2024", "2025"]
165
+ };
166
+
167
+ Object.keys(predefinedValues).forEach(key => {
168
+ populateDropdown(`filter-${key}`, predefinedValues[key]);
169
+ });
170
+ }
171
+
172
+ function populateDropdown(id, values) {
173
+ const select = document.getElementById(id);
174
+ select.innerHTML = '<option value="">All</option>'; // Ajouter l'option "All" par défaut
175
+
176
+ values.forEach(value => {
177
+ const option = document.createElement("option");
178
+ option.value = value;
179
+ option.textContent = value.charAt(0).toUpperCase() + value.slice(1); // Majuscule initiale
180
+ select.appendChild(option);
181
+ });
182
+
183
+ select.addEventListener("change", filterMetadata);
184
+ }
185
+
186
+ function filterMetadata() {
187
+ const selectedLabel = document.getElementById("filter-label").value.toLowerCase();
188
+ const selectedSystem = document.getElementById("filter-system").value.toLowerCase();
189
+ const selectedCodec = document.getElementById("filter-codec").value.toLowerCase();
190
+ const selectedGenre = document.getElementById("filter-genre").value.toLowerCase();
191
+ const selectedYear = document.getElementById("filter-year").value.toLowerCase();
192
+
193
+ fetchMetadata().then(metadata => {
194
+ const filteredMetadata = metadata.filter(entry =>
195
+ (selectedLabel === "" || entry.label.toLowerCase() === selectedLabel) &&
196
+ (selectedSystem === "" || entry.system.toLowerCase() === selectedSystem) &&
197
+ (selectedCodec === "" || entry.codec.toLowerCase() === selectedCodec) &&
198
+ (selectedGenre === "" || entry.genre.toLowerCase() === selectedGenre) &&
199
+ (selectedYear === "" || entry.year.toLowerCase() === selectedYear)
200
+ );
201
+
202
+ displayMetadata(null, metadata, true); // Mode filtrage
203
+ });
204
+ }
205
+
206
+
207
+ function displayMetadata(files, metadata, filteredOnly = false) {
208
+ metadataDisplay.innerHTML = ''; // Nettoyer l'affichage avant de remplir
209
+
210
+ // Si on ne filtre pas et qu'aucun fichier n'est sélectionné, afficher tout
211
+ if (!filteredOnly && (!files || files.length === 0)) {
212
+ metadataDisplay.innerHTML = '<p>No files selected.</p>';
213
+ return;
214
+ }
215
+
216
+ let filteredMetadata;
217
+
218
+ if (filteredOnly) {
219
+ // Appliquer les filtres des drop-downs
220
+ const selectedLabel = document.getElementById("filter-label").value.toLowerCase();
221
+ const selectedSystem = document.getElementById("filter-system").value.toLowerCase();
222
+ const selectedCodec = document.getElementById("filter-codec").value.toLowerCase();
223
+ const selectedGenre = document.getElementById("filter-genre").value.toLowerCase();
224
+ const selectedYear = document.getElementById("filter-year").value.toLowerCase();
225
+
226
+ filteredMetadata = metadata.filter(entry =>
227
+ (selectedLabel === "" || entry.label.toLowerCase() === selectedLabel) &&
228
+ (selectedSystem === "" || entry.system.toLowerCase() === selectedSystem) &&
229
+ (selectedCodec === "" || entry.codec.toLowerCase() === selectedCodec) &&
230
+ (selectedGenre === "" || entry.genre.toLowerCase() === selectedGenre) &&
231
+ (selectedYear === "" || entry.year.toLowerCase() === selectedYear)
232
+ );
233
+ } else {
234
+ // Obtenir la liste des fichiers sélectionnés
235
+ const selectedFiles = Array.from(files).map(file => file.name.trim().toLowerCase());
236
+
237
+ // Filtrer les métadonnées pour ne garder que celles des fichiers sélectionnés
238
+ filteredMetadata = metadata.filter(entry => selectedFiles.includes(entry.filedir.trim().toLowerCase()));
239
+ }
240
+
241
+ // Vérifier si aucun résultat après filtrage
242
+ if (filteredMetadata.length === 0) {
243
+ metadataDisplay.innerHTML = '<p>No metadata found.</p>';
244
+ return;
245
+ }
246
+
247
+ // Création du tableau Bootstrap
248
+ const table = document.createElement('table');
249
+ table.classList.add('table', 'table-striped', 'table-bordered');
250
+
251
+ // Création de l'en-tête du tableau
252
+ const headerRow = document.createElement('tr');
253
+ Object.keys(filteredMetadata[0]).forEach(headerText => {
254
+ const header = document.createElement('th');
255
+ header.textContent = headerText.charAt(0).toUpperCase() + headerText.slice(1);
256
+ headerRow.appendChild(header);
257
+ });
258
+ table.appendChild(headerRow);
259
+
260
+ // Remplir le tableau avec les métadonnées filtrées
261
+ filteredMetadata.forEach(entry => {
262
+ const row = document.createElement('tr');
263
+ Object.values(entry).forEach(value => {
264
+ const cell = document.createElement('td');
265
+ cell.textContent = value;
266
+ row.appendChild(cell);
267
+ });
268
+ table.appendChild(row);
269
+ });
270
+
271
+ // Ajouter le tableau à la section d'affichage des métadonnées
272
+ metadataDisplay.appendChild(table);
273
+ }
274
+
275
+
276
+ document.addEventListener('DOMContentLoaded', async () => {
277
+ populateFilters(); // Charger les valeurs fixes dans les drop-downs
278
+ const metadata = await fetchMetadata();
279
+ displayMetadata(metadata);
280
+ });
281
+
282
+ async function uploadAudio(files) {
283
+ if (!files || files.length === 0) {
284
+ alert('Please select or record files first!');
285
+ return;
286
+ }
287
+
288
+ const formData = new FormData();
289
+ const filesArray = Array.from(files);
290
+ for (let i = 0; i < filesArray.length; i++) {
291
+ formData.append('files', filesArray[i]);
292
+ }
293
+
294
+ responseDiv.textContent = 'Uploading and analyzing audio...';
295
+
296
+ try {
297
+ const metadataObj = await fetchMetadata();
298
+ displayMetadata(filesArray, metadataObj); // Afficher uniquement les métadonnées des fichiers sélectionnés
299
+
300
+ const response = await fetch('http://127.0.0.1:8000/predict/', {
301
+ method: 'POST',
302
+ body: formData,
303
+ });
304
+
305
+ if (!response.ok) {
306
+ const errorData = await response.json();
307
+ throw new Error(`Server error: ${errorData.message || response.statusText}`);
308
+ }
309
+
310
+ const data = await response.json();
311
+ responseDiv.innerHTML = '';
312
+
313
+ data.forEach((result, index) => {
314
+ const resultDiv = document.createElement('div');
315
+ resultDiv.innerHTML = `File: <b>${result.filename}</b>, Label: <b>${result.label}</b>, Confidence: <b>${result.confidence}</b>`;
316
+ responseDiv.appendChild(resultDiv);
317
+
318
+
319
+ });
320
+
321
+ } catch (error) {
322
+ console.error('Error:', error);
323
+ responseDiv.textContent = 'Error: ' + error.message;
324
+ }
325
+ }
326
+
327
+
328
+
329
+ uploadButton.addEventListener('click', () => {
330
+ const files = audioFileInput.files;
331
+ if (!files || files.length === 0) {
332
+ alert('Please select files first!');
333
+ return;
334
+ }
335
+ uploadAudio(files);
336
+ });
337
+
338
+ // Start Recording
339
+ recordButton.addEventListener('click', async () => {
340
+ startAudioContext(); // Initialiser ou reprendre l'AudioContext
341
+
342
+ console.log('Recording started');
343
+
344
+ const constraints = { audio: true, video: false };
345
+
346
+ try {
347
+ gumStream = await navigator.mediaDevices.getUserMedia(constraints);
348
+ console.log('Microphone access granted');
349
+ input = audioContext.createMediaStreamSource(gumStream);
350
+ console.log('Audio source created');
351
+
352
+ // Initialize Recorder.js
353
+ rec = new Recorder(input, { numChannels: 1 });
354
+ console.log('Recorder initialized');
355
+
356
+ // Start recording
357
+ rec.record();
358
+ console.log('Recording started');
359
+
360
+ // Update button states
361
+ recordButton.disabled = true;
362
+ stopButton.disabled = false;
363
+ pauseButton.disabled = false;
364
+ } catch (error) {
365
+ console.error('Error accessing microphone:', error);
366
+ alert('Error accessing microphone: ' + error.message);
367
+ }
368
+ });
369
+
370
+
371
// Stop Recording: finalize the take, release the microphone, export the
// audio as WAV, resample it to 16 kHz, and send it to the analysis API.
// Relies on module-level state: rec, gumStream, responseDiv, and the
// transport buttons.
function stopRecording() {
    console.log('stopRecording called');

    // Reset the transport buttons to the idle state.
    stopButton.disabled = true;
    recordButton.disabled = false;
    pauseButton.disabled = true;
    pauseButton.innerHTML = 'Pause';

    // Stop capturing samples.
    rec.stop();
    console.log('Recording stopped');

    // Release the microphone. Stop EVERY audio track, not just the first,
    // so the stream is fully released and the browser's recording
    // indicator clears even if the stream carries multiple tracks.
    gumStream.getAudioTracks().forEach((track) => track.stop());
    console.log('Microphone access stopped');

    // Export the take as a WAV blob and hand it off to the API.
    rec.exportWAV(async (blob) => {
        console.log('Audio exported as WAV');

        // Guard against an empty capture (e.g. stop immediately after start).
        if (blob.size === 0) {
            console.error('Le fichier audio est vide.');
            responseDiv.textContent = 'Erreur : Le fichier audio est vide.';
            return;
        }

        // Resample to 16 kHz — presumably the rate the model expects
        // (NOTE(review): confirm against the API's input contract).
        try {
            const resampledBlob = await resampleAudio(blob, 16000);
            console.log('Audio rééchantillonné à 16 kHz');

            // Await so upload errors are caught by this try/catch.
            await sendAudioToAPI(resampledBlob);
        } catch (error) {
            console.error('Erreur lors du rééchantillonnage :', error);
            responseDiv.textContent = 'Erreur : ' + error.message;
        }
    });
}
413
+
414
// Upload a WAV blob to the local prediction endpoint and render the top
// prediction into responseDiv.
//
// @param {Blob} blob - WAV audio to classify.
// Side effects: writes the result (or an error message) into responseDiv.
async function sendAudioToAPI(blob) {
    console.log('Sending audio to API');

    const formData = new FormData();
    const filename = 'recorded-audio.wav';
    // The FastAPI endpoint expects the upload under the 'files' field.
    formData.append('files', blob, filename);

    try {
        const response = await fetch('http://127.0.0.1:8000/predict/', {
            method: 'POST',
            body: formData,
        });

        console.log('API response status:', response.status);

        if (!response.ok) {
            throw new Error(`HTTP error! status: ${response.status}`);
        }

        const data = await response.json();
        console.log('API response data:', data);

        // The API is expected to return a list of predictions; show the
        // first one. Guard against a non-array or empty payload so a
        // malformed response yields a message instead of a TypeError.
        // NOTE(review): label/confidence are interpolated into innerHTML;
        // safe only while the API is trusted — escape if that changes.
        if (Array.isArray(data) && data.length > 0) {
            responseDiv.innerHTML = `Label: <b>${data[0].label}</b>, Confidence: <b>${data[0].confidence}</b>`;
        } else {
            responseDiv.textContent = 'Error: No data returned from the API.';
        }
    } catch (error) {
        console.error('Error sending audio to API:', error);
        responseDiv.textContent = 'Error: ' + error.message;
    }
}
447
+
448
// Pause / Resume: toggle the recorder and update the button caption to
// show the action the next click will perform.
pauseButton.addEventListener('click', () => {
    const wasRecording = rec.recording;
    if (wasRecording) {
        rec.stop();    // pause capture
    } else {
        rec.record();  // resume capture
    }
    pauseButton.textContent = wasRecording ? 'Resume' : 'Pause';
});
460
+
461
+
462
// Stop button delegates directly to stopRecording (which ignores the
// event argument).
stopButton.addEventListener('click', stopRecording);
465
+
466
// NOTE(review): a second, byte-identical 'click' listener for recordButton
// was registered here, duplicating the one at the top of this section.
// addEventListener does not de-duplicate distinct function objects, so each
// click fired BOTH handlers: two getUserMedia requests and two Recorder
// instances were created, with only the last assigned to rec/gumStream —
// leaking the first microphone stream. The duplicate registration has been
// removed; the single handler above is the sole source of truth.
497
+
498
+