Claude committed
Commit b2d51c7 · unverified · 1 Parent(s): 51de4e5

fix(frontend): refactor retry logic to use loop instead of recursion


ESLint's react-hooks/immutability rule flagged the recursive call to
runSegmentation as accessing a variable before its declaration.
Refactored the cold-start retry handling to use a while loop, which is
cleaner and avoids the stale-closure issue.
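The shape of the change, as a minimal sketch. The names here (useRetryingAction, doWork, MAX_RETRIES, BASE_DELAY_MS) are hypothetical stand-ins for the hook's real identifiers, not code from this repository:

import { useCallback } from 'react'

// Illustrative stand-ins for MAX_COLD_START_RETRIES / INITIAL_RETRY_DELAY.
const MAX_RETRIES = 5
const BASE_DELAY_MS = 2000
const sleep = (ms: number) =>
  new Promise<void>((resolve) => setTimeout(resolve, ms))

// Before (flagged): a recursive callback must call its own `const` binding,
// which the lint rule reports as use of a variable before its declaration:
//   const run = useCallback(async (attempt = 0) => {
//     try { await doWork() }
//     catch { if (attempt < MAX_RETRIES) return run(attempt + 1) }
//   }, [])

// After: a loop keeps the retry counter in a local variable, so the
// callback never references its own binding and no stale closure is
// captured across attempts.
export function useRetryingAction(doWork: () => Promise<void>) {
  return useCallback(async () => {
    for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
      try {
        await doWork()
        return // success: stop retrying
      } catch {
        if (attempt === MAX_RETRIES) throw new Error('retries exhausted')
        await sleep(BASE_DELAY_MS * 2 ** attempt) // exponential backoff
      }
    }
  }, [doWork])
}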

Files changed (1)
  1. frontend/src/hooks/useSegmentation.ts +86 -80
frontend/src/hooks/useSegmentation.ts CHANGED
@@ -123,92 +123,98 @@ export function useSegmentation() {
    *
    * @param caseId - The case ID to process
    * @param fastMode - Whether to use fast inference mode
-   * @param retryCount - Internal retry counter for cold start handling (do not set manually)
    */
   const runSegmentation = useCallback(
-    async (caseId: string, fastMode = true, retryCount = 0) => {
-      // Only reset state on first attempt (not retries)
-      if (retryCount === 0) {
-        // Cancel any existing job/polling
-        stopPolling()
-        abortControllerRef.current?.abort()
-
-        const abortController = new AbortController()
-        abortControllerRef.current = abortController
-
-        // Reset state
-        setError(null)
-        setResult(null)
-        setProgress(0)
-        setProgressMessage('Creating job...')
-        setJobStatus('pending')
-        setElapsedSeconds(undefined)
-        setIsLoading(true)
-      }
-
-      const abortController = abortControllerRef.current
-      if (!abortController) return
-
-      try {
-        // Create the job
-        const response = await apiClient.createSegmentJob(
-          caseId,
-          fastMode,
-          abortController.signal
-        )
-
-        // Store job reference
-        const newJobId = response.jobId
-        setJobId(newJobId)
-        currentJobRef.current = newJobId
-        setJobStatus(response.status)
-        setProgressMessage(response.message)
-
-        // Start polling
-        pollingIntervalRef.current = window.setInterval(() => {
-          pollJobStatus(newJobId, abortController.signal)
-        }, POLLING_INTERVAL)
-
-        // Do an initial poll immediately
-        await pollJobStatus(newJobId, abortController.signal)
-      } catch (err) {
-        // Ignore abort errors
-        if (err instanceof Error && err.name === 'AbortError') return
-
-        // Detect cold start (503 Service Unavailable or network failure)
-        const is503 = err instanceof ApiError && err.status === 503
-        const isNetworkError =
-          err instanceof TypeError && err.message.toLowerCase().includes('fetch')
-
-        // Retry on cold start errors with exponential backoff
-        if ((is503 || isNetworkError) && retryCount < MAX_COLD_START_RETRIES) {
-          setJobStatus('waking_up')
-          setProgressMessage(
-            `Backend is waking up... Please wait (~30-60s). Retry ${retryCount + 1}/${MAX_COLD_START_RETRIES}`
-          )
-          setProgress(0)
-
-          // Exponential backoff: 2s, 4s, 8s, 16s, 30s (capped)
-          const delay = Math.min(
-            INITIAL_RETRY_DELAY * Math.pow(2, retryCount),
-            MAX_RETRY_DELAY
-          )
-          await sleep(delay)
-
-          // Recursive retry
-          return runSegmentation(caseId, fastMode, retryCount + 1)
-        }
-
-        // Max retries exceeded or non-retryable error
-        const message =
-          is503 || isNetworkError
-            ? 'Backend failed to wake up. Please try again later.'
-            : err instanceof Error
-              ? err.message
-              : 'Failed to start job'
-        setError(message)
-        setIsLoading(false)
-        setJobStatus('failed')
-      }
+    async (caseId: string, fastMode = true) => {
+      // Cancel any existing job/polling
+      stopPolling()
+      abortControllerRef.current?.abort()
+
+      const abortController = new AbortController()
+      abortControllerRef.current = abortController
+
+      // Reset state
+      setError(null)
+      setResult(null)
+      setProgress(0)
+      setProgressMessage('Creating job...')
+      setJobStatus('pending')
+      setElapsedSeconds(undefined)
+      setIsLoading(true)
+
+      // Retry loop for cold start handling (replaces recursive call)
+      let retryCount = 0
+      while (retryCount <= MAX_COLD_START_RETRIES) {
+        try {
+          // Create the job
+          const response = await apiClient.createSegmentJob(
+            caseId,
+            fastMode,
+            abortController.signal
+          )
+
+          // Store job reference
+          const newJobId = response.jobId
+          setJobId(newJobId)
+          currentJobRef.current = newJobId
+          setJobStatus(response.status)
+          setProgressMessage(response.message)
+
+          // Start polling
+          pollingIntervalRef.current = window.setInterval(() => {
+            pollJobStatus(newJobId, abortController.signal)
+          }, POLLING_INTERVAL)
+
+          // Do an initial poll immediately
+          await pollJobStatus(newJobId, abortController.signal)
+
+          // Success - exit retry loop
+          return
+        } catch (err) {
+          // Ignore abort errors
+          if (err instanceof Error && err.name === 'AbortError') return
+
+          // Detect cold start (503 Service Unavailable or network failure)
+          const is503 = err instanceof ApiError && err.status === 503
+          const isNetworkError =
+            err instanceof TypeError &&
+            err.message.toLowerCase().includes('fetch')
+
+          // Retry on cold start errors with exponential backoff
+          if (
+            (is503 || isNetworkError) &&
+            retryCount < MAX_COLD_START_RETRIES
+          ) {
+            retryCount++
+            setJobStatus('waking_up')
+            setProgressMessage(
+              `Backend is waking up... Please wait (~30-60s). Retry ${retryCount}/${MAX_COLD_START_RETRIES}`
+            )
+            setProgress(0)
+
+            // Exponential backoff: 2s, 4s, 8s, 16s, 30s (capped)
+            const delay = Math.min(
+              INITIAL_RETRY_DELAY * Math.pow(2, retryCount - 1),
+              MAX_RETRY_DELAY
+            )
+            await sleep(delay)
+
+            // Continue to next iteration of retry loop
+            continue
+          }
+
+          // Max retries exceeded or non-retryable error
+          const message =
+            is503 || isNetworkError
+              ? 'Backend failed to wake up. Please try again later.'
+              : err instanceof Error
+                ? err.message
+                : 'Failed to start job'
+          setError(message)
+          setIsLoading(false)
+          setJobStatus('failed')
+          return
+        }
+      }
     },
     [pollJobStatus, stopPolling]
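For reference, the backoff schedule the new loop produces, assuming INITIAL_RETRY_DELAY = 2000, MAX_RETRY_DELAY = 30000, and MAX_COLD_START_RETRIES = 5. These constants are defined elsewhere in the file; the values here are inferred from the "2s, 4s, 8s, 16s, 30s (capped)" comment, not shown in this hunk:

// Assumed constant values, inferred from the comment in the diff above.
const INITIAL_RETRY_DELAY = 2000 // ms
const MAX_RETRY_DELAY = 30000 // ms
const MAX_COLD_START_RETRIES = 5

// Reproduces the delay computed inside the retry loop for each attempt.
for (let retryCount = 1; retryCount <= MAX_COLD_START_RETRIES; retryCount++) {
  const delay = Math.min(
    INITIAL_RETRY_DELAY * Math.pow(2, retryCount - 1),
    MAX_RETRY_DELAY
  )
  console.log(`retry ${retryCount}: ${delay / 1000}s`)
}
// -> retry 1: 2s, retry 2: 4s, retry 3: 8s, retry 4: 16s, retry 5: 30s (capped)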