File size: 1,805 Bytes
1e92f2d |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 |
/*
This is a simple promise queue that allows you to limit the number of concurrent promises
that are running at any given time. It's used to limit the number of concurrent
prefetch requests that are being made to the server but could be used for other
things as well.
*/
export class PromiseQueue {
  /** Maximum number of tasks allowed to run at once (bump() may temporarily exceed it). */
  #maxConcurrency: number
  /** Number of tasks currently executing. */
  #runningCount: number
  /**
   * Pending entries, FIFO. `promise` is the exact promise handed back to the
   * caller by enqueue() (bump() matches on its identity); `task` is the runner
   * that settles it.
   */
  #queue: Array<{
    promise: Promise<unknown>
    task: () => void
  }>
  constructor(maxConcurrency = 5) {
    this.#maxConcurrency = maxConcurrency
    this.#runningCount = 0
    this.#queue = []
  }
  /**
   * Adds a task to the queue. It starts immediately if fewer than
   * `maxConcurrency` tasks are running; otherwise it waits its turn (FIFO).
   *
   * @param promiseFn - Deferred work; only invoked once a slot is available.
   * @returns A promise that settles with the task's result or rejection.
   *          Pass this same promise to bump() to prioritize the task.
   */
  enqueue<T>(promiseFn: () => Promise<T>): Promise<T> {
    // Definite-assignment (!): the Promise executor runs synchronously, so
    // both callbacks are assigned before `task` can ever be invoked.
    let taskResolve!: (value: T | PromiseLike<T>) => void
    let taskReject!: (reason?: unknown) => void
    // Typing the constructor directly avoids the unchecked `as Promise<T>` cast.
    const taskPromise = new Promise<T>((resolve, reject) => {
      taskResolve = resolve
      taskReject = reject
    })
    const task = async () => {
      try {
        this.#runningCount++
        const result = await promiseFn()
        taskResolve(result)
      } catch (error) {
        taskReject(error)
      } finally {
        this.#runningCount--
        this.#processNext()
      }
    }
    // FIFO: new work goes to the back; bump() exists for priority jumps.
    this.#queue.push({ promise: taskPromise, task })
    this.#processNext()
    return taskPromise
  }
  /**
   * Moves a still-queued task (identified by the promise enqueue() returned)
   * to the front of the queue and starts it immediately — even if that
   * temporarily exceeds `maxConcurrency`. No-op if the task is not queued
   * (e.g. already running or finished).
   */
  bump(promiseFn: Promise<unknown>) {
    const index = this.#queue.findIndex((item) => item.promise === promiseFn)
    if (index > -1) {
      const [bumpedItem] = this.#queue.splice(index, 1)
      this.#queue.unshift(bumpedItem)
      this.#processNext(true)
    }
  }
  /** Starts the next queued task if a slot is free (or unconditionally when `forced`). */
  #processNext(forced = false) {
    if (
      (this.#runningCount < this.#maxConcurrency || forced) &&
      this.#queue.length > 0
    ) {
      this.#queue.shift()?.task()
    }
  }
}
|