// The purpose of this class is to maintain a queue of jobs and run them in
// batches without exceeding a maximum number of processors.
import {
doneInQueueStore,
makeUniqJobId,
startInQueueStore,
updateProgressInQueueStore,
} from "./QueueStore";
import { IJobInfo, IQueueCallbacks } from "./QueueTypes";
import { messagesApi } from "@/Api/Messages";
/**
 * The parent class for all queues. This class is not meant to be used directly.
 * It is meant to be extended by a child class that implements the runJobBatch()
 * method.
 */
export abstract class QueueParent {
    // The hard cap on processors the whole queue may use at once (adjusted
    // down in the constructor if there are few jobs).
    private _maxTotalProcs: number;

    // The number of processors each running batch of jobs consumes.
    private _procsPerJobBatch: number;

    // Each batch of inputs runs per job. Inputs are batched because it might be
    // useful to not create and destroy workers for every single job. So it
    // happens per batch, not per job. But note that batchSize is 1 by default.
    private _inputBatches: IJobInfo[][] = [];

    // Jobs currently in flight, keyed by their original input index. This is
    // just for reporting. Not actually used for anything beyond that.
    private _jobsCurrentlyRunning: { [index: number]: IJobInfo } = {};

    // The outputs of each job are stored here. Not batched.
    private _outputs: IJobInfo[] = [];

    // Handle returned by setInterval for the timer that checks job status
    // every so often to add new jobs to the queue if appropriate.
    private _queueTimer: any;

    // How many processors are being currently used by jobs in the queue.
    private _numProcsCurrentlyRunning = 0;

    // Optional user-provided hooks (onJobDone, onError, onQueueDone,
    // onProgress — see IQueueCallbacks).
    public _callbacks: IQueueCallbacks | undefined;

    // The total number of jobs (inputs.length at construction time).
    protected _numTotalJobs: number;

    // An id unique to this queue; used as the key into the queue store.
    private _id: string;

    // Whether this queue reports its lifecycle/progress to the queue store.
    private _showInQueue: boolean;

    // If this is true, jobs will be cancelled as soon as possible.
    protected jobsCancelling: boolean;

    // The done promise is resolved when the queue is finished. Same as the
    // onQueueDone callback, but promise based if that's your preference.
    public done: Promise<any>;

    // The resolve function of the `done` promise, captured so _onQueueDone()
    // can settle it.
    public _doneResolveFunc: any;

    // Whether this class fires the onJobDone callback itself.
    // NOTE(review): despite the name, when true the callback fires as each
    // BATCH completes (see _fillQueueToCapacity), not after the whole queue
    // is done — confirm intended semantics.
    private _onJobAfterQueueDone: boolean;

    /**
     * The class constructor.
     *
     * @param {string|undefined} jobTypeId  A string that identifies the type
     *     of job.
     * @param {any[]} inputs  A flat array of inputs to be processed.
     * @param {number} maxProcs  The maximum number of processors that can be
     *     used by the queue.
     * @param {IQueueCallbacks} [callbacks=undefined]  The callbacks to be
     *     used by the queue, if any.
     * @param {number} [procsPerJobBatch=1]  The number of processors that can
     *     be used by each batch of jobs.
     * @param {number} [simulBatches=undefined]  The max number of batches to
     *     run simultaneously. If undefined, calculated as maxProcs /
     *     procsPerJobBatch (to run as many batches at the same time as
     *     possible).
     * @param {number} [batchSize=undefined]  The number of jobs per batch. If
     *     undefined, calculated as inputs.length / simulBatches.
     * @param {boolean} [showInQueue=true]  Whether to show this job in the
     *     queue.
     * @param {boolean} [onJobAfterQueueDone=true]  Whether this class should
     *     fire the onJobDone callback for each job (it fires as each batch
     *     finishes). Set to false if you want to run the onJobDone callback
     *     yourself.
     */
    constructor(
        jobTypeId: string,
        inputs: any[],
        maxProcs: number,
        callbacks?: IQueueCallbacks,
        procsPerJobBatch = 1,
        simulBatches?: number,
        batchSize: number | undefined = undefined,
        showInQueue = true,
        onJobAfterQueueDone = true
    ) {
        this._numTotalJobs = inputs.length;
        this._procsPerJobBatch = procsPerJobBatch;
        this._callbacks = callbacks;
        this.jobsCancelling = false;
        this._showInQueue = showInQueue;
        this._onJobAfterQueueDone = onJobAfterQueueDone;

        // Adjust max number of processors to be used by the queue if
        // necessary. Useful if there are very few items in the queue.
        this._maxTotalProcs = maxProcs;
        this._maxTotalProcs = Math.min(
            this._maxTotalProcs,
            this._numTotalJobs * this._procsPerJobBatch
        );

        // Default simulBatches to however many batches fit in the processor
        // budget.
        if (simulBatches === undefined) {
            simulBatches = Math.floor(this._maxTotalProcs / this._procsPerJobBatch);
        }

        // Cap the processor budget again so it never exceeds what
        // simulBatches concurrent batches could actually use.
        this._maxTotalProcs = Math.min(
            this._maxTotalProcs,
            simulBatches * this._procsPerJobBatch
        );
        // this._maxTotalProcs -= this._maxTotalProcs % batchSize;

        if (batchSize === undefined) {
            // If batch size isn't defined, make it big enough to use all the
            // processors. Letting the user define this specifically in case
            // they want to use the onJobDone callback to do something with the
            // outputs as they become available, instead of using the
            // onQueueDone callback (when everything done).
            batchSize = Math.ceil(inputs.length / simulBatches);
        }

        this._id = makeUniqJobId(jobTypeId);

        // Create the `done` promise up front so _onQueueDone() can resolve it
        // even on the empty-queue early return below.
        this.done = new Promise((resolve) => {
            this._doneResolveFunc = resolve;
        });

        if (inputs.length === 0) {
            // If the queue is empty, just be done with it.
            // NOTE(review): this path reaches doneInQueueStore (via
            // _onQueueDone) without a matching startInQueueStore call, since
            // _onQueueStart() runs only below — confirm the queue store
            // tolerates "done" for an id it never saw start.
            this._onQueueDone([]);
            return;
        }

        this._onQueueStart();

        // Copy inputs into IJobInfo[], recording each input's original index
        // so outputs can be re-sorted into input order later. Use map.
        const inputInfos = inputs.map((input, index) => {
            return {
                index,
                input: input,
            } as IJobInfo;
        });

        // Split the inputs into batches. Keep in mind that the size of inputs
        // may not be exactly divisible by batchSize (the last batch may be
        // short).
        for (let i = 0; i < inputInfos.length; i += batchSize) {
            this._inputBatches.push(inputInfos.slice(i, i + batchSize));
        }

        // this._reportQueueStatusForDebug();

        // Poll every 250 ms, topping the queue up with new batches whenever
        // processor capacity frees up.
        this._queueTimer = setInterval(() => {
            // If there are no jobs left in _inputBatches, stop the timer.
            if (this._inputBatches.length === 0) {
                clearInterval(this._queueTimer);
                return;
            }
            if (this.jobsCancelling) {
                // Note that this._queueTimer is not cleared here. It is cleared
                // elsewhere (see the cancel callback in _onQueueStart).
                return;
            }
            this._fillQueueToCapacity();
        }, 250);
    }

    /**
     * A function that shows the status of the queue in the console. This is
     * only for debugging (its only call site is commented out in the
     * constructor). Note it starts its own interval that is never cleared.
     */
    private _reportQueueStatusForDebug() {
        let lastNumJobsFinished = 0;
        setInterval(() => {
            // How many jobs are in this._inputBatches, noting that it is a
            // list of lists?
            let numJobsNotStarted = 0;
            for (const batch of this._inputBatches) {
                numJobsNotStarted += batch.length;
            }
            const jobsRunning = Object.keys(this._jobsCurrentlyRunning).length;
            const numJobsFinished = this._outputs.length;
            if (numJobsFinished === lastNumJobsFinished) {
                // No change since last time. Don't report.
                return;
            }
            lastNumJobsFinished = numJobsFinished;
            // clear console
            // console.clear();
            console.log("Jobs not yet started:", numJobsNotStarted);
            console.log("Jobs running:", jobsRunning);
            console.log("Jobs finished:", numJobsFinished);
        }, 100);
    }

    /**
     * A function that fills the queue to capacity with new running jobs. This
     * function is called every so often by a timer (every 250 ms, from the
     * constructor). Each dispatched batch reserves _procsPerJobBatch
     * processors until its runJobBatch() promise settles.
     */
    private _fillQueueToCapacity() {
        // Start jobs until the queue is full or there are no more jobs.
        // eslint-disable-next-line no-constant-condition
        while (true) {
            // Cancel in progress
            if (this.jobsCancelling) {
                return;
            }

            // No more input batches to add.
            if (this._inputBatches.length === 0) {
                break;
            }

            if (
                this._numProcsCurrentlyRunning + this._procsPerJobBatch >
                this._maxTotalProcs
            ) {
                // Adding job wouldn't fit in the queue.
                break;
            }

            const inputBatch = this._inputBatches.shift();

            // No more input batches to add (defensive; the length check above
            // should make this unreachable).
            if (!inputBatch) {
                break;
            }

            // Add jobs to the _jobsCurrentlyRunning list.
            for (const jobInfo of inputBatch) {
                this._jobsCurrentlyRunning[jobInfo.index] = jobInfo;
            }

            // Reserve the processors before dispatching; released in both the
            // .then and .catch continuations below.
            this._numProcsCurrentlyRunning += this._procsPerJobBatch;

            this.runJobBatch(inputBatch, this._procsPerJobBatch)
                .then((outBatch: any[]) => {
                    this._outputs.push(...outBatch);
                    this._numProcsCurrentlyRunning -= this._procsPerJobBatch;

                    // Remove jobs from the _jobsCurrentlyRunning list
                    for (const jobInfo of inputBatch) {
                        delete this._jobsCurrentlyRunning[jobInfo.index];
                    }

                    // Call the onJobDone callback for each job in the batch.
                    if (this._onJobAfterQueueDone) {
                        for (const jobInfo of inputBatch) {
                            this._onJobDone(jobInfo);
                        }
                    }

                    // Call the onProgress callback.
                    this._onProgress(this._outputs.length / this._numTotalJobs);

                    // Check if there are no jobs left.
                    if (
                        Object.keys(this._jobsCurrentlyRunning).length === 0 &&
                        this._inputBatches.length === 0
                    ) {
                        // No jobs left. Call the onQueueDone callback.
                        this._onQueueDone(this._outputs);
                    }
                    return;
                })
                .catch((err) => {
                    // throw err;
                    // TODO: Never gets here. Why?
                    console.error("Error running job:", err);
                    this._numProcsCurrentlyRunning -= this._procsPerJobBatch;

                    // NOTE(review): unlike the success path, the failed batch's
                    // jobs are never deleted from _jobsCurrentlyRunning, so the
                    // "no jobs left" check above can never pass after an error
                    // and the `done` promise never resolves — confirm whether
                    // that is intentional.

                    // Call the onError callback for each job in the batch.
                    this._onError(inputBatch, err);

                    // Enrich known Safari/SharedArrayBuffer failure messages
                    // with actionable advice before surfacing them.
                    let msg = err.message;
                    if (msg.indexOf("SharedArrayBuffer") !== -1) {
                        msg += ". Consider updating your Safari browser or using a different browser such as Google Chrome."
                    }
                    if (msg.indexOf("must not be shared") !== -1) {
                        msg += ". This browser (likely Safari) does not support using SharedArrayBuffer with TextDecoder. We recommend using Google Chrome on Desktop for docking."
                    }
                    messagesApi.popupError(msg);

                    // NOTE(review): throwing from inside a .catch handler
                    // rejects the resulting promise with no further handler
                    // attached (an unhandled rejection) — possibly why the
                    // TODO above observes nothing downstream.
                    throw new Error(msg);
                });
        }
    }

    /**
     * The onQueueStart callback to call when the queue starts. Registers this
     * queue with the queue store (if shown) and wires up external
     * cancellation.
     */
    private _onQueueStart() {
        if (this._showInQueue) {
            startInQueueStore(this._id, this._maxTotalProcs, () => {
                // This function allows the queue to be cancelled from an
                // external location.

                // To the extent possible, abort currently running jobs.
                this.jobsCancelling = true;

                // Stop the timer that will try to submit additional jobs
                clearInterval(this._queueTimer);
            });
        }
    }

    /**
     * The onJobDone callback to call when a single job is done. Forwards the
     * job's output and original index to the user callback, if provided.
     *
     * @param {IJobInfo} jobInfo The job info of the job that is done.
     */
    private _onJobDone(jobInfo: IJobInfo) {
        if (this._callbacks && this._callbacks.onJobDone) {
            this._callbacks.onJobDone(jobInfo.output, jobInfo.index);
        }
    }

    /**
     * The onError callback to call when a batch of jobs fails. Forwards the
     * raw input payloads of the failed batch (not the IJobInfo wrappers) to
     * the user callback, if provided.
     *
     * @param {IJobInfo[]} jobInfos The job infos of the failed batch.
     * @param {any}        error    The error.
     */
    private _onError(jobInfos: IJobInfo[], error: any) {
        const payloadsOfBatchThatFailed = jobInfos.map((jobInfo) => {
            return jobInfo.input;
        });
        if (this._callbacks && this._callbacks.onError) {
            this._callbacks.onError(payloadsOfBatchThatFailed, error);
        }
    }

    /**
     * The onQueueDone callback to call when the queue (all jobs) is done.
     * Restores input order, marks the queue done in the store, fires the user
     * callback, and resolves the `done` promise.
     *
     * @param {IJobInfo[]} outputJobs The output jobs.
     */
    private _onQueueDone(outputJobs: IJobInfo[]) {
        // Sort the output jobs by their original index (batches may finish
        // out of order).
        outputJobs.sort((a, b) => a.index - b.index);

        // Get the payloads of the outputs.
        const outputPayloads = outputJobs.map((jobInfo) => jobInfo.output);

        if (this._showInQueue) {
            doneInQueueStore(this._id);
        }
        if (this._callbacks && this._callbacks.onQueueDone) {
            this._callbacks.onQueueDone(outputPayloads);
        }

        // Also resolve the promise, in case promise is being used instead of
        // callbacks.
        this._doneResolveFunc(outputPayloads);
    }

    /**
     * The onProgress callback to update the progress, both in the queue store
     * (if shown) and via the user callback (if provided).
     *
     * @param {number} percent The percent of jobs that have been completed
     *                         (a 0-1 fraction: outputs.length /
     *                         numTotalJobs).
     */
    protected _onProgress(percent: number) {
        if (this._showInQueue) {
            updateProgressInQueueStore(this._id, percent);
        }
        if (this._callbacks && this._callbacks.onProgress) {
            this._callbacks.onProgress(percent);
        }
    }

    /**
     * Run a batch of jobs. Subclasses implement this with the actual work.
     *
     * @param {IJobInfo[]} inputBatch The batch of inputs to run.
     * @param {number}     procs      The number of processes to use to run
     *                                the batch.
     * @returns {Promise<IJobInfo[]>} A promise that resolves to the output
     *     when all the jobs are done. Put the output of each job in
     *     jobInfo.output.
     */
    public abstract runJobBatch(
        inputBatch: IJobInfo[],
        procs: number
    ): Promise<IJobInfo[]>;
}