Refactor routing in App component to enhance navigation and improve error handling by integrating dynamic routes and updating the NotFound route.

becarta
2025-05-23 12:43:00 +02:00
parent f40db0f5c9
commit a544759a3b
11127 changed files with 1647032 additions and 0 deletions

126
node_modules/p-queue/dist/index.d.ts generated vendored Normal file

@@ -0,0 +1,126 @@
import { EventEmitter } from 'eventemitter3';
import { type Queue, type RunFunction } from './queue.js';
import PriorityQueue from './priority-queue.js';
import { type QueueAddOptions, type Options, type TaskOptions } from './options.js';
type Task<TaskResultType> = ((options: TaskOptions) => PromiseLike<TaskResultType>) | ((options: TaskOptions) => TaskResultType);
type EventName = 'active' | 'idle' | 'empty' | 'add' | 'next' | 'completed' | 'error';
/**
Promise queue with concurrency control.
*/
export default class PQueue<QueueType extends Queue<RunFunction, EnqueueOptionsType> = PriorityQueue, EnqueueOptionsType extends QueueAddOptions = QueueAddOptions> extends EventEmitter<EventName> {
#private;
/**
Per-operation timeout in milliseconds. Operations fulfill once `timeout` elapses if they haven't already.
Applies to each future operation.
*/
timeout?: number;
constructor(options?: Options<QueueType, EnqueueOptionsType>);
get concurrency(): number;
set concurrency(newConcurrency: number);
/**
Updates the priority of a promise function by its id, affecting its execution order. Requires a defined concurrency limit to take effect.
For example, this can be used to prioritize a promise function to run earlier.
```js
import PQueue from 'p-queue';
const queue = new PQueue({concurrency: 1});
queue.add(async () => '🦄', {priority: 1});
queue.add(async () => '🦀', {priority: 0, id: '🦀'});
queue.add(async () => '🦄', {priority: 1});
queue.add(async () => '🦄', {priority: 1});
queue.setPriority('🦀', 2);
```
In this case, the promise function with `id: '🦀'` runs second.
You can also deprioritize a promise function to delay its execution:
```js
import PQueue from 'p-queue';
const queue = new PQueue({concurrency: 1});
queue.add(async () => '🦄', {priority: 1});
queue.add(async () => '🦀', {priority: 1, id: '🦀'});
queue.add(async () => '🦄');
queue.add(async () => '🦄', {priority: 0});
queue.setPriority('🦀', -1);
```
Here, the promise function with `id: '🦀'` executes last.
*/
setPriority(id: string, priority: number): void;
/**
Adds a sync or async task to the queue. Always returns a promise.
*/
add<TaskResultType>(function_: Task<TaskResultType>, options: {
throwOnTimeout: true;
} & Exclude<EnqueueOptionsType, 'throwOnTimeout'>): Promise<TaskResultType>;
add<TaskResultType>(function_: Task<TaskResultType>, options?: Partial<EnqueueOptionsType>): Promise<TaskResultType | void>;
/**
Same as `.add()`, but accepts an array of sync or async functions.
@returns A promise that resolves when all functions are resolved.
*/
addAll<TaskResultsType>(functions: ReadonlyArray<Task<TaskResultsType>>, options?: {
throwOnTimeout: true;
} & Partial<Exclude<EnqueueOptionsType, 'throwOnTimeout'>>): Promise<TaskResultsType[]>;
addAll<TaskResultsType>(functions: ReadonlyArray<Task<TaskResultsType>>, options?: Partial<EnqueueOptionsType>): Promise<Array<TaskResultsType | void>>;
/**
Start (or resume) executing enqueued tasks within the concurrency limit. There is no need to call this if the queue is not paused (via `options.autoStart = false` or by the `.pause()` method).
*/
start(): this;
/**
Put queue execution on hold.
*/
pause(): void;
/**
Clear the queue.
*/
clear(): void;
/**
Can be called multiple times. Useful if you for example add additional items at a later time.
@returns A promise that settles when the queue becomes empty.
*/
onEmpty(): Promise<void>;
/**
@returns A promise that settles when the queue size is less than the given limit: `queue.size < limit`.
If you want to avoid having the queue grow beyond a certain size you can `await queue.onSizeLessThan()` before adding a new item.
Note that this only limits the number of items waiting to start. There could still be up to `concurrency` jobs already running that this call does not include in its calculation.
*/
onSizeLessThan(limit: number): Promise<void>;
/**
The difference with `.onEmpty` is that `.onIdle` guarantees that all work from the queue has finished. `.onEmpty` merely signals that the queue is empty, but it could mean that some promises haven't completed yet.
@returns A promise that settles when the queue becomes empty, and all promises have completed; `queue.size === 0 && queue.pending === 0`.
*/
onIdle(): Promise<void>;
/**
Size of the queue, the number of queued items waiting to run.
*/
get size(): number;
/**
Size of the queue, filtered by the given options.
For example, this can be used to find the number of items remaining in the queue with a specific priority level.
*/
sizeBy(options: Readonly<Partial<EnqueueOptionsType>>): number;
/**
Number of running items (no longer in the queue).
*/
get pending(): number;
/**
Whether the queue is currently paused.
*/
get isPaused(): boolean;
}
export type { Queue } from './queue.js';
export { type QueueAddOptions, type Options } from './options.js';

353
node_modules/p-queue/dist/index.js generated vendored Normal file

@@ -0,0 +1,353 @@
import { EventEmitter } from 'eventemitter3';
import pTimeout, { TimeoutError } from 'p-timeout';
import PriorityQueue from './priority-queue.js';
/**
Promise queue with concurrency control.
*/
export default class PQueue extends EventEmitter {
#carryoverConcurrencyCount;
#isIntervalIgnored;
#intervalCount = 0;
#intervalCap;
#interval;
#intervalEnd = 0;
#intervalId;
#timeoutId;
#queue;
#queueClass;
#pending = 0;
// The `!` is needed because of https://github.com/microsoft/TypeScript/issues/32194
#concurrency;
#isPaused;
#throwOnTimeout;
// Use to assign a unique identifier to a promise function, if not explicitly specified
#idAssigner = 1n;
/**
Per-operation timeout in milliseconds. Operations fulfill once `timeout` elapses if they haven't already.
Applies to each future operation.
*/
timeout;
// TODO: The `throwOnTimeout` option should affect the return types of `add()` and `addAll()`
constructor(options) {
super();
// eslint-disable-next-line @typescript-eslint/consistent-type-assertions
options = {
carryoverConcurrencyCount: false,
intervalCap: Number.POSITIVE_INFINITY,
interval: 0,
concurrency: Number.POSITIVE_INFINITY,
autoStart: true,
queueClass: PriorityQueue,
...options,
};
if (!(typeof options.intervalCap === 'number' && options.intervalCap >= 1)) {
throw new TypeError(`Expected \`intervalCap\` to be a number from 1 and up, got \`${options.intervalCap?.toString() ?? ''}\` (${typeof options.intervalCap})`);
}
if (options.interval === undefined || !(Number.isFinite(options.interval) && options.interval >= 0)) {
throw new TypeError(`Expected \`interval\` to be a finite number >= 0, got \`${options.interval?.toString() ?? ''}\` (${typeof options.interval})`);
}
this.#carryoverConcurrencyCount = options.carryoverConcurrencyCount;
this.#isIntervalIgnored = options.intervalCap === Number.POSITIVE_INFINITY || options.interval === 0;
this.#intervalCap = options.intervalCap;
this.#interval = options.interval;
this.#queue = new options.queueClass();
this.#queueClass = options.queueClass;
this.concurrency = options.concurrency;
this.timeout = options.timeout;
this.#throwOnTimeout = options.throwOnTimeout === true;
this.#isPaused = options.autoStart === false;
}
get #doesIntervalAllowAnother() {
return this.#isIntervalIgnored || this.#intervalCount < this.#intervalCap;
}
get #doesConcurrentAllowAnother() {
return this.#pending < this.#concurrency;
}
#next() {
this.#pending--;
this.#tryToStartAnother();
this.emit('next');
}
#onResumeInterval() {
this.#onInterval();
this.#initializeIntervalIfNeeded();
this.#timeoutId = undefined;
}
get #isIntervalPaused() {
const now = Date.now();
if (this.#intervalId === undefined) {
const delay = this.#intervalEnd - now;
if (delay < 0) {
// Act as the interval was done
// We don't need to resume it here because it will be resumed on line 160
this.#intervalCount = (this.#carryoverConcurrencyCount) ? this.#pending : 0;
}
else {
// Act as the interval is pending
if (this.#timeoutId === undefined) {
this.#timeoutId = setTimeout(() => {
this.#onResumeInterval();
}, delay);
}
return true;
}
}
return false;
}
#tryToStartAnother() {
if (this.#queue.size === 0) {
// We can clear the interval ("pause")
// Because we can redo it later ("resume")
if (this.#intervalId) {
clearInterval(this.#intervalId);
}
this.#intervalId = undefined;
this.emit('empty');
if (this.#pending === 0) {
this.emit('idle');
}
return false;
}
if (!this.#isPaused) {
const canInitializeInterval = !this.#isIntervalPaused;
if (this.#doesIntervalAllowAnother && this.#doesConcurrentAllowAnother) {
const job = this.#queue.dequeue();
if (!job) {
return false;
}
this.emit('active');
job();
if (canInitializeInterval) {
this.#initializeIntervalIfNeeded();
}
return true;
}
}
return false;
}
#initializeIntervalIfNeeded() {
if (this.#isIntervalIgnored || this.#intervalId !== undefined) {
return;
}
this.#intervalId = setInterval(() => {
this.#onInterval();
}, this.#interval);
this.#intervalEnd = Date.now() + this.#interval;
}
#onInterval() {
if (this.#intervalCount === 0 && this.#pending === 0 && this.#intervalId) {
clearInterval(this.#intervalId);
this.#intervalId = undefined;
}
this.#intervalCount = this.#carryoverConcurrencyCount ? this.#pending : 0;
this.#processQueue();
}
/**
Executes all queued functions until it reaches the limit.
*/
#processQueue() {
// eslint-disable-next-line no-empty
while (this.#tryToStartAnother()) { }
}
get concurrency() {
return this.#concurrency;
}
set concurrency(newConcurrency) {
if (!(typeof newConcurrency === 'number' && newConcurrency >= 1)) {
throw new TypeError(`Expected \`concurrency\` to be a number from 1 and up, got \`${newConcurrency}\` (${typeof newConcurrency})`);
}
this.#concurrency = newConcurrency;
this.#processQueue();
}
async #throwOnAbort(signal) {
return new Promise((_resolve, reject) => {
signal.addEventListener('abort', () => {
reject(signal.reason);
}, { once: true });
});
}
/**
Updates the priority of a promise function by its id, affecting its execution order. Requires a defined concurrency limit to take effect.
For example, this can be used to prioritize a promise function to run earlier.
```js
import PQueue from 'p-queue';
const queue = new PQueue({concurrency: 1});
queue.add(async () => '🦄', {priority: 1});
queue.add(async () => '🦀', {priority: 0, id: '🦀'});
queue.add(async () => '🦄', {priority: 1});
queue.add(async () => '🦄', {priority: 1});
queue.setPriority('🦀', 2);
```
In this case, the promise function with `id: '🦀'` runs second.
You can also deprioritize a promise function to delay its execution:
```js
import PQueue from 'p-queue';
const queue = new PQueue({concurrency: 1});
queue.add(async () => '🦄', {priority: 1});
queue.add(async () => '🦀', {priority: 1, id: '🦀'});
queue.add(async () => '🦄');
queue.add(async () => '🦄', {priority: 0});
queue.setPriority('🦀', -1);
```
Here, the promise function with `id: '🦀'` executes last.
*/
setPriority(id, priority) {
this.#queue.setPriority(id, priority);
}
async add(function_, options = {}) {
// In case `id` is not defined.
options.id ??= (this.#idAssigner++).toString();
options = {
timeout: this.timeout,
throwOnTimeout: this.#throwOnTimeout,
...options,
};
return new Promise((resolve, reject) => {
this.#queue.enqueue(async () => {
this.#pending++;
this.#intervalCount++;
try {
options.signal?.throwIfAborted();
let operation = function_({ signal: options.signal });
if (options.timeout) {
operation = pTimeout(Promise.resolve(operation), { milliseconds: options.timeout });
}
if (options.signal) {
operation = Promise.race([operation, this.#throwOnAbort(options.signal)]);
}
const result = await operation;
resolve(result);
this.emit('completed', result);
}
catch (error) {
if (error instanceof TimeoutError && !options.throwOnTimeout) {
resolve();
return;
}
reject(error);
this.emit('error', error);
}
finally {
this.#next();
}
}, options);
this.emit('add');
this.#tryToStartAnother();
});
}
async addAll(functions, options) {
return Promise.all(functions.map(async (function_) => this.add(function_, options)));
}
/**
Start (or resume) executing enqueued tasks within the concurrency limit. There is no need to call this if the queue is not paused (via `options.autoStart = false` or by the `.pause()` method).
*/
start() {
if (!this.#isPaused) {
return this;
}
this.#isPaused = false;
this.#processQueue();
return this;
}
/**
Put queue execution on hold.
*/
pause() {
this.#isPaused = true;
}
/**
Clear the queue.
*/
clear() {
this.#queue = new this.#queueClass();
}
/**
Can be called multiple times. Useful if you for example add additional items at a later time.
@returns A promise that settles when the queue becomes empty.
*/
async onEmpty() {
// Instantly resolve if the queue is empty
if (this.#queue.size === 0) {
return;
}
await this.#onEvent('empty');
}
/**
@returns A promise that settles when the queue size is less than the given limit: `queue.size < limit`.
If you want to avoid having the queue grow beyond a certain size you can `await queue.onSizeLessThan()` before adding a new item.
Note that this only limits the number of items waiting to start. There could still be up to `concurrency` jobs already running that this call does not include in its calculation.
*/
async onSizeLessThan(limit) {
// Instantly resolve if the queue is empty.
if (this.#queue.size < limit) {
return;
}
await this.#onEvent('next', () => this.#queue.size < limit);
}
/**
The difference with `.onEmpty` is that `.onIdle` guarantees that all work from the queue has finished. `.onEmpty` merely signals that the queue is empty, but it could mean that some promises haven't completed yet.
@returns A promise that settles when the queue becomes empty, and all promises have completed; `queue.size === 0 && queue.pending === 0`.
*/
async onIdle() {
// Instantly resolve if none pending and if nothing else is queued
if (this.#pending === 0 && this.#queue.size === 0) {
return;
}
await this.#onEvent('idle');
}
async #onEvent(event, filter) {
return new Promise(resolve => {
const listener = () => {
if (filter && !filter()) {
return;
}
this.off(event, listener);
resolve();
};
this.on(event, listener);
});
}
/**
Size of the queue, the number of queued items waiting to run.
*/
get size() {
return this.#queue.size;
}
/**
Size of the queue, filtered by the given options.
For example, this can be used to find the number of items remaining in the queue with a specific priority level.
*/
sizeBy(options) {
// eslint-disable-next-line unicorn/no-array-callback-reference
return this.#queue.filter(options).length;
}
/**
Number of running items (no longer in the queue).
*/
get pending() {
return this.#pending;
}
/**
Whether the queue is currently paused.
*/
get isPaused() {
return this.#isPaused;
}
}

1
node_modules/p-queue/dist/lower-bound.d.ts generated vendored Normal file

@@ -0,0 +1 @@
export default function lowerBound<T>(array: readonly T[], value: T, comparator: (a: T, b: T) => number): number;

18
node_modules/p-queue/dist/lower-bound.js generated vendored Normal file

@@ -0,0 +1,18 @@
// Port of lower_bound from https://en.cppreference.com/w/cpp/algorithm/lower_bound
// Used to compute insertion index to keep queue sorted after insertion
export default function lowerBound(array, value, comparator) {
let first = 0;
let count = array.length;
while (count > 0) {
const step = Math.trunc(count / 2);
let it = first + step;
if (comparator(array[it], value) <= 0) {
first = ++it;
count -= step + 1;
}
else {
count = step;
}
}
return first;
}
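Illustrative sketch (not part of the vendored file) of how `PriorityQueue` uses this helper: with the comparator `(a, b) => b.priority - a.priority`, the returned index places a new element after existing entries of equal priority, which preserves FIFO order within a priority level.
```js
import lowerBound from './lower-bound.js';
const queue = [{priority: 5}, {priority: 3}, {priority: 1}]; // kept sorted by descending priority
const index = lowerBound(queue, {priority: 3}, (a, b) => b.priority - a.priority);
console.log(index);
//=> 2 (after the existing priority-3 element, before the priority-1 element)
```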

106
node_modules/p-queue/dist/options.d.ts generated vendored Normal file

@@ -0,0 +1,106 @@
import { type Queue, type RunFunction } from './queue.js';
type TimeoutOptions = {
/**
Per-operation timeout in milliseconds. Operations fulfill once `timeout` elapses if they haven't already.
*/
timeout?: number;
/**
Whether or not a timeout is considered an exception.
@default false
*/
throwOnTimeout?: boolean;
};
export type Options<QueueType extends Queue<RunFunction, QueueOptions>, QueueOptions extends QueueAddOptions> = {
/**
Concurrency limit.
Minimum: `1`.
@default Infinity
*/
readonly concurrency?: number;
/**
Whether queued tasks within the concurrency limit are auto-executed as soon as they're added.
@default true
*/
readonly autoStart?: boolean;
/**
Class with `enqueue` and `dequeue` methods, and a `size` getter. See the [Custom QueueClass](https://github.com/sindresorhus/p-queue#custom-queueclass) section.
*/
readonly queueClass?: new () => QueueType;
/**
The max number of runs in the given interval of time.
Minimum: `1`.
@default Infinity
*/
readonly intervalCap?: number;
/**
The length of time in milliseconds before the interval count resets. Must be finite.
Minimum: `0`.
@default 0
*/
readonly interval?: number;
/**
Whether the task must finish in the given interval or will be carried over into the next interval count.
@default false
*/
readonly carryoverConcurrencyCount?: boolean;
} & TimeoutOptions;
export type QueueAddOptions = {
/**
Priority of operation. Operations with greater priority will be scheduled first.
@default 0
*/
readonly priority?: number;
/**
Unique identifier for the promise function, used to update its priority before execution. If not specified, it is auto-assigned an incrementing BigInt starting from `1n`.
*/
id?: string;
} & TaskOptions & TimeoutOptions;
export type TaskOptions = {
/**
[`AbortSignal`](https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal) for cancellation of the operation. When aborted, it will be removed from the queue and the `queue.add()` call will reject with an `AbortError`. If the operation is already running, the signal will need to be handled by the operation itself.
@example
```
import PQueue, {AbortError} from 'p-queue';
import got, {CancelError} from 'got';
const queue = new PQueue();
const controller = new AbortController();
try {
await queue.add(({signal}) => {
const request = got('https://sindresorhus.com');
signal.addEventListener('abort', () => {
request.cancel();
});
try {
return await request;
} catch (error) {
if (!(error instanceof CancelError)) {
throw error;
}
}
}, {signal: controller.signal});
} catch (error) {
if (!(error instanceof AbortError)) {
throw error;
}
}
```
*/
readonly signal?: AbortSignal;
};
export {};

1
node_modules/p-queue/dist/options.js generated vendored Normal file

@@ -0,0 +1 @@
export {};

13
node_modules/p-queue/dist/priority-queue.d.ts generated vendored Normal file

@@ -0,0 +1,13 @@
import { type Queue, type RunFunction } from './queue.js';
import { type QueueAddOptions } from './options.js';
export type PriorityQueueOptions = {
priority?: number;
} & QueueAddOptions;
export default class PriorityQueue implements Queue<RunFunction, PriorityQueueOptions> {
#private;
enqueue(run: RunFunction, options?: Partial<PriorityQueueOptions>): void;
setPriority(id: string, priority: number): void;
dequeue(): RunFunction | undefined;
filter(options: Readonly<Partial<PriorityQueueOptions>>): RunFunction[];
get size(): number;
}

39
node_modules/p-queue/dist/priority-queue.js generated vendored Normal file

@@ -0,0 +1,39 @@
import lowerBound from './lower-bound.js';
export default class PriorityQueue {
#queue = [];
enqueue(run, options) {
options = {
priority: 0,
...options,
};
const element = {
priority: options.priority,
id: options.id,
run,
};
if (this.size === 0 || this.#queue[this.size - 1].priority >= options.priority) {
this.#queue.push(element);
return;
}
const index = lowerBound(this.#queue, element, (a, b) => b.priority - a.priority);
this.#queue.splice(index, 0, element);
}
setPriority(id, priority) {
const index = this.#queue.findIndex((element) => element.id === id);
if (index === -1) {
throw new ReferenceError(`No promise function with the id "${id}" exists in the queue.`);
}
const [item] = this.#queue.splice(index, 1);
this.enqueue(item.run, { priority, id });
}
dequeue() {
const item = this.#queue.shift();
return item?.run;
}
filter(options) {
return this.#queue.filter((element) => element.priority === options.priority).map((element) => element.run);
}
get size() {
return this.#queue.length;
}
}

8
node_modules/p-queue/dist/queue.d.ts generated vendored Normal file

@@ -0,0 +1,8 @@
export type RunFunction = () => Promise<unknown>;
export type Queue<Element, Options> = {
size: number;
filter: (options: Readonly<Partial<Options>>) => Element[];
dequeue: () => Element | undefined;
enqueue: (run: Element, options?: Partial<Options>) => void;
setPriority: (id: string, priority: number) => void;
};

1
node_modules/p-queue/dist/queue.js generated vendored Normal file

@@ -0,0 +1 @@
export {};

9
node_modules/p-queue/license generated vendored Normal file

@@ -0,0 +1,9 @@
MIT License
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (https://sindresorhus.com)
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

89
node_modules/p-queue/package.json generated vendored Normal file

@@ -0,0 +1,89 @@
{
"name": "p-queue",
"version": "8.1.0",
"description": "Promise queue with concurrency control",
"license": "MIT",
"repository": "sindresorhus/p-queue",
"funding": "https://github.com/sponsors/sindresorhus",
"type": "module",
"exports": {
"types": "./dist/index.d.ts",
"default": "./dist/index.js"
},
"sideEffects": false,
"engines": {
"node": ">=18"
},
"scripts": {
"build": "del-cli dist && tsc",
"test": "xo && ava && del-cli dist && tsc && tsd",
"bench": "node --import=tsx/esm bench.ts",
"prepublishOnly": "del-cli dist && tsc"
},
"files": [
"dist"
],
"types": "dist/index.d.ts",
"keywords": [
"promise",
"queue",
"enqueue",
"limit",
"limited",
"concurrency",
"throttle",
"throat",
"rate",
"batch",
"ratelimit",
"priority",
"priorityqueue",
"fifo",
"job",
"task",
"async",
"await",
"promises",
"bluebird"
],
"dependencies": {
"eventemitter3": "^5.0.1",
"p-timeout": "^6.1.2"
},
"devDependencies": {
"@sindresorhus/tsconfig": "^5.0.0",
"@types/benchmark": "^2.1.5",
"@types/node": "^20.10.4",
"ava": "^5.3.1",
"benchmark": "^2.1.4",
"del-cli": "^5.1.0",
"delay": "^6.0.0",
"in-range": "^3.0.0",
"p-defer": "^4.0.0",
"random-int": "^3.0.0",
"time-span": "^5.1.0",
"tsd": "^0.29.0",
"tsx": "^4.6.2",
"typescript": "^5.3.3",
"xo": "^0.56.0"
},
"ava": {
"workerThreads": false,
"files": [
"test/**"
],
"extensions": {
"ts": "module"
},
"nodeArguments": [
"--import=tsx/esm"
]
},
"xo": {
"rules": {
"@typescript-eslint/member-ordering": "off",
"@typescript-eslint/no-floating-promises": "off",
"@typescript-eslint/no-invalid-void-type": "off"
}
}
}

546
node_modules/p-queue/readme.md generated vendored Normal file

@@ -0,0 +1,546 @@
# p-queue
> Promise queue with concurrency control
Useful for rate-limiting async (or sync) operations. For example, when interacting with a REST API or when doing CPU/memory intensive tasks.
For servers, you probably want a Redis-backed [job queue](https://github.com/sindresorhus/awesome-nodejs#job-queues) instead.
Note that the project is feature complete. We are happy to review pull requests, but we don't plan any further development. We are also not answering email support questions.
## Install
```sh
npm install p-queue
```
**Warning:** This package is native [ESM](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Modules) and no longer provides a CommonJS export. If your project uses CommonJS, you'll have to [convert to ESM](https://gist.github.com/sindresorhus/a39789f98801d908bbc7ff3ecc99d99c). Please don't open issues for questions regarding CommonJS / ESM.
## Usage
Here we run only one promise at a time. For example, set `concurrency` to 4 to run four promises at the same time.
```js
import PQueue from 'p-queue';
import got from 'got';
const queue = new PQueue({concurrency: 1});
(async () => {
await queue.add(() => got('https://sindresorhus.com'));
console.log('Done: sindresorhus.com');
})();
(async () => {
await queue.add(() => got('https://avajs.dev'));
console.log('Done: avajs.dev');
})();
(async () => {
const task = await getUnicornTask();
await queue.add(task);
console.log('Done: Unicorn task');
})();
```
## API
### PQueue(options?)
Returns a new `queue` instance, which is an [`EventEmitter3`](https://github.com/primus/eventemitter3) subclass.
#### options
Type: `object`
##### concurrency
Type: `number`\
Default: `Infinity`\
Minimum: `1`
Concurrency limit.
##### timeout
Type: `number`
Per-operation timeout in milliseconds. Operations fulfill once `timeout` elapses if they haven't already.
##### throwOnTimeout
Type: `boolean`\
Default: `false`
Whether or not a timeout is considered an exception.
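To make the combination concrete, here is a minimal sketch (the 1-second limit and the slow task are illustrative assumptions, not part of the upstream docs):
```js
import PQueue from 'p-queue';
// With throwOnTimeout: false (the default), a task that exceeds `timeout` resolves with `undefined`;
// with throwOnTimeout: true, the promise returned by `.add()` rejects instead.
const queue = new PQueue({timeout: 1000, throwOnTimeout: true});
try {
	await queue.add(() => slowNetworkCall()); // hypothetical task that may take longer than 1000 ms
} catch (error) {
	console.error('The task timed out or failed:', error);
}
```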
##### autoStart
Type: `boolean`\
Default: `true`
Whether queued tasks within the concurrency limit are auto-executed as soon as they're added.
##### queueClass
Type: `Function`
Class with `enqueue` and `dequeue` methods, and a `size` getter. See the [Custom QueueClass](#custom-queueclass) section.
##### intervalCap
Type: `number`\
Default: `Infinity`\
Minimum: `1`
The max number of runs in the given interval of time.
##### interval
Type: `number`\
Default: `0`\
Minimum: `0`
The length of time in milliseconds before the interval count resets. Must be finite.
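As a sketch of how `interval` and `intervalCap` combine for rate limiting (the endpoint URL is a placeholder):
```js
import PQueue from 'p-queue';
// Start at most 2 tasks per 1000 ms window, regardless of how quickly they finish.
const queue = new PQueue({intervalCap: 2, interval: 1000});
for (let index = 0; index < 6; index++) {
	queue.add(() => fetch(`https://example.com/api/${index}`)); // placeholder endpoint
}
// The six requests start in batches of two, roughly one batch per second.
```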
##### carryoverConcurrencyCount
Type: `boolean`\
Default: `false`
If `true`, any [pending](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise) Promises are carried over into the next interval and counted against the `intervalCap`. If `false`, pending Promises do not count towards the next `intervalCap`.
### queue
`PQueue` instance.
#### .add(fn, options?)
Adds a sync or async task to the queue.
Returns a promise with the return value of `fn`.
Note: If your items can potentially throw an exception, you must handle those errors from the returned Promise or they may be reported as an unhandled Promise rejection and potentially cause your process to exit immediately.
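A minimal sketch of the note above, attaching a rejection handler to the promise returned by `.add()`:
```js
import PQueue from 'p-queue';
const queue = new PQueue({concurrency: 1});
queue.add(async () => {
	throw new Error('boom');
}).catch(error => {
	// Without this handler, the failure would surface as an unhandled promise rejection.
	console.error('Task failed:', error.message);
});
```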
##### fn
Type: `Function`
Promise-returning/async function. When executed, it will receive `{signal}` as the first argument.
#### options
Type: `object`
##### priority
Type: `number`\
Default: `0`
Priority of operation. Operations with greater priority will be scheduled first.
##### id
Type `string`
Unique identifier for the promise function, used to update its priority before execution. If not specified, it is auto-assigned an incrementing BigInt starting from `1n`.
##### signal
[`AbortSignal`](https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal) for cancellation of the operation. When aborted, it will be removed from the queue and the `queue.add()` call will reject with an [error](https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal/reason). If the operation is already running, the signal will need to be handled by the operation itself.
```js
import PQueue from 'p-queue';
import got, {CancelError} from 'got';
const queue = new PQueue();
const controller = new AbortController();
try {
await queue.add(({signal}) => {
const request = got('https://sindresorhus.com');
signal.addEventListener('abort', () => {
request.cancel();
});
try {
return await request;
} catch (error) {
if (!(error instanceof CancelError)) {
throw error;
}
}
}, {signal: controller.signal});
} catch (error) {
if (!(error instanceof DOMException)) {
throw error;
}
}
```
#### .addAll(fns, options?)
Same as `.add()`, but accepts an array of sync or async functions and returns a promise that resolves when all functions are resolved.
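For example, a small sketch that adds several tasks at once and collects their results:
```js
import PQueue from 'p-queue';
const queue = new PQueue({concurrency: 2});
const results = await queue.addAll([
	async () => 'first',
	async () => 'second',
	async () => 'third',
]);
console.log(results);
//=> ['first', 'second', 'third']
```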
#### .pause()
Put queue execution on hold.
#### .start()
Start (or resume) executing enqueued tasks within the concurrency limit. There is no need to call this if the queue is not paused (via `options.autoStart = false` or by the `.pause()` method).
Returns `this` (the instance).
#### .onEmpty()
Returns a promise that settles when the queue becomes empty.
Can be called multiple times. Useful if you, for example, add additional items at a later time.
#### .onIdle()
Returns a promise that settles when the queue becomes empty, and all promises have completed; `queue.size === 0 && queue.pending === 0`.
The difference with `.onEmpty` is that `.onIdle` guarantees that all work from the queue has finished. `.onEmpty` merely signals that the queue is empty, but it could mean that some promises haven't completed yet.
#### .onSizeLessThan(limit)
Returns a promise that settles when the queue size is less than the given limit: `queue.size < limit`.
If you want to avoid having the queue grow beyond a certain size you can `await queue.onSizeLessThan()` before adding a new item.
Note that this only limits the number of items waiting to start. There could still be up to `concurrency` jobs already running that this call does not include in its calculation.
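A sketch of using this for backpressure (the `taskFunctions` iterable and the limit of 10 are assumptions for illustration):
```js
import PQueue from 'p-queue';
const queue = new PQueue({concurrency: 2});
for (const taskFunction of taskFunctions) { // assumed to be an iterable of async functions
	// Wait until fewer than 10 tasks are waiting before adding the next one.
	await queue.onSizeLessThan(10);
	queue.add(taskFunction);
}
```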
#### .clear()
Clear the queue.
#### .size
Size of the queue, the number of queued items waiting to run.
#### .sizeBy(options)
Size of the queue, filtered by the given options.
For example, this can be used to find the number of items remaining in the queue with a specific priority level.
```js
import PQueue from 'p-queue';
const queue = new PQueue();
queue.add(async () => '🦄', {priority: 1});
queue.add(async () => '🦄', {priority: 0});
queue.add(async () => '🦄', {priority: 1});
console.log(queue.sizeBy({priority: 1}));
//=> 2
console.log(queue.sizeBy({priority: 0}));
//=> 1
```
#### .setPriority(id, priority)
Updates the priority of a promise function by its id, affecting its execution order. Requires a defined concurrency limit to take effect.
For example, this can be used to prioritize a promise function to run earlier.
```js
import PQueue from 'p-queue';
const queue = new PQueue({concurrency: 1});
queue.add(async () => '🦄', {priority: 1});
queue.add(async () => '🦀', {priority: 0, id: '🦀'});
queue.add(async () => '🦄', {priority: 1});
queue.add(async () => '🦄', {priority: 1});
queue.setPriority('🦀', 2);
```
In this case, the promise function with `id: '🦀'` runs second.
You can also deprioritize a promise function to delay its execution:
```js
import PQueue from 'p-queue';
const queue = new PQueue({concurrency: 1});
queue.add(async () => '🦄', {priority: 1});
queue.add(async () => '🦀', {priority: 1, id: '🦀'});
queue.add(async () => '🦄');
queue.add(async () => '🦄', {priority: 0});
queue.setPriority('🦀', -1);
```
Here, the promise function with `id: '🦀'` executes last.
#### .pending
Number of running items (no longer in the queue).
#### [.timeout](#timeout)
#### [.concurrency](#concurrency)
#### .isPaused
Whether the queue is currently paused.
## Events
#### active
Emitted as each item is processed in the queue for the purpose of tracking progress.
```js
import delay from 'delay';
import PQueue from 'p-queue';
const queue = new PQueue({concurrency: 2});
let count = 0;
queue.on('active', () => {
console.log(`Working on item #${++count}. Size: ${queue.size} Pending: ${queue.pending}`);
});
queue.add(() => Promise.resolve());
queue.add(() => delay(2000));
queue.add(() => Promise.resolve());
queue.add(() => Promise.resolve());
queue.add(() => delay(500));
```
#### completed
Emitted when an item completes without error.
```js
import delay from 'delay';
import PQueue from 'p-queue';
const queue = new PQueue({concurrency: 2});
queue.on('completed', result => {
console.log(result);
});
queue.add(() => Promise.resolve('hello, world!'));
```
#### error
Emitted if an item throws an error.
```js
import delay from 'delay';
import PQueue from 'p-queue';
const queue = new PQueue({concurrency: 2});
queue.on('error', error => {
console.error(error);
});
queue.add(() => Promise.reject(new Error('error')));
```
#### empty
Emitted every time the queue becomes empty.
Useful if you, for example, add additional items at a later time.
#### idle
Emitted every time the queue becomes empty and all promises have completed; `queue.size === 0 && queue.pending === 0`.
The difference with `empty` is that `idle` guarantees that all work from the queue has finished. `empty` merely signals that the queue is empty, but it could mean that some promises haven't completed yet.
```js
import delay from 'delay';
import PQueue from 'p-queue';
const queue = new PQueue();
queue.on('idle', () => {
console.log(`Queue is idle. Size: ${queue.size} Pending: ${queue.pending}`);
});
const job1 = queue.add(() => delay(2000));
const job2 = queue.add(() => delay(500));
await job1;
await job2;
// => 'Queue is idle. Size: 0 Pending: 0'
await queue.add(() => delay(600));
// => 'Queue is idle. Size: 0 Pending: 0'
```
The `idle` event is emitted every time the queue reaches an idle state. On the other hand, the promise the `onIdle()` function returns resolves once the queue becomes idle instead of every time the queue is idle.
#### add
Emitted every time the add method is called and the number of pending or queued tasks is increased.
#### next
Emitted every time a task is completed and the number of pending or queued tasks is decreased. This is emitted regardless of whether the task completed normally or with an error.
```js
import delay from 'delay';
import PQueue from 'p-queue';
const queue = new PQueue();
queue.on('add', () => {
console.log(`Task is added. Size: ${queue.size} Pending: ${queue.pending}`);
});
queue.on('next', () => {
console.log(`Task is completed. Size: ${queue.size} Pending: ${queue.pending}`);
});
const job1 = queue.add(() => delay(2000));
const job2 = queue.add(() => delay(500));
await job1;
await job2;
//=> 'Task is added. Size: 0 Pending: 1'
//=> 'Task is added. Size: 0 Pending: 2'
await queue.add(() => delay(600));
//=> 'Task is completed. Size: 0 Pending: 1'
//=> 'Task is completed. Size: 0 Pending: 0'
```
## Advanced example
A more advanced example to help you understand the flow.
```js
import delay from 'delay';
import PQueue from 'p-queue';
const queue = new PQueue({concurrency: 1});
(async () => {
await delay(200);
console.log(`8. Pending promises: ${queue.pending}`);
//=> '8. Pending promises: 0'
(async () => {
await queue.add(async () => '🐙');
console.log('11. Resolved')
})();
console.log('9. Added 🐙');
console.log(`10. Pending promises: ${queue.pending}`);
//=> '10. Pending promises: 1'
await queue.onIdle();
console.log('12. All work is done');
})();
(async () => {
await queue.add(async () => '🦄');
console.log('5. Resolved')
})();
console.log('1. Added 🦄');
(async () => {
await queue.add(async () => '🐴');
console.log('6. Resolved')
})();
console.log('2. Added 🐴');
(async () => {
await queue.onEmpty();
console.log('7. Queue is empty');
})();
console.log(`3. Queue size: ${queue.size}`);
//=> '3. Queue size: 1'
console.log(`4. Pending promises: ${queue.pending}`);
//=> '4. Pending promises: 1'
```
```
$ node example.js
1. Added 🦄
2. Added 🐴
3. Queue size: 1
4. Pending promises: 1
5. Resolved 🦄
6. Resolved 🐴
7. Queue is empty
8. Pending promises: 0
9. Added 🐙
10. Pending promises: 1
11. Resolved 🐙
12. All work is done
```
## Custom QueueClass
For implementing more complex scheduling policies, you can provide a QueueClass in the options:
```js
import PQueue from 'p-queue';
class QueueClass {
constructor() {
this._queue = [];
}
enqueue(run, options) {
this._queue.push(run);
}
dequeue() {
return this._queue.shift();
}
get size() {
return this._queue.length;
}
filter(options) {
return this._queue;
}
}
const queue = new PQueue({queueClass: QueueClass});
```
`p-queue` will call the corresponding methods to put operations into and get operations from this queue.
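As a further sketch (not from the upstream docs), here is a LIFO policy where the most recently added task runs first; `setPriority` is included because the `Queue` type in `dist/queue.d.ts` declares it:
```js
import PQueue from 'p-queue';
class StackClass {
	#stack = [];
	enqueue(run, options) {
		this.#stack.push(run);
	}
	dequeue() {
		return this.#stack.pop();
	}
	setPriority(id, priority) {
		// A plain stack has no priorities, so this sketch simply rejects the call.
		throw new Error('Priorities are not supported by this queue class.');
	}
	filter(options) {
		// Ignores the options, so `sizeBy()` reports the full size in this sketch.
		return [...this.#stack];
	}
	get size() {
		return this.#stack.length;
	}
}
const queue = new PQueue({queueClass: StackClass});
```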
## FAQ
#### How do the `concurrency` and `intervalCap` options affect each other?
They are just different constraints. The `concurrency` option limits how many things run at the same time. The `intervalCap` option limits how many things run in total during the interval (over time).
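A short sketch combining the two constraints (the numbers are arbitrary):
```js
import PQueue from 'p-queue';
// At most 3 tasks run at the same time, and at most 10 tasks may start
// within any 60-second interval; the two limits are enforced independently.
const queue = new PQueue({
	concurrency: 3,
	intervalCap: 10,
	interval: 60_000,
});
```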
## Maintainers
- [Sindre Sorhus](https://github.com/sindresorhus)
- [Richie Bendall](https://github.com/Richienb)
## Related
- [p-limit](https://github.com/sindresorhus/p-limit) - Run multiple promise-returning & async functions with limited concurrency
- [p-throttle](https://github.com/sindresorhus/p-throttle) - Throttle promise-returning & async functions
- [p-debounce](https://github.com/sindresorhus/p-debounce) - Debounce promise-returning & async functions
- [p-all](https://github.com/sindresorhus/p-all) - Run promise-returning & async functions concurrently with optional limited concurrency
- [More…](https://github.com/sindresorhus/promise-fun)