diff --git a/package-lock.json b/package-lock.json
index 1299c61e4..fef6de585 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -15,7 +15,6 @@
         "@node-rs/crc32": "1.10.3",
         "7zip-min": "1.4.4",
         "archiver": "7.0.1",
-        "async": "3.2.5",
         "async-mutex": "0.5.0",
         "chalk": "5.3.0",
         "class-transformer": "0.5.1",
diff --git a/package.json b/package.json
index 981e3d9ac..5a1de6326 100644
--- a/package.json
+++ b/package.json
@@ -73,7 +73,6 @@
     "@node-rs/crc32": "1.10.3",
     "7zip-min": "1.4.4",
     "archiver": "7.0.1",
-    "async": "3.2.5",
     "async-mutex": "0.5.0",
     "chalk": "5.3.0",
     "class-transformer": "0.5.1",
diff --git a/src/driveSemaphore.ts b/src/driveSemaphore.ts
index 74a1eb0f4..7fc90cb9f 100644
--- a/src/driveSemaphore.ts
+++ b/src/driveSemaphore.ts
@@ -1,9 +1,9 @@
 import path from 'node:path';
 
-import async, { AsyncResultCallback } from 'async';
 import { Mutex, Semaphore } from 'async-mutex';
 
 import Constants from './constants.js';
+import async from './polyfill/async.js';
 import FsPoly from './polyfill/fsPoly.js';
 import File from './types/files/file.js';
 
@@ -38,17 +38,16 @@
     return async.mapLimit(
       files,
       Constants.MAX_FS_THREADS,
-      async (file, callback: AsyncResultCallback<V, Error>) => {
+      async (file) => {
         try {
-          const val = await this.processFile(file, runnable, disks);
-          callback(undefined, val);
+          return await this.processFile(file, runnable, disks);
         } catch (error) {
           if (error instanceof Error) {
-            callback(error);
+            throw error;
           } else if (typeof error === 'string') {
-            callback(new Error(error));
+            throw new Error(error);
           } else {
-            callback(new Error('failed to execute runnable'));
+            throw new Error('failed to execute runnable');
           }
         }
       },
diff --git a/src/igir.ts b/src/igir.ts
index 44a9670c1..12f380650 100644
--- a/src/igir.ts
+++ b/src/igir.ts
@@ -1,4 +1,3 @@
-import async from 'async';
 import chalk from 'chalk';
 import isAdmin from 'is-admin';
 
@@ -30,6 +29,7 @@ import ROMIndexer from './modules/romIndexer.js';
 import ROMScanner from './modules/romScanner.js';
 import StatusGenerator from './modules/statusGenerator.js';
 import ArrayPoly from './polyfill/arrayPoly.js';
+import async from './polyfill/async.js';
 import FsPoly from './polyfill/fsPoly.js';
 import Timer from './timer.js';
 import DAT from './types/dats/dat.js';
@@ -101,7 +101,7 @@
 
     // Process every DAT
     datProcessProgressBar.logTrace(`processing ${dats.length.toLocaleString()} DAT${dats.length !== 1 ? 's' : ''}`);
-    await async.eachLimit(dats, this.options.getDatThreads(), async (dat, callback) => {
+    await async.eachLimit(dats, this.options.getDatThreads(), async (dat) => {
       await datProcessProgressBar.incrementProgress();
 
       const progressBar = await this.logger.addProgressBar(
@@ -173,7 +173,6 @@
       }
 
       await datProcessProgressBar.incrementDone();
-      callback();
     });
 
     datProcessProgressBar.logTrace(`done processing ${dats.length.toLocaleString()} DAT${dats.length !== 1 ? 's' : ''}`);
diff --git a/src/polyfill/async.ts b/src/polyfill/async.ts
new file mode 100644
index 000000000..c8131352f
--- /dev/null
+++ b/src/polyfill/async.ts
@@ -0,0 +1,25 @@
+import { Semaphore } from 'async-mutex';
+
+export default {
+  async eachLimit<T>(
+    arr: T[],
+    limit: number,
+    iterator: (val: T) => void | Promise<void>,
+  ): Promise<void> {
+    const semaphore = new Semaphore(limit);
+    await Promise.all(
+      arr.map(async (val) => semaphore.runExclusive(async () => iterator(val))),
+    );
+  },
+
+  async mapLimit<T, R>(
+    arr: T[],
+    limit: number,
+    iterator: (val: T) => R | Promise<R>,
+  ): Promise<R[]> {
+    const semaphore = new Semaphore(limit);
+    return Promise.all(
+      arr.map(async (val) => semaphore.runExclusive(async () => iterator(val))),
+    );
+  },
+};
diff --git a/src/types/files/archives/rar.ts b/src/types/files/archives/rar.ts
index e6afb7e66..dc2c594d5 100644
--- a/src/types/files/archives/rar.ts
+++ b/src/types/files/archives/rar.ts
@@ -1,10 +1,10 @@
 import path from 'node:path';
 
-import async, { AsyncResultCallback } from 'async';
 import { Mutex } from 'async-mutex';
 import unrar from 'node-unrar-js';
 
 import Constants from '../../../constants.js';
+import async from '../../../polyfill/async.js';
 import Archive from './archive.js';
 import ArchiveEntry from './archiveEntry.js';
 
@@ -25,16 +25,13 @@
     return async.mapLimit(
       [...rar.getFileList().fileHeaders].filter((fileHeader) => !fileHeader.flags.directory),
       Constants.ARCHIVE_ENTRY_SCANNER_THREADS_PER_ARCHIVE,
-      async (fileHeader, callback: AsyncResultCallback<ArchiveEntry<Rar>, Error>) => {
-        const archiveEntry = await ArchiveEntry.entryOf({
-          archive: this,
-          entryPath: fileHeader.name,
-          size: fileHeader.unpSize,
-          crc32: fileHeader.crc.toString(16),
-          // If MD5, SHA1, or SHA256 is desired, this file will need to be extracted to calculate
-        }, checksumBitmask);
-        callback(undefined, archiveEntry);
-      },
+      async (fileHeader) => ArchiveEntry.entryOf({
+        archive: this,
+        entryPath: fileHeader.name,
+        size: fileHeader.unpSize,
+        crc32: fileHeader.crc.toString(16),
+        // If MD5, SHA1, or SHA256 is desired, this file will need to be extracted to calculate
+      }, checksumBitmask),
     );
   }
 
diff --git a/src/types/files/archives/sevenZip.ts b/src/types/files/archives/sevenZip.ts
index 9e0238938..85a8cc316 100644
--- a/src/types/files/archives/sevenZip.ts
+++ b/src/types/files/archives/sevenZip.ts
@@ -1,10 +1,10 @@
 import path from 'node:path';
 
 import _7z, { Result } from '7zip-min';
-import async, { AsyncResultCallback } from 'async';
 import { Mutex } from 'async-mutex';
 
 import Constants from '../../../constants.js';
+import async from '../../../polyfill/async.js';
 import fsPoly from '../../../polyfill/fsPoly.js';
 import Archive from './archive.js';
 import ArchiveEntry from './archiveEntry.js';
@@ -86,16 +86,13 @@
     return async.mapLimit(
       filesIn7z.filter((result) => !result.attr?.startsWith('D')),
       Constants.ARCHIVE_ENTRY_SCANNER_THREADS_PER_ARCHIVE,
-      async (result, callback: AsyncResultCallback<ArchiveEntry<SevenZip>, Error>) => {
-        const archiveEntry = await ArchiveEntry.entryOf({
-          archive: this,
-          entryPath: result.name,
-          size: Number.parseInt(result.size, 10),
-          crc32: result.crc,
-          // If MD5, SHA1, or SHA256 is desired, this file will need to be extracted to calculate
-        }, checksumBitmask);
-        callback(undefined, archiveEntry);
-      },
+      async (result) => ArchiveEntry.entryOf({
+        archive: this,
+        entryPath: result.name,
+        size: Number.parseInt(result.size, 10),
+        crc32: result.crc,
+        // If MD5, SHA1, or SHA256 is desired, this file will need to be extracted to calculate
+      }, checksumBitmask),
     );
   }
 
diff --git a/src/types/files/archives/zip.ts b/src/types/files/archives/zip.ts
index f38c007a2..787724ee1 100644
--- a/src/types/files/archives/zip.ts
+++ b/src/types/files/archives/zip.ts
@@ -4,10 +4,10 @@ import { Readable } from 'node:stream';
 import { clearInterval } from 'node:timers';
 
 import archiver, { Archiver } from 'archiver';
-import async, { AsyncResultCallback } from 'async';
 import unzipper, { Entry } from 'unzipper';
 
 import Constants from '../../../constants.js';
+import async from '../../../polyfill/async.js';
 import fsPoly from '../../../polyfill/fsPoly.js';
 import StreamPoly from '../../../polyfill/streamPoly.js';
 import File from '../file.js';
@@ -33,7 +33,7 @@
     return async.mapLimit(
       archive.files.filter((entryFile) => entryFile.type === 'File'),
       Constants.ARCHIVE_ENTRY_SCANNER_THREADS_PER_ARCHIVE,
-      async (entryFile, callback: AsyncResultCallback<ArchiveEntry<Zip>, Error>) => {
+      async (entryFile) => {
         let checksums: ChecksumProps = {};
         if (checksumBitmask & ~ChecksumBitmask.CRC32) {
           const entryStream = entryFile.stream()
@@ -54,14 +54,13 @@
         }
 
         const { crc32, ...checksumsWithoutCrc } = checksums;
-        const archiveEntry = await ArchiveEntry.entryOf({
+        return ArchiveEntry.entryOf({
           archive: this,
           entryPath: entryFile.path,
           size: entryFile.uncompressedSize,
           crc32: crc32 ?? entryFile.crc32.toString(16),
           ...checksumsWithoutCrc,
         }, checksumBitmask);
-        callback(undefined, archiveEntry);
       },
     );
   }
@@ -193,9 +192,7 @@
       * also want to make sure the queue processing stays busy. Use 3 as a middle-ground.
       */
      3,
-      async.asyncify(async (
-        [inputFile, outputArchiveEntry]: [File, ArchiveEntry<Zip>],
-      ): Promise<void> => {
+      async ([inputFile, outputArchiveEntry]) => {
        const streamProcessor = async (stream: Readable): Promise<void> => {
          // Catch stream errors such as `ENOENT: no such file or directory`
          stream.on('error', catchError);
@@ -228,7 +225,7 @@
            catchError(new Error(`failed to write '${inputFile.toString()}' to '${outputArchiveEntry.toString()}'`));
          }
        }
-      }),
+      },
     );
 
     if (zipFileError) {
diff --git a/src/types/options.ts b/src/types/options.ts
index e6ff19c00..da13fd5e0 100644
--- a/src/types/options.ts
+++ b/src/types/options.ts
@@ -3,7 +3,6 @@ import 'reflect-metadata';
 import os from 'node:os';
 import path from 'node:path';
 
-import async, { AsyncResultCallback } from 'async';
 import {
   Expose, instanceToPlain, plainToInstance, Transform,
 } from 'class-transformer';
@@ -12,6 +11,7 @@
 import { isNotJunk } from 'junk';
 import micromatch from 'micromatch';
 import moment from 'moment';
+import async from '../../src/polyfill/async.js';
 import LogLevel from '../console/logLevel.js';
 import Constants from '../constants.js';
 import ArrayPoly from '../polyfill/arrayPoly.js';
@@ -633,17 +633,16 @@
     const isNonDirectory = await async.mapLimit(
       globbedPaths,
       Constants.MAX_FS_THREADS,
-      async (file, callback: AsyncResultCallback<boolean, Error>) => {
+      async (file) => {
         if (!await fsPoly.exists(file) && URLPoly.canParse(file)) {
-          callback(undefined, true);
-          return;
+          return true;
         }
 
         try {
-          callback(undefined, !(await fsPoly.isDirectory(file)));
+          return !(await fsPoly.isDirectory(file));
         } catch {
           // Assume errors mean the path doesn't exist
-          callback(undefined, false);
+          return false;
         }
       },
     );
diff --git a/test/igir.test.ts b/test/igir.test.ts
index 90b6be188..ad41ddee5 100644
--- a/test/igir.test.ts
+++ b/test/igir.test.ts
@@ -144,7 +144,7 @@ describe('with explicit DATs', () => {
   });
 
   it('should throw on all invalid dats', async () => {
-    await expect(new Igir(new Options({
+    await expect(new Igir(new Options({
       dat: ['src/*'],
     }), new Logger(LogLevel.NEVER)).main()).rejects.toThrow(/no valid dat files/i);
   });
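
Note on the replacement: the new src/polyfill/async.ts keeps the eachLimit/mapLimit call shape of the removed async package but is promise-based, so iterator errors propagate as rejections instead of going through a callback, which is why the call sites above now throw or return directly. A rough usage sketch from a module inside src/ (the file paths in the example are made up for illustration, not part of this change):

    import fs from 'node:fs/promises';
    import async from './polyfill/async.js';

    // Hypothetical example: stat a list of paths with at most 4 concurrent fs calls.
    // Results come back in input order, like the async package's mapLimit.
    const paths = ['a.rom', 'b.rom', 'c.rom'];
    const sizes = await async.mapLimit(paths, 4, async (filePath) => (await fs.stat(filePath)).size);

Because each iterator runs inside Semaphore.runExclusive() and results are gathered with Promise.all(), a single rejected iterator rejects the whole eachLimit/mapLimit call.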