/* eslint-disable @typescript-eslint/no-unsafe-assignment */
/* eslint-disable @typescript-eslint/no-unsafe-call */
/* eslint-disable @typescript-eslint/no-unsafe-member-access */
// TODO: upgrade libsquoosh once types are available: https://github.com/GoogleChromeLabs/squoosh/issues/1077
import { cpus } from "os";
import path from "path";

// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
import { ImagePool } from "@squoosh/lib";
import fse from "fs-extra";
import { default as getImageDimensions } from "image-size";

const SOURCE_DIRECTORY = "images";
const DESTINATION_DIRECTORY = path.join("public", "images");

// directories are relative to SOURCE_DIRECTORY
const TEAM_IMAGES_DIRECTORY = path.join("team", "");
const EVENTS_IMAGES_DIRECTORY = path.join("events", "");

const IMAGE_MINIMUM_SIZE = 512;

const GET_ENCODER_FROM_EXTENSION: { [imageExtension: string]: string } = {
  jpg: "mozjpeg",
  jpeg: "mozjpeg",
  JPG: "mozjpeg",
  png: "oxipng",
};

const ENCODER_OPTIONS: { [encoder: string]: Record<string, unknown> } = {
  mozjpeg: {},
  oxipng: {},
};

void optimizeImages();

export async function optimizeImages() {
  const startTime = Date.now();

  const imagePaths = await getFilePathsInDirectory(SOURCE_DIRECTORY);
  await fse.emptyDir(DESTINATION_DIRECTORY);

  // maximum number of workers is 8 in order to avoid running out of memory
  const numberOfWorkers = Math.min(cpus().length, 8);
  const imagePool = new ImagePool(numberOfWorkers);

  // process smaller batches in order to reduce memory usage
  const batchSize = 32;
  for (let i = 0; i < imagePaths.length; i += batchSize) {
    await Promise.all(
      imagePaths.slice(i, i + batchSize).map(async (imagePath) => {
        const imageStartTime = Date.now();

        const sourcePath = path.join(SOURCE_DIRECTORY, imagePath);
        const destinationPath = path.join(DESTINATION_DIRECTORY, imagePath);

        const fileExtension = imagePath.split(".").pop() ?? "";
        const encoder = GET_ENCODER_FROM_EXTENSION[fileExtension];

        // files without a matching encoder are copied unchanged
        if (!encoder) {
          await fse.copy(sourcePath, destinationPath);
          console.log(
            `Copied ${imagePath} in ${getElapsedSeconds(imageStartTime)}s`
          );
          return;
        }

        const rawImageFile = await fse.readFile(sourcePath);
        const ingestedImage = imagePool.ingestImage(rawImageFile);
        const { width, height } = getImageDimensions(rawImageFile);
        await ingestedImage.decoded;

        const shouldResize =
          (imagePath.startsWith(TEAM_IMAGES_DIRECTORY) ||
            imagePath.startsWith(EVENTS_IMAGES_DIRECTORY)) &&
          (width ?? 0) > IMAGE_MINIMUM_SIZE &&
          (height ?? 0) > IMAGE_MINIMUM_SIZE;

        if (width && height && shouldResize) {
          const smallerDimension = width < height ? "width" : "height";

          // specifying only one dimension maintains the aspect ratio
          const preprocessOptions = {
            resize: {
              enabled: true,
              [smallerDimension]: IMAGE_MINIMUM_SIZE,
            },
          };
          await ingestedImage.preprocess(preprocessOptions);
          console.log(
            `Resized ${sourcePath} in ${getElapsedSeconds(imageStartTime)}s`
          );
        }

        const encodeOptions = { [encoder]: ENCODER_OPTIONS[encoder] };
        await ingestedImage.encode(encodeOptions);

        const encodedImage = await ingestedImage.encodedWith[encoder];
        await fse.outputFile(destinationPath, encodedImage.binary);

        console.log(
          `Optimized ${sourcePath} in ${getElapsedSeconds(imageStartTime)}s`
        );
      })
    );
  }

  await imagePool.close();
  console.log(`TOTAL DURATION: ${getElapsedSeconds(startTime)}s`);
}

// recursively collects file paths, returned relative to the given directory
async function getFilePathsInDirectory(directory: string): Promise<string[]> {
  const entries = await fse.readdir(directory, { withFileTypes: true });
  return (
    await Promise.all(
      entries.map(async (entry) => {
        if (entry.isDirectory()) {
          const subdirectory = path.join(directory, entry.name);
          const subentries = await getFilePathsInDirectory(subdirectory);
          return subentries.map((subentry) => path.join(entry.name, subentry));
        }
        return entry.name;
      })
    )
  ).flat();
}

function getElapsedSeconds(startTime: number) {
  return (Date.now() - startTime) / 1000;
}