/* eslint-disable @typescript-eslint/restrict-template-expressions */
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
/* eslint-disable @typescript-eslint/no-unsafe-call */
/* eslint-disable @typescript-eslint/no-unsafe-member-access */
// TODO: upgrade libsquoosh once types are available: https://github.com/GoogleChromeLabs/squoosh/issues/1077
import { cpus } from "os";
import path from "path";

// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
import { ImagePool } from "@squoosh/lib";
import fse from "fs-extra";
import { default as getImageDimensions } from "image-size";
import SparkMD5 from "spark-md5";

const SOURCE_DIRECTORY = "images";
const DESTINATION_DIRECTORY = path.join("public", "images");
const ALREADY_OPTIMIZED_SAVE_FILE_PATH = path.join(
  "scripts",
  "already-optimized-images.json"
);

// directory where Meet the Team headshots are stored, relative to the source directory
const TEAM_IMAGES_DIRECTORY = path.join("team", "");
const IMAGE_MINIMUM_SIZE = 512;

// maps a file extension to the libSquoosh encoder used for it
const GET_ENCODER_FROM_EXTENSION: { [imageExtension: string]: string } = {
  jpg: "mozjpeg",
  jpeg: "mozjpeg",
  png: "oxipng",
};

// empty options objects mean each encoder runs with its default settings
const ENCODER_OPTIONS: { [encoder: string]: Record<string, unknown> } = {
  mozjpeg: {},
  oxipng: {},
};

// the save file has the shape { optimizedImages: string[] }; a missing or
// unreadable file simply means nothing has been optimized yet
async function getAlreadyOptimizedImageHashesSet(): Promise<Set<string>> {
  try {
    const saveString = (
      await fse.readFile(ALREADY_OPTIMIZED_SAVE_FILE_PATH)
    ).toString();
    const optimizedArrayInSaveFile = JSON.parse(saveString).optimizedImages;
    return optimizedArrayInSaveFile
      ? new Set(optimizedArrayInSaveFile)
      : new Set();
  } catch (e) {
    return new Set();
  }
}

async function writeSaveFile(hashes: Set<string>) {
  await fse.writeFile(
    ALREADY_OPTIMIZED_SAVE_FILE_PATH,
    JSON.stringify({ optimizedImages: Array.from(hashes) })
  );
}

void optimizeImages();

export async function optimizeImages() {
  const imagePaths = await getFilePathsInDirectory(SOURCE_DIRECTORY);
  const alreadyOptimizedImageHashes = await getAlreadyOptimizedImageHashesSet();

  // maximum number of workers is 8 in order to avoid running out of memory
  const numberOfWorkers = Math.min(cpus().length, 8);
  const imagePool = new ImagePool(numberOfWorkers);

  await Promise.all(
    imagePaths.map(async (imagePath) => {
      const timerName = `File ${imagePath} took: `;
      console.time(timerName);

      const sourcePath = path.join(SOURCE_DIRECTORY, imagePath);
      const destinationPath = path.join(DESTINATION_DIRECTORY, imagePath);

      const fileExtension = imagePath.split(".").pop() ?? "";
      const encoder = GET_ENCODER_FROM_EXTENSION[fileExtension];
      if (!encoder) {
        console.log(`Only copying ${imagePath}`);
        await fse.copy(sourcePath, destinationPath);
        return;
      }

      const rawImageFile = await fse.readFile(sourcePath);
      // compare hash of file contents so we don't optimize images which have already been optimized before
      const fileHash = `${imagePath}-${SparkMD5.hash(rawImageFile.toString())}`;
      if (alreadyOptimizedImageHashes.has(fileHash)) {
        console.log(`Skipping ${imagePath}`);
        return;
      }

      console.log(`Optimizing file ${imagePath}`);
      const ingestedImage = imagePool.ingestImage(rawImageFile);
      const { width, height } = getImageDimensions(rawImageFile);
      await ingestedImage.decoded;

      const shouldResize =
        imagePath.startsWith(TEAM_IMAGES_DIRECTORY) &&
        (width ?? 0) > IMAGE_MINIMUM_SIZE &&
        (height ?? 0) > IMAGE_MINIMUM_SIZE;
      if (width && height && shouldResize) {
        const smallerDimension = width < height ? "width" : "height";
"width" : "height"; // specifying only one dimension maintains the aspect ratio const preprocessOptions = { resize: { enabled: true, [smallerDimension]: IMAGE_MINIMUM_SIZE, }, }; await ingestedImage.preprocess(preprocessOptions); } const encodeOptions = { [encoder]: ENCODER_OPTIONS[encoder] }; await ingestedImage.encode(encodeOptions); const encodedImage = await ingestedImage.encodedWith[encoder]; await fse.outputFile(destinationPath, encodedImage.binary); alreadyOptimizedImageHashes.add(fileHash); console.timeEnd(timerName); await writeSaveFile(alreadyOptimizedImageHashes); }) ); await writeSaveFile(alreadyOptimizedImageHashes); await imagePool.close(); } async function getFilePathsInDirectory(directory: string): Promise { const entries = await fse.readdir(directory, { withFileTypes: true }); return ( await Promise.all( entries.map(async (entry) => { if (entry.isDirectory()) { const subdirectory = path.join(directory, entry.name); const subentries = await getFilePathsInDirectory(subdirectory); return subentries.map((subentry) => path.join(entry.name, subentry)); } return entry.name; }) ) ).flat(); }