@@ -12,10 +12,16 @@ import path from "path";
import { ImagePool } from "@squoosh/lib";
import fse from "fs-extra";
import { default as getImageDimensions } from "image-size";
import SparkMD5 from "spark-md5";

const SOURCE_DIRECTORY = "images";
const DESTINATION_DIRECTORY = path.join("public", "images");
const ALREADY_OPTIMIZED_SAVE_FILE_PATH = path.join(
  "scripts",
  "optimize-images-already-optimized-files.json"
);

// directory where Meet the Team headshots are stored, relative to the source directory
const TEAM_IMAGES_DIRECTORY = path.join("team", "");

@@ -32,23 +38,43 @@ const ENCODER_OPTIONS: { [encoder: string]: Record<string, unknown> } = {
  oxipng: {},
};
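
// OxiPNG is Squoosh's lossless PNG optimizer; an empty options object here
// just accepts its defaults. (Any other encoder entries sit above this hunk.)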

async function getAlreadyOptimizedImageHashesSet(): Promise<Set<string>> {
  try {
    const saveString = (
      await fse.readFile(ALREADY_OPTIMIZED_SAVE_FILE_PATH)
    ).toString();
    const optimizedArrayInSaveFile = JSON.parse(saveString).optimizedImages;
    return optimizedArrayInSaveFile
      ? new Set(optimizedArrayInSaveFile)
      : new Set();
  } catch (e) {
    // a missing or unreadable save file just means no images are known to be optimized yet
    return new Set();
  }
}

async function writeSaveFile(hashes: Set<string>) {
  await fse.writeFile(
    ALREADY_OPTIMIZED_SAVE_FILE_PATH,
    JSON.stringify({ optimizedImages: Array.from(hashes) })
  );
}
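
// Shape of the save file that the two helpers above read and write, with an
// illustrative (not real) MD5 value:
//   { "optimizedImages": ["9e107d9d372bb6826bd81d3542a419d6", "..."] }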

void optimizeImages();

export async function optimizeImages() {
  const imagePaths = await getFilePathsInDirectory(SOURCE_DIRECTORY);
  // await fse.emptyDir(DESTINATION_DIRECTORY);
  const alreadyOptimizedImageHashes = await getAlreadyOptimizedImageHashesSet();

  // maximum number of workers is 8 in order to avoid running out of memory
  const numberOfWorkers = Math.min(cpus().length, 8);
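  // (e.g. a 16-core machine is still capped at 8 workers, while a 4-core machine uses all 4)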
  const imagePool = new ImagePool(numberOfWorkers);

  let count = 0;
  await Promise.all(
    imagePaths.map(async (imagePath) => {
      count += 1;
      const num = count; // per-file id used to label this file's console timers
      console.time(`overall-timer${num}`);
      const sourcePath = path.join(SOURCE_DIRECTORY, imagePath);
      const destinationPath = path.join(DESTINATION_DIRECTORY, imagePath);
      const fileExtension = imagePath.split(".").pop() ?? "";

@@ -63,6 +89,12 @@ export async function optimizeImages() {
      const rawImageFile = await fse.readFile(sourcePath);
      console.timeEnd(`read-file-timer${num}`);

      console.time(`hash-timer${num}`);
      // hash the raw bytes losslessly ("binary" maps each byte to one char),
      // so identical files always produce the same key
      const fileHash = SparkMD5.hashBinary(rawImageFile.toString("binary"));
      console.timeEnd(`hash-timer${num}`);
      if (alreadyOptimizedImageHashes.has(fileHash)) {
        console.timeEnd(`overall-timer${num}`);
        return;
      }
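
      // Skipped files are assumed to already have an optimized copy at
      // destinationPath from an earlier run, which appears to be why the
      // emptyDir call near the top of optimizeImages is left commented out.
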
      const ingestedImage = imagePool.ingestImage(rawImageFile);
      // image-size reads width/height from the file header without a full decode
      const { width, height } = getImageDimensions(rawImageFile);

@@ -92,10 +124,12 @@
      // encodedWith[encoder] resolves once the worker pool finishes that codec
      const encodedImage = await ingestedImage.encodedWith[encoder];
      // outputFile creates any missing parent directories, unlike a plain writeFile
      await fse.outputFile(destinationPath, encodedImage.binary);
      alreadyOptimizedImageHashes.add(fileHash);
      console.timeEnd(`overall-timer${num}`);
    })
  );
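
  // The save file is only rewritten after every image has settled, so an
  // interrupted run loses this session's progress and re-optimizes those
  // files on the next run.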
  await writeSaveFile(alreadyOptimizedImageHashes);
  await imagePool.close();
}
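
// The top-level `void optimizeImages()` call makes this file runnable as a
// standalone build script; a hypothetical wiring (not shown in this diff)
// might be a package.json entry like:
//   "scripts": { "optimize-images": "ts-node scripts/optimize-images.ts" }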