diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 0000000..b609e61 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,12 @@ +root = true + +[*] +charset = utf-8 +end_of_line = lf +indent_style = tab +insert_final_newline = true +trim_trailing_whitespace = true + +[*.md] +indent_size = 2 +indent_style = space diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..c7af875 --- /dev/null +++ b/.gitignore @@ -0,0 +1,14 @@ +# generated +node_modules + +# logs +*.log* + +# locks +*-lock.* + +# environment +*.env* + +# macos +.DS_Store diff --git a/.vscode/.prettierignore b/.vscode/.prettierignore new file mode 100644 index 0000000..4b6eece --- /dev/null +++ b/.vscode/.prettierignore @@ -0,0 +1,3 @@ +# Ignore all JS/TS files: +*.js +*.ts diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000..fec804e --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,32 @@ +{ + /* Editor */ + "editor.codeActionsOnSave": { + "source.fixAll": true + }, + "editor.insertSpaces": false, + "editor.useTabStops": true, + + /* Prettier */ + "prettier.ignorePath": "/.vscode/.prettierignore", + "prettier.useEditorConfig": true, + + /* JavaScript */ + "[javascript]": { + "editor.defaultFormatter": "dbaeumer.vscode-eslint" + }, + + /* JSON */ + "[json]": { + "editor.defaultFormatter": "dbaeumer.vscode-eslint" + }, + + /* JSON */ + "[jsonc]": { + "editor.defaultFormatter": "dbaeumer.vscode-eslint" + }, + + /* TypeScript */ + "[typescript]": { + "editor.defaultFormatter": "dbaeumer.vscode-eslint" + }, +} diff --git a/README.md b/README.md new file mode 100644 index 0000000..b1ab0e4 --- /dev/null +++ b/README.md @@ -0,0 +1,3 @@ +# get-size + +An ESM fork of [get-size](https://www.npmjs.com/package/get-size). 
diff --git a/package.json b/package.json new file mode 100644 index 0000000..ba8483c --- /dev/null +++ b/package.json @@ -0,0 +1,56 @@ +{ + "$schema": "https://json.schemastore.org/package.json", + "name": "@astropub/get-size.root", + "type": "module", + "workspaces": [ + "packages/*" + ], + "scripts": { + "check": "node scripts/check.js", + "release": "node scripts/release.js" + }, + "devDependencies": { + "@types/node": "20.4.1", + "@typescript-eslint/eslint-plugin": "6.0.0", + "@typescript-eslint/parser": "6.0.0", + "@typescript/lib-dom": "npm:@types/web@^0.0.105", + "brotli-size": "4.0.0", + "eslint": "8.44.0", + "eslint-config-dev": "3.3.1", + "eslint-plugin-json": "3.1.0", + "gzip-size": "7.0.0", + "rollup-plugin-terser": "7.0.2", + "rollup-plugin-typescript2": "0.35.0", + "types-object": "0.3.0", + "typescript": "5.1.6", + "vite": "4.4.3" + }, + "eslintConfig": { + "extends": [ + "dev/ts/nosemi" + ], + "plugins": [ + "json" + ], + "rules": { + "indent": [ + "error", + "tab" + ] + } + }, + "prettier": { + "singleQuote": true, + "useTabs": true, + "overrides": [ + { + "files": "*.md", + "options": { + "tabWidth": 2, + "useTabs": false + } + } + ] + }, + "private": true +} diff --git a/packages/get-size/LICENSE.md b/packages/get-size/LICENSE.md new file mode 100644 index 0000000..a319c75 --- /dev/null +++ b/packages/get-size/LICENSE.md @@ -0,0 +1,7 @@ +Copyright (c) 2012-2021 [David DeSandro](https://desandro.com) and [contributors](https://github.com/desandro/get-size/graphs/contributors) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this 
permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/packages/get-size/README.md b/packages/get-size/README.md new file mode 100644 index 0000000..b1ab0e4 --- /dev/null +++ b/packages/get-size/README.md @@ -0,0 +1,3 @@ +# get-size + +An ESM fork of [get-size](https://www.npmjs.com/package/get-size). diff --git a/packages/get-size/lib/from/AsyncIterable.d.ts b/packages/get-size/lib/from/AsyncIterable.d.ts new file mode 100644 index 0000000..92aaee5 --- /dev/null +++ b/packages/get-size/lib/from/AsyncIterable.d.ts @@ -0,0 +1,3 @@ +import type { ImageResult } from '../types.d.ts' + +export default function getSizeFromAsyncIterable(iteratable: Iterable | AsyncIterable): Promise diff --git a/packages/get-size/lib/from/AsyncIterable.js b/packages/get-size/lib/from/AsyncIterable.js new file mode 100644 index 0000000..115b7b8 --- /dev/null +++ b/packages/get-size/lib/from/AsyncIterable.js @@ -0,0 +1,23 @@ +import getSizeFromUint8Array from './Uint8Array.js' +import getMergedUint8Array from '../utils/getMergedUint8Array.js' + +import '../types.js' +import '../detector.js' + +export default async function getSizeFromAsyncIterable(/** @type {Iterable | AsyncIterable} */ iteratable) { + await import('../utils/ReadableStreamPolyfill.js') + + let array = new Uint8Array() + + for await (const chunk of iteratable) { + array = getMergedUint8Array(array, chunk) + + const result = getSizeFromUint8Array(array) + + if (result !== null && !isNaN(result.width)) { + return result + } 
+ } + + return null +} diff --git a/packages/get-size/lib/from/FileSync.d.ts b/packages/get-size/lib/from/FileSync.d.ts new file mode 100644 index 0000000..79c9f99 --- /dev/null +++ b/packages/get-size/lib/from/FileSync.d.ts @@ -0,0 +1,10 @@ +/// + +import type { ImageResult } from '../types.d.ts' +import type { PathLike } from 'node:fs' + +export interface GetSizeFromFileSyncOptions { + chunkSize: number; +} + +export default function getSizeFromFileSync(path: PathLike, opts?: GetSizeFromFileSyncOptions): ImageResult | null diff --git a/packages/get-size/lib/from/FileSync.js b/packages/get-size/lib/from/FileSync.js new file mode 100644 index 0000000..174ddf0 --- /dev/null +++ b/packages/get-size/lib/from/FileSync.js @@ -0,0 +1,38 @@ +/// + +/** @typedef {import('../types.d.ts').ImageResult} ImageResult */ +/** @typedef {import('node:fs').PathLike} PathLike */ +/** @typedef {import('./FileSync.d.ts').GetSizeFromFileSyncOptions} GetSizeFromFileSyncOptions */ + +import { openSync, fstatSync, readSync } from 'node:fs' +import getSizeFromIterator from './Iterable.js' + +import './Uint8Array.js' +import '../types.js' +import '../detector.js' +import '../utils/getMergedUint8Array.js' + +export default function getSizeFromFileSync(/** @type {PathLike} */ path, /** @type {GetSizeFromFileSyncOptions} */ opts = null) { + const chunkSize = Number(Object(opts).chunkSize) || defaultChunkSize + const fileReadIterator = readFileIteratorSync(path, { chunkSize }) + + return getSizeFromIterator(fileReadIterator) +} + +function * readFileIteratorSync(path, opts) { + opts = Object(opts) + + const chunkSize = Number(opts.chunkSize) || defaultChunkSize + const fileDescriptor = openSync(path, 'r') + const fileStats = fstatSync(fileDescriptor) + const buffer = Buffer.alloc(Math.min(fileStats.size, chunkSize)) + + /** @type {number} */ + let read + + while (read = readSync(fileDescriptor, buffer, 0, buffer.length, null), read !== 0) { + yield new Uint8Array(buffer) + } +} + +const 
defaultChunkSize = 256 diff --git a/packages/get-size/lib/from/Iterable.d.ts b/packages/get-size/lib/from/Iterable.d.ts new file mode 100644 index 0000000..a3cfc19 --- /dev/null +++ b/packages/get-size/lib/from/Iterable.d.ts @@ -0,0 +1,3 @@ +import type { ImageResult } from '../types.d.ts' + +export default function getSizeFromIterator(iterable: Iterable): ImageResult | null diff --git a/packages/get-size/lib/from/Iterable.js b/packages/get-size/lib/from/Iterable.js new file mode 100644 index 0000000..bcbd33a --- /dev/null +++ b/packages/get-size/lib/from/Iterable.js @@ -0,0 +1,21 @@ +import getSizeFromUint8Array from './Uint8Array.js' +import getMergedUint8Array from '../utils/getMergedUint8Array.js' + +import '../types.js' +import '../detector.js' + +export default function getSizeFromIterator(/** @type {Iterable} */ iterable) { + let array = new Uint8Array() + + for (const chunk of iterable) { + array = getMergedUint8Array(array, chunk) + + const result = getSizeFromUint8Array(array) + + if (result !== null && !isNaN(result.width)) { + return result + } + } + + return null +} diff --git a/packages/get-size/lib/from/ReadableStream.d.ts b/packages/get-size/lib/from/ReadableStream.d.ts new file mode 100644 index 0000000..22acf25 --- /dev/null +++ b/packages/get-size/lib/from/ReadableStream.d.ts @@ -0,0 +1,3 @@ +import type { ImageResult } from '../types.d.ts' + +export default function getSizeFromReadableStream(readableStream: ReadableStream): Promise diff --git a/packages/get-size/lib/from/ReadableStream.js b/packages/get-size/lib/from/ReadableStream.js new file mode 100644 index 0000000..6b875ba --- /dev/null +++ b/packages/get-size/lib/from/ReadableStream.js @@ -0,0 +1,12 @@ +import getSizeFromAsyncIterable from './AsyncIterable.js' + +import './Uint8Array.js' +import '../types.js' +import '../detector.js' +import '../utils/getMergedUint8Array.js' + +export default async function getSizeFromReadableStream(/** @type {ReadableStream} */ readableStream) { + await 
import('../utils/ReadableStreamPolyfill.js') + + return await getSizeFromAsyncIterable(readableStream) +} diff --git a/packages/get-size/lib/from/URL.d.ts b/packages/get-size/lib/from/URL.d.ts new file mode 100644 index 0000000..2dff71f --- /dev/null +++ b/packages/get-size/lib/from/URL.d.ts @@ -0,0 +1,3 @@ +import type { ImageResult } from '../types.d.ts' + +export default function getSizeFromURL(input: URL | RequestInfo): Promise diff --git a/packages/get-size/lib/from/URL.js b/packages/get-size/lib/from/URL.js new file mode 100644 index 0000000..035c263 --- /dev/null +++ b/packages/get-size/lib/from/URL.js @@ -0,0 +1,12 @@ +import getSizeFromAsyncIterable from './AsyncIterable.js' + +import './Uint8Array.js' +import '../types.js' +import '../detector.js' +import '../utils/getMergedUint8Array.js' + +export default async function getSizeFromURL(input) { + const response = await fetch(input) + + return await getSizeFromAsyncIterable(response.body) +} diff --git a/packages/get-size/lib/from/Uint8Array.d.ts b/packages/get-size/lib/from/Uint8Array.d.ts new file mode 100644 index 0000000..a103905 --- /dev/null +++ b/packages/get-size/lib/from/Uint8Array.d.ts @@ -0,0 +1,3 @@ +import type { ImageResult } from '../types.d.ts' + +export default function getSizeFromUint8Array(input: Uint8Array): ImageResult | null diff --git a/packages/get-size/lib/from/Uint8Array.js b/packages/get-size/lib/from/Uint8Array.js new file mode 100644 index 0000000..7c581de --- /dev/null +++ b/packages/get-size/lib/from/Uint8Array.js @@ -0,0 +1,23 @@ +/** @typedef {import('../types.d.ts').ImageResult} ImageResult */ + +import { types } from '../types.js' +import getType from '../getType.js' + +export default function getSizeFromUint8Array(/** @type {Uint8Array} */ input) { + const type = getType(input) + + if (type !== null) { + try { + const size = types[type].calculate(input) + + return /** @type {ImageResult} */ ({ + type, + ...size, + }) + } catch { + // do nothing and continue + } + } + + 
return null +} diff --git a/packages/get-size/lib/getSize.d.ts b/packages/get-size/lib/getSize.d.ts new file mode 100644 index 0000000..bb9c8f5 --- /dev/null +++ b/packages/get-size/lib/getSize.d.ts @@ -0,0 +1,10 @@ +export type { ImageType, ImageResult } from './types.ts' + +export { default as getSizeFromAsyncIterable } from './from/AsyncIterable.ts' +export { default as getSizeFromIterable } from './from/Iterable.ts' +export { default as getSizeFromReadableStream } from './from/ReadableStream.ts' +export { default as getSizeFromUint8Array } from './from/Uint8Array.ts' +export { default as getSizeFromURL } from './from/URL.ts' +export { default as getType } from './getType.js' + +export { types } from './types.ts' diff --git a/packages/get-size/lib/getSize.js b/packages/get-size/lib/getSize.js new file mode 100644 index 0000000..5c6f694 --- /dev/null +++ b/packages/get-size/lib/getSize.js @@ -0,0 +1,9 @@ +export { default as getSizeFromAsyncIterable } from './from/AsyncIterable.js' +export { default as getSizeFromIterable } from './from/Iterable.js' +export { default as getSizeFromReadableStream } from './from/ReadableStream.js' +export { default as getSizeFromUint8Array } from './from/Uint8Array.js' +export { default as getSizeFromURL } from './from/URL.js' +export { default as getType } from './getType.js' +export { types } from './types.js' + +import './utils/getMergedUint8Array.js' diff --git a/packages/get-size/lib/getType.d.ts b/packages/get-size/lib/getType.d.ts new file mode 100644 index 0000000..96beb4a --- /dev/null +++ b/packages/get-size/lib/getType.d.ts @@ -0,0 +1,3 @@ +import type { SupportedImageType } from './types.d.ts' + +export default function getType(input: Uint8Array): SupportedImageType | null diff --git a/packages/get-size/lib/getType.js b/packages/get-size/lib/getType.js new file mode 100644 index 0000000..419e227 --- /dev/null +++ b/packages/get-size/lib/getType.js @@ -0,0 +1,35 @@ +/** @typedef 
{import('./types.d.ts').SupportedImageType} SupportedImageType */ + +import { types } from './types.js' + +/** @type {SupportedImageType[]} */ +const keys = Object.keys(types) + +// This map helps avoid validating for every single image type +/** @type {{ [byte: number]: SupportedImageType }} */ +const firstBytes = { + 0x38: 'psd', + 0x42: 'bmp', + 0x44: 'dds', + 0x47: 'gif', + 0x52: 'webp', + 0x69: 'icns', + 0x89: 'png', + 0xff: 'jpg' +} + +export default function getType(/** @type {Uint8Array} */ input) { + const byte = input[0] + + if (byte in firstBytes) { + const type = firstBytes[byte] + + if (type && types[type].validate(input)) { + return type + } + } + + return keys.find( + (/** @type {SupportedImageType} */ key) => types[key].validate(input) + ) || null +} diff --git a/packages/get-size/lib/types.d.ts b/packages/get-size/lib/types.d.ts new file mode 100644 index 0000000..5a0d8c9 --- /dev/null +++ b/packages/get-size/lib/types.d.ts @@ -0,0 +1,19 @@ +export type ImageSize = { + width: number + height: number +} + +export type ImageDetector = { + validate(input: Uint8Array): boolean + calculate(input: Uint8Array): ImageSize +} + +export type SupportedImageType = 'bmp' | 'cur' | 'dds' | 'gif' | 'icns' | 'ico' | 'j2c' | 'jp2' | 'jpg' | 'ktx' | 'png' | 'pnm' | 'psd' | 'svg' | 'tga' | 'webp' + +export type ImageTypes = Record + +export type ImageResult = { + type: SupportedImageType +} & ImageSize + +export const types: ImageTypes diff --git a/packages/get-size/lib/types.js b/packages/get-size/lib/types.js new file mode 100644 index 0000000..fe91084 --- /dev/null +++ b/packages/get-size/lib/types.js @@ -0,0 +1,932 @@ +// @ts-check + +const decoder = new TextDecoder() + +const toUTF8String = (/** @type {Uint8Array} */ input, start = 0, end = input.length) => decoder.decode(input.slice(start, end)) +const toHexString = (/** @type {Uint8Array} */ input, start = 0, end = input.length) => input.slice(start, end).reduce((memo, i) => memo + ('0' + 
i.toString(16)).slice(-2), '') +const readInt16LE = (/** @type {Uint8Array} */ input, offset = 0) => { + const val = input[offset] + input[offset + 1] * 2 ** 8 + return val | (val & 2 ** 15) * 0x1fffe +} +const readUInt16BE = (/** @type {Uint8Array} */ input, offset = 0) => input[offset] * 2 ** 8 + input[offset + 1] +const readUInt16LE = (/** @type {Uint8Array} */ input, offset = 0) => input[offset] + input[offset + 1] * 2 ** 8 +const readUInt24LE = (/** @type {Uint8Array} */ input, offset = 0) => input[offset] + input[offset + 1] * 2 ** 8 + input[offset + 2] * 2 ** 16 +const readInt32LE = (/** @type {Uint8Array} */ input, offset = 0) => input[offset] + input[offset + 1] * 2 ** 8 + input[offset + 2] * 2 ** 16 + (input[offset + 3] << 24) +const readUInt32BE = (/** @type {Uint8Array} */ input, offset = 0) => input[offset] * 2 ** 24 + input[offset + 1] * 2 ** 16 + input[offset + 2] * 2 ** 8 + input[offset + 3] +const readUInt32LE = (/** @type {Uint8Array} */ input, offset = 0) => input[offset] + input[offset + 1] * 2 ** 8 + input[offset + 2] * 2 ** 16 + input[offset + 3] * 2 ** 24 + +// Abstract reading multi-byte unsigned integers +/** @type {Record number>} */ +const methods = { + readUInt16BE, + readUInt16LE, + readUInt32BE, + readUInt32LE, +} + +function readUInt(/** @type {Uint8Array} */ input, /** @type {number} */ bits, /** @type {number} */ offset, /** @type {boolean} */ isBigEndian) { + offset = offset || 0 + + const endian = isBigEndian ? 
'BE' : 'LE' + const methodName = ('readUInt' + bits + endian) + + return methods[methodName](input, offset) +} + +const validate$f = (/** @type {Uint8Array} */ input) => toUTF8String(input, 0, 2) === 'BM' +const calculate$f = (/** @type {Uint8Array} */ input) => ({ + height: Math.abs(readInt32LE(input, 22)), + width: readUInt32LE(input, 18), +}) + +let BMP = /* #__PURE__ */Object.freeze({ + __proto__: null, + validate: validate$f, + calculate: calculate$f +}) + +const TYPE_ICON = 1 + +/** + * ICON Header + * + * | Offset | Size | Purpose | + * | 0 | 2 | Reserved. Must always be 0. | + * | 2 | 2 | Image type: 1 for icon (.ICO) image, 2 for cursor (.CUR) image. Other values are invalid. | + * | 4 | 2 | Number of images in the file. | + * + */ +const SIZE_HEADER$1 = 2 + 2 + 2 // 6 + +/** + * Image Entry + * + * | Offset | Size | Purpose | + * | 0 | 1 | Image width in pixels. Can be any number between 0 and 255. Value 0 means width is 256 pixels. | + * | 1 | 1 | Image height in pixels. Can be any number between 0 and 255. Value 0 means height is 256 pixels. | + * | 2 | 1 | Number of colors in the color palette. Should be 0 if the image does not use a color palette. | + * | 3 | 1 | Reserved. Should be 0. | + * | 4 | 2 | ICO format: Color planes. Should be 0 or 1. | + * | | | CUR format: The horizontal coordinates of the hotspot in number of pixels from the left. | + * | 6 | 2 | ICO format: Bits per pixel. | + * | | | CUR format: The vertical coordinates of the hotspot in number of pixels from the top. | + * | 8 | 4 | The size of the image's data in bytes | + * | 12 | 4 | The offset of BMP or PNG data from the beginning of the ICO/CUR file | + * + */ +const SIZE_IMAGE_ENTRY = 1 + 1 + 1 + 1 + 2 + 2 + 4 + 4 // 16 + +function getSizeFromOffset(/** @type {Uint8Array} */ input, /** @type {number} */ offset) { + const value = input[offset] + return value === 0 ? 
256 : value +} +function getImageSize$1(/** @type {Uint8Array} */ input, /** @type {number} */ imageIndex) { + const offset = SIZE_HEADER$1 + (imageIndex * SIZE_IMAGE_ENTRY) + return { + height: getSizeFromOffset(input, offset + 1), + width: getSizeFromOffset(input, offset), + } +} + +const validate$e = (/** @type {Uint8Array} */ input) => { + const reserved = readUInt16LE(input, 0) + const imageCount = readUInt16LE(input, 4) + if (reserved !== 0 || imageCount === 0) { + return false + } + const imageType = readUInt16LE(input, 2) + return imageType === TYPE_ICON +} + +const calculate$e = (/** @type {Uint8Array} */ input) => { + const nbImages = readUInt16LE(input, 4) + const imageSize = getImageSize$1(input, 0) + if (nbImages === 1) { + return imageSize + } + const imgs = [ imageSize ] + for (let imageIndex = 1; imageIndex < nbImages; imageIndex += 1) { + imgs.push(getImageSize$1(input, imageIndex)) + } + return { + height: imageSize.height, + width: imageSize.width, + images: imgs, + } +} + +let ICO = /* #__PURE__ */Object.freeze({ + __proto__: null, + validate: validate$e, + calculate: calculate$e +}) + +const TYPE_CURSOR = 2 + +const validate$d = (/** @type {Uint8Array} */ input) => { + const reserved = readUInt16LE(input, 0) + const imageCount = readUInt16LE(input, 4) + if (reserved !== 0 || imageCount === 0) { return false } + const imageType = readUInt16LE(input, 2) + return imageType === TYPE_CURSOR +} + +const calculate$d = (/** @type {Uint8Array} */ input) => calculate$e(input) + +let CUR = /* #__PURE__ */Object.freeze({ + __proto__: null, + validate: validate$d, + calculate: calculate$d +}) + +const validate$c = (/** @type {Uint8Array} */ input) => readUInt32LE(input, 0) === 0x20534444 +const calculate$c = (/** @type {Uint8Array} */ input) => ({ + height: readUInt32LE(input, 12), + width: readUInt32LE(input, 16) +}) + +let DDS = /* #__PURE__ */Object.freeze({ + __proto__: null, + validate: validate$c, + calculate: calculate$c +}) + +/** 8-bit 
representation of "GIF87a" */ +const gif87a = [ 0x47, 0x49, 0x46, 0x38, 0x37, 0x61 ] + +/** 8-bit representation of "GIF89a" */ +const gif89a = [ 0x47, 0x49, 0x46, 0x38, 0x39, 0x61 ] + +const validate$b = (/** @type {Uint8Array} */ input) => (input[0] === gif87a[0] && + input[1] === gif87a[1] && + input[2] === gif87a[2] && + input[3] === gif87a[3] && + (input[4] === gif87a[4] || + input[4] === gif89a[4]) && + input[5] === gif87a[5]) + +const calculate$b = (/** @type {Uint8Array} */ input) => ({ + height: readUInt16LE(input, 8), + width: readUInt16LE(input, 6), +}) + +let GIF = /* #__PURE__ */Object.freeze({ + __proto__: null, + validate: validate$b, + calculate: calculate$b +}) + +/** + * ICNS Header + * + * | Offset | Size | Purpose | + * | 0 | 4 | Magic literal, must be "icns" (0x69, 0x63, 0x6e, 0x73) | + * | 4 | 4 | Length of file, in bytes, msb first. | + * + */ +const SIZE_HEADER = 4 + 4 // 8 + +const FILE_LENGTH_OFFSET = 4 // MSB => BIG ENDIAN + +/** + * Image Entry + * + * | Offset | Size | Purpose | + * | 0 | 4 | Icon type, see OSType below. | + * | 4 | 4 | Length of data, in bytes (including type and length), msb first. | + * | 8 | n | Icon data | + */ +const ENTRY_LENGTH_OFFSET = 4 // MSB => BIG ENDIAN + +/** @type {Record} */ +const ICON_TYPE_SIZE = { + ICON: 32, + 'ICN#': 32, + // m => 16 x 16 + 'icm#': 16, + icm4: 16, + icm8: 16, + // s => 16 x 16 + 'ics#': 16, + ics4: 16, + ics8: 16, + is32: 16, + s8mk: 16, + icp4: 16, + // l => 32 x 32 + icl4: 32, + icl8: 32, + il32: 32, + l8mk: 32, + icp5: 32, + ic11: 32, + // h => 48 x 48 + ich4: 48, + ich8: 48, + ih32: 48, + h8mk: 48, + // . => 64 x 64 + icp6: 64, + ic12: 32, + // t => 128 x 128 + it32: 128, + t8mk: 128, + ic07: 128, + // . => 256 x 256 + ic08: 256, + ic13: 256, + // . => 512 x 512 + ic09: 512, + ic14: 512, + // . 
=> 1024 x 1024 + ic10: 1024, +} + +function readImageHeader(/** @type {Uint8Array} */ input, /** @type {number} */ imageOffset) { + const imageLengthOffset = imageOffset + ENTRY_LENGTH_OFFSET + + return /** @type {[ string, number ]} */ ([ + toUTF8String(input, imageOffset, imageLengthOffset), + readUInt32BE(input, imageLengthOffset) + ]) +} + +function getImageSize(/** @type {string} */ type) { + const size = ICON_TYPE_SIZE[type] + + return { + width: size, + height: size, + type, + } +} + +const validate$a = (/** @type {Uint8Array} */ input) => toUTF8String(input, 0, 4) === 'icns' + +const calculate$a = (/** @type {Uint8Array} */ input) => { + const inputLength = input.length + const fileLength = readUInt32BE(input, FILE_LENGTH_OFFSET) + + let imageOffset = SIZE_HEADER + let imageHeader = readImageHeader(input, imageOffset) + let imageSize = getImageSize(imageHeader[0]) + + imageOffset += imageHeader[1] + + if (imageOffset === fileLength) { + return imageSize + } + + const result = { + height: imageSize.height, + width: imageSize.width, + images: [ + imageSize + ], + } + + while (imageOffset < fileLength && imageOffset < inputLength) { + imageHeader = readImageHeader(input, imageOffset) + imageSize = getImageSize(imageHeader[0]) + imageOffset += imageHeader[1] + + result.images.push(imageSize) + } + + return result +} + +let ICNS = /* #__PURE__ */Object.freeze({ + __proto__: null, + validate: validate$a, + calculate: calculate$a +}) + +// TODO: this doesn't seem right. 
SIZ marker doesn't have to be right after the SOC +const validate$9 = (/** @type {Uint8Array} */ input) => toHexString(input, 0, 4) === 'ff4fff51' + +const calculate$9 = (/** @type {Uint8Array} */ input) => ({ + height: readUInt32BE(input, 12), + width: readUInt32BE(input, 8), +}) + +let J2C = /* #__PURE__ */Object.freeze({ + __proto__: null, + validate: validate$9, + calculate: calculate$9 +}) + +const BoxTypes = { + ftyp: '66747970', + ihdr: '69686472', + jp2h: '6a703268', + jp__: '6a502020', + rreq: '72726571', + xml_: '786d6c20' +} + +const calculateRREQLength = (/** @type {Uint8Array} */ box) => { + const unit = box[0] + + let offset = 1 + (2 * unit) + + const numStdFlags = readUInt16BE(box, offset) + const flagsLength = numStdFlags * (2 + unit) + + offset = offset + 2 + flagsLength + + const numVendorFeatures = readUInt16BE(box, offset) + const featuresLength = numVendorFeatures * (16 + unit) + + return offset + 2 + featuresLength +} + +const parseIHDR = (/** @type {Uint8Array} */ box) => { + return { + height: readUInt32BE(box, 4), + width: readUInt32BE(box, 8), + } +} + +const validate$8 = (/** @type {Uint8Array} */ input) => { + const signature = toHexString(input, 4, 8) + const signatureLength = readUInt32BE(input, 0) + + if (signature !== BoxTypes.jp__ || signatureLength < 1) { + return false + } + + const ftypeBoxStart = signatureLength + 4 + const ftypBoxLength = readUInt32BE(input, signatureLength) + const ftypBox = input.slice(ftypeBoxStart, ftypeBoxStart + ftypBoxLength) + + return toHexString(ftypBox, 0, 4) === BoxTypes.ftyp +} + +const calculate$8 = (/** @type {Uint8Array} */ input) => { + const signatureLength = readUInt32BE(input, 0) + const ftypBoxLength = readUInt16BE(input, signatureLength + 2) + + let offset = signatureLength + 4 + ftypBoxLength + + const nextBoxType = toHexString(input, offset, offset + 4) + + switch (nextBoxType) { + case BoxTypes.rreq: + // WHAT ARE THESE 4 BYTES????? 
+ // eslint-disable-next-line no-case-declarations + const MAGIC = 4 + + offset = offset + 4 + MAGIC + calculateRREQLength(input.slice(offset + 4)) + + return parseIHDR(input.slice(offset + 8, offset + 24)) + + case BoxTypes.jp2h: + return parseIHDR(input.slice(offset + 8, offset + 24)) + + default: + throw new TypeError('Unsupported header found: ' + toUTF8String(input, offset, offset + 4)) + } +} + +let JP2 = /* #__PURE__ */Object.freeze({ + __proto__: null, + validate: validate$8, + calculate: calculate$8 +}) + +// NOTE: we only support baseline and progressive JPGs here +// due to the structure of the loader class, we only get a buffer +// with a maximum size of 4096 bytes. so if the SOF marker is outside +// of this range we can't detect the file size correctly. + +/** 8-bit representation of "Exif" */ +const EXIF_MARKER = [ 0x45, 0x78, 0x69, 0x66 ] + +const APP1_DATA_SIZE_BYTES = 2 + +const EXIF_HEADER_BYTES = 6 + +const TIFF_BYTE_ALIGN_BYTES = 2 + +const BIG_ENDIAN_BYTE_ALIGN = [ 0x4D, 0x4D ] + +const LITTLE_ENDIAN_BYTE_ALIGN = [ 0x49, 0x49 ] + +// Each entry is exactly 12 bytes +const IDF_ENTRY_BYTES = 12 + +const NUM_DIRECTORY_ENTRIES_BYTES = 2 + +const isEXIF = (/** @type {Uint8Array} */ input) => ( + input[2] === EXIF_MARKER[0] && + input[3] === EXIF_MARKER[1] && + input[4] === EXIF_MARKER[2] && + input[5] === EXIF_MARKER[3] +) + +function extractSize(/** @type {Uint8Array} */ input, /** @type {number} */ index) { + return { + height: readUInt16BE(input, index), + width: readUInt16BE(input, index + 2), + } +} + +function extractOrientation(/** @type {Uint8Array} */ exifBlock, /** @type {boolean} */ isBigEndian) { + // TODO: assert that this contains 0x002A + // let STATIC_MOTOROLA_TIFF_HEADER_BYTES = 2 + // let TIFF_IMAGE_FILE_DIRECTORY_BYTES = 4 + // TODO: derive from TIFF_IMAGE_FILE_DIRECTORY_BYTES + const idfOffset = 8 + + // IDF offset works from right after the header bytes + // (so the offset includes the tiff byte align) + const offset = 
EXIF_HEADER_BYTES + idfOffset + + const idfDirectoryEntries = readUInt(exifBlock, 16, offset, isBigEndian) + + for (let directoryEntryNumber = 0; directoryEntryNumber < idfDirectoryEntries; directoryEntryNumber++) { + const start = offset + NUM_DIRECTORY_ENTRIES_BYTES + (directoryEntryNumber * IDF_ENTRY_BYTES) + const end = start + IDF_ENTRY_BYTES + + // Skip on corrupt EXIF blocks + if (start > exifBlock.length) { + return + } + + const block = exifBlock.slice(start, end) + const tagNumber = readUInt(block, 16, 0, isBigEndian) + + // 0x0112 (decimal: 274) is the `orientation` tag ID + if (tagNumber === 274) { + const dataFormat = readUInt(block, 16, 2, isBigEndian) + + if (dataFormat !== 3) { + return + } + + // unsigned int has 2 bytes per component + // if there would be more than 4 bytes in total it's a pointer + const numberOfComponents = readUInt(block, 32, 4, isBigEndian) + + if (numberOfComponents !== 1) { + return + } + + return readUInt(block, 16, 8, isBigEndian) + } + } +} + +function validateExifBlock(/** @type {Uint8Array} */ input, /** @type {number} */ index) { + // Skip APP1 Data Size + const exifBlock = input.slice(APP1_DATA_SIZE_BYTES, index) + + // Consider byte alignment + const byteAlign = exifBlock.slice(EXIF_HEADER_BYTES, EXIF_HEADER_BYTES + TIFF_BYTE_ALIGN_BYTES) + + // Ignore Empty EXIF. 
Validate byte alignment + const isBigEndian = byteAlign[0] === BIG_ENDIAN_BYTE_ALIGN[0] && byteAlign[1] === BIG_ENDIAN_BYTE_ALIGN[1] + + const isLittleEndian = byteAlign[0] === LITTLE_ENDIAN_BYTE_ALIGN[0] && byteAlign[1] === LITTLE_ENDIAN_BYTE_ALIGN[1] + + if (isBigEndian || isLittleEndian) { + return extractOrientation(exifBlock, isBigEndian) + } +} + +function validateInput(/** @type {Uint8Array} */ input, /** @type {number} */ index) { + // index should be within buffer limits + if (index > input.length) { + throw new TypeError('Corrupt JPG, exceeded buffer limits') + } + + // Every JPEG block must begin with a 0xFF + if (input[index] !== 0xFF) { + throw new TypeError('Invalid JPG, marker table corrupted') + } +} + +const validate$7 = (/** @type {Uint8Array} */ input) => input[0] === 0xFF && input[1] === 0xD8 +const calculate$7 = (/** @type {Uint8Array} */ input) => { + // Skip 4 chars, they are for signature + input = input.slice(4) + + let orientation + let next + + while (input.length) { + // read length of the next block + const i = readUInt16BE(input, 0) + + if (isEXIF(input)) { + orientation = validateExifBlock(input, i) + } + + // ensure correct format + validateInput(input, i) + + // 0xFFC0 is baseline standard(SOF) + // 0xFFC1 is baseline optimized(SOF) + // 0xFFC2 is progressive(SOF2) + next = input[i + 1] + + if (next === 0xC0 || next === 0xC1 || next === 0xC2) { + const size = extractSize(input, i + 5) + + // TODO: is orientation=0 a valid answer here? 
+ if (!orientation) { + return size + } + + return { + height: size.height, + orientation, + width: size.width + } + } + + // move to the next block + input = input.slice(i + 2) + } + + throw new TypeError('Invalid JPG, no size found') +} + +let JPG = /* #__PURE__ */Object.freeze({ + __proto__: null, + validate: validate$7, + calculate: calculate$7 +}) + +const validate$6 = (/** @type {Uint8Array} */ input) => toUTF8String(input, 1, 7) === 'KTX 11' +const calculate$6 = (/** @type {Uint8Array} */ input) => ({ + height: readUInt32LE(input, 40), + width: readUInt32LE(input, 36), +}) + +let KTX = /* #__PURE__ */Object.freeze({ + __proto__: null, + validate: validate$6, + calculate: calculate$6 +}) + +const pngSignature = 'PNG\r\n\x1a\n' +const pngImageHeaderChunkName = 'IHDR' + +// Used to detect "fried" png's: http://www.jongware.com/pngdefry.html +const pngFriedChunkName = 'CgBI' + +const validate$5 = (/** @type {Uint8Array} */ input) => { + if (pngSignature === toUTF8String(input, 1, 8)) { + let chunkName = toUTF8String(input, 12, 16) + + if (chunkName === pngFriedChunkName) { + chunkName = toUTF8String(input, 28, 32) + } + + if (chunkName !== pngImageHeaderChunkName) { + // throw new TypeError('Invalid PNG') + return false + } + + return true + } + + return false +} + +const calculate$5 = (/** @type {Uint8Array} */ input) => (toUTF8String(input, 12, 16) === pngFriedChunkName + ? 
{ + height: readUInt32BE(input, 36), + width: readUInt32BE(input, 32), + } + : { + height: readUInt32BE(input, 20), + width: readUInt32BE(input, 16), + }) + +let PNG = /* #__PURE__ */Object.freeze({ + __proto__: null, + validate: validate$5, + calculate: calculate$5 +}) + +/** @type {Record<string, string>} */ +const PNMTypes = { + P1: 'pbm/ascii', + P2: 'pgm/ascii', + P3: 'ppm/ascii', + P4: 'pbm', + P5: 'pgm', + P6: 'ppm', + P7: 'pam', + PF: 'pfm', +} + +/** @type {Record<string, (lines: string[]) => { width: number, height: number }>} */ +const handlers = { + default: (lines) => { + /** @type {string[]} */ + let dimensions = [] + + while (lines.length > 0) { + const line = /** @type {string} */ (lines.shift()) + + if (line[0] === '#') { + continue + } + + dimensions = line.split(' ') + + break + } + + if (dimensions.length === 2) { + return { + height: parseInt(dimensions[1], 10), + width: parseInt(dimensions[0], 10), + } + } else { + throw new TypeError('Invalid PNM') + } + }, + pam: (lines) => { + const size = /** @type {{ width: number, height: number }} */ ({}) + + while (lines.length > 0) { + const line = /** @type {string} */ (lines.shift()) + + if (line.length > 16 || line.charCodeAt(0) > 128) { + continue + } + + const [ key, value ] = line.split(' ') + + if (key && value) { + size[key.toLowerCase()] = parseInt(value, 10) + } + + if (size.height && size.width) { + break + } + } + + if (size.height && size.width) { + return { + height: size.height, + width: size.width + } + } else { + throw new TypeError('Invalid PAM') + } + } +} + +const validate$4 = (/** @type {Uint8Array} */ input) => toUTF8String(input, 0, 2) in PNMTypes + +const calculate$4 = (/** @type {Uint8Array} */ input) => { + const signature = toUTF8String(input, 0, 2) + const type = PNMTypes[signature] + + // TODO: this probably generates garbage.
move to a stream based parser + const lines = toUTF8String(input, 3).split(/[\r\n]+/) + const handler = handlers[type] || handlers.default + + return handler(lines) +} + +let PNM = /* #__PURE__ */Object.freeze({ + __proto__: null, + validate: validate$4, + calculate: calculate$4 +}) + +const validate$3 = (/** @type {Uint8Array} */ input) => toUTF8String(input, 0, 4) === '8BPS' + +const calculate$3 = (/** @type {Uint8Array} */ input) => ({ + height: readUInt32BE(input, 14), + width: readUInt32BE(input, 18) +}) + +let PSD = /* #__PURE__ */Object.freeze({ + __proto__: null, + validate: validate$3, + calculate: calculate$3 +}) + +const svgReg = /<svg\s([^>"']|"[^"]*"|'[^']*')*>/ + +const extractorRegExps = { + height: /\sheight=(['"])([^%]+?)\1/, + root: svgReg, + viewbox: /\sviewBox=(['"])(.+?)\1/i, + width: /\swidth=(['"])([^%]+?)\1/, +} + +const INCH_CM = 2.54 + +const units = { + in: 96, + cm: 96 / INCH_CM, + em: 16, + ex: 8, + m: 96 / INCH_CM * 100, + mm: 96 / INCH_CM / 10, + pc: 96 / 72 / 12, + pt: 96 / 72, + px: 1 +} + +const unitsReg = new RegExp(`^([0-9.]+(?:e\\d+)?)(${Object.keys(units).join('|')})?$`) + +function parseLength(/** @type {string} */ len) { + const m = unitsReg.exec(len) + + if (!m) { + return 0 + } + + return Math.round(Number(m[1]) * (units[m[2]] || 1)) +} + +function parseViewbox(/** @type {string} */ viewbox) { + const bounds = viewbox.split(' ') + + return { + height: parseLength(bounds[3]), + width: parseLength(bounds[2]), + } +} + +function parseAttributes(/** @type {string} */ root) { + const width = root.match(extractorRegExps.width) + const height = root.match(extractorRegExps.height) + const viewbox = root.match(extractorRegExps.viewbox) + + return { + height: height ? parseLength(height[2]) : 0, + viewBox: viewbox ? parseViewbox(viewbox[2]) : { width: 0, height: 0 }, + width: width ?
parseLength(width[2]) : 0, + } +} + +function calculateByDimensions(/** @type {ReturnType<typeof parseAttributes>} */ attrs) { + return { + height: attrs.height, + width: attrs.width, + } +} + +function calculateByViewbox(/** @type {ReturnType<typeof parseAttributes>} */ attrs, /** @type {ReturnType<typeof parseAttributes>['viewBox']} */viewbox) { + const ratio = viewbox.width / viewbox.height + + if (attrs.width) { + return { + height: Math.floor(attrs.width / ratio), + width: attrs.width, + } + } + + if (attrs.height) { + return { + height: attrs.height, + width: Math.floor(attrs.height * ratio), + } + } + + return { + height: viewbox.height, + width: viewbox.width, + } +} + +const validate$2 = (/** @type {Uint8Array} */ input) => svgReg.test(toUTF8String(input)) + +const calculate$2 = (/** @type {Uint8Array} */ input) => { + const root = toUTF8String(input).match(extractorRegExps.root) + + if (root) { + const attrs = parseAttributes(root[0]) + + if (attrs.width && attrs.height) { + return calculateByDimensions(attrs) + } + + if (attrs.viewBox) { + return calculateByViewbox(attrs, attrs.viewBox) + } + } + + throw new TypeError('Invalid SVG') +} + +let SVG = /* #__PURE__ */Object.freeze({ + __proto__: null, + validate: validate$2, + calculate: calculate$2 +}) + +const validate$1 = (/** @type {Uint8Array} */ input) => (readUInt16LE(input, 0) === 0 && readUInt16LE(input, 4) === 0) + +const calculate$1 = (/** @type {Uint8Array} */ input) => ({ + height: readUInt16LE(input, 14), + width: readUInt16LE(input, 12), +}) + +let TGA = /* #__PURE__ */Object.freeze({ + __proto__: null, + validate: validate$1, + calculate: calculate$1 +}) + +// based on https://developers.google.com/speed/webp/docs/riff_container +function calculateExtended(/** @type {Uint8Array} */ input) { + return { + height: 1 + readUInt24LE(input, 7), + width: 1 + readUInt24LE(input, 4), + } +} + +function calculateLossless(/** @type {Uint8Array} */ input) { + return { + height: 1 + (((input[4] & 0xF) << 10) | (input[3] << 2) | ((input[2] & 0xC0) >> 6)), + width: 1 +
(((input[2] & 0x3F) << 8) | input[1]), + } +} + +function calculateLossy(/** @type {Uint8Array} */ input) { + // `& 0x3fff` returns the last 14 bits + // TO-DO: include webp scaling in the calculations + return { + height: readInt16LE(input, 8) & 0x3fff, + width: readInt16LE(input, 6) & 0x3fff, + } +} + +const validate = (/** @type {Uint8Array} */ input) => { + const riffHeader = toUTF8String(input, 0, 4) === 'RIFF' + const webpHeader = toUTF8String(input, 8, 12) === 'WEBP' + const vp8Header = toUTF8String(input, 12, 15) === 'VP8' + + return (riffHeader && webpHeader && vp8Header) +} + +const calculate = (/** @type {Uint8Array} */ input) => { + const chunkHeader = toUTF8String(input, 12, 16) + + input = input.slice(20, 30) + + // Extended webp stream signature + if (chunkHeader === 'VP8X') { + const extendedHeader = input[0] + const validStart = (extendedHeader & 0xc0) === 0 + const validEnd = (extendedHeader & 0x01) === 0 + + if (validStart && validEnd) { + return calculateExtended(input) + } else { + throw new TypeError('Invalid WebP') + } + } + + // Lossy webp stream signature + if (chunkHeader === 'VP8 ' && input[0] !== 0x2f) { + return calculateLossy(input) + } + + // Lossless webp stream signature + const signature = toHexString(input, 3, 6) + + if (chunkHeader === 'VP8L' && signature !== '9d012a') { + return calculateLossless(input) + } + + throw new TypeError('Invalid WebP') +} + +let WEBP = /* #__PURE__ */Object.freeze({ + __proto__: null, + validate, + calculate +}) + +/** @type {import('./types.d.ts').ImageTypes} */ +export const types = { + bmp: BMP, + cur: CUR, + dds: DDS, + gif: GIF, + icns: ICNS, + ico: ICO, + j2c: J2C, + jp2: JP2, + jpg: JPG, + ktx: KTX, + png: PNG, + pnm: PNM, + psd: PSD, + svg: SVG, + tga: TGA, + webp: WEBP, +} diff --git a/packages/get-size/lib/utils/ReadableStreamPolyfill.d.ts b/packages/get-size/lib/utils/ReadableStreamPolyfill.d.ts new file mode 100644 index 0000000..e69de29 diff --git
a/packages/get-size/lib/utils/ReadableStreamPolyfill.js b/packages/get-size/lib/utils/ReadableStreamPolyfill.js new file mode 100644 index 0000000..65b0785 --- /dev/null +++ b/packages/get-size/lib/utils/ReadableStreamPolyfill.js @@ -0,0 +1,27 @@ +{ + const { prototype } = ReadableStream + const { asyncIterator } = Symbol + + if (!(asyncIterator in prototype)) { + Object.assign(prototype, { + /** @this {ReadableStream} */ + async * [asyncIterator]() { + const reader = this.getReader() + + try { + while (true) { + const { done, value } = await reader.read() + + if (done) { + return + } + + yield value + } + } finally { + reader.releaseLock() + } + }, + }) + } +} diff --git a/packages/get-size/lib/utils/getMergedUint8Array.d.ts b/packages/get-size/lib/utils/getMergedUint8Array.d.ts new file mode 100644 index 0000000..2e690c4 --- /dev/null +++ b/packages/get-size/lib/utils/getMergedUint8Array.d.ts @@ -0,0 +1 @@ +export default function getMergedUint8Array(array1: Uint8Array, array2: Uint8Array): Uint8Array diff --git a/packages/get-size/lib/utils/getMergedUint8Array.js b/packages/get-size/lib/utils/getMergedUint8Array.js new file mode 100644 index 0000000..0ae030e --- /dev/null +++ b/packages/get-size/lib/utils/getMergedUint8Array.js @@ -0,0 +1,8 @@ +export default function getMergedUint8Array(/** @type {Uint8Array} */ array1, /** @type {Uint8Array} */ array2) { + const merged = new Uint8Array(array1.length + array2.length) + + merged.set(array1) + merged.set(array2, array1.length) + + return merged +} diff --git a/packages/get-size/package.json b/packages/get-size/package.json new file mode 100644 index 0000000..ad90a0a --- /dev/null +++ b/packages/get-size/package.json @@ -0,0 +1,42 @@ +{ + "$schema": "https://json.schemastore.org/package.json", + "name": "@astropub/get-size", + "version": "0.1.0", + "type": "module", + "main": "./lib/getSize.js", + "types": "./lib/getSize.d.ts", + "exports": { + ".": { + "import": "./lib/getSize.js", + "types": "./lib/getSize.d.ts" 
+ }, + "./from/AsyncIterable": { + "import": "./lib/from/AsyncIterable.js", + "types": "./lib/from/AsyncIterable.d.ts" + }, + "./from/FileSync": { + "import": "./lib/from/FileSync.js", + "types": "./lib/from/FileSync.d.ts" + }, + "./from/Iterable": { + "import": "./lib/from/Iterable.js", + "types": "./lib/from/Iterable.d.ts" + }, + "./from/ReadableStream": { + "import": "./lib/from/ReadableStream.js", + "types": "./lib/from/ReadableStream.d.ts" + }, + "./from/Uint8Array": { + "import": "./lib/from/Uint8Array.js", + "types": "./lib/from/Uint8Array.d.ts" + }, + "./from/URL": { + "import": "./lib/from/URL.js", + "types": "./lib/from/URL.d.ts" + } + }, + "author": "David DeSandro", + "license": "MIT", + "homepage": "https://github.com/astro-community/get-size", + "bugs": "https://github.com/astro-community/get-size/issues" +} diff --git a/scripts/check.js b/scripts/check.js new file mode 100644 index 0000000..84520f4 --- /dev/null +++ b/scripts/check.js @@ -0,0 +1,8 @@ +import { spawnSync } from 'node:child_process' + +spawnSync('npx', [ 'eslint', 'packages/*/lib/**/*.js', ...process.argv.slice(2) ], { + cwd: process.cwd(), + env: process.env, + stdio: 'inherit', + encoding: 'utf-8', +}) diff --git a/scripts/release.js b/scripts/release.js new file mode 100644 index 0000000..7c1a29a --- /dev/null +++ b/scripts/release.js @@ -0,0 +1,18 @@ +import { execSync, spawnSync } from 'node:child_process' + +try { + const pkgs = JSON.parse(execSync('npm pack --workspaces --json').toString()) + + for (const pkg of pkgs) { + spawnSync('npm', [ 'publish', pkg.filename, '--access=public', ...process.argv.slice(2) ], { + cwd: process.cwd(), + env: process.env, + stdio: 'inherit', + encoding: 'utf-8', + }) + + execSync(`rm -rf ${pkg.filename}`) + } +} catch (error) { + console.error(error) +} diff --git a/tsconfig.json b/tsconfig.json new file mode 100644 index 0000000..41a26d2 --- /dev/null +++ b/tsconfig.json @@ -0,0 +1,28 @@ +{ + "compilerOptions": { + /* Type Checking */ + 
"strict": true, + "exactOptionalPropertyTypes": true, + + /* Modules */ + "allowArbitraryExtensions": true, + "allowImportingTsExtensions": true, + "module": "ESNext", + "moduleDetection": "force", + "moduleResolution": "NodeNext", + + /* Emit */ + "noEmit": true, + + /* Interop Constraints */ + "esModuleInterop": true, + "forceConsistentCasingInFileNames": true, + "verbatimModuleSyntax": true, + + /* Completeness */ + "skipLibCheck": true, + + /* Language and Environment */ + "target": "ESNext", + } +}