mirror of https://github.com/NapNeko/NapCatQQ.git
synced 2025-12-19 13:10:16 +08:00

feat

This commit is contained in:
parent c9df57d16a
commit 019b981022
@@ -50,7 +50,6 @@
         "fast-xml-parser": "^4.3.6",
         "file-type": "^20.0.0",
         "globals": "^16.0.0",
-        "image-size": "^1.1.1",
         "json5": "^2.2.3",
         "multer": "^1.4.5-lts.1",
         "typescript": "^5.3.3",
@@ -4,10 +4,10 @@ import { execFile } from 'child_process';
 import { promisify } from 'util';
 import type { VideoInfo } from './video';
 import { fileTypeFromFile } from 'file-type';
-import imageSize from 'image-size';
 import { fileURLToPath } from 'node:url';
 import { platform } from 'node:os';
 import { LogWrapper } from './log';
+import { imageSizeFromFile } from '@/image-size/fromFile';
 const currentPath = dirname(fileURLToPath(import.meta.url));
 const execFileAsync = promisify(execFile);
 const getFFmpegPath = (tool: string): string => {
@@ -157,7 +157,7 @@ export class FFmpegService {
         try {
             await this.extractThumbnail(videoPath, thumbnailPath);
             // Get the image dimensions
-            const dimensions = imageSize(thumbnailPath);
+            const dimensions = await imageSizeFromFile(thumbnailPath);

             return {
                 format: fileType?.ext ?? 'mp4',
@@ -17,7 +17,6 @@ import fs from 'fs';
 import fsPromises from 'fs/promises';
 import { InstanceContext, NapCatCore, SearchResultItem } from '@/core';
 import { fileTypeFromFile } from 'file-type';
-import imageSize from 'image-size';
 import { ISizeCalculationResult } from 'image-size/dist/types/interface';
 import { RkeyManager } from '@/core/helper/rkey';
 import { calculateFileMD5 } from '@/common/file';
@@ -28,6 +27,7 @@ import { SendMessageContext } from '@/onebot/api';
 import { getFileTypeForSendType } from '../helper/msg';
 import { FFmpegService } from '@/common/ffmpeg';
 import { rkeyDataType } from '../types/file';
+import { imageSizeFromFile } from '@/image-size/fromFile';

 export class NTQQFileApi {
     context: InstanceContext;
@@ -365,17 +365,18 @@ export class NTQQFileApi {
     }

     async getImageSize(filePath: string): Promise<ISizeCalculationResult> {
-        return new Promise((resolve, reject) => {
-            imageSize(filePath, (err: Error | null, dimensions) => {
-                if (err) {
-                    reject(new Error(err.message));
-                } else if (!dimensions) {
-                    reject(new Error('获取图片尺寸失败'));
-                } else {
-                    resolve(dimensions);
-                }
-            });
-        });
+        return await imageSizeFromFile(filePath)
+        // return new Promise((resolve, reject) => {
+        //     imageSize(filePath, (err: Error | null, dimensions) => {
+        //         if (err) {
+        //             reject(new Error(err.message));
+        //         } else if (!dimensions) {
+        //             reject(new Error('获取图片尺寸失败'));
+        //         } else {
+        //             resolve(dimensions);
+        //         }
+        //     });
+        // });
     }

     async searchForFile(keys: string[]): Promise<SearchResultItem | undefined> {
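For context, a hedged sketch of what callers see after this change: `getImageSize` now just awaits the vendored helper, so the promise-based call shape is unchanged. The wrapper below is illustrative only and not part of the commit; the function name and logging are made up.

```typescript
// Illustrative only. Assumes the '@/image-size/fromFile' path alias used
// elsewhere in this diff resolves to the vendored module added below.
import { imageSizeFromFile } from '@/image-size/fromFile';

async function logThumbnailSize(thumbnailPath: string): Promise<void> {
    // Resolves with at least { width, height }; rejects on unsupported or corrupt files.
    const { width, height, type } = await imageSizeFromFile(thumbnailPath);
    console.log(`${thumbnailPath}: ${width}x${height} (${type ?? 'unknown'})`);
}
```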
9 src/image-size/LICENSE Normal file
@@ -0,0 +1,9 @@
The MIT License (MIT)

Copyright © 2013-Present Aditya Yadav, http://netroy.in

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
198 src/image-size/Readme.md Normal file
@@ -0,0 +1,198 @@
# image-size

[CircleCI](https://circleci.com/gh/image-size/image-size)
[npm](https://www.npmjs.com/package/image-size)
[Downloads (npm-stat)](http://npm-stat.com/charts.html?package=image-size&author=netroy&from=&to=)

Fast, lightweight NodeJS package to get dimensions of any image file or buffer.

## Key Features
- Zero dependencies
- Supports all major image formats
- Works with both files and buffers
- Minimal memory footprint - reads only image headers
- ESM and CommonJS support
- TypeScript types included

## Supported formats

- BMP
- CUR
- DDS
- GIF
- HEIC (HEIF, AVCI, AVIF)
- ICNS
- ICO
- J2C
- JPEG-2000 (JP2)
- JPEG
- JPEG-XL
- KTX (1 and 2)
- PNG
- PNM (PAM, PBM, PFM, PGM, PPM)
- PSD
- SVG
- TGA
- TIFF
- WebP

## Installation

```shell
npm install image-size
# or
yarn add image-size
# or
pnpm add image-size
```

## Usage

### Passing in a Buffer/Uint8Array
Best for streams, network requests, or when you already have the image data in memory.

```javascript
import { imageSize } from 'image-size'
// or
const { imageSize } = require('image-size')

const dimensions = imageSize(buffer)
console.log(dimensions.width, dimensions.height)
```

### Reading from a file
Best for local files. Returns a promise.

```javascript
import { imageSizeFromFile } from 'image-size/fromFile'
// or
const { imageSizeFromFile } = require('image-size/fromFile')

const dimensions = await imageSizeFromFile('photos/image.jpg')
console.log(dimensions.width, dimensions.height)
```

Note: Reading from files has a default concurrency limit of **100**.
To change this limit, you can call the `setConcurrency` function like this:

```javascript
import { setConcurrency } from 'image-size/fromFile'
// or
const { setConcurrency } = require('image-size/fromFile')
setConcurrency(123456)
```

### Reading from a file synchronously (not recommended) ⚠️
v1.x of this library had a sync API that internally used sync file reads.

This isn't recommended because it blocks the Node.js main thread, which reduces performance and prevents the library from being used concurrently.

However, if you still need to use this package synchronously, you can read the file synchronously into a buffer and then pass the buffer to this library.

```javascript
import { readFileSync } from 'node:fs'
import { imageSize } from 'image-size'

const buffer = readFileSync('photos/image.jpg')
const dimensions = imageSize(buffer)
console.log(dimensions.width, dimensions.height)
```

### Command Line
Useful for quick checks.

```shell
npx image-size image1.jpg image2.png
```

### Multi-size

If the target file/buffer is an HEIF, an ICO, or a CUR file, the `width` and `height` will be those of the largest image in the set.

An additional `images` array is available and returns the dimensions of all the available images.

```javascript
import { imageSizeFromFile } from 'image-size/fromFile'
// or
const { imageSizeFromFile } = require('image-size/fromFile')

const { images } = await imageSizeFromFile('images/multi-size.ico')
for (const dimensions of images) {
  console.log(dimensions.width, dimensions.height)
}
```

### Using a URL

```javascript
import url from 'node:url'
import http from 'node:http'
import { imageSize } from 'image-size'

const imgUrl = 'http://my-amazing-website.com/image.jpeg'
const options = url.parse(imgUrl)

http.get(options, function (response) {
  const chunks = []
  response
    .on('data', function (chunk) {
      chunks.push(chunk)
    })
    .on('end', function () {
      const buffer = Buffer.concat(chunks)
      console.log(imageSize(buffer))
    })
})
```

### Disabling certain image types

```javascript
import { disableTypes } from 'image-size'
// or
const { disableTypes } = require('image-size')

disableTypes(['tiff', 'ico'])
```

### JPEG image orientation

If the orientation is present in the JPEG EXIF metadata, it will be returned by the function. The orientation value is a [number between 1 and 8](https://exiftool.org/TagNames/EXIF.html#:~:text=0x0112,8%20=%20Rotate%20270%20CW) representing a type of orientation.

```javascript
import { imageSizeFromFile } from 'image-size/fromFile'
// or
const { imageSizeFromFile } = require('image-size/fromFile')

const { width, height, orientation } = await imageSizeFromFile('images/photo.jpeg')
console.log(width, height, orientation)
```

## Limitations

1. **Partial File Reading**
   - Only reads image headers, not full files
   - Some corrupted images might still report dimensions

2. **SVG Limitations**
   - Only supports pixel dimensions and viewBox
   - Percentage values not supported

3. **File Access**
   - Reading from files has a default concurrency limit of 100
   - Can be adjusted using `setConcurrency()`

4. **Buffer Requirements**
   - Some formats (like TIFF) require the full header in buffer
   - Streaming partial buffers may not work for all formats (see the sketch after this list)
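Editor's sketch (not part of the original README): one way to work within limitations 1 and 4 is to read only a fixed-size prefix of the file yourself and pass that buffer to `imageSize`. The 512 KiB figure mirrors the vendored module's own `MaxInputSize` default, and the helper name is made up.

```typescript
import { open } from 'node:fs/promises'
import { imageSize } from 'image-size'

// Hypothetical helper: read at most the first 512 KiB and size the image from that buffer.
async function headerOnlySize(filePath: string) {
  const handle = await open(filePath, 'r')
  try {
    const header = new Uint8Array(512 * 1024)
    const { bytesRead } = await handle.read(header, 0, header.length, 0)
    // Formats such as TIFF may still need more of the file than this prefix contains.
    return imageSize(header.subarray(0, bytesRead))
  } finally {
    await handle.close()
  }
}
```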
## License

MIT

## Credits

not a direct port, but an attempt to have something like
[dabble's imagesize](https://github.com/dabble/imagesize/blob/master/lib/image_size.rb) as a node module.

## [Contributors](Contributors.md)
26 src/image-size/detector.ts Normal file
@@ -0,0 +1,26 @@
import type { imageType } from './types/index'
import { typeHandlers, types } from './types/index'

// This map helps avoid validating for every single image type
const firstBytes = new Map<number, imageType>([
  [0x00, 'heif'],
  [0x42, 'bmp'],
  [0x47, 'gif'],
  [0x49, 'tiff'],
  [0x4d, 'tiff'],
  [0x52, 'webp'],
  [0x89, 'png'],
  [0xff, 'jpg'],
])

export function detector(input: Uint8Array): imageType | undefined {
  const byte = input[0];
  if (byte === undefined) {
    return undefined
  }
  const type = firstBytes.get(byte)
  if (type && typeHandlers.get(type)!.validate(input)) {
    return type
  }
  return types.find((type) => typeHandlers.get(type)!.validate(input))
}
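A hedged usage sketch (not part of the commit): the detector alone can sniff an image type from a buffer's first bytes. The relative import path and the sample bytes are assumptions for illustration.

```typescript
import { detector } from './detector'

// 'GIF89a' signature bytes: enough for type detection
// (computing the actual size would need more of the file).
const gifHeader = new TextEncoder().encode('GIF89a')
console.log(detector(gifHeader)) // expected: 'gif'
```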
62 src/image-size/fromFile.ts Normal file
@@ -0,0 +1,62 @@
import * as fs from 'node:fs'
import * as path from 'node:path'

import { imageSize } from './lookup'
import type { ISizeCalculationResult } from './types/interface'

// Maximum input size, with a default of 512 kilobytes.
// TO-DO: make this adaptive based on the initial signature of the image
const MaxInputSize = 512 * 1024

type Job = {
  filePath: string
  resolve: (value: ISizeCalculationResult) => void
  reject: (error: Error) => void
}

// This queue is for async `fs` operations, to avoid reaching file-descriptor limits
const queue: Job[] = []

let concurrency = 100
export const setConcurrency = (c: number): void => {
  concurrency = c
}

const processQueue = async () => {
  const jobs = queue.splice(0, concurrency)
  const promises = jobs.map(async ({ filePath, resolve, reject }) => {
    let handle: fs.promises.FileHandle
    try {
      handle = await fs.promises.open(path.resolve(filePath), 'r')
    } catch (err) {
      return reject(err as Error)
    }
    try {
      const { size } = await handle.stat()
      if (size <= 0) {
        throw new Error('Empty file')
      }
      const inputSize = Math.min(size, MaxInputSize)
      const input = new Uint8Array(inputSize)
      await handle.read(input, 0, inputSize, 0)
      resolve(imageSize(input))
    } catch (err) {
      reject(err as Error)
    } finally {
      await handle.close()
    }
  })

  await Promise.allSettled(promises)

  if (queue.length) setTimeout(processQueue, 100)
}

/**
 * @param {string} filePath - relative/absolute path of the image file
 */
export const imageSizeFromFile = async (filePath: string) =>
  new Promise<ISizeCalculationResult>((resolve, reject) => {
    queue.push({ filePath, resolve, reject })
    processQueue()
  })
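Illustrative sketch (not in the commit): because of the queue above, a large batch of calls is processed `concurrency` files at a time, so a loop like this will not exhaust file descriptors. The batch-helper name and the value 50 are made up.

```typescript
import { imageSizeFromFile, setConcurrency } from './fromFile'

// Hypothetical batch helper: size many files without opening them all at once.
async function sizeAll(paths: string[]) {
  setConcurrency(50) // optional: shrink the batch size from the default of 100
  const results = await Promise.allSettled(paths.map((p) => imageSizeFromFile(p)))
  results.forEach((result, i) => {
    if (result.status === 'fulfilled') {
      console.log(paths[i], result.value.width, result.value.height)
    }
  })
}
```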
57 src/image-size/lookup.ts Normal file
@@ -0,0 +1,57 @@
import { detector } from './detector'
import type { imageType } from './types/index'
import { typeHandlers } from './types/index'
import type { ISizeCalculationResult } from './types/interface'

type Options = {
  disabledTypes: imageType[]
}

const globalOptions: Options = {
  disabledTypes: [],
}

/**
 * Return size information based on an Uint8Array
 *
 * @param {Uint8Array} input
 * @returns {ISizeCalculationResult}
 */
export function imageSize(input: Uint8Array): ISizeCalculationResult {
  // detect the file type... don't rely on the extension
  const type = detector(input)

  if (typeof type !== 'undefined') {
    if (globalOptions.disabledTypes.indexOf(type) > -1) {
      throw new TypeError(`disabled file type: ${type}`)
    }

    // find an appropriate handler for this file type
    const size = typeHandlers.get(type)!.calculate(input)
    if (size !== undefined) {
      size.type = size.type ?? type

      // If multiple images, find the largest by area
      if (size.images && size.images.length > 1) {
        const largestImage = size.images.reduce((largest: any, current: { width: number; height: number }) => {
          return current.width * current.height > largest!.width * largest!.height
            ? current
            : largest
        }, size.images[0])

        // Ensure the main result is the largest image
        size.width = largestImage!.width
        size.height = largestImage!.height
      }

      return size
    }
  }

  // throw up, if we don't understand the file
  throw new TypeError(`unsupported file type: ${type}`)
}

export const disableTypes = (types: imageType[]): void => {
  globalOptions.disabledTypes = types
}
11 src/image-size/types/bmp.ts Normal file
@@ -0,0 +1,11 @@
import type { IImage } from './interface'
import { readInt32LE, readUInt32LE, toUTF8String } from './utils'

export const BMP: IImage = {
  validate: (input) => toUTF8String(input, 0, 2) === 'BM',

  calculate: (input) => ({
    height: Math.abs(readInt32LE(input, 22)),
    width: readUInt32LE(input, 18),
  }),
}
12 src/image-size/types/gif.ts Normal file
@@ -0,0 +1,12 @@
import type { IImage } from './interface'
import { readUInt16LE, toUTF8String } from './utils'

const gifRegexp = /^GIF8[79]a/
export const GIF: IImage = {
  validate: (input) => gifRegexp.test(toUTF8String(input, 0, 6)),

  calculate: (input) => ({
    height: readUInt16LE(input, 8),
    width: readUInt16LE(input, 6),
  }),
}
74 src/image-size/types/heif.ts Normal file
@@ -0,0 +1,74 @@
import type { IImage, ISize } from './interface'
import { findBox, readUInt32BE, toUTF8String } from './utils'

const brandMap = {
  avif: 'avif',
  mif1: 'heif',
  msf1: 'heif', // heif-sequence
  heic: 'heic',
  heix: 'heic',
  hevc: 'heic', // heic-sequence
  hevx: 'heic', // heic-sequence
}

export const HEIF: IImage = {
  validate(input) {
    const boxType = toUTF8String(input, 4, 8)
    if (boxType !== 'ftyp') return false

    const ftypBox = findBox(input, 'ftyp', 0)
    if (!ftypBox) return false

    const brand = toUTF8String(input, ftypBox.offset + 8, ftypBox.offset + 12)
    return brand in brandMap
  },

  calculate(input) {
    // Based on https://nokiatech.github.io/heif/technical.html
    const metaBox = findBox(input, 'meta', 0)
    const iprpBox = metaBox && findBox(input, 'iprp', metaBox.offset + 12)
    const ipcoBox = iprpBox && findBox(input, 'ipco', iprpBox.offset + 8)

    if (!ipcoBox) {
      throw new TypeError('Invalid HEIF, no ipco box found')
    }

    const type = toUTF8String(input, 8, 12)

    const images: ISize[] = []
    let currentOffset = ipcoBox.offset + 8

    // Find all ispe and clap boxes
    while (currentOffset < ipcoBox.offset + ipcoBox.size) {
      const ispeBox = findBox(input, 'ispe', currentOffset)
      if (!ispeBox) break

      const rawWidth = readUInt32BE(input, ispeBox.offset + 12)
      const rawHeight = readUInt32BE(input, ispeBox.offset + 16)

      // Look for a clap box after the ispe box
      const clapBox = findBox(input, 'clap', currentOffset)
      let width = rawWidth
      let height = rawHeight
      if (clapBox && clapBox.offset < ipcoBox.offset + ipcoBox.size) {
        const cropRight = readUInt32BE(input, clapBox.offset + 12)
        width = rawWidth - cropRight
      }

      images.push({ height, width })

      currentOffset = ispeBox.offset + ispeBox.size
    }

    if (images.length === 0) {
      throw new TypeError('Invalid HEIF, no sizes found')
    }

    return {
      width: images[0]!.width,
      height: images[0]!.height,
      type,
      ...(images.length > 1 ? { images } : {}),
    }
  },
}
21 src/image-size/types/index.ts Normal file
@@ -0,0 +1,21 @@
// load all available handlers explicitly for browserify support
import { BMP } from './bmp'
import { GIF } from './gif'
import { HEIF } from './heif'
import { JPG } from './jpg'
import { PNG } from './png'
import { TIFF } from './tiff'
import { WEBP } from './webp'

export const typeHandlers = new Map([
  ['bmp', BMP],
  ['gif', GIF],
  ['heif', HEIF],
  ['jpg', JPG],
  ['png', PNG],
  ['tiff', TIFF],
  ['webp', WEBP],
] as const)

export const types = Array.from(typeHandlers.keys())
export type imageType = (typeof types)[number]
15 src/image-size/types/interface.ts Normal file
@@ -0,0 +1,15 @@
export interface ISize {
  width: number
  height: number
  orientation?: number
  type?: string
}

export type ISizeCalculationResult = {
  images?: ISize[]
} & ISize

export interface IImage {
  validate: (input: Uint8Array) => boolean
  calculate: (input: Uint8Array) => ISizeCalculationResult
}
163 src/image-size/types/jpg.ts Normal file
@@ -0,0 +1,163 @@
// NOTE: we only support baseline and progressive JPGs here
// due to the structure of the loader class, we only get a buffer
// with a maximum size of 4096 bytes. so if the SOF marker is outside
// of this range we can't detect the file size correctly.

import type { IImage, ISize } from './interface'
import { readUInt, readUInt16BE, toHexString } from './utils'

const EXIF_MARKER = '45786966'
const APP1_DATA_SIZE_BYTES = 2
const EXIF_HEADER_BYTES = 6
const TIFF_BYTE_ALIGN_BYTES = 2
const BIG_ENDIAN_BYTE_ALIGN = '4d4d'
const LITTLE_ENDIAN_BYTE_ALIGN = '4949'

// Each entry is exactly 12 bytes
const IDF_ENTRY_BYTES = 12
const NUM_DIRECTORY_ENTRIES_BYTES = 2

function isEXIF(input: Uint8Array): boolean {
  return toHexString(input, 2, 6) === EXIF_MARKER
}

function extractSize(input: Uint8Array, index: number): ISize {
  return {
    height: readUInt16BE(input, index),
    width: readUInt16BE(input, index + 2),
  }
}

function extractOrientation(exifBlock: Uint8Array, isBigEndian: boolean) {
  // TODO: assert that this contains 0x002A
  // let STATIC_MOTOROLA_TIFF_HEADER_BYTES = 2
  // let TIFF_IMAGE_FILE_DIRECTORY_BYTES = 4

  // TODO: derive from TIFF_IMAGE_FILE_DIRECTORY_BYTES
  const idfOffset = 8

  // IFD offset works from right after the header bytes
  // (so the offset includes the tiff byte align)
  const offset = EXIF_HEADER_BYTES + idfOffset

  const idfDirectoryEntries = readUInt(exifBlock, 16, offset, isBigEndian)

  for (
    let directoryEntryNumber = 0;
    directoryEntryNumber < idfDirectoryEntries;
    directoryEntryNumber++
  ) {
    const start =
      offset +
      NUM_DIRECTORY_ENTRIES_BYTES +
      directoryEntryNumber * IDF_ENTRY_BYTES
    const end = start + IDF_ENTRY_BYTES

    // Skip on corrupt EXIF blocks
    if (start > exifBlock.length) {
      return
    }

    const block = exifBlock.slice(start, end)
    const tagNumber = readUInt(block, 16, 0, isBigEndian)

    // 0x0112 (decimal: 274) is the `orientation` tag ID
    if (tagNumber === 274) {
      const dataFormat = readUInt(block, 16, 2, isBigEndian)
      if (dataFormat !== 3) {
        return
      }

      // an unsigned int has 2 bytes per component
      // if there were more than 4 bytes in total it's a pointer
      const numberOfComponents = readUInt(block, 32, 4, isBigEndian)
      if (numberOfComponents !== 1) {
        return
      }

      return readUInt(block, 16, 8, isBigEndian)
    }
  }
  return undefined
}

function validateExifBlock(input: Uint8Array, index: number) {
  // Skip APP1 Data Size
  const exifBlock = input.slice(APP1_DATA_SIZE_BYTES, index)

  // Consider byte alignment
  const byteAlign = toHexString(
    exifBlock,
    EXIF_HEADER_BYTES,
    EXIF_HEADER_BYTES + TIFF_BYTE_ALIGN_BYTES,
  )

  // Ignore Empty EXIF. Validate byte alignment
  const isBigEndian = byteAlign === BIG_ENDIAN_BYTE_ALIGN
  const isLittleEndian = byteAlign === LITTLE_ENDIAN_BYTE_ALIGN

  if (isBigEndian || isLittleEndian) {
    return extractOrientation(exifBlock, isBigEndian)
  }
  return undefined
}

function validateInput(input: Uint8Array, index: number): void {
  // index should be within buffer limits
  if (index > input.length) {
    throw new TypeError('Corrupt JPG, exceeded buffer limits')
  }
}

export const JPG: IImage = {
  validate: (input) => toHexString(input, 0, 2) === 'ffd8',

  calculate(_input) {
    // Skip 4 chars, they are for signature
    let input = _input.slice(4)

    let orientation: number | undefined
    let next: number
    while (input.length) {
      // read length of the next block
      const i = readUInt16BE(input, 0)

      // ensure correct format
      validateInput(input, i)

      // Every JPEG block must begin with a 0xFF
      if (input[i] !== 0xff) {
        input = input.slice(1)
        continue
      }

      if (isEXIF(input)) {
        orientation = validateExifBlock(input, i)
      }

      // 0xFFC0 is baseline standard(SOF)
      // 0xFFC1 is baseline optimized(SOF)
      // 0xFFC2 is progressive(SOF2)
      next = input[i + 1]!
      if (next === 0xc0 || next === 0xc1 || next === 0xc2) {
        const size = extractSize(input, i + 5)

        // TODO: is orientation=0 a valid answer here?
        if (!orientation) {
          return size
        }

        return {
          height: size.height,
          orientation,
          width: size.width,
        }
      }

      // move to the next block
      input = input.slice(i + 2)
    }

    throw new TypeError('Invalid JPG, no size found')
  },
}
37 src/image-size/types/png.ts Normal file
@@ -0,0 +1,37 @@
import type { IImage } from './interface'
import { readUInt32BE, toUTF8String } from './utils'

const pngSignature = 'PNG\r\n\x1a\n'
const pngImageHeaderChunkName = 'IHDR'

// Used to detect "fried" png's: https://web.archive.org/web/20190414220044/http://www.jongware.com/pngdefry.html
const pngFriedChunkName = 'CgBI'

export const PNG: IImage = {
  validate(input) {
    if (pngSignature === toUTF8String(input, 1, 8)) {
      let chunkName = toUTF8String(input, 12, 16)
      if (chunkName === pngFriedChunkName) {
        chunkName = toUTF8String(input, 28, 32)
      }
      if (chunkName !== pngImageHeaderChunkName) {
        throw new TypeError('Invalid PNG')
      }
      return true
    }
    return false
  },

  calculate(input) {
    if (toUTF8String(input, 12, 16) === pngFriedChunkName) {
      return {
        height: readUInt32BE(input, 36),
        width: readUInt32BE(input, 32),
      }
    }
    return {
      height: readUInt32BE(input, 20),
      width: readUInt32BE(input, 16),
    }
  },
}
172 src/image-size/types/tiff.ts Normal file
@@ -0,0 +1,172 @@
import type { IImage, ISize } from './interface'
import { readUInt, readUInt64, toHexString, toUTF8String } from './utils'

const CONSTANTS = {
  TAG: {
    WIDTH: 256,
    HEIGHT: 257,
    COMPRESSION: 259,
  },
  TYPE: {
    SHORT: 3,
    LONG: 4,
    LONG8: 16,
  },
  ENTRY_SIZE: {
    STANDARD: 12,
    BIG: 20,
  },
  COUNT_SIZE: {
    STANDARD: 2,
    BIG: 8,
  },
} as const

interface TIFFFormat {
  isBigEndian: boolean
  isBigTiff: boolean
}

interface TIFFInfo extends ISize {
  compression?: number
}

// Read IFD (image-file-directory) into a buffer
function readIFD(input: Uint8Array, { isBigEndian, isBigTiff }: TIFFFormat) {
  const ifdOffset = isBigTiff
    ? Number(readUInt64(input, 8, isBigEndian))
    : readUInt(input, 32, 4, isBigEndian)
  const entryCountSize = isBigTiff
    ? CONSTANTS.COUNT_SIZE.BIG
    : CONSTANTS.COUNT_SIZE.STANDARD
  return input.slice(ifdOffset + entryCountSize)
}

function readTagValue(
  input: Uint8Array,
  type: number,
  offset: number,
  isBigEndian: boolean,
): number {
  switch (type) {
    case CONSTANTS.TYPE.SHORT:
      return readUInt(input, 16, offset, isBigEndian)
    case CONSTANTS.TYPE.LONG:
      return readUInt(input, 32, offset, isBigEndian)
    case CONSTANTS.TYPE.LONG8: {
      const value = Number(readUInt64(input, offset, isBigEndian))
      if (value > Number.MAX_SAFE_INTEGER) {
        throw new TypeError('Value too large')
      }
      return value
    }
    default:
      return 0
  }
}

function nextTag(input: Uint8Array, isBigTiff: boolean) {
  const entrySize = isBigTiff
    ? CONSTANTS.ENTRY_SIZE.BIG
    : CONSTANTS.ENTRY_SIZE.STANDARD
  if (input.length > entrySize) {
    return input.slice(entrySize)
  }
  return undefined
}

interface TIFFTags {
  [key: number]: number
}

function extractTags(
  input: Uint8Array,
  { isBigEndian, isBigTiff }: TIFFFormat,
): TIFFTags {
  const tags: TIFFTags = {}

  let temp: Uint8Array | undefined = input
  while (temp?.length) {
    const code = readUInt(temp, 16, 0, isBigEndian)
    const type = readUInt(temp, 16, 2, isBigEndian)
    const length = isBigTiff
      ? Number(readUInt64(temp, 4, isBigEndian))
      : readUInt(temp, 32, 4, isBigEndian)

    if (code === 0) break

    if (
      length === 1 &&
      (type === CONSTANTS.TYPE.SHORT ||
        type === CONSTANTS.TYPE.LONG ||
        (isBigTiff && type === CONSTANTS.TYPE.LONG8))
    ) {
      const valueOffset = isBigTiff ? 12 : 8
      tags[code] = readTagValue(temp, type, valueOffset, isBigEndian)
    }

    temp = nextTag(temp, isBigTiff)
  }

  return tags
}

function determineFormat(input: Uint8Array): TIFFFormat {
  const signature = toUTF8String(input, 0, 2)
  const version = readUInt(input, 16, 2, signature === 'MM')

  return {
    isBigEndian: signature === 'MM',
    isBigTiff: version === 43,
  }
}

function validateBigTIFFHeader(input: Uint8Array, isBigEndian: boolean): void {
  const byteSize = readUInt(input, 16, 4, isBigEndian)
  const reserved = readUInt(input, 16, 6, isBigEndian)

  if (byteSize !== 8 || reserved !== 0) {
    throw new TypeError('Invalid BigTIFF header')
  }
}

const signatures = new Set([
  '49492a00', // Little Endian
  '4d4d002a', // Big Endian
  '49492b00', // BigTIFF Little Endian
  '4d4d002b', // BigTIFF Big Endian
])

export const TIFF: IImage = {
  validate: (input) => {
    const signature = toHexString(input, 0, 4)
    return signatures.has(signature)
  },

  calculate(input) {
    const format = determineFormat(input)

    if (format.isBigTiff) {
      validateBigTIFFHeader(input, format.isBigEndian)
    }

    const ifdBuffer = readIFD(input, format)
    const tags = extractTags(ifdBuffer, format)

    const info: TIFFInfo = {
      height: tags[CONSTANTS.TAG.HEIGHT]!,
      width: tags[CONSTANTS.TAG.WIDTH]!,
      type: format.isBigTiff ? 'bigtiff' : 'tiff',
    }

    if (tags[CONSTANTS.TAG.COMPRESSION]) {
      info.compression = tags[CONSTANTS.TAG.COMPRESSION]
    }

    if (!info.width || !info.height) {
      throw new TypeError('Invalid Tiff. Missing tags')
    }

    return info
  },
}
91 src/image-size/types/utils.ts Normal file
@@ -0,0 +1,91 @@
const decoder = new TextDecoder()
export const toUTF8String = (
  input: Uint8Array,
  start = 0,
  end = input.length,
) => decoder.decode(input.slice(start, end))

export const toHexString = (input: Uint8Array, start = 0, end = input.length) =>
  input
    .slice(start, end)
    .reduce((memo, i) => memo + `0${i.toString(16)}`.slice(-2), '')

const getView = (input: Uint8Array, offset: number) =>
  new DataView(input.buffer, input.byteOffset + offset)

export const readInt16LE = (input: Uint8Array, offset = 0) =>
  getView(input, offset).getInt16(0, true)

export const readUInt16BE = (input: Uint8Array, offset = 0) =>
  getView(input, offset).getUint16(0, false)

export const readUInt16LE = (input: Uint8Array, offset = 0) =>
  getView(input, offset).getUint16(0, true)

// DataView doesn't have 24-bit methods
export const readUInt24LE = (input: Uint8Array, offset = 0) => {
  const view = getView(input, offset)
  return view.getUint16(0, true) + (view.getUint8(2) << 16)
}

export const readInt32LE = (input: Uint8Array, offset = 0) =>
  getView(input, offset).getInt32(0, true)

export const readUInt32BE = (input: Uint8Array, offset = 0) =>
  getView(input, offset).getUint32(0, false)

export const readUInt32LE = (input: Uint8Array, offset = 0) =>
  getView(input, offset).getUint32(0, true)

export const readUInt64 = (
  input: Uint8Array,
  offset: number,
  isBigEndian: boolean,
): bigint => getView(input, offset).getBigUint64(0, !isBigEndian)

// Abstract reading multi-byte unsigned integers
const methods = {
  readUInt16BE,
  readUInt16LE,
  readUInt32BE,
  readUInt32LE,
} as const

type MethodName = keyof typeof methods
export function readUInt(
  input: Uint8Array,
  bits: 16 | 32,
  offset = 0,
  isBigEndian = false,
): number {
  const endian = isBigEndian ? 'BE' : 'LE'
  const methodName = `readUInt${bits}${endian}` as MethodName
  return methods[methodName](input, offset)
}

function readBox(input: Uint8Array, offset: number) {
  if (input.length - offset < 4) return
  const boxSize = readUInt32BE(input, offset)
  if (input.length - offset < boxSize) return
  return {
    name: toUTF8String(input, 4 + offset, 8 + offset),
    offset,
    size: boxSize,
  }
}

export function findBox(
  input: Uint8Array,
  boxName: string,
  currentOffset: number,
) {
  while (currentOffset < input.length) {
    const box = readBox(input, currentOffset)
    if (!box) break
    if (box.name === boxName) return box
    // Fix the infinite loop by ensuring offset always increases
    // If box.size is 0, advance by at least 8 bytes (the size of the box header)
    currentOffset += box.size > 0 ? box.size : 8
  }
  return undefined
}
66 src/image-size/types/webp.ts Normal file
@@ -0,0 +1,66 @@
// based on https://developers.google.com/speed/webp/docs/riff_container
import type { IImage, ISize } from './interface'
import { readInt16LE, readUInt24LE, toHexString, toUTF8String } from './utils'

function calculateExtended(input: Uint8Array): ISize {
  return {
    height: 1 + readUInt24LE(input, 7),
    width: 1 + readUInt24LE(input, 4),
  }
}

function calculateLossless(input: Uint8Array): ISize {
  return {
    height:
      1 +
      (((input[4]! & 0xf) << 10) | (input[3]! << 2) | ((input[2]! & 0xc0) >> 6)),
    width: 1 + (((input[2]! & 0x3f) << 8) | input[1]!),
  }
}

function calculateLossy(input: Uint8Array): ISize {
  // `& 0x3fff` returns the last 14 bits
  // TO-DO: include webp scaling in the calculations
  return {
    height: readInt16LE(input, 8) & 0x3fff,
    width: readInt16LE(input, 6) & 0x3fff,
  }
}

export const WEBP: IImage = {
  validate(input) {
    const riffHeader = 'RIFF' === toUTF8String(input, 0, 4)
    const webpHeader = 'WEBP' === toUTF8String(input, 8, 12)
    const vp8Header = 'VP8' === toUTF8String(input, 12, 15)
    return riffHeader && webpHeader && vp8Header
  },

  calculate(_input) {
    const chunkHeader = toUTF8String(_input, 12, 16)
    const input = _input.slice(20, 30)

    // Extended webp stream signature
    if (chunkHeader === 'VP8X') {
      const extendedHeader = input[0]
      const validStart = (extendedHeader! & 0xc0) === 0
      const validEnd = (extendedHeader! & 0x01) === 0
      if (validStart && validEnd) {
        return calculateExtended(input)
      }
      throw new TypeError('Invalid WebP')
    }

    // Lossy webp stream signature
    if (chunkHeader === 'VP8 ' && input[0] !== 0x2f) {
      return calculateLossy(input)
    }

    // Lossless webp stream signature
    const signature = toHexString(input, 3, 6)
    if (chunkHeader === 'VP8L' && signature !== '9d012a') {
      return calculateLossless(input)
    }

    throw new TypeError('Invalid WebP')
  },
}
48 src/image-size/utils/bit-reader.ts Normal file
@@ -0,0 +1,48 @@
/** This class helps read Uint8Array bit-by-bit */
export class BitReader {
  // Skip the first 16 bits (2 bytes) of signature
  private byteOffset = 2
  private bitOffset = 0

  constructor(
    private readonly input: Uint8Array,
    private readonly endianness: 'big-endian' | 'little-endian',
  ) {}

  /** Reads a specified number of bits, and moves the offset */
  getBits(length = 1): number {
    let result = 0
    let bitsRead = 0

    while (bitsRead < length) {
      if (this.byteOffset >= this.input.length) {
        throw new Error('Reached end of input')
      }

      const currentByte = this.input[this.byteOffset]
      const bitsLeft = 8 - this.bitOffset
      const bitsToRead = Math.min(length - bitsRead, bitsLeft)

      if (this.endianness === 'little-endian') {
        const mask = (1 << bitsToRead) - 1
        const bits = (currentByte! >> this.bitOffset) & mask
        result |= bits << bitsRead
      } else {
        const mask =
          ((1 << bitsToRead) - 1) << (8 - this.bitOffset - bitsToRead)
        const bits = (currentByte! & mask) >> (8 - this.bitOffset - bitsToRead)
        result = (result << bitsToRead) | bits
      }

      bitsRead += bitsToRead
      this.bitOffset += bitsToRead

      if (this.bitOffset === 8) {
        this.byteOffset++
        this.bitOffset = 0
      }
    }

    return result
  }
}
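A hedged sketch of how the reader behaves (not part of the commit): the first two bytes are skipped as a signature, and in big-endian mode bits are consumed most-significant-first. The byte values and import path are made up for illustration.

```typescript
import { BitReader } from './bit-reader'

// Two signature bytes, then 0b1011_0100: big-endian reads take the high bits first.
const data = new Uint8Array([0x00, 0x00, 0b1011_0100, 0xff])
const reader = new BitReader(data, 'big-endian')
console.log(reader.getBits(4)) // 11 (0b1011)
console.log(reader.getBits(4)) // 4  (0b0100)
```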