mirror of https://github.com/excalidraw/excalidraw.git
synced 2025-04-14 16:40:58 -04:00

Merge f585cfb720 into 01304aac49
commit eec96105d5

6 changed files with 280 additions and 30 deletions
@@ -81,7 +81,7 @@ export const base64urlToString = (str: string) => {
 };
 
 // -----------------------------------------------------------------------------
-// text encoding
+// tEXT encoding/decoding
 // -----------------------------------------------------------------------------
 
 type EncodedData = {
@@ -143,6 +143,113 @@ export const decode = (data: EncodedData): string => {
   return decoded;
 };
 
+// -----------------------------------------------------------------------------
+// iTXt encoding/decoding
+// -----------------------------------------------------------------------------
+
+// Based on PNG spec: http://www.libpng.org/pub/png/spec/1.2/PNG-Chunks.html
+// and iTXt chunk structure: https://www.w3.org/TR/PNG/#11iTXt
+
+export const encodeITXtChunk = ({
+  keyword,
+  text,
+  compressionFlag = true,
+  compressionMethod = 0,
+  languageTag = "",
+  translatedKeyword = "",
+}: {
+  keyword: string;
+  text: string;
+  compressionFlag?: boolean;
+  compressionMethod?: number;
+  languageTag?: string;
+  translatedKeyword?: string;
+}): Uint8Array => {
+  const keywordBytes = new TextEncoder().encode(keyword);
+  const languageTagBytes = new TextEncoder().encode(languageTag);
+  const translatedKeywordBytes = new TextEncoder().encode(translatedKeyword);
+  const textBytes = new TextEncoder().encode(text);
+
+  const totalSize =
+    keywordBytes.length +
+    1 + // null separator after keyword
+    1 + // compression flag
+    1 + // compression method
+    languageTagBytes.length +
+    1 + // null separator after language tag
+    translatedKeywordBytes.length +
+    1 + // null separator after translated keyword
+    (compressionFlag ? deflate(textBytes).length : textBytes.length);
+
+  const output = new Uint8Array(totalSize);
+  let offset = 0;
+
+  output.set(keywordBytes, offset);
+  offset += keywordBytes.length;
+  output[offset++] = 0; // null separator
+
+  output[offset++] = compressionFlag ? 1 : 0;
+  output[offset++] = compressionMethod;
+
+  output.set(languageTagBytes, offset);
+  offset += languageTagBytes.length;
+  output[offset++] = 0; // null separator
+
+  output.set(translatedKeywordBytes, offset);
+  offset += translatedKeywordBytes.length;
+  output[offset++] = 0; // null separator
+
+  const finalTextBytes = compressionFlag ? deflate(textBytes) : textBytes;
+  output.set(finalTextBytes, offset);
+
+  return output;
+};
+
+export const decodeITXtChunk = (
+  data: Uint8Array,
+): {
+  keyword: string;
+  text: string;
+  compressed: boolean;
+  compressedMethod: number;
+  language: string;
+  translated: string;
+} => {
+  let offset = 0;
+
+  const keywordEnd = data.indexOf(0, offset);
+  if (keywordEnd === -1) {
+    throw new Error("Invalid iTXt chunk: missing keyword");
+  }
+  const keyword = new TextDecoder().decode(data.slice(offset, keywordEnd));
+  offset = keywordEnd + 1;
+
+  const compressionFlag = data[offset++] === 1;
+  const compressionMethod = data[offset++];
+
+  const languageEnd = data.indexOf(0, offset);
+  if (languageEnd === -1) {
+    throw new Error("Invalid iTXt chunk: missing language tag");
+  }
+  const language = new TextDecoder().decode(data.slice(offset, languageEnd));
+  offset = languageEnd + 1;
+
+  const translatedEnd = data.indexOf(0, offset);
+  if (translatedEnd === -1) {
+    throw new Error("Invalid iTXt chunk: missing translated keyword");
+  }
+  const translated = new TextDecoder().decode(
+    data.slice(offset, translatedEnd),
+  );
+  offset = translatedEnd + 1;
+
+  const textBytes = data.slice(offset);
+  const text = compressionFlag
+    ? new TextDecoder().decode(inflate(textBytes))
+    : new TextDecoder().decode(textBytes);
+
+  return {
+    keyword,
+    text,
+    compressed: compressionFlag,
+    compressedMethod: compressionMethod,
+    language,
+    translated,
+  };
+};
+
 // -----------------------------------------------------------------------------
 // binary encoding
 // -----------------------------------------------------------------------------
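Taken together, the two new helpers should round-trip arbitrary metadata through the iTXt field layout (keyword NUL, flag byte, method byte, language NUL, translated keyword NUL, then the optionally deflated text). A minimal usage sketch, not part of the diff; it assumes the module exports both functions and that `deflate`/`inflate` are pako's, per this module's existing imports:

import { encodeITXtChunk, decodeITXtChunk } from "./encode";

const chunk = encodeITXtChunk({
  keyword: "application/vnd.excalidraw+json", // assumed value of MIME_TYPES.excalidraw
  text: JSON.stringify({ type: "excalidraw", version: 2 }), // hypothetical payload
  compressionFlag: true,
  languageTag: "en",
});

const decoded = decodeITXtChunk(chunk);
console.assert(decoded.keyword === "application/vnd.excalidraw+json");
console.assert(decoded.compressed && decoded.language === "en");
console.assert(JSON.parse(decoded.text).type === "excalidraw");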

@@ -5,67 +5,170 @@ import decodePng from "png-chunks-extract";
 import { EXPORT_DATA_TYPES, MIME_TYPES } from "@excalidraw/common";
 
 import { blobToArrayBuffer } from "./blob";
-import { encode, decode } from "./encode";
+import { encode, decode, encodeITXtChunk, decodeITXtChunk } from "./encode";
+
+type TEXtChunk = { name: "tEXt"; data: Uint8Array };
+type ITXtChunk = { name: "iTXt"; data: Uint8Array };
+type PngChunk = TEXtChunk | ITXtChunk;
 
 // -----------------------------------------------------------------------------
 // PNG
 // -----------------------------------------------------------------------------
 
-export const getTEXtChunk = async (
+export const getMetadataChunk = async (
   blob: Blob,
-): Promise<{ keyword: string; text: string } | null> => {
-  const chunks = decodePng(new Uint8Array(await blobToArrayBuffer(blob)));
-  const metadataChunk = chunks.find((chunk) => chunk.name === "tEXt");
-  if (metadataChunk) {
-    return tEXt.decode(metadataChunk.data);
+): Promise<{
+  keyword: string;
+  text: string;
+  compressionFlag?: boolean;
+  compressionMethod?: number;
+  languageTag?: string;
+  translatedKeyword?: string;
+} | null> => {
+  try {
+    const chunks = decodePng(
+      new Uint8Array(await blobToArrayBuffer(blob)),
+    ) as PngChunk[];
+
+    // Try iTXt chunk first (preferred format)
+    const iTXtChunk = chunks.find((chunk) => chunk.name === "iTXt");
+    if (iTXtChunk) {
+      try {
+        const decoded = decodeITXtChunk(iTXtChunk.data);
+        console.debug("Decoded iTXt chunk:", decoded);
+        return {
+          keyword: decoded.keyword,
+          text: decoded.text,
+          compressionFlag: decoded.compressed,
+          compressionMethod: decoded.compressedMethod,
+          languageTag: decoded.language,
+          translatedKeyword: decoded.translated,
+        };
+      } catch (error) {
+        console.warn("Failed to decode iTXt chunk:", error);
+      }
+    }
+
+    // Fall back to tEXt chunk
+    const tEXtChunk = chunks.find((chunk) => chunk.name === "tEXt");
+    if (tEXtChunk) {
+      try {
+        return tEXt.decode(tEXtChunk.data);
+      } catch (error) {
+        console.warn("Failed to decode tEXt chunk:", error);
+      }
+    }
+
+    return null;
+  } catch (error) {
+    console.error("Failed to get metadata chunk:", error);
+    return null;
   }
-  return null;
 };
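getMetadataChunk now prefers iTXt and degrades to tEXt, so callers see one return shape for both paths; the optional fields are only populated on the iTXt path. A caller-side sketch (hypothetical blob and import path):

import { getMetadataChunk } from "./image"; // hypothetical path to the module above

declare const pngBlob: Blob; // hypothetical: a PNG exported from Excalidraw

const meta = await getMetadataChunk(pngBlob);
if (meta?.keyword === "application/vnd.excalidraw+json") {
  // compressionFlag, languageTag, etc. are undefined when the data
  // came from a legacy tEXt chunk
  console.log(meta.text.length, meta.compressionFlag ?? "legacy tEXt");
}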

 export const encodePngMetadata = async ({
   blob,
   metadata,
+  useITXt = true,
 }: {
   blob: Blob;
   metadata: string;
+  useITXt?: boolean;
 }) => {
-  const chunks = decodePng(new Uint8Array(await blobToArrayBuffer(blob)));
-
-  const metadataChunk = tEXt.encode(
-    MIME_TYPES.excalidraw,
-    JSON.stringify(
+  try {
+    const chunks = decodePng(
+      new Uint8Array(await blobToArrayBuffer(blob)),
+    ) as PngChunk[];
+
+    const filteredChunks = chunks.filter((chunk) => {
+      try {
+        if (chunk.name === "tEXt") {
+          return tEXt.decode(chunk.data).keyword !== MIME_TYPES.excalidraw;
+        }
+        if (chunk.name === "iTXt") {
+          return decodeITXtChunk(chunk.data).keyword !== MIME_TYPES.excalidraw;
+        }
+        return true;
+      } catch (error) {
+        console.warn("Failed to decode chunk during filtering:", error);
+        return true;
+      }
+    });
+
+    const encodedData = JSON.stringify(
       encode({
         text: metadata,
         compress: true,
       }),
-    ),
-  );
-  // insert metadata before last chunk (iEND)
-  chunks.splice(-1, 0, metadataChunk);
+    );
 
-  return new Blob([encodePng(chunks)], { type: MIME_TYPES.png });
+    let metadataChunk: PngChunk;
+    try {
+      if (useITXt) {
+        metadataChunk = {
+          name: "iTXt",
+          data: encodeITXtChunk({
+            keyword: MIME_TYPES.excalidraw,
+            text: encodedData,
+            compressionFlag: true,
+            compressionMethod: 0,
+            languageTag: "en",
+            translatedKeyword: "",
+          }),
+        };
+      } else {
+        throw new Error("Fallback to tEXt");
+      }
+    } catch (error) {
+      console.warn("iTXt encoding failed, falling back to tEXt:", error);
+      const tEXtData = tEXt.encode(
+        MIME_TYPES.excalidraw,
+        encodedData,
+      ) as unknown as Uint8Array;
+      metadataChunk = { name: "tEXt", data: tEXtData };
+    }
+
+    // Insert metadata chunk before the IEND chunk (last chunk)
+    filteredChunks.splice(-1, 0, metadataChunk);
+
+    return new Blob(
+      [(encodePng as (chunks: PngChunk[]) => Uint8Array)(filteredChunks)],
+      { type: MIME_TYPES.png },
+    );
+  } catch (error) {
+    console.error("Failed to encode PNG metadata:", error);
+    throw new Error("Failed to encode PNG metadata");
+  }
 };
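Because encodePngMetadata now filters out any pre-existing excalidraw-keyword chunk (tEXt or iTXt) before splicing in the fresh one, re-encoding the same blob should not accumulate duplicate metadata. A usage sketch (hypothetical inputs and import path):

import { encodePngMetadata } from "./image"; // hypothetical path

declare const pngBlob: Blob; // hypothetical: PNG from canvas.toBlob()
declare const scene: object; // hypothetical scene payload

const withMetadata = await encodePngMetadata({
  blob: pngBlob,
  metadata: JSON.stringify(scene),
  useITXt: true, // false forces the legacy tEXt path
});
console.log(withMetadata.type); // "image/png"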

 export const decodePngMetadata = async (blob: Blob) => {
-  const metadata = await getTEXtChunk(blob);
-  if (metadata?.keyword === MIME_TYPES.excalidraw) {
-    try {
+  try {
+    const metadata = await getMetadataChunk(blob);
+
+    if (!metadata?.keyword || metadata.keyword !== MIME_TYPES.excalidraw) {
+      throw new Error("Invalid or unsupported PNG metadata format");
+    }
+
+    try {
       const encodedData = JSON.parse(metadata.text);
+
+      // Handle legacy format
       if (!("encoded" in encodedData)) {
-        // legacy, un-encoded scene JSON
         if (
           "type" in encodedData &&
           encodedData.type === EXPORT_DATA_TYPES.excalidraw
         ) {
           return metadata.text;
         }
-        throw new Error("FAILED");
+        throw new Error("Malformed or unexpected metadata format");
       }
+
       return decode(encodedData);
-    } catch (error: any) {
-      console.error(error);
-      throw new Error("FAILED");
+    } catch (error) {
+      console.error("Failed to decode metadata:", error);
+      throw new Error("Malformed or unexpected metadata format");
     }
+  } catch (error) {
+    console.error("Failed to decode PNG metadata:", error);
+    throw new Error("Failed to decode PNG metadata");
   }
-  throw new Error("INVALID");
 };
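decodePngMetadata keeps accepting both formats: the current envelope (routed through decode()) and the legacy un-encoded scene JSON, which is returned verbatim. From the caller's side (hypothetical blob and import path):

import { decodePngMetadata } from "./image"; // hypothetical path

declare const exportedPng: Blob; // hypothetical

try {
  const sceneJSON = await decodePngMetadata(exportedPng);
  console.log(JSON.parse(sceneJSON).type); // "excalidraw" for a valid scene
} catch {
  // thrown when the PNG has no excalidraw metadata or the payload is malformed
}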

packages/excalidraw/global.d.ts (vendored, 24 changes)

@@ -33,6 +33,7 @@ interface Clipboard extends EventTarget {
 // PNG encoding/decoding
 // -----------------------------------------------------------------------------
 type TEXtChunk = { name: "tEXt"; data: Uint8Array };
+type ITXtChunk = { name: "iTXt"; data: Uint8Array };
 
 declare module "png-chunk-text" {
   function encode(

@@ -41,12 +42,31 @@ declare module "png-chunk-text" {
   ): { name: "tEXt"; data: Uint8Array };
   function decode(data: Uint8Array): { keyword: string; text: string };
 }
+declare module "png-chunk-itxt" {
+  function encodeSync(options: {
+    keyword: string;
+    text: string;
+    compressionFlag?: boolean;
+    compressionMethod?: number;
+    languageTag?: string;
+    translatedKeyword?: string;
+  }): Uint8Array;
+
+  function decodeSync(data: Uint8Array): {
+    keyword: string;
+    text: string;
+    compressed?: boolean;
+    compressedMethod?: number;
+    language?: string;
+    translated?: string;
+  };
+}
 declare module "png-chunks-encode" {
-  function encode(chunks: TEXtChunk[]): Uint8Array;
+  function encode(chunks: (TEXtChunk | ITXtChunk)[]): Uint8Array;
   export = encode;
 }
 declare module "png-chunks-extract" {
-  function extract(buffer: Uint8Array): TEXtChunk[];
+  function extract(buffer: Uint8Array): (TEXtChunk | ITXtChunk)[];
   export = extract;
 }
 // -----------------------------------------------------------------------------
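With these ambient declarations in place, the new package can be consumed with the same field names the hand-rolled helpers expose; a consumption sketch (assumes png-chunk-itxt's encodeSync/decodeSync actually behave as declared above):

import { encodeSync, decodeSync } from "png-chunk-itxt";

const bytes = encodeSync({ keyword: "test", text: "hello" });
const back = decodeSync(bytes);
console.log(back.keyword, back.text); // "test hello"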

@@ -87,13 +87,14 @@
     "image-blob-reduce": "3.0.1",
     "jotai": "2.11.0",
     "jotai-scope": "0.7.2",
-    "lodash.throttle": "4.1.1",
     "lodash.debounce": "4.0.8",
+    "lodash.throttle": "4.1.1",
     "nanoid": "3.3.3",
     "open-color": "1.9.1",
     "pako": "2.0.3",
     "perfect-freehand": "1.2.0",
     "pica": "7.1.1",
+    "png-chunk-itxt": "1.0.0",
     "png-chunk-text": "1.0.0",
     "png-chunks-encode": "1.0.0",
     "png-chunks-extract": "1.0.0",

@@ -19,6 +19,11 @@ import type {
   NonDeleted,
 } from "@excalidraw/element/types";
 import type { AppState, BinaryFiles } from "@excalidraw/excalidraw/types";
+import { Buffer } from "buffer";
+
+window.onload = () => {
+  window.Buffer = Buffer;
+};
 
 export { MIME_TYPES };
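The global Buffer assignment presumably exists because the png-chunk packages expect Node's Buffer in the browser; deferring it to window.onload leaves a window where an export call running before the load event would still miss it. An eager-assignment sketch, under the same assumption about why the polyfill is needed:

import { Buffer } from "buffer";

// Run at module evaluation rather than on window load, so earlier
// callers also see the polyfill.
if (typeof window !== "undefined" && !(window as any).Buffer) {
  (window as any).Buffer = Buffer;
}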
@@ -101,9 +106,10 @@ export const exportToBlob = async (
     mimeType?: string;
     quality?: number;
     exportPadding?: number;
+    useITXt?: boolean;
   },
 ): Promise<Blob> => {
-  let { mimeType = MIME_TYPES.png, quality } = opts;
+  let { mimeType = MIME_TYPES.png, quality, useITXt = true } = opts;
 
   if (mimeType === MIME_TYPES.png && typeof quality === "number") {
     console.warn(`"quality" will be ignored for "${MIME_TYPES.png}" mimeType`);

@@ -150,6 +156,7 @@ export const exportToBlob = async (
         opts.files || {},
         "local",
       ),
+      useITXt,
     });
   }
   resolve(blob);
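The new flag threads through to encodePngMetadata and defaults to iTXt; a caller opting back into tEXt (hypothetical scene inputs, assuming the option shape shown above and the usual @excalidraw/utils export):

import { exportToBlob } from "@excalidraw/utils"; // assumed export location

declare const elements: any; // hypothetical scene elements
declare const appState: any; // hypothetical
declare const files: any; // hypothetical

const blob = await exportToBlob({
  elements,
  appState,
  files,
  mimeType: "image/png",
  useITXt: false, // keep legacy tEXt metadata for older readers
});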

yarn.lock (12 changes)

@@ -3801,6 +3801,11 @@ binary-extensions@^2.0.0:
   resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.3.0.tgz#f6e14a97858d327252200242d4ccfe522c445522"
   integrity sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==
 
+binary-parser@^2.2.1:
+  version "2.2.1"
+  resolved "https://registry.yarnpkg.com/binary-parser/-/binary-parser-2.2.1.tgz#4edc6da2dc56db73fa5ba450dfe6382ede8294ce"
+  integrity sha512-5ATpz/uPDgq5GgEDxTB4ouXCde7q2lqAQlSdBRQVl/AJnxmQmhIfyxJx+0MGu//D5rHQifkfGbWWlaysG0o9NA==
+
 bl@^4.0.3:
   version "4.1.0"
   resolved "https://registry.yarnpkg.com/bl/-/bl-4.1.0.tgz#451535264182bec2fbbc83a62ab98cf11d9f7b3a"

@@ -7838,6 +7843,13 @@ pkg-dir@4.2.0:
   dependencies:
     find-up "^4.0.0"
 
+png-chunk-itxt@1.0.0:
+  version "1.0.0"
+  resolved "https://registry.yarnpkg.com/png-chunk-itxt/-/png-chunk-itxt-1.0.0.tgz#4652547b7c88d512337599e422b7431f2c234355"
+  integrity sha512-/1gTTBlIBUL47FS1wXI5oW5zidHge1Lwn+w4WNsnTc6wu1i82l63hwz0mgw1x2eYFH4iYkHkmKH0FHoHYMmjig==
+  dependencies:
+    binary-parser "^2.2.1"
+
 png-chunk-text@1.0.0:
   version "1.0.0"
   resolved "https://registry.yarnpkg.com/png-chunk-text/-/png-chunk-text-1.0.0.tgz#1c6006d8e34ba471d38e1c9c54b3f53e1085e18f"