iTXt implementation finished and working

This commit is contained in:
Gabriel Gomes 2025-04-11 20:20:58 +01:00
parent 7ddb09f12c
commit f585cfb720
2 changed files with 226 additions and 76 deletions

View file

@@ -81,7 +81,7 @@ export const base64urlToString = (str: string) => {
};
// -----------------------------------------------------------------------------
// text encoding
// tEXT encoding/decoding
// -----------------------------------------------------------------------------
type EncodedData = {
@@ -143,6 +143,113 @@ export const decode = (data: EncodedData): string => {
return decoded;
};
// -----------------------------------------------------------------------------
// iTXt encoding/decoding
// -----------------------------------------------------------------------------
// Based on PNG spec: http://www.libpng.org/pub/png/spec/1.2/PNG-Chunks.html
// and iTXt chunk structure: https://www.w3.org/TR/PNG/#11iTXt
/**
 * Serializes the payload of a PNG iTXt chunk (the chunk length/type/CRC
 * framing is added by the PNG encoder, not here).
 *
 * Layout per the PNG spec (https://www.w3.org/TR/PNG/#11iTXt):
 *   keyword \0 compressionFlag compressionMethod languageTag \0
 *   translatedKeyword \0 text
 *
 * @param keyword chunk keyword (spec limits it to 1-79 Latin-1 chars — not enforced here)
 * @param text text payload; deflate-compressed when `compressionFlag` is set
 * @param compressionFlag whether to deflate the text (default true)
 * @param compressionMethod PNG compression method byte (0 = deflate, the only defined value)
 * @param languageTag RFC 3066 language tag, may be empty
 * @param translatedKeyword UTF-8 translation of the keyword, may be empty
 * @returns the raw chunk payload bytes
 */
export const encodeITXtChunk = ({
  keyword,
  text,
  compressionFlag = true,
  compressionMethod = 0,
  languageTag = "",
  translatedKeyword = "",
}: {
  keyword: string;
  text: string;
  compressionFlag?: boolean;
  compressionMethod?: number;
  languageTag?: string;
  translatedKeyword?: string;
}): Uint8Array => {
  const encoder = new TextEncoder();
  const keywordBytes = encoder.encode(keyword);
  const languageTagBytes = encoder.encode(languageTag);
  const translatedKeywordBytes = encoder.encode(translatedKeyword);
  // Compress exactly once. The original called deflate() twice — once to size
  // the output buffer and again to fill it — doing the compression work twice.
  const textPayload = compressionFlag
    ? deflate(encoder.encode(text))
    : encoder.encode(text);

  const totalSize =
    keywordBytes.length +
    1 + // null separator after keyword
    1 + // compression flag
    1 + // compression method
    languageTagBytes.length +
    1 + // null separator after language tag
    translatedKeywordBytes.length +
    1 + // null separator after translated keyword
    textPayload.length;

  const output = new Uint8Array(totalSize);
  let offset = 0;

  output.set(keywordBytes, offset);
  offset += keywordBytes.length;
  output[offset++] = 0; // null separator

  output[offset++] = compressionFlag ? 1 : 0;
  output[offset++] = compressionMethod;

  output.set(languageTagBytes, offset);
  offset += languageTagBytes.length;
  output[offset++] = 0; // null separator

  output.set(translatedKeywordBytes, offset);
  offset += translatedKeywordBytes.length;
  output[offset++] = 0; // null separator

  output.set(textPayload, offset);
  return output;
};
/**
 * Parses the payload of a PNG iTXt chunk (inverse of encodeITXtChunk).
 *
 * Layout per the PNG spec (https://www.w3.org/TR/PNG/#11iTXt):
 *   keyword \0 compressionFlag compressionMethod languageTag \0
 *   translatedKeyword \0 text
 *
 * @param data raw chunk payload (without the PNG length/type/CRC framing)
 * @returns parsed fields; `text` is inflated first when the compression flag is set
 * @throws Error when a required separator is missing or the chunk is truncated
 */
export const decodeITXtChunk = (data: Uint8Array): {
  keyword: string;
  text: string;
  compressed: boolean;
  compressedMethod: number;
  language: string;
  translated: string;
} => {
  const decoder = new TextDecoder();
  let offset = 0;

  const keywordEnd = data.indexOf(0, offset);
  if (keywordEnd === -1) {
    throw new Error("Invalid iTXt chunk: missing keyword");
  }
  const keyword = decoder.decode(data.slice(offset, keywordEnd));
  offset = keywordEnd + 1;

  // Two single-byte fields follow the keyword. The original indexed past the
  // end of a truncated chunk here and silently produced `undefined` instead
  // of reporting the malformed input.
  if (offset + 2 > data.length) {
    throw new Error("Invalid iTXt chunk: truncated compression fields");
  }
  const compressionFlag = data[offset++] === 1;
  const compressionMethod = data[offset++];

  const languageEnd = data.indexOf(0, offset);
  if (languageEnd === -1) {
    throw new Error("Invalid iTXt chunk: missing language tag");
  }
  const language = decoder.decode(data.slice(offset, languageEnd));
  offset = languageEnd + 1;

  const translatedEnd = data.indexOf(0, offset);
  if (translatedEnd === -1) {
    throw new Error("Invalid iTXt chunk: missing translated keyword");
  }
  const translated = decoder.decode(data.slice(offset, translatedEnd));
  offset = translatedEnd + 1;

  // Everything after the last separator is the text; inflate when compressed.
  const textBytes = data.slice(offset);
  const text = decoder.decode(compressionFlag ? inflate(textBytes) : textBytes);

  return {
    keyword,
    text,
    compressed: compressionFlag,
    compressedMethod: compressionMethod,
    language,
    translated,
  };
};
// -----------------------------------------------------------------------------
// binary encoding
// -----------------------------------------------------------------------------

View file

@@ -1,12 +1,15 @@
import tEXt from "png-chunk-text";
import * as iTXt from "png-chunk-itxt";
import encodePng from "png-chunks-encode";
import decodePng from "png-chunks-extract";
import { EXPORT_DATA_TYPES, MIME_TYPES } from "@excalidraw/common";
import { blobToArrayBuffer } from "./blob";
import { encode, decode } from "./encode";
import { encode, decode, encodeITXtChunk, decodeITXtChunk } from "./encode";
type TEXtChunk = { name: "tEXt"; data: Uint8Array };
type ITXtChunk = { name: "iTXt"; data: Uint8Array };
type PngChunk = TEXtChunk | ITXtChunk;
// -----------------------------------------------------------------------------
// PNG
@@ -22,33 +25,43 @@ export const getMetadataChunk = async (
languageTag?: string;
translatedKeyword?: string;
} | null> => {
const chunks = decodePng(new Uint8Array(await blobToArrayBuffer(blob)));
const iTXtChunk = chunks.find((chunk) => chunk.name === "iTXt");
if (iTXtChunk) {
try {
const decoded = iTXt.decodeSync(iTXtChunk.data);
console.log("Decoded iTXt chunk:", decoded);
return {
keyword: decoded.keyword,
text: decoded.text,
compressionFlag: decoded.compressed,
compressionMethod: decoded.compressedMethod,
languageTag: decoded.language || "",
translatedKeyword: decoded.translated || ""
};
} catch (error) {
console.error("Failed to decode iTXt chunk:", error);
try {
const chunks = decodePng(new Uint8Array(await blobToArrayBuffer(blob))) as PngChunk[];
// Try iTXt chunk first (preferred format)
const iTXtChunk = chunks.find((chunk) => chunk.name === "iTXt");
if (iTXtChunk) {
try {
const decoded = decodeITXtChunk(iTXtChunk.data);
console.debug("Decoded iTXt chunk:", decoded);
return {
keyword: decoded.keyword,
text: decoded.text,
compressionFlag: decoded.compressed,
compressionMethod: decoded.compressedMethod,
languageTag: decoded.language,
translatedKeyword: decoded.translated
};
} catch (error) {
console.warn("Failed to decode iTXt chunk:", error);
}
}
// Fallback to tEXt chunk
const tEXtChunk = chunks.find((chunk) => chunk.name === "tEXt");
if (tEXtChunk) {
try {
return tEXt.decode(tEXtChunk.data);
} catch (error) {
console.warn("Failed to decode tEXt chunk:", error);
}
}
return null;
} catch (error) {
console.error("Failed to get metadata chunk:", error);
return null;
}
const tEXtChunk = chunks.find((chunk) => chunk.name === "tEXt");
if (tEXtChunk) {
return tEXt.decode(tEXtChunk.data);
}
return null;
};
export const encodePngMetadata = async ({
@@ -60,58 +73,85 @@ export const encodePngMetadata = async ({
metadata: string;
useITXt?: boolean;
}) => {
const chunks = decodePng(new Uint8Array(await blobToArrayBuffer(blob)));
const filteredChunks = chunks.filter(
(chunk) =>
!(chunk.name === "tEXt" &&
tEXt.decode(chunk.data).keyword === MIME_TYPES.excalidraw) &&
!(chunk.name === "iTXt" &&
iTXt.decodeSync(chunk.data).keyword === MIME_TYPES.excalidraw)
);
const encodedData = JSON.stringify(
encode({
text: metadata,
compress: true,
}),
);
let metadataChunk: TEXtChunk | ITXtChunk;
try {
if (useITXt) {
metadataChunk = {
name: "iTXt",
data: iTXt.encodeSync({
keyword: MIME_TYPES.excalidraw,
text: encodedData,
compressionFlag: true,
compressionMethod: 0,
languageTag: "en",
translatedKeyword: ""
})
};
} else {
throw new Error("Fallback to tEXt");
}
} catch (error) {
console.warn("iTXt encoding failed, falling back to tEXt:", error);
metadataChunk = tEXt.encode(
MIME_TYPES.excalidraw,
encodedData,
);
}
filteredChunks.splice(-1, 0, metadataChunk);
const chunks = decodePng(new Uint8Array(await blobToArrayBuffer(blob))) as PngChunk[];
return new Blob([encodePng(filteredChunks)], { type: MIME_TYPES.png });
const filteredChunks = chunks.filter((chunk) => {
try {
if (chunk.name === "tEXt") {
return tEXt.decode(chunk.data).keyword !== MIME_TYPES.excalidraw;
}
if (chunk.name === "iTXt") {
return decodeITXtChunk(chunk.data).keyword !== MIME_TYPES.excalidraw;
}
return true;
} catch (error) {
console.warn("Failed to decode chunk during filtering:", error);
return true;
}
});
const encodedData = JSON.stringify(
encode({
text: metadata,
compress: true,
}),
);
let metadataChunk: PngChunk;
try {
if (useITXt) {
metadataChunk = {
name: "iTXt",
data: encodeITXtChunk({
keyword: MIME_TYPES.excalidraw,
text: encodedData,
compressionFlag: true,
compressionMethod: 0,
languageTag: "en",
translatedKeyword: ""
})
};
} else {
throw new Error("Fallback to tEXt");
}
} catch (error) {
console.warn("iTXt encoding failed, falling back to tEXt:", error);
const tEXtData = tEXt.encode(
MIME_TYPES.excalidraw,
encodedData,
) as unknown as Uint8Array;
metadataChunk = {
name: "tEXt",
data: tEXtData
};
}
// Insert metadata chunk before the IEND chunk (last chunk)
filteredChunks.splice(-1, 0, metadataChunk);
return new Blob(
[(encodePng as (chunks: PngChunk[]) => Uint8Array)(filteredChunks)],
{ type: MIME_TYPES.png }
);
} catch (error) {
console.error("Failed to encode PNG metadata:", error);
throw new Error("Failed to encode PNG metadata");
}
};
export const decodePngMetadata = async (blob: Blob) => {
const metadata = await getMetadataChunk(blob);
if (metadata?.keyword === MIME_TYPES.excalidraw) {
try {
const metadata = await getMetadataChunk(blob);
if (!metadata?.keyword || metadata.keyword !== MIME_TYPES.excalidraw) {
throw new Error("Invalid or unsupported PNG metadata format");
}
try {
const encodedData = JSON.parse(metadata.text);
// Handle legacy format
if (!("encoded" in encodedData)) {
if (
"type" in encodedData &&
@@ -121,11 +161,14 @@ export const decodePngMetadata = async (blob: Blob) => {
}
throw new Error("Malformed or unexpected metadata format");
}
return decode(encodedData);
} catch (error: any) {
console.error(error);
} catch (error) {
console.error("Failed to decode metadata:", error);
throw new Error("Malformed or unexpected metadata format");
}
} catch (error) {
console.error("Failed to decode PNG metadata:", error);
throw new Error("Failed to decode PNG metadata");
}
throw new Error("Invalid or unsupported PNG metadata format");
};