mirror of
https://github.com/excalidraw/excalidraw.git
synced 2025-05-03 10:00:07 -04:00
fix encoding of embed data & compress (#2240)
parent e8a39b5f84
commit b3263c2a69
15 changed files with 483 additions and 122 deletions
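In short: scene JSON embedded in exported images is now run through a shared encode step (zlib-deflate via pako, wrapped in a small versioned envelope) before being written to a PNG tEXt chunk or an SVG comment payload, and the decoders still accept the older uncompressed payloads.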
src/data/blob.ts

@@ -4,16 +4,20 @@ import { t } from "../i18n";
 import { AppState } from "../types";
 import { LibraryData, ImportedDataState } from "./types";
 import { calculateScrollCenter } from "../scene";
 import { MIME_TYPES } from "../constants";
-import { base64ToString } from "../base64";

 export const parseFileContents = async (blob: Blob | File) => {
   let contents: string;

   if (blob.type === "image/png") {
-    const metadata = await (await import("./png")).getTEXtChunk(blob);
-    if (metadata?.keyword === MIME_TYPES.excalidraw) {
-      return metadata.text;
-    }
-    throw new Error(t("alerts.imageDoesNotContainScene"));
+    try {
+      return await (await import("./image")).decodePngMetadata(blob);
+    } catch (error) {
+      if (error.message === "INVALID") {
+        throw new Error(t("alerts.imageDoesNotContainScene"));
+      } else {
+        throw new Error(t("alerts.cannotRestoreFromImage"));
+      }
+    }
   } else {
     if ("text" in Blob) {
       contents = await blob.text();
@@ -29,16 +33,17 @@ export const parseFileContents = async (blob: Blob | File) => {
       });
     }
     if (blob.type === "image/svg+xml") {
       if (contents.includes(`payload-type:${MIME_TYPES.excalidraw}`)) {
-        const match = contents.match(
-          /<!-- payload-start -->(.+?)<!-- payload-end -->/,
-        );
-        if (!match) {
-          throw new Error(t("alerts.imageDoesNotContainScene"));
-        }
-        return base64ToString(match[1]);
+        try {
+          return await (await import("./image")).decodeSvgMetadata({
+            svg: contents,
+          });
+        } catch (error) {
+          if (error.message === "INVALID") {
+            throw new Error(t("alerts.imageDoesNotContainScene"));
+          } else {
+            throw new Error(t("alerts.cannotRestoreFromImage"));
+          }
+        }
       }
       throw new Error(t("alerts.imageDoesNotContainScene"));
     }
     return contents;
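Note the two-level error contract this sets up: decodePngMetadata and decodeSvgMetadata throw Error("INVALID") when a file carries no excalidraw payload at all, and Error("FAILED") when a payload exists but cannot be decoded; parseFileContents translates the former into alerts.imageDoesNotContainScene and everything else into alerts.cannotRestoreFromImage.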
src/data/encode.ts (new file, +116)

@@ -0,0 +1,116 @@
+import { deflate, inflate } from "pako";
+
+// -----------------------------------------------------------------------------
+// byte (binary) strings
+// -----------------------------------------------------------------------------
+
+// fast, Buffer-compatible implem
+export const toByteString = (data: string | Uint8Array): Promise<string> => {
+  return new Promise((resolve, reject) => {
+    const blob =
+      typeof data === "string"
+        ? new Blob([new TextEncoder().encode(data)])
+        : new Blob([data]);
+    const reader = new FileReader();
+    reader.onload = (event) => {
+      if (!event.target || typeof event.target.result !== "string") {
+        return reject(new Error("couldn't convert to byte string"));
+      }
+      resolve(event.target.result);
+    };
+    reader.readAsBinaryString(blob);
+  });
+};
+
+const byteStringToArrayBuffer = (byteString: string) => {
+  const buffer = new ArrayBuffer(byteString.length);
+  const bufferView = new Uint8Array(buffer);
+  for (let i = 0, len = byteString.length; i < len; i++) {
+    bufferView[i] = byteString.charCodeAt(i);
+  }
+  return buffer;
+};
+
+const byteStringToString = (byteString: string) => {
+  return new TextDecoder("utf-8").decode(byteStringToArrayBuffer(byteString));
+};
+
+// -----------------------------------------------------------------------------
+// base64
+// -----------------------------------------------------------------------------
+
+/**
+ * @param isByteString set to true if already byte string to prevent bloat
+ * due to reencoding
+ */
+export const stringToBase64 = async (str: string, isByteString = false) => {
+  return isByteString ? btoa(str) : btoa(await toByteString(str));
+};
+
+// async to align with stringToBase64
+export const base64ToString = async (base64: string, isByteString = false) => {
+  return isByteString ? atob(base64) : byteStringToString(atob(base64));
+};
+
+// -----------------------------------------------------------------------------
+// text encoding
+// -----------------------------------------------------------------------------
+
+type EncodedData = {
+  encoded: string;
+  encoding: "bstring";
+  /** whether text is compressed (zlib) */
+  compressed: boolean;
+  /** version for potential migration purposes */
+  version?: string;
+};
+
+/**
+ * Encodes (and potentially compresses via zlib) text to byte string
+ */
+export const encode = async ({
+  text,
+  compress,
+}: {
+  text: string;
+  /** defaults to `true`. If compression fails, falls back to bstring alone. */
+  compress?: boolean;
+}): Promise<EncodedData> => {
+  let deflated!: string;
+  if (compress !== false) {
+    try {
+      deflated = await toByteString(deflate(text));
+    } catch (error) {
+      console.error("encode: cannot deflate", error);
+    }
+  }
+  return {
+    version: "1",
+    encoding: "bstring",
+    compressed: !!deflated,
+    encoded: deflated || (await toByteString(text)),
+  };
+};
+
+export const decode = async (data: EncodedData): Promise<string> => {
+  let decoded: string;
+
+  switch (data.encoding) {
+    case "bstring":
+      // if compressed, do not double decode the bstring
+      decoded = data.compressed
+        ? data.encoded
+        : await byteStringToString(data.encoded);
+      break;
+    default:
+      throw new Error(`decode: unknown encoding "${data.encoding}"`);
+  }
+
+  if (data.compressed) {
+    return inflate(new Uint8Array(byteStringToArrayBuffer(decoded)), {
+      to: "string",
+    });
+  }
+
+  return decoded;
+};
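For context: btoa only consumes latin1 input, so UTF-8 text is first converted to a byte string, and the isByteString flag lets callers skip that conversion when the data already is one (the "prevent bloat" note above). A round-trip through the new pair looks like the sketch below (not part of the diff; it assumes a browser environment, since toByteString relies on FileReader.readAsBinaryString):

import { encode, decode } from "./encode";

const roundTrip = async () => {
  const original = JSON.stringify({ type: "excalidraw", elements: [] });
  const data = await encode({ text: original });
  // data.compressed is true unless deflate threw;
  // data.encoded holds the deflated (or plain) byte string
  const restored = await decode(data);
  console.assert(restored === original);
};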
src/data/image.ts (new file, +130)

@@ -0,0 +1,130 @@
+import decodePng from "png-chunks-extract";
+import tEXt from "png-chunk-text";
+import encodePng from "png-chunks-encode";
+import { stringToBase64, encode, decode, base64ToString } from "./encode";
+import { MIME_TYPES } from "../constants";
+
+// -----------------------------------------------------------------------------
+// PNG
+// -----------------------------------------------------------------------------
+
+const blobToArrayBuffer = (blob: Blob): Promise<ArrayBuffer> => {
+  if ("arrayBuffer" in blob) {
+    return blob.arrayBuffer();
+  }
+  // Safari
+  return new Promise((resolve, reject) => {
+    const reader = new FileReader();
+    reader.onload = (event) => {
+      if (!event.target?.result) {
+        return reject(new Error("couldn't convert blob to ArrayBuffer"));
+      }
+      resolve(event.target.result as ArrayBuffer);
+    };
+    reader.readAsArrayBuffer(blob);
+  });
+};
+
+export const getTEXtChunk = async (
+  blob: Blob,
+): Promise<{ keyword: string; text: string } | null> => {
+  const chunks = decodePng(new Uint8Array(await blobToArrayBuffer(blob)));
+  const metadataChunk = chunks.find((chunk) => chunk.name === "tEXt");
+  if (metadataChunk) {
+    return tEXt.decode(metadataChunk.data);
+  }
+  return null;
+};
+
+export const encodePngMetadata = async ({
+  blob,
+  metadata,
+}: {
+  blob: Blob;
+  metadata: string;
+}) => {
+  const chunks = decodePng(new Uint8Array(await blobToArrayBuffer(blob)));
+
+  const metadataChunk = tEXt.encode(
+    MIME_TYPES.excalidraw,
+    JSON.stringify(
+      await encode({
+        text: metadata,
+        compress: true,
+      }),
+    ),
+  );
+  // insert metadata before last chunk (iEND)
+  chunks.splice(-1, 0, metadataChunk);
+
+  return new Blob([encodePng(chunks)], { type: "image/png" });
+};
+
+export const decodePngMetadata = async (blob: Blob) => {
+  const metadata = await getTEXtChunk(blob);
+  if (metadata?.keyword === MIME_TYPES.excalidraw) {
+    try {
+      const encodedData = JSON.parse(metadata.text);
+      if (!("encoded" in encodedData)) {
+        // legacy, un-encoded scene JSON
+        if ("type" in encodedData && encodedData.type === "excalidraw") {
+          return metadata.text;
+        }
+        throw new Error("FAILED");
+      }
+      return await decode(encodedData);
+    } catch (error) {
+      console.error(error);
+      throw new Error("FAILED");
+    }
+  }
+  throw new Error("INVALID");
+};
+
+// -----------------------------------------------------------------------------
+// SVG
+// -----------------------------------------------------------------------------
+
+export const encodeSvgMetadata = async ({ text }: { text: string }) => {
+  const base64 = await stringToBase64(
+    JSON.stringify(await encode({ text })),
+    true /* is already byte string */,
+  );
+
+  let metadata = "";
+  metadata += `<!-- payload-type:${MIME_TYPES.excalidraw} -->`;
+  metadata += `<!-- payload-version:2 -->`;
+  metadata += "<!-- payload-start -->";
+  metadata += base64;
+  metadata += "<!-- payload-end -->";
+  return metadata;
+};
+
+export const decodeSvgMetadata = async ({ svg }: { svg: string }) => {
+  if (svg.includes(`payload-type:${MIME_TYPES.excalidraw}`)) {
+    const match = svg.match(/<!-- payload-start -->(.+?)<!-- payload-end -->/);
+    if (!match) {
+      throw new Error("INVALID");
+    }
+    const versionMatch = svg.match(/<!-- payload-version:(\d+) -->/);
+    const version = versionMatch?.[1] || "1";
+    const isByteString = version !== "1";
+
+    try {
+      const json = await base64ToString(match[1], isByteString);
+      const encodedData = JSON.parse(json);
+      if (!("encoded" in encodedData)) {
+        // legacy, un-encoded scene JSON
+        if ("type" in encodedData && encodedData.type === "excalidraw") {
+          return json;
+        }
+        throw new Error("FAILED");
+      }
+      return await decode(encodedData);
+    } catch (error) {
+      console.error(error);
+      throw new Error("FAILED");
+    }
+  }
+  throw new Error("INVALID");
+};
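For context, the PNG path round-trips like this (sketch, not part of the diff; pngBlob and sceneJSON are placeholders for a canvas export and a serialized scene):

import { encodePngMetadata, decodePngMetadata } from "./image";

const embedAndRestore = async (pngBlob: Blob, sceneJSON: string) => {
  // writes the compressed envelope into a tEXt chunk before iEND
  const withScene = await encodePngMetadata({ blob: pngBlob, metadata: sceneJSON });
  // finds the chunk again and inflates/decodes the envelope
  const restored = await decodePngMetadata(withScene);
  console.assert(restored === sceneJSON);
  return withScene;
};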
src/data/index.ts

@@ -19,8 +19,6 @@ import { serializeAsJSON } from "./json";
 import { ExportType } from "../scene/types";
 import { restore } from "./restore";
 import { ImportedDataState } from "./types";
-import { MIME_TYPES } from "../constants";
-import { stringToBase64 } from "../base64";

 export { loadFromBlob } from "./blob";
 export { saveAsJSON, loadFromJSON } from "./json";
@@ -302,21 +300,17 @@ export const exportCanvas = async (
     return window.alert(t("alerts.cannotExportEmptyCanvas"));
   }
   if (type === "svg" || type === "clipboard-svg") {
-    let metadata = "";
-
-    if (appState.exportEmbedScene && type === "svg") {
-      metadata += `<!-- payload-type:${MIME_TYPES.excalidraw} -->`;
-      metadata += "<!-- payload-start -->";
-      metadata += await stringToBase64(serializeAsJSON(elements, appState));
-      metadata += "<!-- payload-end -->";
-    }
-
     const tempSvg = exportToSvg(elements, {
       exportBackground,
       viewBackgroundColor,
       exportPadding,
       shouldAddWatermark,
-      metadata,
+      metadata:
+        appState.exportEmbedScene && type === "svg"
+          ? await (await import("./image")).encodeSvgMetadata({
+              text: serializeAsJSON(elements, appState),
+            })
+          : undefined,
     });
     if (type === "svg") {
       await fileSave(new Blob([tempSvg.outerHTML], { type: "image/svg+xml" }), {
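The inlined call now emits the whole comment payload, including the payload-version:2 marker that tells the decoder the base64 wraps an already-byte-string envelope. A symmetric round-trip (sketch, not part of the diff):

import { encodeSvgMetadata, decodeSvgMetadata } from "./image";

const roundTripSvg = async (sceneJSON: string) => {
  const comment = await encodeSvgMetadata({ text: sceneJSON });
  const svg = `<svg xmlns="http://www.w3.org/2000/svg">${comment}</svg>`;
  return await decodeSvgMetadata({ svg }); // resolves to sceneJSON
};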
@@ -345,9 +339,9 @@ export const exportCanvas = async (
   tempCanvas.toBlob(async (blob) => {
     if (blob) {
       if (appState.exportEmbedScene) {
-        blob = await (await import("./png")).encodeTEXtChunk(blob, {
-          keyword: MIME_TYPES.excalidraw,
-          text: serializeAsJSON(elements, appState),
+        blob = await (await import("./image")).encodePngMetadata({
+          blob,
+          metadata: serializeAsJSON(elements, appState),
         });
       }
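Both export call sites load ./image with a dynamic import(), which lets the bundler split pako and the png-chunk* dependencies out of the initial bundle; they are only fetched when an export actually embeds scene data.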
src/data/png.ts (deleted, -42)

@@ -1,42 +0,0 @@
-import decodePng from "png-chunks-extract";
-import tEXt from "png-chunk-text";
-import encodePng from "png-chunks-encode";
-
-const blobToArrayBuffer = (blob: Blob): Promise<ArrayBuffer> => {
-  if ("arrayBuffer" in blob) {
-    return blob.arrayBuffer();
-  }
-  // Safari
-  return new Promise((resolve, reject) => {
-    const reader = new FileReader();
-    reader.onload = (event) => {
-      if (!event.target?.result) {
-        return reject(new Error("couldn't convert blob to ArrayBuffer"));
-      }
-      resolve(event.target.result as ArrayBuffer);
-    };
-    reader.readAsArrayBuffer(blob);
-  });
-};
-
-export const getTEXtChunk = async (
-  blob: Blob,
-): Promise<{ keyword: string; text: string } | null> => {
-  const chunks = decodePng(new Uint8Array(await blobToArrayBuffer(blob)));
-  const metadataChunk = chunks.find((chunk) => chunk.name === "tEXt");
-  if (metadataChunk) {
-    return tEXt.decode(metadataChunk.data);
-  }
-  return null;
-};
-
-export const encodeTEXtChunk = async (
-  blob: Blob,
-  chunk: { keyword: string; text: string },
-): Promise<Blob> => {
-  const chunks = decodePng(new Uint8Array(await blobToArrayBuffer(blob)));
-  const metadata = tEXt.encode(chunk.keyword, chunk.text);
-  // insert metadata before last chunk (iEND)
-  chunks.splice(-1, 0, metadata);
-  return new Blob([encodePng(chunks)], { type: "image/png" });
-};
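The deleted helpers do not disappear: blobToArrayBuffer and getTEXtChunk move verbatim into src/data/image.ts above, while encodeTEXtChunk is superseded by encodePngMetadata, which runs the payload through encode() before writing the chunk.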