feat: image support (#4011)

Co-authored-by: Emil Atanasov <heitara@gmail.com>
Co-authored-by: Aakansha Doshi <aakansha1216@gmail.com>
David Luzar 2021-10-21 22:05:48 +02:00 committed by GitHub
parent 0f0244224d
commit 163ad1f4c4
85 changed files with 3536 additions and 618 deletions


@ -1,11 +1,16 @@
import { nanoid } from "nanoid";
import { cleanAppStateForExport } from "../appState";
import { EXPORT_DATA_TYPES } from "../constants";
import {
ALLOWED_IMAGE_MIME_TYPES,
EXPORT_DATA_TYPES,
MIME_TYPES,
} from "../constants";
import { clearElementsForExport } from "../element";
import { ExcalidrawElement } from "../element/types";
import { ExcalidrawElement, FileId } from "../element/types";
import { CanvasError } from "../errors";
import { t } from "../i18n";
import { calculateScrollCenter } from "../scene";
import { AppState } from "../types";
import { AppState, DataURL } from "../types";
import { FileSystemHandle } from "./filesystem";
import { isValidExcalidrawData } from "./json";
import { restore } from "./restore";
@ -14,16 +19,22 @@ import { ImportedLibraryData } from "./types";
const parseFileContents = async (blob: Blob | File) => {
let contents: string;
if (blob.type === "image/png") {
if (blob.type === MIME_TYPES.png) {
try {
return await (
await import(/* webpackChunkName: "image" */ "./image")
).decodePngMetadata(blob);
} catch (error) {
if (error.message === "INVALID") {
throw new Error(t("alerts.imageDoesNotContainScene"));
throw new DOMException(
t("alerts.imageDoesNotContainScene"),
"EncodingError",
);
} else {
throw new Error(t("alerts.cannotRestoreFromImage"));
throw new DOMException(
t("alerts.cannotRestoreFromImage"),
"EncodingError",
);
}
}
} else {
@ -40,7 +51,7 @@ const parseFileContents = async (blob: Blob | File) => {
};
});
}
if (blob.type === "image/svg+xml") {
if (blob.type === MIME_TYPES.svg) {
try {
return await (
await import(/* webpackChunkName: "image" */ "./image")
@ -49,9 +60,15 @@ const parseFileContents = async (blob: Blob | File) => {
});
} catch (error) {
if (error.message === "INVALID") {
throw new Error(t("alerts.imageDoesNotContainScene"));
throw new DOMException(
t("alerts.imageDoesNotContainScene"),
"EncodingError",
);
} else {
throw new Error(t("alerts.cannotRestoreFromImage"));
throw new DOMException(
t("alerts.cannotRestoreFromImage"),
"EncodingError",
);
}
}
}
@ -70,13 +87,13 @@ export const getMimeType = (blob: Blob | string): string => {
name = blob.name || "";
}
if (/\.(excalidraw|json)$/.test(name)) {
return "application/json";
return MIME_TYPES.json;
} else if (/\.png$/.test(name)) {
return "image/png";
return MIME_TYPES.png;
} else if (/\.jpe?g$/.test(name)) {
return "image/jpeg";
return MIME_TYPES.jpg;
} else if (/\.svg$/.test(name)) {
return "image/svg+xml";
return MIME_TYPES.svg;
}
return "";
};
@ -100,6 +117,15 @@ export const isImageFileHandle = (handle: FileSystemHandle | null) => {
return type === "png" || type === "svg";
};
export const isSupportedImageFile = (
blob: Blob | null | undefined,
): blob is Blob & { type: typeof ALLOWED_IMAGE_MIME_TYPES[number] } => {
const { type } = blob || {};
return (
!!type && (ALLOWED_IMAGE_MIME_TYPES as readonly string[]).includes(type)
);
};
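A minimal usage sketch for the type guard above; the drop handler is hypothetical, same-module usage assumed:

// hypothetical drop handler illustrating the narrowing behavior
const onImageDrop = (file: File | null) => {
  if (isSupportedImageFile(file)) {
    // file.type is narrowed to the ALLOWED_IMAGE_MIME_TYPES union here,
    // so image-only code paths can consume it without casts
    console.info("accepted image of type:", file.type);
  } else {
    console.warn("unsupported file type");
  }
};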
export const loadFromBlob = async (
blob: Blob,
/** @see restore.localAppState */
@ -123,6 +149,7 @@ export const loadFromBlob = async (
? calculateScrollCenter(data.elements || [], localAppState, null)
: {}),
},
files: data.files,
},
localAppState,
localElements,
@ -165,3 +192,93 @@ export const canvasToBlob = async (
}
});
};
/** generates SHA-1 digest from supplied file (if not supported, falls back
to a 40-char base64 random id) */
export const generateIdFromFile = async (file: File) => {
let id: FileId;
try {
const hashBuffer = await window.crypto.subtle.digest(
"SHA-1",
await file.arrayBuffer(),
);
id =
// convert buffer to byte array
Array.from(new Uint8Array(hashBuffer))
// convert to hex string
.map((byte) => byte.toString(16).padStart(2, "0"))
.join("") as FileId;
} catch (error) {
console.error(error);
// length 40 to align with the HEX length of SHA-1 (which is 160 bits)
id = nanoid(40) as FileId;
}
return id;
};
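A quick sketch of the contract above, assuming an async context: identical bytes yield identical 40-char hex ids regardless of filename, which is what enables file deduplication.

const bytes = new Uint8Array([137, 80, 78, 71]); // sample content
const fileA = new File([bytes], "a.png", { type: "image/png" });
const fileB = new File([bytes], "copy.png", { type: "image/png" });
const idA = await generateIdFromFile(fileA);
const idB = await generateIdFromFile(fileB);
console.assert(idA === idB); // the hash covers content only, not the name
console.assert(idA.length === 40); // 160-bit SHA-1 = 40 hex chars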
export const getDataURL = async (file: Blob | File): Promise<DataURL> => {
return new Promise((resolve, reject) => {
const reader = new FileReader();
reader.onload = () => {
const dataURL = reader.result as DataURL;
resolve(dataURL);
};
reader.onerror = (error) => reject(error);
reader.readAsDataURL(file);
});
};
export const dataURLToFile = (dataURL: DataURL, filename = "") => {
const dataIndexStart = dataURL.indexOf(",");
const byteString = atob(dataURL.slice(dataIndexStart + 1));
const mimeType = dataURL.slice(0, dataIndexStart).split(":")[1].split(";")[0];
const ab = new ArrayBuffer(byteString.length);
const ia = new Uint8Array(ab);
for (let i = 0; i < byteString.length; i++) {
ia[i] = byteString.charCodeAt(i);
}
return new File([ab], filename, { type: mimeType });
};
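The two helpers above are inverses of each other (up to the filename); a roundtrip sketch with made-up content, assuming an async context:

const original = new File([new TextEncoder().encode("hello")], "note.txt", {
  type: "text/plain",
});
const url = await getDataURL(original); // "data:text/plain;base64,aGVsbG8="
const restored = dataURLToFile(url, original.name);
console.assert(restored.type === original.type);
console.assert(restored.size === original.size);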
export const resizeImageFile = async (
file: File,
maxWidthOrHeight: number,
): Promise<File> => {
// SVG files shouldn't and can't be resized
if (file.type === MIME_TYPES.svg) {
return file;
}
const [pica, imageBlobReduce] = await Promise.all([
import("pica").then((res) => res.default),
// a wrapper around pica for a better API
import("image-blob-reduce").then((res) => res.default),
]);
// CRA's minification settings break pica in WebWorkers, so let's disable
// them for now
// https://github.com/nodeca/image-blob-reduce/issues/21#issuecomment-757365513
const reduce = imageBlobReduce({
pica: pica({ features: ["js", "wasm"] }),
});
const fileType = file.type;
if (!isSupportedImageFile(file)) {
throw new Error(t("errors.unsupportedFileType"));
}
return new File(
[await reduce.toBlob(file, { max: maxWidthOrHeight })],
file.name,
{ type: fileType },
);
};
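Usage sketch; `largePhoto` is a hypothetical oversized raster file, and SVGs pass through untouched per the early return above:

declare const largePhoto: File; // e.g. a 4000x3000 JPEG from a file input
const fitted = await resizeImageFile(largePhoto, 1440);
// the longest side is now at most 1440px; type and name are preserved
console.info(fitted.name, fitted.type, fitted.size);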
export const SVGStringToFile = (SVGString: string, filename: string = "") => {
return new File([new TextEncoder().encode(SVGString)], filename, {
type: MIME_TYPES.svg,
}) as File & { type: typeof MIME_TYPES.svg };
};


@ -1,16 +1,19 @@
import { deflate, inflate } from "pako";
import { encryptData, decryptData } from "./encryption";
// -----------------------------------------------------------------------------
// byte (binary) strings
// -----------------------------------------------------------------------------
// fast, Buffer-compatible implementation
export const toByteString = (data: string | Uint8Array): Promise<string> => {
export const toByteString = (
data: string | Uint8Array | ArrayBuffer,
): Promise<string> => {
return new Promise((resolve, reject) => {
const blob =
typeof data === "string"
? new Blob([new TextEncoder().encode(data)])
: new Blob([data]);
: new Blob([data instanceof Uint8Array ? data : new Uint8Array(data)]);
const reader = new FileReader();
reader.onload = (event) => {
if (!event.target || typeof event.target.result !== "string") {
@ -44,12 +47,14 @@ const byteStringToString = (byteString: string) => {
* due to reencoding
*/
export const stringToBase64 = async (str: string, isByteString = false) => {
return isByteString ? btoa(str) : btoa(await toByteString(str));
return isByteString ? window.btoa(str) : window.btoa(await toByteString(str));
};
// async to align with stringToBase64
export const base64ToString = async (base64: string, isByteString = false) => {
return isByteString ? atob(base64) : byteStringToString(atob(base64));
return isByteString
? window.atob(base64)
: byteStringToString(window.atob(base64));
};
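A sketch of why these wrappers exist: plain `window.btoa()` throws on non-Latin1 input, while the byte-string path encodes to UTF-8 first, so arbitrary strings roundtrip (async context assumed):

const input = "naïve フィヨルド";
const b64 = await stringToBase64(input); // UTF-8 bytes, base64-encoded
console.assert((await base64ToString(b64)) === input);
// by contrast, window.btoa(input) would throw an InvalidCharacterError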
// -----------------------------------------------------------------------------
@ -114,3 +119,261 @@ export const decode = async (data: EncodedData): Promise<string> => {
return decoded;
};
// -----------------------------------------------------------------------------
// binary encoding
// -----------------------------------------------------------------------------
type FileEncodingInfo = {
/* version 2 is the version we're shipping the initial image support with.
version 1 was a PR version that a lot of people were using anyway.
Thus, if there are issues we can check whether they're using the
unofficial version */
version: 1 | 2;
compression: "pako@1" | null;
encryption: "AES-GCM" | null;
};
// -----------------------------------------------------------------------------
const CONCAT_BUFFERS_VERSION = 1;
/** how many bytes we use to encode how many bytes the next chunk has.
* Corresponds to DataView setter methods (setUint32, setUint16, etc).
*
* NOTE! These values must not be changed, as that would be backwards incompatible!
*/
const VERSION_DATAVIEW_BYTES = 4;
const NEXT_CHUNK_SIZE_DATAVIEW_BYTES = 4;
// -----------------------------------------------------------------------------
const DATA_VIEW_BITS_MAP = { 1: 8, 2: 16, 4: 32 } as const;
// getter
function dataView(buffer: Uint8Array, bytes: 1 | 2 | 4, offset: number): number;
// setter
function dataView(
buffer: Uint8Array,
bytes: 1 | 2 | 4,
offset: number,
value: number,
): Uint8Array;
/**
* abstraction over DataView that serves as a typed getter/setter in case
* you're using constants for the byte size and want to ensure there's no
* discrepancy in the encoding across refactors.
*
* DataView provides endian-agnostic handling of numbers in ArrayBuffers.
*/
function dataView(
buffer: Uint8Array,
bytes: 1 | 2 | 4,
offset: number,
value?: number,
): Uint8Array | number {
if (value != null) {
if (value > Math.pow(2, DATA_VIEW_BITS_MAP[bytes]) - 1) {
throw new Error(
`attempting to set value higher than the allocated bytes (value: ${value}, bytes: ${bytes})`,
);
}
const method = `setUint${DATA_VIEW_BITS_MAP[bytes]}` as const;
new DataView(buffer.buffer)[method](offset, value);
return buffer;
}
const method = `getUint${DATA_VIEW_BITS_MAP[bytes]}` as const;
return new DataView(buffer.buffer)[method](offset);
}
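A small sketch of the overloads above, reusing one byte-width constant for the set and the get so the encoding cannot drift:

const scratch = new Uint8Array(8);
dataView(scratch, 4, 0, 70_000); // setter overload -> setUint32(0, 70000)
console.assert(dataView(scratch, 4, 0) === 70_000); // getter -> getUint32(0)
// oversized values throw rather than silently truncate:
// dataView(scratch, 2, 4, 70_000) -> Error (70000 > 2^16 - 1)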
// -----------------------------------------------------------------------------
/**
* Resulting concatenated buffer has this format:
*
* [
* VERSION chunk (4 bytes)
* LENGTH chunk 1 (4 bytes)
* DATA chunk 1 (up to 2^32 bytes)
* LENGTH chunk 2 (4 bytes)
* DATA chunk 2 (up to 2^32 bytes)
* ...
* ]
*
* @param buffers each buffer (chunk) must be at most 2^32 bytes large (~4GB)
*/
const concatBuffers = (...buffers: Uint8Array[]) => {
const bufferView = new Uint8Array(
VERSION_DATAVIEW_BYTES +
NEXT_CHUNK_SIZE_DATAVIEW_BYTES * buffers.length +
buffers.reduce((acc, buffer) => acc + buffer.byteLength, 0),
);
let cursor = 0;
// as the first chunk we'll encode the version for backwards compatibility
dataView(bufferView, VERSION_DATAVIEW_BYTES, cursor, CONCAT_BUFFERS_VERSION);
cursor += VERSION_DATAVIEW_BYTES;
for (const buffer of buffers) {
dataView(
bufferView,
NEXT_CHUNK_SIZE_DATAVIEW_BYTES,
cursor,
buffer.byteLength,
);
cursor += NEXT_CHUNK_SIZE_DATAVIEW_BYTES;
bufferView.set(buffer, cursor);
cursor += buffer.byteLength;
}
return bufferView;
};
/** can only be used on buffers created via `concatBuffers()` */
const splitBuffers = (concatenatedBuffer: Uint8Array) => {
const buffers = [];
let cursor = 0;
// first chunk is the version (ignored for now)
cursor += VERSION_DATAVIEW_BYTES;
while (true) {
const chunkSize = dataView(
concatenatedBuffer,
NEXT_CHUNK_SIZE_DATAVIEW_BYTES,
cursor,
);
cursor += NEXT_CHUNK_SIZE_DATAVIEW_BYTES;
buffers.push(concatenatedBuffer.slice(cursor, cursor + chunkSize));
cursor += chunkSize;
if (cursor >= concatenatedBuffer.byteLength) {
break;
}
}
return buffers;
};
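Roundtrip sketch for the two module-private helpers above, mirroring how `compressData`/`decompressData` below use them:

const metadataChunk = new TextEncoder().encode('{"v":1}');
const contentsChunk = new TextEncoder().encode("payload");
const packed = concatBuffers(metadataChunk, contentsChunk);
// 4B version + (4B length + data) per chunk
console.assert(packed.byteLength === 4 + (4 + 7) + (4 + 7));
const [meta, contents] = splitBuffers(packed);
console.assert(new TextDecoder().decode(meta) === '{"v":1}');
console.assert(new TextDecoder().decode(contents) === "payload");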
// helpers for (de)compressing data with JSON metadata including encryption
// -----------------------------------------------------------------------------
/** @private */
const _encryptAndCompress = async (
data: Uint8Array | string,
encryptionKey: string,
) => {
const { encryptedBuffer, iv } = await encryptData(
encryptionKey,
deflate(data),
);
return { iv, buffer: new Uint8Array(encryptedBuffer) };
};
/**
* The returned buffer has the following format:
* `[]` refers to a buffers wrapper (see `concatBuffers`)
*
* [
* encodingMetadataBuffer,
* iv,
* [
* contentsMetadataBuffer
* contentsBuffer
* ]
* ]
*/
export const compressData = async <T extends Record<string, any> = never>(
dataBuffer: Uint8Array,
options: {
encryptionKey: string;
} & ([T] extends [never]
? {
metadata?: T;
}
: {
metadata: T;
}),
): Promise<Uint8Array> => {
const fileInfo: FileEncodingInfo = {
version: 2,
compression: "pako@1",
encryption: "AES-GCM",
};
const encodingMetadataBuffer = new TextEncoder().encode(
JSON.stringify(fileInfo),
);
const contentsMetadataBuffer = new TextEncoder().encode(
JSON.stringify(options.metadata || null),
);
const { iv, buffer } = await _encryptAndCompress(
concatBuffers(contentsMetadataBuffer, dataBuffer),
options.encryptionKey,
);
return concatBuffers(encodingMetadataBuffer, iv, buffer);
};
/** @private */
const _decryptAndDecompress = async (
iv: Uint8Array,
decryptedBuffer: Uint8Array,
decryptionKey: string,
isCompressed: boolean,
) => {
decryptedBuffer = new Uint8Array(
await decryptData(iv, decryptedBuffer, decryptionKey),
);
if (isCompressed) {
return inflate(decryptedBuffer);
}
return decryptedBuffer;
};
export const decompressData = async <T extends Record<string, any>>(
bufferView: Uint8Array,
options: { decryptionKey: string },
) => {
// first chunk is encoding metadata (ignored for now)
const [encodingMetadataBuffer, iv, buffer] = splitBuffers(bufferView);
const encodingMetadata: FileEncodingInfo = JSON.parse(
new TextDecoder().decode(encodingMetadataBuffer),
);
try {
const [contentsMetadataBuffer, contentsBuffer] = splitBuffers(
await _decryptAndDecompress(
iv,
buffer,
options.decryptionKey,
!!encodingMetadata.compression,
),
);
const metadata = JSON.parse(
new TextDecoder().decode(contentsMetadataBuffer),
) as T;
return {
/** metadata source is always JSON so we can decode it here */
metadata,
/** data can be anything so the caller must decode it */
data: contentsBuffer,
};
} catch (error) {
console.error(
`Error during decompressing and decrypting the file.`,
encodingMetadata,
);
throw error;
}
};
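End-to-end sketch of the two exported helpers above; the key comes from `generateEncryptionKey()` in ./encryption (added in this commit), and the metadata shape is a made-up example:

import { generateEncryptionKey } from "./encryption";

const key = (await generateEncryptionKey())!; // the JWK "k" string
const payload = new TextEncoder().encode("binary scene data");
const packed = await compressData<{ mimeType: string }>(payload, {
  encryptionKey: key,
  metadata: { mimeType: "application/octet-stream" },
});
const { metadata, data } = await decompressData<{ mimeType: string }>(packed, {
  decryptionKey: key,
});
console.assert(metadata.mimeType === "application/octet-stream");
console.assert(new TextDecoder().decode(data) === "binary scene data");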
// -----------------------------------------------------------------------------

src/data/encryption.ts Normal file

@ -0,0 +1,79 @@
export const IV_LENGTH_BYTES = 12;
export const createIV = () => {
const arr = new Uint8Array(IV_LENGTH_BYTES);
return window.crypto.getRandomValues(arr);
};
export const generateEncryptionKey = async () => {
const key = await window.crypto.subtle.generateKey(
{
name: "AES-GCM",
length: 128,
},
true, // extractable
["encrypt", "decrypt"],
);
return (await window.crypto.subtle.exportKey("jwk", key)).k;
};
export const getImportedKey = (key: string, usage: KeyUsage) =>
window.crypto.subtle.importKey(
"jwk",
{
alg: "A128GCM",
ext: true,
k: key,
key_ops: ["encrypt", "decrypt"],
kty: "oct",
},
{
name: "AES-GCM",
length: 128,
},
false, // extractable
[usage],
);
export const encryptData = async (
key: string,
data: Uint8Array | ArrayBuffer | Blob | File | string,
): Promise<{ encryptedBuffer: ArrayBuffer; iv: Uint8Array }> => {
const importedKey = await getImportedKey(key, "encrypt");
const iv = createIV();
const buffer: ArrayBuffer | Uint8Array =
typeof data === "string"
? new TextEncoder().encode(data)
: data instanceof Uint8Array
? data
: data instanceof Blob
? await data.arrayBuffer()
: data;
const encryptedBuffer = await window.crypto.subtle.encrypt(
{
name: "AES-GCM",
iv,
},
importedKey,
buffer as ArrayBuffer | Uint8Array,
);
return { encryptedBuffer, iv };
};
export const decryptData = async (
iv: Uint8Array,
encrypted: Uint8Array | ArrayBuffer,
privateKey: string,
): Promise<ArrayBuffer> => {
const key = await getImportedKey(privateKey, "decrypt");
return window.crypto.subtle.decrypt(
{
name: "AES-GCM",
iv,
},
key,
encrypted,
);
};
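A roundtrip sketch for this new module (async context assumed); the IV is generated per call by `encryptData` and must travel alongside the ciphertext:

const key = (await generateEncryptionKey())!; // JWK "k" string
const { encryptedBuffer, iv } = await encryptData(key, "secret payload");
const decrypted = await decryptData(iv, encryptedBuffer, key);
console.assert(new TextDecoder().decode(decrypted) === "secret payload");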


@ -10,6 +10,7 @@ import { AbortError } from "../errors";
import { debounce } from "../utils";
type FILE_EXTENSION =
| "gif"
| "jpg"
| "png"
| "svg"
@ -17,15 +18,6 @@ type FILE_EXTENSION =
| "excalidraw"
| "excalidrawlib";
const FILE_TYPE_TO_MIME_TYPE: Record<FILE_EXTENSION, string> = {
jpg: "image/jpeg",
png: "image/png",
svg: "image/svg+xml",
json: "application/json",
excalidraw: MIME_TYPES.excalidraw,
excalidrawlib: MIME_TYPES.excalidrawlib,
};
const INPUT_CHANGE_INTERVAL_MS = 500;
export const fileOpen = <M extends boolean | undefined = false>(opts: {
@ -41,7 +33,7 @@ export const fileOpen = <M extends boolean | undefined = false>(opts: {
: FileWithHandle[];
const mimeTypes = opts.extensions?.reduce((mimeTypes, type) => {
mimeTypes.push(FILE_TYPE_TO_MIME_TYPE[type]);
mimeTypes.push(MIME_TYPES[type]);
return mimeTypes;
}, [] as string[]);


@ -57,7 +57,7 @@ export const encodePngMetadata = async ({
// insert metadata before last chunk (iEND)
chunks.splice(-1, 0, metadataChunk);
return new Blob([encodePng(chunks)], { type: "image/png" });
return new Blob([encodePng(chunks)], { type: MIME_TYPES.png });
};
export const decodePngMetadata = async (blob: Blob) => {


@ -2,12 +2,12 @@ import {
copyBlobToClipboardAsPng,
copyTextToSystemClipboard,
} from "../clipboard";
import { DEFAULT_EXPORT_PADDING } from "../constants";
import { DEFAULT_EXPORT_PADDING, MIME_TYPES } from "../constants";
import { NonDeletedExcalidrawElement } from "../element/types";
import { t } from "../i18n";
import { exportToCanvas, exportToSvg } from "../scene/export";
import { ExportType } from "../scene/types";
import { AppState } from "../types";
import { AppState, BinaryFiles } from "../types";
import { canvasToBlob } from "./blob";
import { fileSave, FileSystemHandle } from "./filesystem";
import { serializeAsJSON } from "./json";
@ -19,6 +19,7 @@ export const exportCanvas = async (
type: ExportType,
elements: readonly NonDeletedExcalidrawElement[],
appState: AppState,
files: BinaryFiles,
{
exportBackground,
exportPadding = DEFAULT_EXPORT_PADDING,
@ -37,17 +38,21 @@ export const exportCanvas = async (
throw new Error(t("alerts.cannotExportEmptyCanvas"));
}
if (type === "svg" || type === "clipboard-svg") {
const tempSvg = await exportToSvg(elements, {
exportBackground,
exportWithDarkMode: appState.exportWithDarkMode,
viewBackgroundColor,
exportPadding,
exportScale: appState.exportScale,
exportEmbedScene: appState.exportEmbedScene && type === "svg",
});
const tempSvg = await exportToSvg(
elements,
{
exportBackground,
exportWithDarkMode: appState.exportWithDarkMode,
viewBackgroundColor,
exportPadding,
exportScale: appState.exportScale,
exportEmbedScene: appState.exportEmbedScene && type === "svg",
},
files,
);
if (type === "svg") {
return await fileSave(
new Blob([tempSvg.outerHTML], { type: "image/svg+xml" }),
new Blob([tempSvg.outerHTML], { type: MIME_TYPES.svg }),
{
name,
extension: "svg",
@ -60,7 +65,7 @@ export const exportCanvas = async (
}
}
const tempCanvas = exportToCanvas(elements, appState, {
const tempCanvas = await exportToCanvas(elements, appState, files, {
exportBackground,
viewBackgroundColor,
exportPadding,
@ -76,7 +81,7 @@ export const exportCanvas = async (
await import(/* webpackChunkName: "image" */ "./image")
).encodePngMetadata({
blob,
metadata: serializeAsJSON(elements, appState),
metadata: serializeAsJSON(elements, appState, files, "local"),
});
}


@ -1,9 +1,9 @@
import { fileOpen, fileSave } from "./filesystem";
import { cleanAppStateForExport } from "../appState";
import { cleanAppStateForExport, clearAppStateForDatabase } from "../appState";
import { EXPORT_DATA_TYPES, EXPORT_SOURCE, MIME_TYPES } from "../constants";
import { clearElementsForExport } from "../element";
import { clearElementsForDatabase, clearElementsForExport } from "../element";
import { ExcalidrawElement } from "../element/types";
import { AppState } from "../types";
import { AppState, BinaryFiles } from "../types";
import { isImageFileHandle, loadFromBlob } from "./blob";
import {
@ -13,16 +13,50 @@ import {
} from "./types";
import Library from "./library";
/**
* Strips out files that aren't referenced by any non-deleted element
*/
const filterOutDeletedFiles = (
elements: readonly ExcalidrawElement[],
files: BinaryFiles,
) => {
const nextFiles: BinaryFiles = {};
for (const element of elements) {
if (
!element.isDeleted &&
"fileId" in element &&
element.fileId &&
files[element.fileId]
) {
nextFiles[element.fileId] = files[element.fileId];
}
}
return nextFiles;
};
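A sketch with hypothetical ids of what the module-private filter above does: files referenced only by deleted elements, or by nothing at all, are dropped from the export:

const files = { live: {}, orphan: {} } as unknown as BinaryFiles;
const elements = [
  { type: "image", fileId: "live", isDeleted: false },
  { type: "image", fileId: "orphan", isDeleted: true },
] as unknown as readonly ExcalidrawElement[];
console.assert("live" in filterOutDeletedFiles(elements, files));
console.assert(!("orphan" in filterOutDeletedFiles(elements, files)));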
export const serializeAsJSON = (
elements: readonly ExcalidrawElement[],
appState: Partial<AppState>,
files: BinaryFiles,
type: "local" | "database",
): string => {
const data: ExportedDataState = {
type: EXPORT_DATA_TYPES.excalidraw,
version: 2,
source: EXPORT_SOURCE,
elements: clearElementsForExport(elements),
appState: cleanAppStateForExport(appState),
elements:
type === "local"
? clearElementsForExport(elements)
: clearElementsForDatabase(elements),
appState:
type === "local"
? cleanAppStateForExport(appState)
: clearAppStateForDatabase(appState),
files:
type === "local"
? filterOutDeletedFiles(elements, files)
: // will be stripped from JSON
undefined,
};
return JSON.stringify(data, null, 2);
@ -31,8 +65,9 @@ export const serializeAsJSON = (
export const saveAsJSON = async (
elements: readonly ExcalidrawElement[],
appState: AppState,
files: BinaryFiles,
) => {
const serialized = serializeAsJSON(elements, appState);
const serialized = serializeAsJSON(elements, appState, files, "local");
const blob = new Blob([serialized], {
type: MIME_TYPES.excalidraw,
});
@ -56,15 +91,7 @@ export const loadFromJSON = async (
description: "Excalidraw files",
// ToDo: Be over-permissive until https://bugs.webkit.org/show_bug.cgi?id=34442
// gets resolved. Else, iOS users cannot open `.excalidraw` files.
/*
extensions: [".json", ".excalidraw", ".png", ".svg"],
mimeTypes: [
MIME_TYPES.excalidraw,
"application/json",
"image/png",
"image/svg+xml",
],
*/
// extensions: ["json", "excalidraw", "png", "svg"],
});
return loadFromBlob(blob, localAppState, localElements);
};


@ -1,5 +1,5 @@
import { ExcalidrawElement } from "../element/types";
import { AppState } from "../types";
import { AppState, BinaryFiles } from "../types";
import { exportCanvas } from ".";
import { getNonDeletedElements } from "../element";
import { getFileHandleType, isImageFileHandleType } from "./blob";
@ -7,6 +7,7 @@ import { getFileHandleType, isImageFileHandleType } from "./blob";
export const resaveAsImageWithScene = async (
elements: readonly ExcalidrawElement[],
appState: AppState,
files: BinaryFiles,
) => {
const { exportBackground, viewBackgroundColor, name, fileHandle } = appState;
@ -26,6 +27,7 @@ export const resaveAsImageWithScene = async (
fileHandleType,
getNonDeletedElements(elements),
appState,
files,
{
exportBackground,
viewBackgroundColor,


@ -3,7 +3,7 @@ import {
ExcalidrawSelectionElement,
FontFamilyValues,
} from "../element/types";
import { AppState, NormalizedZoomValue } from "../types";
import { AppState, BinaryFiles, NormalizedZoomValue } from "../types";
import { ImportedDataState } from "./types";
import {
getElementMap,
@ -37,6 +37,7 @@ export const AllowedExcalidrawElementTypes: Record<
diamond: true,
ellipse: true,
line: true,
image: true,
arrow: true,
freedraw: true,
};
@ -44,6 +45,7 @@ export const AllowedExcalidrawElementTypes: Record<
export type RestoredDataState = {
elements: ExcalidrawElement[];
appState: RestoredAppState;
files: BinaryFiles;
};
const getFontFamilyByName = (fontFamilyName: string): FontFamilyValues => {
@ -57,16 +59,19 @@ const getFontFamilyByName = (fontFamilyName: string): FontFamilyValues => {
const restoreElementWithProperties = <
T extends ExcalidrawElement,
K extends keyof Omit<
Required<T>,
Exclude<keyof ExcalidrawElement, "type" | "x" | "y">
>
K extends Pick<T, keyof Omit<Required<T>, keyof ExcalidrawElement>>
>(
element: Required<T>,
extra: Pick<T, K>,
extra: Pick<
T,
// This extra Pick<T, keyof K> ensures no excess properties are passed.
// @ts-ignore TS complains here but type checks the call sites fine.
keyof K
> &
Partial<Pick<ExcalidrawElement, "type" | "x" | "y">>,
): T => {
const base: Pick<T, keyof ExcalidrawElement> = {
type: (extra as Partial<T>).type || element.type,
type: extra.type || element.type,
// all elements must have version > 0 so getSceneVersion() will pick up
// newly added elements
version: element.version || 1,
@ -79,8 +84,8 @@ const restoreElementWithProperties = <
roughness: element.roughness ?? 1,
opacity: element.opacity == null ? 100 : element.opacity,
angle: element.angle || 0,
x: (extra as Partial<T>).x ?? element.x ?? 0,
y: (extra as Partial<T>).y ?? element.y ?? 0,
x: extra.x ?? element.x ?? 0,
y: extra.y ?? element.y ?? 0,
strokeColor: element.strokeColor,
backgroundColor: element.backgroundColor,
width: element.width || 0,
@ -102,7 +107,7 @@ const restoreElementWithProperties = <
const restoreElement = (
element: Exclude<ExcalidrawElement, ExcalidrawSelectionElement>,
): typeof element => {
): typeof element | null => {
switch (element.type) {
case "text":
let fontSize = element.fontSize;
@ -131,6 +136,12 @@ const restoreElement = (
pressures: element.pressures,
});
}
case "image":
return restoreElementWithProperties(element, {
status: element.status || "pending",
fileId: element.fileId,
scale: element.scale || [1, 1],
});
case "line":
// @ts-ignore LEGACY type
// eslint-disable-next-line no-fallthrough
@ -194,7 +205,7 @@ export const restoreElements = (
// filtering out selection, which is legacy, no longer kept in elements,
// and causing issues if retained
if (element.type !== "selection" && !isInvisiblySmallElement(element)) {
let migratedElement: ExcalidrawElement = restoreElement(element);
let migratedElement: ExcalidrawElement | null = restoreElement(element);
if (migratedElement) {
const localElement = localElementsMap?.[element.id];
if (localElement && localElement.version > migratedElement.version) {
@ -260,5 +271,6 @@ export const restore = (
return {
elements: restoreElements(data?.elements, localElements),
appState: restoreAppState(data?.appState, localAppState || null),
files: data?.files || {},
};
};


@ -1,5 +1,5 @@
import { ExcalidrawElement } from "../element/types";
import { AppState, LibraryItems } from "../types";
import { AppState, BinaryFiles, LibraryItems } from "../types";
import type { cleanAppStateForExport } from "../appState";
export interface ExportedDataState {
@ -8,6 +8,7 @@ export interface ExportedDataState {
source: string;
elements: readonly ExcalidrawElement[];
appState: ReturnType<typeof cleanAppStateForExport>;
files: BinaryFiles | undefined;
}
export interface ImportedDataState {
@ -18,6 +19,7 @@ export interface ImportedDataState {
appState?: Readonly<Partial<AppState>> | null;
scrollToContent?: boolean;
libraryItems?: LibraryItems;
files?: BinaryFiles;
}
export interface ExportedLibraryData {