feat: add first-class support for CJK (#8530)

Marcel Mraz 2024-10-17 21:14:17 +03:00 committed by GitHub
parent 21815fb930
commit b479f3bd65
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
288 changed files with 3559 additions and 918 deletions


@ -1,214 +0,0 @@
import {
base64ToArrayBuffer,
stringToBase64,
toByteString,
} from "../data/encode";
import { LOCAL_FONT_PROTOCOL } from "./metadata";
import loadWoff2 from "./wasm/woff2.loader";
import loadHbSubset from "./wasm/hb-subset.loader";
export interface Font {
urls: URL[];
fontFace: FontFace;
getContent(codePoints: ReadonlySet<number>): Promise<string>;
}
export const UNPKG_FALLBACK_URL = `https://unpkg.com/${
import.meta.env.VITE_PKG_NAME
? `${import.meta.env.VITE_PKG_NAME}@${import.meta.env.PKG_VERSION}` // should be provided by vite during package build
: "@excalidraw/excalidraw" // fallback to latest package version (i.e. for app)
}/dist/prod/`;
export class ExcalidrawFont implements Font {
public readonly urls: URL[];
public readonly fontFace: FontFace;
constructor(family: string, uri: string, descriptors?: FontFaceDescriptors) {
this.urls = ExcalidrawFont.createUrls(uri);
const sources = this.urls
.map((url) => `url(${url}) ${ExcalidrawFont.getFormat(url)}`)
.join(", ");
this.fontFace = new FontFace(family, sources, {
display: "swap",
style: "normal",
weight: "400",
...descriptors,
});
}
/**
* Tries to fetch woff2 content, based on the registered urls (from first to last, treated as fallbacks).
*
* NOTE: assumes usage of `dataurl` outside the browser environment
*
* @returns base64 with subsetted glyphs based on the passed codepoints, the last defined url otherwise
*/
public async getContent(codePoints: ReadonlySet<number>): Promise<string> {
let i = 0;
const errorMessages = [];
while (i < this.urls.length) {
const url = this.urls[i];
// it's dataurl (server), the font is inlined as base64, no need to fetch
if (url.protocol === "data:") {
const arrayBuffer = base64ToArrayBuffer(url.toString().split(",")[1]);
const base64 = await ExcalidrawFont.subsetGlyphsByCodePoints(
arrayBuffer,
codePoints,
);
return base64;
}
try {
const response = await fetch(url, {
headers: {
Accept: "font/woff2",
},
});
if (response.ok) {
const arrayBuffer = await response.arrayBuffer();
const base64 = await ExcalidrawFont.subsetGlyphsByCodePoints(
arrayBuffer,
codePoints,
);
return base64;
}
// response not ok, try to continue
errorMessages.push(
`"${url.toString()}" returned status "${response.status}"`,
);
} catch (e) {
errorMessages.push(`"${url.toString()}" returned error "${e}"`);
}
i++;
}
console.error(
`Failed to fetch font "${
this.fontFace.family
}" from urls "${this.urls.toString()}`,
JSON.stringify(errorMessages, undefined, 2),
);
// in case of issues, at least return the last url as a content
// defaults to unpkg for bundled fonts (so that we don't have to host them forever) and http url for others
return this.urls.length ? this.urls[this.urls.length - 1].toString() : "";
}
/**
Tries to subset glyphs in a font based on the used codepoints, returning the font as a dataurl.
*
@param arrayBuffer font data buffer, preferably in the woff2 format, though others should work as well
* @param codePoints codepoints used to subset the glyphs
*
* @returns font with subsetted glyphs (all glyphs in case of errors) converted into a dataurl
*/
private static async subsetGlyphsByCodePoints(
arrayBuffer: ArrayBuffer,
codePoints: ReadonlySet<number>,
): Promise<string> {
try {
// lazy loaded wasm modules to avoid multiple initializations in case of concurrent triggers
const { compress, decompress } = await loadWoff2();
const { subset } = await loadHbSubset();
const decompressedBinary = decompress(arrayBuffer).buffer;
const subsetSnft = subset(decompressedBinary, codePoints);
const compressedBinary = compress(subsetSnft.buffer);
return ExcalidrawFont.toBase64(compressedBinary.buffer);
} catch (e) {
console.error("Skipped glyph subsetting", e);
// Fallback to encoding whole font in case of errors
return ExcalidrawFont.toBase64(arrayBuffer);
}
}
private static async toBase64(arrayBuffer: ArrayBuffer) {
let base64: string;
if (typeof Buffer !== "undefined") {
// node + server-side
base64 = Buffer.from(arrayBuffer).toString("base64");
} else {
base64 = await stringToBase64(await toByteString(arrayBuffer), true);
}
return `data:font/woff2;base64,${base64}`;
}
private static createUrls(uri: string): URL[] {
if (uri.startsWith(LOCAL_FONT_PROTOCOL)) {
// no url for local fonts
return [];
}
if (uri.startsWith("http") || uri.startsWith("data")) {
// one url for http imports or data url
return [new URL(uri)];
}
// absolute asset paths, which are found in tests and the excalidraw-app build, won't work with a base url, so we strip the initial slash away
const assetUrl: string = uri.replace(/^\/+/, "");
const urls: URL[] = [];
if (typeof window.EXCALIDRAW_ASSET_PATH === "string") {
const normalizedBaseUrl = this.normalizeBaseUrl(
window.EXCALIDRAW_ASSET_PATH,
);
urls.push(new URL(assetUrl, normalizedBaseUrl));
} else if (Array.isArray(window.EXCALIDRAW_ASSET_PATH)) {
window.EXCALIDRAW_ASSET_PATH.forEach((path) => {
const normalizedBaseUrl = this.normalizeBaseUrl(path);
urls.push(new URL(assetUrl, normalizedBaseUrl));
});
}
// fallback url for bundled fonts
urls.push(new URL(assetUrl, UNPKG_FALLBACK_URL));
return urls;
}
private static getFormat(url: URL) {
try {
const parts = new URL(url).pathname.split(".");
if (parts.length === 1) {
return "";
}
return `format('${parts.pop()}')`;
} catch (error) {
return "";
}
}
private static normalizeBaseUrl(baseUrl: string) {
let result = baseUrl;
// in case user passed a root-relative url (~absolute path),
// like "/" or "/some/path", or relative (starts with "./"),
// prepend it with `location.origin`
if (/^\.?\//.test(result)) {
result = new URL(
result.replace(/^\.?\/+/, ""),
window?.location?.origin,
).toString();
}
// ensure there is a trailing slash, otherwise url won't be correctly concatenated
result = `${result.replace(/\/+$/, "")}/`;
return result;
}
}


@ -0,0 +1,213 @@
import { promiseTry } from "../utils";
import { LOCAL_FONT_PROTOCOL } from "./metadata";
import { subsetWoff2GlyphsByCodepoints } from "./subset/subset-main";
type DataURL = string;
export interface IExcalidrawFontFace {
urls: URL[] | DataURL[];
fontFace: FontFace;
toCSS(
characters: string,
codePoints: Array<number>,
): Promise<string> | undefined;
}
export class ExcalidrawFontFace implements IExcalidrawFontFace {
public readonly urls: URL[] | DataURL[];
public readonly fontFace: FontFace;
private static readonly UNPKG_FALLBACK_URL = `https://unpkg.com/${
import.meta.env.VITE_PKG_NAME
? `${import.meta.env.VITE_PKG_NAME}@${import.meta.env.PKG_VERSION}` // should be provided by vite during package build
: "@excalidraw/excalidraw" // fallback to latest package version (i.e. for app)
}/dist/prod/`;
constructor(family: string, uri: string, descriptors?: FontFaceDescriptors) {
this.urls = ExcalidrawFontFace.createUrls(uri);
const sources = this.urls
.map((url) => `url(${url}) ${ExcalidrawFontFace.getFormat(url)}`)
.join(", ");
this.fontFace = new FontFace(family, sources, {
display: "swap",
style: "normal",
weight: "400",
...descriptors,
});
}
/**
* Generates CSS `@font-face` definition with the (subsetted) font source as a data url for the characters within the unicode range.
*
* Returns `undefined` otherwise.
*/
public toCSS(
characters: string,
codePoints: Array<number>,
): Promise<string> | undefined {
// quick exit in case the characters are not within this font face's unicode range
if (!this.getUnicodeRangeRegex().test(characters)) {
return;
}
return this.getContent(codePoints).then(
(content) =>
`@font-face { font-family: ${this.fontFace.family}; src: url(${content}); }`,
);
}
/**
* Tries to fetch woff2 content, based on the registered urls (from first to last, treated as fallbacks).
*
* @returns base64 with subsetted glyphs based on the passed codepoints, the last defined url otherwise
*/
public async getContent(codePoints: Array<number>): Promise<string> {
let i = 0;
const errorMessages = [];
while (i < this.urls.length) {
const url = this.urls[i];
try {
const arrayBuffer = await this.fetchFont(url);
const base64 = await subsetWoff2GlyphsByCodepoints(
arrayBuffer,
codePoints,
);
return base64;
} catch (e) {
errorMessages.push(`"${url.toString()}" returned error "${e}"`);
}
i++;
}
console.error(
`Failed to fetch font family "${this.fontFace.family}"`,
JSON.stringify(errorMessages, undefined, 2),
);
// in case of issues, at least return the last url as a content
// defaults to unpkg for bundled fonts (so that we don't have to host them forever) and http url for others
return this.urls.length ? this.urls[this.urls.length - 1].toString() : "";
}
public fetchFont(url: URL | DataURL): Promise<ArrayBuffer> {
return promiseTry(async () => {
const response = await fetch(url, {
headers: {
Accept: "font/woff2",
},
});
if (!response.ok) {
const urlString = url instanceof URL ? url.toString() : "dataurl";
throw new Error(
`Failed to fetch "${urlString}": ${response.statusText}`,
);
}
const arrayBuffer = await response.arrayBuffer();
return arrayBuffer;
});
}
private getUnicodeRangeRegex() {
// using \u{h} or \u{hhhhh} to match any number of hex digits,
// otherwise we would get an "Invalid Unicode escape" error
// e.g. U+0-1007F -> \u{0}-\u{1007F}
const unicodeRangeRegex = this.fontFace.unicodeRange
.split(/,\s*/)
.map((range) => {
const [start, end] = range.replace("U+", "").split("-");
if (end) {
return `\\u{${start}}-\\u{${end}}`;
}
return `\\u{${start}}`;
})
.join("");
return new RegExp(`[${unicodeRangeRegex}]`, "u");
}
private static createUrls(uri: string): URL[] | DataURL[] {
if (uri.startsWith("data")) {
// don't create the URL instance, as parsing the huge dataurl string is expensive
return [uri];
}
if (uri.startsWith(LOCAL_FONT_PROTOCOL)) {
// no url for local fonts
return [];
}
if (uri.startsWith("http")) {
// one url for http imports
return [new URL(uri)];
}
// absolute asset paths, which are found in tests and the excalidraw-app build, won't work with a base url, so we strip the initial slash away
const assetUrl: string = uri.replace(/^\/+/, "");
const urls: URL[] = [];
if (typeof window.EXCALIDRAW_ASSET_PATH === "string") {
const normalizedBaseUrl = this.normalizeBaseUrl(
window.EXCALIDRAW_ASSET_PATH,
);
urls.push(new URL(assetUrl, normalizedBaseUrl));
} else if (Array.isArray(window.EXCALIDRAW_ASSET_PATH)) {
window.EXCALIDRAW_ASSET_PATH.forEach((path) => {
const normalizedBaseUrl = this.normalizeBaseUrl(path);
urls.push(new URL(assetUrl, normalizedBaseUrl));
});
}
// fallback url for bundled fonts
urls.push(new URL(assetUrl, ExcalidrawFontFace.UNPKG_FALLBACK_URL));
return urls;
}
private static getFormat(url: URL | DataURL) {
if (!(url instanceof URL)) {
// format is irrelevant for data url
return "";
}
try {
const parts = new URL(url).pathname.split(".");
if (parts.length === 1) {
return "";
}
return `format('${parts.pop()}')`;
} catch (error) {
return "";
}
}
private static normalizeBaseUrl(baseUrl: string) {
let result = baseUrl;
// in case user passed a root-relative url (~absolute path),
// like "/" or "/some/path", or relative (starts with "./"),
// prepend it with `location.origin`
if (/^\.?\//.test(result)) {
result = new URL(
result.replace(/^\.?\/+/, ""),
window?.location?.origin,
).toString();
}
// ensure there is a trailing slash, otherwise url won't be correctly concatenated
result = `${result.replace(/\/+$/, "")}/`;
return result;
}
}
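For illustration, a minimal usage sketch of the class above (not part of this diff). The font URL is hypothetical and the CJK unicode range is only an example; the point is the flow: construct a face, derive the used codepoints, and ask for an inlined `@font-face` rule covering only those characters.

const face = new ExcalidrawFontFace(
  "Xiaolai",
  "https://example.com/Xiaolai-Regular.woff2", // hypothetical url
  { unicodeRange: "U+4E00-9FFF" },
);

const characters = "你好";
const codePoints = Array.from(characters, (char) => char.codePointAt(0)!);

// resolves to `@font-face { font-family: Xiaolai; src: url(data:font/woff2;base64,...); }`,
// or stays `undefined` when no character falls within the face's unicode range
const css = await face.toCSS(characters, codePoints);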


@ -4,7 +4,7 @@
@font-face {
font-family: "Assistant";
src: url(./Assistant-Regular.woff2) format("woff2");
src: url(../woff2/Assistant/Assistant-Regular.woff2) format("woff2");
font-weight: 400;
font-style: normal;
font-display: swap;
@ -12,7 +12,7 @@
@font-face {
font-family: "Assistant";
src: url(./Assistant-Medium.woff2) format("woff2");
src: url(../woff2/Assistant/Assistant-Medium.woff2) format("woff2");
font-weight: 500;
font-style: normal;
font-display: swap;
@ -20,7 +20,7 @@
@font-face {
font-family: "Assistant";
src: url(./Assistant-SemiBold.woff2) format("woff2");
src: url(../woff2/Assistant/Assistant-SemiBold.woff2) format("woff2");
font-weight: 600;
font-style: normal;
font-display: swap;
@ -28,7 +28,7 @@
@font-face {
font-family: "Assistant";
src: url(./Assistant-Bold.woff2) format("woff2");
src: url(../woff2/Assistant/Assistant-Bold.woff2) format("woff2");
font-weight: 700;
font-style: normal;
font-display: swap;


@ -8,30 +8,28 @@ import type {
import { ShapeCache } from "../scene/ShapeCache";
import { isTextElement } from "../element";
import { getFontString } from "../utils";
import { FONT_FAMILY } from "../constants";
import {
LOCAL_FONT_PROTOCOL,
FONT_METADATA,
RANGES,
type FontMetadata,
} from "./metadata";
import { ExcalidrawFont, type Font } from "./ExcalidrawFont";
import { getContainerElement } from "../element/textElement";
import Virgil from "./assets/Virgil-Regular.woff2";
import Excalifont from "./assets/Excalifont-Regular.woff2";
import Cascadia from "./assets/CascadiaCode-Regular.woff2";
import ComicShanns from "./assets/ComicShanns-Regular.woff2";
import LiberationSans from "./assets/LiberationSans-Regular.woff2";
import LilitaLatin from "./assets/Lilita-Regular-i7dPIFZ9Zz-WBtRtedDbYEF8RXi4EwQ.woff2";
import LilitaLatinExt from "./assets/Lilita-Regular-i7dPIFZ9Zz-WBtRtedDbYE98RXi4EwSsbg.woff2";
import NunitoLatin from "./assets/Nunito-Regular-XRXI3I6Li01BKofiOc5wtlZ2di8HDIkhdTQ3j6zbXWjgeg.woff2";
import NunitoLatinExt from "./assets/Nunito-Regular-XRXI3I6Li01BKofiOc5wtlZ2di8HDIkhdTo3j6zbXWjgevT5.woff2";
import NunitoCyrilic from "./assets/Nunito-Regular-XRXI3I6Li01BKofiOc5wtlZ2di8HDIkhdTA3j6zbXWjgevT5.woff2";
import NunitoCyrilicExt from "./assets/Nunito-Regular-XRXI3I6Li01BKofiOc5wtlZ2di8HDIkhdTk3j6zbXWjgevT5.woff2";
import NunitoVietnamese from "./assets/Nunito-Regular-XRXI3I6Li01BKofiOc5wtlZ2di8HDIkhdTs3j6zbXWjgevT5.woff2";
FONT_FAMILY,
FONT_FAMILY_FALLBACKS,
WINDOWS_EMOJI_FALLBACK_FONT,
CJK_HAND_DRAWN_FALLBACK_FONT,
} from "../constants";
import { FONT_METADATA, type FontMetadata } from "./metadata";
import { charWidth, getContainerElement } from "../element/textElement";
import {
ExcalidrawFontFace,
type IExcalidrawFontFace,
} from "./ExcalidrawFontFace";
import { CascadiaFontFaces } from "./woff2/Cascadia";
import { ComicFontFaces } from "./woff2/Comic";
import { ExcalifontFontFaces } from "./woff2/Excalifont";
import { HelveticaFontFaces } from "./woff2/Helvetica";
import { LiberationFontFaces } from "./woff2/Liberation";
import { LilitaFontFaces } from "./woff2/Lilita";
import { NunitoFontFaces } from "./woff2/Nunito";
import { VirgilFontFaces } from "./woff2/Virgil";
import { XiaolaiFontFaces } from "./woff2/Xiaolai";
import { EmojiFontFaces } from "./woff2/Emoji";
export class Fonts {
// it's ok to track fonts across multiple instances only once, so let's use
@ -43,7 +41,7 @@ export class Fonts {
number,
{
metadata: FontMetadata;
fonts: Font[];
fontFaces: IExcalidrawFontFace[];
}
>
| undefined;
@ -85,20 +83,23 @@ export class Fonts {
* of the supplied fontFaces has not already been processed.
*/
public onLoaded = (fontFaces: readonly FontFace[]) => {
if (
// bail if all fonts have been processed. We're checking just a
// subset of the font properties (though it should be enough), so it
// can technically bail on a false positive.
fontFaces.every((fontFace) => {
const sig = `${fontFace.family}-${fontFace.style}-${fontFace.weight}-${fontFace.unicodeRange}`;
if (Fonts.loadedFontsCache.has(sig)) {
return true;
}
// bail if all fonts have been processed. We're checking just a
// subset of the font properties (though it should be enough), so it
// can technically bail on a false positive.
let shouldBail = true;
for (const fontFace of fontFaces) {
const sig = `${fontFace.family}-${fontFace.style}-${fontFace.weight}-${fontFace.unicodeRange}`;
// make sure to update our cache with all the loaded font faces
if (!Fonts.loadedFontsCache.has(sig)) {
Fonts.loadedFontsCache.add(sig);
return false;
})
) {
return false;
shouldBail = false;
}
}
if (shouldBail) {
return;
}
let didUpdate = false;
@ -109,6 +110,10 @@ export class Fonts {
if (isTextElement(element)) {
didUpdate = true;
ShapeCache.delete(element);
// clear the width cache, so that we don't perform subsequent wrapping based on the stale fallback font metrics
charWidth.clearCache(getFontString(element));
const container = getContainerElement(element, elementsMap);
if (container) {
ShapeCache.delete(container);
@ -125,26 +130,27 @@ export class Fonts {
* Load font faces for a given scene and trigger scene update.
*/
public loadSceneFonts = async (): Promise<FontFace[]> => {
const sceneFamilies = this.getSceneFontFamilies();
const sceneFamilies = this.getSceneFamilies();
const loaded = await Fonts.loadFontFaces(sceneFamilies);
this.onLoaded(loaded);
return loaded;
};
/**
* Gets all the font families for the given scene.
* Load all registered font faces.
*/
public getSceneFontFamilies = () => {
return Fonts.getFontFamilies(this.scene.getNonDeletedElements());
public static loadAllFonts = async (): Promise<FontFace[]> => {
const allFamilies = Fonts.getAllFamilies();
return Fonts.loadFontFaces(allFamilies);
};
/**
* Load font faces for passed elements - use when the scene is unavailable (i.e. export).
*/
public static loadFontsForElements = async (
public static loadElementsFonts = async (
elements: readonly ExcalidrawElement[],
): Promise<FontFace[]> => {
const fontFamilies = Fonts.getFontFamilies(elements);
const fontFamilies = Fonts.getElementsFamilies(elements);
return await Fonts.loadFontFaces(fontFamilies);
};
@ -152,13 +158,13 @@ export class Fonts {
fontFamilies: Array<ExcalidrawTextElement["fontFamily"]>,
) {
// add all registered font faces into the `document.fonts` (if not added already)
for (const { fonts, metadata } of Fonts.registered.values()) {
for (const { fontFaces, metadata } of Fonts.registered.values()) {
// skip registering font faces for local fonts (i.e. Helvetica)
if (metadata.local) {
continue;
}
for (const { fontFace } of fonts) {
for (const { fontFace } of fontFaces) {
if (!window.document.fonts.has(fontFace)) {
window.document.fonts.add(fontFace);
}
@ -183,7 +189,7 @@ export class Fonts {
console.error(
`Failed to load font "${fontString}" from urls "${Fonts.registered
.get(fontFamily)
?.fonts.map((x) => x.urls)}"`,
?.fontFaces.map((x) => x.urls)}"`,
e,
);
}
@ -202,82 +208,58 @@ export class Fonts {
private static init() {
const fonts = {
registered: new Map<
ValueOf<typeof FONT_FAMILY>,
{ metadata: FontMetadata; fonts: Font[] }
ValueOf<typeof FONT_FAMILY | typeof FONT_FAMILY_FALLBACKS>,
{ metadata: FontMetadata; fontFaces: IExcalidrawFontFace[] }
>(),
};
// TODO: let's tweak this once we know how `register` will be exposed as part of the custom fonts API
const _register = register.bind(fonts);
const init = (
family: keyof typeof FONT_FAMILY | keyof typeof FONT_FAMILY_FALLBACKS,
...fontFacesDescriptors: ExcalidrawFontFaceDescriptor[]
) => {
const fontFamily =
FONT_FAMILY[family as keyof typeof FONT_FAMILY] ??
FONT_FAMILY_FALLBACKS[family as keyof typeof FONT_FAMILY_FALLBACKS];
_register("Virgil", FONT_METADATA[FONT_FAMILY.Virgil], {
uri: Virgil,
});
// default to Excalifont metrics
const metadata =
FONT_METADATA[fontFamily] ?? FONT_METADATA[FONT_FAMILY.Excalifont];
_register("Excalifont", FONT_METADATA[FONT_FAMILY.Excalifont], {
uri: Excalifont,
});
register.call(fonts, family, metadata, ...fontFacesDescriptors);
};
init("Cascadia", ...CascadiaFontFaces);
init("Comic Shanns", ...ComicFontFaces);
init("Excalifont", ...ExcalifontFontFaces);
// kept for backwards compatibility reasons, uses the system font (Helvetica on macOS, Arial on Windows)
_register("Helvetica", FONT_METADATA[FONT_FAMILY.Helvetica], {
uri: LOCAL_FONT_PROTOCOL,
});
init("Helvetica", ...HelveticaFontFaces);
// used for server-side pdf & png export instead of helvetica (technically does not need metrics, but kept in for consistency)
_register(
"Liberation Sans",
FONT_METADATA[FONT_FAMILY["Liberation Sans"]],
{
uri: LiberationSans,
},
);
init("Liberation Sans", ...LiberationFontFaces);
init("Lilita One", ...LilitaFontFaces);
init("Nunito", ...NunitoFontFaces);
init("Virgil", ...VirgilFontFaces);
_register("Cascadia", FONT_METADATA[FONT_FAMILY.Cascadia], {
uri: Cascadia,
});
_register("Comic Shanns", FONT_METADATA[FONT_FAMILY["Comic Shanns"]], {
uri: ComicShanns,
});
_register(
"Lilita One",
FONT_METADATA[FONT_FAMILY["Lilita One"]],
{ uri: LilitaLatinExt, descriptors: { unicodeRange: RANGES.LATIN_EXT } },
{ uri: LilitaLatin, descriptors: { unicodeRange: RANGES.LATIN } },
);
_register(
"Nunito",
FONT_METADATA[FONT_FAMILY.Nunito],
{
uri: NunitoCyrilicExt,
descriptors: { unicodeRange: RANGES.CYRILIC_EXT, weight: "500" },
},
{
uri: NunitoCyrilic,
descriptors: { unicodeRange: RANGES.CYRILIC, weight: "500" },
},
{
uri: NunitoVietnamese,
descriptors: { unicodeRange: RANGES.VIETNAMESE, weight: "500" },
},
{
uri: NunitoLatinExt,
descriptors: { unicodeRange: RANGES.LATIN_EXT, weight: "500" },
},
{
uri: NunitoLatin,
descriptors: { unicodeRange: RANGES.LATIN, weight: "500" },
},
);
// fallback font faces
init(CJK_HAND_DRAWN_FALLBACK_FONT, ...XiaolaiFontFaces);
init(WINDOWS_EMOJI_FALLBACK_FONT, ...EmojiFontFaces);
Fonts._initialized = true;
return fonts.registered;
}
private static getFontFamilies(
/**
* Gets all the font families for the given scene.
*/
public getSceneFamilies = () => {
return Fonts.getElementsFamilies(this.scene.getNonDeletedElements());
};
private static getAllFamilies() {
return Array.from(Fonts.registered.keys());
}
private static getElementsFamilies(
elements: ReadonlyArray<ExcalidrawElement>,
): Array<ExcalidrawTextElement["fontFamily"]> {
return Array.from(
@ -296,30 +278,34 @@ export class Fonts {
*
* @param family font family
* @param metadata font metadata
* @param params array of the rest of the FontFace parameters [uri: string, descriptors: FontFaceDescriptors?] ,
* @param fontFacesDescriptors font face descriptors
*/
function register(
this:
| Fonts
| {
registered: Map<
ValueOf<typeof FONT_FAMILY>,
{ metadata: FontMetadata; fonts: Font[] }
number,
{ metadata: FontMetadata; fontFaces: IExcalidrawFontFace[] }
>;
},
family: string,
metadata: FontMetadata,
...params: Array<{ uri: string; descriptors?: FontFaceDescriptors }>
...fontFacesDescriptors: ExcalidrawFontFaceDescriptor[]
) {
// TODO: likely we will need to abandon number "id" in order to support custom fonts
const familyId = FONT_FAMILY[family as keyof typeof FONT_FAMILY];
const registeredFamily = this.registered.get(familyId);
// TODO: likely we will need to abandon number value in order to support custom fonts
const fontFamily =
FONT_FAMILY[family as keyof typeof FONT_FAMILY] ??
FONT_FAMILY_FALLBACKS[family as keyof typeof FONT_FAMILY_FALLBACKS];
const registeredFamily = this.registered.get(fontFamily);
if (!registeredFamily) {
this.registered.set(familyId, {
this.registered.set(fontFamily, {
metadata,
fonts: params.map(
({ uri, descriptors }) => new ExcalidrawFont(family, uri, descriptors),
fontFaces: fontFacesDescriptors.map(
({ uri, descriptors }) =>
new ExcalidrawFontFace(family, uri, descriptors),
),
});
}
@ -357,3 +343,8 @@ export const getLineHeight = (fontFamily: FontFamilyValues) => {
return lineHeight as ExcalidrawTextElement["lineHeight"];
};
export interface ExcalidrawFontFaceDescriptor {
uri: string;
descriptors?: FontFaceDescriptors;
}


@ -4,7 +4,7 @@ import {
FontFamilyNormalIcon,
FreedrawIcon,
} from "../components/icons";
import { FONT_FAMILY } from "../constants";
import { FONT_FAMILY, FONT_FAMILY_FALLBACKS } from "../constants";
/**
* Encapsulates font metrics with additional font metadata.
@ -22,13 +22,15 @@ export interface FontMetadata {
lineHeight: number;
};
/** element to be displayed as an icon */
icon: JSX.Element;
icon?: JSX.Element;
/** flag to indicate a deprecated font */
deprecated?: true;
/** flag to indicate a server-side only font */
serverSide?: true;
/** flag to indicate a local-only font */
local?: true;
/** flag to indicate a fallback font */
fallback?: true;
}
export const FONT_METADATA: Record<number, FontMetadata> = {
@ -106,13 +108,32 @@ export const FONT_METADATA: Record<number, FontMetadata> = {
descender: -434,
lineHeight: 1.15,
},
icon: FontFamilyNormalIcon,
serverSide: true,
},
[FONT_FAMILY_FALLBACKS.Xiaolai]: {
metrics: {
unitsPerEm: 1000,
ascender: 880,
descender: -144,
lineHeight: 1.15,
},
fallback: true,
},
[FONT_FAMILY_FALLBACKS["Segoe UI Emoji"]]: {
metrics: {
// reusing Excalifont metrics
unitsPerEm: 1000,
ascender: 886,
descender: -374,
lineHeight: 1.25,
},
local: true,
fallback: true,
},
};
/** Unicode ranges */
export const RANGES = {
/** Unicode ranges defined by Google Fonts */
export const GOOGLE_FONTS_RANGES = {
LATIN:
"U+0000-00FF, U+0131, U+0152-0153, U+02BB-02BC, U+02C6, U+02DA, U+02DC, U+0304, U+0308, U+0329, U+2000-206F, U+2074, U+20AC, U+2122, U+2191, U+2193, U+2212, U+2215, U+FEFF, U+FFFD",
LATIN_EXT:


@ -0,0 +1,131 @@
import {
WorkerInTheMainChunkError,
WorkerUrlNotDefinedError,
} from "../../errors";
import { isServerEnv, promiseTry } from "../../utils";
import { WorkerPool } from "../../workers";
import type { Commands } from "./subset-shared.chunk";
let shouldUseWorkers = typeof Worker !== "undefined";
/**
* Tries to subset glyphs in a font based on the used codepoints, returning the font as dataurl.
* Under the hood it utilizes worker threads (Web Workers, if available), otherwise it falls back to the main thread.
*
* Check the following diagram for details: link.excalidraw.com/readonly/MbbnWPSWXgadXdtmzgeO
*
* @param arrayBuffer font data buffer in the woff2 format
* @param codePoints codepoints used to subset the glyphs
*
* @returns font with subsetted glyphs (all glyphs in case of errors) converted into a dataurl
*/
export const subsetWoff2GlyphsByCodepoints = async (
arrayBuffer: ArrayBuffer,
codePoints: Array<number>,
): Promise<string> => {
const { Commands, subsetToBase64, toBase64 } =
await lazyLoadSharedSubsetChunk();
if (!shouldUseWorkers) {
return subsetToBase64(arrayBuffer, codePoints);
}
return promiseTry(async () => {
try {
const workerPool = await getOrCreateWorkerPool();
// copy the buffer to avoid working on top of the detached array buffer in the fallback
// i.e. in case the worker throws, the array buffer does not get automatically detached, even if the worker is terminated
const arrayBufferCopy = arrayBuffer.slice(0);
const result = await workerPool.postMessage(
{
command: Commands.Subset,
arrayBuffer: arrayBufferCopy,
codePoints,
} as const,
{ transfer: [arrayBufferCopy] },
);
// encode on the main thread to avoid copying large binary strings (as dataurl) between threads
return toBase64(result);
} catch (e) {
// don't use workers if they are failing
shouldUseWorkers = false;
if (
// don't log the expected errors server-side
!(
isServerEnv() &&
(e instanceof WorkerUrlNotDefinedError ||
e instanceof WorkerInTheMainChunkError)
)
) {
// eslint-disable-next-line no-console
console.error(
"Failed to use workers for subsetting, falling back to the main thread.",
e,
);
}
// fallback to the main thread
return subsetToBase64(arrayBuffer, codePoints);
}
});
};
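A hedged usage sketch of the entry point above; `fontBuffer` is an assumed woff2 `ArrayBuffer` obtained elsewhere (e.g. via fetch). The helper decides internally between the worker pool and the main-thread fallback.

const text = "日本語のテキスト";
const codePoints = Array.from(text, (char) => char.codePointAt(0)!);

// `fontBuffer: ArrayBuffer` is assumed to hold a woff2 font fetched elsewhere
const dataUrl = await subsetWoff2GlyphsByCodepoints(fontBuffer, codePoints);
// -> "data:font/woff2;base64,..." containing only the glyphs for the used codepoints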
// lazy-loaded and cached chunks
let subsetWorker: Promise<typeof import("./subset-worker.chunk")> | null = null;
let subsetShared: Promise<typeof import("./subset-shared.chunk")> | null = null;
const lazyLoadWorkerSubsetChunk = async () => {
if (!subsetWorker) {
subsetWorker = import("./subset-worker.chunk");
}
return subsetWorker;
};
const lazyLoadSharedSubsetChunk = async () => {
if (!subsetShared) {
// load dynamically to force create a shared chunk reused between main thread and the worker thread
subsetShared = import("./subset-shared.chunk");
}
return subsetShared;
};
// could be extended with multiple commands in the future
type SubsetWorkerData = {
command: typeof Commands.Subset;
arrayBuffer: ArrayBuffer;
codePoints: Array<number>;
};
type SubsetWorkerResult<T extends SubsetWorkerData["command"]> =
T extends typeof Commands.Subset ? ArrayBuffer : never;
let workerPool: Promise<
WorkerPool<SubsetWorkerData, SubsetWorkerResult<SubsetWorkerData["command"]>>
> | null = null;
/**
* Lazy initialize or get the worker pool singleton.
*
* @throws implicitly if anything goes wrong - worker pool creation, loading wasm, initializing worker, etc.
*/
const getOrCreateWorkerPool = () => {
if (!workerPool) {
// immediate concurrent-friendly return, to ensure we have only one pool instance
workerPool = promiseTry(async () => {
const { WorkerUrl } = await lazyLoadWorkerSubsetChunk();
const pool = WorkerPool.create<
SubsetWorkerData,
SubsetWorkerResult<SubsetWorkerData["command"]>
>(WorkerUrl);
return pool;
});
}
return workerPool;
};


@ -0,0 +1,81 @@
/**
* DON'T depend on anything from the outside like `promiseTry`, as this module is part of a separate lazy-loaded chunk.
*
* Including anything from the main chunk would include the whole chunk by default.
* Even if it would be tree-shaken during build, it won't be tree-shaken in dev.
*
* In the future consider separating common utils into a separate shared chunk.
*/
import loadWoff2 from "../wasm/woff2-loader";
import loadHbSubset from "../wasm/hb-subset-loader";
/**
* Shared commands between the main thread and worker threads.
*/
export const Commands = {
Subset: "SUBSET",
} as const;
/**
* Used by browser (main thread), node and jsdom, to subset the font based on the passed codepoints.
*
* @returns woff2 font as a base64 encoded string
*/
export const subsetToBase64 = async (
arrayBuffer: ArrayBuffer,
codePoints: Array<number>,
): Promise<string> => {
try {
const buffer = await subsetToBinary(arrayBuffer, codePoints);
return toBase64(buffer);
} catch (e) {
console.error("Skipped glyph subsetting", e);
// Fallback to encoding whole font in case of errors
return toBase64(arrayBuffer);
}
};
/**
* Used by browser (worker thread) and as part of `subsetToBase64`, to subset the font based on the passed codepoints.
*
* @returns woff2 font as an ArrayBuffer, to avoid copying large strings between worker threads and the main thread.
*/
export const subsetToBinary = async (
arrayBuffer: ArrayBuffer,
codePoints: Array<number>,
): Promise<ArrayBuffer> => {
// lazy loaded wasm modules to avoid multiple initializations in case of concurrent triggers
// IMPORTANT: could be expensive, as each new worker instance lazy loads these into its own memory ~ keep the # of workers small!
const { compress, decompress } = await loadWoff2();
const { subset } = await loadHbSubset();
const decompressedBinary = decompress(arrayBuffer).buffer;
const snftSubset = subset(decompressedBinary, new Set(codePoints));
const compressedBinary = compress(snftSubset.buffer);
return compressedBinary.buffer;
};
/**
* Util for isomorphic browser (main thread), node and jsdom usage.
*
* Isn't used inside the worker to avoid copying large binary strings (as dataurl) between worker threads and the main thread.
*/
export const toBase64 = async (arrayBuffer: ArrayBuffer) => {
let base64: string;
if (typeof Buffer !== "undefined") {
// node, jsdom
base64 = Buffer.from(arrayBuffer).toString("base64");
} else {
// browser (main thread)
// it's perfectly fine to treat each byte independently,
// as we care only about turning individual bytes into codepoints,
// not about multi-byte unicode characters
const byteString = String.fromCharCode(...new Uint8Array(arrayBuffer));
base64 = btoa(byteString);
}
return `data:font/woff2;base64,${base64}`;
};
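To make the split described in the comments above concrete (binary subsetting off the main thread, base64 encoding on it), a short sketch of how the two exports compose on the worker path; `fontBuffer` and `codePoints` are assumed inputs.

// worker thread: keep the result binary so it can be transferred instead of copied
const subsetBuffer = await subsetToBinary(fontBuffer, codePoints);

// main thread: only here turn the (transferred) buffer into a dataurl
const dataUrl = await toBase64(subsetBuffer);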


@ -0,0 +1,42 @@
/**
* DON'T depend on anything from the outside like `promiseTry`, as this module is part of a separate lazy-loaded chunk.
*
* Including anything from the main chunk would include the whole chunk by default.
* Even if it would be tree-shaken during build, it won't be tree-shaken in dev.
*
* In the future consider separating common utils into a separate shared chunk.
*/
import { Commands, subsetToBinary } from "./subset-shared.chunk";
/**
* Due to this export (and related dynamic import), this worker code will be included in the bundle automatically (as a separate chunk),
* without the need for esbuild / vite / rollup plugins and special browser / server treatment,
*
* `import.meta.url` is undefined in nodejs
*/
export const WorkerUrl: URL | undefined = import.meta.url
? new URL(import.meta.url)
: undefined;
// run only in the worker context
if (typeof window === "undefined" && typeof self !== "undefined") {
self.onmessage = async (e: {
data: {
command: typeof Commands.Subset;
arrayBuffer: ArrayBuffer;
codePoints: Array<number>;
};
}) => {
switch (e.data.command) {
case Commands.Subset:
const buffer = await subsetToBinary(
e.data.arrayBuffer,
e.data.codePoints,
);
self.postMessage(buffer, { transfer: [buffer] });
break;
}
};
}


@ -0,0 +1,57 @@
/**
* DON'T depend on anything from the outside like `promiseTry`, as this module is part of a separate lazy-loaded chunk.
*
* Including anything from the main chunk would include the whole chunk by default.
* Even if it would be tree-shaken during build, it won't be tree-shaken in dev.
*
* In the future consider separating common utils into a separate shared chunk.
*/
import binary from "./hb-subset-wasm";
import bindings from "./hb-subset-bindings";
/**
* Lazy loads wasm and respective bindings for font subsetting based on the harfbuzzjs.
*/
let loadedWasm: ReturnType<typeof load> | null = null;
// TODO: consider adding support for fetching the wasm from a URL (external CDN, data URL, etc.)
const load = (): Promise<{
subset: (
fontBuffer: ArrayBuffer,
codePoints: ReadonlySet<number>,
) => Uint8Array;
}> => {
return new Promise(async (resolve, reject) => {
try {
const module = await WebAssembly.instantiate(binary);
const harfbuzzJsWasm = module.instance.exports;
// @ts-expect-error since `.buffer` is custom prop
const heapu8 = new Uint8Array(harfbuzzJsWasm.memory.buffer);
const hbSubset = {
subset: (fontBuffer: ArrayBuffer, codePoints: ReadonlySet<number>) => {
return bindings.subset(
harfbuzzJsWasm,
heapu8,
fontBuffer,
codePoints,
);
},
};
resolve(hbSubset);
} catch (e) {
reject(e);
}
});
};
// lazy load the default export
export default (): ReturnType<typeof load> => {
if (!loadedWasm) {
loadedWasm = load();
}
return loadedWasm;
};
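A minimal consumption sketch of the loader above, assuming `decompressedFont` is an already woff2-decompressed (sfnt) `ArrayBuffer` and `loadHbSubset` is the default export imported as in `subset-shared.chunk.ts`:

const { subset } = await loadHbSubset();

// keep only the glyphs for 你 (U+4F60) and 好 (U+597D)
const subsetSfnt: Uint8Array = subset(decompressedFont, new Set([0x4f60, 0x597d]));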


@ -1,58 +0,0 @@
/**
* Lazy loads wasm and respective bindings for font subsetting based on the harfbuzzjs.
*/
let loadedWasm: ReturnType<typeof load> | null = null;
// TODO: add support for fetching the wasm from a URL (external CDN, data URL, etc.)
const load = (): Promise<{
subset: (
fontBuffer: ArrayBuffer,
codePoints: ReadonlySet<number>,
) => Uint8Array;
}> => {
return new Promise(async (resolve, reject) => {
try {
const [binary, bindings] = await Promise.all([
import("./hb-subset.wasm"),
import("./hb-subset.bindings"),
]);
WebAssembly.instantiate(binary.default).then((module) => {
try {
const harfbuzzJsWasm = module.instance.exports;
// @ts-expect-error since `.buffer` is custom prop
const heapu8 = new Uint8Array(harfbuzzJsWasm.memory.buffer);
const hbSubset = {
subset: (
fontBuffer: ArrayBuffer,
codePoints: ReadonlySet<number>,
) => {
return bindings.default.subset(
harfbuzzJsWasm,
heapu8,
fontBuffer,
codePoints,
);
},
};
resolve(hbSubset);
} catch (e) {
reject(e);
}
});
} catch (error) {
reject(error);
}
});
};
// lazy load the default export
export default (): ReturnType<typeof load> => {
if (!loadedWasm) {
loadedWasm = load();
}
return loadedWasm;
};


@ -47,6 +47,7 @@ const Module = (function () {
moduleOverrides[key] = Module[key];
}
}
let arguments_ = [];
let thisProgram = "./this.program";
let quit_ = function (status, toThrow) {
@ -4046,3 +4047,5 @@ const Module = (function () {
})();
export default Module;


@ -0,0 +1,76 @@
/**
* DON'T depend on anything from the outside like `promiseTry`, as this module is part of a separate lazy-loaded chunk.
*
* Including anything from the main chunk would include the whole chunk by default.
* Even if it would be tree-shaken during build, it won't be tree-shaken in dev.
*
* In the future consider separating common utils into a separate shared chunk.
*/
import binary from "./woff2-wasm";
import bindings from "./woff2-bindings";
/**
* Lazy loads wasm and respective bindings for woff2 compression and decompression.
*/
type Vector = any;
let loadedWasm: ReturnType<typeof load> | null = null;
// re-map from internal vector into byte array
function convertFromVecToUint8Array(vector: Vector): Uint8Array {
const arr = [];
for (let i = 0, l = vector.size(); i < l; i++) {
arr.push(vector.get(i));
}
return new Uint8Array(arr);
}
// TODO: consider adding support for fetching the wasm from a URL (external CDN, data URL, etc.)
const load = (): Promise<{
compress: (buffer: ArrayBuffer) => Uint8Array;
decompress: (buffer: ArrayBuffer) => Uint8Array;
}> => {
return new Promise((resolve, reject) => {
try {
// initializing the module manually, so that we can pass in the wasm binary
// note that `bindings.then` is not Promises/A+ compliant, hence the need for another explicit try/catch
bindings({ wasmBinary: binary }).then(
(module: {
woff2Enc: (buffer: ArrayBuffer, byteLength: number) => Vector;
woff2Dec: (buffer: ArrayBuffer, byteLength: number) => Vector;
}) => {
try {
// re-exporting only compress and decompress functions (also avoids infinite loop inside emscripten bindings)
const woff2 = {
compress: (buffer: ArrayBuffer) =>
convertFromVecToUint8Array(
module.woff2Enc(buffer, buffer.byteLength),
),
decompress: (buffer: ArrayBuffer) =>
convertFromVecToUint8Array(
module.woff2Dec(buffer, buffer.byteLength),
),
};
resolve(woff2);
} catch (e) {
reject(e);
}
},
);
} catch (e) {
reject(e);
}
});
};
// lazy loaded default export
export default (): ReturnType<typeof load> => {
if (!loadedWasm) {
loadedWasm = load();
}
return loadedWasm;
};
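And the matching sketch for the woff2 loader (used together with the hb-subset loader in `subsetToBinary`); `woff2Buffer` is an assumed input and `loadWoff2` the default export imported as in `subset-shared.chunk.ts`:

const { compress, decompress } = await loadWoff2();

const sfnt = decompress(woff2Buffer); // woff2 -> sfnt bytes (Uint8Array)
const recompressed = compress(sfnt.buffer); // sfnt -> woff2 bytes, e.g. after subsetting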


@ -1,70 +0,0 @@
/**
* Lazy loads wasm and respective bindings for woff2 compression and decompression.
*/
type Vector = any;
let loadedWasm: ReturnType<typeof load> | null = null;
// TODO: add support for fetching the wasm from a URL (external CDN, data URL, etc.)
const load = (): Promise<{
compress: (buffer: ArrayBuffer) => Uint8Array;
decompress: (buffer: ArrayBuffer) => Uint8Array;
}> => {
return new Promise(async (resolve, reject) => {
try {
const [binary, bindings] = await Promise.all([
import("./woff2.wasm"),
import("./woff2.bindings"),
]);
// initializing the module manually, so that we could pass in the wasm binary
bindings
.default({ wasmBinary: binary.default })
.then(
(module: {
woff2Enc: (buffer: ArrayBuffer, byteLength: number) => Vector;
woff2Dec: (buffer: ArrayBuffer, byteLength: number) => Vector;
}) => {
try {
// re-map from internal vector into byte array
function convertFromVecToUint8Array(vector: Vector): Uint8Array {
const arr = [];
for (let i = 0, l = vector.size(); i < l; i++) {
arr.push(vector.get(i));
}
return new Uint8Array(arr);
}
// re-exporting only compress and decompress functions (also avoids infinite loop inside emscripten bindings)
const woff2 = {
compress: (buffer: ArrayBuffer) =>
convertFromVecToUint8Array(
module.woff2Enc(buffer, buffer.byteLength),
),
decompress: (buffer: ArrayBuffer) =>
convertFromVecToUint8Array(
module.woff2Dec(buffer, buffer.byteLength),
),
};
resolve(woff2);
} catch (e) {
reject(e);
}
},
);
} catch (e) {
reject(e);
}
});
};
// lazy loaded default export
export default (): ReturnType<typeof load> => {
if (!loadedWasm) {
loadedWasm = load();
}
return loadedWasm;
};


@ -0,0 +1,8 @@
import CascadiaCodeRegular from "./CascadiaCode-Regular.woff2";
import { type ExcalidrawFontFaceDescriptor } from "../..";
export const CascadiaFontFaces: ExcalidrawFontFaceDescriptor[] = [
{
uri: CascadiaCodeRegular,
},
];


@ -0,0 +1,8 @@
import ComicShannsRegular from "./ComicShanns-Regular.woff2";
import { type ExcalidrawFontFaceDescriptor } from "../..";
export const ComicFontFaces: ExcalidrawFontFaceDescriptor[] = [
{
uri: ComicShannsRegular,
},
];


@ -0,0 +1,8 @@
import { LOCAL_FONT_PROTOCOL } from "../../metadata";
import { type ExcalidrawFontFaceDescriptor } from "../..";
export const EmojiFontFaces: ExcalidrawFontFaceDescriptor[] = [
{
uri: LOCAL_FONT_PROTOCOL,
},
];


@ -0,0 +1,8 @@
import Excalifont from "./Excalifont-Regular.woff2";
import { type ExcalidrawFontFaceDescriptor } from "../..";
export const ExcalifontFontFaces: ExcalidrawFontFaceDescriptor[] = [
{
uri: Excalifont,
},
];


@ -0,0 +1,8 @@
import { LOCAL_FONT_PROTOCOL } from "../../metadata";
import { type ExcalidrawFontFaceDescriptor } from "../..";
export const HelveticaFontFaces: ExcalidrawFontFaceDescriptor[] = [
{
uri: LOCAL_FONT_PROTOCOL,
},
];


@ -0,0 +1,8 @@
import LiberationSansRegular from "./LiberationSans-Regular.woff2";
import { type ExcalidrawFontFaceDescriptor } from "../..";
export const LiberationFontFaces: ExcalidrawFontFaceDescriptor[] = [
{
uri: LiberationSansRegular,
},
];


@ -0,0 +1,16 @@
import LilitaLatin from "./Lilita-Regular-i7dPIFZ9Zz-WBtRtedDbYEF8RXi4EwQ.woff2";
import LilitaLatinExt from "./Lilita-Regular-i7dPIFZ9Zz-WBtRtedDbYE98RXi4EwSsbg.woff2";
import { GOOGLE_FONTS_RANGES } from "../../metadata";
import { type ExcalidrawFontFaceDescriptor } from "../..";
export const LilitaFontFaces: ExcalidrawFontFaceDescriptor[] = [
{
uri: LilitaLatinExt,
descriptors: { unicodeRange: GOOGLE_FONTS_RANGES.LATIN_EXT },
},
{
uri: LilitaLatin,
descriptors: { unicodeRange: GOOGLE_FONTS_RANGES.LATIN },
},
];


@ -0,0 +1,37 @@
import Latin from "./Nunito-Regular-XRXI3I6Li01BKofiOc5wtlZ2di8HDIkhdTQ3j6zbXWjgeg.woff2";
import LatinExt from "./Nunito-Regular-XRXI3I6Li01BKofiOc5wtlZ2di8HDIkhdTo3j6zbXWjgevT5.woff2";
import Cyrilic from "./Nunito-Regular-XRXI3I6Li01BKofiOc5wtlZ2di8HDIkhdTA3j6zbXWjgevT5.woff2";
import CyrilicExt from "./Nunito-Regular-XRXI3I6Li01BKofiOc5wtlZ2di8HDIkhdTk3j6zbXWjgevT5.woff2";
import Vietnamese from "./Nunito-Regular-XRXI3I6Li01BKofiOc5wtlZ2di8HDIkhdTs3j6zbXWjgevT5.woff2";
import { GOOGLE_FONTS_RANGES } from "../../metadata";
import { type ExcalidrawFontFaceDescriptor } from "../..";
export const NunitoFontFaces: ExcalidrawFontFaceDescriptor[] = [
{
uri: CyrilicExt,
descriptors: {
unicodeRange: GOOGLE_FONTS_RANGES.CYRILIC_EXT,
weight: "500",
},
},
{
uri: Cyrilic,
descriptors: { unicodeRange: GOOGLE_FONTS_RANGES.CYRILIC, weight: "500" },
},
{
uri: Vietnamese,
descriptors: {
unicodeRange: GOOGLE_FONTS_RANGES.VIETNAMESE,
weight: "500",
},
},
{
uri: LatinExt,
descriptors: { unicodeRange: GOOGLE_FONTS_RANGES.LATIN_EXT, weight: "500" },
},
{
uri: Latin,
descriptors: { unicodeRange: GOOGLE_FONTS_RANGES.LATIN, weight: "500" },
},
];


@ -0,0 +1,8 @@
import Virgil from "./Virgil-Regular.woff2";
import { type ExcalidrawFontFaceDescriptor } from "../..";
export const VirgilFontFaces: ExcalidrawFontFaceDescriptor[] = [
{
uri: Virgil,
},
];

Some files were not shown because too many files have changed in this diff.