Mirror of https://github.com/excalidraw/excalidraw.git, synced 2025-05-03 10:00:07 -04:00
revert build & worker changes (unused)

parent 057331431f
commit a7668890fb

4 changed files with 48 additions and 89 deletions
@@ -23,7 +23,7 @@ export const subsetWoff2GlyphsByCodepoints = async (
   codePoints: Array<number>,
 ): Promise<string> => {
   const { Commands, subsetToBase64, toBase64 } =
-    await lazyLoadSubsetSharedChunk();
+    await lazyLoadSharedSubsetChunk();

   if (!shouldUseWorkers) {
     return subsetToBase64(arrayBuffer, codePoints);
@@ -75,7 +75,7 @@ export const subsetWoff2GlyphsByCodepoints = async (
 let subsetWorker: Promise<typeof import("./subset-worker.chunk")> | null = null;
 let subsetShared: Promise<typeof import("./subset-shared.chunk")> | null = null;

-const lazyLoadSubsetWorkerChunk = async () => {
+const lazyLoadWorkerSubsetChunk = async () => {
   if (!subsetWorker) {
     subsetWorker = import("./subset-worker.chunk");
   }
@@ -83,7 +83,7 @@ const lazyLoadSubsetWorkerChunk = async () => {
   return subsetWorker;
 };

-const lazyLoadSubsetSharedChunk = async () => {
+const lazyLoadSharedSubsetChunk = async () => {
   if (!subsetShared) {
     // load dynamically to force create a shared chunk reused between main thread and the worker thread
     subsetShared = import("./subset-shared.chunk");
@@ -93,20 +93,17 @@ const lazyLoadSubsetSharedChunk = async () => {
 };

 // could be extended with multiple commands in the future
-export type SubsetWorkerInput = {
+type SubsetWorkerData = {
   command: typeof Commands.Subset;
   arrayBuffer: ArrayBuffer;
   codePoints: Array<number>;
 };

-export type SubsetWorkerOutput<T extends SubsetWorkerInput["command"]> =
+type SubsetWorkerResult<T extends SubsetWorkerData["command"]> =
   T extends typeof Commands.Subset ? ArrayBuffer : never;

 let workerPool: Promise<
-  WorkerPool<
-    SubsetWorkerInput,
-    SubsetWorkerOutput<SubsetWorkerInput["command"]>
-  >
+  WorkerPool<SubsetWorkerData, SubsetWorkerResult<SubsetWorkerData["command"]>>
 > | null = null;

 /**
@@ -118,11 +115,11 @@ const getOrCreateWorkerPool = () => {
   if (!workerPool) {
     // immediate concurrent-friendly return, to ensure we have only one pool instance
     workerPool = promiseTry(async () => {
-      const { WorkerUrl } = await lazyLoadSubsetWorkerChunk();
+      const { WorkerUrl } = await lazyLoadWorkerSubsetChunk();

       const pool = WorkerPool.create<
-        SubsetWorkerInput,
-        SubsetWorkerOutput<SubsetWorkerInput["command"]>
+        SubsetWorkerData,
+        SubsetWorkerResult<SubsetWorkerData["command"]>
       >(WorkerUrl);

       return pool;
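For orientation, the sketch below (TypeScript, not part of the diff) shows how the pieces touched in this file fit together on the main thread: the shared chunk backs the pure fallback path, and the worker pool from the hunks above backs the off-thread path. The pool method name postMessage, the "SUBSET" command value, and the final toBase64 step are assumptions for illustration; only shouldUseWorkers, subsetToBase64, toBase64, and getOrCreateWorkerPool appear in the diff context.

// Illustrative sketch only; see the note above for which names are assumed.
type SubsetTask = {
  command: "SUBSET"; // stand-in for typeof Commands.Subset
  arrayBuffer: ArrayBuffer;
  codePoints: Array<number>;
};

declare const shouldUseWorkers: boolean;
declare function subsetToBase64(
  buffer: ArrayBuffer,
  codePoints: Array<number>,
): Promise<string>;
declare function toBase64(buffer: ArrayBuffer): Promise<string>;
declare function getOrCreateWorkerPool(): Promise<{
  postMessage(data: SubsetTask, transfer?: Transferable[]): Promise<ArrayBuffer>;
}>;

export const subsetWoff2Sketch = async (
  arrayBuffer: ArrayBuffer,
  codePoints: Array<number>,
): Promise<string> => {
  if (!shouldUseWorkers) {
    // main-thread fallback: subset directly via the lazily loaded shared chunk
    return subsetToBase64(arrayBuffer, codePoints);
  }

  // off-thread path: hand the buffer to a pooled worker and encode the result
  const pool = await getOrCreateWorkerPool();
  const subsetted = await pool.postMessage(
    { command: "SUBSET", arrayBuffer, codePoints },
    [arrayBuffer],
  );
  return toBase64(subsetted);
};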
@@ -9,8 +9,6 @@

 import { Commands, subsetToBinary } from "./subset-shared.chunk";

-import type { SubsetWorkerInput } from "./subset-main";
-
 /**
  * Due to this export (and related dynamic import), this worker code will be included in the bundle automatically (as a separate chunk),
  * without the need for esbuild / vite /rollup plugins and special browser / server treatment.
@@ -23,7 +21,13 @@ export const WorkerUrl: URL | undefined = import.meta.url

 // run only in the worker context
 if (typeof window === "undefined" && typeof self !== "undefined") {
-  self.onmessage = async (e: MessageEvent<SubsetWorkerInput>) => {
+  self.onmessage = async (e: {
+    data: {
+      command: typeof Commands.Subset;
+      arrayBuffer: ArrayBuffer;
+      codePoints: Array<number>;
+    };
+  }) => {
     switch (e.data.command) {
       case Commands.Subset:
         const buffer = await subsetToBinary(
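The hunk above is cut off right after the subsetToBinary( call. A plausible completion of the worker-side handler is sketched below; the postMessage reply and its transfer list are assumptions and are not shown in this diff.

// Worker-side sketch; the reply call and transfer list are assumed.
declare function subsetToBinary(
  buffer: ArrayBuffer,
  codePoints: Array<number>,
): Promise<ArrayBuffer>;

const Commands = { Subset: "SUBSET" } as const; // stand-in for the shared chunk's Commands

self.onmessage = async (e: {
  data: {
    command: typeof Commands.Subset;
    arrayBuffer: ArrayBuffer;
    codePoints: Array<number>;
  };
}) => {
  switch (e.data.command) {
    case Commands.Subset: {
      const buffer = await subsetToBinary(e.data.arrayBuffer, e.data.codePoints);
      // transfer ownership of the subsetted buffer back to the main thread
      self.postMessage(buffer, { transfer: [buffer] });
      break;
    }
  }
};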
@@ -16,28 +16,24 @@ class IdleWorker {
 }

 /**
- * Pool of idle short-lived workers, so that they can be reused in a short period of time (`ttl`), instead of having to create a new worker from scratch.
+ * Pool of idle short-lived workers.
+ *
+ * IMPORTANT: for simplicity it does not limit the number of newly created workers, leaving it up to the caller to manage the pool size.
  */
 export class WorkerPool<T, R> {
   private idleWorkers: Set<IdleWorker> = new Set();
-  private activeWorkers: Set<IdleWorker> = new Set();

   private readonly workerUrl: URL;
   private readonly workerTTL: number;
-  private readonly maxPoolSize: number;

   private constructor(
     workerUrl: URL,
     options: {
       ttl?: number;
-      maxPoolSize?: number;
     },
   ) {
     this.workerUrl = workerUrl;
     // by default, active & idle workers will be terminated after 1s of inactivity
     this.workerTTL = options.ttl || 1000;
-    // by default, active workers are limited to 3 instances
-    this.maxPoolSize = options.maxPoolSize || 3;
   }

   /**
@@ -52,7 +48,6 @@ export class WorkerPool<T, R> {
     workerUrl: URL | undefined,
     options: {
       ttl?: number;
-      maxPoolSize?: number;
     } = {},
   ): WorkerPool<T, R> {
     if (!workerUrl) {
@@ -77,18 +72,13 @@ export class WorkerPool<T, R> {
     let worker: IdleWorker;

     const idleWorker = Array.from(this.idleWorkers).shift();

     if (idleWorker) {
       this.idleWorkers.delete(idleWorker);
       worker = idleWorker;
-    } else if (this.activeWorkers.size < this.maxPoolSize) {
-      worker = await this.createWorker();
     } else {
-      worker = await this.waitForActiveWorker();
+      worker = await this.createWorker();
     }

-    this.activeWorkers.add(worker);
-
     return new Promise((resolve, reject) => {
       worker.instance.onmessage = this.onMessageHandler(worker, resolve);
       worker.instance.onerror = this.onErrorHandler(worker, reject);
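The hunk above resolves the returned Promise through onMessageHandler / onErrorHandler. Stripped of the pooling and TTL bookkeeping, the underlying pattern is a one-shot worker round trip wrapped in a Promise; a generic illustration (not the repo's code):

// Generic pattern only: one request/response cycle with a dedicated worker.
function postOnce<T, R>(worker: Worker, data: T): Promise<R> {
  return new Promise<R>((resolve, reject) => {
    worker.onmessage = (e: MessageEvent<R>) => resolve(e.data);
    worker.onerror = (e: ErrorEvent) => reject(e);
    worker.postMessage(data);
  });
}

On top of this, the real class debounces worker termination and (per the hunk further below) moves the worker back into the idle set once the reply arrives.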
@@ -111,13 +101,7 @@ export class WorkerPool<T, R> {
       worker.instance.terminate();
     }

-    for (const worker of this.activeWorkers) {
-      worker.debounceTerminate.cancel();
-      worker.instance.terminate();
-    }
-
     this.idleWorkers.clear();
-    this.activeWorkers.clear();
   }

   /**
@@ -146,25 +130,9 @@ export class WorkerPool<T, R> {
     return worker;
   }

-  private waitForActiveWorker(): Promise<IdleWorker> {
-    return Promise.race(
-      Array.from(this.activeWorkers).map(
-        (worker) =>
-          new Promise<IdleWorker>((resolve) => {
-            const originalOnMessage = worker.instance.onmessage;
-            worker.instance.onmessage = (e) => {
-              worker.instance.onmessage = originalOnMessage;
-              resolve(worker);
-            };
-          }),
-      ),
-    );
-  }
-
   private onMessageHandler(worker: IdleWorker, resolve: (value: R) => void) {
     return (e: { data: R }) => {
       worker.debounceTerminate();
-      this.activeWorkers.delete(worker);
       this.idleWorkers.add(worker);
       resolve(e.data);
     };
@@ -175,8 +143,6 @@ export class WorkerPool<T, R> {
     reject: (reason: ErrorEvent) => void,
   ) {
     return (e: ErrorEvent) => {
-      this.activeWorkers.delete(worker);
-
       // terminate the worker immediately before rejection
       worker.debounceTerminate(() => reject(e));
       worker.debounceTerminate.flush();
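Taken together, the reverted WorkerPool is created per worker URL with only a ttl option; after this commit there is no maxPoolSize cap, so, as the restored doc comment says, the caller decides how many workers get spawned. A hedged usage sketch, assuming the pool exposes a promise-returning postMessage-style method (its name is not visible in this diff):

// Usage sketch; the postMessage method and its transfer parameter are assumed.
declare class WorkerPool<T, R> {
  static create<T, R>(
    workerUrl: URL | undefined,
    options?: { ttl?: number },
  ): WorkerPool<T, R>;
  postMessage(data: T, transfer?: Transferable[]): Promise<R>;
}

type SubsetTask = {
  command: "SUBSET";
  arrayBuffer: ArrayBuffer;
  codePoints: Array<number>;
};

const pool = WorkerPool.create<SubsetTask, ArrayBuffer>(
  new URL("./subset-worker.chunk", import.meta.url), // hypothetical URL; the real one is exported as WorkerUrl
  { ttl: 2000 }, // idle workers terminate after 2s instead of the 1s default
);

const arrayBuffer = new ArrayBuffer(8);
const subsetted = await pool.postMessage(
  { command: "SUBSET", arrayBuffer, codePoints: [0x41, 0x42] },
  [arrayBuffer], // transfer the buffer instead of copying it
);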
@@ -16,15 +16,17 @@ const ENV_VARS = {
   },
 };

-const rawConfigCommon = {
+// excludes all external dependencies and bundles only the source code
+const getConfig = (outdir) => ({
+  outdir,
   bundle: true,
+  splitting: true,
   format: "esm",
+  packages: "external",
   plugins: [sassPlugin()],
+  target: "es2020",
   assetNames: "[dir]/[name]",
   chunkNames: "[dir]/[name]-[hash]",
-  // chunks are always external, so they are not bundled within and get build separately
-  external: ["*.chunk"],
-  packages: "external",
   alias: {
     "@excalidraw/common": path.resolve(__dirname, "../packages/common/src"),
     "@excalidraw/element": path.resolve(__dirname, "../packages/element/src"),
@@ -35,57 +37,47 @@ const rawConfigCommon = {
   loader: {
     ".woff2": "file",
   },
-};
+});

-const rawConfigIndex = {
-  ...rawConfigCommon,
-  entryPoints: ["index.tsx"],
-};
-
-const rawConfigChunks = {
-  ...rawConfigCommon,
-  // create a separate chunk for each
-  entryPoints: ["**/*.chunk.ts"],
-  entryNames: "[name]",
-};
-
-function buildDev(chunkConfig) {
-  const config = {
-    ...chunkConfig,
+function buildDev(config) {
+  return build({
+    ...config,
     sourcemap: true,
     define: {
       "import.meta.env": JSON.stringify(ENV_VARS.development),
     },
-    outdir: "dist/dev",
-  };
-
-  return build(config);
+  });
 }

-function buildProd(chunkConfig) {
-  const config = {
-    ...chunkConfig,
+function buildProd(config) {
+  return build({
+    ...config,
     minify: true,
     define: {
       "import.meta.env": JSON.stringify(ENV_VARS.production),
     },
-    outdir: "dist/prod",
-  };
-
-  return build(config);
+  });
 }

 const createESMRawBuild = async () => {
-  // development unminified build with source maps
-  await buildDev(rawConfigIndex);
-  await buildDev(rawConfigChunks);
-
-  // production minified buld without sourcemaps
-  await buildProd(rawConfigIndex);
-  await buildProd(rawConfigChunks);
+  const chunksConfig = {
+    entryPoints: ["index.tsx", "**/*.chunk.ts"],
+    entryNames: "[name]",
+  };
+
+  // development unminified build with source maps
+  await buildDev({
+    ...getConfig("dist/dev"),
+    ...chunksConfig,
+  });
+
+  // production minified buld without sourcemaps
+  await buildProd({
+    ...getConfig("dist/prod"),
+    ...chunksConfig,
+  });
 };

-// otherwise throws "ERROR: Could not resolve "./subset-worker.chunk"
 (async () => {
   await createESMRawBuild();
 })();
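Condensed, the reverted build script boils down to one shared esbuild config factory keyed by outdir, reused for a dev and a prod pass over the same entry points. A simplified standalone sketch (the real script also wires sassPlugin, the path aliases, and the ENV_VARS shown above):

// Simplified sketch of the reverted pattern; paths and options trimmed down.
import { build, type BuildOptions } from "esbuild";

const getConfig = (outdir: string): BuildOptions => ({
  outdir,
  bundle: true,
  splitting: true, // code shared between entry points is emitted as separate chunks
  format: "esm",
  packages: "external",
  target: "es2020",
});

const chunksConfig: BuildOptions = {
  // each *.chunk.ts file becomes its own entry point with a stable name
  entryPoints: ["index.tsx", "**/*.chunk.ts"],
  entryNames: "[name]",
};

(async () => {
  // development: unminified with sourcemaps
  await build({ ...getConfig("dist/dev"), ...chunksConfig, sourcemap: true });
  // production: minified without sourcemaps
  await build({ ...getConfig("dist/prod"), ...chunksConfig, minify: true });
})();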