fix encoding of embed data & compress (#2240)
This commit is contained in: parent e8a39b5f84, commit b3263c2a69
package-lock.json (generated, 6 lines changed)
@@ -3003,6 +3003,12 @@
      "resolved": "https://registry.npmjs.org/@types/node/-/node-13.5.1.tgz",
      "integrity": "sha512-Jj2W7VWQ2uM83f8Ls5ON9adxN98MvyJsMSASYFuSvrov8RMRY64Ayay7KV35ph1TSGIJ2gG9ZVDdEq3c3zaydA=="
    },
    "@types/pako": {
      "version": "1.0.1",
      "resolved": "https://registry.npmjs.org/@types/pako/-/pako-1.0.1.tgz",
      "integrity": "sha512-GdZbRSJ3Cv5fiwT6I0SQ3ckeN2PWNqxd26W9Z2fCK1tGrrasGy4puvNFtnddqH9UJFMQYXxEuuB7B8UK+LLwSg==",
      "dev": true
    },
    "@types/parse-json": {
      "version": "4.0.0",
      "resolved": "https://registry.npmjs.org/@types/parse-json/-/parse-json-4.0.0.tgz",

package.json

@@ -35,6 +35,7 @@
    "nanoid": "2.1.11",
    "node-sass": "4.14.1",
    "open-color": "1.7.0",
    "pako": "1.0.11",
    "png-chunk-text": "1.0.0",
    "png-chunks-encode": "1.0.0",
    "png-chunks-extract": "1.0.0",

@@ -49,6 +50,7 @@
  },
  "devDependencies": {
    "@types/lodash.throttle": "4.1.6",
    "@types/pako": "1.0.1",
    "asar": "3.0.3",
    "eslint": "6.8.0",
    "eslint-config-prettier": "6.12.0",
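
The commit pulls in pako for zlib compression of the embedded scene payload. A minimal round-trip sketch of the pako calls used further down (illustrative only, not part of the diff):

import { deflate, inflate } from "pako";

// deflate returns a Uint8Array; `{ to: "string" }` makes inflate return a
// string instead of a Uint8Array.
const scene = JSON.stringify({ type: "excalidraw", elements: [] });
const compressed = deflate(scene);
const restored = inflate(compressed, { to: "string" });
console.assert(restored === scene);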

src/base64.ts (deleted, 40 lines)

@@ -1,40 +0,0 @@
// `btoa(unescape(encodeURIComponent(str)))` hack doesn't work in edge cases and
// `unescape` API shouldn't be used anyway.
// This implem is ~10x faster than using fromCharCode in a loop (in Chrome).
const stringToByteString = (str: string): Promise<string> => {
  return new Promise((resolve, reject) => {
    const blob = new Blob([new TextEncoder().encode(str)]);
    const reader = new FileReader();
    reader.onload = function (event) {
      if (!event.target || typeof event.target.result !== "string") {
        return reject(new Error("couldn't convert to byte string"));
      }
      resolve(event.target.result);
    };
    reader.readAsBinaryString(blob);
  });
};

function byteStringToArrayBuffer(byteString: string) {
  const buffer = new ArrayBuffer(byteString.length);
  const bufferView = new Uint8Array(buffer);
  for (let i = 0, len = byteString.length; i < len; i++) {
    bufferView[i] = byteString.charCodeAt(i);
  }
  return buffer;
}

const byteStringToString = (byteString: string) => {
  return new TextDecoder("utf-8").decode(byteStringToArrayBuffer(byteString));
};

// -----------------------------------------------------------------------------

export const stringToBase64 = async (str: string) => {
  return btoa(await stringToByteString(str));
};

// async to align with stringToBase64
export const base64ToString = async (base64: string) => {
  return byteStringToString(atob(base64));
};
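
As the deleted comment notes, plain btoa only accepts Latin-1 input, so text is first turned into a UTF-8 byte string. A small illustrative sketch of that idea (helper names are my own, not code from this commit):

// btoa("😀") throws because the character is outside Latin-1; encoding to
// UTF-8 bytes first makes every unit representable.
const utf8ToByteString = (text: string) =>
  Array.from(new TextEncoder().encode(text), (byte) =>
    String.fromCharCode(byte),
  ).join("");

const base64 = btoa(utf8ToByteString("😀"));
const roundTripped = new TextDecoder().decode(
  Uint8Array.from(atob(base64), (char) => char.charCodeAt(0)),
);
console.assert(roundTripped === "😀");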

src/data/blob.ts

@@ -4,16 +4,20 @@ import { t } from "../i18n";
import { AppState } from "../types";
import { LibraryData, ImportedDataState } from "./types";
import { calculateScrollCenter } from "../scene";
import { MIME_TYPES } from "../constants";
import { base64ToString } from "../base64";

export const parseFileContents = async (blob: Blob | File) => {
  let contents: string;

  if (blob.type === "image/png") {
    const metadata = await (await import("./png")).getTEXtChunk(blob);
    if (metadata?.keyword === MIME_TYPES.excalidraw) {
      return metadata.text;
    try {
      return await (await import("./image")).decodePngMetadata(blob);
    } catch (error) {
      if (error.message === "INVALID") {
        throw new Error(t("alerts.imageDoesNotContainScene"));
      } else {
        throw new Error(t("alerts.cannotRestoreFromImage"));
      }
    }
    throw new Error(t("alerts.imageDoesNotContainScene"));
  } else {
    if ("text" in Blob) {
      contents = await blob.text();

@@ -29,16 +33,17 @@ export const parseFileContents = async (blob: Blob | File) => {
    });
  }
  if (blob.type === "image/svg+xml") {
    if (contents.includes(`payload-type:${MIME_TYPES.excalidraw}`)) {
      const match = contents.match(
        /<!-- payload-start -->(.+?)<!-- payload-end -->/,
      );
      if (!match) {
      try {
        return await (await import("./image")).decodeSvgMetadata({
          svg: contents,
        });
      } catch (error) {
        if (error.message === "INVALID") {
          throw new Error(t("alerts.imageDoesNotContainScene"));
        } else {
          throw new Error(t("alerts.cannotRestoreFromImage"));
        }
      }
      return base64ToString(match[1]);
    }
    throw new Error(t("alerts.imageDoesNotContainScene"));
  }
}
  return contents;
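
A hypothetical caller-side sketch (not part of this commit) of how the reworked parseFileContents surfaces its localized errors when an image carries no embedded scene:

// `file` comes from a drop event; the thrown messages are already run
// through t(), so they can be shown to the user as-is.
const importDroppedFile = async (file: File) => {
  try {
    const contents = await parseFileContents(file);
    return JSON.parse(contents);
  } catch (error) {
    window.alert(error.message);
    return null;
  }
};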

src/data/encode.ts (new file, 116 lines)

@@ -0,0 +1,116 @@
import { deflate, inflate } from "pako";

// -----------------------------------------------------------------------------
// byte (binary) strings
// -----------------------------------------------------------------------------

// fast, Buffer-compatible implem
export const toByteString = (data: string | Uint8Array): Promise<string> => {
  return new Promise((resolve, reject) => {
    const blob =
      typeof data === "string"
        ? new Blob([new TextEncoder().encode(data)])
        : new Blob([data]);
    const reader = new FileReader();
    reader.onload = (event) => {
      if (!event.target || typeof event.target.result !== "string") {
        return reject(new Error("couldn't convert to byte string"));
      }
      resolve(event.target.result);
    };
    reader.readAsBinaryString(blob);
  });
};

const byteStringToArrayBuffer = (byteString: string) => {
  const buffer = new ArrayBuffer(byteString.length);
  const bufferView = new Uint8Array(buffer);
  for (let i = 0, len = byteString.length; i < len; i++) {
    bufferView[i] = byteString.charCodeAt(i);
  }
  return buffer;
};

const byteStringToString = (byteString: string) => {
  return new TextDecoder("utf-8").decode(byteStringToArrayBuffer(byteString));
};

// -----------------------------------------------------------------------------
// base64
// -----------------------------------------------------------------------------

/**
 * @param isByteString set to true if already byte string to prevent bloat
 * due to reencoding
 */
export const stringToBase64 = async (str: string, isByteString = false) => {
  return isByteString ? btoa(str) : btoa(await toByteString(str));
};

// async to align with stringToBase64
export const base64ToString = async (base64: string, isByteString = false) => {
  return isByteString ? atob(base64) : byteStringToString(atob(base64));
};

// -----------------------------------------------------------------------------
// text encoding
// -----------------------------------------------------------------------------

type EncodedData = {
  encoded: string;
  encoding: "bstring";
  /** whether text is compressed (zlib) */
  compressed: boolean;
  /** version for potential migration purposes */
  version?: string;
};

/**
 * Encodes (and potentially compresses via zlib) text to byte string
 */
export const encode = async ({
  text,
  compress,
}: {
  text: string;
  /** defaults to `true`. If compression fails, falls back to bstring alone. */
  compress?: boolean;
}): Promise<EncodedData> => {
  let deflated!: string;
  if (compress !== false) {
    try {
      deflated = await toByteString(deflate(text));
    } catch (error) {
      console.error("encode: cannot deflate", error);
    }
  }
  return {
    version: "1",
    encoding: "bstring",
    compressed: !!deflated,
    encoded: deflated || (await toByteString(text)),
  };
};

export const decode = async (data: EncodedData): Promise<string> => {
  let decoded: string;

  switch (data.encoding) {
    case "bstring":
      // if compressed, do not double decode the bstring
      decoded = data.compressed
        ? data.encoded
        : await byteStringToString(data.encoded);
      break;
    default:
      throw new Error(`decode: unknown encoding "${data.encoding}"`);
  }

  if (data.compressed) {
    return inflate(new Uint8Array(byteStringToArrayBuffer(decoded)), {
      to: "string",
    });
  }

  return decoded;
};
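
A quick round-trip of the new encode/decode pair (illustrative usage inside an async context, not part of the diff); the resulting byte string is what later gets base64-encoded for SVG or stored in a PNG tEXt chunk:

const payload = JSON.stringify({ type: "excalidraw", elements: [] });
const data = await encode({ text: payload });
// data.encoding === "bstring"; data.compressed is true unless deflate failed
const restored = await decode(data);
console.assert(restored === payload);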

src/data/image.ts (new file, 130 lines)

@@ -0,0 +1,130 @@
import decodePng from "png-chunks-extract";
import tEXt from "png-chunk-text";
import encodePng from "png-chunks-encode";
import { stringToBase64, encode, decode, base64ToString } from "./encode";
import { MIME_TYPES } from "../constants";

// -----------------------------------------------------------------------------
// PNG
// -----------------------------------------------------------------------------

const blobToArrayBuffer = (blob: Blob): Promise<ArrayBuffer> => {
  if ("arrayBuffer" in blob) {
    return blob.arrayBuffer();
  }
  // Safari
  return new Promise((resolve, reject) => {
    const reader = new FileReader();
    reader.onload = (event) => {
      if (!event.target?.result) {
        return reject(new Error("couldn't convert blob to ArrayBuffer"));
      }
      resolve(event.target.result as ArrayBuffer);
    };
    reader.readAsArrayBuffer(blob);
  });
};

export const getTEXtChunk = async (
  blob: Blob,
): Promise<{ keyword: string; text: string } | null> => {
  const chunks = decodePng(new Uint8Array(await blobToArrayBuffer(blob)));
  const metadataChunk = chunks.find((chunk) => chunk.name === "tEXt");
  if (metadataChunk) {
    return tEXt.decode(metadataChunk.data);
  }
  return null;
};

export const encodePngMetadata = async ({
  blob,
  metadata,
}: {
  blob: Blob;
  metadata: string;
}) => {
  const chunks = decodePng(new Uint8Array(await blobToArrayBuffer(blob)));

  const metadataChunk = tEXt.encode(
    MIME_TYPES.excalidraw,
    JSON.stringify(
      await encode({
        text: metadata,
        compress: true,
      }),
    ),
  );
  // insert metadata before last chunk (iEND)
  chunks.splice(-1, 0, metadataChunk);

  return new Blob([encodePng(chunks)], { type: "image/png" });
};

export const decodePngMetadata = async (blob: Blob) => {
  const metadata = await getTEXtChunk(blob);
  if (metadata?.keyword === MIME_TYPES.excalidraw) {
    try {
      const encodedData = JSON.parse(metadata.text);
      if (!("encoded" in encodedData)) {
        // legacy, un-encoded scene JSON
        if ("type" in encodedData && encodedData.type === "excalidraw") {
          return metadata.text;
        }
        throw new Error("FAILED");
      }
      return await decode(encodedData);
    } catch (error) {
      console.error(error);
      throw new Error("FAILED");
    }
  }
  throw new Error("INVALID");
};

// -----------------------------------------------------------------------------
// SVG
// -----------------------------------------------------------------------------

export const encodeSvgMetadata = async ({ text }: { text: string }) => {
  const base64 = await stringToBase64(
    JSON.stringify(await encode({ text })),
    true /* is already byte string */,
  );

  let metadata = "";
  metadata += `<!-- payload-type:${MIME_TYPES.excalidraw} -->`;
  metadata += `<!-- payload-version:2 -->`;
  metadata += "<!-- payload-start -->";
  metadata += base64;
  metadata += "<!-- payload-end -->";
  return metadata;
};

export const decodeSvgMetadata = async ({ svg }: { svg: string }) => {
  if (svg.includes(`payload-type:${MIME_TYPES.excalidraw}`)) {
    const match = svg.match(/<!-- payload-start -->(.+?)<!-- payload-end -->/);
    if (!match) {
      throw new Error("INVALID");
    }
    const versionMatch = svg.match(/<!-- payload-version:(\d+) -->/);
    const version = versionMatch?.[1] || "1";
    const isByteString = version !== "1";

    try {
      const json = await base64ToString(match[1], isByteString);
      const encodedData = JSON.parse(json);
      if (!("encoded" in encodedData)) {
        // legacy, un-encoded scene JSON
        if ("type" in encodedData && encodedData.type === "excalidraw") {
          return json;
        }
        throw new Error("FAILED");
      }
      return await decode(encodedData);
    } catch (error) {
      console.error(error);
      throw new Error("FAILED");
    }
  }
  throw new Error("INVALID");
};
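
For reference, the trailer produced by encodeSvgMetadata and parsed back by decodeSvgMetadata has this shape (a sketch with a placeholder payload, not taken from the diff):

// <!-- payload-type:application/vnd.excalidraw+json --><!-- payload-version:2 -->
// <!-- payload-start --><base64 of JSON.stringify(EncodedData)><!-- payload-end -->
const trailer = await encodeSvgMetadata({
  text: JSON.stringify({ type: "excalidraw", elements: [] }),
});
// decodeSvgMetadata only needs the payload comments to be present in the string
const restored = await decodeSvgMetadata({ svg: trailer });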

src/data/index.ts

@@ -19,8 +19,6 @@ import { serializeAsJSON } from "./json";
import { ExportType } from "../scene/types";
import { restore } from "./restore";
import { ImportedDataState } from "./types";
import { MIME_TYPES } from "../constants";
import { stringToBase64 } from "../base64";

export { loadFromBlob } from "./blob";
export { saveAsJSON, loadFromJSON } from "./json";

@@ -302,21 +300,17 @@ export const exportCanvas = async (
    return window.alert(t("alerts.cannotExportEmptyCanvas"));
  }
  if (type === "svg" || type === "clipboard-svg") {
    let metadata = "";

    if (appState.exportEmbedScene && type === "svg") {
      metadata += `<!-- payload-type:${MIME_TYPES.excalidraw} -->`;
      metadata += "<!-- payload-start -->";
      metadata += await stringToBase64(serializeAsJSON(elements, appState));
      metadata += "<!-- payload-end -->";
    }

    const tempSvg = exportToSvg(elements, {
      exportBackground,
      viewBackgroundColor,
      exportPadding,
      shouldAddWatermark,
      metadata,
      metadata:
        appState.exportEmbedScene && type === "svg"
          ? await (await import("./image")).encodeSvgMetadata({
              text: serializeAsJSON(elements, appState),
            })
          : undefined,
    });
    if (type === "svg") {
      await fileSave(new Blob([tempSvg.outerHTML], { type: "image/svg+xml" }), {

@@ -345,9 +339,9 @@ export const exportCanvas = async (
  tempCanvas.toBlob(async (blob) => {
    if (blob) {
      if (appState.exportEmbedScene) {
        blob = await (await import("./png")).encodeTEXtChunk(blob, {
          keyword: MIME_TYPES.excalidraw,
          text: serializeAsJSON(elements, appState),
        blob = await (await import("./image")).encodePngMetadata({
          blob,
          metadata: serializeAsJSON(elements, appState),
        });
      }

src/data/png.ts (deleted, 42 lines)

@@ -1,42 +0,0 @@
import decodePng from "png-chunks-extract";
import tEXt from "png-chunk-text";
import encodePng from "png-chunks-encode";

const blobToArrayBuffer = (blob: Blob): Promise<ArrayBuffer> => {
  if ("arrayBuffer" in blob) {
    return blob.arrayBuffer();
  }
  // Safari
  return new Promise((resolve, reject) => {
    const reader = new FileReader();
    reader.onload = (event) => {
      if (!event.target?.result) {
        return reject(new Error("couldn't convert blob to ArrayBuffer"));
      }
      resolve(event.target.result as ArrayBuffer);
    };
    reader.readAsArrayBuffer(blob);
  });
};

export const getTEXtChunk = async (
  blob: Blob,
): Promise<{ keyword: string; text: string } | null> => {
  const chunks = decodePng(new Uint8Array(await blobToArrayBuffer(blob)));
  const metadataChunk = chunks.find((chunk) => chunk.name === "tEXt");
  if (metadataChunk) {
    return tEXt.decode(metadataChunk.data);
  }
  return null;
};

export const encodeTEXtChunk = async (
  blob: Blob,
  chunk: { keyword: string; text: string },
): Promise<Blob> => {
  const chunks = decodePng(new Uint8Array(await blobToArrayBuffer(blob)));
  const metadata = tEXt.encode(chunk.keyword, chunk.text);
  // insert metadata before last chunk (iEND)
  chunks.splice(-1, 0, metadata);
  return new Blob([encodePng(chunks)], { type: "image/png" });
};

src/tests/export.test.tsx (new file, 155 lines)

@@ -0,0 +1,155 @@
import React from "react";
import { render, waitFor } from "./test-utils";
import App from "../components/App";
import { API } from "./helpers/api";
import {
  encodePngMetadata,
  encodeSvgMetadata,
  decodeSvgMetadata,
} from "../data/image";
import { serializeAsJSON } from "../data/json";

import fs from "fs";
import util from "util";
import path from "path";

const readFile = util.promisify(fs.readFile);

const { h } = window;

const testElements = [
  {
    ...API.createElement({
      type: "text",
      id: "A",
      text: "😀",
    }),
    // can't get jsdom text measurement to work so this is a temp hack
    // to ensure the element isn't stripped as invisible
    width: 16,
    height: 16,
  },
];

// tiny polyfill for TextDecoder.decode on which we depend
Object.defineProperty(window, "TextDecoder", {
  value: class TextDecoder {
    decode(ab: ArrayBuffer) {
      return new Uint8Array(ab).reduce(
        (acc, c) => acc + String.fromCharCode(c),
        "",
      );
    }
  },
});

describe("appState", () => {
  beforeEach(() => {
    render(<App />);
  });

  it("export embedded png and reimport", async () => {
    const pngBlob = new Blob(
      [await readFile(path.resolve(__dirname, "./fixtures/smiley.png"))],
      { type: "image/png" },
    );

    const pngBlobEmbedded = await encodePngMetadata({
      blob: pngBlob,
      metadata: serializeAsJSON(testElements, h.state),
    });
    API.dropFile(pngBlobEmbedded);

    await waitFor(() => {
      expect(h.elements).toEqual([
        expect.objectContaining({ type: "text", text: "😀" }),
      ]);
    });
  });

  it("test encoding/decoding scene for SVG export", async () => {
    const encoded = await encodeSvgMetadata({
      text: serializeAsJSON(testElements, h.state),
    });
    const decoded = JSON.parse(await decodeSvgMetadata({ svg: encoded }));
    expect(decoded.elements).toEqual([
      expect.objectContaining({ type: "text", text: "😀" }),
    ]);
  });

  it("import embedded png (legacy v1)", async () => {
    const pngBlob = new Blob(
      [
        await readFile(
          path.resolve(__dirname, "./fixtures/test_embedded_v1.png"),
        ),
      ],
      { type: "image/png" },
    );

    API.dropFile(pngBlob);

    await waitFor(() => {
      expect(h.elements).toEqual([
        expect.objectContaining({ type: "text", text: "test" }),
      ]);
    });
  });

  it("import embedded png (v2)", async () => {
    const pngBlob = new Blob(
      [
        await readFile(
          path.resolve(__dirname, "./fixtures/smiley_embedded_v2.png"),
        ),
      ],
      { type: "image/png" },
    );

    API.dropFile(pngBlob);

    await waitFor(() => {
      expect(h.elements).toEqual([
        expect.objectContaining({ type: "text", text: "😀" }),
      ]);
    });
  });

  it("import embedded svg (legacy v1)", async () => {
    const svgBlob = new Blob(
      [
        await readFile(
          path.resolve(__dirname, "./fixtures/test_embedded_v1.svg"),
        ),
      ],
      { type: "image/svg+xml" },
    );

    API.dropFile(svgBlob);

    await waitFor(() => {
      expect(h.elements).toEqual([
        expect.objectContaining({ type: "text", text: "test" }),
      ]);
    });
  });

  it("import embedded svg (v2)", async () => {
    const svgBlob = new Blob(
      [
        await readFile(
          path.resolve(__dirname, "./fixtures/smiley_embedded_v2.svg"),
        ),
      ],
      { type: "image/svg+xml" },
    );

    API.dropFile(svgBlob);

    await waitFor(() => {
      expect(h.elements).toEqual([
        expect.objectContaining({ type: "text", text: "😀" }),
      ]);
    });
  });
});

src/tests/fixtures/smiley.png (new binary file, 2.2 KiB; binary not shown)

src/tests/fixtures/smiley_embedded_v2.png (new binary file, 3.0 KiB; binary not shown)

src/tests/fixtures/smiley_embedded_v2.svg (new file, 16 lines)

@@ -0,0 +1,16 @@
<svg version="1.1" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 56 77">
<!-- svg-source:excalidraw -->
<!-- payload-type:application/vnd.excalidraw+json --><!-- payload-version:2 --><!-- payload-start -->eyJ2ZXJzaW9uIjoiMSIsImVuY29kaW5nIjoiYnN0cmluZyIsImNvbXByZXNzZWQiOnRydWUsImVuY29kZWQiOiJ4nGVSy07jMFx1MDAxNN3zXHUwMDE1kdmOIE1IXztcdTAwMWVcdTAwMDNiw6ZcdTAwMTKjXHUwMDExXHUwMDFhjUxym1hcdTAwMTjbsm9pU4TEZ8xufpFPwNcpcVqyiHTPfVx1MDAxZJ9zX4+ShGFrgM1cdTAwMTNcdTAwMDabkktRWb5mP1xif1x1MDAwMeuEVj6VhdjplS1DZYNo5qenUvuGRjuc52madk0g4Vx1MDAxOVx1MDAxNDpf9uDjJHlccn+fXHUwMDExXHUwMDE1tS4up2Urr2/M9lwivV1v26vf8Pc+tIaiLy5cYlx1MDAxYozoxkPT6biPW1x1MDAxZufF5KTokbWosCE0XHUwMDE2NSDqXHUwMDA2PVZMeoyrWtL8tEdcdTAwMWNa/Vx1MDAwNJdaakt7j8tZxjNcdTAwMWVXP/LyqbZ6paq+XHUwMDA2LVfOcOufXHUwMDE565ZCylx1MDAwNbay04eXzcpcdTAwMDI72PJrR3J0gPd9Tnv9Y5dfWzdcbpzb69GGl1x1MDAwMkmCUVx1MDAxYd9BXHUwMDFjzW1cdTAwMTV0/3M4v+HW7OYwR8GAXHUwMDE5XHUwMDAw+ZJl6WSSj2dxzcD9/Fx1MDAxMLzTKlx1MDAxY0IxK/JiXHUwMDFj08Jdef8xTFxccukgykhcbv7sbqNjqVZSRtvJbk/u4/+/94GmWuFCbGHfV0Kv+bOQ7Z4sNOJcXIqaXHUwMDE4M1x0y4E3njVcbn+qfVx1MDAxYbVcdTAwMTk67EBcbkVbzkZcdTAwMDF88/+gIePGLJAj5bo7Zi9cdTAwMDLWXHUwMDE332/ieFx1MDAxOb7dVO+GqHbM6Z1HNPPtXHUwMDEz+I3nwSJ9<!-- payload-end -->
<defs>
  <style>
    @font-face {
      font-family: "Virgil";
      src: url("https://excalidraw.com/FG_Virgil.woff2");
    }
    @font-face {
      font-family: "Cascadia";
      src: url("https://excalidraw.com/Cascadia.woff2");
    }
  </style>
</defs>
<rect x="0" y="0" width="56" height="77" fill="#ffffff"></rect><g transform="translate(10 10) rotate(0 18 28.5)"><text x="0" y="41" font-family="Virgil, Segoe UI Emoji" font-size="36px" fill="#c92a2a" text-anchor="start" style="white-space: pre;" direction="ltr">😀</text></g></svg>

src/tests/fixtures/test_embedded_v1.png (new binary file, 3.0 KiB; binary not shown)

src/tests/fixtures/test_embedded_v1.svg (new file, 16 lines)

@@ -0,0 +1,16 @@
<svg version="1.1" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 97 77">
<!-- svg-source:excalidraw -->
<!-- payload-type:application/vnd.excalidraw+json --><!-- payload-start -->ewogICJ0eXBlIjogImV4Y2FsaWRyYXciLAogICJ2ZXJzaW9uIjogMiwKICAic291cmNlIjogImh0dHBzOi8vZXhjYWxpZHJhdy5jb20iLAogICJlbGVtZW50cyI6IFsKICAgIHsKICAgICAgImlkIjogInRabVFwa0cyQlZ2SzNxT01icHVXeiIsCiAgICAgICJ0eXBlIjogInRleHQiLAogICAgICAieCI6IDg2MS4xMTExMTExMTExMTExLAogICAgICAieSI6IDM1Ni4zMzMzMzMzMzMzMzMzLAogICAgICAid2lkdGgiOiA3NywKICAgICAgImhlaWdodCI6IDU3LAogICAgICAiYW5nbGUiOiAwLAogICAgICAic3Ryb2tlQ29sb3IiOiAiIzAwMDAwMCIsCiAgICAgICJiYWNrZ3JvdW5kQ29sb3IiOiAiIzg2OGU5NiIsCiAgICAgICJmaWxsU3R5bGUiOiAiY3Jvc3MtaGF0Y2giLAogICAgICAic3Ryb2tlV2lkdGgiOiAyLAogICAgICAic3Ryb2tlU3R5bGUiOiAic29saWQiLAogICAgICAicm91Z2huZXNzIjogMSwKICAgICAgIm9wYWNpdHkiOiAxMDAsCiAgICAgICJncm91cElkcyI6IFtdLAogICAgICAic3Ryb2tlU2hhcnBuZXNzIjogInJvdW5kIiwKICAgICAgInNlZWQiOiA0NzYzNjM3OTMsCiAgICAgICJ2ZXJzaW9uIjogMjMsCiAgICAgICJ2ZXJzaW9uTm9uY2UiOiA1OTc0MzUxMzUsCiAgICAgICJpc0RlbGV0ZWQiOiBmYWxzZSwKICAgICAgImJvdW5kRWxlbWVudElkcyI6IG51bGwsCiAgICAgICJ0ZXh0IjogInRlc3QiLAogICAgICAiZm9udFNpemUiOiAzNiwKICAgICAgImZvbnRGYW1pbHkiOiAxLAogICAgICAidGV4dEFsaWduIjogImxlZnQiLAogICAgICAidmVydGljYWxBbGlnbiI6ICJ0b3AiLAogICAgICAiYmFzZWxpbmUiOiA0MQogICAgfQogIF0sCiAgImFwcFN0YXRlIjogewogICAgInZpZXdCYWNrZ3JvdW5kQ29sb3IiOiAiI2ZmZmZmZiIsCiAgICAiZ3JpZFNpemUiOiBudWxsCiAgfQp9<!-- payload-end -->
<defs>
  <style>
    @font-face {
      font-family: "Virgil";
      src: url("https://excalidraw.com/FG_Virgil.woff2");
    }
    @font-face {
      font-family: "Cascadia";
      src: url("https://excalidraw.com/Cascadia.woff2");
    }
  </style>
</defs>
<rect x="0" y="0" width="97" height="77" fill="#ffffff"></rect><g transform="translate(10 10) rotate(0 38.5 28.5)"><text x="0" y="41" font-family="Virgil, Segoe UI Emoji" font-size="36px" fill="#000000" text-anchor="start" style="white-space: pre;" direction="ltr">test</text></g></svg>

src/tests/helpers/api.ts

@@ -138,19 +138,22 @@ export class API {
    return element as any;
  };

  static dropFile(sceneData: ImportedDataState) {
  static dropFile(data: ImportedDataState | Blob) {
    const fileDropEvent = createEvent.drop(GlobalTestState.canvas);
    const file = new Blob(
      [
        JSON.stringify({
          type: "excalidraw",
          ...sceneData,
        }),
      ],
      {
        type: "application/json",
      },
    );
    const file =
      data instanceof Blob
        ? data
        : new Blob(
            [
              JSON.stringify({
                type: "excalidraw",
                ...data,
              }),
            ],
            {
              type: "application/json",
            },
          );
    Object.defineProperty(fileDropEvent, "dataTransfer", {
      value: {
        files: [file],