Fix reporter false positive and DefaultExtractAndLoadChunksRegex not catching all cases

commit f469060ccf (parent afd56820db)
2 changed files with 49 additions and 21 deletions
@@ -243,19 +243,27 @@ page.on("console", async e => {
             }
         }
 
+        async function getText() {
+            try {
+                return await Promise.all(
+                    e.args().map(async a => {
+                        return await maybeGetError(a) || await a.jsonValue();
+                    })
+                ).then(a => a.join(" ").trim());
+            } catch {
+                return e.text();
+            }
+        }
+
         if (isDebug) {
-            console.error(e.text());
-        } else if (level === "error") {
-            const text = await Promise.all(
-                e.args().map(async a => {
-                    try {
-                        return await maybeGetError(a) || await a.jsonValue();
-                    } catch (e) {
-                        return a.toString();
-                    }
-                })
-            ).then(a => a.join(" ").trim());
+            const text = await getText();
 
+            console.error(text);
+            if (text.includes("A fatal error occurred:")) {
+                process.exit(1);
+            }
+        } else if (level === "error") {
+            const text = await getText();
+
             if (text.length && !text.startsWith("Failed to load resource: the server responded with a status of") && !text.includes("Webpack")) {
                 console.error("[Unexpected Error]", text);
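Note: the refactor above shares one getText() between the debug and error paths because JSHandle.jsonValue() can reject for values it cannot serialize, so per-argument stringification could misreport messages. A minimal standalone sketch of the same strategy, assuming Puppeteer's ConsoleMessage API; maybeGetError is reconstructed here for illustration and may differ from the script's actual helper:

import type { ConsoleMessage, JSHandle } from "puppeteer-core";

// Illustrative stand-in for the script's maybeGetError: if the handle wraps
// an Error in the page, return its stack or message, otherwise undefined.
async function maybeGetError(handle: JSHandle): Promise<string | undefined> {
    return await handle.evaluate(h =>
        h instanceof Error ? (h.stack ?? `${h.name}: ${h.message}`) : undefined
    );
}

async function serializeConsoleMessage(e: ConsoleMessage): Promise<string> {
    try {
        // jsonValue() rejects for unserializable values, so the whole batch is
        // wrapped in one try/catch and falls back to the raw message text
        // instead of coercing single arguments to strings like "JSHandle@object".
        const parts = await Promise.all(
            e.args().map(async a => await maybeGetError(a) || await a.jsonValue())
        );
        return parts.join(" ").trim();
    } catch {
        return e.text();
    }
}

With one shared serializer, both branches report identical text, and the "A fatal error occurred:" exit check runs on properly serialized output rather than raw handle text.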
@@ -322,22 +330,31 @@ async function runtime(token: string) {
         const validChunks = new Set<string>();
         const invalidChunks = new Set<string>();
+        const deferredRequires = new Set<string>();
 
         let chunksSearchingResolve: (value: void | PromiseLike<void>) => void;
         const chunksSearchingDone = new Promise<void>(r => chunksSearchingResolve = r);
 
         // True if resolved, false otherwise
         const chunksSearchPromises = [] as Array<() => boolean>;
-        const lazyChunkRegex = canonicalizeMatch(/Promise\.all\((\[\i\.\i\(".+?"\).+?\])\).then\(\i\.bind\(\i,"(.+?)"\)\)/g);
-        const chunkIdsRegex = canonicalizeMatch(/\("(.+?)"\)/g);
+
+        const LazyChunkRegex = canonicalizeMatch(/(?:Promise\.all\(\[(\i\.\i\("[^)]+?"\)[^\]]+?)\]\)|(\i\.\i\("[^)]+?"\)))\.then\(\i\.bind\(\i,"([^)]+?)"\)\)/g);
 
         async function searchAndLoadLazyChunks(factoryCode: string) {
-            const lazyChunks = factoryCode.matchAll(lazyChunkRegex);
+            const lazyChunks = factoryCode.matchAll(LazyChunkRegex);
             const validChunkGroups = new Set<[chunkIds: string[], entryPoint: string]>();
 
-            await Promise.all(Array.from(lazyChunks).map(async ([, rawChunkIds, entryPoint]) => {
-                const chunkIds = Array.from(rawChunkIds.matchAll(chunkIdsRegex)).map(m => m[1]);
-                if (chunkIds.length === 0) return;
+            // Workaround for a chunk that depends on the ChannelMessage component but may be force loaded before
+            // the chunk containing the component
+            const shouldForceDefer = factoryCode.includes(".Messages.GUILD_FEED_UNFEATURE_BUTTON_TEXT");
+
+            await Promise.all(Array.from(lazyChunks).map(async ([, rawChunkIdsArray, rawChunkIdsSingle, entryPoint]) => {
+                const rawChunkIds = rawChunkIdsArray ?? rawChunkIdsSingle;
+                const chunkIds = rawChunkIds ? Array.from(rawChunkIds.matchAll(Vencord.Webpack.ChunkIdsRegex)).map(m => m[1]) : [];
+
+                if (chunkIds.length === 0) {
+                    return;
+                }
 
                 let invalidChunkGroup = false;
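Note: \i is not standard regex syntax; it is the shorthand that Vencord's canonicalizeMatch expands into an identifier pattern. A standalone sketch of what the widened LazyChunkRegex now matches, with the identifier pattern spelled out explicitly as an assumption for illustration (the minified snippets are made up):

// Approximation of the canonicalized regex; [A-Za-z_$][\w$]* stands in for \i.
const I = String.raw`[A-Za-z_$][\w$]*`;
const LazyChunkRegex = new RegExp(
    String.raw`(?:Promise\.all\(\[(${I}\.${I}\("[^)]+?"\)[^\]]+?)\]\)|(${I}\.${I}\("[^)]+?"\)))\.then\(${I}\.bind\(${I},"([^)]+?)"\)\)`,
    "g"
);

// The old regex required the Promise.all([...]) wrapper:
const multiChunk = 'Promise.all([n.e("100"),n.e("200")]).then(n.bind(n,"300"))';
// The new alternation also catches a single chunk load, the case previously missed:
const singleChunk = 'n.e("100").then(n.bind(n,"300"))';

for (const code of [multiChunk, singleChunk]) {
    for (const [, rawChunkIdsArray, rawChunkIdsSingle, entryPoint] of code.matchAll(LazyChunkRegex)) {
        console.log({ rawChunkIds: rawChunkIdsArray ?? rawChunkIdsSingle, entryPoint });
    }
}
// -> { rawChunkIds: 'n.e("100"),n.e("200")', entryPoint: "300" }
// -> { rawChunkIds: 'n.e("100")', entryPoint: "300" }

Splitting the capture into an array group and a single group is why the destructuring in searchAndLoadLazyChunks gains the rawChunkIdsArray ?? rawChunkIdsSingle step.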
@@ -373,6 +390,11 @@ async function runtime(token: string) {
             // Requires the entry points for all valid chunk groups
             for (const [, entryPoint] of validChunkGroups) {
                 try {
+                    if (shouldForceDefer) {
+                        deferredRequires.add(entryPoint);
+                        continue;
+                    }
+
                     if (wreq.m[entryPoint]) wreq(entryPoint as any);
                 } catch (err) {
                     console.error(err);
@@ -435,6 +457,11 @@ async function runtime(token: string) {
         await chunksSearchingDone;
 
+        // Require deferred entry points
+        for (const deferredRequire of deferredRequires) {
+            wreq!(deferredRequire as any);
+        }
+
         // All chunks Discord has mapped to asset files, even if they are not used anymore
         const allChunks = [] as string[];
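Note: the two hunks above form a collect-then-drain pattern: entry points whose factory mentions .Messages.GUILD_FEED_UNFEATURE_BUTTON_TEXT are queued instead of required immediately, and are only required once chunksSearchingDone has resolved, so the chunk that actually defines the ChannelMessage component is loaded first. A simplified sketch of that shape, with wreq declared as in the diff and everything else reduced for illustration:

declare const wreq: ((id: string) => unknown) & { m: Record<string, unknown> };

const deferredRequires = new Set<string>();

function requireEntryPoint(entryPoint: string, shouldForceDefer: boolean) {
    if (shouldForceDefer) {
        // Requiring now could run this factory before the chunk that defines
        // the component it depends on has loaded; queue it for later instead.
        deferredRequires.add(entryPoint);
        return;
    }
    if (wreq.m[entryPoint]) wreq(entryPoint);
}

async function drainDeferred(chunksSearchingDone: Promise<void>) {
    // Once every chunk search has settled, all dependencies are present.
    await chunksSearchingDone;
    for (const deferredRequire of deferredRequires) {
        wreq(deferredRequire);
    }
}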
@@ -514,7 +541,6 @@ async function runtime(token: string) {
         setTimeout(() => console.log("[PUPPETEER_TEST_DONE_SIGNAL]"), 1000);
     } catch (e) {
         console.log("[PUP_DEBUG]", "A fatal error occurred:", e);
-        process.exit(1);
     }
 }
@@ -402,7 +402,8 @@ export function findExportedComponentLazy<T extends object = any>(...props: string[]) {
     });
 }
 
-const DefaultExtractAndLoadChunksRegex = /(?:Promise\.all\((\[\i\.\i\(".+?"\).+?\])\)|Promise\.resolve\(\)).then\(\i\.bind\(\i,"(.+?)"\)\)/;
+export const DefaultExtractAndLoadChunksRegex = /(?:Promise\.all\(\[(\i\.\i\("[^)]+?"\)[^\]]+?)\]\)|(\i\.\i\("[^)]+?"\))|Promise\.resolve\(\))\.then\(\i\.bind\(\i,"([^)]+?)"\)\)/;
+export const ChunkIdsRegex = /\("(.+?)"\)/g;
 
 /**
  * Extract and load chunks using their entry point
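Note: the now-exported regex recognizes all three shapes the minified code uses for a lazy module load: several chunks, a single chunk (the previously missed case), and no chunks at all. A standalone sketch, again spelling \i out as an identifier pattern purely for illustration:

const I = String.raw`[A-Za-z_$][\w$]*`;
const DefaultExtractAndLoadChunksRegex = new RegExp(
    String.raw`(?:Promise\.all\(\[(${I}\.${I}\("[^)]+?"\)[^\]]+?)\]\)|(${I}\.${I}\("[^)]+?"\))|Promise\.resolve\(\))\.then\(${I}\.bind\(${I},"([^)]+?)"\)\)`
);

for (const code of [
    'Promise.all([n.e("1"),n.e("2")]).then(n.bind(n,"9"))',
    'n.e("1").then(n.bind(n,"9"))',          // previously missed
    'Promise.resolve().then(n.bind(n,"9"))', // no chunks needed
]) {
    const [, rawChunkIdsArray, rawChunkIdsSingle, entryPointId] = code.match(DefaultExtractAndLoadChunksRegex)!;
    console.log(rawChunkIdsArray ?? rawChunkIdsSingle ?? "(no chunks)", "->", entryPointId);
}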
@@ -431,7 +432,7 @@ export async function extractAndLoadChunks(code: string[], matcher: RegExp = DefaultExtractAndLoadChunksRegex) {
         return;
     }
 
-    const [, rawChunkIds, entryPointId] = match;
+    const [, rawChunkIdsArray, rawChunkIdsSingle, entryPointId] = match;
     if (Number.isNaN(Number(entryPointId))) {
         const err = new Error("extractAndLoadChunks: Matcher didn't return a capturing group with the chunk ids array, or the entry point id returned as the second group wasn't a number");
         logger.warn(err, "Code:", code, "Matcher:", matcher);
@@ -443,8 +444,9 @@ export async function extractAndLoadChunks(code: string[], matcher: RegExp = DefaultExtractAndLoadChunksRegex) {
         return;
     }
 
+    const rawChunkIds = rawChunkIdsArray ?? rawChunkIdsSingle;
     if (rawChunkIds) {
-        const chunkIds = Array.from(rawChunkIds.matchAll(/\("(.+?)"\)/g)).map((m: any) => m[1]);
+        const chunkIds = Array.from(rawChunkIds.matchAll(ChunkIdsRegex)).map((m: any) => m[1]);
         await Promise.all(chunkIds.map(id => wreq.e(id)));
     }
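Note: ChunkIdsRegex replaces the previously inlined /\("(.+?)"\)/g so that extractAndLoadChunks and the reporter's chunk search share one definition. How the extraction behaves on a raw capture (values illustrative):

const ChunkIdsRegex = /\("(.+?)"\)/g;

const rawChunkIds = 'n.e("100"),n.e("200")';
const chunkIds = Array.from(rawChunkIds.matchAll(ChunkIdsRegex)).map(m => m[1]);
console.log(chunkIds); // ["100", "200"]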