Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions index.js
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,8 @@ const defaultOptions = {
destination: null,
concurrency: 4,
include: ["/"],
// fastFail: when true, a page-load Timeout error aborts the crawl; set to false to log the error, skip the page, and move on
fastFail: true,
userAgent: "ReactSnap",
// 4 params below will be refactored to one: `puppeteer: {}`
// https://github.com/stereobooster/react-snap/issues/120
Expand Down
24 changes: 17 additions & 7 deletions src/puppeteer_utils.js
Original file line number Diff line number Diff line change
Expand Up @@ -157,7 +157,9 @@ const crawl = async opt => {

const onUnhandledRejection = error => {
console.log("🔥 UnhandledPromiseRejectionWarning", error);
shuttingDown = true;
if (options.fastFail) {
shuttingDown = true;
}
};
process.on("unhandledRejection", onUnhandledRejection);

Expand Down Expand Up @@ -237,20 +239,28 @@ const crawl = async opt => {
beforeFetch && beforeFetch({ page, route });
await page.setUserAgent(options.userAgent);
const tracker = createTracker(page);
let skipPage = false;
try {
await page.goto(pageUrl, { waitUntil: "networkidle0" });
} catch (e) {
e.message = augmentTimeoutError(e.message, tracker);
throw e;
if (opt.fastFail) {
throw e;
} else {
console.log(`🔥 failed to crawl page: ${pageUrl}`, e);
skipPage = true;
}
} finally {
tracker.dispose();
}
if (options.waitFor) await page.waitFor(options.waitFor);
if (options.crawl) {
const links = await getLinks({ page });
links.forEach(addToQueue);
if (skipPage == false) {
if (options.waitFor) await page.waitFor(options.waitFor);
if (options.crawl) {
const links = await getLinks({ page });
links.forEach(addToQueue);
}
afterFetch && (await afterFetch({ page, route, browser, addToQueue }));
}
afterFetch && (await afterFetch({ page, route, browser, addToQueue }));
await page.close();
console.log(`✅ crawled ${processed + 1} out of ${enqued} (${route})`);
} catch (e) {
Expand Down