Mirror of https://git.mirrors.martin98.com/https://github.com/mendableai/firecrawl, synced 2025-08-11 15:39:13 +08:00
Nick: send notifications for crawl+batch scrape
This commit is contained in:
parent 7128f83a7a
commit e1e39f8836
@@ -44,7 +44,7 @@ const emailTemplates: Record<
    <br/>
    <p>We've improved our system by transitioning to concurrency limits, allowing faster scraping by default and eliminating* the often rate limit errors.</p>
    <p>You're hitting the concurrency limit for your plan quite often, which means Firecrawl can't scrape as fast as it could. But don't worry, it is not failing your requests and you are still getting your results.</p>
    <p>This is just to let you know that you could be scraping more pages faster. Consider upgrading your plan at <a href='https://firecrawl.dev/pricing'>firecrawl.dev/pricing</a>.</p><br/>Thanks,<br/>Firecrawl Team<br/>`,
    <p>This is just to let you know that you could be scraping faster. Consider upgrading your plan at <a href='https://firecrawl.dev/pricing'>firecrawl.dev/pricing</a>.</p><br/>Thanks,<br/>Firecrawl Team<br/>`,
  },
};
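For context, a minimal sketch of the assumed shape of the emailTemplates record touched above. Only the HTML body appears in this hunk, so the enum value, subject line, and Partial wrapper are illustrative assumptions, not the repository's actual definition:

// Hedged sketch of the assumed template map shape.
enum NotificationType {
  CONCURRENCY_LIMIT_REACHED = "concurrencyLimitReached", // enum value is an assumption
}

const emailTemplates: Partial<
  Record<NotificationType, { subject: string; html: string }>
> = {
  [NotificationType.CONCURRENCY_LIMIT_REACHED]: {
    subject: "You could be scraping faster - Firecrawl", // assumed subject line
    html: `<p>This is just to let you know that you could be scraping faster. Consider upgrading your plan at <a href='https://firecrawl.dev/pricing'>firecrawl.dev/pricing</a>.</p><br/>Thanks,<br/>Firecrawl Team<br/>`,
  },
};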
@@ -95,14 +95,12 @@ async function addScrapeJobRaw(
    logger.info("Concurrency limited 2x (single) - ", "Concurrency queue jobs: ", concurrencyQueueJobs, "Max concurrency: ", maxConcurrency, "Team ID: ", webScraperOptions.team_id);

    // Only send notification if it's not a crawl or batch scrape
    if (!isCrawlOrBatchScrape(webScraperOptions)) {
      const shouldSendNotification = await shouldSendConcurrencyLimitNotification(webScraperOptions.team_id);
      if (shouldSendNotification) {
        sendNotificationWithCustomDays(webScraperOptions.team_id, NotificationType.CONCURRENCY_LIMIT_REACHED, 15, false).catch((error) => {
          logger.error("Error sending notification (concurrency limit reached): ", error);
        });
      }
    }

    webScraperOptions.concurrencyLimited = true;
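The gate above hinges on isCrawlOrBatchScrape, whose implementation is not part of this hunk. A hedged sketch of how such a helper could work; the option field names below are assumptions, not the repository's actual types:

// Hypothetical sketch only; the real WebScraperOptions fields may differ.
interface WebScraperOptions {
  team_id: string;
  crawl_id?: string;         // assumed: set when the job belongs to a crawl
  is_batch_scrape?: boolean; // assumed: set for batch scrape jobs
  concurrencyLimited?: boolean;
}

function isCrawlOrBatchScrape(options: WebScraperOptions): boolean {
  // A job counts as part of a crawl or batch scrape if either marker is present.
  return options.crawl_id !== undefined || options.is_batch_scrape === true;
}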
@@ -193,14 +191,12 @@ export async function addScrapeJobs(
    logger.info("Concurrency limited 2x (multiple) - ", "Concurrency queue jobs: ", addToCQ.length, "Max concurrency: ", maxConcurrency, "Team ID: ", jobs[0].data.team_id);

    // Only send notification if it's not a crawl or batch scrape
    if (!isCrawlOrBatchScrape(jobs[0].data)) {
      const shouldSendNotification = await shouldSendConcurrencyLimitNotification(jobs[0].data.team_id);
      if (shouldSendNotification) {
        sendNotificationWithCustomDays(jobs[0].data.team_id, NotificationType.CONCURRENCY_LIMIT_REACHED, 15, false).catch((error) => {
          logger.error("Error sending notification (concurrency limit reached): ", error);
        });
      }
    }

    await Promise.all(
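Both call sites use the same fire-and-forget pattern: the returned promise is not awaited, and .catch keeps a mailer failure from ever rejecting the enqueue path, while the 15 presumably acts as a per-team dedup window in days. A self-contained sketch of what sendNotificationWithCustomDays plausibly does; the in-memory map stands in for whatever store the real code uses:

// Sketch of the notification wrapper as inferred from the call sites above;
// the real implementation presumably persists notification history elsewhere.
const lastSent = new Map<string, number>();

async function sendNotificationWithCustomDays(
  teamId: string,
  notificationType: string,
  daysBetweenEmails: number,
  bypassRecency: boolean,
): Promise<void> {
  const key = `${teamId}:${notificationType}`;
  const windowMs = daysBetweenEmails * 24 * 60 * 60 * 1000;

  // Deduplicate: skip if this team was already notified inside the window,
  // unless the caller explicitly bypasses the recency check.
  if (!bypassRecency) {
    const last = lastSent.get(key);
    if (last !== undefined && Date.now() - last < windowMs) return;
  }

  // ...render the matching email template and hand it to the mailer here...
  lastSent.set(key, Date.now());
}

// Call-site pattern from the hunks above: fire-and-forget, with .catch so a
// notification failure can never block the enqueue path.
// sendNotificationWithCustomDays(teamId, "concurrencyLimitReached", 15, false)
//   .catch((error) => console.error("Error sending notification: ", error));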