fix: broken on self-host

This commit is contained in:
Gergő Móricz 2025-04-04 22:04:19 +02:00
parent cb3008a8af
commit 6287db8492
2 changed files with 28 additions and 28 deletions

View File

@@ -78,5 +78,5 @@ describe("Crawl tests", () => {
limit: 3,
delay: 5,
});
}, 600000);
}, 300000);
});

View File

@@ -674,11 +674,7 @@ const workerFun = async (
runningJobs.delete(job.id);
}
if (job.id && job.data && job.data.team_id && job.data.plan) {
await removeConcurrencyLimitActiveJob(job.data.team_id, job.id);
cleanOldConcurrencyLimitEntries(job.data.team_id);
if (job.data.crawl_id && job.data.crawlerOptions?.delay) {
if (job.id && job.data.crawl_id && job.data.crawlerOptions?.delay) {
await removeCrawlConcurrencyLimitActiveJob(job.data.crawl_id, job.id);
cleanOldCrawlConcurrencyLimitEntries(job.data.crawl_id);
@@ -705,6 +701,10 @@ const workerFun = async (
}
}
if (job.id && job.data && job.data.team_id && job.data.plan) {
await removeConcurrencyLimitActiveJob(job.data.team_id, job.id);
cleanOldConcurrencyLimitEntries(job.data.team_id);
// No need to check if we're under the limit here -- if the current job is finished,
// we are 1 under the limit, assuming the job insertion logic never over-inserts. - MG
const nextJob = await takeConcurrencyLimitedJob(job.data.team_id);