diff --git a/apps/api/src/lib/crawl-redis.ts b/apps/api/src/lib/crawl-redis.ts
index b6bae396..3928eb11 100644
--- a/apps/api/src/lib/crawl-redis.ts
+++ b/apps/api/src/lib/crawl-redis.ts
@@ -148,7 +148,8 @@ export async function lockURL(id: string, sc: StoredCrawl, url: string): Promise
     res = (await redisConnection.sadd("crawl:" + id + ":visited", url)) !== 0
   } else {
     const permutations = generateURLPermutations(url);
-    res = (await redisConnection.sadd("crawl:" + id + ":visited", ...permutations.map(x => x.href))) === permutations.length;
+    const x = (await redisConnection.sadd("crawl:" + id + ":visited", ...permutations.map(x => x.href)));
+    res = x === permutations.length;
   }
 
   await redisConnection.expire("crawl:" + id + ":visited", 24 * 60 * 60, "NX");
diff --git a/apps/api/src/services/queue-worker.ts b/apps/api/src/services/queue-worker.ts
index c25601ca..aa7a891f 100644
--- a/apps/api/src/services/queue-worker.ts
+++ b/apps/api/src/services/queue-worker.ts
@@ -350,7 +350,7 @@ async function processJob(job: Job & { id: string }, token: string) {
       await addCrawlJobDone(job.data.crawl_id, job.id);
 
-      if (!job.data.sitemapped && job.data.crawlerOptions !== null) {
+      if (job.data.crawlerOptions !== null) {
         if (!sc.cancelled) {
           const crawler = crawlToCrawler(job.data.crawl_id, sc, doc.metadata.url ?? doc.metadata.sourceURL ?? sc.originUrl);
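For context on the first hunk: Redis `SADD` returns the number of members that were newly added, so comparing the result `x` against `permutations.length` means the lock is acquired only when no equivalent form of the URL was visited before. Below is a minimal sketch of that idea using an in-memory stand-in for Redis; the `generateURLPermutations` stub and the simplified `lockURL` signature are illustrative assumptions, not the real implementations from crawl-redis.ts.

```ts
// Stub for illustration: the real generateURLPermutations lives in crawl-redis.ts.
// Here it just produces the http/https variants of a URL.
function generateURLPermutations(url: string): URL[] {
  const a = new URL(url);
  const b = new URL(url);
  b.protocol = a.protocol === "https:" ? "http:" : "https:";
  return [a, b];
}

const visited = new Set<string>();

// Mimics Redis SADD: adds members and returns how many were newly added.
function sadd(set: Set<string>, ...members: string[]): number {
  let added = 0;
  for (const m of members) {
    if (!set.has(m)) {
      set.add(m);
      added++;
    }
  }
  return added;
}

function lockURL(url: string): boolean {
  const permutations = generateURLPermutations(url);
  const x = sadd(visited, ...permutations.map(p => p.href));
  // Lock succeeds only if every permutation was previously unseen,
  // i.e. no equivalent form of this URL has been visited.
  return x === permutations.length;
}

console.log(lockURL("https://example.com/page")); // true: first visit
console.log(lockURL("http://example.com/page"));  // false: an equivalent form was already seen
```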
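The second hunk widens when link discovery runs after a page completes: previously, jobs flagged `sitemapped` were excluded, whereas now any job belonging to a crawl (non-null `crawlerOptions`) proceeds, so pages discovered via a sitemap also get their links followed. A minimal sketch of the before/after predicate, with a hypothetical simplified job shape:

```ts
// Hypothetical, simplified shape of job.data for illustration only.
interface CrawlJobData {
  sitemapped?: boolean;
  crawlerOptions: Record<string, unknown> | null;
}

// Pre-patch guard: pages that came from a sitemap never re-entered the crawler.
function shouldDiscoverLinksBefore(data: CrawlJobData): boolean {
  return !data.sitemapped && data.crawlerOptions !== null;
}

// Post-patch guard: only jobs outside a crawl (null crawlerOptions) skip discovery.
function shouldDiscoverLinksAfter(data: CrawlJobData): boolean {
  return data.crawlerOptions !== null;
}

const sitemappedPage: CrawlJobData = { sitemapped: true, crawlerOptions: {} };
console.log(shouldDiscoverLinksBefore(sitemappedPage)); // false -- links were skipped
console.log(shouldDiscoverLinksAfter(sitemappedPage));  // true  -- links are now followed
```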