diff --git a/apps/api/requests.http b/apps/api/requests.http
index 962ebabe..95195e9f 100644
--- a/apps/api/requests.http
+++ b/apps/api/requests.http
@@ -9,19 +9,17 @@ Authorization: Bearer {{$dotenv TEST_API_KEY}}
 content-type: application/json

 {
-    "url":"https://opencorporates.com/companies/us_tn/001260776/"
+    "url":"https://firecrawl.dev"
 }

-### http://ibtikar.net.sa bugado. redirect
-### https://bansko.bg/bg -> webhooks
-
 ### Crawl Website
 # @name crawl
-POST {{baseUrl}}/v1/map HTTP/1.1
+POST {{baseUrl}}/v1/crawl HTTP/1.1
 Authorization: Bearer {{$dotenv TEST_API_KEY}}
 content-type: application/json

-{
-    "url": "https://emelitastes.lausd.org"
+{
+    "url": "https://firecrawl.dev"
 }

 ### Check Crawl Status
diff --git a/apps/api/src/controllers/v1/crawl-status.ts b/apps/api/src/controllers/v1/crawl-status.ts
index a2593ced..02046364 100644
--- a/apps/api/src/controllers/v1/crawl-status.ts
+++ b/apps/api/src/controllers/v1/crawl-status.ts
@@ -12,6 +12,7 @@ import {
   getDoneJobsOrdered,
   getDoneJobsOrderedLength,
   getThrottledJobs,
+  isCrawlFinished,
 } from "../../lib/crawl-redis";
 import { getScrapeQueue } from "../../services/queue-service";
 import {
@@ -116,7 +117,7 @@ export async function crawlStatusController(
     sc.cancelled
       ? "cancelled"
       : validJobStatuses.every((x) => x[1] === "completed") &&
-          validJobStatuses.length > 0
+          await isCrawlFinished(req.params.jobId)
         ? "completed"
         : "scraping";