Fix Supabase client configuration errors when USE_DB_AUTHENTICATION is false (#1534)

* Fix Supabase client configuration errors when USE_DB_AUTHENTICATION is false

Co-Authored-By: hello@sideguide.dev <hello+firecrawl@sideguide.dev>

* Add USE_DB_AUTHENTICATION checks to map and search controllers

Add test for USE_DB_AUTHENTICATION=false

Add USE_DB_AUTHENTICATION checks to billing services

Add USE_DB_AUTHENTICATION checks to batch_billing.ts

Add USE_DB_AUTHENTICATION checks to cached-docs.ts

Add USE_DB_AUTHENTICATION checks to supabase-jobs.ts

Add USE_DB_AUTHENTICATION checks to team-id-sync.ts

Add USE_DB_AUTHENTICATION checks to test-suite log.ts

Add USE_DB_AUTHENTICATION checks to idempotency services

Co-Authored-By: hello@sideguide.dev <hello+firecrawl@sideguide.dev>

* Revert "Add USE_DB_AUTHENTICATION checks to map and search controllers"

This reverts commit 834a5d51a68c74ada67800fa3a0aa45bde22d745.

---------

Co-authored-by: Devin AI <158243242+devin-ai-integration[bot]@users.noreply.github.com>
Co-authored-by: hello@sideguide.dev <hello+firecrawl@sideguide.dev>
Co-authored-by: Nicolas <nicolascamara29@gmail.com>
Co-authored-by: Gergő Móricz <mo.geryy@gmail.com>

@@ -147,19 +147,23 @@ async function finishCrawlIfNeeded(job: Job & { id: string }, sc: StoredCrawl) {
       "crawl:" + job.data.crawl_id + ":visited_unique",
     ),
   );
   logger.info("Visited URLs", {
     visitedUrls: visitedUrls.size,
   });
-  const lastUrls: string[] = (
-    (
-      await supabase_service.rpc("diff_get_last_crawl_urls", {
-        i_team_id: job.data.team_id,
-        i_url: sc.originUrl!,
-      })
-    ).data ?? []
-  ).map((x) => x.url);
+  let lastUrls: string[] = [];
+  const useDbAuthentication = process.env.USE_DB_AUTHENTICATION === "true";
+  if (useDbAuthentication) {
+    lastUrls = (
+      (
+        await supabase_service.rpc("diff_get_last_crawl_urls", {
+          i_team_id: job.data.team_id,
+          i_url: sc.originUrl!,
+        })
+      ).data ?? []
+    ).map((x) => x.url);
+  }
   const lastUrlsSet = new Set(lastUrls);
@@ -257,7 +261,8 @@ async function finishCrawlIfNeeded(job: Job & { id: string }, sc: StoredCrawl) {
   if (
     visitedUrls.length > 0 &&
     job.data.crawlerOptions !== null &&
-    originUrl
+    originUrl &&
+    process.env.USE_DB_AUTHENTICATION === "true"
   ) {
     // Queue the indexing job instead of doing it directly
     await getIndexQueue().add(
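
The hunks above show the shape of the guard that the other commits apply to the billing, idempotency, caching, and job-logging services: read USE_DB_AUTHENTICATION once and skip the Supabase call entirely when it is not "true", rather than letting an unconfigured client throw. A minimal sketch of that pattern, assuming a hypothetical logJob helper (the import path, function signature, and "firecrawl_jobs" table name are illustrative, not necessarily the repo's actual identifiers):

import { supabase_service } from "../services/supabase";

// Hypothetical job logger: in self-hosted deployments (USE_DB_AUTHENTICATION=false)
// no Supabase client is configured, so the write is skipped instead of throwing
// a client-configuration error.
export async function logJob(job: { id: string; team_id: string }): Promise<void> {
  const useDbAuthentication = process.env.USE_DB_AUTHENTICATION === "true";
  if (!useDbAuthentication) {
    return; // no-op when DB authentication is disabled
  }
  const { error } = await supabase_service
    .from("firecrawl_jobs") // assumed table name, for illustration only
    .insert([{ job_id: job.id, team_id: job.team_id }]);
  if (error) {
    throw new Error(`Failed to log job ${job.id}: ${error.message}`);
  }
}

Short-circuiting at the top of each DB-touching helper keeps the flag check out of the controllers, which is consistent with the revert above: callers stay unchanged, and only the services that actually reach for Supabase know about USE_DB_AUTHENTICATION.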