feat(v0): store v0 users (team ID) in Redis for collection (#1111)
parent fa99c62f64
commit aaa16ee388
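Every controller touched below makes the same change: on a successful v0 request, the caller's team_id is added to a Redis set named teams_using_v0 with a fire-and-forget SADD, so a Redis failure only logs and never blocks the request. As a minimal sketch (not part of this commit), assuming an ioredis client like the redisConnection exported by queue-service, the collected set could later be read back with SMEMBERS; listTeamsUsingV0 and the REDIS_URL fallback below are hypothetical names used only for illustration.

import Redis from "ioredis";

// Hypothetical reader for the set populated by this commit; assumes ioredis,
// the library that backs redisConnection in queue-service.
const redis = new Redis(process.env.REDIS_URL ?? "redis://localhost:6379");

async function listTeamsUsingV0(): Promise<string[]> {
  // SMEMBERS returns every team_id any v0 controller has SADD'ed; set
  // semantics make repeated sadd calls for the same team idempotent.
  return redis.smembers("teams_using_v0");
}

listTeamsUsingV0()
  .then((teams) => console.log(`teams still on v0: ${teams.length}`))
  .catch((error) => console.error("Failed to read teams_using_v0", error))
  .finally(() => redis.disconnect());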
@@ -6,6 +6,7 @@ import { logger } from "../../../src/lib/logger";
 import { getCrawl, saveCrawl } from "../../../src/lib/crawl-redis";
 import * as Sentry from "@sentry/node";
 import { configDotenv } from "dotenv";
+import { redisConnection } from "../../services/queue-service";
 configDotenv();
 
 export async function crawlCancelController(req: Request, res: Response) {
@@ -19,6 +20,9 @@ export async function crawlCancelController(req: Request, res: Response) {
 
     const { team_id } = auth;
 
+    redisConnection.sadd("teams_using_v0", team_id)
+      .catch(error => logger.error("Failed to add team to teams_using_v0", { error, team_id }));
+
     const sc = await getCrawl(req.params.jobId);
     if (!sc) {
       return res.status(404).json({ error: "Job not found" });
@@ -1,7 +1,7 @@
 import { Request, Response } from "express";
 import { authenticateUser } from "../auth";
 import { RateLimiterMode } from "../../../src/types";
-import { getScrapeQueue } from "../../../src/services/queue-service";
+import { getScrapeQueue, redisConnection } from "../../../src/services/queue-service";
 import { logger } from "../../../src/lib/logger";
 import { getCrawl, getCrawlJobs } from "../../../src/lib/crawl-redis";
 import { supabaseGetJobsByCrawlId } from "../../../src/lib/supabase-jobs";
@@ -67,6 +67,9 @@ export async function crawlStatusController(req: Request, res: Response) {
 
     const { team_id } = auth;
 
+    redisConnection.sadd("teams_using_v0", team_id)
+      .catch(error => logger.error("Failed to add team to teams_using_v0", { error, team_id }));
+
     const sc = await getCrawl(req.params.jobId);
     if (!sc) {
       return res.status(404).json({ error: "Job not found" });
@@ -24,7 +24,7 @@ import {
   saveCrawl,
   StoredCrawl,
 } from "../../../src/lib/crawl-redis";
-import { getScrapeQueue } from "../../../src/services/queue-service";
+import { getScrapeQueue, redisConnection } from "../../../src/services/queue-service";
 import { checkAndUpdateURL } from "../../../src/lib/validateUrl";
 import * as Sentry from "@sentry/node";
 import { getJobPriority } from "../../lib/job-priority";
@@ -41,6 +41,9 @@ export async function crawlController(req: Request, res: Response) {
 
     const { team_id, plan, chunk } = auth;
 
+    redisConnection.sadd("teams_using_v0", team_id)
+      .catch(error => logger.error("Failed to add team to teams_using_v0", { error, team_id }));
+
     if (req.headers["x-idempotency-key"]) {
       const isIdempotencyValid = await validateIdempotencyKey(req);
       if (!isIdempotencyValid) {
@@ -2,6 +2,8 @@ import { AuthResponse, RateLimiterMode } from "../../types";
 
 import { Request, Response } from "express";
 import { authenticateUser } from "../auth";
+import { redisConnection } from "../../services/queue-service";
+import { logger } from "../../lib/logger";
 
 export const keyAuthController = async (req: Request, res: Response) => {
   try {
@@ -11,6 +13,9 @@ export const keyAuthController = async (req: Request, res: Response) => {
       return res.status(auth.status).json({ error: auth.error });
     }
 
+    redisConnection.sadd("teams_using_v0", auth.team_id)
+      .catch(error => logger.error("Failed to add team to teams_using_v0", { error, team_id: auth.team_id }));
+
     // if success, return success: true
     return res.status(200).json({ success: true });
   } catch (error) {
@@ -21,7 +21,7 @@ import {
   defaultOrigin,
 } from "../../lib/default-values";
 import { addScrapeJob, waitForJob } from "../../services/queue-jobs";
-import { getScrapeQueue } from "../../services/queue-service";
+import { getScrapeQueue, redisConnection } from "../../services/queue-service";
 import { v4 as uuidv4 } from "uuid";
 import { logger } from "../../lib/logger";
 import * as Sentry from "@sentry/node";
@@ -181,6 +181,9 @@ export async function scrapeController(req: Request, res: Response) {
 
     const { team_id, plan, chunk } = auth;
 
+    redisConnection.sadd("teams_using_v0", team_id)
+      .catch(error => logger.error("Failed to add team to teams_using_v0", { error, team_id }));
+
     const crawlerOptions = req.body.crawlerOptions ?? {};
     const pageOptions = { ...defaultPageOptions, ...req.body.pageOptions };
     const extractorOptions = {
@@ -11,7 +11,7 @@ import { search } from "../../search";
 import { isUrlBlocked } from "../../scraper/WebScraper/utils/blocklist";
 import { v4 as uuidv4 } from "uuid";
 import { logger } from "../../lib/logger";
-import { getScrapeQueue } from "../../services/queue-service";
+import { getScrapeQueue, redisConnection } from "../../services/queue-service";
 import { addScrapeJob, waitForJob } from "../../services/queue-jobs";
 import * as Sentry from "@sentry/node";
 import { getJobPriority } from "../../lib/job-priority";
@@ -163,6 +163,10 @@ export async function searchController(req: Request, res: Response) {
       return res.status(auth.status).json({ error: auth.error });
     }
     const { team_id, plan, chunk } = auth;
+
+    redisConnection.sadd("teams_using_v0", team_id)
+      .catch(error => logger.error("Failed to add team to teams_using_v0", { error, team_id }));
+
     const crawlerOptions = req.body.crawlerOptions ?? {};
     const pageOptions = req.body.pageOptions ?? {
       includeHtml: req.body.pageOptions?.includeHtml ?? false,