mirror of
https://git.mirrors.martin98.com/https://github.com/mendableai/firecrawl
synced 2025-08-15 00:45:52 +08:00
stuff
This commit is contained in:
parent
71d83a64ed
commit
9cb4987cdc
@ -172,34 +172,6 @@ describe("Concurrency Limit", () => {
|
|||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe("getConcurrencyLimitMax", () => {
|
|
||||||
it("should return correct limit for free plan", () => {
|
|
||||||
const result = getConcurrencyLimitMax("free");
|
|
||||||
expect(result).toBe(2);
|
|
||||||
});
|
|
||||||
|
|
||||||
it("should return correct limit for standard plan", () => {
|
|
||||||
const result = getConcurrencyLimitMax("standard");
|
|
||||||
expect(result).toBe(CONCURRENCY_LIMIT.standard);
|
|
||||||
});
|
|
||||||
|
|
||||||
it("should return correct limit for scale plan", () => {
|
|
||||||
const result = getConcurrencyLimitMax("scale");
|
|
||||||
expect(result).toBe(CONCURRENCY_LIMIT.scale);
|
|
||||||
});
|
|
||||||
|
|
||||||
it("should return default limit for unknown plan", () => {
|
|
||||||
const result = getConcurrencyLimitMax("unknown");
|
|
||||||
expect(result).toBe(10);
|
|
||||||
});
|
|
||||||
|
|
||||||
it("should handle special team IDs", () => {
|
|
||||||
process.env.DEV_B_TEAM_ID = "dev-b-team";
|
|
||||||
const result = getConcurrencyLimitMax("free", "dev-b-team");
|
|
||||||
expect(result).toBe(120);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe("Integration Scenarios", () => {
|
describe("Integration Scenarios", () => {
|
||||||
it("should handle complete job lifecycle", async () => {
|
it("should handle complete job lifecycle", async () => {
|
||||||
const mockJob: ConcurrencyLimitedJob = {
|
const mockJob: ConcurrencyLimitedJob = {
|
||||||
|
@ -7,6 +7,7 @@ import {
|
|||||||
removeConcurrencyLimitActiveJob,
|
removeConcurrencyLimitActiveJob,
|
||||||
} from "../lib/concurrency-limit";
|
} from "../lib/concurrency-limit";
|
||||||
import { WebScraperOptions } from "../types";
|
import { WebScraperOptions } from "../types";
|
||||||
|
import { getACUCTeam } from "../controllers/auth";
|
||||||
|
|
||||||
// Mock all the dependencies
|
// Mock all the dependencies
|
||||||
const mockAdd = jest.fn();
|
const mockAdd = jest.fn();
|
||||||
@ -31,7 +32,6 @@ jest.mock("uuid", () => ({
|
|||||||
|
|
||||||
describe("Queue Concurrency Integration", () => {
|
describe("Queue Concurrency Integration", () => {
|
||||||
const mockTeamId = "test-team-id";
|
const mockTeamId = "test-team-id";
|
||||||
const mockPlan = "standard";
|
|
||||||
const mockNow = Date.now();
|
const mockNow = Date.now();
|
||||||
|
|
||||||
const defaultScrapeOptions = {
|
const defaultScrapeOptions = {
|
||||||
@ -102,8 +102,10 @@ describe("Queue Concurrency Integration", () => {
|
|||||||
|
|
||||||
it("should add job to concurrency queue when at concurrency limit", async () => {
|
it("should add job to concurrency queue when at concurrency limit", async () => {
|
||||||
// Mock current active jobs to be at limit
|
// Mock current active jobs to be at limit
|
||||||
const maxConcurrency = getConcurrencyLimitMax(mockPlan);
|
(getACUCTeam as jest.Mock).mockResolvedValue({
|
||||||
const activeJobs = Array(maxConcurrency).fill("active-job");
|
concurrency: 15,
|
||||||
|
} as any);
|
||||||
|
const activeJobs = Array(15).fill("active-job");
|
||||||
(redisConnection.zrangebyscore as jest.Mock).mockResolvedValue(
|
(redisConnection.zrangebyscore as jest.Mock).mockResolvedValue(
|
||||||
activeJobs,
|
activeJobs,
|
||||||
);
|
);
|
||||||
@ -134,7 +136,6 @@ describe("Queue Concurrency Integration", () => {
|
|||||||
url: `https://test${i}.com`,
|
url: `https://test${i}.com`,
|
||||||
mode: "single_urls",
|
mode: "single_urls",
|
||||||
team_id: mockTeamId,
|
team_id: mockTeamId,
|
||||||
plan: mockPlan,
|
|
||||||
scrapeOptions: defaultScrapeOptions,
|
scrapeOptions: defaultScrapeOptions,
|
||||||
} as WebScraperOptions,
|
} as WebScraperOptions,
|
||||||
opts: {
|
opts: {
|
||||||
@ -144,7 +145,10 @@ describe("Queue Concurrency Integration", () => {
|
|||||||
}));
|
}));
|
||||||
|
|
||||||
it("should handle batch jobs respecting concurrency limits", async () => {
|
it("should handle batch jobs respecting concurrency limits", async () => {
|
||||||
const maxConcurrency = getConcurrencyLimitMax(mockPlan);
|
const maxConcurrency = 15;
|
||||||
|
(getACUCTeam as jest.Mock).mockResolvedValue({
|
||||||
|
concurrency: maxConcurrency,
|
||||||
|
} as any);
|
||||||
const totalJobs = maxConcurrency + 5; // Some jobs should go to queue
|
const totalJobs = maxConcurrency + 5; // Some jobs should go to queue
|
||||||
const mockJobs = createMockJobs(totalJobs);
|
const mockJobs = createMockJobs(totalJobs);
|
||||||
|
|
||||||
@ -178,7 +182,6 @@ describe("Queue Concurrency Integration", () => {
|
|||||||
id: "test-job",
|
id: "test-job",
|
||||||
data: {
|
data: {
|
||||||
team_id: mockTeamId,
|
team_id: mockTeamId,
|
||||||
plan: mockPlan,
|
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -216,7 +219,6 @@ describe("Queue Concurrency Integration", () => {
|
|||||||
id: "failing-job",
|
id: "failing-job",
|
||||||
data: {
|
data: {
|
||||||
team_id: mockTeamId,
|
team_id: mockTeamId,
|
||||||
plan: mockPlan,
|
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -15,7 +15,7 @@ import { deleteKey, getValue } from "../services/redis";
|
|||||||
import { setValue } from "../services/redis";
|
import { setValue } from "../services/redis";
|
||||||
import { validate } from "uuid";
|
import { validate } from "uuid";
|
||||||
import * as Sentry from "@sentry/node";
|
import * as Sentry from "@sentry/node";
|
||||||
import { AuthCreditUsageChunk } from "./v1/types";
|
import { AuthCreditUsageChunk, AuthCreditUsageChunkFromTeam } from "./v1/types";
|
||||||
// const { data, error } = await supabase_service
|
// const { data, error } = await supabase_service
|
||||||
// .from('api_keys')
|
// .from('api_keys')
|
||||||
// .select(`
|
// .select(`
|
||||||
@ -37,12 +37,13 @@ function normalizedApiIsUuid(potentialUuid: string): boolean {
|
|||||||
|
|
||||||
export async function setCachedACUC(
|
export async function setCachedACUC(
|
||||||
api_key: string,
|
api_key: string,
|
||||||
|
is_extract: boolean,
|
||||||
acuc:
|
acuc:
|
||||||
| AuthCreditUsageChunk
|
| AuthCreditUsageChunk
|
||||||
| null
|
| null
|
||||||
| ((acuc: AuthCreditUsageChunk) => AuthCreditUsageChunk | null),
|
| ((acuc: AuthCreditUsageChunk) => AuthCreditUsageChunk | null),
|
||||||
) {
|
) {
|
||||||
const cacheKeyACUC = `acuc_${api_key}`;
|
const cacheKeyACUC = `acuc_${api_key}_${is_extract ? "extract" : "scrape"}`;
|
||||||
const redLockKey = `lock_${cacheKeyACUC}`;
|
const redLockKey = `lock_${cacheKeyACUC}`;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
@ -77,7 +78,11 @@ export async function getACUC(
|
|||||||
useCache = true,
|
useCache = true,
|
||||||
mode?: RateLimiterMode,
|
mode?: RateLimiterMode,
|
||||||
): Promise<AuthCreditUsageChunk | null> {
|
): Promise<AuthCreditUsageChunk | null> {
|
||||||
const cacheKeyACUC = `acuc_${api_key}_${mode}`;
|
let isExtract =
|
||||||
|
mode === RateLimiterMode.Extract ||
|
||||||
|
mode === RateLimiterMode.ExtractStatus;
|
||||||
|
|
||||||
|
const cacheKeyACUC = `acuc_${api_key}_${isExtract ? "extract" : "scrape"}`;
|
||||||
|
|
||||||
if (useCache) {
|
if (useCache) {
|
||||||
const cachedACUC = await getValue(cacheKeyACUC);
|
const cachedACUC = await getValue(cacheKeyACUC);
|
||||||
@ -91,10 +96,6 @@ export async function getACUC(
|
|||||||
let error;
|
let error;
|
||||||
let retries = 0;
|
let retries = 0;
|
||||||
const maxRetries = 5;
|
const maxRetries = 5;
|
||||||
|
|
||||||
let isExtract =
|
|
||||||
mode === RateLimiterMode.Extract ||
|
|
||||||
mode === RateLimiterMode.ExtractStatus;
|
|
||||||
while (retries < maxRetries) {
|
while (retries < maxRetries) {
|
||||||
const client =
|
const client =
|
||||||
Math.random() > (2/3) ? supabase_rr_service : supabase_service;
|
Math.random() > (2/3) ? supabase_rr_service : supabase_service;
|
||||||
@ -129,7 +130,112 @@ export async function getACUC(
|
|||||||
|
|
||||||
// NOTE: Should we cache null chunks? - mogery
|
// NOTE: Should we cache null chunks? - mogery
|
||||||
if (chunk !== null && useCache) {
|
if (chunk !== null && useCache) {
|
||||||
setCachedACUC(api_key, chunk);
|
setCachedACUC(api_key, isExtract, chunk);
|
||||||
|
}
|
||||||
|
|
||||||
|
return chunk ? { ...chunk, is_extract: isExtract } : null;
|
||||||
|
} else {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function setCachedACUCTeam(
|
||||||
|
team_id: string,
|
||||||
|
is_extract: boolean,
|
||||||
|
acuc:
|
||||||
|
| AuthCreditUsageChunkFromTeam
|
||||||
|
| null
|
||||||
|
| ((acuc: AuthCreditUsageChunkFromTeam) => AuthCreditUsageChunkFromTeam | null),
|
||||||
|
) {
|
||||||
|
const cacheKeyACUC = `acuc_team_${team_id}_${is_extract ? "extract" : "scrape"}`;
|
||||||
|
const redLockKey = `lock_${cacheKeyACUC}`;
|
||||||
|
|
||||||
|
try {
|
||||||
|
await redlock.using([redLockKey], 10000, {}, async (signal) => {
|
||||||
|
if (typeof acuc === "function") {
|
||||||
|
acuc = acuc(JSON.parse((await getValue(cacheKeyACUC)) ?? "null"));
|
||||||
|
|
||||||
|
if (acuc === null) {
|
||||||
|
if (signal.aborted) {
|
||||||
|
throw signal.error;
|
||||||
|
}
|
||||||
|
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (signal.aborted) {
|
||||||
|
throw signal.error;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Cache for 1 hour. - mogery
|
||||||
|
await setValue(cacheKeyACUC, JSON.stringify(acuc), 3600, true);
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`Error updating cached ACUC ${cacheKeyACUC}: ${error}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function getACUCTeam(
|
||||||
|
team_id: string,
|
||||||
|
cacheOnly = false,
|
||||||
|
useCache = true,
|
||||||
|
mode?: RateLimiterMode,
|
||||||
|
): Promise<AuthCreditUsageChunkFromTeam | null> {
|
||||||
|
let isExtract =
|
||||||
|
mode === RateLimiterMode.Extract ||
|
||||||
|
mode === RateLimiterMode.ExtractStatus;
|
||||||
|
|
||||||
|
const cacheKeyACUC = `acuc_team_${team_id}_${isExtract ? "extract" : "scrape"}`;
|
||||||
|
|
||||||
|
if (useCache) {
|
||||||
|
const cachedACUC = await getValue(cacheKeyACUC);
|
||||||
|
if (cachedACUC !== null) {
|
||||||
|
return JSON.parse(cachedACUC);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!cacheOnly) {
|
||||||
|
let data;
|
||||||
|
let error;
|
||||||
|
let retries = 0;
|
||||||
|
const maxRetries = 5;
|
||||||
|
|
||||||
|
while (retries < maxRetries) {
|
||||||
|
const client =
|
||||||
|
Math.random() > (2/3) ? supabase_rr_service : supabase_service;
|
||||||
|
({ data, error } = await client.rpc(
|
||||||
|
"auth_credit_usage_chunk_28_from_team",
|
||||||
|
{ input_team: team_id, i_is_extract: isExtract, tally_untallied_credits: true },
|
||||||
|
{ get: true },
|
||||||
|
));
|
||||||
|
|
||||||
|
if (!error) {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.warn(
|
||||||
|
`Failed to retrieve authentication and credit usage data after ${retries}, trying again...`,
|
||||||
|
{ error }
|
||||||
|
);
|
||||||
|
retries++;
|
||||||
|
if (retries === maxRetries) {
|
||||||
|
throw new Error(
|
||||||
|
"Failed to retrieve authentication and credit usage data after 3 attempts: " +
|
||||||
|
JSON.stringify(error),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Wait for a short time before retrying
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, 200));
|
||||||
|
}
|
||||||
|
|
||||||
|
const chunk: AuthCreditUsageChunk | null =
|
||||||
|
data.length === 0 ? null : data[0].team_id === null ? null : data[0];
|
||||||
|
|
||||||
|
// NOTE: Should we cache null chunks? - mogery
|
||||||
|
if (chunk !== null && useCache) {
|
||||||
|
setCachedACUCTeam(team_id, isExtract, chunk);
|
||||||
}
|
}
|
||||||
|
|
||||||
return chunk ? { ...chunk, is_extract: isExtract } : null;
|
return chunk ? { ...chunk, is_extract: isExtract } : null;
|
||||||
|
@ -191,9 +191,9 @@ export async function crawlStatusWSController(
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
const { team_id, plan } = auth;
|
const { team_id } = auth;
|
||||||
|
|
||||||
req.auth = { team_id, plan };
|
req.auth = { team_id };
|
||||||
|
|
||||||
await crawlStatusWS(ws, req);
|
await crawlStatusWS(ws, req);
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
|
@ -1,6 +1,6 @@
|
|||||||
import { Request, Response } from "express";
|
import { Request, Response } from "express";
|
||||||
import { RequestWithAuth } from "./types";
|
import { RequestWithAuth } from "./types";
|
||||||
import { getACUC } from "../auth";
|
import { getACUCTeam } from "../auth";
|
||||||
import { logger } from "../../lib/logger";
|
import { logger } from "../../lib/logger";
|
||||||
|
|
||||||
export async function creditUsageController(
|
export async function creditUsageController(
|
||||||
@ -20,7 +20,7 @@ export async function creditUsageController(
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Otherwise fetch fresh data
|
// Otherwise fetch fresh data
|
||||||
const chunk = await getACUC(req.auth.team_id);
|
const chunk = await getACUCTeam(req.auth.team_id);
|
||||||
if (!chunk) {
|
if (!chunk) {
|
||||||
res.status(404).json({
|
res.status(404).json({
|
||||||
success: false,
|
success: false,
|
||||||
|
@ -1,6 +1,6 @@
|
|||||||
import { Request, Response } from "express";
|
import { Request, Response } from "express";
|
||||||
import { RequestWithAuth } from "./types";
|
import { RequestWithAuth } from "./types";
|
||||||
import { getACUC } from "../auth";
|
import { getACUC, getACUCTeam } from "../auth";
|
||||||
import { logger } from "../../lib/logger";
|
import { logger } from "../../lib/logger";
|
||||||
import { RateLimiterMode } from "../../types";
|
import { RateLimiterMode } from "../../types";
|
||||||
|
|
||||||
@ -21,7 +21,7 @@ export async function tokenUsageController(
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Otherwise fetch fresh data
|
// Otherwise fetch fresh data
|
||||||
const chunk = await getACUC(req.auth.team_id, false, true, RateLimiterMode.Extract);
|
const chunk = await getACUCTeam(req.auth.team_id, false, true, RateLimiterMode.Extract);
|
||||||
if (!chunk) {
|
if (!chunk) {
|
||||||
res.status(404).json({
|
res.status(404).json({
|
||||||
success: false,
|
success: false,
|
||||||
|
@ -768,6 +768,8 @@ export type AuthCreditUsageChunk = {
|
|||||||
is_extract?: boolean;
|
is_extract?: boolean;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
export type AuthCreditUsageChunkFromTeam = Omit<AuthCreditUsageChunk, "api_key">;
|
||||||
|
|
||||||
export interface RequestWithMaybeACUC<
|
export interface RequestWithMaybeACUC<
|
||||||
ReqParams = {},
|
ReqParams = {},
|
||||||
ReqBody = undefined,
|
ReqBody = undefined,
|
||||||
|
@ -1,4 +1,4 @@
|
|||||||
import { getACUC } from "../controllers/auth";
|
import { getACUC, getACUCTeam } from "../controllers/auth";
|
||||||
import { redisConnection } from "../services/queue-service";
|
import { redisConnection } from "../services/queue-service";
|
||||||
import { logger } from "./logger";
|
import { logger } from "./logger";
|
||||||
|
|
||||||
@ -40,7 +40,7 @@ export async function getJobPriority({
|
|||||||
}
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const acuc = await getACUC(team_id);
|
const acuc = await getACUCTeam(team_id);
|
||||||
|
|
||||||
const setKey = SET_KEY_PREFIX + team_id;
|
const setKey = SET_KEY_PREFIX + team_id;
|
||||||
|
|
||||||
|
@ -4,7 +4,7 @@ import { supabase_service } from "../supabase";
|
|||||||
import * as Sentry from "@sentry/node";
|
import * as Sentry from "@sentry/node";
|
||||||
import { Queue } from "bullmq";
|
import { Queue } from "bullmq";
|
||||||
import { withAuth } from "../../lib/withAuth";
|
import { withAuth } from "../../lib/withAuth";
|
||||||
import { getACUC, setCachedACUC } from "../../controllers/auth";
|
import { getACUC, setCachedACUC, setCachedACUCTeam } from "../../controllers/auth";
|
||||||
|
|
||||||
// Configuration constants
|
// Configuration constants
|
||||||
const BATCH_KEY = "billing_batch";
|
const BATCH_KEY = "billing_batch";
|
||||||
@ -298,7 +298,17 @@ async function supaBillTeam(
|
|||||||
// Update cached ACUC to reflect the new credit usage
|
// Update cached ACUC to reflect the new credit usage
|
||||||
(async () => {
|
(async () => {
|
||||||
for (const apiKey of (data ?? []).map((x) => x.api_key)) {
|
for (const apiKey of (data ?? []).map((x) => x.api_key)) {
|
||||||
await setCachedACUC(apiKey, (acuc) =>
|
await setCachedACUC(apiKey, is_extract, (acuc) =>
|
||||||
|
acuc
|
||||||
|
? {
|
||||||
|
...acuc,
|
||||||
|
credits_used: acuc.credits_used + credits,
|
||||||
|
adjusted_credits_used: acuc.adjusted_credits_used + credits,
|
||||||
|
remaining_credits: acuc.remaining_credits - credits,
|
||||||
|
}
|
||||||
|
: null,
|
||||||
|
);
|
||||||
|
await setCachedACUCTeam(team_id, is_extract, (acuc) =>
|
||||||
acuc
|
acuc
|
||||||
? {
|
? {
|
||||||
...acuc,
|
...acuc,
|
||||||
|
@ -12,7 +12,7 @@ import {
|
|||||||
import { logger } from "../lib/logger";
|
import { logger } from "../lib/logger";
|
||||||
import { sendNotificationWithCustomDays } from './notification/email_notification';
|
import { sendNotificationWithCustomDays } from './notification/email_notification';
|
||||||
import { shouldSendConcurrencyLimitNotification } from './notification/notification-check';
|
import { shouldSendConcurrencyLimitNotification } from './notification/notification-check';
|
||||||
import { getACUC } from "../controllers/auth";
|
import { getACUC, getACUCTeam } from "../controllers/auth";
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Checks if a job is a crawl or batch scrape based on its options
|
* Checks if a job is a crawl or batch scrape based on its options
|
||||||
@ -78,7 +78,7 @@ async function addScrapeJobRaw(
|
|||||||
webScraperOptions.team_id
|
webScraperOptions.team_id
|
||||||
) {
|
) {
|
||||||
const now = Date.now();
|
const now = Date.now();
|
||||||
maxConcurrency = (await getACUC(webScraperOptions.team_id))?.concurrency ?? 2;
|
maxConcurrency = (await getACUCTeam(webScraperOptions.team_id))?.concurrency ?? 2;
|
||||||
cleanOldConcurrencyLimitEntries(webScraperOptions.team_id, now);
|
cleanOldConcurrencyLimitEntries(webScraperOptions.team_id, now);
|
||||||
currentActiveConcurrency = (await getConcurrencyLimitActiveJobs(webScraperOptions.team_id, now)).length;
|
currentActiveConcurrency = (await getConcurrencyLimitActiveJobs(webScraperOptions.team_id, now)).length;
|
||||||
concurrencyLimited = currentActiveConcurrency >= maxConcurrency;
|
concurrencyLimited = currentActiveConcurrency >= maxConcurrency;
|
||||||
@ -171,7 +171,7 @@ export async function addScrapeJobs(
|
|||||||
|
|
||||||
if (jobs[0].data && jobs[0].data.team_id) {
|
if (jobs[0].data && jobs[0].data.team_id) {
|
||||||
const now = Date.now();
|
const now = Date.now();
|
||||||
maxConcurrency = (await getACUC(jobs[0].data.team_id))?.concurrency ?? 2;
|
maxConcurrency = (await getACUCTeam(jobs[0].data.team_id))?.concurrency ?? 2;
|
||||||
cleanOldConcurrencyLimitEntries(jobs[0].data.team_id, now);
|
cleanOldConcurrencyLimitEntries(jobs[0].data.team_id, now);
|
||||||
|
|
||||||
currentActiveConcurrency = (await getConcurrencyLimitActiveJobs(jobs[0].data.team_id, now)).length;
|
currentActiveConcurrency = (await getConcurrencyLimitActiveJobs(jobs[0].data.team_id, now)).length;
|
||||||
|
@ -1,369 +1,370 @@
|
|||||||
import {
|
// import {
|
||||||
getRateLimiter,
|
// getRateLimiter,
|
||||||
serverRateLimiter,
|
// serverRateLimiter,
|
||||||
redisRateLimitClient,
|
// redisRateLimitClient,
|
||||||
} from "./rate-limiter";
|
// } from "./rate-limiter";
|
||||||
import { RateLimiterMode } from "../../src/types";
|
// import { RateLimiterMode } from "../../src/types";
|
||||||
import { RateLimiterRedis } from "rate-limiter-flexible";
|
// import { RateLimiterRedis } from "rate-limiter-flexible";
|
||||||
|
|
||||||
describe("Rate Limiter Service", () => {
|
// describe("Rate Limiter Service", () => {
|
||||||
beforeAll(async () => {
|
// beforeAll(async () => {
|
||||||
try {
|
// try {
|
||||||
await redisRateLimitClient.connect();
|
// await redisRateLimitClient.connect();
|
||||||
// if (process.env.REDIS_RATE_LIMIT_URL === "redis://localhost:6379") {
|
// // if (process.env.REDIS_RATE_LIMIT_URL === "redis://localhost:6379") {
|
||||||
// console.log("Erasing all keys");
|
// // console.log("Erasing all keys");
|
||||||
// // erase all the keys that start with "test-prefix"
|
// // // erase all the keys that start with "test-prefix"
|
||||||
// const keys = await redisRateLimitClient.keys("test-prefix:*");
|
// // const keys = await redisRateLimitClient.keys("test-prefix:*");
|
||||||
// if (keys.length > 0) {
|
// // if (keys.length > 0) {
|
||||||
// await redisRateLimitClient.del(...keys);
|
// // await redisRateLimitClient.del(...keys);
|
||||||
// }
|
// // }
|
||||||
// }
|
// // }
|
||||||
} catch (error) {}
|
// } catch (error) {}
|
||||||
});
|
// });
|
||||||
|
|
||||||
afterAll(async () => {
|
// afterAll(async () => {
|
||||||
try {
|
// try {
|
||||||
// if (process.env.REDIS_RATE_LIMIT_URL === "redis://localhost:6379") {
|
// // if (process.env.REDIS_RATE_LIMIT_URL === "redis://localhost:6379") {
|
||||||
await redisRateLimitClient.disconnect();
|
// await redisRateLimitClient.disconnect();
|
||||||
// }
|
// // }
|
||||||
} catch (error) {}
|
// } catch (error) {}
|
||||||
});
|
// });
|
||||||
|
|
||||||
it("should return the testSuiteRateLimiter for specific tokens", () => {
|
// it("should return the testSuiteRateLimiter for specific tokens", () => {
|
||||||
const limiter = getRateLimiter(
|
// const limiter = getRateLimiter(
|
||||||
"crawl" as RateLimiterMode,
|
// "crawl" as RateLimiterMode,
|
||||||
"test-prefix:a01ccae",
|
// "test-prefix:a01ccae",
|
||||||
);
|
// );
|
||||||
expect(limiter).toBe(testSuiteRateLimiter);
|
// expect(limiter).toBe(testSuiteRateLimiter);
|
||||||
|
|
||||||
const limiter2 = getRateLimiter(
|
// const limiter2 = getRateLimiter(
|
||||||
"scrape" as RateLimiterMode,
|
// "scrape" as RateLimiterMode,
|
||||||
"test-prefix:6254cf9",
|
// "test-prefix:6254cf9",
|
||||||
);
|
// );
|
||||||
expect(limiter2).toBe(testSuiteRateLimiter);
|
// expect(limiter2).toBe(testSuiteRateLimiter);
|
||||||
});
|
// });
|
||||||
|
|
||||||
it("should return the serverRateLimiter if mode is not found", () => {
|
// it("should return the serverRateLimiter if mode is not found", () => {
|
||||||
const limiter = getRateLimiter(
|
// const limiter = getRateLimiter(
|
||||||
"nonexistent" as RateLimiterMode,
|
// "nonexistent" as RateLimiterMode,
|
||||||
"test-prefix:someToken",
|
// "test-prefix:someToken",
|
||||||
);
|
// );
|
||||||
expect(limiter.points).toBe(serverRateLimiter.points);
|
// expect(limiter.points).toBe(serverRateLimiter.points);
|
||||||
});
|
// });
|
||||||
|
|
||||||
it("should return the correct rate limiter based on mode and plan", () => {
|
// it("should return the correct rate limiter based on mode and plan", () => {
|
||||||
const limiter = getRateLimiter(
|
// const limiter = getRateLimiter(
|
||||||
"crawl" as RateLimiterMode,
|
// "crawl" as RateLimiterMode,
|
||||||
"test-prefix:someToken",
|
// "test-prefix:someToken",
|
||||||
"free",
|
// "free",
|
||||||
);
|
// );
|
||||||
expect(limiter.points).toBe(2);
|
// expect(limiter.points).toBe(2);
|
||||||
|
|
||||||
const limiter2 = getRateLimiter(
|
// const limiter2 = getRateLimiter(
|
||||||
"scrape" as RateLimiterMode,
|
// "scrape" as RateLimiterMode,
|
||||||
"test-prefix:someToken",
|
// "test-prefix:someToken",
|
||||||
"standard",
|
// "standard",
|
||||||
);
|
// );
|
||||||
expect(limiter2.points).toBe(100);
|
// expect(limiter2.points).toBe(100);
|
||||||
|
|
||||||
const limiter3 = getRateLimiter(
|
// const limiter3 = getRateLimiter(
|
||||||
"search" as RateLimiterMode,
|
// "search" as RateLimiterMode,
|
||||||
"test-prefix:someToken",
|
// "test-prefix:someToken",
|
||||||
"growth",
|
// "growth",
|
||||||
);
|
// );
|
||||||
expect(limiter3.points).toBe(500);
|
// expect(limiter3.points).toBe(500);
|
||||||
|
|
||||||
const limiter4 = getRateLimiter(
|
// const limiter4 = getRateLimiter(
|
||||||
"crawlStatus" as RateLimiterMode,
|
// "crawlStatus" as RateLimiterMode,
|
||||||
"test-prefix:someToken",
|
// "test-prefix:someToken",
|
||||||
"growth",
|
// "growth",
|
||||||
);
|
// );
|
||||||
expect(limiter4.points).toBe(250);
|
// expect(limiter4.points).toBe(250);
|
||||||
});
|
// });
|
||||||
|
|
||||||
it("should return the default rate limiter if plan is not provided", () => {
|
// it("should return the default rate limiter if plan is not provided", () => {
|
||||||
const limiter = getRateLimiter(
|
// const limiter = getRateLimiter(
|
||||||
"crawl" as RateLimiterMode,
|
// "crawl" as RateLimiterMode,
|
||||||
"test-prefix:someToken",
|
// "test-prefix:someToken",
|
||||||
);
|
// );
|
||||||
expect(limiter.points).toBe(3);
|
// expect(limiter.points).toBe(3);
|
||||||
|
|
||||||
const limiter2 = getRateLimiter(
|
// const limiter2 = getRateLimiter(
|
||||||
"scrape" as RateLimiterMode,
|
// "scrape" as RateLimiterMode,
|
||||||
"test-prefix:someToken",
|
// "test-prefix:someToken",
|
||||||
);
|
// );
|
||||||
expect(limiter2.points).toBe(20);
|
// expect(limiter2.points).toBe(20);
|
||||||
});
|
// });
|
||||||
|
|
||||||
it("should create a new RateLimiterRedis instance with correct parameters", () => {
|
// it("should create a new RateLimiterRedis instance with correct parameters", () => {
|
||||||
const keyPrefix = "test-prefix";
|
// const keyPrefix = "test-prefix";
|
||||||
const points = 10;
|
// const points = 10;
|
||||||
const limiter = new RateLimiterRedis({
|
// const limiter = new RateLimiterRedis({
|
||||||
storeClient: redisRateLimitClient,
|
// storeClient: redisRateLimitClient,
|
||||||
keyPrefix,
|
// keyPrefix,
|
||||||
points,
|
// points,
|
||||||
duration: 60,
|
// duration: 60,
|
||||||
});
|
// });
|
||||||
|
|
||||||
expect(limiter.keyPrefix).toBe(keyPrefix);
|
// expect(limiter.keyPrefix).toBe(keyPrefix);
|
||||||
expect(limiter.points).toBe(points);
|
// expect(limiter.points).toBe(points);
|
||||||
expect(limiter.duration).toBe(60);
|
// expect(limiter.duration).toBe(60);
|
||||||
});
|
// });
|
||||||
|
|
||||||
it("should return the correct rate limiter for 'preview' mode", () => {
|
// it("should return the correct rate limiter for 'preview' mode", () => {
|
||||||
const limiter = getRateLimiter(
|
// const limiter = getRateLimiter(
|
||||||
"preview" as RateLimiterMode,
|
// "preview" as RateLimiterMode,
|
||||||
"test-prefix:someToken",
|
// "test-prefix:someToken",
|
||||||
"free",
|
// "free",
|
||||||
);
|
// );
|
||||||
expect(limiter.points).toBe(5);
|
// expect(limiter.points).toBe(5);
|
||||||
|
|
||||||
const limiter2 = getRateLimiter(
|
// const limiter2 = getRateLimiter(
|
||||||
"preview" as RateLimiterMode,
|
// "preview" as RateLimiterMode,
|
||||||
"test-prefix:someToken",
|
// "test-prefix:someToken",
|
||||||
);
|
// );
|
||||||
expect(limiter2.points).toBe(5);
|
// expect(limiter2.points).toBe(5);
|
||||||
});
|
// });
|
||||||
|
|
||||||
it("should return the correct rate limiter for 'account' mode", () => {
|
// it("should return the correct rate limiter for 'account' mode", () => {
|
||||||
const limiter = getRateLimiter(
|
// const limiter = getRateLimiter(
|
||||||
"account" as RateLimiterMode,
|
// "account" as RateLimiterMode,
|
||||||
"test-prefix:someToken",
|
// "test-prefix:someToken",
|
||||||
"free",
|
// "free",
|
||||||
);
|
// );
|
||||||
expect(limiter.points).toBe(100);
|
// expect(limiter.points).toBe(100);
|
||||||
|
|
||||||
const limiter2 = getRateLimiter(
|
// const limiter2 = getRateLimiter(
|
||||||
"account" as RateLimiterMode,
|
// "account" as RateLimiterMode,
|
||||||
"test-prefix:someToken",
|
// "test-prefix:someToken",
|
||||||
);
|
// );
|
||||||
expect(limiter2.points).toBe(100);
|
// expect(limiter2.points).toBe(100);
|
||||||
});
|
// });
|
||||||
|
|
||||||
it("should return the correct rate limiter for 'crawlStatus' mode", () => {
|
// it("should return the correct rate limiter for 'crawlStatus' mode", () => {
|
||||||
const limiter = getRateLimiter(
|
// const limiter = getRateLimiter(
|
||||||
"crawlStatus" as RateLimiterMode,
|
// "crawlStatus" as RateLimiterMode,
|
||||||
"test-prefix:someToken",
|
// "test-prefix:someToken",
|
||||||
"free",
|
// "free",
|
||||||
);
|
// );
|
||||||
expect(limiter.points).toBe(150);
|
// expect(limiter.points).toBe(150);
|
||||||
|
|
||||||
const limiter2 = getRateLimiter(
|
// const limiter2 = getRateLimiter(
|
||||||
"crawlStatus" as RateLimiterMode,
|
// "crawlStatus" as RateLimiterMode,
|
||||||
"test-prefix:someToken",
|
// "test-prefix:someToken",
|
||||||
);
|
// );
|
||||||
expect(limiter2.points).toBe(250);
|
// expect(limiter2.points).toBe(250);
|
||||||
});
|
// });
|
||||||
|
|
||||||
it("should consume points correctly for 'crawl' mode", async () => {
|
// it("should consume points correctly for 'crawl' mode", async () => {
|
||||||
const limiter = getRateLimiter(
|
// const limiter = getRateLimiter(
|
||||||
"crawl" as RateLimiterMode,
|
// "crawl" as RateLimiterMode,
|
||||||
"test-prefix:someTokenCRAWL",
|
// "test-prefix:someTokenCRAWL",
|
||||||
"free",
|
// "free",
|
||||||
);
|
// );
|
||||||
const consumePoints = 1;
|
// const consumePoints = 1;
|
||||||
|
|
||||||
const res = await limiter.consume(
|
// const res = await limiter.consume(
|
||||||
"test-prefix:someTokenCRAWL",
|
// "test-prefix:someTokenCRAWL",
|
||||||
consumePoints,
|
// consumePoints,
|
||||||
);
|
// );
|
||||||
expect(res.remainingPoints).toBe(1);
|
// expect(res.remainingPoints).toBe(1);
|
||||||
});
|
// });
|
||||||
|
|
||||||
it("should consume points correctly for 'scrape' mode (DEFAULT)", async () => {
|
// it("should consume points correctly for 'scrape' mode (DEFAULT)", async () => {
|
||||||
const limiter = getRateLimiter(
|
// const limiter = getRateLimiter(
|
||||||
"scrape" as RateLimiterMode,
|
// "scrape" as RateLimiterMode,
|
||||||
"test-prefix:someTokenX",
|
// "test-prefix:someTokenX",
|
||||||
);
|
// );
|
||||||
const consumePoints = 4;
|
// const consumePoints = 4;
|
||||||
|
|
||||||
const res = await limiter.consume("test-prefix:someTokenX", consumePoints);
|
// const res = await limiter.consume("test-prefix:someTokenX", consumePoints);
|
||||||
expect(res.remainingPoints).toBe(16);
|
// expect(res.remainingPoints).toBe(16);
|
||||||
});
|
// });
|
||||||
|
|
||||||
it("should consume points correctly for 'scrape' mode (HOBBY)", async () => {
|
// it("should consume points correctly for 'scrape' mode (HOBBY)", async () => {
|
||||||
const limiter = getRateLimiter(
|
// const limiter = getRateLimiter(
|
||||||
"scrape" as RateLimiterMode,
|
// "scrape" as RateLimiterMode,
|
||||||
"test-prefix:someTokenXY",
|
// "test-prefix:someTokenXY",
|
||||||
"hobby",
|
// "hobby",
|
||||||
);
|
// );
|
||||||
expect(limiter.points).toBe(20);
|
// expect(limiter.points).toBe(20);
|
||||||
|
|
||||||
const consumePoints = 5;
|
// const consumePoints = 5;
|
||||||
|
|
||||||
const res = await limiter.consume("test-prefix:someTokenXY", consumePoints);
|
// const res = await limiter.consume("test-prefix:someTokenXY", consumePoints);
|
||||||
expect(res.consumedPoints).toBe(5);
|
// expect(res.consumedPoints).toBe(5);
|
||||||
expect(res.remainingPoints).toBe(15);
|
// expect(res.remainingPoints).toBe(15);
|
||||||
});
|
// });
|
||||||
|
|
||||||
it("should return the correct rate limiter for 'crawl' mode", () => {
|
// it("should return the correct rate limiter for 'crawl' mode", () => {
|
||||||
const limiter = getRateLimiter(
|
// const limiter = getRateLimiter(
|
||||||
"crawl" as RateLimiterMode,
|
// "crawl" as RateLimiterMode,
|
||||||
"test-prefix:someToken",
|
// "test-prefix:someToken",
|
||||||
"free",
|
// "free",
|
||||||
);
|
// );
|
||||||
expect(limiter.points).toBe(2);
|
// expect(limiter.points).toBe(2);
|
||||||
|
|
||||||
const limiter2 = getRateLimiter(
|
// const limiter2 = getRateLimiter(
|
||||||
"crawl" as RateLimiterMode,
|
// "crawl" as RateLimiterMode,
|
||||||
"test-prefix:someToken",
|
// "test-prefix:someToken",
|
||||||
"starter",
|
// "starter",
|
||||||
);
|
// );
|
||||||
expect(limiter2.points).toBe(10);
|
// expect(limiter2.points).toBe(10);
|
||||||
|
|
||||||
const limiter3 = getRateLimiter(
|
// const limiter3 = getRateLimiter(
|
||||||
"crawl" as RateLimiterMode,
|
// "crawl" as RateLimiterMode,
|
||||||
"test-prefix:someToken",
|
// "test-prefix:someToken",
|
||||||
"standard",
|
// "standard",
|
||||||
);
|
// );
|
||||||
expect(limiter3.points).toBe(5);
|
// expect(limiter3.points).toBe(5);
|
||||||
});
|
// });
|
||||||
|
|
||||||
it("should return the correct rate limiter for 'scrape' mode", () => {
|
// it("should return the correct rate limiter for 'scrape' mode", () => {
|
||||||
const limiter = getRateLimiter(
|
// const limiter = getRateLimiter(
|
||||||
"scrape" as RateLimiterMode,
|
// "scrape" as RateLimiterMode,
|
||||||
"test-prefix:someToken",
|
// "test-prefix:someToken",
|
||||||
"free",
|
// "free",
|
||||||
);
|
// );
|
||||||
expect(limiter.points).toBe(10);
|
// expect(limiter.points).toBe(10);
|
||||||
|
|
||||||
const limiter2 = getRateLimiter(
|
// const limiter2 = getRateLimiter(
|
||||||
"scrape" as RateLimiterMode,
|
// "scrape" as RateLimiterMode,
|
||||||
"test-prefix:someToken",
|
// "test-prefix:someToken",
|
||||||
"starter",
|
// "starter",
|
||||||
);
|
// );
|
||||||
expect(limiter2.points).toBe(100);
|
// expect(limiter2.points).toBe(100);
|
||||||
|
|
||||||
const limiter3 = getRateLimiter(
|
// const limiter3 = getRateLimiter(
|
||||||
"scrape" as RateLimiterMode,
|
// "scrape" as RateLimiterMode,
|
||||||
"test-prefix:someToken",
|
// "test-prefix:someToken",
|
||||||
"standard",
|
// "standard",
|
||||||
);
|
// );
|
||||||
expect(limiter3.points).toBe(100);
|
// expect(limiter3.points).toBe(100);
|
||||||
|
|
||||||
const limiter4 = getRateLimiter(
|
// const limiter4 = getRateLimiter(
|
||||||
"scrape" as RateLimiterMode,
|
// "scrape" as RateLimiterMode,
|
||||||
"test-prefix:someToken",
|
// "test-prefix:someToken",
|
||||||
"growth",
|
// "growth",
|
||||||
);
|
// );
|
||||||
expect(limiter4.points).toBe(1000);
|
// expect(limiter4.points).toBe(1000);
|
||||||
});
|
// });
|
||||||
|
|
||||||
it("should return the correct rate limiter for 'search' mode", () => {
|
// it("should return the correct rate limiter for 'search' mode", () => {
|
||||||
const limiter = getRateLimiter(
|
// const limiter = getRateLimiter(
|
||||||
"search" as RateLimiterMode,
|
// "search" as RateLimiterMode,
|
||||||
"test-prefix:someToken",
|
// "test-prefix:someToken",
|
||||||
"free",
|
// "free",
|
||||||
);
|
// );
|
||||||
expect(limiter.points).toBe(5);
|
// expect(limiter.points).toBe(5);
|
||||||
|
|
||||||
const limiter2 = getRateLimiter(
|
// const limiter2 = getRateLimiter(
|
||||||
"search" as RateLimiterMode,
|
// "search" as RateLimiterMode,
|
||||||
"test-prefix:someToken",
|
// "test-prefix:someToken",
|
||||||
"starter",
|
// "starter",
|
||||||
);
|
// );
|
||||||
expect(limiter2.points).toBe(50);
|
// expect(limiter2.points).toBe(50);
|
||||||
|
|
||||||
const limiter3 = getRateLimiter(
|
// const limiter3 = getRateLimiter(
|
||||||
"search" as RateLimiterMode,
|
// "search" as RateLimiterMode,
|
||||||
"test-prefix:someToken",
|
// "test-prefix:someToken",
|
||||||
"standard",
|
// "standard",
|
||||||
);
|
// );
|
||||||
expect(limiter3.points).toBe(50);
|
// expect(limiter3.points).toBe(50);
|
||||||
});
|
// });
|
||||||
|
|
||||||
it("should return the correct rate limiter for 'preview' mode", () => {
|
// it("should return the correct rate limiter for 'preview' mode", () => {
|
||||||
const limiter = getRateLimiter(
|
// const limiter = getRateLimiter(
|
||||||
"preview" as RateLimiterMode,
|
// "preview" as RateLimiterMode,
|
||||||
"test-prefix:someToken",
|
// "test-prefix:someToken",
|
||||||
"free",
|
// "free",
|
||||||
);
|
// );
|
||||||
expect(limiter.points).toBe(5);
|
// expect(limiter.points).toBe(5);
|
||||||
|
|
||||||
const limiter2 = getRateLimiter(
|
// const limiter2 = getRateLimiter(
|
||||||
"preview" as RateLimiterMode,
|
// "preview" as RateLimiterMode,
|
||||||
"test-prefix:someToken",
|
// "test-prefix:someToken",
|
||||||
);
|
// );
|
||||||
expect(limiter2.points).toBe(5);
|
// expect(limiter2.points).toBe(5);
|
||||||
});
|
// });
|
||||||
|
|
||||||
it("should return the correct rate limiter for 'account' mode", () => {
|
// it("should return the correct rate limiter for 'account' mode", () => {
|
||||||
const limiter = getRateLimiter(
|
// const limiter = getRateLimiter(
|
||||||
"account" as RateLimiterMode,
|
// "account" as RateLimiterMode,
|
||||||
"test-prefix:someToken",
|
// "test-prefix:someToken",
|
||||||
"free",
|
// "free",
|
||||||
);
|
// );
|
||||||
expect(limiter.points).toBe(100);
|
// expect(limiter.points).toBe(100);
|
||||||
|
|
||||||
const limiter2 = getRateLimiter(
|
// const limiter2 = getRateLimiter(
|
||||||
"account" as RateLimiterMode,
|
// "account" as RateLimiterMode,
|
||||||
"test-prefix:someToken",
|
// "test-prefix:someToken",
|
||||||
);
|
// );
|
||||||
expect(limiter2.points).toBe(100);
|
// expect(limiter2.points).toBe(100);
|
||||||
});
|
// });
|
||||||
|
|
||||||
it("should return the correct rate limiter for 'crawlStatus' mode", () => {
|
// it("should return the correct rate limiter for 'crawlStatus' mode", () => {
|
||||||
const limiter = getRateLimiter(
|
// const limiter = getRateLimiter(
|
||||||
"crawlStatus" as RateLimiterMode,
|
// "crawlStatus" as RateLimiterMode,
|
||||||
"test-prefix:someToken",
|
// "test-prefix:someToken",
|
||||||
"free",
|
// "free",
|
||||||
);
|
// );
|
||||||
expect(limiter.points).toBe(150);
|
// expect(limiter.points).toBe(150);
|
||||||
|
|
||||||
const limiter2 = getRateLimiter(
|
// const limiter2 = getRateLimiter(
|
||||||
"crawlStatus" as RateLimiterMode,
|
// "crawlStatus" as RateLimiterMode,
|
||||||
"test-prefix:someToken",
|
// "test-prefix:someToken",
|
||||||
);
|
// );
|
||||||
expect(limiter2.points).toBe(250);
|
// expect(limiter2.points).toBe(250);
|
||||||
});
|
// });
|
||||||
|
|
||||||
it("should return the correct rate limiter for 'testSuite' mode", () => {
|
// it("should return the correct rate limiter for 'testSuite' mode", () => {
|
||||||
const limiter = getRateLimiter(
|
// const limiter = getRateLimiter(
|
||||||
"testSuite" as RateLimiterMode,
|
// "testSuite" as RateLimiterMode,
|
||||||
"test-prefix:someToken",
|
// "test-prefix:someToken",
|
||||||
"free",
|
// "free",
|
||||||
);
|
// );
|
||||||
expect(limiter.points).toBe(10000);
|
// expect(limiter.points).toBe(10000);
|
||||||
|
|
||||||
const limiter2 = getRateLimiter(
|
// const limiter2 = getRateLimiter(
|
||||||
"testSuite" as RateLimiterMode,
|
// "testSuite" as RateLimiterMode,
|
||||||
"test-prefix:someToken",
|
// "test-prefix:someToken",
|
||||||
);
|
// );
|
||||||
expect(limiter2.points).toBe(10000);
|
// expect(limiter2.points).toBe(10000);
|
||||||
});
|
// });
|
||||||
|
|
||||||
it("should throw an error when consuming more points than available", async () => {
|
// it("should throw an error when consuming more points than available", async () => {
|
||||||
const limiter = getRateLimiter(
|
// const limiter = getRateLimiter(
|
||||||
"crawl" as RateLimiterMode,
|
// "crawl" as RateLimiterMode,
|
||||||
"test-prefix:someToken",
|
// "test-prefix:someToken",
|
||||||
);
|
// );
|
||||||
const consumePoints = limiter.points + 1;
|
// const consumePoints = limiter.points + 1;
|
||||||
|
|
||||||
try {
|
// try {
|
||||||
await limiter.consume("test-prefix:someToken", consumePoints);
|
// await limiter.consume("test-prefix:someToken", consumePoints);
|
||||||
} catch (error) {
|
// } catch (error) {
|
||||||
// expect remaining points to be 0
|
// // expect remaining points to be 0
|
||||||
const res = await limiter.get("test-prefix:someToken");
|
// const res = await limiter.get("test-prefix:someToken");
|
||||||
expect(res?.remainingPoints).toBe(0);
|
// expect(res?.remainingPoints).toBe(0);
|
||||||
}
|
// }
|
||||||
});
|
// });
|
||||||
|
|
||||||
it("should reset points after duration", async () => {
|
// it("should reset points after duration", async () => {
|
||||||
const keyPrefix = "test-prefix";
|
// const keyPrefix = "test-prefix";
|
||||||
const points = 10;
|
// const points = 10;
|
||||||
const duration = 1; // 1 second
|
// const duration = 1; // 1 second
|
||||||
const limiter = new RateLimiterRedis({
|
// const limiter = new RateLimiterRedis({
|
||||||
storeClient: redisRateLimitClient,
|
// storeClient: redisRateLimitClient,
|
||||||
keyPrefix,
|
// keyPrefix,
|
||||||
points,
|
// points,
|
||||||
duration,
|
// duration,
|
||||||
});
|
// });
|
||||||
|
|
||||||
const consumePoints = 5;
|
// const consumePoints = 5;
|
||||||
await limiter.consume("test-prefix:someToken", consumePoints);
|
// await limiter.consume("test-prefix:someToken", consumePoints);
|
||||||
await new Promise((resolve) => setTimeout(resolve, duration * 1000 + 100)); // Wait for duration + 100ms
|
// await new Promise((resolve) => setTimeout(resolve, duration * 1000 + 100)); // Wait for duration + 100ms
|
||||||
|
|
||||||
const res = await limiter.consume("test-prefix:someToken", consumePoints);
|
// const res = await limiter.consume("test-prefix:someToken", consumePoints);
|
||||||
expect(res.remainingPoints).toBe(points - consumePoints);
|
// expect(res.remainingPoints).toBe(points - consumePoints);
|
||||||
});
|
// });
|
||||||
});
|
// });
|
||||||
|
// TODO: FIX
|
Loading…
x
Reference in New Issue
Block a user