mirror of https://git.mirrors.martin98.com/https://github.com/mendableai/firecrawl (synced 2025-08-12 07:19:03 +08:00)
un-gate scrape status, add test
parent 3ccef5fb66
commit c8a8e96acc
@@ -34,6 +34,23 @@ export async function scrape(body: ScrapeRequestInput): Promise<Document> {
   return raw.body.data;
 }
 
+export async function scrapeStatusRaw(jobId: string) {
+  return await request(TEST_URL)
+    .get("/v1/scrape/" + encodeURIComponent(jobId))
+    .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
+    .send();
+}
+
+export async function scrapeStatus(jobId: string): Promise<Document> {
+  const raw = await scrapeStatusRaw(jobId);
+  expect(raw.statusCode).toBe(200);
+  expect(raw.body.success).toBe(true);
+  expect(typeof raw.body.data).toBe("object");
+  expect(raw.body.data).not.toBeNull();
+  expect(raw.body.data).toBeDefined();
+  return raw.body.data;
+}
+
 // =========================================
 // Crawl API
 // =========================================
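For orientation: these helpers just exercise the scrape-status endpoint, a GET on /v1/scrape/:jobId with a bearer token that answers with a { success, data } envelope (that is what the assertions in scrapeStatus check). Below is a minimal sketch of calling the same endpoint directly with fetch, outside the supertest harness; the base URL and the FIRECRAWL_API_KEY variable are assumptions for illustration and are not part of this commit.

// Minimal sketch, assuming the base URL below and a { success, data, error? }
// response envelope matching the assertions in scrapeStatus above.
const BASE_URL = "https://api.firecrawl.dev"; // assumption; use your own deployment URL

export async function getScrapeStatus(jobId: string): Promise<unknown> {
  const res = await fetch(`${BASE_URL}/v1/scrape/${encodeURIComponent(jobId)}`, {
    headers: { Authorization: `Bearer ${process.env.FIRECRAWL_API_KEY}` },
  });
  if (!res.ok) {
    throw new Error(`scrape status request failed with HTTP ${res.status}`);
  }
  const body = await res.json();
  if (!body.success) {
    throw new Error(`scrape status error: ${body.error ?? "unknown"}`);
  }
  return body.data; // the scraped document, as returned by the scrapeStatus helper above
}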
@@ -1,4 +1,4 @@
-import { scrape } from "./lib";
+import { scrape, scrapeStatus } from "./lib";
 
 describe("Scrape tests", () => {
   it.concurrent("mocking works properly", async () => {
@@ -24,6 +24,17 @@ describe("Scrape tests", () => {
     expect(response.markdown).toContain("Firecrawl");
   }, 30000);
 
+  it.concurrent("scrape status works", async () => {
+    const response = await scrape({
+      url: "http://firecrawl.dev"
+    });
+
+    expect(response.markdown).toContain("Firecrawl");
+
+    const status = await scrapeStatus(response.metadata.scrapeId!);
+    expect(JSON.stringify(status)).toBe(JSON.stringify(response));
+  }, 60000);
+
   it.concurrent("handles non-UTF-8 encodings", async () => {
     const response = await scrape({
       url: "https://www.rtpro.yamaha.co.jp/RT/docs/misc/kanji-sjis.html",
@@ -3,19 +3,6 @@ import { supabaseGetJobByIdOnlyData } from "../../lib/supabase-jobs";
 import { getJob } from "./crawl-status";
 
 export async function scrapeStatusController(req: any, res: any) {
-  const allowedTeams = [
-    "41bdbfe1-0579-4d9b-b6d5-809f16be12f5",
-    "511544f2-2fce-4183-9c59-6c29b02c69b5",
-    "1ec9a0b3-6e7d-49a9-ad6c-9c598ba824c8",
-  ];
-
-  if (!allowedTeams.includes(req.auth.team_id)) {
-    return res.status(403).json({
-      success: false,
-      error: "Forbidden",
-    });
-  }
-
   const job = await supabaseGetJobByIdOnlyData(req.params.jobId);
 
   if (!job) {
@@ -26,7 +13,6 @@ export async function scrapeStatusController(req: any, res: any) {
   }
 
   if (
-    !allowedTeams.includes(job?.team_id) ||
     job?.team_id !== req.auth.team_id
   ) {
     return res.status(403).json({
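Net effect of the controller change: the hard-coded team allowlist is gone, and the only remaining access control is the job-ownership check against req.auth.team_id. A simplified sketch of the resulting flow follows; it is not the verbatim source, and the not-found body and the final success response are elided in the diff and assumed here.

// Simplified sketch of scrapeStatusController after un-gating; illustrative only.
import { supabaseGetJobByIdOnlyData } from "../../lib/supabase-jobs";

export async function scrapeStatusControllerSketch(req: any, res: any) {
  const job = await supabaseGetJobByIdOnlyData(req.params.jobId);

  if (!job) {
    // Response body assumed; the actual not-found branch lives in the elided lines.
    return res.status(404).json({ success: false, error: "Job not found" });
  }

  // Any authenticated team may now query scrape status, but only for its own jobs.
  if (job?.team_id !== req.auth.team_id) {
    return res.status(403).json({ success: false, error: "Forbidden" });
  }

  // ... return the stored scrape data (elided in the diff)
}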