Nicolas 2024-09-01 15:06:36 -03:00
parent 980293652d
commit cb2dfe29be
2 changed files with 80 additions and 43 deletions

View File

@@ -266,12 +266,14 @@ async function processJob(job: Job, token: string) {
       );
     }
     if (job.data.webhook && job.data.mode !== "crawl" && job.data.v1) {
-      callWebhook(
+      await callWebhook(
         job.data.team_id,
         job.data.crawl_id,
         data,
         job.data.webhook,
-        job.data.v1
+        job.data.v1,
+        "crawl.page",
+        true
       );
     }
@@ -344,17 +346,7 @@ async function processJob(job: Job, token: string) {
     }
     if (await finishCrawl(job.data.crawl_id)) {
       // v1 web hooks, call when done with no data, but with event completed
-      if (job.data.v1 && job.data.webhook) {
-        callWebhook(
-          job.data.team_id,
-          job.data.crawl_id,
-          [],
-          job.data.webhook,
-          job.data.v1,
-          "crawl.completed"
-        );
-      }
       if (!job.data.v1) {
         const jobIDs = await getCrawlJobs(job.data.crawl_id);
@@ -400,7 +392,6 @@ async function processJob(job: Job, token: string) {
           docs: fullDocs,
         };
-        console.log(fullDocs.length);
         // v0 web hooks, call when done with all the data
         if (!job.data.v1) {
           callWebhook(
@@ -420,6 +411,18 @@ async function processJob(job: Job, token: string) {
             ? "failed"
             : "completed";
+        // v1 web hooks, call when done with no data, but with event completed
+        if (job.data.v1 && job.data.webhook) {
+          callWebhook(
+            job.data.team_id,
+            job.data.crawl_id,
+            [],
+            job.data.webhook,
+            job.data.v1,
+            "crawl.completed"
+          );
+        }
         await logJob({
           job_id: job.data.crawl_id,
           success: jobStatus === "completed",
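
Note on the worker-side hunks above: the v1 "crawl.page" webhook is now awaited before processJob continues, and the v1 "crawl.completed" webhook has been moved so it only fires after jobStatus has been resolved. A minimal sketch of the resulting call shape, assuming the callWebhook signature implied by the webhook file's diff below; the wrapper functions and the narrowed WebhookEventType union are illustrative, not part of this commit:

// Sketch only: parameter names beyond those visible in the diff are inferred.
type WebhookEventType = "crawl.page" | "crawl.completed";

declare function callWebhook(
  teamId: string,
  id: string,
  data: any | null,
  specified?: string,
  v1?: boolean,
  eventType?: WebhookEventType,
  awaitWebhook?: boolean
): Promise<void>;

async function notifyPageScraped(job: any, data: any[]) {
  // Awaited (awaitWebhook = true): a slow or failing endpoint now blocks
  // processJob until the request resolves or hits the axios timeout.
  await callWebhook(
    job.data.team_id,
    job.data.crawl_id,
    data,
    job.data.webhook,
    job.data.v1,
    "crawl.page",
    true
  );
}

function notifyCrawlCompleted(job: any) {
  // Fire-and-forget (awaitWebhook defaults to false), emitted only after
  // jobStatus is known.
  if (job.data.v1 && job.data.webhook) {
    callWebhook(
      job.data.team_id,
      job.data.crawl_id,
      [],
      job.data.webhook,
      job.data.v1,
      "crawl.completed"
    );
  }
}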

View File

@@ -10,7 +10,8 @@ export const callWebhook = async (
   data: any | null,
   specified?: string,
   v1 = false,
-  eventType: WebhookEventType = "crawl.page"
+  eventType: WebhookEventType = "crawl.page",
+  awaitWebhook: boolean = false
 ) => {
   try {
     const selfHostedUrl = process.env.SELF_HOSTED_WEBHOOK_URL?.replace(
@@ -64,36 +65,69 @@ export const callWebhook = async (
       }
     }
-    axios
-      .post(
-        webhookUrl,
-        {
-          success: !v1
-            ? data.success
-            : eventType === "crawl.page"
-            ? data.success
-            : true,
-          type: eventType,
-          [v1 ? "id" : "jobId"]: id,
-          data: dataToSend,
-          error: !v1
-            ? data?.error || undefined
-            : eventType === "crawl.page"
-            ? data?.error || undefined
-            : undefined,
-        },
-        {
-          headers: {
-            "Content-Type": "application/json",
-          },
-          timeout: v1 ? 10000 : 30000, // 10 seconds timeout (v1)
-        }
-      )
-      .catch((error) => {
-        Logger.error(
-          `Axios error sending webhook for team ID: ${teamId}, error: ${error.message}`
-        );
-      });
+    if (awaitWebhook) {
+      try {
+        await axios.post(
+          webhookUrl,
+          {
+            success: !v1
+              ? data.success
+              : eventType === "crawl.page"
+              ? data.success
+              : true,
+            type: eventType,
+            [v1 ? "id" : "jobId"]: id,
+            data: dataToSend,
+            error: !v1
+              ? data?.error || undefined
+              : eventType === "crawl.page"
+              ? data?.error || undefined
+              : undefined,
+          },
+          {
+            headers: {
+              "Content-Type": "application/json",
+            },
+            timeout: v1 ? 10000 : 30000, // 10 seconds timeout (v1)
+          }
+        );
+      } catch (error) {
+        Logger.error(
+          `Axios error (0) sending webhook for team ID: ${teamId}, error: ${error.message}`
+        );
+      }
+    } else {
+      axios
+        .post(
+          webhookUrl,
+          {
+            success: !v1
+              ? data.success
+              : eventType === "crawl.page"
+              ? data.success
+              : true,
+            type: eventType,
+            [v1 ? "id" : "jobId"]: id,
+            data: dataToSend,
+            error: !v1
+              ? data?.error || undefined
+              : eventType === "crawl.page"
+              ? data?.error || undefined
+              : undefined,
+          },
+          {
+            headers: {
+              "Content-Type": "application/json",
+            },
+            timeout: v1 ? 10000 : 30000, // 10 seconds timeout (v1)
+          }
+        )
+        .catch((error) => {
+          Logger.error(
+            `Axios error sending webhook for team ID: ${teamId}, error: ${error.message}`
+          );
+        });
+    }
   } catch (error) {
     Logger.debug(
       `Error sending webhook for team ID: ${teamId}, error: ${error.message}`
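
The new awaitWebhook branch duplicates the request body and the axios config between the awaited and the fire-and-forget paths. A possible follow-up, sketched under the assumption that the payload and config objects are not needed elsewhere; dispatchWebhook and the use of console.error in place of Logger.error are illustrative, not part of this commit:

import axios from "axios";

// Build the payload and config once, then vary only how the request is dispatched.
async function dispatchWebhook(
  webhookUrl: string,
  teamId: string,
  id: string,
  dataToSend: any,
  data: any,
  v1: boolean,
  eventType: string,
  awaitWebhook: boolean
) {
  const payload = {
    success: !v1 ? data.success : eventType === "crawl.page" ? data.success : true,
    type: eventType,
    [v1 ? "id" : "jobId"]: id,
    data: dataToSend,
    error: !v1
      ? data?.error || undefined
      : eventType === "crawl.page"
      ? data?.error || undefined
      : undefined,
  };
  const config = {
    headers: { "Content-Type": "application/json" },
    timeout: v1 ? 10000 : 30000, // 10 s for v1, 30 s for v0
  };

  if (awaitWebhook) {
    // Delivery failures (and timeouts) surface to the caller.
    await axios.post(webhookUrl, payload, config);
  } else {
    // Fire-and-forget: log the failure, never block or reject the caller.
    axios.post(webhookUrl, payload, config).catch((error) => {
      console.error(
        `Axios error sending webhook for team ID: ${teamId}, error: ${error.message}`
      );
    });
  }
}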