fix handling of badly formatted URLs

Gergő Móricz 2024-11-20 20:18:40 +01:00
parent ba6f29cdda
commit e2ddc6c65c
2 changed files with 6 additions and 5 deletions

@@ -230,8 +230,11 @@ export class WebCrawler {
     const $ = load(html);
     $("a").each((_, element) => {
-      const href = $(element).attr("href");
+      let href = $(element).attr("href");
       if (href) {
+        if (href.match(/^https?:\/[^\/]/)) {
+          href = href.replace(/^https?:\/[^\/]/, "$&/");
+        }
         const u = this.filterURL(href, url);
         if (u !== null) {
           links.push(u);

@@ -352,12 +352,10 @@ async function processJob(job: Job & { id: string }, token: string) {
   if (job.data.crawlerOptions !== null) {
     if (!sc.cancelled) {
-      const newURL = new URL(doc.metadata.url ?? doc.metadata.sourceURL ?? sc.originUrl!);
-      const useNewURLAsBase = newURL.hostname.split(".").slice(-2).join(".") === new URL(sc.originUrl!).hostname.split(".").slice(-2).join(".");
-      const crawler = crawlToCrawler(job.data.crawl_id, sc, useNewURLAsBase ? newURL.href : undefined);
+      const crawler = crawlToCrawler(job.data.crawl_id, sc, doc.metadata.url ?? doc.metadata.sourceURL ?? sc.originUrl!);
       const links = crawler.filterLinks(
-        crawler.extractLinksFromHTML(rawHtml ?? "", doc.metadata?.url ?? doc.metadata?.sourceURL ?? sc.originUrl as string),
+        crawler.extractLinksFromHTML(rawHtml ?? "", doc.metadata?.url ?? doc.metadata?.sourceURL ?? sc.originUrl!),
         Infinity,
         sc.crawlerOptions?.maxDepth ?? 10
       );
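
The removed lines only switched the crawl base when the redirected URL shared its last two hostname labels with the origin (a rough registered-domain check); the new call passes the document's resolved URL (metadata.url, falling back to sourceURL, then the crawl's origin URL) directly. A minimal sketch of that removed comparison, assuming standalone hypothetical helpers:

```ts
// Hypothetical helpers (not the commit's code) illustrating the comparison
// that the removed `useNewURLAsBase` line performed.
const lastTwoLabels = (hostname: string): string =>
  hostname.split(".").slice(-2).join(".");

const sameRegisteredDomain = (a: string, b: string): boolean =>
  lastTwoLabels(new URL(a).hostname) === lastTwoLabels(new URL(b).hostname);

console.log(sameRegisteredDomain("https://blog.example.com/x", "https://example.com")); // true
console.log(sameRegisteredDomain("https://example.com", "https://other.org"));          // false
```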