import { Response } from "express";
import { v4 as uuidv4 } from "uuid";
import {
  legacyCrawlerOptions,
  mapRequestSchema,
  RequestWithAuth,
  MapResponse,
  MapRequest,
} from "./types";
import { crawlToCrawler, StoredCrawl } from "../../lib/crawl-redis";
import { configDotenv } from "dotenv";
import {
  checkAndUpdateURLForMap,
  isSameDomain,
  isSameSubdomain,
  removeDuplicateUrls,
} from "../../lib/validateUrl";
import { fireEngineMap } from "../../search/fireEngine";
import { billTeam } from "../../services/billing/credit_billing";
import { logJob } from "../../services/logging/log_job";
import { performCosineSimilarity } from "../../lib/map-cosine";
import { Logger } from "../../lib/logger";

configDotenv();

export async function mapController(
  req: RequestWithAuth<{}, MapResponse, MapRequest>,
  res: Response<MapResponse>
) {
  const startTime = new Date().getTime();

  req.body = mapRequestSchema.parse(req.body);

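  // Hard cap on returned links; defaults to 5000 when the request omits a limit.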
  const limit: number = req.body.limit ?? 5000;

  const id = uuidv4();
  let links: string[] = [req.body.url];

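  // Synthesize a stored crawl record so crawlToCrawler can build a crawler for this URL.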
  const sc: StoredCrawl = {
    originUrl: req.body.url,
    crawlerOptions: legacyCrawlerOptions(req.body),
    pageOptions: {},
    team_id: req.auth.team_id,
    createdAt: Date.now(),
    plan: req.auth.plan,
  };

  const crawler = crawlToCrawler(id, sc);

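  // Seed links from the sitemap unless the caller opted out via ignoreSitemap.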
  const sitemap = req.body.ignoreSitemap ? null : await crawler.tryGetSitemap();

  if (sitemap !== null) {
    sitemap.forEach((x) => {
      links.push(x.url);
    });
  }

  const urlWithoutWww = req.body.url.replace("www.", "");

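  // Build a site:-scoped search query; a user-supplied search term is quoted and scoped to the bare domain.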
  const mapUrl = req.body.search
    ? `"${req.body.search}" site:${urlWithoutWww}`
    : `site:${req.body.url}`;

  // www. seems to exclude subdomains in some cases
  const mapResults = await fireEngineMap(mapUrl, {
    // limit to 100 results (beta)
    numResults: Math.min(limit, 100),
  });

  if (mapResults.length > 0) {
    if (req.body.search) {
      // Ensure all map results are first, maintaining their order
      links = [
        mapResults[0].url,
        ...mapResults.slice(1).map((x) => x.url),
        ...links,
      ];
    } else {
      mapResults.forEach((x) => {
        links.push(x.url);
      });
    }
  }

  // Perform cosine similarity between the search query and the list of links
  if (req.body.search) {
    const searchQuery = req.body.search.toLowerCase();
    links = performCosineSimilarity(links, searchQuery);
  }

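  // Normalize every candidate URL before domain filtering and deduplication.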
  links = links.map((x) => checkAndUpdateURLForMap(x).url.trim());

  // allows for subdomains to be included
  links = links.filter((x) => isSameDomain(x, req.body.url));

  // if includeSubdomains is false, filter out subdomains
  if (!req.body.includeSubdomains) {
    links = links.filter((x) => isSameSubdomain(x, req.body.url));
  }

  // remove duplicates that could be due to http/https or www
  links = removeDuplicateUrls(links);

  billTeam(req.auth.team_id, 1).catch((error) => {
    Logger.error(
      `Failed to bill team ${req.auth.team_id} for 1 credit: ${error}`
    );
    // Optionally, you could notify an admin or add to a retry queue here
  });

  const endTime = new Date().getTime();
  const timeTakenInSeconds = (endTime - startTime) / 1000;

  const linksToReturn = links.slice(0, limit);

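  // Record the completed job for logging/analytics.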
  logJob({
    job_id: id,
    success: links.length > 0,
    message: "Map completed",
    num_docs: linksToReturn.length,
    docs: linksToReturn,
    time_taken: timeTakenInSeconds,
    team_id: req.auth.team_id,
    mode: "map",
    url: req.body.url,
    crawlerOptions: {},
    pageOptions: {},
    origin: req.body.origin,
    extractor_options: { mode: "markdown" },
    num_tokens: 0,
  });

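  // Return at most `limit` links; surface the job id as scrape_id for website-origin requests.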
  return res.status(200).json({
    success: true,
    links: linksToReturn,
    scrape_id: req.body.origin?.includes("website") ? id : undefined,
  });
}
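
// Example client call (a sketch, not part of this module): this assumes the
// controller is mounted at POST /v1/map; the endpoint path, host, and request
// fields below are assumptions inferred from this handler and the parsed
// request body, not a documented contract.
//
//   const res = await fetch("https://api.example.com/v1/map", {
//     method: "POST",
//     headers: {
//       "Content-Type": "application/json",
//       Authorization: `Bearer ${apiKey}`,
//     },
//     body: JSON.stringify({
//       url: "https://example.com",
//       search: "docs",
//       limit: 100,
//       includeSubdomains: false,
//     }),
//   });
//   const { success, links } = await res.json();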