import { Request, Response } from "express";
import { Logger } from "../../../src/lib/logger";
import { checkAndUpdateURL } from "../../../src/lib/validateUrl";
import { MapRequest, mapRequestSchema, MapResponse, RequestWithAuth } from "./types";
import { checkTeamCredits } from "../../services/billing/credit_billing";
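
/**
 * WIP map controller: validates the request body against mapRequestSchema and,
 * for now, returns hardcoded placeholder links while the real map flow
 * (sketched in the commented-out code below) is wired up.
 */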
export async function mapController(
  req: RequestWithAuth<{}, MapResponse, MapRequest>,
  res: Response<MapResponse>
) {
  req.body = mapRequestSchema.parse(req.body);
  console.log(req.body);
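
  // NOTE: assuming mapRequestSchema is a Zod schema, parse() throws a ZodError on
  // invalid input, so a failing request never reaches the response below unless
  // error-handling middleware catches the throw. A hedged alternative (hypothetical;
  // the error response shape is not confirmed against MapResponse):
  //   const parsed = mapRequestSchema.safeParse(req.body);
  //   if (!parsed.success) return res.status(400).json({ success: false, error: parsed.error.message });
  //   req.body = parsed.data;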

  // expected req.body
  // req.body = {
  //   url: string
  //   crawlerOptions:
  // }

  return res.status(200).json({ success: true, links: [ "test1", "test2" ] });
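
  // The commented-out block below is the earlier crawl-style flow
  // (WebScraperDataProvider / addWebScraperJob), kept here for reference while
  // this endpoint is stubbed out.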

  // const mode = req.body.mode ?? "crawl";

  // const crawlerOptions = { ...defaultCrawlerOptions, ...req.body.crawlerOptions };
  // const pageOptions = { ...defaultCrawlPageOptions, ...req.body.pageOptions };

  // if (mode === "single_urls" && !url.includes(",")) { // NOTE: do we need this?
  //   try {
  //     const a = new WebScraperDataProvider();
  //     await a.setOptions({
  //       jobId: uuidv4(),
  //       mode: "single_urls",
  //       urls: [url],
  //       crawlerOptions: { ...crawlerOptions, returnOnlyUrls: true },
  //       pageOptions: pageOptions,
  //     });

  //     const docs = await a.getDocuments(false, (progress) => {
  //       job.progress({
  //         current: progress.current,
  //         total: progress.total,
  //         current_step: "SCRAPING",
  //         current_url: progress.currentDocumentUrl,
  //       });
  //     });
  //     return res.json({
  //       success: true,
  //       documents: docs,
  //     });
  //   } catch (error) {
  //     Logger.error(error);
  //     return res.status(500).json({ error: error.message });
  //   }
  // }

  // const job = await addWebScraperJob({
  //   url: url,
  //   mode: mode ?? "crawl", // fix for single urls not working
  //   crawlerOptions: crawlerOptions,
  //   team_id: team_id,
  //   pageOptions: pageOptions,
  //   origin: req.body.origin ?? defaultOrigin,
  // });

  // await logCrawl(job.id.toString(), team_id);

  // res.json({ jobId: job.id });
}
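
// Example request body implied by the "expected req.body" comment above
// (values are hypothetical; mapRequestSchema in ./types defines the accepted shape):
// { "url": "https://example.com", "crawlerOptions": {} }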