2024-10-28 16:02:07 -03:00
|
|
|
import { Request, Response } from "express";
|
|
|
|
import {
|
2024-11-12 18:44:14 -03:00
|
|
|
// Document,
|
2024-10-28 16:02:07 -03:00
|
|
|
RequestWithAuth,
|
|
|
|
ExtractRequest,
|
|
|
|
extractRequestSchema,
|
|
|
|
ExtractResponse,
|
|
|
|
MapDocument,
|
2024-11-12 18:44:14 -03:00
|
|
|
scrapeOptions,
|
2024-10-28 16:02:07 -03:00
|
|
|
} from "./types";
|
2024-11-12 18:44:14 -03:00
|
|
|
import { Document } from "../../lib/entities";
|
|
|
|
import Redis from "ioredis";
|
|
|
|
import { configDotenv } from "dotenv";
|
|
|
|
import { performRanking } from "../../lib/ranker";
|
|
|
|
import { billTeam } from "../../services/billing/credit_billing";
|
|
|
|
import { logJob } from "../../services/logging/log_job";
|
|
|
|
import { logger } from "../../lib/logger";
|
|
|
|
import { getScrapeQueue } from "../../services/queue-service";
|
|
|
|
import { waitForJob } from "../../services/queue-jobs";
|
|
|
|
import { addScrapeJob } from "../../services/queue-jobs";
|
|
|
|
import { PlanType } from "../../types";
|
|
|
|
import { getJobPriority } from "../../lib/job-priority";
|
2024-11-13 18:06:20 -03:00
|
|
|
import { generateOpenAICompletions } from "../../scraper/scrapeURL/transformers/llmExtract";
|
|
|
|
import { isUrlBlocked } from "../../scraper/WebScraper/utils/blocklist";
|
2024-11-14 14:57:38 -05:00
|
|
|
import { getMapResults } from "./map";
|
2024-11-12 18:44:14 -03:00
|
|
|
|
|
|
|
// Load environment variables from .env before reading process.env below.
configDotenv();

// Shared Redis connection; REDIS_URL must be set (non-null asserted).
// NOTE(review): not referenced anywhere in this file — confirm it is still needed.
const redis = new Redis(process.env.REDIS_URL!);

// Hard cap on URLs per extract request.
// NOTE(review): not referenced in this file — presumably enforced elsewhere; verify.
const MAX_EXTRACT_LIMIT = 100;

// Maximum number of ranked links kept per glob pattern when a prompt is given.
const MAX_RANKING_LIMIT = 5;

// Minimum relevance score for a mapped link to survive ranking.
const SCORE_THRESHOLD = 0.75;
|
2024-10-28 16:02:07 -03:00
|
|
|
|
|
|
|
export async function extractController(
|
|
|
|
req: RequestWithAuth<{}, ExtractResponse, ExtractRequest>,
|
2024-11-13 18:06:20 -03:00
|
|
|
res: Response<ExtractResponse>
|
2024-10-28 16:02:07 -03:00
|
|
|
) {
|
|
|
|
req.body = extractRequestSchema.parse(req.body);
|
|
|
|
|
2024-11-12 18:44:14 -03:00
|
|
|
const id = crypto.randomUUID();
|
2024-11-14 14:57:38 -05:00
|
|
|
let links: string[] = [];
|
|
|
|
let docs: Document[] = [];
|
|
|
|
const earlyReturn = false;
|
|
|
|
|
|
|
|
for (const url of req.body.urls) {
|
|
|
|
if (url.includes('/*')) {
|
|
|
|
// Handle glob pattern URLs
|
|
|
|
const baseUrl = url.replace('/*', '');
|
|
|
|
const pathPrefix = baseUrl.split('/').slice(3).join('/'); // Get path after domain if any
|
|
|
|
|
|
|
|
const allowExternalLinks = req.body.allowExternalLinks ?? true;
|
|
|
|
let urlWithoutWww = baseUrl.replace("www.", "");
|
|
|
|
let mapUrl = req.body.prompt && allowExternalLinks
|
|
|
|
? `${req.body.prompt} ${urlWithoutWww}`
|
|
|
|
: req.body.prompt ? `${req.body.prompt} site:${urlWithoutWww}`
|
|
|
|
: `site:${urlWithoutWww}`;
|
|
|
|
|
|
|
|
const mapResults = await getMapResults({
|
|
|
|
url: baseUrl,
|
|
|
|
search: req.body.prompt,
|
|
|
|
teamId: req.auth.team_id,
|
|
|
|
plan: req.auth.plan,
|
|
|
|
allowExternalLinks,
|
|
|
|
origin: req.body.origin,
|
|
|
|
limit: req.body.limit,
|
|
|
|
ignoreSitemap: false,
|
|
|
|
includeMetadata: true,
|
|
|
|
includeSubdomains: req.body.includeSubdomains,
|
2024-11-13 18:06:20 -03:00
|
|
|
});
|
2024-11-12 18:44:14 -03:00
|
|
|
|
2024-11-14 14:57:38 -05:00
|
|
|
let mappedLinks = mapResults.links.map(x => `url: ${x.url}, title: ${x.title}, description: ${x.description}`);
|
|
|
|
|
|
|
|
// Filter by path prefix if present
|
|
|
|
if (pathPrefix) {
|
|
|
|
mappedLinks = mappedLinks.filter(x => x.includes(`/${pathPrefix}/`));
|
|
|
|
}
|
2024-11-12 18:44:14 -03:00
|
|
|
|
2024-11-14 14:57:38 -05:00
|
|
|
if (req.body.prompt) {
|
|
|
|
const linksAndScores = await performRanking(mappedLinks, mapUrl);
|
|
|
|
mappedLinks = linksAndScores
|
|
|
|
.filter(x => x.score > SCORE_THRESHOLD)
|
|
|
|
.map(x => x.link.split("url: ")[1].split(",")[0])
|
|
|
|
.filter(x => !isUrlBlocked(x))
|
|
|
|
.slice(0, MAX_RANKING_LIMIT);
|
|
|
|
}
|
2024-11-13 18:06:20 -03:00
|
|
|
|
2024-11-14 14:57:38 -05:00
|
|
|
links.push(...mappedLinks);
|
2024-11-12 18:44:14 -03:00
|
|
|
|
2024-11-14 14:57:38 -05:00
|
|
|
} else {
|
|
|
|
// Handle direct URLs without glob pattern
|
|
|
|
if (!isUrlBlocked(url)) {
|
|
|
|
links.push(url);
|
|
|
|
}
|
|
|
|
}
|
2024-11-12 18:44:14 -03:00
|
|
|
}
|
|
|
|
|
2024-11-14 14:57:38 -05:00
|
|
|
// Scrape each link
|
2024-11-12 18:44:14 -03:00
|
|
|
for (const url of links) {
|
|
|
|
const origin = req.body.origin || "api";
|
2024-11-13 18:06:20 -03:00
|
|
|
const timeout = req.body.timeout ?? 30000;
|
2024-11-12 18:44:14 -03:00
|
|
|
const jobId = crypto.randomUUID();
|
|
|
|
|
|
|
|
const jobPriority = await getJobPriority({
|
|
|
|
plan: req.auth.plan as PlanType,
|
|
|
|
team_id: req.auth.team_id,
|
|
|
|
basePriority: 10,
|
|
|
|
});
|
|
|
|
|
|
|
|
await addScrapeJob(
|
|
|
|
{
|
|
|
|
url,
|
|
|
|
mode: "single_urls",
|
|
|
|
team_id: req.auth.team_id,
|
|
|
|
scrapeOptions: scrapeOptions.parse({}),
|
|
|
|
internalOptions: {},
|
|
|
|
plan: req.auth.plan!,
|
|
|
|
origin,
|
|
|
|
is_scrape: true,
|
|
|
|
},
|
|
|
|
{},
|
|
|
|
jobId,
|
|
|
|
jobPriority
|
|
|
|
);
|
|
|
|
|
2024-11-14 14:57:38 -05:00
|
|
|
const totalWait = 0;
|
2024-11-12 18:44:14 -03:00
|
|
|
|
|
|
|
let doc: Document;
|
|
|
|
try {
|
2024-11-14 14:57:38 -05:00
|
|
|
doc = await waitForJob<Document>(jobId, timeout + totalWait);
|
2024-11-12 18:44:14 -03:00
|
|
|
} catch (e) {
|
|
|
|
logger.error(`Error in scrapeController: ${e}`);
|
|
|
|
if (e instanceof Error && (e.message.startsWith("Job wait") || e.message === "timeout")) {
|
|
|
|
return res.status(408).json({
|
|
|
|
success: false,
|
|
|
|
error: "Request timed out",
|
|
|
|
});
|
|
|
|
} else {
|
|
|
|
return res.status(500).json({
|
|
|
|
success: false,
|
|
|
|
error: `(Internal server error) - ${(e && e.message) ? e.message : e}`,
|
|
|
|
});
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
await getScrapeQueue().remove(jobId);
|
|
|
|
|
|
|
|
if (earlyReturn) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
docs.push(doc);
|
|
|
|
}
|
|
|
|
|
2024-11-13 18:06:20 -03:00
|
|
|
const completions = await generateOpenAICompletions(
|
|
|
|
logger.child({ method: "extractController/generateOpenAICompletions" }),
|
|
|
|
{
|
|
|
|
mode: "llm",
|
|
|
|
systemPrompt: "Only use the provided content to answer the question.",
|
2024-11-14 14:57:38 -05:00
|
|
|
prompt: req.body.prompt,
|
2024-11-13 18:06:20 -03:00
|
|
|
schema: req.body.schema,
|
|
|
|
},
|
|
|
|
docs.map(x => x.markdown).join('\n')
|
|
|
|
);
|
2024-11-12 18:44:14 -03:00
|
|
|
|
2024-11-14 14:57:38 -05:00
|
|
|
// console.log("completions", completions);
|
2024-11-12 18:44:14 -03:00
|
|
|
|
|
|
|
// if(req.body.extract && req.body.formats.includes("extract")) {
|
|
|
|
// creditsToBeBilled = 5;
|
|
|
|
// }
|
|
|
|
|
|
|
|
// billTeam(req.auth.team_id, req.acuc?.sub_id, creditsToBeBilled).catch(error => {
|
|
|
|
// logger.error(`Failed to bill team ${req.auth.team_id} for ${creditsToBeBilled} credits: ${error}`);
|
|
|
|
// // Optionally, you could notify an admin or add to a retry queue here
|
|
|
|
// });
|
|
|
|
|
|
|
|
// if (!req.body.formats.includes("rawHtml")) {
|
|
|
|
// if (doc && doc.rawHtml) {
|
|
|
|
// delete doc.rawHtml;
|
|
|
|
// }
|
|
|
|
// }
|
|
|
|
|
|
|
|
// logJob({
|
|
|
|
// job_id: jobId,
|
|
|
|
// success: true,
|
|
|
|
// message: "Scrape completed",
|
|
|
|
// num_docs: 1,
|
|
|
|
// docs: [doc],
|
|
|
|
// time_taken: timeTakenInSeconds,
|
|
|
|
// team_id: req.auth.team_id,
|
|
|
|
// mode: "scrape",
|
|
|
|
// url: req.body.url,
|
|
|
|
// scrapeOptions: req.body,
|
|
|
|
// origin: origin,
|
|
|
|
// num_tokens: numTokens,
|
|
|
|
// });
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
// billTeam(teamId, subId, 1).catch((error) => {
|
|
|
|
// logger.error(
|
|
|
|
// `Failed to bill team ${teamId} for 1 credit: ${error}`
|
|
|
|
// );
|
|
|
|
// });
|
|
|
|
|
|
|
|
// const linksToReturn = links.slice(0, limit);
|
|
|
|
|
|
|
|
// logJob({
|
|
|
|
// job_id: id,
|
|
|
|
// success: links.length > 0,
|
|
|
|
// message: "Extract completed",
|
|
|
|
// num_docs: linksToReturn.length,
|
|
|
|
// docs: linksToReturn,
|
|
|
|
// time_taken: (new Date().getTime() - Date.now()) / 1000,
|
|
|
|
// team_id: teamId,
|
|
|
|
// mode: "extract",
|
|
|
|
// url: urls[0],
|
|
|
|
// crawlerOptions: {},
|
|
|
|
// scrapeOptions: {},
|
|
|
|
// origin: origin ?? "api",
|
|
|
|
// num_tokens: 0,
|
|
|
|
// });
|
|
|
|
|
|
|
|
// return {
|
|
|
|
|
|
|
|
// };
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
// const response = {
|
|
|
|
// success: true as const,
|
|
|
|
// data: result.data,
|
|
|
|
// scrape_id: result.scrape_id
|
|
|
|
// };
|
|
|
|
|
2024-11-13 18:06:20 -03:00
|
|
|
console.log("completions.extract", completions.extract);
|
|
|
|
|
|
|
|
let data: any;
|
|
|
|
try {
|
|
|
|
data = JSON.parse(completions.extract);
|
|
|
|
} catch (e) {
|
|
|
|
data = completions.extract;
|
|
|
|
}
|
|
|
|
|
2024-10-28 16:02:07 -03:00
|
|
|
return res.status(200).json({
|
|
|
|
success: true,
|
2024-11-14 14:57:38 -05:00
|
|
|
data: data,
|
|
|
|
scrape_id: id,
|
2024-10-28 16:02:07 -03:00
|
|
|
});
|
2024-11-12 18:44:14 -03:00
|
|
|
}
|