// firecrawl/apps/api/src/main/runWebScraper.ts
import { Job } from "bull";
import { CrawlResult, WebScraperOptions, RunWebScraperParams, RunWebScraperResult } from "../types";
import { WebScraperDataProvider } from "../scraper/WebScraper";
import { DocumentUrl, Progress } from "../lib/entities";
import { billTeam } from "../services/billing/credit_billing";
import { Document } from "../lib/entities";
import { supabase_service } from "../services/supabase";
export async function startWebScraperPipeline({
job,
}: {
job: Job<WebScraperOptions>;
}) {
2024-05-04 12:30:12 -07:00
let partialDocs: Document[] = [];
2024-04-15 17:01:47 -04:00
return (await runWebScraper({
url: job.data.url,
mode: job.data.mode,
crawlerOptions: job.data.crawlerOptions,
2024-04-17 18:24:46 -07:00
pageOptions: job.data.pageOptions,
2024-04-15 17:01:47 -04:00
inProgress: (progress) => {
2024-05-13 13:57:22 -07:00
if (progress.currentDocument) {
partialDocs.push(progress.currentDocument);
2024-06-05 11:11:09 -07:00
if (partialDocs.length > 50) {
partialDocs = partialDocs.slice(-50);
}
2024-05-13 13:57:22 -07:00
job.progress({ ...progress, partialDocs: partialDocs });
}
2024-04-15 17:01:47 -04:00
},
onSuccess: (result) => {
saveJob(job, result);
2024-04-15 17:01:47 -04:00
},
onError: (error) => {
job.moveToFailed(error);
},
team_id: job.data.team_id,
2024-05-13 13:57:22 -07:00
bull_job_id: job.id.toString(),
2024-04-20 13:53:11 -07:00
})) as { success: boolean; message: string; docs: Document[] };
2024-04-15 17:01:47 -04:00
}
export async function runWebScraper({
url,
mode,
crawlerOptions,
2024-04-17 18:24:46 -07:00
pageOptions,
2024-04-15 17:01:47 -04:00
inProgress,
onSuccess,
onError,
team_id,
2024-05-06 17:16:43 -07:00
bull_job_id,
2024-06-25 12:20:25 -07:00
}: RunWebScraperParams): Promise<RunWebScraperResult> {
2024-04-15 17:01:47 -04:00
try {
const provider = new WebScraperDataProvider();
if (mode === "crawl") {
await provider.setOptions({
mode: mode,
urls: [url],
crawlerOptions: crawlerOptions,
2024-04-17 18:24:46 -07:00
pageOptions: pageOptions,
2024-05-13 13:57:22 -07:00
bullJobId: bull_job_id,
2024-04-15 17:01:47 -04:00
});
} else {
await provider.setOptions({
mode: mode,
urls: url.split(","),
crawlerOptions: crawlerOptions,
2024-05-13 13:57:22 -07:00
pageOptions: pageOptions,
2024-04-15 17:01:47 -04:00
});
}
const docs = (await provider.getDocuments(false, (progress: Progress) => {
inProgress(progress);
2024-04-20 11:59:42 -07:00
})) as Document[];
2024-04-20 11:46:06 -07:00
2024-04-15 17:01:47 -04:00
if (docs.length === 0) {
return {
success: true,
message: "No pages found",
2024-05-13 13:57:22 -07:00
docs: [],
2024-04-15 17:01:47 -04:00
};
}
// remove docs with empty content
2024-04-20 11:59:42 -07:00
const filteredDocs = crawlerOptions.returnOnlyUrls
? docs.map((doc) => {
if (doc.metadata.sourceURL) {
return { url: doc.metadata.sourceURL };
}
})
: docs.filter((doc) => doc.content.trim().length > 0);
2024-05-13 13:57:22 -07:00
const billingResult = await billTeam(team_id, filteredDocs.length);
2024-04-20 13:53:11 -07:00
2024-04-26 11:42:49 -03:00
if (!billingResult.success) {
2024-04-17 05:13:27 +00:00
// throw new Error("Failed to bill team, no subscription was found");
2024-04-15 17:01:47 -04:00
return {
success: false,
2024-04-17 05:13:27 +00:00
message: "Failed to bill team, no subscription was found",
2024-05-13 13:57:22 -07:00
docs: [],
2024-04-15 17:01:47 -04:00
};
}
2024-04-20 13:53:11 -07:00
// This is where the returnvalue from the job is set
onSuccess(filteredDocs);
// this return doesn't matter too much for the job completion result
2024-04-20 11:59:42 -07:00
return { success: true, message: "", docs: filteredDocs };
2024-04-15 17:01:47 -04:00
} catch (error) {
console.error("Error running web scraper", error);
onError(error);
return { success: false, message: error.message, docs: [] };
}
}
const saveJob = async (job: Job, result: any) => {
2024-06-27 16:00:45 -03:00
try {
if (process.env.USE_DB_AUTHENTICATION) {
const { data, error } = await supabase_service
.from("firecrawl_jobs")
.update({ docs: result })
.eq("job_id", job.id);
2024-06-27 16:00:45 -03:00
if (error) throw new Error(error.message);
await job.moveToCompleted(null);
} else {
await job.moveToCompleted(result);
}
} catch (error) {
console.error("Failed to update job status:", error);
}
}