refactor: remove unnecessary logs and default integration to null

Ademílson Tonato 2025-06-04 19:29:47 +01:00
parent 57a0aed484
commit 4c49bb9fc6
8 changed files with 9 additions and 16 deletions

View File

@@ -85,7 +85,6 @@ export async function batchScrapeController(
     appendToId: req.body.appendToId,
     account: req.account,
   });
-  logger.debug("[batch-scrape - batchScrapeController] Integration: " + req.body.integration);
   if (!req.body.appendToId) {
     await logCrawl(id, req.auth.team_id);

View File

@@ -31,7 +31,6 @@ export async function crawlController(
     originalRequest: preNormalizedBody,
     account: req.account,
   });
-  logger.debug("[crawl - crawlController] Integration: " + req.body.integration);
   await logCrawl(id, req.auth.team_id);

View File

@@ -83,7 +83,6 @@ export async function extractController(
     subId: req.acuc?.sub_id,
     extractId,
   });
-  _logger.debug("[extract - extractController] Integration: " + req.body.integration);
   const jobData = {
     request: req.body,

View File

@@ -331,7 +331,6 @@ export async function mapController(
     originalRequest,
     teamId: req.auth.team_id,
   });
-  logger.debug("[map - mapController] Integration: " + req.body.integration);
   let result: Awaited<ReturnType<typeof getMapResults>>;
   const abort = new AbortController();

View File

@@ -29,7 +29,6 @@ export async function scrapeController(
     teamId: req.auth.team_id,
     account: req.account,
   });
-  logger.debug("[scrape - scrapeController] Integration: " + req.body.integration);
   req.body = scrapeRequestSchema.parse(req.body);

View File

@@ -181,7 +181,6 @@ export async function searchController(
     query: req.body.query,
     origin: req.body.origin,
   });
-  logger.debug("[search - searchController] Integration: " + req.body.integration);
   let limit = req.body.limit;

View File

@@ -23,7 +23,6 @@ export enum IntegrationEnum {
   MAKE = "make",
   FLOWISE = "flowise",
   METAGPT = "metagpt",
-  UNKNOWN = "unknown",
 }
 
 export type Format =
@@ -485,7 +484,7 @@ export const extractV1Options = z
     enableWebSearch: z.boolean().default(false),
     scrapeOptions: baseScrapeOptions.default({ onlyMainContent: false }).optional(),
     origin: z.string().optional().default("api"),
-    integration: z.nativeEnum(IntegrationEnum).optional().transform(val => val || IntegrationEnum.UNKNOWN),
+    integration: z.nativeEnum(IntegrationEnum).optional().transform(val => val || null),
     urlTrace: z.boolean().default(false),
     timeout: z.number().int().positive().finite().safe().default(60000),
     __experimental_streamSteps: z.boolean().default(false),
@@ -544,7 +543,7 @@ export const scrapeRequestSchema = baseScrapeOptions
     extract: extractOptionsWithAgent.optional(),
     jsonOptions: extractOptionsWithAgent.optional(),
     origin: z.string().optional().default("api"),
-    integration: z.nativeEnum(IntegrationEnum).optional().transform(val => val || IntegrationEnum.UNKNOWN),
+    integration: z.nativeEnum(IntegrationEnum).optional().transform(val => val || null),
     timeout: z.number().int().positive().finite().safe().default(30000),
   })
   .strict(strictMessage)
@@ -579,7 +578,7 @@ export const batchScrapeRequestSchema = baseScrapeOptions
   .extend({
     urls: url.array(),
     origin: z.string().optional().default("api"),
-    integration: z.nativeEnum(IntegrationEnum).optional().transform(val => val || IntegrationEnum.UNKNOWN),
+    integration: z.nativeEnum(IntegrationEnum).optional().transform(val => val || null),
     webhook: webhookSchema.optional(),
     appendToId: z.string().uuid().optional(),
     ignoreInvalidURLs: z.boolean().default(false),
@@ -593,7 +592,7 @@ export const batchScrapeRequestSchemaNoURLValidation = baseScrapeOptions
   .extend({
     urls: z.string().array(),
     origin: z.string().optional().default("api"),
-    integration: z.nativeEnum(IntegrationEnum).optional().transform(val => val || IntegrationEnum.UNKNOWN),
+    integration: z.nativeEnum(IntegrationEnum).optional().transform(val => val || null),
     webhook: webhookSchema.optional(),
     appendToId: z.string().uuid().optional(),
     ignoreInvalidURLs: z.boolean().default(false),
@@ -641,7 +640,7 @@ export const crawlRequestSchema = crawlerOptions
   .extend({
     url,
     origin: z.string().optional().default("api"),
-    integration: z.nativeEnum(IntegrationEnum).optional().transform(val => val || IntegrationEnum.UNKNOWN),
+    integration: z.nativeEnum(IntegrationEnum).optional().transform(val => val || null),
     scrapeOptions: baseScrapeOptions.default({}),
     webhook: webhookSchema.optional(),
     limit: z.number().default(10000),
@@ -673,7 +672,7 @@ export const mapRequestSchema = crawlerOptions
   .extend({
     url,
     origin: z.string().optional().default("api"),
-    integration: z.nativeEnum(IntegrationEnum).optional().transform(val => val || IntegrationEnum.UNKNOWN),
+    integration: z.nativeEnum(IntegrationEnum).optional().transform(val => val || null),
     includeSubdomains: z.boolean().default(true),
     search: z.string().optional(),
     ignoreSitemap: z.boolean().default(false),
@@ -1194,7 +1193,7 @@ export const searchRequestSchema = z
     country: z.string().optional().default("us"),
     location: z.string().optional(),
     origin: z.string().optional().default("api"),
-    integration: z.nativeEnum(IntegrationEnum).optional().transform(val => val || IntegrationEnum.UNKNOWN),
+    integration: z.nativeEnum(IntegrationEnum).optional().transform(val => val || null),
     timeout: z.number().int().positive().finite().safe().default(60000),
     ignoreInvalidURLs: z.boolean().optional().default(false),
     __searchPreviewToken: z.string().optional(),
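The effect of the schema change above: when a request omits integration, the parsed value is now null instead of the removed UNKNOWN sentinel, so downstream code can tell "no integration" apart from a named one. A minimal sketch of the new behavior, assuming zod v3 and an abbreviated enum (not the full IntegrationEnum):

```ts
import { z } from "zod";

// Abbreviated stand-in for IntegrationEnum (illustration only).
enum IntegrationEnum {
  MAKE = "make",
  FLOWISE = "flowise",
}

// Mirrors the field definition from the diff: an omitted value is
// undefined after .optional(), and the transform maps it to null.
// (|| would also map "" to null, but nativeEnum never produces "".)
const integration = z
  .nativeEnum(IntegrationEnum)
  .optional()
  .transform((val) => val || null);

console.log(integration.parse(undefined)); // null   (previously "unknown")
console.log(integration.parse("make"));    // "make"
```

One consequence: the inferred output type becomes IntegrationEnum | null rather than IntegrationEnum, which is why the interfaces below widen to string | null.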

View File

@@ -44,7 +44,7 @@ export interface WebScraperOptions {
   sitemapped?: boolean;
   webhook?: z.infer<typeof webhookSchema>;
   v1?: boolean;
-  integration?: string;
+  integration?: string | null;
 
   /**
    * Disables billing on the worker side.
@@ -95,7 +95,7 @@ export interface FirecrawlJob {
   crawlerOptions?: any;
   scrapeOptions?: any;
   origin: string;
-  integration?: string;
+  integration?: string | null;
   num_tokens?: number;
   retry?: boolean;
   crawl_id?: string;
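Since the transform can now yield null, the job types widen integration to string | null. A small sketch (abbreviated interface, hypothetical describeIntegration helper) of why both the optional marker and the null union matter:

```ts
// Abbreviated from the FirecrawlJob interface above; illustration only.
interface FirecrawlJob {
  origin: string;
  integration?: string | null; // undefined: field never set; null: explicitly none
}

// Hypothetical helper: ?? treats undefined and null the same here.
function describeIntegration(job: FirecrawlJob): string {
  return job.integration ?? "no integration";
}

console.log(describeIntegration({ origin: "api" }));                      // "no integration"
console.log(describeIntegration({ origin: "api", integration: null }));   // "no integration"
console.log(describeIntegration({ origin: "api", integration: "make" })); // "make"
```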