mirror of https://github.com/open-metadata/OpenMetadata.git
synced 2025-12-10 23:05:55 +00:00

This commit is contained in:
parent bca22514f3
commit 21603f89a8
@@ -12,7 +12,6 @@
 */

import { AxiosResponse } from 'axios';
import { Operation } from 'fast-json-patch';
import { CreateIngestionPipeline } from '../generated/api/services/ingestionPipelines/createIngestionPipeline';
import { getURLWithQueryFields } from '../utils/APIUtils';
import APIClient from './index';
@@ -57,16 +56,7 @@ export const deleteIngestionPipelineById = (
};

export const updateIngestionPipeline = (
  id: string,
  patch: Operation[]
  data: CreateIngestionPipeline
): Promise<AxiosResponse> => {
  const configOptions = {
    headers: { 'Content-type': 'application/json-patch+json' },
  };

  return APIClient.patch(
    `/services/ingestionPipelines/${id}`,
    patch,
    configOptions
  );
  return APIClient.put(`/services/ingestionPipelines`, data);
};
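The change above replaces the JSON-Patch update with a plain PUT upsert, so callers send the full CreateIngestionPipeline payload instead of an Operation[] diff. A minimal caller sketch (the single-argument signature is inferred from this hunk; the helper name is illustrative):

const resavePipeline = async (pipeline: CreateIngestionPipeline) => {
  // PUT /services/ingestionPipelines upserts the whole definition;
  // the 'application/json-patch+json' header is no longer required.
  const response = await updateIngestionPipeline(pipeline);

  return response.data;
};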
@@ -47,12 +47,12 @@ export const getServiceById: Function = (
};

export const getServiceByFQN: Function = (
  serviceName: string,
  serviceCat: string,
  fqn: string,
  arrQueryFields = ''
): Promise<AxiosResponse> => {
  const url = getURLWithQueryFields(
    `/services/${serviceName}/name/${fqn}`,
    `/services/${serviceCat}/name/${fqn}`,
    arrQueryFields
  );

@@ -60,25 +60,25 @@ export const getServiceByFQN: Function = (
};

export const postService: Function = (
  serviceName: string,
  serviceCat: string,
  options: ServiceOption
): Promise<AxiosResponse> => {
  return APIClient.post(`/services/${serviceName}`, options);
  return APIClient.post(`/services/${serviceCat}`, options);
};

export const updateService: Function = (
  serviceName: string,
  serviceCat: string,
  _id: string,
  options: ServiceOption
): Promise<AxiosResponse> => {
  return APIClient.put(`/services/${serviceName}`, options);
  return APIClient.put(`/services/${serviceCat}`, options);
};

export const deleteService: Function = (
  serviceName: string,
  serviceCat: string,
  id: string
): Promise<AxiosResponse> => {
  return APIClient.delete(`/services/${serviceName}/${id}`);
  return APIClient.delete(`/services/${serviceCat}/${id}`);
};

export const TestConnection = (
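With the serviceName → serviceCat rename, the first argument is the service-category path segment rather than a service's own name. A minimal caller sketch (the argument values are illustrative assumptions):

// Assumed example: 'databaseServices' is the category segment and 'owner'
// is an extra field for getURLWithQueryFields to append as a query string.
const fetchService = () =>
  getServiceByFQN('databaseServices', 'sample_data', 'owner');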
@@ -12,7 +12,7 @@
 */

import { isEmpty, isUndefined } from 'lodash';
import React, { useState } from 'react';
import React, { useMemo, useState } from 'react';
import {
  INGESTION_SCHEDULER_INITIAL_VALUE,
  INITIAL_FILTER_PATTERN,
@@ -23,8 +23,10 @@ import { FormSubmitType } from '../../enums/form.enum';
import {
  ConfigClass,
  CreateIngestionPipeline,
  PipelineType,
} from '../../generated/api/services/ingestionPipelines/createIngestionPipeline';
import {
  ConfigType,
  FilterPattern,
  IngestionPipeline,
} from '../../generated/entity/services/ingestionPipelines/ingestionPipeline';
@@ -88,7 +90,7 @@ const AddIngestion = ({
    )
  );
  const [includeView, setIncludeView] = useState(
    (data?.source.sourceConfig.config as ConfigClass)?.includeViews ?? false
    Boolean((data?.source.sourceConfig.config as ConfigClass)?.includeViews)
  );
  const [enableDataProfiler, setEnableDataProfiler] = useState(
    (data?.source.sourceConfig.config as ConfigClass)?.enableDataProfiler ??
@@ -120,6 +122,23 @@ const AddIngestion = ({
    INITIAL_FILTER_PATTERN
  );

  const [queryLogDuration, setQueryLogDuration] = useState<number>(
    (data?.source.sourceConfig.config as ConfigClass)?.queryLogDuration ?? 1
  );
  const [stageFileLocation, setStageFileLocation] = useState<string>(
    (data?.source.sourceConfig.config as ConfigClass)?.stageFileLocation ??
      '/tmp/query_log'
  );
  const [resultLimit, setResultLimit] = useState<number>(
    (data?.source.sourceConfig.config as ConfigClass)?.resultLimit ?? 100
  );
  const usageIngestionType = useMemo(() => {
    return (
      (data?.source.sourceConfig.config as ConfigClass)?.type ??
      ConfigType.DatabaseUsage
    );
  }, [data]);
  const getIncludeValue = (value: Array<string>, type: FilterPatternEnum) => {
    switch (type) {
      case FilterPatternEnum.DASHBOARD:
@@ -247,26 +266,37 @@ const AddIngestion = ({
        type: serviceCategory.slice(0, -1),
      },
      sourceConfig: {
        config: {
          enableDataProfiler: enableDataProfiler,
          generateSampleData: ingestSampleData,
          includeViews: includeView,
          schemaFilterPattern: getFilterPatternData(schemaFilterPattern),
          tableFilterPattern: getFilterPatternData(tableFilterPattern),
          chartFilterPattern: getFilterPatternData(chartFilterPattern),
          dashboardFilterPattern: getFilterPatternData(dashboardFilterPattern),
          topicFilterPattern: getFilterPatternData(topicFilterPattern),
        },
        config:
          pipelineType === PipelineType.Usage
            ? {
                queryLogDuration,
                resultLimit,
                stageFileLocation,
                type: usageIngestionType,
              }
            : {
                enableDataProfiler: enableDataProfiler,
                generateSampleData: ingestSampleData,
                includeViews: includeView,
                schemaFilterPattern: getFilterPatternData(schemaFilterPattern),
                tableFilterPattern: getFilterPatternData(tableFilterPattern),
                chartFilterPattern: getFilterPatternData(chartFilterPattern),
                dashboardFilterPattern: getFilterPatternData(
                  dashboardFilterPattern
                ),
                topicFilterPattern: getFilterPatternData(topicFilterPattern),
              },
      },
    };

    onAddIngestionSave(ingestionDetails).then(() => {
      if (showSuccessScreen) {
        setActiveIngestionStep(3);
      } else {
        onSuccessSave?.();
      }
    });
    onAddIngestionSave &&
      onAddIngestionSave(ingestionDetails).then(() => {
        if (showSuccessScreen) {
          setActiveIngestionStep(3);
        } else {
          onSuccessSave?.();
        }
      });
  };
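When pipelineType is Usage, the ternary above yields a much smaller config than the metadata branch. A sketch of the resulting sourceConfig (the field set comes from this hunk; the values are examples only):

const usageSourceConfig = {
  config: {
    queryLogDuration: 1, // days of query logs to look back (example)
    resultLimit: 100, // cap on fetched query-log rows (example)
    stageFileLocation: '/tmp/query_log', // staging file for logs (example)
    type: ConfigType.DatabaseUsage,
  },
};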
  const updateIngestion = () => {
@@ -276,7 +306,7 @@ const AddIngestion = ({
        airflowConfig: {
          ...data.airflowConfig,
          startDate: startDate as unknown as Date,
          endDate: endDate as unknown as Date,
          endDate: (endDate as unknown as Date) || null,
          scheduleInterval: repeatFrequency,
        },
        source: {
@@ -284,16 +314,29 @@ const AddIngestion = ({
          sourceConfig: {
            config: {
              ...(data.source.sourceConfig.config as ConfigClass),
              enableDataProfiler: enableDataProfiler,
              generateSampleData: ingestSampleData,
              includeViews: includeView,
              schemaFilterPattern: getFilterPatternData(schemaFilterPattern),
              tableFilterPattern: getFilterPatternData(tableFilterPattern),
              chartFilterPattern: getFilterPatternData(chartFilterPattern),
              dashboardFilterPattern: getFilterPatternData(
                dashboardFilterPattern
              ),
              topicFilterPattern: getFilterPatternData(topicFilterPattern),
              ...(pipelineType === PipelineType.Usage
                ? {
                    queryLogDuration,
                    resultLimit,
                    stageFileLocation,
                    type: usageIngestionType,
                  }
                : {
                    enableDataProfiler: enableDataProfiler,
                    generateSampleData: ingestSampleData,
                    includeViews: includeView,
                    schemaFilterPattern:
                      getFilterPatternData(schemaFilterPattern),
                    tableFilterPattern:
                      getFilterPatternData(tableFilterPattern),
                    chartFilterPattern:
                      getFilterPatternData(chartFilterPattern),
                    dashboardFilterPattern: getFilterPatternData(
                      dashboardFilterPattern
                    ),
                    topicFilterPattern:
                      getFilterPatternData(topicFilterPattern),
                  }),
            },
          },
        },
@@ -340,10 +383,16 @@ const AddIngestion = ({
        }
        handleIncludeView={() => setIncludeView((pre) => !pre)}
        handleIngestSampleData={() => setIngestSampleData((pre) => !pre)}
        handleQueryLogDuration={(val) => setQueryLogDuration(val)}
        handleResultLimit={(val) => setResultLimit(val)}
        handleShowFilter={handleShowFilter}
        handleStageFileLocation={(val) => setStageFileLocation(val)}
        includeView={includeView}
        ingestSampleData={ingestSampleData}
        ingestionName={ingestionName}
        pipelineType={pipelineType}
        queryLogDuration={queryLogDuration}
        resultLimit={resultLimit}
        schemaFilterPattern={schemaFilterPattern}
        serviceCategory={serviceCategory}
        showChartFilter={showChartFilter}
@@ -351,6 +400,7 @@ const AddIngestion = ({
        showSchemaFilter={showSchemaFilter}
        showTableFilter={showTableFilter}
        showTopicFilter={showTopicFilter}
        stageFileLocation={stageFileLocation}
        tableFilterPattern={tableFilterPattern}
        topicFilterPattern={topicFilterPattern}
        onCancel={handleConfigureIngestionCancelClick}
@@ -14,6 +14,7 @@
import { findAllByText, findByTestId, render } from '@testing-library/react';
import React from 'react';
import { ServiceCategory } from '../../../enums/service.enum';
import { PipelineType } from '../../../generated/entity/services/ingestionPipelines/ingestionPipeline';
import { ConfigureIngestionProps } from '../addIngestion.interface';
import ConfigureIngestion from './ConfigureIngestion';

@@ -50,6 +51,10 @@ const mockConfigureIngestion: ConfigureIngestionProps = {
    excludes: [],
  },
  includeView: false,
  pipelineType: PipelineType.Metadata,
  queryLogDuration: 1,
  resultLimit: 100,
  stageFileLocation: '',
  enableDataProfiler: false,
  ingestSampleData: false,
  showDashboardFilter: false,
@@ -60,6 +65,9 @@ const mockConfigureIngestion: ConfigureIngestionProps = {
  handleIncludeView: jest.fn(),
  handleEnableDataProfiler: jest.fn(),
  handleIngestSampleData: jest.fn(),
  handleQueryLogDuration: jest.fn(),
  handleResultLimit: jest.fn(),
  handleStageFileLocation: jest.fn(),
  getIncludeValue: jest.fn(),
  getExcludeValue: jest.fn(),
  handleShowFilter: jest.fn(),
@@ -14,6 +14,7 @@
import React, { Fragment } from 'react';
import { FilterPatternEnum } from '../../../enums/filterPattern.enum';
import { ServiceCategory } from '../../../enums/service.enum';
import { PipelineType } from '../../../generated/entity/services/ingestionPipelines/ingestionPipeline';
import { getSeparator } from '../../../utils/CommonUtils';
import { Button } from '../../buttons/Button/Button';
import FilterPattern from '../../common/FilterPattern/FilterPattern';
@@ -31,17 +32,24 @@ const ConfigureIngestion = ({
  serviceCategory,
  enableDataProfiler,
  ingestSampleData,
  pipelineType,
  showDashboardFilter,
  showSchemaFilter,
  showTableFilter,
  showTopicFilter,
  showChartFilter,
  queryLogDuration,
  stageFileLocation,
  resultLimit,
  getExcludeValue,
  getIncludeValue,
  handleShowFilter,
  handleEnableDataProfiler,
  handleIncludeView,
  handleIngestSampleData,
  handleQueryLogDuration,
  handleStageFileLocation,
  handleResultLimit,
  onCancel,
  onNext,
}: ConfigureIngestionProps) => {
@@ -124,52 +132,137 @@ const ConfigureIngestion = ({
    }
  };
  const getMetadataFields = () => {
    return (
      <>
        <div>{getFilterPatternField()}</div>
        {getSeparator('')}
        <div>
          <Field>
            <div className="tw-flex tw-gap-1">
              <label>Include views</label>
              <ToggleSwitchV1
                checked={includeView}
                handleCheck={handleIncludeView}
              />
            </div>
            <p className="tw-text-grey-muted tw-mt-3">
              Enable extracting views from the data source
            </p>
            {getSeparator('')}
          </Field>
          <Field>
            <div className="tw-flex tw-gap-1">
              <label>Enable Data Profiler</label>
              <ToggleSwitchV1
                checked={enableDataProfiler}
                handleCheck={handleEnableDataProfiler}
              />
            </div>
            <p className="tw-text-grey-muted tw-mt-3">
              Slows down metadata extraction by calculating the metrics and
              distribution of data in the table
            </p>
            {getSeparator('')}
          </Field>
          <Field>
            <div className="tw-flex tw-gap-1">
              <label>Ingest Sample Data</label>
              <ToggleSwitchV1
                checked={ingestSampleData}
                handleCheck={handleIngestSampleData}
              />
            </div>
            <p className="tw-text-grey-muted tw-mt-3">
              Extract sample data from each table
            </p>
            {getSeparator('')}
          </Field>
        </div>
      </>
    );
  };
  const getUsageFields = () => {
    return (
      <>
        <Field>
          <label
            className="tw-block tw-form-label tw-mb-1"
            htmlFor="query-log-duration">
            Query Log Duration
          </label>
          <p className="tw-text-grey-muted tw-mt-1 tw-mb-2 tw-text-sm">
            Configuration to tune how far we want to look back in query logs to
            process usage data.
          </p>
          <input
            className="tw-form-inputs tw-px-3 tw-py-1"
            data-testid="query-log-duration"
            id="query-log-duration"
            name="query-log-duration"
            type="number"
            value={queryLogDuration}
            onChange={(e) => handleQueryLogDuration(parseInt(e.target.value))}
          />
          {getSeparator('')}
        </Field>
        <Field>
          <label
            className="tw-block tw-form-label tw-mb-1"
            htmlFor="stage-file-location">
            Stage File Location
          </label>
          <p className="tw-text-grey-muted tw-mt-1 tw-mb-2 tw-text-sm">
            Temporary file name to store the query logs before processing.
            Absolute file path required.
          </p>
          <input
            className="tw-form-inputs tw-px-3 tw-py-1"
            data-testid="stage-file-location"
            id="stage-file-location"
            name="stage-file-location"
            type="text"
            value={stageFileLocation}
            onChange={(e) => handleStageFileLocation(e.target.value)}
          />
          {getSeparator('')}
        </Field>
        <Field>
          <label
            className="tw-block tw-form-label tw-mb-1"
            htmlFor="result-limit">
            Result Limit
          </label>
          <p className="tw-text-grey-muted tw-mt-1 tw-mb-2 tw-text-sm">
            Configuration to set the limit for query logs.
          </p>
          <input
            className="tw-form-inputs tw-px-3 tw-py-1"
            data-testid="result-limit"
            id="result-limit"
            name="result-limit"
            type="number"
            value={resultLimit}
            onChange={(e) => handleResultLimit(parseInt(e.target.value))}
          />
          {getSeparator('')}
        </Field>
      </>
    );
  };
  const getIngestionPipelineFields = () => {
    if (pipelineType === PipelineType.Usage) {
      return getUsageFields();
    } else {
      return getMetadataFields();
    }
  };
  return (
    <div className="tw-px-2" data-testid="configure-ingestion-container">
      <div>{getFilterPatternField()}</div>
      {getSeparator('')}
      <div>
        <Field>
          <div className="tw-flex tw-gap-1">
            <label>Include views</label>
            <ToggleSwitchV1
              checked={includeView}
              handleCheck={handleIncludeView}
            />
          </div>
          <p className="tw-text-grey-muted tw-mt-3">
            Enable extracting views from the data source
          </p>
          {getSeparator('')}
        </Field>
        <Field>
          <div className="tw-flex tw-gap-1">
            <label>Enable Data Profiler</label>
            <ToggleSwitchV1
              checked={enableDataProfiler}
              handleCheck={handleEnableDataProfiler}
            />
          </div>
          <p className="tw-text-grey-muted tw-mt-3">
            Slows down metadata extraction by calculating the metrics and
            distribution of data in the table
          </p>
          {getSeparator('')}
        </Field>
        <Field>
          <div className="tw-flex tw-gap-1">
            <label>Ingest Sample Data</label>
            <ToggleSwitchV1
              checked={ingestSampleData}
              handleCheck={handleIngestSampleData}
            />
          </div>
          <p className="tw-text-grey-muted tw-mt-3">
            Extract sample data from each table
          </p>
          {getSeparator('')}
        </Field>
      </div>
      {getIngestionPipelineFields()}

      <Field className="tw-flex tw-justify-end">
        <Button
@@ -33,7 +33,7 @@ export interface AddIngestionProps {
  showSuccessScreen?: boolean;
  setActiveIngestionStep: (step: number) => void;
  handleCancelClick: () => void;
  onAddIngestionSave: (ingestion: CreateIngestionPipeline) => Promise<void>;
  onAddIngestionSave?: (ingestion: CreateIngestionPipeline) => Promise<void>;
  onUpdateIngestion?: (
    data: IngestionPipeline,
    oldData: IngestionPipeline,
@@ -56,17 +56,24 @@ export interface ConfigureIngestionProps {
  includeView: boolean;
  enableDataProfiler: boolean;
  ingestSampleData: boolean;
  pipelineType: PipelineType;
  showDashboardFilter: boolean;
  showSchemaFilter: boolean;
  showTableFilter: boolean;
  showTopicFilter: boolean;
  showChartFilter: boolean;
  queryLogDuration: number;
  stageFileLocation: string;
  resultLimit: number;
  handleIncludeView: () => void;
  handleEnableDataProfiler: () => void;
  handleIngestSampleData: () => void;
  getIncludeValue: (value: string[], type: FilterPatternEnum) => void;
  getExcludeValue: (value: string[], type: FilterPatternEnum) => void;
  handleShowFilter: (value: boolean, type: FilterPatternEnum) => void;
  handleQueryLogDuration: (value: number) => void;
  handleStageFileLocation: (value: string) => void;
  handleResultLimit: (value: number) => void;
  onCancel: () => void;
  onNext: () => void;
}
@@ -16,10 +16,6 @@ import { LoadingState } from 'Models';
import React, { useState } from 'react';
import { useHistory } from 'react-router-dom';
import { getServiceDetailsPath, ROUTES } from '../../constants/constants';
import {
  addIngestionGuide,
  addServiceGuide,
} from '../../constants/service-guide.constant';
import { STEPS_FOR_ADD_SERVICE } from '../../constants/services.const';
import { FormSubmitType } from '../../enums/form.enum';
import { PageLayoutType } from '../../enums/layout.enum';
@@ -33,7 +29,7 @@ import {
import { getCurrentUserId } from '../../utils/CommonUtils';
import { getAddServicePath } from '../../utils/RouterUtils';
import {
  getFormattedGuideText,
  getServiceIngestionStepGuide,
  isIngestionSupported,
} from '../../utils/ServiceUtils';
import AddIngestion from '../AddIngestion/AddIngestion.component';
@@ -234,36 +230,14 @@ const AddService = ({
  };

  const fetchRightPanel = () => {
    let guide;
    if (addIngestion) {
      guide = addIngestionGuide.find(
        (item) => item.step === activeIngestionStep
      );
    } else {
      guide = addServiceGuide.find((item) => item.step === activeServiceStep);
    }
    const stepData = addIngestion ? activeIngestionStep : activeServiceStep;

    return (
      <>
        {guide && (
          <>
            <h6 className="tw-heading tw-text-base">{guide.title}</h6>
            <div className="tw-mb-5">
              {addIngestion
                ? getFormattedGuideText(
                    guide.description,
                    '<Ingestion Pipeline Name>',
                    `${serviceName}_${PipelineType.Metadata}`
                  )
                : getFormattedGuideText(
                    guide.description,
                    '<Service Name>',
                    serviceName
                  )}
            </div>
          </>
        )}
      </>
    return getServiceIngestionStepGuide(
      stepData,
      addIngestion,
      `${serviceName}_${PipelineType.Metadata}`,
      serviceName,
      PipelineType.Metadata
    );
  };

@@ -15,23 +15,26 @@ import { faExclamationCircle } from '@fortawesome/free-solid-svg-icons';
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
import classNames from 'classnames';
import cronstrue from 'cronstrue';
import { capitalize, isNil, isUndefined, lowerCase } from 'lodash';
import { capitalize, isNil, lowerCase } from 'lodash';
import React, { useCallback, useState } from 'react';
import { useHistory } from 'react-router-dom';
import { useAuthContext } from '../../authentication/auth-provider/AuthProvider';
import {
  PAGE_SIZE,
  TITLE_FOR_NON_ADMIN_ACTION,
} from '../../constants/constants';
import { FormSubmitType } from '../../enums/form.enum';
import {
  IngestionPipeline,
  PipelineType,
} from '../../generated/entity/services/ingestionPipelines/ingestionPipeline';
import { useAuth } from '../../hooks/authHooks';
import { isEven } from '../../utils/CommonUtils';
import {
  getAddIngestionPath,
  getEditIngestionPath,
} from '../../utils/RouterUtils';
import SVGIcons, { Icons } from '../../utils/SvgUtils';
import { showInfoToast } from '../../utils/ToastUtils';
import AddIngestion from '../AddIngestion/AddIngestion.component';
import { Button } from '../buttons/Button/Button';
import NextPrevious from '../common/next-previous/NextPrevious';
import NonAdminAction from '../common/non-admin-action/NonAdminAction';
@@ -43,32 +46,27 @@ import { IngestionProps, ModifiedConfig } from './ingestion.interface';
const Ingestion: React.FC<IngestionProps> = ({
  airflowEndpoint,
  serviceDetails,
  serviceName,
  serviceCategory,
  ingestionList,
  isRequiredDetailsAvailable,
  deleteIngestion,
  triggerIngestion,
  addIngestion,
  updateIngestion,
  paging,
  pagingHandler,
  currrentPage,
}: IngestionProps) => {
  const history = useHistory();
  const { isAdminUser } = useAuth();
  const { isAuthDisabled } = useAuthContext();
  const [searchText, setSearchText] = useState('');
  const [activeIngestionStep, setActiveIngestionStep] = useState(1);
  const [currTriggerId, setCurrTriggerId] = useState({ id: '', state: '' });
  const [showIngestionForm, setShowIngestionForm] = useState(false);
  const [isConfirmationModalOpen, setIsConfirmationModalOpen] = useState(false);
  const [deleteSelection, setDeleteSelection] = useState({
    id: '',
    name: '',
    state: '',
  });
  const [updateSelection, setUpdateSelection] = useState<IngestionPipeline>();
  const noConnectionMsg = `${serviceName} doesn't have connection details filled in. Please add the details before scheduling an ingestion job.`;

  const handleSearchAction = (searchValue: string) => {
@@ -117,13 +115,14 @@
  };

  const handleUpdate = (ingestion: IngestionPipeline) => {
    setUpdateSelection(ingestion);
    setShowIngestionForm(true);
  };

  const handleCancelUpdate = () => {
    setUpdateSelection(undefined);
    setShowIngestionForm(false);
    history.push(
      getEditIngestionPath(
        serviceCategory,
        serviceName,
        ingestion.fullyQualifiedName || `${serviceName}.${ingestion.name}`,
        ingestion.pipelineType
      )
    );
  };
  const handleDelete = (id: string, displayName: string) => {
@@ -150,15 +149,58 @@
  };

  const handleAddIngestionClick = () => {
    if (!getIngestionPipelineTypeOption().length) {
    const types = getIngestionPipelineTypeOption();
    if (!types.length) {
      showInfoToast(
        `${serviceName} already has all the supported ingestion jobs added.`
      );
    } else {
      setShowIngestionForm(true);
      history.push(getAddIngestionPath(serviceCategory, serviceName, types[0]));
    }
  };
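getIngestionPipelineTypeOption is used above but its body is not part of this diff; a plausible sketch of what such a helper computes (an assumption, not the repository's actual implementation) is the set of pipeline types the service does not yet have:

// Hypothetical sketch only: types[0] becomes the next ingestion job
// offered by the add button.
const getRemainingPipelineTypes = (): Array<PipelineType> => {
  const existing = ingestionList.map((pipeline) => pipeline.pipelineType);

  return [
    PipelineType.Metadata,
    PipelineType.Usage,
    PipelineType.Profiler,
  ].filter((type) => !existing.includes(type));
};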
  const getAddIngestionButton = () => {
    const types = getIngestionPipelineTypeOption();
    let buttonText;

    switch (types[0]) {
      case PipelineType.Metadata: {
        buttonText = 'Add Metadata Ingestion';

        break;
      }
      case PipelineType.Usage: {
        buttonText = 'Add Usage Ingestion';

        break;
      }
      case PipelineType.Profiler:
      default: {
        buttonText = '';

        break;
      }
    }

    return buttonText ? (
      <NonAdminAction position="bottom" title={TITLE_FOR_NON_ADMIN_ACTION}>
        <Button
          className={classNames('tw-h-8 tw-rounded tw-mb-2')}
          data-testid="add-new-ingestion-button"
          disabled={
            getIngestionPipelineTypeOption().length === 0 ||
            (!isAdminUser && !isAuthDisabled)
          }
          size="small"
          theme="primary"
          variant="contained"
          onClick={handleAddIngestionClick}>
          {buttonText}
        </Button>
      </NonAdminAction>
    ) : null;
  };

  const getSearchedIngestions = useCallback(() => {
    const sText = lowerCase(searchText);
@@ -240,25 +282,7 @@
        ) : null}
      </div>
      <div className="tw-w-8/12 tw-flex tw-justify-end">
        {isRequiredDetailsAvailable && (
          <NonAdminAction
            position="bottom"
            title={TITLE_FOR_NON_ADMIN_ACTION}>
            <Button
              className={classNames('tw-h-8 tw-rounded tw-mb-2')}
              data-testid="add-new-ingestion-button"
              disabled={
                getIngestionPipelineTypeOption().length === 0 ||
                (!isAdminUser && !isAuthDisabled)
              }
              size="small"
              theme="primary"
              variant="contained"
              onClick={handleAddIngestionClick}>
              Add Ingestion
            </Button>
          </NonAdminAction>
        )}
        {isRequiredDetailsAvailable && getAddIngestionButton()}
      </div>
    </div>
    {getSearchedIngestions().length ? (
@@ -422,46 +446,9 @@
    );
  };

  const getIngestionForm = () => {
    const type = getIngestionPipelineTypeOption();
    let heading = '';

    if (isUndefined(updateSelection)) {
      heading = `Add ${capitalize(type[0])} Ingestion`;
    } else {
      heading = `Edit ${capitalize(updateSelection.pipelineType)} Ingestion`;
    }

    return (
      <div className="tw-bg-white tw-pt-4 tw-w-full">
        <div className="tw-max-w-2xl tw-mx-auto tw-pb-6">
          <AddIngestion
            activeIngestionStep={activeIngestionStep}
            data={updateSelection}
            handleCancelClick={handleCancelUpdate}
            heading={heading}
            pipelineType={type[0]}
            serviceCategory={serviceCategory}
            serviceData={serviceDetails}
            setActiveIngestionStep={(step) => setActiveIngestionStep(step)}
            showSuccessScreen={false}
            status={
              isUndefined(updateSelection)
                ? FormSubmitType.ADD
                : FormSubmitType.EDIT
            }
            onAddIngestionSave={addIngestion}
            onSuccessSave={handleCancelUpdate}
            onUpdateIngestion={updateIngestion}
          />
        </div>
      </div>
    );
  };

  return (
    <div data-testid="ingestion-container">
      {showIngestionForm ? getIngestionForm() : getIngestionTab()}
      {getIngestionTab()}

      {isConfirmationModalOpen && (
        <EntityDeleteModal
@@ -43,7 +43,6 @@ const mockPaging = {
  total: 1,
};

const mockFunction = jest.fn();
const mockPaginghandler = jest.fn();
const mockDeleteIngestion = jest.fn();
const mockTriggerIngestion = jest
@@ -74,14 +73,6 @@ jest.mock('../common/next-previous/NextPrevious', () => {
  return jest.fn().mockImplementation(() => <div>NextPrevious</div>);
});

jest.mock('../AddIngestion/AddIngestion.component', () => {
  return jest
    .fn()
    .mockImplementation(() => (
      <div data-testid="ingestion-form">AddIngestion</div>
    ));
});

jest.mock('../Modals/EntityDeleteModal/EntityDeleteModal', () => {
  return jest.fn().mockImplementation(() => <div>EntityDeleteModal</div>);
});
@@ -91,7 +82,6 @@ describe('Test Ingestion page', () => {
    const { container } = render(
      <Ingestion
        isRequiredDetailsAvailable
        addIngestion={mockFunction}
        airflowEndpoint=""
        currrentPage={1}
        deleteIngestion={mockDeleteIngestion}
@@ -103,8 +93,8 @@
        serviceCategory={ServiceCategory.DASHBOARD_SERVICES}
        serviceDetails={mockService}
        serviceList={[]}
        serviceName=""
        triggerIngestion={mockTriggerIngestion}
        updateIngestion={mockFunction}
      />,
      {
        wrapper: MemoryRouter,
@@ -132,7 +122,6 @@
    const { container } = render(
      <Ingestion
        isRequiredDetailsAvailable
        addIngestion={mockFunction}
        airflowEndpoint=""
        currrentPage={1}
        deleteIngestion={mockDeleteIngestion}
@@ -144,8 +133,8 @@
        serviceCategory={ServiceCategory.DASHBOARD_SERVICES}
        serviceDetails={mockService}
        serviceList={[]}
        serviceName=""
        triggerIngestion={mockTriggerIngestion}
        updateIngestion={mockFunction}
      />,
      {
        wrapper: MemoryRouter,
@@ -188,7 +177,6 @@
    const { container } = render(
      <Ingestion
        isRequiredDetailsAvailable
        addIngestion={mockFunction}
        airflowEndpoint=""
        currrentPage={1}
        deleteIngestion={mockDeleteIngestion}
@@ -200,8 +188,8 @@
        serviceCategory={ServiceCategory.DASHBOARD_SERVICES}
        serviceDetails={mockService}
        serviceList={[]}
        serviceName=""
        triggerIngestion={mockTriggerIngestion}
        updateIngestion={mockFunction}
      />,
      {
        wrapper: MemoryRouter,
@@ -223,7 +211,6 @@
    const { container } = render(
      <Ingestion
        isRequiredDetailsAvailable
        addIngestion={mockFunction}
        airflowEndpoint=""
        currrentPage={1}
        deleteIngestion={mockDeleteIngestion}
@@ -235,8 +222,8 @@
        serviceCategory={ServiceCategory.DASHBOARD_SERVICES}
        serviceDetails={mockService}
        serviceList={[]}
        serviceName=""
        triggerIngestion={mockTriggerIngestion}
        updateIngestion={mockFunction}
      />,
      {
        wrapper: MemoryRouter,
@@ -247,9 +234,7 @@
    const editButton = await findByTestId(container, 'edit');
    fireEvent.click(editButton);

    const ingestionModal = await findByTestId(container, 'ingestion-form');

    expect(ingestionModal).toBeInTheDocument();
    expect(editButton).toBeInTheDocument();
  });

  it('CTA should work', async () => {
@@ -262,7 +247,6 @@
    const { container } = render(
      <Ingestion
        isRequiredDetailsAvailable
        addIngestion={mockFunction}
        airflowEndpoint=""
        currrentPage={1}
        deleteIngestion={mockDeleteIngestion}
@@ -274,8 +258,8 @@
        serviceCategory={ServiceCategory.DASHBOARD_SERVICES}
        serviceDetails={mockService}
        serviceList={[]}
        serviceName=""
        triggerIngestion={mockTriggerIngestion}
        updateIngestion={mockFunction}
      />,
      {
        wrapper: MemoryRouter,
@@ -306,17 +290,12 @@
      'add-new-ingestion-button'
    );
    fireEvent.click(addIngestionButton);

    const ingestionModal = await findByTestId(container, 'ingestion-form');

    expect(ingestionModal).toBeInTheDocument();
  });

  it('Airflow DAG view button should be present if endpoint is available', async () => {
    const { container } = render(
      <Ingestion
        isRequiredDetailsAvailable
        addIngestion={mockFunction}
        airflowEndpoint="http://localhost"
        currrentPage={1}
        deleteIngestion={mockDeleteIngestion}
@@ -328,8 +307,8 @@
        serviceCategory={ServiceCategory.DASHBOARD_SERVICES}
        serviceDetails={mockService}
        serviceList={[]}
        serviceName=""
        triggerIngestion={mockTriggerIngestion}
        updateIngestion={mockFunction}
      />,
      {
        wrapper: MemoryRouter,
@@ -12,7 +12,6 @@
 */

import { IngestionType, ServiceCategory } from '../../enums/service.enum';
import { CreateIngestionPipeline } from '../../generated/api/services/ingestionPipelines/createIngestionPipeline';
import { DatabaseService } from '../../generated/entity/services/databaseService';
import {
  Connection,
@@ -56,7 +55,7 @@ export interface IngestionData {
export interface IngestionProps {
  airflowEndpoint: string;
  serviceDetails: DataObj;
  serviceName?: string;
  serviceName: string;
  serviceCategory: ServiceCategory;
  isRequiredDetailsAvailable: boolean;
  paging: Paging;
@@ -66,14 +65,6 @@ export interface IngestionProps {
  pagingHandler: (value: string | number, activePage?: number) => void;
  deleteIngestion: (id: string, displayName: string) => Promise<void>;
  triggerIngestion: (id: string, displayName: string) => Promise<void>;
  addIngestion: (data: CreateIngestionPipeline) => Promise<void>;
  updateIngestion: (
    data: IngestionPipeline,
    oldData: IngestionPipeline,
    id: string,
    displayName: string,
    triggerIngestion?: boolean
  ) => Promise<void>;
}

export interface ModifiedConfig extends Connection {
@@ -45,8 +45,10 @@ const PLACEHOLDER_ROUTE_PIPELINE_FQN = ':pipelineFQN';
const PLACEHOLDER_ROUTE_DASHBOARD_FQN = ':dashboardFQN';
const PLACEHOLDER_ROUTE_DATABASE_FQN = ':databaseFQN';
const PLACEHOLDER_ROUTE_DATABASE_SCHEMA_FQN = ':databaseSchemaFQN';
const PLACEHOLDER_ROUTE_SERVICE_FQN = ':serviceFQN';

export const PLACEHOLDER_ROUTE_SERVICE_FQN = ':serviceFQN';
export const PLACEHOLDER_ROUTE_INGESTION_TYPE = ':ingestionType';
export const PLACEHOLDER_ROUTE_INGESTION_FQN = ':ingestionFQN';
export const PLACEHOLDER_ROUTE_SERVICE_CAT = ':serviceCategory';
const PLACEHOLDER_ROUTE_SEARCHQUERY = ':searchQuery';
const PLACEHOLDER_ROUTE_TAB = ':tab';
@@ -160,6 +162,8 @@ export const ROUTES = {
  SERVICE_WITH_TAB: `/service/${PLACEHOLDER_ROUTE_SERVICE_CAT}/${PLACEHOLDER_ROUTE_SERVICE_FQN}/${PLACEHOLDER_ROUTE_TAB}`,
  ADD_SERVICE: `/${PLACEHOLDER_ROUTE_SERVICE_CAT}/add-service`,
  SERVICES: '/services',
  ADD_INGESTION: `/service/${PLACEHOLDER_ROUTE_SERVICE_CAT}/${PLACEHOLDER_ROUTE_SERVICE_FQN}/add-ingestion/${PLACEHOLDER_ROUTE_INGESTION_TYPE}`,
  EDIT_INGESTION: `/service/${PLACEHOLDER_ROUTE_SERVICE_CAT}/${PLACEHOLDER_ROUTE_SERVICE_FQN}/edit-ingestion/${PLACEHOLDER_ROUTE_INGESTION_FQN}/${PLACEHOLDER_ROUTE_INGESTION_TYPE}`,
  USERS: '/users',
  SCORECARD: '/scorecard',
  SWAGGER: '/docs',
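The placeholders are exported so route helpers can substitute concrete values into ADD_INGESTION and EDIT_INGESTION. getAddIngestionPath in RouterUtils (imported elsewhere in this commit) is plausibly implemented along these lines (a sketch, not the verbatim helper):

export const getAddIngestionPath = (
  serviceCategory: string,
  serviceFQN: string,
  ingestionType: string
): string => {
  // Swap each exported placeholder for its concrete value.
  return ROUTES.ADD_INGESTION.replace(
    PLACEHOLDER_ROUTE_SERVICE_CAT,
    serviceCategory
  )
    .replace(PLACEHOLDER_ROUTE_SERVICE_FQN, serviceFQN)
    .replace(PLACEHOLDER_ROUTE_INGESTION_TYPE, ingestionType);
};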
@@ -28,7 +28,14 @@ export const addServiceGuide = [
  },
];

export const addIngestionGuide = [
const schedulingIngestionGuide = {
  step: 2,
  title: 'Schedule for Ingestion',
  description:
    'Scheduling can be set up at an hourly, daily, or weekly cadence. The timezone is in UTC. Select a Start Date to schedule for ingestion. It is optional to add an End Date.',
};

export const addMetadataIngestionGuide = [
  {
    step: 1,
    title: 'Add Metadata Ingestion',
@@ -36,10 +43,7 @@ export const addIngestionGuide = [
    You can include or exclude the filter patterns. Choose to include views, enable or disable the data profiler, and ingest sample data, as required.`,
  },
  {
    step: 2,
    title: 'Schedule for Ingestion',
    description:
      'Scheduling can be set up at an hourly, daily, or weekly cadence. The timezone is in UTC. Select a Start Date to schedule for ingestion. It is optional to add an End Date.',
    ...schedulingIngestionGuide,
  },
  {
    step: 3,
@@ -48,3 +52,37 @@
      'You are all set! The <Ingestion Pipeline Name> has been successfully deployed. The metadata will be ingested at a regular interval as per the schedule.',
  },
];

export const addUsageIngestionGuide = [
  {
    step: 1,
    title: 'Add Usage Ingestion',
    description: `Based on the service type selected, enter the filter pattern details for the schema or table (database), or topic (messaging), or dashboard.
    You can include or exclude the filter patterns. Choose to include views, enable or disable the data profiler, and ingest sample data, as required.`,
  },
  {
    ...schedulingIngestionGuide,
  },
  {
    step: 3,
    title: 'Usage Ingestion Added Successfully',
    description:
      'You are all set! The <Ingestion Pipeline Name> has been successfully deployed. The metadata will be ingested at a regular interval as per the schedule.',
  },
];

export const addProfilerIngestionGuide = [
  {
    step: 1,
    title: 'Add Profiler Ingestion',
    description: `Based on the service type selected, enter the filter pattern details for the schema or table (database), or topic (messaging), or dashboard.
    You can include or exclude the filter patterns. Choose to include views, enable or disable the data profiler, and ingest sample data, as required.`,
  },
  { ...schedulingIngestionGuide },
  {
    step: 3,
    title: 'Profiler Ingestion Added Successfully',
    description:
      'You are all set! The <Ingestion Pipeline Name> has been successfully deployed. The metadata will be ingested at a regular interval as per the schedule.',
  },
];
@@ -160,6 +160,7 @@ export interface EntityReference {
 */
export enum PipelineType {
  Metadata = 'metadata',
  Profiler = 'profiler',
  Usage = 'usage',
}

@@ -223,6 +224,10 @@ export interface ConfigClass {
   * Regex exclude tables or databases that matches the pattern.
   */
  tableFilterPattern?: FilterPattern;
  /**
   * Pipeline type
   */
  type?: ConfigType;
  /**
   * Configuration to tune how far we want to look back in query logs to process usage data.
   */
@@ -248,6 +253,10 @@ export interface ConfigClass {
   * Regex to only fetch topics that matches the pattern.
   */
  topicFilterPattern?: FilterPattern;
  /**
   * Regex to only fetch tables with FQN matching the pattern.
   */
  fqnFilterPattern?: FilterPattern;
}

/**
@@ -258,6 +267,8 @@ export interface ConfigClass {
 * Regex exclude tables or databases that matches the pattern.
 *
 * Regex to only fetch topics that matches the pattern.
 *
 * Regex to only fetch tables with FQN matching the pattern.
 */
export interface FilterPattern {
  /**
@@ -373,3 +384,24 @@ export enum DbtProvider {
  Local = 'local',
  S3 = 's3',
}

/**
 * Pipeline type
 *
 * Database Source Config Metadata Pipeline type
 *
 * Database Source Config Usage Pipeline type
 *
 * Dashboard Source Config Metadata Pipeline type
 *
 * Messaging Source Config Metadata Pipeline type
 *
 * Profiler Source Config Pipeline type
 */
export enum ConfigType {
  DashboardMetadata = 'DashboardMetadata',
  DatabaseMetadata = 'DatabaseMetadata',
  DatabaseUsage = 'DatabaseUsage',
  MessagingMetadata = 'MessagingMetadata',
  Profiler = 'Profiler',
}
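PipelineType names the job kind while ConfigType names the sourceConfig payload variant. For a database service the pairing used by AddIngestion above reduces to a mapping like this (illustrative only, not generated code):

const configTypeForDatabasePipeline = (
  pipelineType: PipelineType
): ConfigType => {
  switch (pipelineType) {
    case PipelineType.Usage:
      return ConfigType.DatabaseUsage; // usage jobs read query logs
    case PipelineType.Profiler:
      return ConfigType.Profiler; // profiler jobs compute table metrics
    default:
      return ConfigType.DatabaseMetadata; // plain metadata extraction
  }
};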
@@ -0,0 +1,737 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
/*
 * Copyright 2021 Collate
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * Test Service Connection to test user provided configuration is valid or not.
 */
export interface TestServiceConnection {
  /**
   * Database Connection.
   */
  connection?: ConnectionClass;
  /**
   * Type of database service such as MySQL, BigQuery, Snowflake, Redshift, Postgres...
   */
  connectionType?: ConnectionType;
}

/**
 * Database Connection.
 *
 * Dashboard Connection.
 */
export interface ConnectionClass {
  config?: ConfigClass;
}
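A minimal sketch of building a TestServiceConnection payload (the concrete values and the ConnectionType member below are assumptions for illustration; only the interface shape comes from this file):

const testMysqlConnection: TestServiceConnection = {
  connectionType: 'Mysql' as ConnectionType, // assumed enum member
  connection: {
    config: {
      hostPort: 'localhost:3306', // example host
      username: 'openmetadata_user', // example user
    },
  },
};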
/**
 * Google BigQuery Connection Config
 *
 * AWS Athena Connection Config
 *
 * Azure SQL Connection Config
 *
 * Clickhouse Connection Config
 *
 * Databricks Connection Config
 *
 * Db2 Connection Config
 *
 * DeltaLake Database Connection Config
 *
 * Druid Connection Config
 *
 * DynamoDB Connection Config
 *
 * Glue Connection Config
 *
 * Hive SQL Connection Config
 *
 * MariaDB Database Connection Config
 *
 * Mssql Database Connection Config
 *
 * Mysql Database Connection Config
 *
 * SQLite Database Connection Config
 *
 * Oracle Database Connection Config
 *
 * Postgres Database Connection Config
 *
 * Presto Database Connection Config
 *
 * Redshift Connection Config
 *
 * Salesforce Connection Config
 *
 * SingleStore Database Connection Config
 *
 * Snowflake Connection Config
 *
 * Trino Connection Config
 *
 * Vertica Connection Config
 *
 * Sample Data Connection Config
 *
 * Looker Connection Config
 *
 * Metabase Connection Config
 *
 * PowerBI Connection Config
 *
 * Redash Connection Config
 *
 * Superset Connection Config
 *
 * Tableau Connection Config
 *
 * Kafka Connection Config
 *
 * Pulsar Connection Config
 */
export interface ConfigClass {
  connectionArguments?: { [key: string]: string };
  /**
   * Additional connection options that can be sent to service during the connection.
   */
  connectionOptions?: { [key: string]: any };
  /**
   * GCS Credentials
   *
   * Credentials for the PowerBI.
   */
  credentials?: GCSCredentials | string;
  /**
   * Database of the data source. This is optional parameter, if you would like to restrict
   * the metadata reading to a single database. When left blank, OpenMetadata Ingestion
   * attempts to scan all the databases in Athena.
   *
   * Database of the data source. This is optional parameter, if you would like to restrict
   * the metadata reading to a single database. When left blank, OpenMetadata Ingestion
   * attempts to scan all the databases in Azure SQL.
   *
   * Database of the data source. This is optional parameter, if you would like to restrict
   * the metadata reading to a single database. When left blank, OpenMetadata Ingestion
   * attempts to scan all the databases in Clickhouse.
   *
   * Database of the data source. This is optional parameter, if you would like to restrict
   * the metadata reading to a single database. When left blank, OpenMetadata Ingestion
   * attempts to scan all the databases in Databricks.
   *
   * Database of the data source. This is optional parameter, if you would like to restrict
   * the metadata reading to a single database. When left blank, OpenMetadata Ingestion
   * attempts to scan all the databases in DB2.
   *
   * Database of the data source. This is optional parameter, if you would like to restrict
   * the metadata reading to a single database. When left blank, OpenMetadata Ingestion
   * attempts to scan all the databases in Druid.
   *
   * Database of the data source. This is optional parameter, if you would like to restrict
   * the metadata reading to a single database. When left blank, OpenMetadata Ingestion
   * attempts to scan all the databases in Glue.
   *
   * Database of the data source. This is optional parameter, if you would like to restrict
   * the metadata reading to a single database. When left blank, OpenMetadata Ingestion
   * attempts to scan all the databases in Hive.
   *
   * Database of the data source. This is optional parameter, if you would like to restrict
   * the metadata reading to a single database. When left blank, OpenMetadata Ingestion
   * attempts to scan all the databases in MariaDB.
   *
   * Database of the data source. This is optional parameter, if you would like to restrict
   * the metadata reading to a single database. When left blank, OpenMetadata Ingestion
   * attempts to scan all the databases in MsSQL.
   *
   * Database of the data source. This is optional parameter, if you would like to restrict
   * the metadata reading to a single database. When left blank, OpenMetadata Ingestion
   * attempts to scan all the databases in Mysql.
   *
   * Database of the data source. This is optional parameter, if you would like to restrict
   * the metadata reading to a single database. When left blank, OpenMetadata Ingestion
   * attempts to scan all the databases.
   *
   * Database of the data source. This is optional parameter, if you would like to restrict
   * the metadata reading to a single database. When left blank, OpenMetadata Ingestion
   * attempts to scan all the databases in Oracle.
   *
   * Database of the data source. This is optional parameter, if you would like to restrict
   * the metadata reading to a single database. When left blank, OpenMetadata Ingestion
   * attempts to scan all the databases in Postgres.
   *
   * Database of the data source. This is optional parameter, if you would like to restrict
   * the metadata reading to a single database. When left blank, OpenMetadata Ingestion
   * attempts to scan all the databases in Redshift.
   *
   * Database of the data source. This is optional parameter, if you would like to restrict
   * the metadata reading to a single database. When left blank, OpenMetadata Ingestion
   * attempts to scan all the databases in MySQL.
   *
   * Database of the data source. This is optional parameter, if you would like to restrict
   * the metadata reading to a single database. When left blank, OpenMetadata Ingestion
   * attempts to scan all the databases in Snowflake.
   *
   * Database of the data source. This is optional parameter, if you would like to restrict
   * the metadata reading to a single database. When left blank, OpenMetadata Ingestion
   * attempts to scan all the databases in the selected catalog in Trino.
   *
   * Database of the data source. This is optional parameter, if you would like to restrict
   * the metadata reading to a single database. When left blank, OpenMetadata Ingestion
   * attempts to scan all the databases in Vertica.
   */
  database?: string;
  /**
   * Enable importing policy tags of BigQuery into OpenMetadata
   */
  enablePolicyTagImport?: boolean;
  /**
   * BigQuery APIs URL
   *
   * Host and port of the Athena
   *
   * Host and port of the Clickhouse
   *
   * Host and port of the Databricks
   *
   * Host and port of the DB2
   *
   * Host and port of the Druid
   *
   * Host and port of the Hive.
   *
   * Host and port of the data source.
   *
   * Host and port of the MsSQL.
   *
   * Host and port of the data source. Blank for in-memory database.
   *
   * Host and port of the Oracle.
   *
   * Host and port of the Postgres.
   *
   * Host and port of the Redshift.
   *
   * URL to Looker instance.
   *
   * Host and Port of Metabase instance.
   *
   * Dashboard URL for the PowerBI.
   *
   * URL for the redash instance
   *
   * URL for the superset instance
   *
   * Tableau Server
   */
  hostPort?: any;
  /**
   * Column name on which bigquery table will be partitioned
   */
  partitionField?: string;
  /**
   * Partitioning query for bigquery tables
   */
  partitionQuery?: string;
  /**
   * Duration for partitioning bigquery tables
   */
  partitionQueryDuration?: number;
  /**
   * BigQuery project ID. Inform it here if passing the credentials path.
   */
  projectId?: string;
  /**
   * SQLAlchemy driver scheme options.
   */
  scheme?: Scheme;
  supportsMetadataExtraction?: boolean;
  supportsProfiler?: boolean;
  supportsUsageExtraction?: boolean;
  /**
   * OpenMetadata Tag category name if enablePolicyTagImport is set to true.
   */
  tagCategoryName?: string;
  /**
   * Service Type
   */
  type?: Type;
  /**
   * username to connect to the Bigquery. This user should have privileges to read all the
   * metadata in Bigquery.
   *
   * username to connect to the Azure SQL. This user should have privileges to read all the
   * metadata in Azure SQL.
   *
   * username to connect to the Clickhouse. This user should have privileges to read all the
   * metadata in Clickhouse.
   *
   * username to connect to the Databricks. This user should have privileges to read all the
   * metadata in Databricks.
   *
   * username to connect to the DB2. This user should have privileges to read all the
   * metadata in DB2.
   *
   * username to connect to the Druid. This user should have privileges to read all the
   * metadata in Druid.
   *
   * username to connect to the Hive. This user should have privileges to read all the
   * metadata in Hive.
   *
   * username to connect to the MariaDB. This user should have privileges to read all the
   * metadata in MariaDB.
   *
   * username to connect to the MsSQL. This user should have privileges to read all the
   * metadata in MsSQL.
   *
   * username to connect to the Mysql. This user should have privileges to read all the
   * metadata in Mysql.
   *
   * username to connect to the SQLite. Blank for in-memory database.
   *
   * username to connect to the Oracle. This user should have privileges to read all the
   * metadata in Oracle.
   *
   * username to connect to the Postgres. This user should have privileges to read all the
   * metadata in Postgres.
   *
   * username to connect to the Redshift. This user should have privileges to read all the
   * metadata in Redshift.
   *
   * username to connect to the MySQL. This user should have privileges to read all the
   * metadata in MySQL.
   *
   * username to connect to the Snowflake. This user should have privileges to read all the
   * metadata in Snowflake.
   *
   * username to connect to Trino. This user should have privileges to read all the metadata
   * in Trino.
   *
   * username to connect to the Vertica. This user should have privileges to read all the
   * metadata in Vertica.
   *
   * username to connect to the Looker. This user should have privileges to read all the
   * metadata in Looker.
   *
   * username to connect to the Metabase. This user should have privileges to read all the
   * metadata in Metabase.
   *
   * username for the Redash
   *
   * username for the Superset
   *
   * username for the Tableau
   */
  username?: string;
  awsConfig?: S3Credentials;
  /**
   * S3 Staging Directory.
   */
  s3StagingDir?: string;
  /**
   * Athena workgroup.
   */
  workgroup?: string;
  /**
   * SQLAlchemy driver for Azure SQL
   */
  driver?: string;
  /**
   * password to connect to the Athena.
   *
   * password to connect to the Clickhouse.
   *
   * password to connect to the Databricks.
   *
   * password to connect to the DB2.
   *
   * password to connect to the Druid.
   *
   * password to connect to the Hive.
   *
   * password to connect to the MariaDB.
   *
   * password to connect to the MsSQL.
   *
   * password to connect to the Mysql.
   *
   * password to connect to SQLite. Blank for in-memory database.
   *
   * password to connect to the Oracle.
   *
   * password to connect to the Postgres.
   *
   * password to connect to the Redshift.
   *
   * password to connect to the MySQL.
   *
   * password to connect to the Snowflake.
   *
   * password to connect to the Trino.
   *
   * password to connect to the Vertica.
   *
   * password to connect to the Looker.
   *
   * password to connect to the Metabase.
   *
   * password for the Superset
   *
   * password for the Tableau
   */
  password?: string;
  /**
   * Clickhouse SQL connection duration
   */
  duration?: number;
  /**
   * Generated Token to connect to Databricks
   */
  token?: string;
  /**
   * pySpark App Name
   */
  appName?: string;
  /**
   * File path of local Hive Metastore.
   */
  metastoreFilePath?: string;
  /**
   * Host and port of remote Hive Metastore.
   */
  metastoreHostPort?: string;
  /**
   * AWS Access key ID.
   */
  awsAccessKeyId?: string;
  /**
   * AWS Region Name.
   */
  awsRegion?: string;
  /**
   * AWS Secret Access Key.
   */
  awsSecretAccessKey?: string;
  /**
   * AWS Session Token.
   */
  awsSessionToken?: string;
  /**
   * EndPoint URL for the Dynamo DB
   *
   * EndPoint URL for the Glue
   */
  endPointURL?: string;
  /**
   * AWS pipelineServiceName Name.
   */
  pipelineServiceName?: string;
  /**
   * AWS storageServiceName Name.
   */
  storageServiceName?: string;
  /**
   * Authentication options to pass to Hive connector. These options are based on SQLAlchemy.
   */
  authOptions?: string;
  /**
   * Connection URI in case of pyodbc
   */
  uriString?: string;
  /**
   * How to run the SQLite database. :memory: by default.
   */
  databaseMode?: string;
  /**
   * Oracle Service Name to be passed. Note: either Database or Oracle service name can be
   * sent, not both.
   */
  oracleServiceName?: string;
  /**
   * Presto catalog
   *
   * Catalog of the data source.
   */
  catalog?: string;
  /**
   * Salesforce Security Token.
   */
  securityToken?: string;
  /**
   * Salesforce Object Name.
   */
  sobjectName?: string;
  /**
   * Snowflake Account.
   */
  account?: string;
  /**
   * Snowflake Role.
   */
  role?: string;
  /**
   * Snowflake warehouse.
   */
  warehouse?: string;
  /**
   * URL parameters for connection to the Trino data source
   */
  params?: { [key: string]: any };
  /**
   * Proxies for the connection to Trino data source
   */
  proxies?: { [key: string]: any };
  /**
   * Sample Data File Path
   */
  sampleDataFolder?: string;
  /**
   * Looker Environment
   *
   * Tableau Environment Name
   */
  env?: string;
  /**
   * Database Service Name for creation of lineage
   */
  dbServiceName?: string;
  /**
   * client_id for the PowerBI.
   */
  clientId?: string;
  /**
   * clientSecret for the PowerBI.
   */
  clientSecret?: string;
  /**
   * Dashboard redirect URI for the PowerBI.
   */
  redirectURI?: string;
  /**
   * PowerBI secrets.
   */
  scope?: string[];
  /**
   * API key of the redash instance to access.
   */
  apiKey?: string;
  /**
   * Database Service to create lineage
   */
  dbServiceConnection?: string;
  /**
   * Authentication provider for the Superset
   */
  provider?: string;
  /**
   * Tableau API version
   */
  apiVersion?: string;
  /**
   * Personal Access Token Name
   */
  personalAccessTokenName?: string;
  /**
   * Personal Access Token Secret
   */
  personalAccessTokenSecret?: string;
  /**
   * Tableau Site Name
   */
  siteName?: string;
  /**
   * Kafka bootstrap servers. Add them as comma-separated values, e.g.: host1:9092,host2:9092
   */
  bootstrapServers?: string;
  /**
   * Confluent Kafka Schema Registry URL.
   */
  schemaRegistryURL?: string;
}
/**
 * AWS S3 credentials configs.
 */
export interface S3Credentials {
  /**
   * AWS Access key ID.
   */
  awsAccessKeyId: string;
  /**
   * AWS Region
   */
  awsRegion: string;
  /**
   * AWS Secret Access Key.
   */
  awsSecretAccessKey: string;
  /**
   * AWS Session Token.
   */
  awsSessionToken?: string;
  /**
   * EndPoint URL for the AWS
   */
  endPointURL?: string;
}

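Only awsAccessKeyId, awsRegion, and awsSecretAccessKey are required, so the smallest valid literal looks like this (all values below are placeholders, not real credentials):

// Placeholder values, for illustration only.
const s3Credentials: S3Credentials = {
  awsAccessKeyId: 'AKIAXXXXXXXXXXXXXXXX',
  awsRegion: 'us-east-2',
  awsSecretAccessKey: '<secret-access-key>',
};
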
/**
 * GCS Credentials
 *
 * GCS credentials configs.
 */
export interface GCSCredentials {
  /**
   * GCS configs.
   */
  gcsConfig: GCSValues | string;
}

/**
 * GCS Credentials.
 */
export interface GCSValues {
  /**
   * Google Cloud auth provider certificate.
   */
  authProviderX509CertUrl?: string;
  /**
   * Google Cloud auth uri.
   */
  authUri?: string;
  /**
   * Google Cloud email.
   */
  clientEmail?: string;
  /**
   * Google Cloud Client ID.
   */
  clientId?: string;
  /**
   * Google Cloud client certificate uri.
   */
  clientX509CertUrl?: string;
  /**
   * Google Cloud private key.
   */
  privateKey?: string;
  /**
   * Google Cloud private key id.
   */
  privateKeyId?: string;
  /**
   * Google Cloud project id.
   */
  projectId?: string;
  /**
   * Google Cloud token uri.
   */
  tokenUri?: string;
  /**
   * Google Cloud service account type.
   */
  type?: string;
}

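Note that gcsConfig is a union: either structured GCSValues or a plain string (for example, the contents or path of a service-account file). A minimal sketch of how a consumer could narrow it before use — getGcsProjectId is a hypothetical helper, not part of the generated file:

// Hypothetical helper: narrows the GCSValues | string union before
// reading structured fields.
function getGcsProjectId(credentials: GCSCredentials): string | undefined {
  const { gcsConfig } = credentials;
  if (typeof gcsConfig === 'string') {
    // A plain-string config carries no structured fields to read.
    return undefined;
  }

  // Here TypeScript has narrowed gcsConfig to GCSValues.
  return gcsConfig.projectId;
}
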
/**
 * SQLAlchemy driver scheme options.
 */
export enum Scheme {
  AwsathenaREST = 'awsathena+rest',
  Bigquery = 'bigquery',
  ClickhouseHTTP = 'clickhouse+http',
  DatabricksConnector = 'databricks+connector',
  Db2IBMDB = 'db2+ibm_db',
  Druid = 'druid',
  Hive = 'hive',
  MssqlPymssql = 'mssql+pymssql',
  MssqlPyodbc = 'mssql+pyodbc',
  MssqlPytds = 'mssql+pytds',
  MysqlPymysql = 'mysql+pymysql',
  OracleCxOracle = 'oracle+cx_oracle',
  PostgresqlPsycopg2 = 'postgresql+psycopg2',
  Presto = 'presto',
  RedshiftPsycopg2 = 'redshift+psycopg2',
  Salesforce = 'salesforce',
  Snowflake = 'snowflake',
  SqlitePysqlite = 'sqlite+pysqlite',
  Trino = 'trino',
  VerticaVerticaPython = 'vertica+vertica_python',
}

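Each Scheme value is the driver prefix of a SQLAlchemy-style connection URL. As an illustration only — buildConnectionUrl below is a hypothetical helper, not something this codebase ships — the pieces combine like this:

// Hypothetical sketch: assembles a SQLAlchemy-style URL from a Scheme value.
const buildConnectionUrl = (
  scheme: Scheme,
  username: string,
  password: string,
  hostPort: string,
  database?: string
): string => {
  const auth = `${encodeURIComponent(username)}:${encodeURIComponent(password)}`;

  return `${scheme}://${auth}@${hostPort}${database ? `/${database}` : ''}`;
};

// buildConnectionUrl(Scheme.MysqlPymysql, 'admin', 'secret', 'localhost:3306', 'openmetadata')
// => 'mysql+pymysql://admin:secret@localhost:3306/openmetadata'
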
/**
 * Service Type
 *
 * Service type.
 *
 * Looker service type
 *
 * Metabase service type
 *
 * PowerBI service type
 *
 * Redash service type
 *
 * Superset service type
 *
 * Tableau service type
 *
 * Kafka service type
 *
 * Pulsar service type
 */
export enum Type {
  Athena = 'Athena',
  AzureSQL = 'AzureSQL',
  BigQuery = 'BigQuery',
  Clickhouse = 'Clickhouse',
  Databricks = 'Databricks',
  Db2 = 'Db2',
  DeltaLake = 'DeltaLake',
  Druid = 'Druid',
  DynamoDB = 'DynamoDB',
  Glue = 'Glue',
  Hive = 'Hive',
  Kafka = 'Kafka',
  Looker = 'Looker',
  MariaDB = 'MariaDB',
  Metabase = 'Metabase',
  Mssql = 'Mssql',
  Mysql = 'Mysql',
  Oracle = 'Oracle',
  Postgres = 'Postgres',
  PowerBI = 'PowerBI',
  Presto = 'Presto',
  Pulsar = 'Pulsar',
  Redash = 'Redash',
  Redshift = 'Redshift',
  SQLite = 'SQLite',
  Salesforce = 'Salesforce',
  SampleData = 'SampleData',
  SingleStore = 'SingleStore',
  Snowflake = 'Snowflake',
  Superset = 'Superset',
  Tableau = 'Tableau',
  Trino = 'Trino',
  Vertica = 'Vertica',
}

/**
 * Type of database service such as MySQL, BigQuery, Snowflake, Redshift, Postgres...
 */
export enum ConnectionType {
  Dashboard = 'Dashboard',
  Database = 'Database',
  Messaging = 'Messaging',
}

@ -26,6 +26,10 @@ export interface IngestionPipeline {
   * When `true` indicates the entity has been soft deleted.
   */
  deleted?: boolean;
  /**
   * Indicates if the workflow has been successfully deployed to Airflow.
   */
  deployed?: boolean;
  /**
   * Description of the Pipeline.
   */
@ -50,10 +54,6 @@ export interface IngestionPipeline {
   * Name that identifies this pipeline instance uniquely.
   */
  name: string;
  /**
   * Next execution date from the underlying pipeline platform once the pipeline is scheduled.
   */
  nextExecutionDate?: Date;
  openMetadataServerConnection: OpenMetadataConnection;
  /**
   * Owner of this Pipeline.
   */
@ -420,6 +420,7 @@ export interface PipelineStatus {
 */
export enum PipelineType {
  Metadata = 'metadata',
  Profiler = 'profiler',
  Usage = 'usage',
}

@ -606,7 +607,7 @@ export interface Connection {
 *
 * password to connect to the MsSQL.
 *
 * password to connect to the SingleStore.
 * password to connect to the Mysql.
 *
 * password to connect to SQLite. Blank for in-memory database.
 *
@ -645,8 +646,8 @@ export interface Connection {
 *
 * username for the Tableau
 *
 * username to connect to the Athena. This user should have privileges to read all the
 * metadata in Athena.
 * username to connect to the Bigquery. This user should have privileges to read all the
 * metadata in Bigquery.
 *
 * username to connect to the Azure SQL. This user should have privileges to read all the
 * metadata in Azure SQL.
@ -672,8 +673,8 @@ export interface Connection {
 * username to connect to the MsSQL. This user should have privileges to read all the
 * metadata in MsSQL.
 *
 * username to connect to the SingleStore. This user should have privileges to read all the
 * metadata in SingleStore.
 * username to connect to the Mysql. This user should have privileges to read all the
 * metadata in Mysql.
 *
 * username to connect to the SQLite. Blank for in-memory database.
 *
@ -805,7 +806,7 @@ export interface Connection {
 *
 * Database of the data source. This is an optional parameter, if you would like to restrict
 * the metadata reading to a single database. When left blank, OpenMetadata Ingestion
 * attempts to scan all the databases in SingleStore.
 * attempts to scan all the databases in Mysql.
 *
 * Database of the data source. This is an optional parameter, if you would like to restrict
 * the metadata reading to a single database. When left blank, OpenMetadata Ingestion
@ -864,17 +865,13 @@ export interface Connection {
 * SQLAlchemy driver scheme options.
 */
scheme?: Scheme;
supportsProfiler?: boolean;
supportsUsageExtraction?: boolean;
/**
 * OpenMetadata Tag category name if enablePolicyTagImport is set to true.
 */
tagCategoryName?: string;
/**
 * AWS Athena AWS Region.
 *
 * AWS Region Name.
 */
awsRegion?: string;
awsConfig?: S3Credentials;
/**
 * S3 Staging Directory.
 */
@ -911,6 +908,10 @@ export interface Connection {
 * AWS Access key ID.
 */
awsAccessKeyId?: string;
/**
 * AWS Region Name.
 */
awsRegion?: string;
/**
 * AWS Secret Access Key.
 */
@ -1055,6 +1056,32 @@ export interface Connection {
securityConfig?: SsoClientConfig;
}

/**
 * AWS S3 credentials configs.
 */
export interface S3Credentials {
  /**
   * AWS Access key ID.
   */
  awsAccessKeyId: string;
  /**
   * AWS Region
   */
  awsRegion: string;
  /**
   * AWS Secret Access Key.
   */
  awsSecretAccessKey: string;
  /**
   * AWS Session Token.
   */
  awsSessionToken?: string;
  /**
   * EndPoint URL for the AWS
   */
  endPointURL?: string;
}

/**
 * GCS Credentials
 *
@ -1265,6 +1292,10 @@ export interface ConfigClass {
   * Regex exclude tables or databases that matches the pattern.
   */
  tableFilterPattern?: FilterPattern;
  /**
   * Pipeline type
   */
  type?: ConfigType;
  /**
   * Configuration to tune how far we want to look back in query logs to process usage data.
   */
@ -1290,6 +1321,10 @@ export interface ConfigClass {
   * Regex to only fetch topics that matches the pattern.
   */
  topicFilterPattern?: FilterPattern;
  /**
   * Regex to only fetch tables with FQN matching the pattern.
   */
  fqnFilterPattern?: FilterPattern;
}

/**
@ -1300,6 +1335,8 @@ export interface ConfigClass {
 * Regex exclude tables or databases that matches the pattern.
 *
 * Regex to only fetch topics that matches the pattern.
 *
 * Regex to only fetch tables with FQN matching the pattern.
 */
export interface FilterPattern {
  /**
@ -1371,3 +1408,24 @@ export enum DbtProvider {
  Local = 'local',
  S3 = 's3',
}

/**
 * Pipeline type
 *
 * Database Source Config Metadata Pipeline type
 *
 * Database Source Config Usage Pipeline type
 *
 * Dashboard Source Config Metadata Pipeline type
 *
 * Messaging Source Config Metadata Pipeline type
 *
 * Profiler Source Config Pipeline type
 */
export enum ConfigType {
  DashboardMetadata = 'DashboardMetadata',
  DatabaseMetadata = 'DatabaseMetadata',
  DatabaseUsage = 'DatabaseUsage',
  MessagingMetadata = 'MessagingMetadata',
  Profiler = 'Profiler',
}

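The ConfigType values pair up with the PipelineType enum added above: usage and profiler pipelines have a single config type, while metadata pipelines resolve to a service-specific variant. A sketch of that assumed mapping — toConfigType is illustrative only; the real association is driven by the JSON schemas that generate these types:

// Illustrative mapping only, not part of the generated code.
const toConfigType = (
  pipelineType: PipelineType,
  serviceKind: 'database' | 'dashboard' | 'messaging'
): ConfigType => {
  switch (pipelineType) {
    case PipelineType.Usage:
      return ConfigType.DatabaseUsage;
    case PipelineType.Profiler:
      return ConfigType.Profiler;
    default:
      // Metadata pipelines: pick the variant matching the service kind.
      return serviceKind === 'dashboard'
        ? ConfigType.DashboardMetadata
        : serviceKind === 'messaging'
        ? ConfigType.MessagingMetadata
        : ConfigType.DatabaseMetadata;
  }
};
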
@ -26,4 +26,17 @@ export interface DatabaseServiceQueryUsagePipelineClass {
   * required.
   */
  stageFileLocation?: string;
  /**
   * Pipeline type
   */
  type?: DatabaseUsageConfigType;
}

/**
 * Pipeline type
 *
 * Database Source Config Usage Pipeline type
 */
export enum DatabaseUsageConfigType {
  DatabaseUsage = 'DatabaseUsage',
}

@ -57,6 +57,8 @@ const jsonData = {

  'deploy-ingestion-error': 'Error while deploying ingestion workflow!',

  'entity-already-exist-error': 'Entity already exists!',

  'fetch-airflow-config-error':
    'Error occurred while fetching airflow configs!',
  'fetch-auth-config-error': 'Error occurred while fetching auth configs!',

@ -0,0 +1,170 @@
/*
 * Copyright 2021 Collate
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import { AxiosError, AxiosResponse } from 'axios';
import { capitalize } from 'lodash';
import React, { useEffect, useState } from 'react';
import { useHistory, useParams } from 'react-router-dom';
import {
  addIngestionPipeline,
  getIngestionPipelineByFqn,
} from '../../axiosAPIs/ingestionPipelineAPI';
import { getServiceByFQN } from '../../axiosAPIs/serviceAPI';
import AddIngestion from '../../components/AddIngestion/AddIngestion.component';
import ErrorPlaceHolder from '../../components/common/error-with-placeholder/ErrorPlaceHolder';
import PageContainerV1 from '../../components/containers/PageContainerV1';
import PageLayout from '../../components/containers/PageLayout';
import Loader from '../../components/Loader/Loader';
import { getServiceDetailsPath } from '../../constants/constants';
import { FormSubmitType } from '../../enums/form.enum';
import { PageLayoutType } from '../../enums/layout.enum';
import { ServiceCategory } from '../../enums/service.enum';
import { CreateIngestionPipeline } from '../../generated/api/services/ingestionPipelines/createIngestionPipeline';
import { PipelineType } from '../../generated/entity/services/ingestionPipelines/ingestionPipeline';
import { DataObj } from '../../interface/service.interface';
import jsonData from '../../jsons/en';
import { getEntityMissingError } from '../../utils/CommonUtils';
import { getServiceIngestionStepGuide } from '../../utils/ServiceUtils';
import { showErrorToast } from '../../utils/ToastUtils';

const AddIngestionPage = () => {
  const { ingestionType, serviceFQN, serviceCategory } =
    useParams<{ [key: string]: string }>();
  const history = useHistory();
  const [serviceData, setServiceData] = useState<DataObj>();
  const [activeIngestionStep, setActiveIngestionStep] = useState(1);
  const [isLoading, setIsLoading] = useState(true);
  const [isError, setIsError] = useState(false);

  const fetchServiceDetails = () => {
    getServiceByFQN(serviceCategory, serviceFQN)
      .then((resService: AxiosResponse) => {
        if (resService.data) {
          setServiceData(resService.data);
        } else {
          showErrorToast(jsonData['api-error-messages']['fetch-service-error']);
        }
      })
      .catch((error: AxiosError) => {
        if (error.response?.status === 404) {
          setIsError(true);
        } else {
          showErrorToast(
            error,
            jsonData['api-error-messages']['fetch-service-error']
          );
        }
      })
      .finally(() => setIsLoading(false));
  };

  const onAddIngestionSave = (data: CreateIngestionPipeline) => {
    return new Promise<void>((resolve, reject) => {
      return addIngestionPipeline(data)
        .then((res: AxiosResponse) => {
          if (res.data) {
            resolve();
          } else {
            showErrorToast(
              jsonData['api-error-messages']['create-ingestion-error']
            );
            reject();
          }
        })
        .catch((err: AxiosError) => {
          if (err.response?.status === 409) {
            showErrorToast(
              err,
              jsonData['api-error-messages']['entity-already-exist-error']
            );
          } else {
            // The pipeline may have been created but failed to deploy to
            // Airflow: if it is fetchable by FQN, surface only a deploy error.
            getIngestionPipelineByFqn(`${serviceData?.name}.${data.name}`)
              .then((res: AxiosResponse) => {
                if (res.data) {
                  resolve();
                  showErrorToast(
                    err,
                    jsonData['api-error-messages']['deploy-ingestion-error']
                  );
                } else {
                  throw jsonData['api-error-messages'][
                    'unexpected-server-response'
                  ];
                }
              })
              .catch(() => {
                showErrorToast(
                  err,
                  jsonData['api-error-messages']['create-ingestion-error']
                );
                reject();
              });
          }
        });
    });
  };

  const goToService = () => {
    history.push(
      getServiceDetailsPath(serviceFQN, serviceCategory, 'ingestions')
    );
  };

  const renderAddIngestionPage = () => {
    if (isLoading) {
      return <Loader />;
    } else if (isError) {
      return (
        <ErrorPlaceHolder>
          {getEntityMissingError(serviceCategory, serviceFQN)}
        </ErrorPlaceHolder>
      );
    } else {
      return (
        <PageLayout
          classes="tw-max-w-full-hd tw-h-full tw-pt-4"
          layout={PageLayoutType['2ColRTL']}
          rightPanel={getServiceIngestionStepGuide(
            activeIngestionStep,
            true,
            `${serviceData?.name || ''}_${ingestionType}`,
            '',
            ingestionType as PipelineType
          )}>
          <div className="tw-form-container">
            <AddIngestion
              activeIngestionStep={activeIngestionStep}
              handleCancelClick={goToService}
              handleViewServiceClick={goToService}
              heading={`Add ${capitalize(ingestionType)} Ingestion`}
              pipelineType={ingestionType as PipelineType}
              serviceCategory={serviceCategory as ServiceCategory}
              serviceData={serviceData as DataObj}
              setActiveIngestionStep={(step) => setActiveIngestionStep(step)}
              status={FormSubmitType.ADD}
              onAddIngestionSave={onAddIngestionSave}
            />
          </div>
        </PageLayout>
      );
    }
  };

  useEffect(() => {
    fetchServiceDetails();
  }, [serviceCategory, serviceFQN]);

  return <PageContainerV1>{renderAddIngestionPage()}</PageContainerV1>;
};

export default AddIngestionPage;

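The catch branch in onAddIngestionSave above encodes a three-way contract: HTTP 409 means the pipeline name is already taken; any other failure triggers a lookup by FQN, and if the pipeline is found, creation succeeded but deployment to Airflow failed, so only a deploy error is surfaced. Condensed as a hypothetical helper — classifySaveError does not exist in the codebase:

// Hypothetical sketch of the branching in onAddIngestionSave.
type SaveFailure = 'name-conflict' | 'deploy-failed' | 'create-failed';

const classifySaveError = async (
  err: AxiosError,
  pipelineFqn: string
): Promise<SaveFailure> => {
  if (err.response?.status === 409) {
    return 'name-conflict';
  }
  try {
    // If the pipeline is fetchable, it was created but not deployed.
    const res = await getIngestionPipelineByFqn(pipelineFqn);

    return res.data ? 'deploy-failed' : 'create-failed';
  } catch {
    return 'create-failed';
  }
};
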
@ -0,0 +1,230 @@
/*
 * Copyright 2021 Collate
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import { AxiosError, AxiosResponse } from 'axios';
import { capitalize } from 'lodash';
import React, { useEffect, useState } from 'react';
import { useHistory, useParams } from 'react-router-dom';
import {
  getIngestionPipelineByFqn,
  updateIngestionPipeline,
} from '../../axiosAPIs/ingestionPipelineAPI';
import { getServiceByFQN } from '../../axiosAPIs/serviceAPI';
import AddIngestion from '../../components/AddIngestion/AddIngestion.component';
import ErrorPlaceHolder from '../../components/common/error-with-placeholder/ErrorPlaceHolder';
import PageContainerV1 from '../../components/containers/PageContainerV1';
import PageLayout from '../../components/containers/PageLayout';
import Loader from '../../components/Loader/Loader';
import { getServiceDetailsPath } from '../../constants/constants';
import { FormSubmitType } from '../../enums/form.enum';
import { PageLayoutType } from '../../enums/layout.enum';
import { ServiceCategory } from '../../enums/service.enum';
import { CreateIngestionPipeline } from '../../generated/api/services/ingestionPipelines/createIngestionPipeline';
import {
  IngestionPipeline,
  PipelineType,
} from '../../generated/entity/services/ingestionPipelines/ingestionPipeline';
import { DataObj } from '../../interface/service.interface';
import jsonData from '../../jsons/en';
import { getEntityMissingError } from '../../utils/CommonUtils';
import { getServiceIngestionStepGuide } from '../../utils/ServiceUtils';
import { showErrorToast } from '../../utils/ToastUtils';

const EditIngestionPage = () => {
  const { ingestionFQN, ingestionType, serviceFQN, serviceCategory } =
    useParams<{ [key: string]: string }>();
  const history = useHistory();
  const [serviceData, setServiceData] = useState<DataObj>();
  const [ingestionData, setIngestionData] = useState<IngestionPipeline>(
    {} as IngestionPipeline
  );
  const [activeIngestionStep, setActiveIngestionStep] = useState(1);
  const [isLoading, setIsLoading] = useState(true);
  const [errorMsg, setErrorMsg] = useState<string | JSX.Element>('');

  const fetchServiceDetails = () => {
    return new Promise<void>((resolve, reject) => {
      getServiceByFQN(serviceCategory, serviceFQN)
        .then((resService: AxiosResponse) => {
          if (resService.data) {
            setServiceData(resService.data);
            resolve();
          } else {
            showErrorToast(
              jsonData['api-error-messages']['fetch-service-error']
            );
          }
        })
        .catch((error: AxiosError) => {
          if (error.response?.status === 404) {
            setErrorMsg(getEntityMissingError(serviceCategory, serviceFQN));
          } else {
            const errTextService =
              jsonData['api-error-messages']['fetch-service-error'];
            showErrorToast(error, errTextService);
            setErrorMsg(errTextService);
          }
          reject();
        });
    });
  };

  const fetchIngestionDetails = () => {
    return new Promise<void>((resolve, reject) => {
      getIngestionPipelineByFqn(ingestionFQN)
        .then((res: AxiosResponse) => {
          if (res.data) {
            setIngestionData(res.data);
            resolve();
          } else {
            throw jsonData['api-error-messages']['unexpected-server-response'];
          }
        })
        .catch((error: AxiosError) => {
          if (error.response?.status === 404) {
            setErrorMsg(getEntityMissingError('Ingestion', ingestionFQN));
          } else {
            const errTextIngestion =
              jsonData['api-error-messages']['fetch-ingestion-error'];
            showErrorToast(error, errTextIngestion);
            setErrorMsg(errTextIngestion);
          }
          reject();
        });
    });
  };

  const fetchData = () => {
    const promises = [fetchServiceDetails(), fetchIngestionDetails()];
    Promise.allSettled(promises).finally(() => setIsLoading(false));
  };

  const onEditIngestionSave = (data: IngestionPipeline) => {
    // Re-shape the stored pipeline into the create payload expected by the
    // PUT endpoint; only the editable fields are picked.
    const {
      airflowConfig,
      description,
      displayName,
      name,
      owner,
      pipelineType,
      service,
      source,
    } = data;
    const updateData = {
      airflowConfig,
      description,
      displayName,
      name,
      owner,
      pipelineType,
      service,
      sourceConfig: source.sourceConfig,
    };

    return new Promise<void>((resolve, reject) => {
      return updateIngestionPipeline(updateData as CreateIngestionPipeline)
        .then((res: AxiosResponse) => {
          if (res.data) {
            resolve();
          } else {
            showErrorToast(
              jsonData['api-error-messages']['update-ingestion-error']
            );
            reject();
          }
        })
        .catch((err: AxiosError) => {
          if (err.response?.status === 409) {
            showErrorToast(
              err,
              jsonData['api-error-messages']['entity-already-exist-error']
            );
          } else {
            getIngestionPipelineByFqn(`${serviceData?.name}.${data.name}`)
              .then((res: AxiosResponse) => {
                if (res.data) {
                  resolve();
                  showErrorToast(
                    err,
                    jsonData['api-error-messages']['deploy-ingestion-error']
                  );
                } else {
                  throw jsonData['api-error-messages'][
                    'unexpected-server-response'
                  ];
                }
              })
              .catch(() => {
                showErrorToast(
                  err,
                  jsonData['api-error-messages']['update-ingestion-error']
                );
                reject();
              });
          }
        });
    });
  };

  const goToService = () => {
    history.push(
      getServiceDetailsPath(serviceFQN, serviceCategory, 'ingestions')
    );
  };

  const renderEditIngestionPage = () => {
    if (isLoading) {
      return <Loader />;
    } else if (errorMsg) {
      return <ErrorPlaceHolder>{errorMsg}</ErrorPlaceHolder>;
    } else {
      return (
        <PageLayout
          classes="tw-max-w-full-hd tw-h-full tw-pt-4"
          layout={PageLayoutType['2ColRTL']}
          rightPanel={getServiceIngestionStepGuide(
            activeIngestionStep,
            true,
            ingestionData?.name || '',
            '',
            ingestionType as PipelineType
          )}>
          <div className="tw-form-container">
            <AddIngestion
              activeIngestionStep={activeIngestionStep}
              data={ingestionData}
              handleCancelClick={goToService}
              handleViewServiceClick={goToService}
              heading={`Edit ${capitalize(ingestionType)} Ingestion`}
              pipelineType={ingestionType as PipelineType}
              serviceCategory={serviceCategory as ServiceCategory}
              serviceData={serviceData as DataObj}
              setActiveIngestionStep={(step) => setActiveIngestionStep(step)}
              status={FormSubmitType.EDIT}
              onSuccessSave={goToService}
              onUpdateIngestion={onEditIngestionSave}
            />
          </div>
        </PageLayout>
      );
    }
  };

  useEffect(() => {
    fetchData();
  }, [serviceCategory, serviceFQN]);

  return <PageContainerV1>{renderEditIngestionPage()}</PageContainerV1>;
};

export default EditIngestionPage;

@ -13,7 +13,6 @@

import { AxiosError, AxiosResponse } from 'axios';
import classNames from 'classnames';
import { compare } from 'fast-json-patch';
import { isNil, isUndefined } from 'lodash';
import { ExtraInfo, ServicesData } from 'Models';
import React, { Fragment, FunctionComponent, useEffect, useState } from 'react';
@ -22,12 +21,9 @@ import { useAuthContext } from '../../authentication/auth-provider/AuthProvider'
import { getDashboards } from '../../axiosAPIs/dashboardAPI';
import { getDatabases } from '../../axiosAPIs/databaseAPI';
import {
  addIngestionPipeline,
  deleteIngestionPipelineById,
  getIngestionPipelineByFqn,
  getIngestionPipelines,
  triggerIngestionPipelineById,
  updateIngestionPipeline,
} from '../../axiosAPIs/ingestionPipelineAPI';
import { fetchAirflowConfig } from '../../axiosAPIs/miscAPI';
import { getPipelines } from '../../axiosAPIs/pipelineAPI';
@ -54,7 +50,6 @@ import {
} from '../../constants/constants';
import { SearchIndex } from '../../enums/search.enum';
import { ServiceCategory } from '../../enums/service.enum';
import { CreateIngestionPipeline } from '../../generated/api/services/ingestionPipelines/createIngestionPipeline';
import { Dashboard } from '../../generated/entity/data/dashboard';
import { Database } from '../../generated/entity/data/database';
import { Pipeline } from '../../generated/entity/data/pipeline';
@ -303,80 +298,6 @@ const ServicePage: FunctionComponent = () => {
    }).finally(() => setIsloading(false));
  };

  const updateIngestion = (
    data: IngestionPipeline,
    oldData: IngestionPipeline,
    id: string,
    displayName: string,
    triggerIngestion?: boolean
  ): Promise<void> => {
    const jsonPatch = compare(oldData, data);

    return new Promise<void>((resolve, reject) => {
      updateIngestionPipeline(id, jsonPatch)
        .then(() => {
          resolve();
          getAllIngestionWorkflows();
          if (triggerIngestion) {
            triggerIngestionById(id, displayName).catch((error: AxiosError) => {
              showErrorToast(
                error,
                `${jsonData['api-error-messages']['triggering-ingestion-error']} ${displayName}`
              );
            });
          }
        })
        .catch((error: AxiosError) => {
          showErrorToast(
            error,
            `${jsonData['api-error-messages']['update-ingestion-error']}`
          );
          reject();
        });
    });
  };

  const onAddIngestionSave = (data: CreateIngestionPipeline) => {
    return new Promise<void>((resolve, reject) => {
      return addIngestionPipeline(data)
        .then((res: AxiosResponse) => {
          if (res.data) {
            getAllIngestionWorkflows();
            resolve();
          } else {
            showErrorToast(
              jsonData['api-error-messages']['create-ingestion-error']
            );
            reject();
          }
        })
        .catch((error: AxiosError) => {
          getIngestionPipelineByFqn(`${serviceDetails?.name}.${data.name}`)
            .then((res: AxiosResponse) => {
              if (res.data) {
                resolve();
                getAllIngestionWorkflows();
                showErrorToast(
                  error,
                  jsonData['api-error-messages']['deploy-ingestion-error']
                );
              } else {
                throw jsonData['api-error-messages'][
                  'unexpected-server-response'
                ];
              }
            })
            .catch(() => {
              showErrorToast(
                error,
                jsonData['api-error-messages']['create-ingestion-error']
              );
              reject();
            });
        });
    });
  };

  const handleConfigUpdate = (
    updatedData: ServicesData,
    serviceCategory: ServiceCategory
@ -961,7 +882,6 @@ const ServicePage: FunctionComponent = () => {
          <div data-testid="ingestion-container">
            <Ingestion
              isRequiredDetailsAvailable
              addIngestion={onAddIngestionSave}
              airflowEndpoint={airflowEndpoint}
              currrentPage={ingestionCurrentPage}
              deleteIngestion={deleteIngestionById}
@ -973,7 +893,6 @@ const ServicePage: FunctionComponent = () => {
              serviceList={serviceList}
              serviceName={serviceFQN}
              triggerIngestion={triggerIngestionById}
              updateIngestion={updateIngestion}
            />
          </div>
        ))}

@ -18,6 +18,7 @@ import AppState from '../AppState';
import { ROUTES } from '../constants/constants';
import AddGlossaryPage from '../pages/AddGlossary/AddGlossaryPage.component';
import AddGlossaryTermPage from '../pages/AddGlossaryTermPage/AddGlossaryTermPage.component';
import AddIngestionPage from '../pages/AddIngestionPage/AddIngestionPage.component';
import AddServicePage from '../pages/AddServicePage/AddServicePage.component';
import AddWebhookPage from '../pages/AddWebhookPage/AddWebhookPage.component';
import CreateUserPage from '../pages/CreateUserPage/CreateUserPage.component';
@ -25,6 +26,7 @@ import DashboardDetailsPage from '../pages/DashboardDetailsPage/DashboardDetails
import DatabaseDetails from '../pages/database-details/index';
import DatabaseSchemaPageComponent from '../pages/DatabaseSchemaPage/DatabaseSchemaPage.component';
import DatasetDetailsPage from '../pages/DatasetDetailsPage/DatasetDetailsPage.component';
import EditIngestionPage from '../pages/EditIngestionPage/EditIngestionPage.component';
import EditWebhookPage from '../pages/EditWebhookPage/EditWebhookPage.component';
import EntityVersionPage from '../pages/EntityVersionPage/EntityVersionPage.component';
import ExplorePage from '../pages/explore/ExplorePage.component';
@ -65,6 +67,16 @@ const AuthenticatedAppRouter: FunctionComponent = () => {
      <Route exact component={ServicePage} path={ROUTES.SERVICE} />
      <Route exact component={ServicePage} path={ROUTES.SERVICE_WITH_TAB} />
      <Route exact component={AddServicePage} path={ROUTES.ADD_SERVICE} />
      <AdminProtectedRoute
        exact
        component={AddIngestionPage}
        path={ROUTES.ADD_INGESTION}
      />
      <AdminProtectedRoute
        exact
        component={EditIngestionPage}
        path={ROUTES.EDIT_INGESTION}
      />
      <Route exact component={SignupPage} path={ROUTES.SIGNUP}>
        {!isEmpty(AppState.userDetails) && <Redirect to={ROUTES.HOME} />}
      </Route>

@ -13,7 +13,10 @@

import {
  IN_PAGE_SEARCH_ROUTES,
  PLACEHOLDER_ROUTE_INGESTION_FQN,
  PLACEHOLDER_ROUTE_INGESTION_TYPE,
  PLACEHOLDER_ROUTE_SERVICE_CAT,
  PLACEHOLDER_ROUTE_SERVICE_FQN,
  ROUTES,
} from '../constants/constants';

@ -46,3 +49,33 @@ export const getAddServicePath = (serviceCategory: string) => {

  return path;
};

export const getAddIngestionPath = (
  serviceCategory: string,
  serviceFQN: string,
  ingestionType: string
) => {
  let path = ROUTES.ADD_INGESTION;
  path = path
    .replace(PLACEHOLDER_ROUTE_SERVICE_CAT, serviceCategory)
    .replace(PLACEHOLDER_ROUTE_SERVICE_FQN, serviceFQN)
    .replace(PLACEHOLDER_ROUTE_INGESTION_TYPE, ingestionType);

  return path;
};

export const getEditIngestionPath = (
  serviceCategory: string,
  serviceFQN: string,
  ingestionFQN: string,
  ingestionType: string
) => {
  let path = ROUTES.EDIT_INGESTION;
  path = path
    .replace(PLACEHOLDER_ROUTE_SERVICE_CAT, serviceCategory)
    .replace(PLACEHOLDER_ROUTE_SERVICE_FQN, serviceFQN)
    .replace(PLACEHOLDER_ROUTE_INGESTION_FQN, ingestionFQN)
    .replace(PLACEHOLDER_ROUTE_INGESTION_TYPE, ingestionType);

  return path;
};

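For example, assuming ROUTES.ADD_INGESTION is a template along the lines of '/service/:serviceCategory/:serviceFQN/add-ingestion/:ingestionType' (the real templates and placeholders live in ../constants/constants), the helpers resolve to concrete paths:

// Values below are placeholders; the exact route shape comes from constants.
const addPath = getAddIngestionPath('databaseServices', 'sample_data', 'metadata');
// e.g. '/service/databaseServices/sample_data/add-ingestion/metadata'

const editPath = getEditIngestionPath(
  'databaseServices',
  'sample_data',
  'sample_data.metadata_pipeline',
  'metadata'
);
// e.g. '/service/databaseServices/sample_data/edit-ingestion/sample_data.metadata_pipeline/metadata'
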
@ -21,7 +21,14 @@ import {
  ServicesData,
  ServiceTypes,
} from 'Models';
import React from 'react';
import { getServiceDetails, getServices } from '../axiosAPIs/serviceAPI';
import {
  addMetadataIngestionGuide,
  addProfilerIngestionGuide,
  addServiceGuide,
  addUsageIngestionGuide,
} from '../constants/service-guide.constant';
import {
  AIRFLOW,
  arrServiceTypes,
@ -65,6 +72,7 @@ import {
import { ServiceCategory } from '../enums/service.enum';
import { DashboardServiceType } from '../generated/entity/services/dashboardService';
import { DatabaseServiceType } from '../generated/entity/services/databaseService';
import { PipelineType as IngestionPipelineType } from '../generated/entity/services/ingestionPipelines/ingestionPipeline';
import { MessagingServiceType } from '../generated/entity/services/messagingService';
import { PipelineServiceType } from '../generated/entity/services/pipelineService';
import { PipelineType } from '../generated/operations/pipelines/airflowPipeline';
@ -264,7 +272,7 @@ export const getAllServices = (
      }
    }
    getAllServiceList(allServiceCollectionArr, limit)
      .then((res) => resolve(res))
      .then((resAll) => resolve(resAll))
      .catch((err) => reject(err));
  });
});
@ -366,14 +374,12 @@ export const isIngestionSupported = (serviceCategory: ServiceCategory) => {
};

export const getKeyValuePair = (obj: DynamicObj) => {
  const newObj = Object.entries(obj).map((v) => {
  return Object.entries(obj).map((v) => {
    return {
      key: v[0],
      value: v[1],
    };
  });

  return newObj;
};

export const getKeyValueObject = (arr: DynamicFormFieldType[]) => {
@ -445,7 +451,7 @@ export const servicePageTabs = (entity: string) => [
];

export const getCurrentServiceTab = (tab: string) => {
  let currentTab = 1;
  let currentTab;
  switch (tab) {
    case 'ingestions':
      currentTab = 2;
@ -482,3 +488,58 @@ export const getFormattedGuideText = (

  return text.replace(regExp, replacement);
};

export const getServiceIngestionStepGuide = (
  step: number,
  isIngestion: boolean,
  ingestionName: string,
  serviceName: string,
  ingestionType: IngestionPipelineType
) => {
  let guide;
  if (isIngestion) {
    switch (ingestionType) {
      case IngestionPipelineType.Usage: {
        guide = addUsageIngestionGuide.find((item) => item.step === step);

        break;
      }
      case IngestionPipelineType.Profiler: {
        guide = addProfilerIngestionGuide.find((item) => item.step === step);

        break;
      }
      case IngestionPipelineType.Metadata:
      default: {
        guide = addMetadataIngestionGuide.find((item) => item.step === step);

        break;
      }
    }
  } else {
    guide = addServiceGuide.find((item) => item.step === step);
  }

  return (
    <>
      {guide && (
        <>
          <h6 className="tw-heading tw-text-base">{guide.title}</h6>
          <div className="tw-mb-5">
            {isIngestion
              ? getFormattedGuideText(
                  guide.description,
                  '<Ingestion Pipeline Name>',
                  `${ingestionName}`
                )
              : getFormattedGuideText(
                  guide.description,
                  '<Service Name>',
                  serviceName
                )}
          </div>
        </>
      )}
    </>
  );
};

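The getKeyValuePair change above drops the intermediate newObj binding and returns the mapped array directly; the output shape is unchanged:

// Behavior of the refactored helper, with placeholder input:
getKeyValuePair({ region: 'us-east-1', bucket: 'demo' });
// => [
//   { key: 'region', value: 'us-east-1' },
//   { key: 'bucket', value: 'demo' },
// ]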