Mirror of https://github.com/open-metadata/OpenMetadata.git, synced 2026-01-06 04:26:57 +00:00
fix issue: 4558 UI: Add support to enable debug logging in Metadata Ingestion form (#4616)

* fix issue: 4558 UI: Add support to enable debug logging in Metadata Ingestion form
* addressing comment

Parent: 0f63a4063e
Commit: 7f323e5ecc
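
At a glance, the change wires a new "Enable Debug Log" toggle in the Add Ingestion form to the ingestion pipeline's loggerLevel: DEBUG when the toggle is on, INFO otherwise. A minimal TypeScript sketch of that mapping, assuming the LogLevels enum generated in createIngestionPipeline.ts (the helper name is illustrative and not part of the diff):

// Supported logging levels, as generated from the ingestion pipeline schema.
export enum LogLevels {
  Debug = 'DEBUG',
  Error = 'ERROR',
  Info = 'INFO',
  Warn = 'WARN',
}

// Illustrative helper: translate the UI toggle into the pipeline's loggerLevel,
// mirroring the ternary used in the hunks below.
export const getLoggerLevel = (enableDebugLog: boolean): LogLevels =>
  enableDebugLog ? LogLevels.Debug : LogLevels.Info;
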
@@ -25,6 +25,7 @@ import { ServiceCategory } from '../../enums/service.enum';
import {
ConfigClass,
CreateIngestionPipeline,
LogLevels,
PipelineType,
} from '../../generated/api/services/ingestionPipelines/createIngestionPipeline';
import {
@@ -156,6 +157,9 @@ const AddIngestion = ({
(data?.source.sourceConfig.config as ConfigClass)?.generateSampleData ??
true
);
const [enableDebugLog, setEnableDebugLog] = useState(
isUndefined(data?.loggerLevel) ?? data?.loggerLevel === LogLevels.Debug
);
const [dashboardFilterPattern, setDashboardFilterPattern] =
useState<FilterPattern>(
(data?.source.sourceConfig.config as ConfigClass)
@@ -380,6 +384,7 @@ const AddIngestion = ({
scheduleInterval: repeatFrequency,
forceDeploy: true,
},
loggerLevel: enableDebugLog ? LogLevels.Debug : LogLevels.Info,
name: ingestionName,
displayName: ingestionName,
owner: {
@@ -426,6 +431,7 @@ const AddIngestion = ({
endDate: (endDate as unknown as Date) || null,
scheduleInterval: repeatFrequency,
},
loggerLevel: enableDebugLog ? LogLevels.Debug : LogLevels.Info,
source: {
...data.source,
sourceConfig: {
@@ -480,6 +486,7 @@ const AddIngestion = ({
dashboardFilterPattern={dashboardFilterPattern}
description={description}
enableDataProfiler={enableDataProfiler}
enableDebugLog={enableDebugLog}
fqnFilterPattern={fqnFilterPattern}
getExcludeValue={getExcludeValue}
getIncludeValue={getIncludeValue}
@@ -487,6 +494,7 @@ const AddIngestion = ({
handleEnableDataProfiler={() =>
setEnableDataProfiler((pre) => !pre)
}
handleEnableDebugLog={() => setEnableDebugLog((pre) => !pre)}
handleIncludeView={() => setIncludeView((pre) => !pre)}
handleIngestSampleData={() => setIngestSampleData((pre) => !pre)}
handleIngestionName={(val) => setIngestionName(val)}
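
When an existing pipeline is being edited, the hunk at @@ -156 above seeds the toggle from the saved loggerLevel. A minimal illustrative sketch of reading that field back into a boolean (this helper is not part of the diff; the component keeps its check inline in useState):

import { LogLevels } from '../../generated/api/services/ingestionPipelines/createIngestionPipeline';

// Illustrative: treat the toggle as on only when the saved pipeline
// explicitly requests the DEBUG level.
const isDebugLogEnabled = (loggerLevel?: LogLevels): boolean =>
  loggerLevel === LogLevels.Debug;
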
@@ -82,6 +82,8 @@ const mockConfigureIngestion: ConfigureIngestionProps = {
onCancel: jest.fn(),
onNext: jest.fn(),
serviceCategory: ServiceCategory.DATABASE_SERVICES,
enableDebugLog: false,
handleEnableDebugLog: jest.fn(),
};

describe('Test ConfigureIngestion component', () => {
@@ -110,6 +112,6 @@ describe('Test ConfigureIngestion component', () => {
expect(backButton).toBeInTheDocument();
expect(nextButton).toBeInTheDocument();
expect(filterPatternComponents.length).toBe(2);
expect(toggleSwitchs.length).toBe(4);
expect(toggleSwitchs.length).toBe(5);
});
});

@@ -49,6 +49,8 @@ const ConfigureIngestion = ({
queryLogDuration,
stageFileLocation,
resultLimit,
enableDebugLog,
handleEnableDebugLog,
getExcludeValue,
getIncludeValue,
handleIngestionName,
@@ -208,6 +210,17 @@ const ConfigureIngestion = ({
</p>
{getSeparator('')}
</Field>
<Field>
<div className="tw-flex tw-gap-1">
<label>Enable Debug Log</label>
<ToggleSwitchV1
checked={enableDebugLog}
handleCheck={handleEnableDebugLog}
/>
</div>
<p className="tw-text-grey-muted tw-mt-3">Enable debug logging</p>
{getSeparator('')}
</Field>
{!isNil(markDeletedTables) && (
<Field>
<div className="tw-flex tw-gap-1">
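
The new field follows the existing controlled-toggle pattern: ConfigureIngestion renders the switch while AddIngestion owns the boolean and flips it. A self-contained sketch of that wiring, assuming a ToggleSwitchV1-like checked/handleCheck contract (the stand-in component below is illustrative, not the library widget):

import React, { useState } from 'react';

// Illustrative stand-in for ToggleSwitchV1: a checkbox driven by the same
// checked/handleCheck contract used in the diff above.
const ToggleSwitch = ({
  checked,
  handleCheck,
}: {
  checked: boolean;
  handleCheck: () => void;
}) => <input checked={checked} type="checkbox" onChange={handleCheck} />;

// The parent owns the state, exactly as AddIngestion does for enableDebugLog.
const DebugLogField = () => {
  const [enableDebugLog, setEnableDebugLog] = useState(false);

  return (
    <div>
      <label>Enable Debug Log</label>
      <ToggleSwitch
        checked={enableDebugLog}
        handleCheck={() => setEnableDebugLog((pre) => !pre)}
      />
    </div>
  );
};

export default DebugLogField;
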
@@ -59,6 +59,7 @@ export interface ConfigureIngestionProps {
includeView: boolean;
markDeletedTables?: boolean;
enableDataProfiler: boolean;
enableDebugLog: boolean;
ingestSampleData: boolean;
pipelineType: PipelineType;
showDashboardFilter: boolean;
@@ -76,6 +77,7 @@ export interface ConfigureIngestionProps {
handleMarkDeletedTables?: () => void;
handleEnableDataProfiler: () => void;
handleIngestSampleData: () => void;
handleEnableDebugLog: () => void;
getIncludeValue: (value: string[], type: FilterPatternEnum) => void;
getExcludeValue: (value: string[], type: FilterPatternEnum) => void;
handleShowFilter: (value: boolean, type: FilterPatternEnum) => void;

@@ -25,6 +25,10 @@ export interface CreateIngestionPipeline {
* Display Name that identifies this pipeline.
*/
displayName?: string;
/**
* Set the logging level for the workflow.
*/
loggerLevel?: LogLevels;
/**
* Name that identifies this pipeline instance uniquely.
*/
@@ -107,6 +111,18 @@ export interface AirflowConfig {
workflowTimeout?: number;
}

/**
* Set the logging level for the workflow.
*
* Supported logging levels
*/
export enum LogLevels {
Debug = 'DEBUG',
Error = 'ERROR',
Info = 'INFO',
Warn = 'WARN',
}

/**
* Owner of this Pipeline.
*
@@ -173,25 +189,9 @@ export interface SourceConfig {

export interface ConfigClass {
/**
* DBT Catalog file name
* Available sources to fetch DBT catalog and manifest files.
*/
dbtCatalogFileName?: string;
dbtConfig?: DbtConfig;
/**
* DBT Manifest file name
*/
dbtManifestFileName?: string;
/**
* Method from which the DBT files will be fetched. Accepted values are: 's3'(Required aws
* s3 credentials to be provided), 'gcs'(Required gcs credentials to be provided),
* 'gcs-path'(path of the file containing gcs credentials), 'local'(path of dbt files on
* local system), 'http'(url path of dbt files).
*/
dbtProvider?: DbtProvider;
/**
* DBT configuration.
*/
dbtSecurityConfig?: SCredentials;
dbtConfigSource?: any[] | boolean | number | null | DbtConfigSource | string;
/**
* Run data profiler as part of this metadata ingestion to get table profile data.
*/
@@ -284,44 +284,35 @@ export interface FilterPattern {

/**
* DBT Catalog and Manifest file path config.
*
* DBT Catalog and Manifest HTTP path configuration.
*/
export interface DbtConfig {
export interface DbtConfigSource {
/**
* DBT catalog file to extract dbt models with their column schemas.
* DBT catalog file path to extract dbt models with their column schemas.
*/
dbtCatalogFilePath: string;
dbtCatalogFilePath?: string;
/**
* DBT manifest file path to extract dbt models and associate with tables.
*/
dbtManifestFilePath: string;
dbtManifestFilePath?: string;
/**
* DBT catalog http file path to extract dbt models with their column schemas.
*/
dbtCatalogHttpPath?: string;
/**
* DBT manifest http file path to extract dbt models and associate with tables.
*/
dbtManifestHttpPath?: string;
dbtSecurityConfig?: SCredentials;
}

/**
* Method from which the DBT files will be fetched. Accepted values are: 's3'(Required aws
* s3 credentials to be provided), 'gcs'(Required gcs credentials to be provided),
* 'gcs-path'(path of the file containing gcs credentials), 'local'(path of dbt files on
* local system), 'http'(url path of dbt files).
*/
export enum DbtProvider {
Gcs = 'gcs',
GcsPath = 'gcs-path',
HTTP = 'http',
Local = 'local',
S3 = 's3',
}

/**
* DBT configuration.
* AWS credentials configs.
*
* GCS credentials configs.
*
* AWS credentials configs.
*/
export interface SCredentials {
/**
* GCS configs.
*/
gcsConfig?: GCSCredentialsValues | string;
/**
* AWS Access key ID.
*/
@@ -342,6 +333,10 @@ export interface SCredentials {
* EndPoint URL for the AWS
*/
endPointURL?: string;
/**
* GCS configs.
*/
gcsConfig?: GCSCredentialsValues | string;
}

/**
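
Taken together, the regenerated types above let an ingestion config point at dbt files by local path or by HTTP path. A minimal illustrative example for the new DbtConfigSource shape (paths and the import path are placeholders mirroring the diff; every field is optional in the new type):

import { DbtConfigSource } from '../../generated/api/services/ingestionPipelines/createIngestionPipeline';

// Illustrative values only: local catalog/manifest files on disk.
const dbtLocalSource: DbtConfigSource = {
  dbtCatalogFilePath: '/tmp/catalog.json',
  dbtManifestFilePath: '/tmp/manifest.json',
};

// Or the same files fetched over HTTP.
const dbtHttpSource: DbtConfigSource = {
  dbtCatalogHttpPath: 'https://example.com/catalog.json',
  dbtManifestHttpPath: 'https://example.com/manifest.json',
};
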
@@ -50,6 +50,10 @@ export interface IngestionPipeline {
* Unique identifier that identifies this pipeline.
*/
id?: string;
/**
* Set the logging level for the workflow.
*/
loggerLevel?: LogLevels;
/**
* Name that identifies this pipeline instance uniquely.
*/
@@ -191,6 +195,18 @@ export interface FieldChange {
oldValue?: any;
}

/**
* Set the logging level for the workflow.
*
* Supported logging levels
*/
export enum LogLevels {
Debug = 'DEBUG',
Error = 'ERROR',
Info = 'INFO',
Warn = 'WARN',
}

/**
* OpenMetadata Connection Config
*/
@@ -623,7 +639,7 @@ export interface Connection {
*
* password to connect to the Snowflake.
*
* password to connect to the Trino.
* password to connect to the Trino.
*
* password to connect to the Vertica.
*
@@ -896,6 +912,10 @@ export interface Connection {
* Snowflake Role.
*/
role?: string;
/**
* Snowflake Passphrase Key used with Private Key
*/
snowflakePrivatekeyPassphrase?: string;
/**
* Snowflake warehouse.
*/
@@ -903,11 +923,11 @@ export interface Connection {
/**
* URL parameters for connection to the Trino data source
*/
params?: { [key: string]: any };
params?: { [key: string]: string };
/**
* Proxies for the connection to Trino data source
*/
proxies?: { [key: string]: any };
proxies?: { [key: string]: string };
/**
* Sample Data File Path
*/
@@ -1172,25 +1192,9 @@ export interface SourceConfig {

export interface ConfigClass {
/**
* DBT Catalog file name
* Available sources to fetch DBT catalog and manifest files.
*/
dbtCatalogFileName?: string;
dbtConfig?: DbtConfig;
/**
* DBT Manifest file name
*/
dbtManifestFileName?: string;
/**
* Method from which the DBT files will be fetched. Accepted values are: 's3'(Required aws
* s3 credentials to be provided), 'gcs'(Required gcs credentials to be provided),
* 'gcs-path'(path of the file containing gcs credentials), 'local'(path of dbt files on
* local system), 'http'(url path of dbt files).
*/
dbtProvider?: DbtProvider;
/**
* DBT configuration.
*/
dbtSecurityConfig?: SCredentials;
dbtConfigSource?: any[] | boolean | number | null | DbtConfigSource | string;
/**
* Run data profiler as part of this metadata ingestion to get table profile data.
*/
@@ -1283,46 +1287,37 @@ export interface FilterPattern {

/**
* DBT Catalog and Manifest file path config.
*
* DBT Catalog and Manifest HTTP path configuration.
*/
export interface DbtConfig {
export interface DbtConfigSource {
/**
* DBT catalog file to extract dbt models with their column schemas.
* DBT catalog file path to extract dbt models with their column schemas.
*/
dbtCatalogFilePath: string;
dbtCatalogFilePath?: string;
/**
* DBT manifest file path to extract dbt models and associate with tables.
*/
dbtManifestFilePath: string;
dbtManifestFilePath?: string;
/**
* DBT catalog http file path to extract dbt models with their column schemas.
*/
dbtCatalogHttpPath?: string;
/**
* DBT manifest http file path to extract dbt models and associate with tables.
*/
dbtManifestHttpPath?: string;
dbtSecurityConfig?: SCredentials;
}

/**
* Method from which the DBT files will be fetched. Accepted values are: 's3'(Required aws
* s3 credentials to be provided), 'gcs'(Required gcs credentials to be provided),
* 'gcs-path'(path of the file containing gcs credentials), 'local'(path of dbt files on
* local system), 'http'(url path of dbt files).
*/
export enum DbtProvider {
Gcs = 'gcs',
GcsPath = 'gcs-path',
HTTP = 'http',
Local = 'local',
S3 = 's3',
}

/**
* DBT configuration.
* AWS credentials configs.
*
* GCS Credentials
*
* GCS credentials configs.
*
* AWS credentials configs.
*/
export interface SCredentials {
/**
* GCS configs.
*/
gcsConfig?: GCSCredentialsValues | string;
/**
* AWS Access key ID.
*/
@@ -1343,6 +1338,10 @@ export interface SCredentials {
* EndPoint URL for the AWS
*/
endPointURL?: string;
/**
* GCS configs.
*/
gcsConfig?: GCSCredentialsValues | string;
}

/**
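
The Trino connection extras above are now typed as plain string maps. A small illustrative example of values that satisfy the narrowed type (keys and values are placeholders):

// Illustrative Trino connection extras; both maps accept only string values now.
const params: { [key: string]: string } = { session_properties: 'query_max_run_time=1h' };
const proxies: { [key: string]: string } = { https: 'http://proxy.example.com:8080' };
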
@@ -87,6 +87,7 @@ const AddIngestionPage = () => {
err,
jsonData['api-error-messages']['entity-already-exist-error']
);
reject();
} else {
getIngestionPipelineByFqn(`${serviceData?.name}.${data.name}`)
.then((res: AxiosResponse) => {
@@ -107,9 +108,9 @@ const AddIngestionPage = () => {
err,
jsonData['api-error-messages']['create-ingestion-error']
);
reject();
});
}
reject();
});
});
};
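
The page-level handler above wraps the create call in a Promise so the form can react to failures as well as success. A minimal sketch of that control flow with illustrative stand-ins (the real page uses its own API client and showErrorToast; the names below are hypothetical):

// Illustrative stand-ins for the page's API call and toast helper.
const createPipeline = (data: { name: string }): Promise<void> =>
  Promise.resolve();
const showError = (err: unknown): void => console.error(err);

// Sketch: resolve on success and reject on the error path so the caller's
// promise is never left pending after a failed create.
const onIngestionSave = (data: { name: string }): Promise<void> =>
  new Promise<void>((resolve, reject) => {
    createPipeline(data)
      .then(() => resolve())
      .catch((err) => {
        showError(err);
        reject();
      });
  });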