Mirror of https://github.com/open-metadata/OpenMetadata.git (synced 2025-08-21 15:38:11 +00:00)
fix issue: 4177 Implement pipeline status for service page (#4281)

commit 61cb071ebc
parent c3993119f6
@@ -168,53 +168,49 @@ const Ingestion: React.FC<IngestionProps> = ({
       : ingestionList;
   }, [searchText, ingestionList]);

-  /* eslint-disable max-len */
-  // TODO:- once api support status we need below function
-  // const getStatuses = (ingestion: AirflowPipeline) => {
-  //   const lastFiveIngestions = ingestion.pipelineStatuses
-  //     ?.sort((a, b) => {
-  //       // Turn your strings into millis, and then subtract them
-  //       // to get a value that is either negative, positive, or zero.
-  //       const date1 = new Date(a.startDate || '');
-  //       const date2 = new Date(b.startDate || '');
-  //       return date1.getTime() - date2.getTime();
-  //     })
-  //     .slice(Math.max(ingestion.pipelineStatuses.length - 5, 0));
-  //   return lastFiveIngestions?.map((r, i) => {
-  //     return (
-  //       <PopOver
-  //         html={
-  //           <div className="tw-text-left">
-  //             {r.startDate ? (
-  //               <p>Start Date: {new Date(r.startDate).toUTCString()}</p>
-  //             ) : null}
-  //             {r.endDate ? (
-  //               <p>End Date: {new Date(r.endDate).toUTCString()}</p>
-  //             ) : null}
-  //           </div>
-  //         }
-  //         key={i}
-  //         position="bottom"
-  //         theme="light"
-  //         trigger="mouseenter">
-  //         {i === lastFiveIngestions.length - 1 ? (
-  //           <p
-  //             className={`tw-h-5 tw-w-16 tw-rounded-sm tw-bg-status-${r.state} tw-mr-1 tw-px-1 tw-text-white tw-text-center`}>
-  //             {capitalize(r.state)}
-  //           </p>
-  //         ) : (
-  //           <p
-  //             className={`tw-w-4 tw-h-5 tw-rounded-sm tw-bg-status-${r.state} tw-mr-1`}
-  //           />
-  //         )}
-  //       </PopOver>
-  //     );
-  //   });
-  // };
-  /* eslint-enable max-len */
+  const getStatuses = (ingestion: IngestionPipeline) => {
+    const lastFiveIngestions = ingestion.pipelineStatuses
+      ?.sort((a, b) => {
+        // Turn your strings into millis, and then subtract them
+        // to get a value that is either negative, positive, or zero.
+        const date1 = new Date(a.startDate || '');
+        const date2 = new Date(b.startDate || '');
+
+        return date1.getTime() - date2.getTime();
+      })
+      .slice(Math.max(ingestion.pipelineStatuses.length - 5, 0));
+
+    return lastFiveIngestions?.map((r, i) => {
+      return (
+        <PopOver
+          html={
+            <div className="tw-text-left">
+              {r.startDate ? (
+                <p>Start Date: {new Date(r.startDate).toUTCString()}</p>
+              ) : null}
+              {r.endDate ? (
+                <p>End Date: {new Date(r.endDate).toUTCString()}</p>
+              ) : null}
+            </div>
+          }
+          key={i}
+          position="bottom"
+          theme="light"
+          trigger="mouseenter">
+          {i === lastFiveIngestions.length - 1 ? (
+            <p
+              className={`tw-h-5 tw-w-16 tw-rounded-sm tw-bg-status-${r.state} tw-mr-1 tw-px-1 tw-text-white tw-text-center`}>
+              {capitalize(r.state)}
+            </p>
+          ) : (
+            <p
+              className={`tw-w-4 tw-h-5 tw-rounded-sm tw-bg-status-${r.state} tw-mr-1`}
+            />
+          )}
+        </PopOver>
+      );
+    });
+  };

   const getIngestionTab = () => {
     return (
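Note on the hunk above: the previously commented-out getStatuses helper is re-enabled and retyped from AirflowPipeline to IngestionPipeline. It sorts pipelineStatuses ascending by startDate and keeps the last five, so the newest run lands at the end of the array and gets the wide, labeled chip. A minimal standalone sketch of just that selection step, assuming PipelineStatus matches the interface added later in this diff (the helper name and sample data are illustrative, not from this commit):

// Trimmed copy of the PipelineStatus interface added later in this diff.
interface PipelineStatus {
  runId?: string;
  startDate?: string;
  endDate?: string;
  state?: string;
}

// Illustrative helper: five most recent runs, oldest first, so the
// last element is the latest run (the i === length - 1 case above).
const lastFiveRuns = (statuses: PipelineStatus[] = []): PipelineStatus[] =>
  [...statuses]
    .sort(
      (a, b) =>
        new Date(a.startDate || '').getTime() -
        new Date(b.startDate || '').getTime()
    )
    .slice(Math.max(statuses.length - 5, 0));

const runs = lastFiveRuns([
  { runId: '2', startDate: '2022-04-13T00:00:00Z', state: 'failed' },
  { runId: '1', startDate: '2022-04-12T00:00:00Z', state: 'success' },
]);
console.log(runs[runs.length - 1]?.state); // 'failed'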
@@ -312,8 +308,7 @@ const Ingestion: React.FC<IngestionProps> = ({
             )}
           </td>
           <td className="tableBody-cell">
-            {/* TODO:- update this once api support pipeline status */}
-            {/* <div className="tw-flex">{getStatuses(ingestion)}</div> */}
+            <div className="tw-flex">{getStatuses(ingestion)}</div>
           </td>

           <td className="tableBody-cell">
@@ -65,10 +65,6 @@ export interface AirflowConfig {
   * Maximum Number of active runs.
   */
  maxActiveRuns?: number;
-  /**
-   * Next execution date from the underlying pipeline platform once the pipeline scheduled.
-   */
-  nextExecutionDate?: Date;
  /**
   * pause the pipeline from running once the deploy is finished successfully.
   */
@@ -77,18 +73,10 @@ export interface AirflowConfig {
   * Run past executions if the start date is in the past.
   */
  pipelineCatchup?: boolean;
-  /**
-   * Timeout for the pipeline in seconds.
-   */
-  pipelineTimeout?: number;
  /**
   * Timezone in which pipeline going to be scheduled.
   */
  pipelineTimezone?: string;
-  /**
-   * File system directory path where managed python operator files are stored.
-   */
-  pythonOperatorLocation?: string;
  /**
   * Retry pipeline in case of failure.
   */
@@ -101,10 +89,6 @@ export interface AirflowConfig {
   * Scheduler Interval for the pipeline in cron format.
   */
  scheduleInterval?: string;
-  /**
-   * python method call back on SLA miss.
-   */
-  slaMissCallback?: string;
  /**
   * Start date of the pipeline.
   */
@@ -188,13 +172,24 @@ export interface SourceConfig {

 export interface ConfigClass {
  /**
-   * DBT catalog file to extract dbt models with their column schemas.
+   * DBT Catalog file name
   */
-  dbtCatalogFilePath?: string;
+  dbtCatalogFileName?: string;
  /**
-   * DBT manifest file path to extract dbt models and associate with tables.
+   * DBT configuration.
   */
-  dbtManifestFilePath?: string;
+  dbtConfig?: LocalHTTPDbtConfig;
+  /**
+   * DBT Manifest file name
+   */
+  dbtManifestFileName?: string;
+  /**
+   * Method from which the DBT files will be fetched. Accepted values are: 's3'(Required aws
+   * s3 credentials to be provided), 'gcs'(Required gcs credentials to be provided),
+   * 'gcs-path'(path of the file containing gcs credentials), 'local'(path of dbt files on
+   * local system), 'http'(url path of dbt files).
+   */
+  dbtProvider?: DbtProvider;
  /**
   * Run data profiler as part of this metadata ingestion to get table profile data.
   */
@@ -274,3 +269,107 @@ export interface FilterPattern {
   */
  includes?: string[];
}
+
+/**
+ * DBT configuration.
+ *
+ * Local and HTTP DBT configs.
+ *
+ * GCS credentials configs.
+ *
+ * AWS S3 credentials configs.
+ */
+export interface LocalHTTPDbtConfig {
+  /**
+   * DBT catalog file to extract dbt models with their column schemas.
+   */
+  dbtCatalogFilePath?: string;
+  /**
+   * DBT manifest file path to extract dbt models and associate with tables.
+   */
+  dbtManifestFilePath?: string;
+  /**
+   * GCS configs.
+   */
+  gcsConfig?: GCSValues | string;
+  /**
+   * AWS Access key ID.
+   */
+  awsAccessKeyId?: string;
+  /**
+   * AWS Region
+   */
+  awsRegion?: string;
+  /**
+   * AWS Secret Access Key.
+   */
+  awsSecretAccessKey?: string;
+  /**
+   * AWS Session Token.
+   */
+  awsSessionToken?: string;
+  /**
+   * EndPoint URL for the AWS
+   */
+  endPointURL?: string;
+}
+
+/**
+ * GCS Credentials.
+ */
+export interface GCSValues {
+  /**
+   * Google Cloud auth provider certificate.
+   */
+  authProviderX509CertUrl?: string;
+  /**
+   * Google Cloud auth uri.
+   */
+  authUri?: string;
+  /**
+   * Google Cloud email.
+   */
+  clientEmail?: string;
+  /**
+   * Google Cloud Client ID.
+   */
+  clientId?: string;
+  /**
+   * Google Cloud client certificate uri.
+   */
+  clientX509CertUrl?: string;
+  /**
+   * Google Cloud private key.
+   */
+  privateKey?: string;
+  /**
+   * Google Cloud private key id.
+   */
+  privateKeyId?: string;
+  /**
+   * Google Cloud project id.
+   */
+  projectId?: string;
+  /**
+   * Google Cloud token uri.
+   */
+  tokenUri?: string;
+  /**
+   * Google Cloud service account type.
+   */
+  type?: string;
+}
+
+/**
+ * Method from which the DBT files will be fetched. Accepted values are: 's3'(Required aws
+ * s3 credentials to be provided), 'gcs'(Required gcs credentials to be provided),
+ * 'gcs-path'(path of the file containing gcs credentials), 'local'(path of dbt files on
+ * local system), 'http'(url path of dbt files).
+ */
+export enum DbtProvider {
+  Gcs = 'gcs',
+  GcsPath = 'gcs-path',
+  HTTP = 'http',
+  Local = 'local',
+  S3 = 's3',
+}
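Taken together, these additions split the dbt settings out of ConfigClass into a nested dbtConfig object plus a dbtProvider discriminator. A hedged sketch of how the new shape composes, using the types added above (all field values are invented for illustration):

// Local provider: point at files on disk.
const localDbt: LocalHTTPDbtConfig = {
  dbtCatalogFilePath: '/tmp/catalog.json',
  dbtManifestFilePath: '/tmp/manifest.json',
};

const localConfig: ConfigClass = {
  dbtProvider: DbtProvider.Local,
  dbtConfig: localDbt,
};

// S3 provider: the same interface carries the AWS credentials instead.
const s3Config: ConfigClass = {
  dbtProvider: DbtProvider.S3,
  dbtConfig: {
    awsAccessKeyId: 'AKIA...', // placeholder
    awsSecretAccessKey: '<secret>', // placeholder
    awsRegion: 'us-east-1',
  },
  dbtCatalogFileName: 'catalog.json',
  dbtManifestFileName: 'manifest.json',
};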
@@ -50,11 +50,19 @@ export interface IngestionPipeline {
   * Name that identifies this pipeline instance uniquely.
   */
  name: string;
+  /**
+   * Next execution date from the underlying pipeline platform once the pipeline scheduled.
+   */
+  nextExecutionDate?: Date;
  openMetadataServerConnection: OpenMetadataConnection;
  /**
   * Owner of this Pipeline.
   */
  owner?: EntityReference;
+  /**
+   * List of executions and status for the Pipeline.
+   */
+  pipelineStatuses?: PipelineStatus[];
  pipelineType: PipelineType;
  /**
   * Link to the database service where this database is hosted in.
@@ -100,10 +108,6 @@ export interface AirflowConfig {
   * Maximum Number of active runs.
   */
  maxActiveRuns?: number;
-  /**
-   * Next execution date from the underlying pipeline platform once the pipeline scheduled.
-   */
-  nextExecutionDate?: Date;
  /**
   * pause the pipeline from running once the deploy is finished successfully.
   */
@@ -112,18 +116,10 @@ export interface AirflowConfig {
   * Run past executions if the start date is in the past.
   */
  pipelineCatchup?: boolean;
-  /**
-   * Timeout for the pipeline in seconds.
-   */
-  pipelineTimeout?: number;
  /**
   * Timezone in which pipeline going to be scheduled.
   */
  pipelineTimezone?: string;
-  /**
-   * File system directory path where managed python operator files are stored.
-   */
-  pythonOperatorLocation?: string;
  /**
   * Retry pipeline in case of failure.
   */
@@ -136,10 +132,6 @@ export interface AirflowConfig {
   * Scheduler Interval for the pipeline in cron format.
   */
  scheduleInterval?: string;
-  /**
-   * python method call back on SLA miss.
-   */
-  slaMissCallback?: string;
  /**
   * Start date of the pipeline.
   */
@@ -216,10 +208,47 @@ export interface OpenMetadataConnection {
   * OpenMetadata Server Config. Must include API end point ex: http://localhost:8585/api
   */
  hostPort: string;
+  /**
+   * Include Dashboards for Indexing
+   */
+  includeDashboards?: boolean;
+  /**
+   * Include Glossary Terms for Indexing
+   */
+  includeGlossaryTerms?: boolean;
+  /**
+   * Include Pipelines for Indexing
+   */
+  includePipelines?: boolean;
+  /**
+   * Include Tables for Indexing
+   */
+  includeTables?: boolean;
+  /**
+   * Include Teams for Indexing
+   */
+  includeTeams?: boolean;
+  /**
+   * Include Topics for Indexing
+   */
+  includeTopics?: boolean;
+  /**
+   * Include Users for Indexing
+   */
+  includeUsers?: boolean;
+  /**
+   * Limit the number of records for Indexing.
+   */
+  limitRecords?: number;
  /**
   * OpenMetadata Client security configuration.
   */
  securityConfig?: SsoClientConfig;
+  supportsMetadataExtraction?: boolean;
+  /**
+   * Service Type
+   */
+  type?: OpenmetadataType;
 }

 /**
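All of the OpenMetadataConnection additions are optional indexing toggles plus a record limit and a service-type tag. A sketch of a connection value using them (the hostPort value echoes the example in the doc comment above; OpenmetadataType is the enum added in the next hunk):

const connection: OpenMetadataConnection = {
  hostPort: 'http://localhost:8585/api',
  includeTables: true,
  includeTopics: true,
  includeDashboards: true,
  includePipelines: true,
  includeGlossaryTerms: true,
  includeUsers: false,
  includeTeams: false,
  limitRecords: 1000,
  supportsMetadataExtraction: true,
  type: OpenmetadataType.OpenMetadata,
};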
@@ -307,6 +336,15 @@ export interface SsoClientConfig {
  tokenEndpoint?: string;
 }

+/**
+ * Service Type
+ *
+ * OpenMetadata service type
+ */
+export enum OpenmetadataType {
+  OpenMetadata = 'OpenMetadata',
+}
+
 /**
  * Owner of this Pipeline.
  *
@@ -355,6 +393,28 @@ export interface EntityReference {
  type: string;
 }

+/**
+ * This defines runtime status of Pipeline.
+ */
+export interface PipelineStatus {
+  /**
+   * endDate of the pipeline run for this particular execution.
+   */
+  endDate?: string;
+  /**
+   * Pipeline unique run ID.
+   */
+  runId?: string;
+  /**
+   * startDate of the pipeline run for this particular execution.
+   */
+  startDate?: string;
+  /**
+   * Pipeline status denotes if its failed or succeeded.
+   */
+  state?: string;
+}
+
 /**
  * Type of Pipeline - metadata, usage
  */
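PipelineStatus is the shape the getStatuses hunk at the top of this diff consumes. A sample value (invented data) showing the fields the PopOver reads:

const statuses: PipelineStatus[] = [
  {
    runId: 'scheduled__2022-04-12T00:00:00+00:00', // illustrative Airflow-style run id
    startDate: '2022-04-12T00:00:05Z',
    endDate: '2022-04-12T00:03:41Z',
    state: 'success',
  },
  {
    runId: 'scheduled__2022-04-13T00:00:00+00:00',
    startDate: '2022-04-13T00:00:04Z',
    state: 'running', // endDate stays unset while the run is in flight
  },
];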
@@ -420,7 +480,7 @@ export interface ServiceConnection {
  *
  * Databricks Connection Config
  *
- * DB2 Connection Config
+ * Db2 Connection Config
  *
  * DeltaLake Database Connection Config
  *
@@ -465,6 +525,10 @@ export interface ServiceConnection {
  * Pulsar Connection Config
  *
  * Amundsen Connection Config
+ *
+ * Metadata to ElasticSeach Connection Config
+ *
+ * OpenMetadata Connection Config
  */
 export interface Connection {
  /**
@@ -478,6 +542,10 @@ export interface Connection {
  *
  * Host and Port of Metabase instance.
  *
+ * Dashboard URL for the power BI.
+ *
+ * URL for the redash instance
+ *
  * URL for the superset instance
  *
  * Tableau Server
@@ -494,10 +562,6 @@ export interface Connection {
  *
  * Host and port of the Druid
  *
- * Host and port of the DynamoDB
- *
- * Host and port of the Glue
- *
  * Host and port of the Hive.
  *
  * Host and port of the data source.
@@ -513,8 +577,10 @@ export interface Connection {
  * Host and port of the Redshift.
  *
  * Host and port of the Amundsen Neo4j Connection.
+ *
+ * OpenMetadata Server Config. Must include API end point ex: http://localhost:8585/api
  */
-  hostPort?: string;
+  hostPort?: any;
  /**
   * password to connect to the Looker.
   *
@@ -565,7 +631,7 @@ export interface Connection {
  /**
   * Service Type
   */
-  type?: AmundsenType;
+  type?: Type;
  /**
   * username to connect to the Looker. This user should have privileges to read all the
   * metadata in Looker.
@@ -649,12 +715,10 @@ export interface Connection {
  clientSecret?: string;
  /**
   * Credentials for the PowerBI.
+   *
+   * GCS Credentials
   */
-  credentials?: string;
-  /**
-   * Dashboard URL for the power BI.
-   */
-  dashboardURL?: string;
+  credentials?: GCSCredentials | string;
  /**
   * Dashboard redirect URI for the PowerBI.
   */
@@ -667,10 +731,6 @@ export interface Connection {
   * API key of the redash instance to access.
   */
  apiKey?: string;
-  /**
-   * URL for the redash instance
-   */
-  redashURL?: string;
  /**
   * Additional connection options that can be sent to service during the connection.
   */
@@ -685,6 +745,8 @@ export interface Connection {
  provider?: string;
  /**
   * Tableau API version
+   *
+   * OpenMetadata server API version to use.
   */
  apiVersion?: string;
  /**
@@ -795,9 +857,9 @@ export interface Connection {
   */
  partitionQueryDuration?: number;
  /**
-   * Google BigQuery project id.
+   * BigQuery project ID. Inform it here if passing the credentials path.
   */
-  projectID?: string;
+  projectId?: string;
  /**
   * SQLAlchemy driver scheme options.
   */
@@ -848,7 +910,7 @@ export interface Connection {
  /**
   * AWS Access key ID.
   */
-  awsAccessKeyId?: any;
+  awsAccessKeyId?: string;
  /**
   * AWS Secret Access Key.
   */
@@ -950,6 +1012,105 @@ export interface Connection {
   * Enable SSL validation for the Amundsen Neo4j Connection.
   */
  validateSSL?: boolean;
+  /**
+   * Include Dashboards for Indexing
+   */
+  includeDashboards?: boolean;
+  /**
+   * Include Glossary Terms for Indexing
+   */
+  includeGlossaryTerms?: boolean;
+  /**
+   * Include Pipelines for Indexing
+   */
+  includePipelines?: boolean;
+  /**
+   * Include Tables for Indexing
+   */
+  includeTables?: boolean;
+  /**
+   * Include Teams for Indexing
+   */
+  includeTeams?: boolean;
+  /**
+   * Include Topics for Indexing
+   */
+  includeTopics?: boolean;
+  /**
+   * Include Users for Indexing
+   */
+  includeUsers?: boolean;
+  /**
+   * Limit the number of records for Indexing.
+   */
+  limitRecords?: number;
+  /**
+   * OpenMetadata Server Authentication Provider. Make sure configure same auth providers as
+   * the one configured on OpenMetadaata server.
+   */
+  authProvider?: AuthProvider;
+  /**
+   * OpenMetadata Client security configuration.
+   */
+  securityConfig?: SsoClientConfig;
+}
+
+/**
+ * GCS Credentials
+ *
+ * GCS credentials configs.
+ */
+export interface GCSCredentials {
+  /**
+   * GCS configs.
+   */
+  gcsConfig: GCSValues | string;
+}
+
+/**
+ * GCS Credentials.
+ */
+export interface GCSValues {
+  /**
+   * Google Cloud auth provider certificate.
+   */
+  authProviderX509CertUrl?: string;
+  /**
+   * Google Cloud auth uri.
+   */
+  authUri?: string;
+  /**
+   * Google Cloud email.
+   */
+  clientEmail?: string;
+  /**
+   * Google Cloud Client ID.
+   */
+  clientId?: string;
+  /**
+   * Google Cloud client certificate uri.
+   */
+  clientX509CertUrl?: string;
+  /**
+   * Google Cloud private key.
+   */
+  privateKey?: string;
+  /**
+   * Google Cloud private key id.
+   */
+  privateKeyId?: string;
+  /**
+   * Google Cloud project id.
+   */
+  projectId?: string;
+  /**
+   * Google Cloud token uri.
+   */
+  tokenUri?: string;
+  /**
+   * Google Cloud service account type.
+   */
+  type?: string;
 }

 /**
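With this hunk, credentials on Connection widens from string to GCSCredentials | string, and GCSCredentials.gcsConfig is itself a union of inline GCSValues or a path string. A small narrowing sketch (the helper name is invented):

const resolveCredentials = (
  credentials?: GCSCredentials | string
): GCSValues | string | undefined => {
  if (typeof credentials === 'string') {
    return credentials; // already a path to a credentials file
  }

  return credentials?.gcsConfig; // inline GCS values, or a path string
};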
@@ -1000,8 +1161,12 @@ export enum Scheme {
  * Pulsar service type
  *
  * Amundsen service type
+ *
+ * Metadata to Elastic Seach type
+ *
+ * OpenMetadata service type
  */
-export enum AmundsenType {
+export enum Type {
  Amundsen = 'Amundsen',
  Athena = 'Athena',
  AzureSQL = 'AzureSQL',
@@ -1018,8 +1183,10 @@ export enum AmundsenType {
  Looker = 'Looker',
  MariaDB = 'MariaDB',
  Metabase = 'Metabase',
+  MetadataES = 'MetadataES',
  Mssql = 'Mssql',
  Mysql = 'Mysql',
+  OpenMetadata = 'OpenMetadata',
  Oracle = 'Oracle',
  Postgres = 'Postgres',
  PowerBI = 'PowerBI',
@@ -1047,13 +1214,24 @@ export interface SourceConfig {

 export interface ConfigClass {
  /**
-   * DBT catalog file to extract dbt models with their column schemas.
+   * DBT Catalog file name
   */
-  dbtCatalogFilePath?: string;
+  dbtCatalogFileName?: string;
  /**
-   * DBT manifest file path to extract dbt models and associate with tables.
+   * DBT configuration.
   */
-  dbtManifestFilePath?: string;
+  dbtConfig?: LocalHTTPDbtConfig;
+  /**
+   * DBT Manifest file name
+   */
+  dbtManifestFileName?: string;
+  /**
+   * Method from which the DBT files will be fetched. Accepted values are: 's3'(Required aws
+   * s3 credentials to be provided), 'gcs'(Required gcs credentials to be provided),
+   * 'gcs-path'(path of the file containing gcs credentials), 'local'(path of dbt files on
+   * local system), 'http'(url path of dbt files).
+   */
+  dbtProvider?: DbtProvider;
  /**
   * Run data profiler as part of this metadata ingestion to get table profile data.
   */
@@ -1133,3 +1311,63 @@ export interface FilterPattern {
   */
  includes?: string[];
}
+
+/**
+ * DBT configuration.
+ *
+ * Local and HTTP DBT configs.
+ *
+ * GCS Credentials
+ *
+ * GCS credentials configs.
+ *
+ * AWS S3 credentials configs.
+ */
+export interface LocalHTTPDbtConfig {
+  /**
+   * DBT catalog file to extract dbt models with their column schemas.
+   */
+  dbtCatalogFilePath?: string;
+  /**
+   * DBT manifest file path to extract dbt models and associate with tables.
+   */
+  dbtManifestFilePath?: string;
+  /**
+   * GCS configs.
+   */
+  gcsConfig?: GCSValues | string;
+  /**
+   * AWS Access key ID.
+   */
+  awsAccessKeyId?: string;
+  /**
+   * AWS Region
+   */
+  awsRegion?: string;
+  /**
+   * AWS Secret Access Key.
+   */
+  awsSecretAccessKey?: string;
+  /**
+   * AWS Session Token.
+   */
+  awsSessionToken?: string;
+  /**
+   * EndPoint URL for the AWS
+   */
+  endPointURL?: string;
+}
+
+/**
+ * Method from which the DBT files will be fetched. Accepted values are: 's3'(Required aws
+ * s3 credentials to be provided), 'gcs'(Required gcs credentials to be provided),
+ * 'gcs-path'(path of the file containing gcs credentials), 'local'(path of dbt files on
+ * local system), 'http'(url path of dbt files).
+ */
+export enum DbtProvider {
+  Gcs = 'gcs',
+  GcsPath = 'gcs-path',
+  HTTP = 'http',
+  Local = 'local',
+  S3 = 's3',
+}
@@ -216,7 +216,7 @@ const ServicePage: FunctionComponent = () => {

   const getAllIngestionWorkflows = (paging?: string) => {
     setIsloading(true);
-    getIngestionPipelines(['owner'], serviceFQN, paging)
+    getIngestionPipelines(['owner', 'pipelineStatuses'], serviceFQN, paging)
      .then((res) => {
        if (res.data.data) {
          setIngestions(res.data.data);
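The only ServicePage change is requesting the extra pipelineStatuses field from the server. A hedged sketch of how a fields parameter like this is typically serialized into the request; the axios client, endpoint path, and helper signature are assumptions, not taken from this commit:

import axios from 'axios';

const getIngestionPipelines = (
  fields: string[],
  service?: string,
  paging?: string
) => {
  let url = `/api/v1/services/ingestionPipelines?fields=${fields.join(',')}`;
  if (service) url += `&service=${service}`;
  if (paging) url += paging;

  return axios.get(url);
};

// As in the hunk above: owner plus run statuses in one call.
getIngestionPipelines(['owner', 'pipelineStatuses'], 'sample_data');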