-- Unique constraint for user email address
ALTER TABLE user_entity
ADD UNIQUE (email);

-- Remove classificationName in BigQuery
UPDATE dbservice_entity SET json = json #- '{connection,config,classificationName}' WHERE serviceType IN ('BigQuery');

-- Migrate ingestAllDatabases in Postgres
UPDATE dbservice_entity de2
SET json = JSONB_SET(
    json || JSONB_SET(json, '{connection,config}', json #> '{connection,config}' ||
        jsonb_build_object('database',
            (SELECT json ->> 'name'
             FROM database_entity de
             WHERE id = (SELECT er.toId
                         FROM entity_relationship er
                         WHERE er.fromId = de2.id
                           AND er.toEntity = 'database'
                         LIMIT 1)
            )
        )),
    '{connection,config,ingestAllDatabases}',
    'true'::jsonb
)
WHERE de2.serviceType = 'Postgres'
  -- only flag services that do not have an explicit database configured
  AND json #>> '{connection,config,database}' IS NULL;

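-- Illustrative check, commented out on purpose (not part of the migration):
-- services updated above should now carry both the linked database name and the flag.
-- SELECT id,
--        json #>> '{connection,config,database}'           AS database,
--        json #>> '{connection,config,ingestAllDatabases}' AS ingestAllDatabases
-- FROM dbservice_entity
-- WHERE serviceType = 'Postgres';
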
CREATE TABLE IF NOT EXISTS storage_container_entity (
    id VARCHAR(36) GENERATED ALWAYS AS (json ->> 'id') STORED NOT NULL,
    fullyQualifiedName VARCHAR(256) GENERATED ALWAYS AS (json ->> 'fullyQualifiedName') STORED NOT NULL,
    json JSONB NOT NULL,
    updatedAt BIGINT GENERATED ALWAYS AS ((json ->> 'updatedAt')::bigint) STORED NOT NULL,
    updatedBy VARCHAR(256) GENERATED ALWAYS AS (json ->> 'updatedBy') STORED NOT NULL,
    deleted BOOLEAN GENERATED ALWAYS AS ((json ->> 'deleted')::boolean) STORED,
    PRIMARY KEY (id),
    UNIQUE (fullyQualifiedName)
);

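-- Note on the entity-table layout used above and in the tables created below:
-- the JSONB "json" column is the source of truth, and columns such as id,
-- name/fullyQualifiedName, updatedAt, updatedBy and deleted are generated from it
-- so they can be indexed and constrained.
-- Illustrative insert, commented out (the server normally writes these rows; the values are made up):
-- INSERT INTO storage_container_entity (json) VALUES (
--     '{"id": "11111111-1111-1111-1111-111111111111",
--       "fullyQualifiedName": "s3_service.my-bucket",
--       "updatedAt": 1680000000000,
--       "updatedBy": "admin",
--       "deleted": false}'::jsonb
-- );
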
CREATE TABLE IF NOT EXISTS test_connection_definition (
    id VARCHAR(36) GENERATED ALWAYS AS (json ->> 'id') STORED NOT NULL,
    name VARCHAR(256) GENERATED ALWAYS AS (json ->> 'name') STORED NOT NULL,
    json JSONB NOT NULL,
    updatedAt BIGINT GENERATED ALWAYS AS ((json ->> 'updatedAt')::bigint) STORED NOT NULL,
    updatedBy VARCHAR(256) GENERATED ALWAYS AS (json ->> 'updatedBy') STORED NOT NULL,
    deleted BOOLEAN GENERATED ALWAYS AS ((json ->> 'deleted')::boolean) STORED,
    UNIQUE (name)
);

CREATE TABLE IF NOT EXISTS automations_workflow (
    id VARCHAR(36) GENERATED ALWAYS AS (json ->> 'id') STORED NOT NULL,
    name VARCHAR(256) GENERATED ALWAYS AS (json ->> 'name') STORED NOT NULL,
    workflowType VARCHAR(256) GENERATED ALWAYS AS (json ->> 'workflowType') STORED NOT NULL,
    status VARCHAR(256) GENERATED ALWAYS AS (json ->> 'status') STORED,
    json JSONB NOT NULL,
    updatedAt BIGINT GENERATED ALWAYS AS ((json ->> 'updatedAt')::bigint) STORED NOT NULL,
    updatedBy VARCHAR(256) GENERATED ALWAYS AS (json ->> 'updatedBy') STORED NOT NULL,
    deleted BOOLEAN GENERATED ALWAYS AS ((json ->> 'deleted')::boolean) STORED,
    PRIMARY KEY (id),
    UNIQUE (name)
);

-- Do not store the OM server connection; we'll set it dynamically on the resource
UPDATE ingestion_pipeline_entity
SET json = json::jsonb #- '{openMetadataServerConnection}';

CREATE TABLE IF NOT EXISTS query_entity (
    id VARCHAR(36) GENERATED ALWAYS AS (json ->> 'id') STORED NOT NULL,
    name VARCHAR(256) GENERATED ALWAYS AS (json ->> 'name') STORED NOT NULL,
    json JSONB NOT NULL,
    updatedAt BIGINT GENERATED ALWAYS AS ((json ->> 'updatedAt')::bigint) STORED NOT NULL,
    updatedBy VARCHAR(256) GENERATED ALWAYS AS (json ->> 'updatedBy') STORED NOT NULL,
    deleted BOOLEAN GENERATED ALWAYS AS ((json ->> 'deleted')::boolean) STORED,
    PRIMARY KEY (id),
    UNIQUE (name)
);

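-- The block below moves legacy query data into the new query_entity table:
--   1. stage every 'table.tableQueries' extension row in temp_query_migration,
--   2. copy one deduplicated row per query into query_entity,
--   3. record a table -> query relationship for each migrated query,
--   4. delete the old extension rows and drop the staging table.
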
CREATE TABLE IF NOT EXISTS temp_query_migration (
    tableId VARCHAR(36) NOT NULL,
    queryId VARCHAR(36) GENERATED ALWAYS AS (json ->> 'id') STORED NOT NULL,
    queryName VARCHAR(255) GENERATED ALWAYS AS (json ->> 'name') STORED NOT NULL,
    json JSONB NOT NULL
);

CREATE EXTENSION IF NOT EXISTS pgcrypto;

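-- pgcrypto provides gen_random_uuid(), used below to mint ids for the migrated
-- queries (on PostgreSQL 13+ the function is also available without the extension).
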
INSERT INTO temp_query_migration(tableId, json)
SELECT id, json_build_object('id', gen_random_uuid(), 'query', query, 'users', users, 'checksum', checksum,
                             'duration', duration, 'name', checksum, 'updatedAt', floor(EXTRACT(EPOCH FROM NOW())),
                             'updatedBy', 'admin', 'deleted', false) AS json
FROM entity_extension AS ee,
     jsonb_to_recordset(ee.json) AS x (query varchar, users json, checksum varchar, name varchar, duration decimal, queryDate varchar)
WHERE ee.extension = 'table.tableQueries';

INSERT INTO query_entity (json)
SELECT value
FROM (
    SELECT jsonb_object_agg(queryName, json) AS json_data
    FROM (SELECT DISTINCT queryName, json FROM temp_query_migration) subquery
) cte, jsonb_each(cte.json_data)
ON CONFLICT (name) DO UPDATE SET json = EXCLUDED.json;

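-- The aggregate above collapses duplicate names: jsonb_object_agg() keeps a single
-- entry per queryName (the last value aggregated for a key wins), so each query
-- lands in query_entity exactly once.
-- Minimal illustration, not executed:
-- SELECT jsonb_object_agg(k, v)
-- FROM (VALUES ('q1', '{"a": 1}'::jsonb), ('q1', '{"a": 2}'::jsonb)) AS t(k, v);
-- -> typically {"q1": {"a": 2}}
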
INSERT INTO entity_relationship(fromId, toId, fromEntity, toEntity, relation)
SELECT tmq.tableId, qe.id, 'table', 'query', 5
FROM temp_query_migration tmq
JOIN query_entity qe ON qe.name = tmq.queryName;

DELETE FROM entity_extension
WHERE id IN (SELECT DISTINCT tableId FROM temp_query_migration)
  AND extension = 'table.tableQueries';

DROP TABLE temp_query_migration;

-- Remove the audience if it was wrongfully sent from the UI after editing the OM service
UPDATE metadata_service_entity
SET json = json::jsonb #- '{connection,config,securityConfig,audience}'
WHERE name = 'OpenMetadata'
  AND json #> '{connection,config,authProvider}' IS NOT NULL
  AND json -> 'connection' -> 'config' ->> 'authProvider' != 'google';

ALTER TABLE user_tokens ALTER COLUMN expiryDate DROP NOT NULL;

CREATE TABLE IF NOT EXISTS event_subscription_entity (
    id VARCHAR(36) GENERATED ALWAYS AS (json ->> 'id') STORED NOT NULL,
    name VARCHAR(256) GENERATED ALWAYS AS (json ->> 'name') STORED NOT NULL,
    deleted BOOLEAN GENERATED ALWAYS AS ((json ->> 'deleted')::boolean) STORED,
    json JSONB NOT NULL,
    PRIMARY KEY (id),
    UNIQUE (name)
);

DROP TABLE IF EXISTS alert_action_def;
DROP TABLE IF EXISTS alert_entity;
DELETE FROM entity_relationship WHERE fromEntity = 'alert' AND toEntity = 'alertAction';

-- Create the dashboard data model table
CREATE TABLE IF NOT EXISTS dashboard_data_model_entity (
    id VARCHAR(36) GENERATED ALWAYS AS (json ->> 'id') STORED NOT NULL,
    json JSONB NOT NULL,
    updatedAt BIGINT GENERATED ALWAYS AS ((json ->> 'updatedAt')::bigint) STORED NOT NULL,
    updatedBy VARCHAR(256) GENERATED ALWAYS AS (json ->> 'updatedBy') STORED NOT NULL,
    deleted BOOLEAN GENERATED ALWAYS AS ((json ->> 'deleted')::boolean) STORED,
    fullyQualifiedName VARCHAR(256) GENERATED ALWAYS AS (json ->> 'fullyQualifiedName') STORED NOT NULL,
    PRIMARY KEY (id),
    UNIQUE (fullyQualifiedName)
);

-- Rename Druid's connection.config.database to databaseName
UPDATE dbservice_entity
SET json = jsonb_set(json::jsonb #- '{connection,config,database}', '{connection,config,databaseName}', json #> '{connection,config,database}', true)
WHERE serviceType = 'Druid' AND json #> '{connection,config,database}' IS NOT NULL;

-- We were using the same jsonSchema for Pipeline Services and Ingestion Pipeline status,
-- and we relied on the extension to store the run id.
UPDATE entity_extension_time_series
SET jsonSchema = 'ingestionPipelineStatus', extension = 'ingestionPipeline.pipelineStatus'
WHERE jsonSchema = 'pipelineStatus' AND extension <> 'pipeline.PipelineStatus';

-- We are refactoring the storage service to use containers; remove the locations
DROP TABLE location_entity;
DELETE FROM entity_relationship WHERE fromEntity = 'location' OR toEntity = 'location';
TRUNCATE TABLE storage_service_entity;

-- Remove storageServiceName from Glue
UPDATE dbservice_entity
SET json = json::jsonb #- '{connection,config,storageServiceName}'
WHERE serviceType = 'Glue';

-- Remove the tables field from charts
UPDATE chart_entity
SET json = json::jsonb #- '{tables}';

-- Update the Tableau authentication fields
UPDATE dashboard_service_entity
SET json = JSONB_SET(json::jsonb,
        '{connection,config}', json::jsonb #> '{connection,config}' #- '{password}' #- '{username}' ||
        jsonb_build_object('authType', jsonb_build_object(
            'username', json #> '{connection,config,username}',
            'password', json #> '{connection,config,password}'
        )), true)
WHERE serviceType = 'Tableau'
  AND json #> '{connection,config,password}' IS NOT NULL
  AND json #> '{connection,config,username}' IS NOT NULL;

UPDATE dashboard_service_entity
SET json = JSONB_SET(json::jsonb,
        '{connection,config}', json::jsonb #> '{connection,config}' #- '{personalAccessTokenName}' #- '{personalAccessTokenSecret}' ||
        jsonb_build_object('authType', jsonb_build_object(
            'personalAccessTokenName', json #> '{connection,config,personalAccessTokenName}',
            'personalAccessTokenSecret', json #> '{connection,config,personalAccessTokenSecret}'
        )), true)
WHERE serviceType = 'Tableau'
  AND json #> '{connection,config,personalAccessTokenName}' IS NOT NULL
  AND json #> '{connection,config,personalAccessTokenSecret}' IS NOT NULL;

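-- Shape of the change above (illustrative values):
--   before: {"connection": {"config": {"username": "u", "password": "p", ...}}}
--   after:  {"connection": {"config": {"authType": {"username": "u", "password": "p"}, ...}}}
-- and likewise for personalAccessTokenName / personalAccessTokenSecret.
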
-- The allowServiceCreation property was removed from metadataService.json; drop it from stored services
UPDATE metadata_service_entity
SET json = json::jsonb #- '{allowServiceCreation}'
WHERE serviceType IN ('Amundsen', 'Atlas', 'MetadataES', 'OpenMetadata');

UPDATE metadata_service_entity
SET json = JSONB_SET(json::jsonb, '{provider}', '"system"')
WHERE name = 'OpenMetadata';

-- Fix Glue sample data endpoint URL to be a correct URI
UPDATE dbservice_entity
SET json = JSONB_SET(json::jsonb, '{connection,config,awsConfig,endPointURL}', '"https://glue.region_name.amazonaws.com/"')
WHERE serviceType = 'Glue'
  AND json #> '{connection,config,awsConfig,endPointURL}' = '"https://glue.<region_name>.amazonaws.com/"';

-- Delete connectionOptions from Superset
UPDATE dashboard_service_entity
SET json = json::jsonb #- '{connection,config,connectionOptions}'
WHERE serviceType = 'Superset';

-- Delete partitionQueryDuration, partitionQuery, partitionField from BigQuery
UPDATE dbservice_entity
SET json = json::jsonb #- '{connection,config,partitionQueryDuration}' #- '{connection,config,partitionQuery}' #- '{connection,config,partitionField}'
WHERE serviceType = 'BigQuery';

-- Delete supportsQueryComment, scheme, hostPort, supportsProfiler from Salesforce
UPDATE dbservice_entity
SET json = json::jsonb #- '{connection,config,supportsQueryComment}' #- '{connection,config,scheme}' #- '{connection,config,hostPort}' #- '{connection,config,supportsProfiler}'
WHERE serviceType = 'Salesforce';

-- Delete supportsProfiler from DynamoDB
UPDATE dbservice_entity
SET json = json::jsonb #- '{connection,config,supportsProfiler}'
WHERE serviceType = 'DynamoDB';

-- Update TagLabels source from 'Tag' to 'Classification' after #10486
UPDATE table_entity SET json = REGEXP_REPLACE(json::text, '"source"\s*:\s*"Tag\"', '"source": "Classification"', 'g')::jsonb;
UPDATE ml_model_entity SET json = REGEXP_REPLACE(json::text, '"source"\s*:\s*"Tag\"', '"source": "Classification"', 'g')::jsonb;

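-- Effect of the replacement above (illustrative tag label):
--   '{"tags": [{"tagFQN": "PII.Sensitive", "source": "Tag"}]}'
--   becomes
--   '{"tags": [{"tagFQN": "PII.Sensitive", "source": "Classification"}]}'
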
-- Delete uriString from Mssql
UPDATE dbservice_entity
SET json = json::jsonb #- '{connection,config,uriString}'
WHERE serviceType = 'Mssql';