-- OpenMetadata/bootstrap/sql/org.postgresql.Driver/v009__create_db_connection_info.sql
-- Unique constraint for user email address
ALTER TABLE user_entity
ADD UNIQUE (email);
-- Remove classificationName in BigQuery
UPDATE dbservice_entity SET json = json #- '{connection,config,classificationName}' WHERE serviceType IN ('BigQuery');
-- Migrate ingestAllDatabases in Postgres: when no database is configured, record one ingested database and set ingestAllDatabases to true
UPDATE dbservice_entity de2
SET json = JSONB_SET(
        json || JSONB_SET(json, '{connection,config}', json #> '{connection,config}' ||
            jsonb_build_object('database',
                (SELECT json ->> 'name'
                 FROM database_entity de
                 WHERE id = (SELECT er.toId
                             FROM entity_relationship er
                             WHERE er.fromId = de2.id
                               AND er.toEntity = 'database'
                             LIMIT 1)
                )
            )
        ),
        '{connection,config,ingestAllDatabases}',
        'true'::jsonb
    )
WHERE de2.serviceType = 'Postgres'
  AND json #>> '{connection,config,database}' IS NULL;
-- new object store service and container entities
CREATE TABLE IF NOT EXISTS objectstore_service_entity (
    id VARCHAR(36) GENERATED ALWAYS AS (json ->> 'id') STORED NOT NULL,
    name VARCHAR(256) GENERATED ALWAYS AS (json ->> 'name') STORED NOT NULL,
    serviceType VARCHAR(256) GENERATED ALWAYS AS (json ->> 'serviceType') STORED NOT NULL,
    json JSONB NOT NULL,
    updatedAt BIGINT GENERATED ALWAYS AS ((json ->> 'updatedAt')::bigint) STORED NOT NULL,
    updatedBy VARCHAR(256) GENERATED ALWAYS AS (json ->> 'updatedBy') STORED NOT NULL,
    deleted BOOLEAN GENERATED ALWAYS AS ((json ->> 'deleted')::boolean) STORED,
    PRIMARY KEY (id),
    UNIQUE (name)
);
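-- Containers (buckets, folders, files) discovered in object store services; uniquely identified by fullyQualifiedName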
CREATE TABLE IF NOT EXISTS objectstore_container_entity (
    id VARCHAR(36) GENERATED ALWAYS AS (json ->> 'id') STORED NOT NULL,
    fullyQualifiedName VARCHAR(256) GENERATED ALWAYS AS (json ->> 'fullyQualifiedName') STORED NOT NULL,
    json JSONB NOT NULL,
    updatedAt BIGINT GENERATED ALWAYS AS ((json ->> 'updatedAt')::bigint) STORED NOT NULL,
    updatedBy VARCHAR(256) GENERATED ALWAYS AS (json ->> 'updatedBy') STORED NOT NULL,
    deleted BOOLEAN GENERATED ALWAYS AS ((json ->> 'deleted')::boolean) STORED,
    PRIMARY KEY (id),
    UNIQUE (fullyQualifiedName)
);
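-- Per-service definitions of the steps executed when testing a connection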
CREATE TABLE IF NOT EXISTS test_connection_definition (
    id VARCHAR(36) GENERATED ALWAYS AS (json ->> 'id') STORED NOT NULL,
    name VARCHAR(256) GENERATED ALWAYS AS (json ->> 'name') STORED NOT NULL,
    json JSONB NOT NULL,
    updatedAt BIGINT GENERATED ALWAYS AS ((json ->> 'updatedAt')::bigint) STORED NOT NULL,
    updatedBy VARCHAR(256) GENERATED ALWAYS AS (json ->> 'updatedBy') STORED NOT NULL,
    deleted BOOLEAN GENERATED ALWAYS AS ((json ->> 'deleted')::boolean) STORED,
    UNIQUE (name)
);
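-- Automation workflows (e.g. test-connection runs) together with their type and status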
CREATE TABLE IF NOT EXISTS automations_workflow (
    id VARCHAR(36) GENERATED ALWAYS AS (json ->> 'id') STORED NOT NULL,
    name VARCHAR(256) GENERATED ALWAYS AS (json ->> 'name') STORED NOT NULL,
    workflowType VARCHAR(256) GENERATED ALWAYS AS (json ->> 'workflowType') STORED NOT NULL,
    status VARCHAR(256) GENERATED ALWAYS AS (json ->> 'status') STORED,
    json JSONB NOT NULL,
    updatedAt BIGINT GENERATED ALWAYS AS ((json ->> 'updatedAt')::bigint) STORED NOT NULL,
    updatedBy VARCHAR(256) GENERATED ALWAYS AS (json ->> 'updatedBy') STORED NOT NULL,
    deleted BOOLEAN GENERATED ALWAYS AS ((json ->> 'deleted')::boolean) STORED,
    PRIMARY KEY (id),
    UNIQUE (name)
);
-- Do not store the OM server connection; it is set dynamically on the resource
UPDATE ingestion_pipeline_entity
SET json = json::jsonb #- '{openMetadataServerConnection}';
-- Query as entity (#10449): migrate queries stored as table extensions into a dedicated query_entity table
CREATE TABLE IF NOT EXISTS query_entity (
    id VARCHAR(36) GENERATED ALWAYS AS (json ->> 'id') STORED NOT NULL,
    name VARCHAR(256) GENERATED ALWAYS AS (json ->> 'name') STORED NOT NULL,
    json JSONB NOT NULL,
    updatedAt BIGINT GENERATED ALWAYS AS ((json ->> 'updatedAt')::bigint) STORED NOT NULL,
    updatedBy VARCHAR(256) GENERATED ALWAYS AS (json ->> 'updatedBy') STORED NOT NULL,
    deleted BOOLEAN GENERATED ALWAYS AS ((json ->> 'deleted')::boolean) STORED,
    UNIQUE (name)
);
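-- Staging table used only for this migration; maps each table id to its newly created query entity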
CREATE TABLE IF NOT EXISTS temp_query_migration (
    tableId VARCHAR(36) NOT NULL,
    queryId VARCHAR(36) GENERATED ALWAYS AS (json ->> 'id') STORED NOT NULL,
    json JSONB NOT NULL
);
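-- gen_random_uuid() used below is provided by the pgcrypto extension (built into core from PostgreSQL 13)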
CREATE EXTENSION IF NOT EXISTS pgcrypto;
INSERT INTO temp_query_migration(tableId, json)
SELECT id,
       json_build_object('id', gen_random_uuid(), 'vote', vote, 'query', query, 'users', users,
                         'checksum', checksum, 'duration', duration, 'name', checksum,
                         'updatedAt', floor(EXTRACT(EPOCH FROM NOW())), 'updatedBy', 'admin',
                         'deleted', false) AS json
FROM entity_extension AS ee,
     jsonb_to_recordset(ee.json) AS x (vote decimal, query varchar, users json,
                                       checksum varchar, duration decimal, queryDate varchar)
WHERE ee.extension = 'table.tableQueries';
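-- Promote the staged queries to first-class query_entity rows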
INSERT INTO query_entity(json)
SELECT json FROM temp_query_migration;
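-- Relate each source table to its migrated query entity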
INSERT INTO entity_relationship(fromId,toId,fromEntity,toEntity,relation)
SELECT tableId,queryId,'table','query',10 FROM temp_query_migration;
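-- Remove the old table.tableQueries extension records now that queries live in query_entity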
DELETE FROM entity_extension
WHERE id IN (SELECT DISTINCT tableId FROM temp_query_migration)
  AND extension = 'table.tableQueries';
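-- Drop the staging table once the migration is complete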
DROP TABLE temp_query_migration;