fix: replace direct and indirect references to linkedin with datahub-project (#4557)

* Update GitHub-related links to use datahub-project:
  - https://github.com
  - https://img.shields.io/github/...
  - https://raw.githubusercontent.com/...
* Also replace references to the GitHub repo linkedin/datahub with
  datahub-project/datahub (a scripted sweep of this kind is sketched below).
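For illustration, a sweep like this can be scripted. Here is a minimal Python sketch (an assumed approach, not the tool actually used for this commit) that rewrites the three URL families listed above while leaving other references, such as the linkedin/datahub-gms Docker image name, untouched:

```python
import pathlib
import re

# Rewrite GitHub-related references from linkedin/datahub to
# datahub-project/datahub, limited to the three hosts named above.
# The trailing lookahead avoids touching names like linkedin/datahub-gms.
HOSTS = r"(?:github\.com|img\.shields\.io|raw\.githubusercontent\.com)"
PATTERN = re.compile(rf"({HOSTS}\S*?)/linkedin/datahub(?![\w-])")

def rewrite_refs(root: str = ".") -> None:
    for path in pathlib.Path(root).rglob("*"):
        if not path.is_file():
            continue
        try:
            text = path.read_text(encoding="utf-8")
        except (UnicodeDecodeError, OSError):
            continue  # skip binary or unreadable files
        updated = PATTERN.sub(r"\1/datahub-project/datahub", text)
        if updated != text:
            path.write_text(updated, encoding="utf-8")

if __name__ == "__main__":
    rewrite_refs()
```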
David Haglund 2022-04-04 21:39:30 +02:00 committed by GitHub
parent 5e0409f8a6
commit df9e07fda2
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
24 changed files with 127 additions and 131 deletions

@@ -25,14 +25,14 @@ HOSTED_DOCS_ONLY-->
# DataHub: The Metadata Platform for the Modern Data Stack
## Built with ❤️ by <img src="https://datahubproject.io/img/acryl-logo-light-mark.png" width="25"/> [Acryl Data](https://acryldata.io) and <img src="https://datahubproject.io/img/LI-In-Bug.png" width="25"/> [LinkedIn](https://engineering.linkedin.com)
[![Version](https://img.shields.io/github/v/release/linkedin/datahub?include_prereleases)](https://github.com/datahub-project/datahub/releases/latest)
[![Version](https://img.shields.io/github/v/release/datahub-project/datahub?include_prereleases)](https://github.com/datahub-project/datahub/releases/latest)
[![PyPI version](https://badge.fury.io/py/acryl-datahub.svg)](https://badge.fury.io/py/acryl-datahub)
[![build & test](https://github.com/datahub-project/datahub/workflows/build%20&%20test/badge.svg?branch=master&event=push)](https://github.com/datahub-project/datahub/actions?query=workflow%3A%22build+%26+test%22+branch%3Amaster+event%3Apush)
[![Docker Pulls](https://img.shields.io/docker/pulls/linkedin/datahub-gms.svg)](https://hub.docker.com/r/linkedin/datahub-gms)
[![Slack](https://img.shields.io/badge/slack-join_chat-white.svg?logo=slack&style=social)](https://slack.datahubproject.io)
[![PRs Welcome](https://img.shields.io/badge/PRs-welcome-brightgreen.svg)](https://github.com/datahub-project/datahub/blob/master/docs/CONTRIBUTING.md)
[![GitHub commit activity](https://img.shields.io/github/commit-activity/m/linkedin/datahub)](https://github.com/datahub-project/datahub/pulls?q=is%3Apr)
[![License](https://img.shields.io/github/license/linkedin/datahub)](https://github.com/datahub-project/datahub/blob/master/LICENSE)
[![GitHub commit activity](https://img.shields.io/github/commit-activity/m/datahub-project/datahub)](https://github.com/datahub-project/datahub/pulls?q=is%3Apr)
[![License](https://img.shields.io/github/license/datahub-project/datahub)](https://github.com/datahub-project/datahub/blob/master/LICENSE)
[![YouTube](https://img.shields.io/youtube/channel/subscribers/UC3qFQC5IiwR5fvWEqi_tJ5w?style=social)](https://www.youtube.com/channel/UC3qFQC5IiwR5fvWEqi_tJ5w)
[![Medium](https://img.shields.io/badge/Medium-12100E?style=for-the-badge&logo=medium&logoColor=white)](https://medium.com/datahub-project)
[![Follow](https://img.shields.io/twitter/follow/datahubproject?label=Follow&style=social)](https://twitter.com/datahubproject)
@@ -81,7 +81,7 @@ There's a [hosted demo environment](https://datahubproject.io/docs/demo) where y
## Source Code and Repositories
- [linkedin/datahub](https://github.com/datahub-project/datahub): This repository contains the complete source code for DataHub's metadata model, metadata services, integration connectors and the web application.
- [datahub-project/datahub](https://github.com/datahub-project/datahub): This repository contains the complete source code for DataHub's metadata model, metadata services, integration connectors and the web application.
## Documentation

@@ -22,11 +22,11 @@ docker info | grep Memory
mkdir -p airflow_install
cd airflow_install
# Download docker-compose file
curl -L 'https://raw.githubusercontent.com/linkedin/datahub/master/docker/airflow/docker-compose.yaml' -o docker-compose.yaml
curl -L 'https://raw.githubusercontent.com/datahub-project/datahub/master/docker/airflow/docker-compose.yaml' -o docker-compose.yaml
# Create dags directory
mkdir -p dags
# Download a sample DAG
curl -L 'https://raw.githubusercontent.com/linkedin/datahub/master/metadata-ingestion/src/datahub_provider/example_dags/lineage_backend_demo.py' -o dags/lineage_backend_demo.py
curl -L 'https://raw.githubusercontent.com/datahub-project/datahub/master/metadata-ingestion/src/datahub_provider/example_dags/lineage_backend_demo.py' -o dags/lineage_backend_demo.py
```
### What is different between this docker-compose file and the official Apache Airflow docker compose file?

@@ -22,7 +22,7 @@ insert into metadata_aspect_v2 (urn, aspect, version, metadata, createdon, creat
'urn:li:corpuser:datahub',
'corpUserEditableInfo',
0,
'{"skills":[],"teams":[],"pictureLink":"https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/default_avatar.png"}',
'{"skills":[],"teams":[],"pictureLink":"https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/default_avatar.png"}',
now(),
'urn:li:corpuser:__datahub_system'
);

@@ -28,7 +28,7 @@ INSERT INTO temp_metadata_aspect_v2 (urn, aspect, version, metadata, createdon,
'urn:li:corpuser:datahub',
'corpUserEditableInfo',
0,
'{"skills":[],"teams":[],"pictureLink":"https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/default_avatar.png"}',
'{"skills":[],"teams":[],"pictureLink":"https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/default_avatar.png"}',
now(),
'urn:li:corpuser:__datahub_system'
);

@@ -22,7 +22,7 @@ INSERT INTO metadata_aspect_v2 (urn, aspect, version, metadata, createdon, creat
'urn:li:corpuser:datahub',
'corpUserEditableInfo',
0,
'{"skills":[],"teams":[],"pictureLink":"https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/default_avatar.png"}',
'{"skills":[],"teams":[],"pictureLink":"https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/default_avatar.png"}',
now(),
'urn:li:corpuser:__datahub_system'
);

@@ -24,7 +24,7 @@ INSERT INTO temp_metadata_aspect_v2 (urn, aspect, version, metadata, createdon,
'urn:li:corpuser:datahub',
'corpUserEditableInfo',
0,
'{"skills":[],"teams":[],"pictureLink":"https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/default_avatar.png"}',
'{"skills":[],"teams":[],"pictureLink":"https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/default_avatar.png"}',
now(),
'urn:li:corpuser:__datahub_system'
);

@@ -22,7 +22,7 @@ insert into metadata_aspect_v2 (urn, aspect, version, metadata, createdon, creat
'urn:li:corpuser:datahub',
'corpUserEditableInfo',
0,
'{"skills":[],"teams":[],"pictureLink":"https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/default_avatar.png"}',
'{"skills":[],"teams":[],"pictureLink":"https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/default_avatar.png"}',
now(),
'urn:li:corpuser:__datahub_system'
);

@@ -1,31 +1,31 @@
- Start Date: (fill me in with today's date, 2022-02-22)
- RFC PR: https://github.com/linkedin/datahub/pull/4237
- RFC PR: https://github.com/datahub-project/datahub/pull/4237
- Discussion Issue: (GitHub issue this was discussed in before the RFC, if any)
- Implementation PR(s): (leave this empty)
# Extend data model to model Notebook entity
## Background
[Querybook](https://www.querybook.org/) is Pinterest's open-source big data IDE via a notebook interface.
We (Included Health) leverage it as our main querying tool. It has a feature, DataDoc, which organizes rich text,
queries, and charts into a notebook to easily document analyses. People can work collaboratively with others in a
DataDoc and get real-time updates. We believe it would be valuable to ingest the DataDoc metadata into DataHub and make
it easily searchable and discoverable by others.
## Summary
This RFC proposes the data model used to model the DataDoc entity. It does not discuss architecture, APIs, or other
implementation details. This RFC only includes the minimum data model that meets our initial goal. If the community
decides to adopt this new entity, further effort is needed.
## Detailed design
### DataDoc Model
![DataDoc High Level Model](DataDoc-high-level-model.png)
As shown in the above diagram, DataDoc is a document which contains a list of DataDoc cells. It organizes rich text,
queries, and charts into a notebook to easily document analyses. We can see that the DataDoc model is very similar to
Notebook. DataDoc can be viewed as a subset of Notebook. Therefore we are going to model Notebook rather than DataDoc.
We will include a "subTypes" aspect to differentiate Notebook and DataDoc.
### Notebook Data Model
This section talks about the minimum data model of Notebook which could meet our needs.
@@ -44,13 +44,13 @@ This section talks about the minimum data model of Notebook which could meet our
- globalTags
- institutionalMemory
- browsePaths
- domains
- subTypes
- dataPlatformInstance
- glossaryTerms
### Notebook Cells
A Notebook cell is the unit that composes a Notebook. There are three types of cells: Text Cell, Query Cell, and Chart Cell. Each
type of cell has its own metadata. Since a cell only lives within a Notebook, we model cells as one aspect of Notebook
rather than as another entity. Here are the metadata of each type of cell (see the illustrative sketch after this list):
- TextCell
@@ -70,5 +70,5 @@ rather than another entity. Here are the metadata of each type of cell:
- lastModified: Captures information about who created/last modified/deleted this Notebook cell and when
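To make the cell model concrete, here is an illustrative Python sketch of the structure described above (field names beyond those listed are hypothetical; the actual model is defined in PDL, not Python):

```python
from dataclasses import dataclass
from typing import List, Union

@dataclass
class AuditStamp:
    actor: str    # e.g. "urn:li:corpuser:jdoe"
    time_ms: int  # epoch millis of the change

@dataclass
class TextCell:
    text: str                 # the rich text content
    lastModified: AuditStamp  # who created/last modified this cell and when

@dataclass
class QueryCell:
    rawQuery: str             # the query string of this cell
    lastModified: AuditStamp

@dataclass
class ChartCell:
    lastModified: AuditStamp  # chart configuration omitted for brevity

# Cells only live within a Notebook, so they are modeled as one aspect
# of the Notebook entity rather than as separate entities.
NotebookCell = Union[TextCell, QueryCell, ChartCell]

@dataclass
class NotebookContent:
    cells: List[NotebookCell]
```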
## Future Work
Querybook provides an embeddable feature. We could embed a query tab, which utilizes this embedded feature, in DataHub
to provide a search-and-explore experience to users.

@@ -1582,7 +1582,7 @@
"name": "adlsGen1",
"displayName": "Azure Data Lake (Gen 1)",
"type": "FILE_SYSTEM",
"logoUrl": "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/adlslogo.png"
"logoUrl": "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/adlslogo.png"
}
}
]
@@ -1602,7 +1602,7 @@
"name": "adlsGen2",
"displayName": "Azure Data Lake (Gen 2)",
"type": "FILE_SYSTEM",
"logoUrl": "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/adlslogo.png"
"logoUrl": "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/adlslogo.png"
}
}
]
@@ -1641,7 +1641,7 @@
"name": "couchbase",
"displayName": "Couchbase",
"type": "KEY_VALUE_STORE",
"logoUrl": "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/couchbaselogo.png"
"logoUrl": "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/couchbaselogo.png"
}
}
]
@@ -1680,7 +1680,7 @@
"name": "hdfs",
"displayName": "HDFS",
"type": "FILE_SYSTEM",
"logoUrl": "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/hadooplogo.png"
"logoUrl": "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/hadooplogo.png"
}
}
]
@@ -1700,7 +1700,7 @@
"name": "hive",
"displayName": "Hive",
"type": "FILE_SYSTEM",
"logoUrl": "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/hivelogo.png"
"logoUrl": "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/hivelogo.png"
}
}
]
@@ -1720,7 +1720,7 @@
"name": "s3",
"displayName": "AWS S3",
"type": "FILE_SYSTEM",
"logoUrl": "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/s3.png"
"logoUrl": "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/s3.png"
}
}
]
@@ -1740,7 +1740,7 @@
"name": "kafka",
"displayName": "Kafka",
"type": "MESSAGE_BROKER",
"logoUrl": "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/kafkalogo.png"
"logoUrl": "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/kafkalogo.png"
}
}
]
@@ -1760,7 +1760,7 @@
"name": "kusto",
"displayName": "Kusto",
"type": "OLAP_DATASTORE",
"logoUrl": "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/kustologo.png"
"logoUrl": "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/kustologo.png"
}
}
]
@@ -1780,7 +1780,7 @@
"name": "mongodb",
"displayName": "MongoDB",
"type": "KEY_VALUE_STORE",
"logoUrl": "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/mongodblogo.png"
"logoUrl": "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/mongodblogo.png"
}
}
]
@@ -1800,7 +1800,7 @@
"name": "mysql",
"displayName": "MySQL",
"type": "RELATIONAL_DB",
"logoUrl": "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/mysqllogo.png"
"logoUrl": "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/mysqllogo.png"
}
}
]
@@ -1820,7 +1820,7 @@
"name": "oracle",
"displayName": "Oracle",
"type": "RELATIONAL_DB",
"logoUrl": "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/oraclelogo.png"
"logoUrl": "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/oraclelogo.png"
}
}
]
@@ -1840,7 +1840,7 @@
"name": "pinot",
"displayName": "Pinot",
"type": "OLAP_DATASTORE",
"logoUrl": "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/pinotlogo.png"
"logoUrl": "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/pinotlogo.png"
}
}
]
@@ -1860,7 +1860,7 @@
"name": "postgres",
"displayName": "PostgreSQL",
"type": "RELATIONAL_DB",
"logoUrl": "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/postgreslogo.png"
"logoUrl": "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/postgreslogo.png"
}
}
]
@@ -1880,7 +1880,7 @@
"name": "presto",
"displayName": "Presto",
"type": "QUERY_ENGINE",
"logoUrl": "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/prestologo.png"
"logoUrl": "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/prestologo.png"
}
}
]
@@ -1900,7 +1900,7 @@
"name": "teradata",
"displayName": "Teradata",
"type": "RELATIONAL_DB",
"logoUrl": "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/teradatalogo.png"
"logoUrl": "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/teradatalogo.png"
}
}
]
@@ -1939,7 +1939,7 @@
"name": "snowflake",
"displayName": "Snowflake",
"type": "RELATIONAL_DB",
"logoUrl": "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/snowflakelogo.png"
"logoUrl": "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/snowflakelogo.png"
}
}
]
@@ -1959,7 +1959,7 @@
"name": "redshift",
"displayName": "Redshift",
"type": "RELATIONAL_DB",
"logoUrl": "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/redshiftlogo.png"
"logoUrl": "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/redshiftlogo.png"
}
}
]
@@ -1979,7 +1979,7 @@
"name": "mssql",
"displayName": "SQL Server",
"type": "RELATIONAL_DB",
"logoUrl": "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/mssqllogo.png"
"logoUrl": "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/mssqllogo.png"
}
}
]
@@ -1999,7 +1999,7 @@
"name": "bigquery",
"displayName": "BigQuery",
"type": "RELATIONAL_DB",
"logoUrl": "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/bigquerylogo.png"
"logoUrl": "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/bigquerylogo.png"
}
}
]
@@ -2019,7 +2019,7 @@
"name": "druid",
"displayName": "Druid",
"type": "OLAP_DATASTORE",
"logoUrl": "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/druidlogo.png"
"logoUrl": "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/druidlogo.png"
}
}
]
@@ -2039,7 +2039,7 @@
"name": "looker",
"displayName": "Looker",
"type": "OTHERS",
"logoUrl": "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/lookerlogo.png"
"logoUrl": "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/lookerlogo.png"
}
}
]
@@ -2059,7 +2059,7 @@
"name": "feast",
"displayName": "Feast",
"type": "OTHERS",
"logoUrl": "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/feastlogo.png"
"logoUrl": "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/feastlogo.png"
}
}
]
@@ -2079,7 +2079,7 @@
"name": "sagemaker",
"displayName": "SageMaker",
"type": "OTHERS",
"logoUrl": "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/sagemakerlogo.png"
"logoUrl": "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/sagemakerlogo.png"
}
}
]
@@ -2099,7 +2099,7 @@
"name": "glue",
"displayName": "Glue",
"type": "OTHERS",
"logoUrl": "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/gluelogo.png"
"logoUrl": "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/gluelogo.png"
}
}
]

@@ -18,14 +18,14 @@ This plugin extracts the following:
**_NOTE_:** To get complete Looker metadata integration (including Looker dashboards and charts, and lineage to the underlying Looker views), you must ALSO use the Looker source. Documentation for that is [here](./looker.md).
| Capability | Status | Details |
| -----------| ------ | ---- |
| Platform Instance | Partial (Lineage only) | Platform instances are supported for lineage edges between Looker and external data platforms like BigQuery, Snowflake etc. Not supported for Looker entities themselves. [link](../../docs/platform-instances.md) |
### Configuration Notes
See the [Looker authentication docs](https://docs.looker.com/reference/api-and-integration/api-auth#authentication_with_an_sdk) for the steps to create a client ID and secret.
You need to ensure that the API key is attached to a user that has Admin privileges. If that is not possible, read the configuration section to provide an offline specification of the `connection_to_platform_map` and the `project_name`.
@@ -48,9 +48,9 @@ source:
base_url: https://YOUR_INSTANCE.cloud.looker.com
# Credentials for your Looker connection (https://docs.looker.com/reference/api-and-integration/api-auth)
client_id: client_id_from_looker
client_secret: client_secret_from_looker
# Alternative to API section above if you want a purely file-based ingestion with no api calls to Looker or if you want to provide platform_instance ids for your connections
# project_name: PROJECT_NAME # See (https://docs.looker.com/data-modeling/getting-started/how-project-works) to understand what is your project name
# connection_to_platform_map:
@@ -66,11 +66,11 @@ source:
# default_schema: DEFAULT_SCHEMA # the default schema configured for this connection
# platform_instance: bq_warehouse # optional
# platform_env: DEV # optional
github_info:
repo: org/repo-name
sink:
# sink configs
```
@@ -82,9 +82,9 @@ Note that a `.` is used to denote nested fields in the YAML recipe.
| Field | Required | Default | Description |
| ---------------------------------------------- | -------- | ---------- | ----------------------------------------------------------------------- |
| `base_folder` | ✅ | | Where the `*.model.lkml` and `*.view.lkml` files are stored. |
| `api.base_url` | ❓ if using api | | Url to your Looker instance: https://company.looker.com:19999 or https://looker.company.com, or similar. |
| `api.client_id` | ❓ if using api | | Looker API3 client ID. |
| `api.client_secret` | ❓ if using api | | Looker API3 client secret. |
| `project_name` | ❓ if NOT using api | | The project name within which all the model files live. See (https://docs.looker.com/data-modeling/getting-started/how-project-works) to understand what the Looker project name should be. The simplest way to see your projects is to click on `Develop` followed by `Manage LookML Projects` in the Looker application. |
| `connection_to_platform_map.<connection_name>` | | | Mappings from connection names in the model files to platform, database and schema values |
| `connection_to_platform_map.<connection_name>.platform` | ❓ if NOT using api | | Mappings from connection names in the model files to platform names (e.g. snowflake, bigquery, etc.) |
@@ -99,20 +99,20 @@ Note that a `.` is used to denote nested fields in the YAML recipe.
| `view_pattern.allow` | | | List of regex patterns for views to include in ingestion. |
| `view_pattern.deny` | | | List of regex patterns for views to exclude from ingestion. |
| `view_pattern.ignoreCase` | | `True` | Whether to ignore case sensitivity during pattern matching. |
| `view_naming_pattern` | | `{project}.view.{name}` | Pattern for providing dataset names to views. Allowed variables are `{project}`, `{model}`, `{name}` |
| `view_browse_pattern` | | `/{env}/{platform}/{project}/views/{name}` | Pattern for providing browse paths to views. Allowed variables are `{project}`, `{model}`, `{name}`, `{platform}` and `{env}` |
| `env` | | `"PROD"` | Environment to use in namespace when constructing URNs. |
| `parse_table_names_from_sql` | | `False` | See note below. |
| `tag_measures_and_dimensions` | | `True` | When enabled, attaches tags to measures, dimensions and dimension groups to make them more discoverable. When disabled, adds this information to the description of the column. |
| `github_info` | | Empty. | When provided, will annotate views with github urls. See config variables below. |
| `github_info.repo` | ✅ if providing `github_info` | | Your github repository in `org/repo` form. e.g. `linkedin/datahub` |
| `github_info.repo` | ✅ if providing `github_info` | | Your github repository in `org/repo` form. e.g. `datahub-project/datahub` |
| `github_info.branch` | | `main` | The default branch in your repo that you want urls to point to. Typically `main` or `master` |
| `github_info.base_url` | | `https://github.com` | The base url for your github coordinates |
| `sql_parser` | | `datahub.utilities.sql_parser.DefaultSQLParser` | See note below. |
| `transport_options` | | | Populates the [TransportOptions](https://github.com/looker-open-source/sdk-codegen/blob/94d6047a0d52912ac082eb91616c1e7c379ab262/python/looker_sdk/rtl/transport.py#L70) struct for looker client |
Note! The integration can use an SQL parser to try to parse the tables the views depend on. This parsing is disabled by default,
but can be enabled by setting `parse_table_names_from_sql: True`. The default parser is based on the [`sqllineage`](https://pypi.org/project/sqllineage/) package.
As this package doesn't officially support all the SQL dialects that Looker supports, the result might not be correct. You can, however, implement a
custom parser and put it to use by setting the `sql_parser` configuration value. A custom SQL parser must inherit from `datahub.utilities.sql_parser.SQLParser`
and must be made available to DataHub by, for example, installing it. The configuration then needs to be set to `module_name.ClassName` of the parser.
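As a sketch, a custom parser might look like the following (assuming the base class takes the SQL string in its constructor and exposes `get_tables()`; verify against `datahub.utilities.sql_parser.SQLParser` in your installed version):

```python
from typing import List

from datahub.utilities.sql_parser import SQLParser
from sqllineage.runner import LineageRunner


class MyLookerSQLParser(SQLParser):
    """Hypothetical parser that delegates table extraction to sqllineage."""

    def __init__(self, sql_query: str) -> None:
        super().__init__(sql_query)
        self._runner = LineageRunner(sql_query)

    def get_tables(self) -> List[str]:
        # Fully qualified names of the tables the query reads from.
        return [str(table) for table in self._runner.source_tables()]

    def get_columns(self) -> List[str]:
        # Some versions of the base class also require column extraction.
        return []
```

The recipe would then reference it as `sql_parser: my_module.MyLookerSQLParser`.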

@@ -34,7 +34,7 @@ M1_QUICKSTART_COMPOSE_FILE = (
BOOTSTRAP_MCES_FILE = "metadata-ingestion/examples/mce_files/bootstrap_mce.json"
GITHUB_BASE_URL = "https://raw.githubusercontent.com/linkedin/datahub/master"
GITHUB_BASE_URL = "https://raw.githubusercontent.com/datahub-project/datahub/master"
GITHUB_NEO4J_AND_ELASTIC_QUICKSTART_COMPOSE_URL = (
f"{GITHUB_BASE_URL}/{NEO4J_AND_ELASTIC_QUICKSTART_COMPOSE_FILE}"
)

@@ -24,7 +24,7 @@ insert into metadata_aspect (urn, aspect, version, metadata, createdon, createdb
'urn:li:corpuser:datahub',
'com.linkedin.identity.CorpUserEditableInfo',
0,
'{"skills":[],"teams":[],"pictureLink":"https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/default_avatar.png"}',
'{"skills":[],"teams":[],"pictureLink":"https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/default_avatar.png"}',
now(),
'urn:li:corpuser:__datahub_system'
);

@@ -12,10 +12,10 @@ import org.apache.tools.ant.filters.ReplaceTokens
jar.enabled = false // Since we only want to build shadow jars, disabling the regular jar creation
dependencies {
implementation project(':metadata-models')
shadow externalDependency.httpAsyncClient // we want our clients to provide this
implementation externalDependency.jacksonDataBind
compileOnly externalDependency.lombok
annotationProcessor externalDependency.lombok
testCompile externalDependency.httpAsyncClient // needed as shadow excludes it
@@ -72,7 +72,7 @@ if (project.hasProperty("releaseVersion")) {
version = versionParts[0..versionParts.size()-1].join('.') + '-SNAPSHOT'
}
}
processResources {
filter(ReplaceTokens, tokens:[fullVersion: detailedVersionString])
}
@@ -82,7 +82,7 @@ test {
finalizedBy jacocoTestReport
}
task checkShadowJar(type: Exec) {
commandLine 'sh', '-c', 'scripts/check_jar.sh'
}
@@ -130,7 +130,7 @@ task sourcesJar(type: Jar) {
}
task javadocJar(type: Jar) {
archiveClassifier = 'javadoc'
from javadoc
}
@@ -148,8 +148,8 @@ publishing {
artifacts = [ shadowJar, javadocJar, sourcesJar ]
scm {
connection = 'scm:git:git://github.com/linkedin/datahub.git'
developerConnection = 'scm:git:ssh://github.com:linkedin/datahub.git'
connection = 'scm:git:git://github.com/datahub-project/datahub.git'
developerConnection = 'scm:git:ssh://github.com:datahub-project/datahub.git'
url = 'https://github.com/datahub-project/datahub.git'
}

@@ -11,14 +11,14 @@ import org.apache.tools.ant.filters.ReplaceTokens
jar.enabled = false // Since we only want to build shadow jars, disabling the regular jar creation
//to rename artifacts for publish
project.archivesBaseName = 'datahub-'+project.name
//mark implementation dependencies which need to be excluded along with transitive dependencies from shadowjar
//functionality is exactly the same as "implementation"
configurations {
provided
implementation.extendsFrom provided
}
def detailedVersionString = "0.0.0-unknown-SNAPSHOT"
@@ -64,7 +64,7 @@ if (project.hasProperty("releaseVersion")) {
version = versionParts[0..versionParts.size()-1].join('.') + '-SNAPSHOT'
}
}
processResources {
filter(ReplaceTokens, tokens:[fullVersion: detailedVersionString])
}
@@ -78,15 +78,15 @@ dependencies {
provided(externalDependency.hadoopCommon) {
force = true
} // required for org.apache.hadoop.util.StopWatch
provided(externalDependency.commonsIo) {
force = true
} // required for org.apache.commons.io.Charsets that is used internally
compileOnly externalDependency.lombok
annotationProcessor externalDependency.lombok
implementation externalDependency.typesafeConfig
implementation externalDependency.opentracingJdbc
@@ -99,33 +99,33 @@ dependencies {
// Tests need a concrete log4j available. Providing it here
testImplementation 'org.apache.logging.log4j:log4j-api:2.17.1'
testImplementation 'org.apache.logging.log4j:log4j-core:2.17.1'
testImplementation(externalDependency.postgresql){
exclude group: "com.fasterxml.jackson.core"
}
testImplementation externalDependency.mockito
testImplementation(externalDependency.mockServer){
exclude group: "com.fasterxml.jackson.core"
} // older version to allow older guava
testImplementation(externalDependency.mockServerClient){
exclude group: "com.fasterxml.jackson.core"
} // older version to allow older guava
testImplementation(externalDependency.testContainersPostgresql)
}
task checkShadowJar(type: Exec) {
commandLine 'sh', '-c', 'scripts/check_jar.sh'
}
shadowJar {
zip64=true
classifier=''
mergeServiceFiles()
def exclude_modules = project
.configurations
.provided
@@ -136,13 +136,13 @@ shadowJar {
it.name
}
dependencies {
exclude(dependency {
exclude_modules.contains(it.name)
})
}
relocate 'org.apache.http','datahub.spark2.shaded.http'
relocate 'org.apache.commons.codec', 'datahub.spark2.shaded.o.a.c.codec'
relocate 'org.apache.commons.compress', 'datahub.spark2.shaded.o.a.c.compress'
@@ -197,8 +197,8 @@ publishing {
artifacts = [ shadowJar, javadocJar, sourcesJar ]
scm {
connection = 'scm:git:git://github.com/linkedin/datahub.git'
developerConnection = 'scm:git:ssh://github.com:linkedin/datahub.git'
connection = 'scm:git:git://github.com/datahub-project/datahub.git'
developerConnection = 'scm:git:ssh://github.com:datahub-project/datahub.git'
url = 'https://github.com/datahub-project/datahub.git'
}
@@ -248,7 +248,3 @@ nexusStaging {
username = System.getenv("NEXUS_USERNAME")
password = System.getenv("NEXUS_PASSWORD")
}

@@ -6,6 +6,6 @@ Examples of data platforms are `redshift`, `hive`, `bigquery`, `looker`, `tablea
## Identity
Data Platforms are identified by the name of the technology. A complete list of currently supported data platforms is available [here](https://raw.githubusercontent.com/linkedin/datahub/master/metadata-service/restli-servlet-impl/src/main/resources/DataPlatformInfo.json).
Data Platforms are identified by the name of the technology. A complete list of currently supported data platforms is available [here](https://raw.githubusercontent.com/datahub-project/datahub/master/metadata-service/restli-servlet-impl/src/main/resources/DataPlatformInfo.json).
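Judging from that file's structure (it appears later in this commit as a JSON object keyed by platform URN, with `name`, `displayName`, and `type` fields), the supported platforms can be listed with a short Python sketch:

```python
import requests

URL = (
    "https://raw.githubusercontent.com/datahub-project/datahub/master/"
    "metadata-service/restli-servlet-impl/src/main/resources/DataPlatformInfo.json"
)

# Each entry maps a platform URN to its info record.
platforms = requests.get(URL, timeout=10).json()
for urn, info in sorted(platforms.items()):
    print(f"{info['name']:12} {info['type']:18} {urn}")
```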

@@ -7,7 +7,7 @@ The dataset entity is one of the most important entities in the metadata model. The
Datasets are identified by three pieces of information:
- The platform that they belong to: this is the specific data technology that hosts this dataset. Examples are `hive`, `bigquery`, `redshift` etc. See [dataplatform](./dataPlatform.md) for more details.
- The name of the dataset in the specific platform. Each platform will have a unique way of naming assets within its system. Usually, names are composed by combining the structural elements of the name and separating them by `.`. e.g. relational datasets are usually named as `<db>.<schema>.<table>`, except for platforms like MySQL which do not have the concept of a `schema`; as a result MySQL datasets are named `<db>.<table>`. In cases where the specific platform can have multiple instances (e.g. there are multiple different instances of MySQL databases that have different data assets in them), names can also include instance ids, making the general pattern for a name `<platform_instance>.<db>.<schema>.<table>`.
- The environment or fabric to which the dataset belongs: this is an additional qualifier available on the identifier, to allow disambiguating datasets that live in Production environments from datasets that live in Non-production environments, such as Staging, QA, etc. The full list of supported environments / fabrics is available in [FabricType.pdl](https://raw.githubusercontent.com/linkedin/datahub/master/li-utils/src/main/pegasus/com/linkedin/common/FabricType.pdl).
- The environment or fabric to which the dataset belongs: this is an additional qualifier available on the identifier, to allow disambiguating datasets that live in Production environments from datasets that live in Non-production environments, such as Staging, QA, etc. The full list of supported environments / fabrics is available in [FabricType.pdl](https://raw.githubusercontent.com/datahub-project/datahub/master/li-utils/src/main/pegasus/com/linkedin/common/FabricType.pdl).
An example of a dataset identifier is `urn:li:dataset:(urn:li:dataPlatform:redshift,userdb.public.customer_table,PROD)`.
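A quick sketch of how the three identity parts combine into that URN; `make_dataset_urn` ships with the `acryl-datahub` Python package, and the assertion spells out the equivalent structure:

```python
from datahub.emitter.mce_builder import make_dataset_urn

# platform + name + environment/fabric uniquely identify a dataset
urn = make_dataset_urn(platform="redshift", name="userdb.public.customer_table", env="PROD")
assert urn == "urn:li:dataset:(urn:li:dataPlatform:redshift,userdb.public.customer_table,PROD)"
```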
@@ -108,7 +108,7 @@ Similarly, here is an example of how you would add a term to a field in a datase
### Ownership
Ownership is associated with a dataset using the `ownership` aspect. Owners can be of a few different types, `DATAOWNER`, `PRODUCER`, `DEVELOPER`, `CONSUMER`, etc. See [OwnershipType.pdl](https://raw.githubusercontent.com/linkedin/datahub/master/metadata-models/src/main/pegasus/com/linkedin/common/OwnershipType.pdl) for the full list of ownership types and their meanings. Ownership can be inherited from source systems, or additionally added in DataHub using the UI. Ingestion connectors for sources will automatically set owners when the source system supports it.
Ownership is associated with a dataset using the `ownership` aspect. Owners can be of a few different types, `DATAOWNER`, `PRODUCER`, `DEVELOPER`, `CONSUMER`, etc. See [OwnershipType.pdl](https://raw.githubusercontent.com/datahub-project/datahub/master/metadata-models/src/main/pegasus/com/linkedin/common/OwnershipType.pdl) for the full list of ownership types and their meanings. Ownership can be inherited from source systems, or additionally added in DataHub using the UI. Ingestion connectors for sources will automatically set owners when the source system supports it.
#### Adding Owners
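As an illustrative sketch (not the original document's example), an `ownership` aspect can be constructed with the generated classes from the `acryl-datahub` Python package:

```python
from datahub.metadata.schema_classes import (
    OwnerClass,
    OwnershipClass,
    OwnershipTypeClass,
)

# Two owners of different types for the same dataset.
ownership = OwnershipClass(
    owners=[
        OwnerClass(owner="urn:li:corpuser:jdoe", type=OwnershipTypeClass.DATAOWNER),
        OwnerClass(owner="urn:li:corpgroup:analytics", type=OwnershipTypeClass.PRODUCER),
    ]
)
```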

@@ -21,7 +21,7 @@ record CorpGroupEditableInfo {
/**
* A URL which points to a picture which user wants to set as the photo for the group
*/
pictureLink: Url = "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/default_avatar.png"
pictureLink: Url = "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/default_avatar.png"
/**
* Slack channel for the group

@@ -39,7 +39,7 @@ record CorpUserEditableInfo {
/**
* A URL which points to a picture which user wants to set as a profile photo
*/
pictureLink: Url = "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/default_avatar.png"
pictureLink: Url = "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/default_avatar.png"
/**
* DataHub-native display name

@@ -1864,7 +1864,7 @@
"name" : "pictureLink",
"type" : "com.linkedin.common.Url",
"doc" : "A URL which points to a picture which user wants to set as a profile photo",
"default" : "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/default_avatar.png"
"default" : "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/default_avatar.png"
}, {
"name" : "displayName",
"type" : "string",

@@ -2274,7 +2274,7 @@
"name" : "pictureLink",
"type" : "com.linkedin.common.Url",
"doc" : "A URL which points to a picture which user wants to set as a profile photo",
"default" : "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/default_avatar.png"
"default" : "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/default_avatar.png"
}, {
"name" : "displayName",
"type" : "string",

@@ -1611,7 +1611,7 @@
"name" : "pictureLink",
"type" : "com.linkedin.common.Url",
"doc" : "A URL which points to a picture which user wants to set as a profile photo",
"default" : "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/default_avatar.png"
"default" : "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/default_avatar.png"
}, {
"name" : "displayName",
"type" : "string",

@@ -2274,7 +2274,7 @@
"name" : "pictureLink",
"type" : "com.linkedin.common.Url",
"doc" : "A URL which points to a picture which user wants to set as a profile photo",
"default" : "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/default_avatar.png"
"default" : "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/default_avatar.png"
}, {
"name" : "displayName",
"type" : "string",

@@ -4,14 +4,14 @@
"name": "adlsGen1",
"displayName": "Azure Data Lake (Gen 1)",
"type": "FILE_SYSTEM",
"logoUrl": "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/adlslogo.png"
"logoUrl": "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/adlslogo.png"
},
"urn:li:dataPlatform:adlsGen2": {
"datasetNameDelimiter": "/",
"name": "adlsGen2",
"displayName": "Azure Data Lake (Gen 2)",
"type": "FILE_SYSTEM",
"logoUrl": "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/adlslogo.png"
"logoUrl": "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/adlslogo.png"
},
"urn:li:dataPlatform:ambry": {
"datasetNameDelimiter": ".",
@@ -24,7 +24,7 @@
"name": "couchbase",
"displayName": "Couchbase",
"type": "KEY_VALUE_STORE",
"logoUrl": "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/couchbaselogo.png"
"logoUrl": "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/couchbaselogo.png"
},
"urn:li:dataPlatform:external": {
"datasetNameDelimiter": ".",
@@ -37,84 +37,84 @@
"name": "hdfs",
"displayName": "HDFS",
"type": "FILE_SYSTEM",
"logoUrl": "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/hadooplogo.png"
"logoUrl": "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/hadooplogo.png"
},
"urn:li:dataPlatform:hive": {
"datasetNameDelimiter": ".",
"name": "hive",
"displayName": "Hive",
"type": "FILE_SYSTEM",
"logoUrl": "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/hivelogo.png"
"logoUrl": "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/hivelogo.png"
},
"urn:li:dataPlatform:s3": {
"datasetNameDelimiter": "/",
"name": "s3",
"displayName": "AWS S3",
"type": "FILE_SYSTEM",
"logoUrl": "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/s3.png"
"logoUrl": "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/s3.png"
},
"urn:li:dataPlatform:kafka": {
"datasetNameDelimiter": ".",
"name": "kafka",
"displayName": "Kafka",
"type": "MESSAGE_BROKER",
"logoUrl": "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/kafkalogo.png"
"logoUrl": "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/kafkalogo.png"
},
"urn:li:dataPlatform:kusto": {
"datasetNameDelimiter": ".",
"name": "kusto",
"displayName": "Kusto",
"type": "OLAP_DATASTORE",
"logoUrl": "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/kustologo.png"
"logoUrl": "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/kustologo.png"
},
"urn:li:dataPlatform:mongodb": {
"datasetNameDelimiter": ".",
"name": "mongodb",
"displayName": "MongoDB",
"type": "KEY_VALUE_STORE",
"logoUrl": "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/mongodblogo.png"
"logoUrl": "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/mongodblogo.png"
},
"urn:li:dataPlatform:mysql": {
"datasetNameDelimiter": ".",
"name": "mysql",
"displayName": "MySQL",
"type": "RELATIONAL_DB",
"logoUrl": "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/mysqllogo.png"
"logoUrl": "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/mysqllogo.png"
},
"urn:li:dataPlatform:oracle": {
"datasetNameDelimiter": ".",
"name": "oracle",
"displayName": "Oracle",
"type": "RELATIONAL_DB",
"logoUrl": "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/oraclelogo.png"
"logoUrl": "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/oraclelogo.png"
},
"urn:li:dataPlatform:pinot": {
"datasetNameDelimiter": ".",
"name": "pinot",
"displayName": "Pinot",
"type": "OLAP_DATASTORE",
"logoUrl": "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/pinotlogo.png"
"logoUrl": "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/pinotlogo.png"
},
"urn:li:dataPlatform:postgres": {
"datasetNameDelimiter": ".",
"name": "postgres",
"displayName": "PostgreSQL",
"type": "RELATIONAL_DB",
"logoUrl": "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/postgreslogo.png"
"logoUrl": "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/postgreslogo.png"
},
"urn:li:dataPlatform:presto": {
"datasetNameDelimiter": ".",
"name": "presto",
"displayName": "Presto",
"type": "QUERY_ENGINE",
"logoUrl": "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/prestologo.png"
"logoUrl": "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/prestologo.png"
},
"urn:li:dataPlatform:teradata": {
"datasetNameDelimiter": ".",
"name": "teradata",
"displayName": "Teradata",
"type": "RELATIONAL_DB",
"logoUrl": "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/teradatalogo.png"
"logoUrl": "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/teradatalogo.png"
},
"urn:li:dataPlatform:voldemort": {
"datasetNameDelimiter": ".",
@@ -127,48 +127,48 @@
"name": "snowflake",
"displayName": "Snowflake",
"type": "RELATIONAL_DB",
"logoUrl": "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/snowflakelogo.png"
"logoUrl": "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/snowflakelogo.png"
},
"urn:li:dataPlatform:redshift": {
"datasetNameDelimiter": ".",
"name": "redshift",
"displayName": "Redshift",
"type": "RELATIONAL_DB",
"logoUrl": "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/redshiftlogo.png"
"logoUrl": "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/redshiftlogo.png"
},
"urn:li:dataPlatform:mssql": {
"datasetNameDelimiter": ".",
"name": "mssql",
"displayName": "SQL Server",
"type": "RELATIONAL_DB",
"logoUrl": "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/mssqllogo.png"
"logoUrl": "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/mssqllogo.png"
},
"urn:li:dataPlatform:bigquery": {
"datasetNameDelimiter": ".",
"name": "bigquery",
"displayName": "BigQuery",
"type": "RELATIONAL_DB",
"logoUrl": "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/bigquerylogo.png"
"logoUrl": "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/bigquerylogo.png"
},
"urn:li:dataPlatform:druid": {
"datasetNameDelimiter": ".",
"name": "druid",
"displayName": "Druid",
"type": "OLAP_DATASTORE",
"logoUrl": "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/druidlogo.png"
"logoUrl": "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/druidlogo.png"
},
"urn:li:dataPlatform:looker": {
"datasetNameDelimiter": ".",
"name": "looker",
"displayName": "Looker",
"type": "OTHERS",
"logoUrl": "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/lookerlogo.png"
"logoUrl": "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/lookerlogo.png"
},
"urn:li:dataPlatform:powerbi": {
"datasetNameDelimiter": ".",
"name": "powerbi",
"displayName": "Power BI",
"type": "OTHERS",
"logoUrl": "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/powerbilogo.png"
"logoUrl": "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/powerbilogo.png"
}
}

@@ -45,7 +45,7 @@
"name": "clickhouse",
"displayName": "ClickHouse",
"type": "RELATIONAL_DB",
"logoUrl": "https://raw.githubusercontent.com/linkedin/datahub/master/datahub-web-react/src/images/clickhouselogo.png"
"logoUrl": "https://raw.githubusercontent.com/datahub-project/datahub/master/datahub-web-react/src/images/clickhouselogo.png"
}
},
{