mirror of
https://github.com/datahub-project/datahub.git
synced 2025-12-28 02:17:53 +00:00
feat(graphql): adds container aspect for dataflow and datajob entities (#12236)
Co-authored-by: Chris Collins <chriscollins3456@gmail.com>
This commit is contained in:
parent
d8e7cb25e0
commit
efc5d31f03
@ -2377,6 +2377,17 @@ public class GmsGraphQLEngine {
|
||||
? dataJob.getDataPlatformInstance().getUrn()
|
||||
: null;
|
||||
}))
|
||||
.dataFetcher(
|
||||
"container",
|
||||
new LoadableTypeResolver<>(
|
||||
containerType,
|
||||
(env) -> {
|
||||
final DataJob dataJob = env.getSource();
|
||||
return dataJob.getContainer() != null
|
||||
? dataJob.getContainer().getUrn()
|
||||
: null;
|
||||
}))
|
||||
.dataFetcher("parentContainers", new ParentContainersResolver(entityClient))
|
||||
.dataFetcher("runs", new DataJobRunsResolver(entityClient))
|
||||
.dataFetcher("privileges", new EntityPrivilegesResolver(entityClient))
|
||||
.dataFetcher("exists", new EntityExistsResolver(entityService))
|
||||
@ -2454,6 +2465,17 @@ public class GmsGraphQLEngine {
|
||||
? dataFlow.getDataPlatformInstance().getUrn()
|
||||
: null;
|
||||
}))
|
||||
.dataFetcher(
|
||||
"container",
|
||||
new LoadableTypeResolver<>(
|
||||
containerType,
|
||||
(env) -> {
|
||||
final DataFlow dataFlow = env.getSource();
|
||||
return dataFlow.getContainer() != null
|
||||
? dataFlow.getContainer().getUrn()
|
||||
: null;
|
||||
}))
|
||||
.dataFetcher("parentContainers", new ParentContainersResolver(entityClient))
|
||||
.dataFetcher(
|
||||
"health",
|
||||
new EntityHealthResolver(
|
||||
|
||||
@ -74,6 +74,7 @@ public class DataFlowType
|
||||
DOMAINS_ASPECT_NAME,
|
||||
DEPRECATION_ASPECT_NAME,
|
||||
DATA_PLATFORM_INSTANCE_ASPECT_NAME,
|
||||
CONTAINER_ASPECT_NAME,
|
||||
DATA_PRODUCTS_ASPECT_NAME,
|
||||
BROWSE_PATHS_V2_ASPECT_NAME,
|
||||
STRUCTURED_PROPERTIES_ASPECT_NAME,
|
||||
|
||||
@ -16,6 +16,7 @@ import com.linkedin.common.urn.Urn;
|
||||
import com.linkedin.data.DataMap;
|
||||
import com.linkedin.datahub.graphql.QueryContext;
|
||||
import com.linkedin.datahub.graphql.authorization.AuthorizationUtils;
|
||||
import com.linkedin.datahub.graphql.generated.Container;
|
||||
import com.linkedin.datahub.graphql.generated.DataFlow;
|
||||
import com.linkedin.datahub.graphql.generated.DataFlowEditableProperties;
|
||||
import com.linkedin.datahub.graphql.generated.DataFlowInfo;
|
||||
@ -106,6 +107,7 @@ public class DataFlowMapper implements ModelMapper<EntityResponse, DataFlow> {
|
||||
(dataset, dataMap) ->
|
||||
dataset.setDataPlatformInstance(
|
||||
DataPlatformInstanceAspectMapper.map(context, new DataPlatformInstance(dataMap))));
|
||||
mappingHelper.mapToResult(context, CONTAINER_ASPECT_NAME, DataFlowMapper::mapContainers);
|
||||
mappingHelper.mapToResult(
|
||||
BROWSE_PATHS_V2_ASPECT_NAME,
|
||||
(dataFlow, dataMap) ->
|
||||
@ -206,6 +208,17 @@ public class DataFlowMapper implements ModelMapper<EntityResponse, DataFlow> {
|
||||
dataFlow.setTags(globalTags);
|
||||
}
|
||||
|
||||
private static void mapContainers(
|
||||
@Nullable final QueryContext context, @Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap) {
|
||||
final com.linkedin.container.Container gmsContainer =
|
||||
new com.linkedin.container.Container(dataMap);
|
||||
dataFlow.setContainer(
|
||||
Container.builder()
|
||||
.setType(EntityType.CONTAINER)
|
||||
.setUrn(gmsContainer.getContainer().toString())
|
||||
.build());
|
||||
}
|
||||
|
||||
private static void mapDomains(
|
||||
@Nullable final QueryContext context, @Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap) {
|
||||
final Domains domains = new Domains(dataMap);
|
||||
|
||||
@ -75,6 +75,7 @@ public class DataJobType
|
||||
DOMAINS_ASPECT_NAME,
|
||||
DEPRECATION_ASPECT_NAME,
|
||||
DATA_PLATFORM_INSTANCE_ASPECT_NAME,
|
||||
CONTAINER_ASPECT_NAME,
|
||||
DATA_PRODUCTS_ASPECT_NAME,
|
||||
BROWSE_PATHS_V2_ASPECT_NAME,
|
||||
SUB_TYPES_ASPECT_NAME,
|
||||
|
||||
@ -9,6 +9,7 @@ import com.linkedin.common.urn.Urn;
|
||||
import com.linkedin.data.DataMap;
|
||||
import com.linkedin.datahub.graphql.QueryContext;
|
||||
import com.linkedin.datahub.graphql.authorization.AuthorizationUtils;
|
||||
import com.linkedin.datahub.graphql.generated.Container;
|
||||
import com.linkedin.datahub.graphql.generated.DataFlow;
|
||||
import com.linkedin.datahub.graphql.generated.DataJob;
|
||||
import com.linkedin.datahub.graphql.generated.DataJobEditableProperties;
|
||||
@ -112,6 +113,14 @@ public class DataJobMapper implements ModelMapper<EntityResponse, DataJob> {
|
||||
} else if (DATA_PLATFORM_INSTANCE_ASPECT_NAME.equals(name)) {
|
||||
result.setDataPlatformInstance(
|
||||
DataPlatformInstanceAspectMapper.map(context, new DataPlatformInstance(data)));
|
||||
} else if (CONTAINER_ASPECT_NAME.equals(name)) {
|
||||
final com.linkedin.container.Container gmsContainer =
|
||||
new com.linkedin.container.Container(data);
|
||||
result.setContainer(
|
||||
Container.builder()
|
||||
.setType(EntityType.CONTAINER)
|
||||
.setUrn(gmsContainer.getContainer().toString())
|
||||
.build());
|
||||
} else if (BROWSE_PATHS_V2_ASPECT_NAME.equals(name)) {
|
||||
result.setBrowsePathV2(BrowsePathsV2Mapper.map(context, new BrowsePathsV2(data)));
|
||||
} else if (SUB_TYPES_ASPECT_NAME.equals(name)) {
|
||||
|
||||
@ -6275,6 +6275,16 @@ type DataFlow implements EntityWithRelationships & Entity & BrowsableEntity {
|
||||
"""
|
||||
dataPlatformInstance: DataPlatformInstance
|
||||
|
||||
"""
|
||||
The parent container in which the entity resides
|
||||
"""
|
||||
container: Container
|
||||
|
||||
"""
|
||||
Recursively get the lineage of containers for this entity
|
||||
"""
|
||||
parentContainers: ParentContainersResult
|
||||
|
||||
"""
|
||||
Granular API for querying edges extending from this entity
|
||||
"""
|
||||
@ -6457,6 +6467,16 @@ type DataJob implements EntityWithRelationships & Entity & BrowsableEntity {
|
||||
"""
|
||||
dataPlatformInstance: DataPlatformInstance
|
||||
|
||||
"""
|
||||
The parent container in which the entity resides
|
||||
"""
|
||||
container: Container
|
||||
|
||||
"""
|
||||
Recursively get the lineage of containers for this entity
|
||||
"""
|
||||
parentContainers: ParentContainersResult
|
||||
|
||||
"""
|
||||
Additional read write properties associated with the Data Job
|
||||
"""
|
||||
|
||||
@ -0,0 +1,42 @@
|
||||
package com.linkedin.datahub.graphql.types.dataflow.mappers;
|
||||
|
||||
import com.linkedin.common.urn.Urn;
|
||||
import com.linkedin.datahub.graphql.generated.DataFlow;
|
||||
import com.linkedin.entity.Aspect;
|
||||
import com.linkedin.entity.EntityResponse;
|
||||
import com.linkedin.entity.EnvelopedAspect;
|
||||
import com.linkedin.entity.EnvelopedAspectMap;
|
||||
import com.linkedin.metadata.Constants;
|
||||
import java.net.URISyntaxException;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import org.testng.Assert;
|
||||
import org.testng.annotations.Test;
|
||||
|
||||
public class DataFlowMapperTest {
|
||||
private static final Urn TEST_DATA_FLOW_URN =
|
||||
Urn.createFromTuple(Constants.DATA_FLOW_ENTITY_NAME, "dataflow1");
|
||||
private static final Urn TEST_CONTAINER_URN =
|
||||
Urn.createFromTuple(Constants.CONTAINER_ENTITY_NAME, "container1");
|
||||
|
||||
@Test
|
||||
public void testMapDataFlowContainer() throws URISyntaxException {
|
||||
com.linkedin.container.Container input = new com.linkedin.container.Container();
|
||||
input.setContainer(TEST_CONTAINER_URN);
|
||||
|
||||
final Map<String, EnvelopedAspect> containerAspect = new HashMap<>();
|
||||
containerAspect.put(
|
||||
Constants.CONTAINER_ASPECT_NAME,
|
||||
new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect(input.data())));
|
||||
final EntityResponse response =
|
||||
new EntityResponse()
|
||||
.setEntityName(Constants.DATA_FLOW_ENTITY_NAME)
|
||||
.setUrn(TEST_DATA_FLOW_URN)
|
||||
.setAspects(new EnvelopedAspectMap(containerAspect));
|
||||
|
||||
final DataFlow actual = DataFlowMapper.map(null, response);
|
||||
|
||||
Assert.assertEquals(actual.getUrn(), TEST_DATA_FLOW_URN.toString());
|
||||
Assert.assertEquals(actual.getContainer().getUrn(), TEST_CONTAINER_URN.toString());
|
||||
}
|
||||
}
|
||||
@ -0,0 +1,42 @@
|
||||
package com.linkedin.datahub.graphql.types.datajob.mappers;
|
||||
|
||||
import com.linkedin.common.urn.Urn;
|
||||
import com.linkedin.datahub.graphql.generated.DataJob;
|
||||
import com.linkedin.entity.Aspect;
|
||||
import com.linkedin.entity.EntityResponse;
|
||||
import com.linkedin.entity.EnvelopedAspect;
|
||||
import com.linkedin.entity.EnvelopedAspectMap;
|
||||
import com.linkedin.metadata.Constants;
|
||||
import java.net.URISyntaxException;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import org.testng.Assert;
|
||||
import org.testng.annotations.Test;
|
||||
|
||||
public class DataJobMapperTest {
|
||||
private static final Urn TEST_DATA_JOB_URN =
|
||||
Urn.createFromTuple(Constants.DATA_JOB_ENTITY_NAME, "datajob1");
|
||||
private static final Urn TEST_CONTAINER_URN =
|
||||
Urn.createFromTuple(Constants.CONTAINER_ENTITY_NAME, "container1");
|
||||
|
||||
@Test
|
||||
public void testMapDataJobContainer() throws URISyntaxException {
|
||||
com.linkedin.container.Container input = new com.linkedin.container.Container();
|
||||
input.setContainer(TEST_CONTAINER_URN);
|
||||
|
||||
final Map<String, EnvelopedAspect> containerAspect = new HashMap<>();
|
||||
containerAspect.put(
|
||||
Constants.CONTAINER_ASPECT_NAME,
|
||||
new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect(input.data())));
|
||||
final EntityResponse response =
|
||||
new EntityResponse()
|
||||
.setEntityName(Constants.DATA_JOB_ENTITY_NAME)
|
||||
.setUrn(TEST_DATA_JOB_URN)
|
||||
.setAspects(new EnvelopedAspectMap(containerAspect));
|
||||
|
||||
final DataJob actual = DataJobMapper.map(null, response);
|
||||
|
||||
Assert.assertEquals(actual.getUrn(), TEST_DATA_JOB_URN.toString());
|
||||
Assert.assertEquals(actual.getContainer().getUrn(), TEST_CONTAINER_URN.toString());
|
||||
}
|
||||
}
|
||||
@ -184,6 +184,7 @@ export class DataFlowEntity implements Entity<DataFlow> {
|
||||
degree={(result as any).degree}
|
||||
paths={(result as any).paths}
|
||||
health={data.health}
|
||||
parentContainers={data.parentContainers}
|
||||
/>
|
||||
);
|
||||
};
|
||||
|
||||
@ -10,6 +10,7 @@ import {
|
||||
GlobalTags,
|
||||
Health,
|
||||
Owner,
|
||||
ParentContainersResult,
|
||||
SearchInsight,
|
||||
} from '../../../../types.generated';
|
||||
import DefaultPreviewCard from '../../../preview/DefaultPreviewCard';
|
||||
@ -40,6 +41,7 @@ export const Preview = ({
|
||||
degree,
|
||||
paths,
|
||||
health,
|
||||
parentContainers,
|
||||
}: {
|
||||
urn: string;
|
||||
name: string;
|
||||
@ -59,6 +61,7 @@ export const Preview = ({
|
||||
degree?: number;
|
||||
paths?: EntityPath[];
|
||||
health?: Health[] | null;
|
||||
parentContainers?: ParentContainersResult | null;
|
||||
}): JSX.Element => {
|
||||
const entityRegistry = useEntityRegistry();
|
||||
return (
|
||||
@ -91,6 +94,7 @@ export const Preview = ({
|
||||
degree={degree}
|
||||
paths={paths}
|
||||
health={health || undefined}
|
||||
parentContainers={parentContainers}
|
||||
/>
|
||||
);
|
||||
};
|
||||
|
||||
@ -205,6 +205,7 @@ export class DataJobEntity implements Entity<DataJob> {
|
||||
degree={(result as any).degree}
|
||||
paths={(result as any).paths}
|
||||
health={data.health}
|
||||
parentContainers={data.parentContainers}
|
||||
/>
|
||||
);
|
||||
};
|
||||
|
||||
@ -12,6 +12,7 @@ import {
|
||||
GlobalTags,
|
||||
Health,
|
||||
Owner,
|
||||
ParentContainersResult,
|
||||
SearchInsight,
|
||||
} from '../../../../types.generated';
|
||||
import DefaultPreviewCard from '../../../preview/DefaultPreviewCard';
|
||||
@ -44,6 +45,7 @@ export const Preview = ({
|
||||
degree,
|
||||
paths,
|
||||
health,
|
||||
parentContainers,
|
||||
}: {
|
||||
urn: string;
|
||||
name: string;
|
||||
@ -64,6 +66,7 @@ export const Preview = ({
|
||||
degree?: number;
|
||||
paths?: EntityPath[];
|
||||
health?: Health[] | null;
|
||||
parentContainers?: ParentContainersResult | null;
|
||||
}): JSX.Element => {
|
||||
const entityRegistry = useEntityRegistry();
|
||||
return (
|
||||
@ -98,6 +101,7 @@ export const Preview = ({
|
||||
degree={degree}
|
||||
paths={paths}
|
||||
health={health || undefined}
|
||||
parentContainers={parentContainers}
|
||||
/>
|
||||
);
|
||||
};
|
||||
|
||||
@ -50,6 +50,9 @@ fragment dataFlowFields on DataFlow {
|
||||
dataPlatformInstance {
|
||||
...dataPlatformInstanceFields
|
||||
}
|
||||
parentContainers {
|
||||
...parentContainersFields
|
||||
}
|
||||
browsePathV2 {
|
||||
...browsePathV2Fields
|
||||
}
|
||||
|
||||
@ -403,6 +403,9 @@ fragment dataJobFields on DataJob {
|
||||
dataPlatformInstance {
|
||||
...dataPlatformInstanceFields
|
||||
}
|
||||
parentContainers {
|
||||
...parentContainersFields
|
||||
}
|
||||
privileges {
|
||||
canEditLineage
|
||||
}
|
||||
|
||||
@ -128,6 +128,9 @@ fragment autoCompleteFields on Entity {
|
||||
dataPlatformInstance {
|
||||
...dataPlatformInstanceFields
|
||||
}
|
||||
parentContainers {
|
||||
...parentContainersFields
|
||||
}
|
||||
}
|
||||
... on DataJob {
|
||||
dataFlow {
|
||||
@ -146,6 +149,9 @@ fragment autoCompleteFields on Entity {
|
||||
dataPlatformInstance {
|
||||
...dataPlatformInstanceFields
|
||||
}
|
||||
parentContainers {
|
||||
...parentContainersFields
|
||||
}
|
||||
}
|
||||
... on GlossaryTerm {
|
||||
name
|
||||
@ -626,6 +632,9 @@ fragment searchResultsWithoutSchemaField on Entity {
|
||||
dataPlatformInstance {
|
||||
...dataPlatformInstanceFields
|
||||
}
|
||||
parentContainers {
|
||||
...parentContainersFields
|
||||
}
|
||||
domain {
|
||||
...entityDomain
|
||||
}
|
||||
@ -677,6 +686,9 @@ fragment searchResultsWithoutSchemaField on Entity {
|
||||
dataPlatformInstance {
|
||||
...dataPlatformInstanceFields
|
||||
}
|
||||
parentContainers {
|
||||
...parentContainersFields
|
||||
}
|
||||
subTypes {
|
||||
typeNames
|
||||
}
|
||||
|
||||
@ -44,6 +44,7 @@ This file documents any backwards-incompatible changes in DataHub and assists pe
|
||||
- OpenAPI Update: PIT Keep Alive parameter added to scroll. NOTE: This parameter requires the `pointInTimeCreationEnabled` feature flag to be enabled and the `elasticSearch.implementation` configuration to be `elasticsearch`. This feature is not supported for OpenSearch at this time and the parameter will not be respected without both of these set.
|
||||
- OpenAPI Update 2: Previously there was an incorrectly marked parameter named `sort` on the generic list entities endpoint for v3. This parameter is deprecated and only supports a single string value while the documentation indicates it supports a list of strings. This documentation error has been fixed and the correct field, `sortCriteria`, is now documented which supports a list of strings.
|
||||
- #12223: For dbt Cloud ingestion, the "View in dbt" link will point at the "Explore" page in the dbt Cloud UI. You can revert to the old behavior of linking to the dbt Cloud IDE by setting `external_url_mode: ide`.
|
||||
- #12236: Data flow and data job entities may additionally produce a container aspect, which requires a corresponding server upgrade; otherwise the server may reject the aspect.
|
||||
|
||||
### Breaking Changes
|
||||
|
||||
|
||||
@ -70,6 +70,7 @@ entities:
|
||||
- glossaryTerms
|
||||
- institutionalMemory
|
||||
- dataPlatformInstance
|
||||
- container
|
||||
- browsePathsV2
|
||||
- structuredProperties
|
||||
- forms
|
||||
@ -93,6 +94,7 @@ entities:
|
||||
- glossaryTerms
|
||||
- institutionalMemory
|
||||
- dataPlatformInstance
|
||||
- container
|
||||
- browsePathsV2
|
||||
- structuredProperties
|
||||
- incidentsSummary
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user