Merge branch 'datahub-project:master' into master
anshbansal authored Jan 12, 2025
2 parents f9b3282 + 9897804 commit c6be903
Showing 49 changed files with 1,239 additions and 725 deletions.
15 changes: 12 additions & 3 deletions .github/actions/ci-optimization/action.yml
@@ -13,16 +13,16 @@ outputs:
     value: ${{ steps.filter.outputs.frontend == 'false' && steps.filter.outputs.ingestion == 'false' && steps.filter.outputs.backend == 'true' }}
   backend-change:
     description: "Backend code has changed"
-    value: ${{ steps.filter.outputs.backend == 'true' }}
+    value: ${{ steps.filter.outputs.backend == 'true' || steps.trigger.outputs.trigger == 'manual' }}
   ingestion-change:
     description: "Ingestion code has changed"
-    value: ${{ steps.filter.outputs.ingestion == 'true' }}
+    value: ${{ steps.filter.outputs.ingestion == 'true' || steps.trigger.outputs.trigger == 'manual' }}
   ingestion-base-change:
     description: "Ingestion base image docker image has changed"
     value: ${{ steps.filter.outputs.ingestion-base == 'true' }}
   frontend-change:
     description: "Frontend code has changed"
-    value: ${{ steps.filter.outputs.frontend == 'true' }}
+    value: ${{ steps.filter.outputs.frontend == 'true' || steps.trigger.outputs.trigger == 'manual' }}
   docker-change:
     description: "Docker code has changed"
     value: ${{ steps.filter.outputs.docker == 'true' }}
@@ -44,6 +44,15 @@ outputs:
 runs:
   using: "composite"
   steps:
+    - name: Check trigger type
+      id: trigger # Add an ID to reference this step
+      shell: bash
+      run: |
+        if [ "${{ github.event_name }}" == "workflow_dispatch" ]; then
+          echo "trigger=manual" >> $GITHUB_OUTPUT
+        else
+          echo "trigger=pr" >> $GITHUB_OUTPUT
+        fi
     - uses: dorny/paths-filter@v3
       id: filter
       with:
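Taken together, the new step records whether the run came from a pull request or a manual dispatch, and the backend/ingestion/frontend outputs OR that flag in, so a workflow_dispatch run behaves as if all of that code changed. A minimal sketch of a consuming workflow (job names and the build command are hypothetical, not part of this commit):

jobs:
  setup:
    runs-on: ubuntu-latest
    outputs:
      backend-change: ${{ steps.ci-optimize.outputs.backend-change }}
    steps:
      - uses: actions/checkout@v4
      - uses: ./.github/actions/ci-optimization
        id: ci-optimize
  backend-build:
    needs: setup
    # True when backend paths changed, or unconditionally on a manual run,
    # because the action ORs the path filter with trigger == 'manual'.
    if: ${{ needs.setup.outputs.backend-change == 'true' }}
    runs-on: ubuntu-latest
    steps:
      - run: ./gradlew build # hypothetical build command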
1 change: 1 addition & 0 deletions .github/workflows/airflow-plugin.yml
@@ -18,6 +18,7 @@ on:
       - "metadata-models/**"
   release:
     types: [published]
+  workflow_dispatch:
 
 concurrency:
   group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
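Adding workflow_dispatch (here and, as the same one-line change, in several workflows below) lets the suite be started by hand from the Actions tab or the gh CLI — a sketch, with the branch name as an assumption:

# Manually trigger the plugin tests against a branch
gh workflow run airflow-plugin.yml --ref master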
5 changes: 4 additions & 1 deletion .github/workflows/build-and-test.yml
@@ -12,6 +12,7 @@ on:
     paths-ignore:
       - "docs/**"
       - "**.md"
+  workflow_dispatch:
   release:
     types: [published]
 
@@ -117,10 +118,12 @@ jobs:
         run: |
           echo "BACKEND_FILES=`find ./build/coverage-reports/ -type f | grep -E '(metadata-models|entity-registry|datahuyb-graphql-core|metadata-io|metadata-jobs|metadata-utils|metadata-service|medata-dao-impl|metadata-operation|li-utils|metadata-integration|metadata-events|metadata-auth|ingestion-scheduler|notifications|datahub-upgrade)' | xargs | sed 's/ /,/g'`" >> $GITHUB_ENV
           echo "FRONTEND_FILES=`find ./build/coverage-reports/ -type f | grep -E '(datahub-frontend|datahub-web-react).*\.(xml|json)$' | xargs | sed 's/ /,/g'`" >> $GITHUB_ENV
+      - name: Generate tz artifact name
+        run: echo "NAME_TZ=$(echo ${{ matrix.timezone }} | tr '/' '-')" >> $GITHUB_ENV
       - uses: actions/upload-artifact@v4
         if: always()
         with:
-          name: Test Results (build)
+          name: Test Results (build) - ${{ matrix.command}}-${{ env.NAME_TZ }}
           path: |
             **/build/reports/tests/test/**
             **/build/test-results/test/**
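upload-artifact@v4 rejects duplicate artifact names across matrix jobs, and names may not contain '/', so suffixing the name with the matrix command and a sanitized timezone keeps each leg's results distinct. A quick illustration of what the sanitization step computes (the timezone value is an example; the matrix axes 'command' and 'timezone' are assumed, as this excerpt does not show the matrix definition):

# What the "Generate tz artifact name" step produces for one matrix leg
tz="America/New_York"
echo "NAME_TZ=$(echo "$tz" | tr '/' '-')"   # prints NAME_TZ=America-New_York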
1 change: 1 addition & 0 deletions .github/workflows/dagster-plugin.yml
@@ -18,6 +18,7 @@ on:
       - "metadata-models/**"
   release:
     types: [published]
+  workflow_dispatch:
 
 concurrency:
   group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
1 change: 1 addition & 0 deletions .github/workflows/gx-plugin.yml
@@ -18,6 +18,7 @@ on:
       - "metadata-models/**"
   release:
     types: [published]
+  workflow_dispatch:
 
 concurrency:
   group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
1 change: 1 addition & 0 deletions .github/workflows/metadata-ingestion.yml
@@ -18,6 +18,7 @@ on:
       - "metadata-models/**"
   release:
     types: [published]
+  workflow_dispatch:
 
 concurrency:
   group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
1 change: 1 addition & 0 deletions .github/workflows/metadata-io.yml
@@ -20,6 +20,7 @@ on:
       - ".github/workflows/metadata-io.yml"
   release:
     types: [published]
+  workflow_dispatch:
 
 concurrency:
   group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
1 change: 1 addition & 0 deletions .github/workflows/prefect-plugin.yml
@@ -18,6 +18,7 @@ on:
       - "metadata-models/**"
   release:
     types: [published]
+  workflow_dispatch:
 
 concurrency:
   group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
1 change: 1 addition & 0 deletions build.gradle
@@ -379,6 +379,7 @@ configure(subprojects.findAll {! it.name.startsWith('spark-lineage')}) {
 
     resolutionStrategy.force externalDependency.antlr4Runtime
     resolutionStrategy.force externalDependency.antlr4
+    resolutionStrategy.force 'org.apache.mina:mina-core:2.2.4'
   }
 }
 
GmsGraphQLEngine.java (datahub-graphql-core)

@@ -2377,6 +2377,17 @@ private void configureDataJobResolvers(final RuntimeWiring.Builder builder) {
                         ? dataJob.getDataPlatformInstance().getUrn()
                         : null;
                     }))
+        .dataFetcher(
+            "container",
+            new LoadableTypeResolver<>(
+                containerType,
+                (env) -> {
+                  final DataJob dataJob = env.getSource();
+                  return dataJob.getContainer() != null
+                      ? dataJob.getContainer().getUrn()
+                      : null;
+                }))
+        .dataFetcher("parentContainers", new ParentContainersResolver(entityClient))
         .dataFetcher("runs", new DataJobRunsResolver(entityClient))
         .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient))
         .dataFetcher("exists", new EntityExistsResolver(entityService))
@@ -2454,6 +2465,17 @@ private void configureDataFlowResolvers(final RuntimeWiring.Builder builder) {
                         ? dataFlow.getDataPlatformInstance().getUrn()
                         : null;
                     }))
+        .dataFetcher(
+            "container",
+            new LoadableTypeResolver<>(
+                containerType,
+                (env) -> {
+                  final DataFlow dataFlow = env.getSource();
+                  return dataFlow.getContainer() != null
+                      ? dataFlow.getContainer().getUrn()
+                      : null;
+                }))
+        .dataFetcher("parentContainers", new ParentContainersResolver(entityClient))
         .dataFetcher(
             "health",
             new EntityHealthResolver(
DataFlowType.java

@@ -74,6 +74,7 @@ public class DataFlowType
       DOMAINS_ASPECT_NAME,
       DEPRECATION_ASPECT_NAME,
       DATA_PLATFORM_INSTANCE_ASPECT_NAME,
+      CONTAINER_ASPECT_NAME,
       DATA_PRODUCTS_ASPECT_NAME,
       BROWSE_PATHS_V2_ASPECT_NAME,
       STRUCTURED_PROPERTIES_ASPECT_NAME,
DataFlowMapper.java

@@ -16,6 +16,7 @@
 import com.linkedin.data.DataMap;
 import com.linkedin.datahub.graphql.QueryContext;
 import com.linkedin.datahub.graphql.authorization.AuthorizationUtils;
+import com.linkedin.datahub.graphql.generated.Container;
 import com.linkedin.datahub.graphql.generated.DataFlow;
 import com.linkedin.datahub.graphql.generated.DataFlowEditableProperties;
 import com.linkedin.datahub.graphql.generated.DataFlowInfo;

@@ -106,6 +107,7 @@ public DataFlow apply(
         (dataset, dataMap) ->
             dataset.setDataPlatformInstance(
                 DataPlatformInstanceAspectMapper.map(context, new DataPlatformInstance(dataMap))));
+    mappingHelper.mapToResult(context, CONTAINER_ASPECT_NAME, DataFlowMapper::mapContainers);
     mappingHelper.mapToResult(
         BROWSE_PATHS_V2_ASPECT_NAME,
         (dataFlow, dataMap) ->

@@ -206,6 +208,17 @@ private static void mapGlobalTags(
     dataFlow.setTags(globalTags);
   }
 
+  private static void mapContainers(
+      @Nullable final QueryContext context, @Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap) {
+    final com.linkedin.container.Container gmsContainer =
+        new com.linkedin.container.Container(dataMap);
+    dataFlow.setContainer(
+        Container.builder()
+            .setType(EntityType.CONTAINER)
+            .setUrn(gmsContainer.getContainer().toString())
+            .build());
+  }
+
   private static void mapDomains(
       @Nullable final QueryContext context, @Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap) {
     final Domains domains = new Domains(dataMap);
DataJobType.java

@@ -75,6 +75,7 @@ public class DataJobType
       DOMAINS_ASPECT_NAME,
       DEPRECATION_ASPECT_NAME,
       DATA_PLATFORM_INSTANCE_ASPECT_NAME,
+      CONTAINER_ASPECT_NAME,
       DATA_PRODUCTS_ASPECT_NAME,
       BROWSE_PATHS_V2_ASPECT_NAME,
       SUB_TYPES_ASPECT_NAME,
DataJobMapper.java

@@ -9,6 +9,7 @@
 import com.linkedin.data.DataMap;
 import com.linkedin.datahub.graphql.QueryContext;
 import com.linkedin.datahub.graphql.authorization.AuthorizationUtils;
+import com.linkedin.datahub.graphql.generated.Container;
 import com.linkedin.datahub.graphql.generated.DataFlow;
 import com.linkedin.datahub.graphql.generated.DataJob;
 import com.linkedin.datahub.graphql.generated.DataJobEditableProperties;

@@ -112,6 +113,14 @@ public DataJob apply(
       } else if (DATA_PLATFORM_INSTANCE_ASPECT_NAME.equals(name)) {
         result.setDataPlatformInstance(
             DataPlatformInstanceAspectMapper.map(context, new DataPlatformInstance(data)));
+      } else if (CONTAINER_ASPECT_NAME.equals(name)) {
+        final com.linkedin.container.Container gmsContainer =
+            new com.linkedin.container.Container(data);
+        result.setContainer(
+            Container.builder()
+                .setType(EntityType.CONTAINER)
+                .setUrn(gmsContainer.getContainer().toString())
+                .build());
       } else if (BROWSE_PATHS_V2_ASPECT_NAME.equals(name)) {
         result.setBrowsePathV2(BrowsePathsV2Mapper.map(context, new BrowsePathsV2(data)));
       } else if (SUB_TYPES_ASPECT_NAME.equals(name)) {
20 changes: 20 additions & 0 deletions datahub-graphql-core/src/main/resources/entity.graphql
@@ -6275,6 +6275,16 @@ type DataFlow implements EntityWithRelationships & Entity & BrowsableEntity {
   """
   dataPlatformInstance: DataPlatformInstance
 
+  """
+  The parent container in which the entity resides
+  """
+  container: Container
+
+  """
+  Recursively get the lineage of containers for this entity
+  """
+  parentContainers: ParentContainersResult
+
   """
   Granular API for querying edges extending from this entity
   """

@@ -6457,6 +6467,16 @@ type DataJob implements EntityWithRelationships & Entity & BrowsableEntity {
   """
   dataPlatformInstance: DataPlatformInstance
 
+  """
+  The parent container in which the entity resides
+  """
+  container: Container
+
+  """
+  Recursively get the lineage of containers for this entity
+  """
+  parentContainers: ParentContainersResult
+
   """
   Additional read write properties associated with the Data Job
   """
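With these schema additions, clients can read a job's or flow's direct container and its full container lineage. A hypothetical query against the new fields (the URN is invented for illustration):

query {
  dataJob(urn: "urn:li:dataJob:(urn:li:dataFlow:(airflow,example_dag,PROD),example_task)") {
    container {
      urn
    }
    parentContainers {
      count
      containers {
        urn
      }
    }
  }
}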
DataFlowMapperTest.java (new file)
@@ -0,0 +1,42 @@
package com.linkedin.datahub.graphql.types.dataflow.mappers;

import com.linkedin.common.urn.Urn;
import com.linkedin.datahub.graphql.generated.DataFlow;
import com.linkedin.entity.Aspect;
import com.linkedin.entity.EntityResponse;
import com.linkedin.entity.EnvelopedAspect;
import com.linkedin.entity.EnvelopedAspectMap;
import com.linkedin.metadata.Constants;
import java.net.URISyntaxException;
import java.util.HashMap;
import java.util.Map;
import org.testng.Assert;
import org.testng.annotations.Test;

public class DataFlowMapperTest {
private static final Urn TEST_DATA_FLOW_URN =
Urn.createFromTuple(Constants.DATA_FLOW_ENTITY_NAME, "dataflow1");
private static final Urn TEST_CONTAINER_URN =
Urn.createFromTuple(Constants.CONTAINER_ENTITY_NAME, "container1");

@Test
public void testMapDataFlowContainer() throws URISyntaxException {
com.linkedin.container.Container input = new com.linkedin.container.Container();
input.setContainer(TEST_CONTAINER_URN);

final Map<String, EnvelopedAspect> containerAspect = new HashMap<>();
containerAspect.put(
Constants.CONTAINER_ASPECT_NAME,
new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect(input.data())));
final EntityResponse response =
new EntityResponse()
.setEntityName(Constants.DATA_FLOW_ENTITY_NAME)
.setUrn(TEST_DATA_FLOW_URN)
.setAspects(new EnvelopedAspectMap(containerAspect));

final DataFlow actual = DataFlowMapper.map(null, response);

Assert.assertEquals(actual.getUrn(), TEST_DATA_FLOW_URN.toString());
Assert.assertEquals(actual.getContainer().getUrn(), TEST_CONTAINER_URN.toString());
}
}
DataJobMapperTest.java (new file)
@@ -0,0 +1,42 @@
package com.linkedin.datahub.graphql.types.datajob.mappers;

import com.linkedin.common.urn.Urn;
import com.linkedin.datahub.graphql.generated.DataJob;
import com.linkedin.entity.Aspect;
import com.linkedin.entity.EntityResponse;
import com.linkedin.entity.EnvelopedAspect;
import com.linkedin.entity.EnvelopedAspectMap;
import com.linkedin.metadata.Constants;
import java.net.URISyntaxException;
import java.util.HashMap;
import java.util.Map;
import org.testng.Assert;
import org.testng.annotations.Test;

public class DataJobMapperTest {
private static final Urn TEST_DATA_JOB_URN =
Urn.createFromTuple(Constants.DATA_JOB_ENTITY_NAME, "datajob1");
private static final Urn TEST_CONTAINER_URN =
Urn.createFromTuple(Constants.CONTAINER_ENTITY_NAME, "container1");

@Test
public void testMapDataJobContainer() throws URISyntaxException {
com.linkedin.container.Container input = new com.linkedin.container.Container();
input.setContainer(TEST_CONTAINER_URN);

final Map<String, EnvelopedAspect> containerAspect = new HashMap<>();
containerAspect.put(
Constants.CONTAINER_ASPECT_NAME,
new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect(input.data())));
final EntityResponse response =
new EntityResponse()
.setEntityName(Constants.DATA_JOB_ENTITY_NAME)
.setUrn(TEST_DATA_JOB_URN)
.setAspects(new EnvelopedAspectMap(containerAspect));

final DataJob actual = DataJobMapper.map(null, response);

Assert.assertEquals(actual.getUrn(), TEST_DATA_JOB_URN.toString());
Assert.assertEquals(actual.getContainer().getUrn(), TEST_CONTAINER_URN.toString());
}
}
DataFlowEntity.tsx

@@ -184,6 +184,7 @@ export class DataFlowEntity implements Entity<DataFlow> {
                 degree={(result as any).degree}
                 paths={(result as any).paths}
                 health={data.health}
+                parentContainers={data.parentContainers}
             />
         );
     };
4 changes: 4 additions & 0 deletions datahub-web-react/src/app/entity/dataFlow/preview/Preview.tsx
@@ -10,6 +10,7 @@ import {
     GlobalTags,
     Health,
     Owner,
+    ParentContainersResult,
    SearchInsight,
} from '../../../../types.generated';
import DefaultPreviewCard from '../../../preview/DefaultPreviewCard';

@@ -40,6 +41,7 @@ export const Preview = ({
     degree,
     paths,
     health,
+    parentContainers,
 }: {
     urn: string;
     name: string;

@@ -59,6 +61,7 @@
     degree?: number;
     paths?: EntityPath[];
     health?: Health[] | null;
+    parentContainers?: ParentContainersResult | null;
 }): JSX.Element => {
     const entityRegistry = useEntityRegistry();
     return (

@@ -91,6 +94,7 @@
             degree={degree}
             paths={paths}
             health={health || undefined}
+            parentContainers={parentContainers}
         />
     );
 };
1 change: 1 addition & 0 deletions datahub-web-react/src/app/entity/dataJob/DataJobEntity.tsx
@@ -205,6 +205,7 @@ export class DataJobEntity implements Entity<DataJob> {
                 degree={(result as any).degree}
                 paths={(result as any).paths}
                 health={data.health}
+                parentContainers={data.parentContainers}
             />
         );
     };
(diff truncated; the remaining changed files of the 49 are not shown)
