Merge branch 'master' into pb-string-escaping
pinakipb2 authored Aug 9, 2024
2 parents 79029a6 + 080f2a2 commit 830b8e0
Showing 307 changed files with 5,693 additions and 5,885 deletions.
8 changes: 4 additions & 4 deletions .github/scripts/docker_helpers.sh
@@ -16,11 +16,11 @@ function get_tag {
}

function get_tag_slim {
echo $(echo ${GITHUB_REF} | sed -e "s,refs/heads/${MAIN_BRANCH},${MAIN_BRANCH_TAG}-slim,g" -e 's,refs/tags/,,g' -e 's,refs/pull/\([0-9]*\).*,pr\1-slim,g'),${SHORT_SHA}-slim
echo $(echo ${GITHUB_REF} | sed -e "s,refs/heads/${MAIN_BRANCH},${MAIN_BRANCH_TAG}-slim,g" -e 's,refs/tags/\(.*\),\1-slim,g' -e 's,refs/pull/\([0-9]*\).*,pr\1-slim,g'),${SHORT_SHA}-slim
}

function get_tag_full {
echo $(echo ${GITHUB_REF} | sed -e "s,refs/heads/${MAIN_BRANCH},${MAIN_BRANCH_TAG}-full,g" -e 's,refs/tags/,,g' -e 's,refs/pull/\([0-9]*\).*,pr\1-full,g'),${SHORT_SHA}-full
echo $(echo ${GITHUB_REF} | sed -e "s,refs/heads/${MAIN_BRANCH},${MAIN_BRANCH_TAG}-full,g" -e 's,refs/tags/\(.*\),\1-full,g' -e 's,refs/pull/\([0-9]*\).*,pr\1-full,g'),${SHORT_SHA}-full
}

function get_python_docker_release_v {
@@ -32,9 +32,9 @@ function get_unique_tag {
}

function get_unique_tag_slim {
echo $(echo ${GITHUB_REF} | sed -e "s,refs/heads/${MAIN_BRANCH},${SHORT_SHA}-slim,g" -e 's,refs/tags/,,g' -e 's,refs/pull/\([0-9]*\).*,pr\1-slim,g')
echo $(echo ${GITHUB_REF} | sed -e "s,refs/heads/${MAIN_BRANCH},${SHORT_SHA}-slim,g" -e 's,refs/tags/\(.*\),\1-slim,g' -e 's,refs/pull/\([0-9]*\).*,pr\1-slim,g')
}

function get_unique_tag_full {
echo $(echo ${GITHUB_REF} | sed -e "s,refs/heads/${MAIN_BRANCH},${SHORT_SHA}-full,g" -e 's,refs/tags/,,g' -e 's,refs/pull/\([0-9]*\).*,pr\1-full,g')
echo $(echo ${GITHUB_REF} | sed -e "s,refs/heads/${MAIN_BRANCH},${SHORT_SHA}-full,g" -e 's,refs/tags/\(.*\),\1-full,g' -e 's,refs/pull/\([0-9]*\).*,pr\1-full,g')
}
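
For context on the docker_helpers.sh change above, here is a minimal sketch of what the reworked slim tag expression emits, using hypothetical values for GITHUB_REF, SHORT_SHA, MAIN_BRANCH, and MAIN_BRANCH_TAG (none of these values come from this commit). The old expression 's,refs/tags/,,g' only stripped the refs/tags/ prefix, so images built from a tag lost the -slim/-full suffix; the new capture group keeps the tag name and appends the variant. The full and unique-tag variants in the diff follow the same pattern.

#!/bin/bash
# Hypothetical inputs, for illustration only.
GITHUB_REF="refs/tags/v0.14.0"
SHORT_SHA="080f2a2"
MAIN_BRANCH="master"
MAIN_BRANCH_TAG="head"

# Old: 's,refs/tags/,,g'              -> v0.14.0,080f2a2-slim   (tag image lacks -slim)
# New: 's,refs/tags/\(.*\),\1-slim,g' -> v0.14.0-slim,080f2a2-slim
echo $(echo ${GITHUB_REF} | sed \
  -e "s,refs/heads/${MAIN_BRANCH},${MAIN_BRANCH_TAG}-slim,g" \
  -e 's,refs/tags/\(.*\),\1-slim,g' \
  -e 's,refs/pull/\([0-9]*\).*,pr\1-slim,g'),${SHORT_SHA}-slim
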
4 changes: 2 additions & 2 deletions .github/workflows/airflow-plugin.yml
@@ -74,7 +74,7 @@ jobs:
- name: pip freeze show list installed
if: always()
run: source metadata-ingestion-modules/airflow-plugin/venv/bin/activate && pip freeze
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
if: ${{ always() && matrix.python-version == '3.10' && matrix.extra_pip_requirements == 'apache-airflow>=2.7.0' }}
with:
name: Test Results (Airflow Plugin ${{ matrix.python-version}})
@@ -98,7 +98,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Upload
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: Event File
path: ${{ github.event_path }}
4 changes: 2 additions & 2 deletions .github/workflows/build-and-test.yml
@@ -99,7 +99,7 @@ jobs:
if: ${{ matrix.command == 'except_metadata_ingestion' && needs.setup.outputs.backend_change == 'true' }}
run: |
./gradlew -PjavaClassVersionDefault=8 :metadata-integration:java:spark-lineage:compileJava
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
if: always()
with:
name: Test Results (build)
@@ -128,7 +128,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Upload
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: Event File
path: ${{ github.event_path }}
4 changes: 2 additions & 2 deletions .github/workflows/dagster-plugin.yml
@@ -56,7 +56,7 @@ jobs:
- name: pip freeze show list installed
if: always()
run: source metadata-ingestion-modules/dagster-plugin/venv/bin/activate && pip freeze
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
if: ${{ always() && matrix.python-version == '3.10' && matrix.extraPythonRequirement == 'dagster>=1.3.3' }}
with:
name: Test Results (dagster Plugin ${{ matrix.python-version}})
@@ -79,7 +79,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Upload
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: Event File
path: ${{ github.event_path }}
6 changes: 3 additions & 3 deletions .github/workflows/docker-unified.yml
@@ -1024,18 +1024,18 @@ jobs:
docker logs datahub-datahub-frontend-react-1 >& frontend-${{ matrix.test_strategy }}.log || true
docker logs datahub-upgrade-1 >& upgrade-${{ matrix.test_strategy }}.log || true
- name: Upload logs
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
if: failure()
with:
name: docker logs
path: "*.log"
- name: Upload screenshots
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
if: failure()
with:
name: cypress-snapshots-${{ matrix.test_strategy }}
path: smoke-test/tests/cypress/cypress/screenshots/
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
if: always()
with:
name: Test Results (smoke tests) ${{ matrix.test_strategy }}
4 changes: 2 additions & 2 deletions .github/workflows/metadata-ingestion.yml
@@ -83,7 +83,7 @@ jobs:
df -hl
docker image ls
docker system df
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
with:
name: Test Results (metadata ingestion ${{ matrix.python-version }})
path: |
@@ -106,7 +106,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Upload
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: Event File
path: ${{ github.event_path }}
4 changes: 2 additions & 2 deletions .github/workflows/metadata-io.yml
@@ -62,7 +62,7 @@ jobs:
- name: Gradle build (and test)
run: |
./gradlew :metadata-io:test
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
if: always()
with:
name: Test Results (metadata-io)
@@ -78,7 +78,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Upload
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: Event File
path: ${{ github.event_path }}
6 changes: 2 additions & 4 deletions .github/workflows/pr-labeler.yml
@@ -30,12 +30,10 @@ jobs:
"treff7es",
"yoonhyejin",
"eboneil",
"ethan-cartwright",
"gabe-lyons",
"hsheth2",
"jjoyce0510",
"maggiehays",
"mrjefflewis",
"pedro93",
"RyanHolstien",
"Kunal-kankriya",
@@ -45,7 +43,8 @@ jobs:
"kushagra-apptware",
"Salman-Apptware",
"mayurinehate",
"noggi"
"noggi",
"skrydal"
]'),
github.actor
)
@@ -60,7 +59,6 @@ jobs:
${{
contains(
fromJson('[
"skrydal",
"siladitya2",
"sgomezvillamor",
"ngamanda",
4 changes: 2 additions & 2 deletions .github/workflows/spark-smoke-test.yml
@@ -69,14 +69,14 @@ jobs:
docker logs elasticsearch >& elasticsearch-${{ matrix.test_strategy }}.log || true
docker logs datahub-frontend-react >& frontend-${{ matrix.test_strategy }}.log || true
- name: Upload logs
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
if: failure()
with:
name: docker logs
path: |
"**/build/container-logs/*.log"
"*.log"
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
if: always()
with:
name: Test Results (smoke tests)
1 change: 0 additions & 1 deletion README.md
@@ -138,7 +138,6 @@ Here are the companies that have officially adopted DataHub. Please feel free to
- [Peloton](https://www.onepeloton.com)
- [PITS Global Data Recovery Services](https://www.pitsdatarecovery.net/)
- [Razer](https://www.razer.com)
- [Saxo Bank](https://www.home.saxo)
- [Showroomprive](https://www.showroomprive.com/)
- [SpotHero](https://spothero.com)
- [Stash](https://www.stash.com)
3 changes: 2 additions & 1 deletion datahub-frontend/app/controllers/SsoCallbackController.java
@@ -66,7 +66,8 @@ public SsoCallbackController(

public CompletionStage<Result> handleCallback(String protocol, Http.Request request) {
if (shouldHandleCallback(protocol)) {
log.debug(String.format("Handling SSO callback. Protocol: %s", protocol));
log.debug("Handling SSO callback. Protocol: {}",
_ssoManager.getSsoProvider().protocol().getCommonName());
return callback(request)
.handle(
(res, e) -> {
@@ -1032,6 +1032,8 @@ private void configureQueryResolvers(final RuntimeWiring.Builder builder) {
.dataFetcher("mlModel", getResolver(mlModelType))
.dataFetcher("mlModelGroup", getResolver(mlModelGroupType))
.dataFetcher("assertion", getResolver(assertionType))
.dataFetcher("form", getResolver(formType))
.dataFetcher("view", getResolver(dataHubViewType))
.dataFetcher("listPolicies", new ListPoliciesResolver(this.entityClient))
.dataFetcher("getGrantedPrivileges", new GetGrantedPrivilegesResolver())
.dataFetcher("listUsers", new ListUsersResolver(this.entityClient))
@@ -21,5 +21,6 @@ public class FeatureFlags {
private boolean schemaFieldEntityFetchEnabled = false;
private boolean businessAttributeEntityEnabled = false;
private boolean dataContractsEnabled = false;
private boolean editableDatasetNameEnabled = false;
private boolean showSeparateSiblings = false;
}
@@ -41,7 +41,7 @@ public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throw
final QueryContext context = environment.getContext();
final String tokenId = bindArgument(environment.getArgument("tokenId"), String.class);

log.info("User {} revoking access token {}", context.getActorUrn(), tokenId);
log.info("User {} revoking access token", context.getActorUrn());

if (isAuthorizedToRevokeToken(context, tokenId)) {
try {
@@ -186,6 +186,7 @@ public CompletableFuture<AppConfig> get(final DataFetchingEnvironment environmen
.setNestedDomainsEnabled(_featureFlags.isNestedDomainsEnabled())
.setPlatformBrowseV2(_featureFlags.isPlatformBrowseV2())
.setDataContractsEnabled(_featureFlags.isDataContractsEnabled())
.setEditableDatasetNameEnabled(_featureFlags.isEditableDatasetNameEnabled())
.setShowSeparateSiblings(_featureFlags.isShowSeparateSiblings())
.build();

@@ -23,16 +23,16 @@ public DeleteSecretResolver(final EntityClient entityClient) {
public CompletableFuture<String> get(final DataFetchingEnvironment environment) throws Exception {
final QueryContext context = environment.getContext();
if (IngestionAuthUtils.canManageSecrets(context)) {
final String secretUrn = environment.getArgument("urn");
final Urn urn = Urn.createFromString(secretUrn);
final String inputUrn = environment.getArgument("urn");
final Urn urn = Urn.createFromString(inputUrn);
return GraphQLConcurrencyUtils.supplyAsync(
() -> {
try {
_entityClient.deleteEntity(context.getOperationContext(), urn);
return secretUrn;
return inputUrn;
} catch (Exception e) {
throw new RuntimeException(
String.format("Failed to perform delete against secret with urn %s", secretUrn),
String.format("Failed to perform delete against secret with urn %s", inputUrn),
e);
}
},
@@ -25,7 +25,7 @@ static String encrypt(String value, String secret) {
} catch (NoSuchAlgorithmException e) {
e.printStackTrace();
}
Cipher cipher = Cipher.getInstance("AES/ECB/PKCS5Padding");
Cipher cipher = Cipher.getInstance("AES");
cipher.init(Cipher.ENCRYPT_MODE, secretKey);
return Base64.getEncoder()
.encodeToString(cipher.doFinal(value.getBytes(StandardCharsets.UTF_8)));
@@ -48,7 +48,7 @@ static String decrypt(String encryptedValue, String secret) {
} catch (NoSuchAlgorithmException e) {
e.printStackTrace();
}
Cipher cipher = Cipher.getInstance("AES/ECB/PKCS5PADDING");
Cipher cipher = Cipher.getInstance("AES");
cipher.init(Cipher.DECRYPT_MODE, secretKey);
return new String(cipher.doFinal(Base64.getDecoder().decode(encryptedValue)));
} catch (Exception e) {
@@ -1,6 +1,7 @@
package com.linkedin.datahub.graphql.resolvers.mutate;

import static com.linkedin.metadata.Constants.*;
import static com.linkedin.metadata.utils.SystemMetadataUtils.createDefaultSystemMetadata;

import com.linkedin.common.urn.Urn;
import com.linkedin.data.template.RecordTemplate;
@@ -84,7 +85,7 @@ private static MetadataChangeProposal setProposalProperties(
proposal.setChangeType(ChangeType.UPSERT);

// Assumes proposal is generated first from the builder methods above so SystemMetadata is empty
SystemMetadata systemMetadata = new SystemMetadata();
SystemMetadata systemMetadata = createDefaultSystemMetadata();
StringMap properties = new StringMap();
properties.put(APP_SOURCE, UI_SOURCE);
systemMetadata.setProperties(properties);
@@ -4,9 +4,11 @@
import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.persistAspect;

import com.linkedin.businessattribute.BusinessAttributeInfo;
import com.linkedin.common.AuditStamp;
import com.linkedin.common.urn.CorpuserUrn;
import com.linkedin.common.urn.Urn;
import com.linkedin.common.urn.UrnUtils;
import com.linkedin.data.template.SetMode;
import com.linkedin.datahub.graphql.QueryContext;
import com.linkedin.datahub.graphql.authorization.AuthorizationUtils;
import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils;
@@ -20,6 +22,7 @@
import com.linkedin.datahub.graphql.resolvers.mutate.util.DomainUtils;
import com.linkedin.datahub.graphql.resolvers.mutate.util.GlossaryUtils;
import com.linkedin.dataproduct.DataProductProperties;
import com.linkedin.dataset.EditableDatasetProperties;
import com.linkedin.domain.DomainProperties;
import com.linkedin.domain.Domains;
import com.linkedin.entity.client.EntityClient;
@@ -70,6 +73,8 @@ public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throw
return updateDataProductName(targetUrn, input, context);
case Constants.BUSINESS_ATTRIBUTE_ENTITY_NAME:
return updateBusinessAttributeName(targetUrn, input, environment.getContext());
case Constants.DATASET_ENTITY_NAME:
return updateDatasetName(targetUrn, input, environment.getContext());
default:
throw new RuntimeException(
String.format(
@@ -236,6 +241,37 @@ private Boolean updateGroupName(Urn targetUrn, UpdateNameInput input, QueryConte
"Unauthorized to perform this action. Please contact your DataHub administrator.");
}

// updates the editable dataset properties aspect's name field
private Boolean updateDatasetName(Urn targetUrn, UpdateNameInput input, QueryContext context) {
if (AuthorizationUtils.canEditProperties(targetUrn, context)) {
try {
if (input.getName() != null) {
final EditableDatasetProperties editableDatasetProperties =
new EditableDatasetProperties();
editableDatasetProperties.setName(input.getName());
final AuditStamp auditStamp = new AuditStamp();
Urn actor = UrnUtils.getUrn(context.getActorUrn());
auditStamp.setActor(actor, SetMode.IGNORE_NULL);
auditStamp.setTime(System.currentTimeMillis());
editableDatasetProperties.setLastModified(auditStamp);
persistAspect(
context.getOperationContext(),
targetUrn,
Constants.EDITABLE_DATASET_PROPERTIES_ASPECT_NAME,
editableDatasetProperties,
actor,
_entityService);
}
return true;
} catch (Exception e) {
throw new RuntimeException(
String.format("Failed to perform update against input %s", input), e);
}
}
throw new AuthorizationException(
"Unauthorized to perform this action. Please contact your DataHub administrator.");
}

private Boolean updateDataProductName(
Urn targetUrn, UpdateNameInput input, QueryContext context) {
try {
@@ -222,6 +222,7 @@ private void mapDatasetProperties(
properties.setQualifiedName(gmsProperties.getQualifiedName());
dataset.setProperties(properties);
dataset.setDescription(properties.getDescription());
dataset.setName(properties.getName());
if (gmsProperties.getUri() != null) {
dataset.setUri(gmsProperties.getUri().toString());
}
@@ -248,6 +249,9 @@ private void mapEditableDatasetProperties(@Nonnull Dataset dataset, @Nonnull Dat
new EditableDatasetProperties(dataMap);
final DatasetEditableProperties editableProperties = new DatasetEditableProperties();
editableProperties.setDescription(editableDatasetProperties.getDescription());
if (editableDatasetProperties.getName() != null) {
editableProperties.setName(editableDatasetProperties.getName());
}
dataset.setEditableProperties(editableProperties);
}

@@ -111,8 +111,13 @@ public Collection<MetadataChangeProposal> apply(

if (datasetUpdateInput.getEditableProperties() != null) {
final EditableDatasetProperties editableDatasetProperties = new EditableDatasetProperties();
editableDatasetProperties.setDescription(
datasetUpdateInput.getEditableProperties().getDescription());
if (datasetUpdateInput.getEditableProperties().getDescription() != null) {
editableDatasetProperties.setDescription(
datasetUpdateInput.getEditableProperties().getDescription());
}
if (datasetUpdateInput.getEditableProperties().getName() != null) {
editableDatasetProperties.setName(datasetUpdateInput.getEditableProperties().getName());
}
editableDatasetProperties.setLastModified(auditStamp);
editableDatasetProperties.setCreated(auditStamp);
proposals.add(
