From 2755cf3559fe04a982326cca78ec44df12cb1be0 Mon Sep 17 00:00:00 2001 From: sid-acryl <155424659+sid-acryl@users.noreply.github.com> Date: Wed, 7 Aug 2024 09:32:21 +0530 Subject: [PATCH 01/72] fix(ingest/powerbi): fix broken lineage between chart and dataset (#11080) --- .../ingestion/source/powerbi/powerbi.py | 15 +- .../src/datahub/utilities/urns/urn_iter.py | 6 +- .../golden_test_lower_case_urn_ingest.json | 696 +++++++++--------- .../tests/integration/powerbi/test_powerbi.py | 2 + 4 files changed, 368 insertions(+), 351 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/powerbi/powerbi.py b/metadata-ingestion/src/datahub/ingestion/source/powerbi/powerbi.py index e0a72c71a1ef0..a2d841c3f8fdc 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/powerbi/powerbi.py +++ b/metadata-ingestion/src/datahub/ingestion/source/powerbi/powerbi.py @@ -89,6 +89,7 @@ from datahub.metadata.urns import ChartUrn from datahub.sql_parsing.sqlglot_lineage import ColumnLineageInfo from datahub.utilities.dedup_list import deduplicate_list +from datahub.utilities.urns.urn_iter import lowercase_dataset_urn # Logger instance logger = logging.getLogger(__name__) @@ -127,7 +128,7 @@ def __init__( @staticmethod def urn_to_lowercase(value: str, flag: bool) -> str: if flag is True: - return value.lower() + return lowercase_dataset_urn(value) return value @@ -390,11 +391,13 @@ def to_datahub_dataset( for table in dataset.tables: # Create a URN for dataset - ds_urn = builder.make_dataset_urn_with_platform_instance( - platform=self.__config.platform_name, - name=self.assets_urn_to_lowercase(table.full_name), - platform_instance=self.__config.platform_instance, - env=self.__config.env, + ds_urn = self.assets_urn_to_lowercase( + builder.make_dataset_urn_with_platform_instance( + platform=self.__config.platform_name, + name=table.full_name, + platform_instance=self.__config.platform_instance, + env=self.__config.env, + ) ) logger.debug(f"dataset_urn={ds_urn}") diff 
--git a/metadata-ingestion/src/datahub/utilities/urns/urn_iter.py b/metadata-ingestion/src/datahub/utilities/urns/urn_iter.py index 5bef17119675e..f0e4c6f5ee14a 100644 --- a/metadata-ingestion/src/datahub/utilities/urns/urn_iter.py +++ b/metadata-ingestion/src/datahub/utilities/urns/urn_iter.py @@ -131,7 +131,7 @@ def _modify_at_path( _modify_at_path(getattr(model, path[0]), path[1:], new_value) -def _lowercase_dataset_urn(dataset_urn: str) -> str: +def lowercase_dataset_urn(dataset_urn: str) -> str: cur_urn = DatasetUrn.from_string(dataset_urn) new_urn = DatasetUrn( platform=cur_urn.platform, name=cur_urn.name.lower(), env=cur_urn.env @@ -149,10 +149,10 @@ def lowercase_dataset_urns( ) -> None: def modify_urn(urn: str) -> str: if guess_entity_type(urn) == "dataset": - return _lowercase_dataset_urn(urn) + return lowercase_dataset_urn(urn) elif guess_entity_type(urn) == "schemaField": cur_urn = Urn.from_string(urn) - cur_urn._entity_ids[0] = _lowercase_dataset_urn(cur_urn._entity_ids[0]) + cur_urn._entity_ids[0] = lowercase_dataset_urn(cur_urn._entity_ids[0]) return str(cur_urn) return urn diff --git a/metadata-ingestion/tests/integration/powerbi/golden_test_lower_case_urn_ingest.json b/metadata-ingestion/tests/integration/powerbi/golden_test_lower_case_urn_ingest.json index d80aa02c4cb12..a4eb670a4b7f9 100644 --- a/metadata-ingestion/tests/integration/powerbi/golden_test_lower_case_urn_ingest.json +++ b/metadata-ingestion/tests/integration/powerbi/golden_test_lower_case_urn_ingest.json @@ -1,7 +1,7 @@ [ { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.public_issue_history,DEV)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,myplatforminstance.library-dataset.public_issue_history,PROD)", "changeType": "UPSERT", "aspectName": "viewProperties", "aspect": { @@ -17,9 +17,25 @@ "lastRunId": "no-run-id-provided" } }, +{ + "entityType": "corpuser", + "entityUrn": "urn:li:corpuser:users.User1@foo.com", + 
"changeType": "UPSERT", + "aspectName": "corpUserKey", + "aspect": { + "json": { + "username": "User1@foo.com" + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.public_issue_history,DEV)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,myplatforminstance.library-dataset.public_issue_history,PROD)", "changeType": "UPSERT", "aspectName": "datasetProperties", "aspect": { @@ -40,13 +56,13 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.public_issue_history,DEV)", + "entityType": "corpuser", + "entityUrn": "urn:li:corpuser:users.User2@foo.com", "changeType": "UPSERT", - "aspectName": "status", + "aspectName": "corpUserKey", "aspect": { "json": { - "removed": false + "username": "User2@foo.com" } }, "systemMetadata": { @@ -57,15 +73,14 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.public_issue_history,DEV)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,myplatforminstance.library-dataset.snowflake_testtable,PROD)", "changeType": "UPSERT", - "aspectName": "subTypes", + "aspectName": "viewProperties", "aspect": { "json": { - "typeNames": [ - "PowerBI Dataset Table", - "View" - ] + "materialized": false, + "viewLogic": "let\n Source = Snowflake.Databases(\"hp123rt5.ap-southeast-2.fakecomputing.com\",\"PBI_TEST_WAREHOUSE_PROD\",[Role=\"PBI_TEST_MEMBER\"]),\n PBI_TEST_Database = Source{[Name=\"PBI_TEST\",Kind=\"Database\"]}[Data],\n TEST_Schema = PBI_TEST_Database{[Name=\"TEST\",Kind=\"Schema\"]}[Data],\n TESTTABLE_Table = TEST_Schema{[Name=\"TESTTABLE\",Kind=\"Table\"]}[Data]\nin\n TESTTABLE_Table", + "viewLanguage": "m_query" } }, "systemMetadata": { @@ -76,14 +91,12 @@ }, { "entityType": "dataset", - "entityUrn": 
"urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.snowflake_testtable,DEV)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,myplatforminstance.library-dataset.public_issue_history,PROD)", "changeType": "UPSERT", - "aspectName": "viewProperties", + "aspectName": "status", "aspect": { "json": { - "materialized": false, - "viewLogic": "let\n Source = Snowflake.Databases(\"hp123rt5.ap-southeast-2.fakecomputing.com\",\"PBI_TEST_WAREHOUSE_PROD\",[Role=\"PBI_TEST_MEMBER\"]),\n PBI_TEST_Database = Source{[Name=\"PBI_TEST\",Kind=\"Database\"]}[Data],\n TEST_Schema = PBI_TEST_Database{[Name=\"TEST\",Kind=\"Schema\"]}[Data],\n TESTTABLE_Table = TEST_Schema{[Name=\"TESTTABLE\",Kind=\"Table\"]}[Data]\nin\n TESTTABLE_Table", - "viewLanguage": "m_query" + "removed": false } }, "systemMetadata": { @@ -94,7 +107,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.snowflake_testtable,DEV)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,myplatforminstance.library-dataset.snowflake_testtable,PROD)", "changeType": "UPSERT", "aspectName": "datasetProperties", "aspect": { @@ -115,8 +128,8 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.snowflake_testtable,DEV)", + "entityType": "corpuser", + "entityUrn": "urn:li:corpuser:users.User1@foo.com", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -132,15 +145,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.snowflake_testtable,DEV)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,myplatforminstance.library-dataset.snowflake_testtable,PROD)", "changeType": "UPSERT", - "aspectName": "subTypes", + "aspectName": "status", "aspect": { "json": { - "typeNames": [ - "PowerBI Dataset Table", - "View" - ] + "removed": false } }, "systemMetadata": { @@ -151,14 +161,15 @@ }, { "entityType": "dataset", - 
"entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.snowflake_native-query,DEV)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,myplatforminstance.library-dataset.public_issue_history,PROD)", "changeType": "UPSERT", - "aspectName": "viewProperties", + "aspectName": "subTypes", "aspect": { "json": { - "materialized": false, - "viewLogic": "let\n Source = Value.NativeQuery(Snowflake.Databases(\"bu20658.ap-southeast-2.snowflakecomputing.com\",\"operations_analytics_warehouse_prod\",[Role=\"OPERATIONS_ANALYTICS_MEMBER\"]){[Name=\"OPERATIONS_ANALYTICS\"]}[Data], \"SELECT#(lf)concat((UPPER(REPLACE(SELLER,'-',''))), MONTHID) as AGENT_KEY,#(lf)concat((UPPER(REPLACE(CLIENT_DIRECTOR,'-',''))), MONTHID) as CD_AGENT_KEY,#(lf) *#(lf)FROM#(lf)OPERATIONS_ANALYTICS.TRANSFORMED_PROD.V_APS_SME_UNITS_V4\", null, [EnableFolding=true]),\n #\"Added Conditional Column\" = Table.AddColumn(Source, \"SME Units ENT\", each if [DEAL_TYPE] = \"SME Unit\" then [UNIT] else 0),\n #\"Added Conditional Column1\" = Table.AddColumn(#\"Added Conditional Column\", \"Banklink Units\", each if [DEAL_TYPE] = \"Banklink\" then [UNIT] else 0),\n #\"Removed Columns\" = Table.RemoveColumns(#\"Added Conditional Column1\",{\"Banklink Units\"}),\n #\"Added Custom\" = Table.AddColumn(#\"Removed Columns\", \"Banklink Units\", each if [DEAL_TYPE] = \"Banklink\" and [SALES_TYPE] = \"3 - Upsell\"\nthen [UNIT]\n\nelse if [SALES_TYPE] = \"Adjusted BL Migration\"\nthen [UNIT]\n\nelse 0),\n #\"Added Custom1\" = Table.AddColumn(#\"Added Custom\", \"SME Units in $ (*$361)\", each if [DEAL_TYPE] = \"SME Unit\" \nand [SALES_TYPE] <> \"4 - Renewal\"\n then [UNIT] * 361\nelse 0),\n #\"Added Custom2\" = Table.AddColumn(#\"Added Custom1\", \"Banklink in $ (*$148)\", each [Banklink Units] * 148)\nin\n #\"Added Custom2\"", - "viewLanguage": "m_query" + "typeNames": [ + "PowerBI Dataset Table", + "View" + ] } }, "systemMetadata": { @@ -169,18 +180,15 @@ }, { "entityType": "dataset", - 
"entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.snowflake_native-query,DEV)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,myplatforminstance.library-dataset.snowflake_testtable,PROD)", "changeType": "UPSERT", - "aspectName": "datasetProperties", + "aspectName": "subTypes", "aspect": { "json": { - "customProperties": { - "datasetId": "05169CD2-E713-41E6-9600-1D8066D95445" - }, - "externalUrl": "http://localhost/groups/64ED5CAD-7C10-4684-8180-826122881108/datasets/05169CD2-E713-41E6-9600-1D8066D95445/details", - "name": "snowflake native-query", - "description": "Library dataset description", - "tags": [] + "typeNames": [ + "PowerBI Dataset Table", + "View" + ] } }, "systemMetadata": { @@ -190,8 +198,8 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.snowflake_native-query,DEV)", + "entityType": "corpuser", + "entityUrn": "urn:li:corpuser:users.User2@foo.com", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -207,15 +215,14 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.snowflake_native-query,DEV)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,myplatforminstance.library-dataset.big-query-with-parameter,PROD)", "changeType": "UPSERT", - "aspectName": "subTypes", + "aspectName": "viewProperties", "aspect": { "json": { - "typeNames": [ - "PowerBI Dataset Table", - "View" - ] + "materialized": false, + "viewLogic": "let\n Source = GoogleBigQuery.Database([BillingProject = #\"Parameter - Source\"]),\n#\"gcp-project\" = Source{[Name=#\"Parameter - Source\"]}[Data],\nuniversal_Schema = #\"gcp-project\"{[Name=\"universal\",Kind=\"Schema\"]}[Data],\nD_WH_DATE_Table = universal_Schema{[Name=\"D_WH_DATE\",Kind=\"Table\"]}[Data],\n#\"Filtered Rows\" = Table.SelectRows(D_WH_DATE_Table, each [D_DATE] > #datetime(2019, 9, 10, 0, 0, 0)),\n#\"Filtered Rows1\" = Table.SelectRows(#\"Filtered 
Rows\", each DateTime.IsInPreviousNHours([D_DATE], 87600))\n in \n#\"Filtered Rows1\"", + "viewLanguage": "m_query" } }, "systemMetadata": { @@ -226,13 +233,13 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.big-query-with-parameter,DEV)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,myplatforminstance.library-dataset.snowflake_native-query,PROD)", "changeType": "UPSERT", "aspectName": "viewProperties", "aspect": { "json": { "materialized": false, - "viewLogic": "let\n Source = GoogleBigQuery.Database([BillingProject = #\"Parameter - Source\"]),\n#\"gcp-project\" = Source{[Name=#\"Parameter - Source\"]}[Data],\nuniversal_Schema = #\"gcp-project\"{[Name=\"universal\",Kind=\"Schema\"]}[Data],\nD_WH_DATE_Table = universal_Schema{[Name=\"D_WH_DATE\",Kind=\"Table\"]}[Data],\n#\"Filtered Rows\" = Table.SelectRows(D_WH_DATE_Table, each [D_DATE] > #datetime(2019, 9, 10, 0, 0, 0)),\n#\"Filtered Rows1\" = Table.SelectRows(#\"Filtered Rows\", each DateTime.IsInPreviousNHours([D_DATE], 87600))\n in \n#\"Filtered Rows1\"", + "viewLogic": "let\n Source = Value.NativeQuery(Snowflake.Databases(\"bu20658.ap-southeast-2.snowflakecomputing.com\",\"operations_analytics_warehouse_prod\",[Role=\"OPERATIONS_ANALYTICS_MEMBER\"]){[Name=\"OPERATIONS_ANALYTICS\"]}[Data], \"SELECT#(lf)concat((UPPER(REPLACE(SELLER,'-',''))), MONTHID) as AGENT_KEY,#(lf)concat((UPPER(REPLACE(CLIENT_DIRECTOR,'-',''))), MONTHID) as CD_AGENT_KEY,#(lf) *#(lf)FROM#(lf)OPERATIONS_ANALYTICS.TRANSFORMED_PROD.V_APS_SME_UNITS_V4\", null, [EnableFolding=true]),\n #\"Added Conditional Column\" = Table.AddColumn(Source, \"SME Units ENT\", each if [DEAL_TYPE] = \"SME Unit\" then [UNIT] else 0),\n #\"Added Conditional Column1\" = Table.AddColumn(#\"Added Conditional Column\", \"Banklink Units\", each if [DEAL_TYPE] = \"Banklink\" then [UNIT] else 0),\n #\"Removed Columns\" = Table.RemoveColumns(#\"Added Conditional Column1\",{\"Banklink Units\"}),\n 
#\"Added Custom\" = Table.AddColumn(#\"Removed Columns\", \"Banklink Units\", each if [DEAL_TYPE] = \"Banklink\" and [SALES_TYPE] = \"3 - Upsell\"\nthen [UNIT]\n\nelse if [SALES_TYPE] = \"Adjusted BL Migration\"\nthen [UNIT]\n\nelse 0),\n #\"Added Custom1\" = Table.AddColumn(#\"Added Custom\", \"SME Units in $ (*$361)\", each if [DEAL_TYPE] = \"SME Unit\" \nand [SALES_TYPE] <> \"4 - Renewal\"\n then [UNIT] * 361\nelse 0),\n #\"Added Custom2\" = Table.AddColumn(#\"Added Custom1\", \"Banklink in $ (*$148)\", each [Banklink Units] * 148)\nin\n #\"Added Custom2\"", "viewLanguage": "m_query" } }, @@ -244,7 +251,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.big-query-with-parameter,DEV)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,myplatforminstance.library-dataset.big-query-with-parameter,PROD)", "changeType": "UPSERT", "aspectName": "datasetProperties", "aspect": { @@ -266,23 +273,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.big-query-with-parameter,DEV)", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": false - } - }, - "systemMetadata": { - "lastObserved": 1643871600000, - "runId": "powerbi-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.big-query-with-parameter,DEV)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,myplatforminstance.library-dataset.job-history,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -301,14 +292,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.snowflake_native-query-with-join,DEV)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,myplatforminstance.library-dataset.big-query-with-parameter,PROD)", "changeType": "UPSERT", - "aspectName": 
"viewProperties", + "aspectName": "status", "aspect": { "json": { - "materialized": false, - "viewLogic": "let\n Source = Value.NativeQuery(Snowflake.Databases(\"xaa48144.snowflakecomputing.com\",\"GSL_TEST_WH\",[Role=\"ACCOUNTADMIN\"]){[Name=\"GSL_TEST_DB\"]}[Data], \"select A.name from GSL_TEST_DB.PUBLIC.SALES_ANALYST as A inner join GSL_TEST_DB.PUBLIC.SALES_FORECAST as B on A.name = B.name where startswith(A.name, 'mo')\", null, [EnableFolding=true])\nin\n Source", - "viewLanguage": "m_query" + "removed": false } }, "systemMetadata": { @@ -319,7 +308,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.snowflake_native-query-with-join,DEV)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,myplatforminstance.library-dataset.snowflake_native-query,PROD)", "changeType": "UPSERT", "aspectName": "datasetProperties", "aspect": { @@ -328,7 +317,7 @@ "datasetId": "05169CD2-E713-41E6-9600-1D8066D95445" }, "externalUrl": "http://localhost/groups/64ED5CAD-7C10-4684-8180-826122881108/datasets/05169CD2-E713-41E6-9600-1D8066D95445/details", - "name": "snowflake native-query-with-join", + "name": "snowflake native-query", "description": "Library dataset description", "tags": [] } @@ -341,12 +330,15 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.snowflake_native-query-with-join,DEV)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,myplatforminstance.library-dataset.big-query-with-parameter,PROD)", "changeType": "UPSERT", - "aspectName": "status", + "aspectName": "subTypes", "aspect": { "json": { - "removed": false + "typeNames": [ + "PowerBI Dataset Table", + "View" + ] } }, "systemMetadata": { @@ -357,15 +349,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.snowflake_native-query-with-join,DEV)", + "entityUrn": 
"urn:li:dataset:(urn:li:dataPlatform:powerbi,myplatforminstance.library-dataset.job-history,PROD)", "changeType": "UPSERT", - "aspectName": "subTypes", + "aspectName": "status", "aspect": { "json": { - "typeNames": [ - "PowerBI Dataset Table", - "View" - ] + "removed": false } }, "systemMetadata": { @@ -376,14 +365,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.job-history,DEV)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,myplatforminstance.library-dataset.snowflake_native-query,PROD)", "changeType": "UPSERT", - "aspectName": "viewProperties", + "aspectName": "status", "aspect": { "json": { - "materialized": false, - "viewLogic": "let\n Source = Oracle.Database(\"localhost:1521/salesdb.domain.com\", [HierarchicalNavigation=true]), HR = Source{[Schema=\"HR\"]}[Data], EMPLOYEES1 = HR{[Name=\"EMPLOYEES\"]}[Data] \n in EMPLOYEES1", - "viewLanguage": "m_query" + "removed": false } }, "systemMetadata": { @@ -394,7 +381,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.job-history,DEV)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,myplatforminstance.library-dataset.job-history,PROD)", "changeType": "UPSERT", "aspectName": "datasetProperties", "aspect": { @@ -416,12 +403,33 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.job-history,DEV)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,myplatforminstance.library-dataset.snowflake_native-query,PROD)", "changeType": "UPSERT", - "aspectName": "status", + "aspectName": "subTypes", "aspect": { "json": { - "removed": false + "typeNames": [ + "PowerBI Dataset Table", + "View" + ] + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": 
"urn:li:dataset:(urn:li:dataPlatform:powerbi,myplatforminstance.library-dataset.job-history,PROD)", + "changeType": "UPSERT", + "aspectName": "viewProperties", + "aspect": { + "json": { + "materialized": false, + "viewLogic": "let\n Source = Oracle.Database(\"localhost:1521/salesdb.domain.com\", [HierarchicalNavigation=true]), HR = Source{[Schema=\"HR\"]}[Data], EMPLOYEES1 = HR{[Name=\"EMPLOYEES\"]}[Data] \n in EMPLOYEES1", + "viewLanguage": "m_query" } }, "systemMetadata": { @@ -432,7 +440,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.job-history,DEV)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,myplatforminstance.library-dataset.snowflake_native-query-with-join,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -451,14 +459,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.postgres_test_table,DEV)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,myplatforminstance.library-dataset.snowflake_native-query-with-join,PROD)", "changeType": "UPSERT", - "aspectName": "viewProperties", + "aspectName": "status", "aspect": { "json": { - "materialized": false, - "viewLogic": "let\n Source = PostgreSQL.Database(\"localhost\" , \"mics\" ),\n public_order_date = Source{[Schema=\"public\",Item=\"order_date\"]}[Data] \n in \n public_order_date", - "viewLanguage": "m_query" + "removed": false } }, "systemMetadata": { @@ -469,7 +475,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.postgres_test_table,DEV)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,myplatforminstance.library-dataset.snowflake_native-query-with-join,PROD)", "changeType": "UPSERT", "aspectName": "datasetProperties", "aspect": { @@ -478,7 +484,7 @@ "datasetId": "05169CD2-E713-41E6-9600-1D8066D95445" }, "externalUrl": 
"http://localhost/groups/64ED5CAD-7C10-4684-8180-826122881108/datasets/05169CD2-E713-41E6-9600-1D8066D95445/details", - "name": "postgres_test_table", + "name": "snowflake native-query-with-join", "description": "Library dataset description", "tags": [] } @@ -491,12 +497,14 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.postgres_test_table,DEV)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,myplatforminstance.library-dataset.snowflake_native-query-with-join,PROD)", "changeType": "UPSERT", - "aspectName": "status", + "aspectName": "viewProperties", "aspect": { "json": { - "removed": false + "materialized": false, + "viewLogic": "let\n Source = Value.NativeQuery(Snowflake.Databases(\"xaa48144.snowflakecomputing.com\",\"GSL_TEST_WH\",[Role=\"ACCOUNTADMIN\"]){[Name=\"GSL_TEST_DB\"]}[Data], \"select A.name from GSL_TEST_DB.PUBLIC.SALES_ANALYST as A inner join GSL_TEST_DB.PUBLIC.SALES_FORECAST as B on A.name = B.name where startswith(A.name, 'mo')\", null, [EnableFolding=true])\nin\n Source", + "viewLanguage": "m_query" } }, "systemMetadata": { @@ -507,15 +515,14 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.postgres_test_table,DEV)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,myplatforminstance.library-dataset.postgres_test_table,PROD)", "changeType": "UPSERT", - "aspectName": "subTypes", + "aspectName": "viewProperties", "aspect": { "json": { - "typeNames": [ - "PowerBI Dataset Table", - "View" - ] + "materialized": false, + "viewLogic": "let\n Source = PostgreSQL.Database(\"localhost\" , \"mics\" ),\n public_order_date = Source{[Schema=\"public\",Item=\"order_date\"]}[Data] \n in \n public_order_date", + "viewLanguage": "m_query" } }, "systemMetadata": { @@ -525,15 +532,52 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,hr_pbi_test.dbo_book_issue,DEV)", + 
"entityType": "chart", + "entityUrn": "urn:li:chart:(powerbi,myPlatformInstance.charts.B8E293DC-0C83-4AA0-9BB9-0A8738DF24A0)", "changeType": "UPSERT", - "aspectName": "viewProperties", + "aspectName": "chartInfo", "aspect": { "json": { - "materialized": false, - "viewLogic": "let\n Source = Sql.Database(\"localhost\", \"library\"),\n dbo_book_issue = Source{[Schema=\"dbo\",Item=\"book_issue\"]}[Data]\n in dbo_book_issue", - "viewLanguage": "m_query" + "customProperties": { + "createdFrom": "Dataset", + "datasetId": "05169CD2-E713-41E6-9600-1D8066D95445", + "datasetWebUrl": "http://localhost/groups/64ED5CAD-7C10-4684-8180-826122881108/datasets/05169CD2-E713-41E6-9600-1D8066D95445/details" + }, + "title": "test_tile", + "description": "test_tile", + "lastModified": { + "created": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + } + }, + "inputs": [ + { + "string": "urn:li:dataset:(urn:li:dataPlatform:powerbi,myplatforminstance.library-dataset.public_issue_history,PROD)" + }, + { + "string": "urn:li:dataset:(urn:li:dataPlatform:powerbi,myplatforminstance.library-dataset.snowflake_testtable,PROD)" + }, + { + "string": "urn:li:dataset:(urn:li:dataPlatform:powerbi,myplatforminstance.library-dataset.snowflake_native-query,PROD)" + }, + { + "string": "urn:li:dataset:(urn:li:dataPlatform:powerbi,myplatforminstance.library-dataset.big-query-with-parameter,PROD)" + }, + { + "string": "urn:li:dataset:(urn:li:dataPlatform:powerbi,myplatforminstance.library-dataset.snowflake_native-query-with-join,PROD)" + }, + { + "string": "urn:li:dataset:(urn:li:dataPlatform:powerbi,myplatforminstance.library-dataset.job-history,PROD)" + }, + { + "string": "urn:li:dataset:(urn:li:dataPlatform:powerbi,myplatforminstance.library-dataset.postgres_test_table,PROD)" + } + ] } }, "systemMetadata": { @@ -544,17 +588,17 @@ }, { "entityType": "dataset", - "entityUrn": 
"urn:li:dataset:(urn:li:dataPlatform:powerbi,hr_pbi_test.dbo_book_issue,DEV)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,myplatforminstance.library-dataset.postgres_test_table,PROD)", "changeType": "UPSERT", "aspectName": "datasetProperties", "aspect": { "json": { "customProperties": { - "datasetId": "ba0130a1-5b03-40de-9535-b34e778ea6ed" + "datasetId": "05169CD2-E713-41E6-9600-1D8066D95445" }, - "externalUrl": "http://localhost/groups/64ED5CAD-7C10-4684-8180-826122881108/datasets/ba0130a1-5b03-40de-9535-b34e778ea6ed/details", - "name": "dbo_book_issue", - "description": "hr pbi test description", + "externalUrl": "http://localhost/groups/64ED5CAD-7C10-4684-8180-826122881108/datasets/05169CD2-E713-41E6-9600-1D8066D95445/details", + "name": "postgres_test_table", + "description": "Library dataset description", "tags": [] } }, @@ -565,13 +609,15 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,hr_pbi_test.dbo_book_issue,DEV)", + "entityType": "chart", + "entityUrn": "urn:li:chart:(powerbi,myPlatformInstance.charts.B8E293DC-0C83-4AA0-9BB9-0A8738DF24A0)", "changeType": "UPSERT", - "aspectName": "status", + "aspectName": "subTypes", "aspect": { "json": { - "removed": false + "typeNames": [ + "PowerBI Tile" + ] } }, "systemMetadata": { @@ -582,15 +628,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,hr_pbi_test.dbo_book_issue,DEV)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,myplatforminstance.library-dataset.postgres_test_table,PROD)", "changeType": "UPSERT", - "aspectName": "subTypes", + "aspectName": "status", "aspect": { "json": { - "typeNames": [ - "PowerBI Dataset Table", - "View" - ] + "removed": false } }, "systemMetadata": { @@ -600,15 +643,15 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,hr_pbi_test.ms_sql_native_table,DEV)", + "entityType": "chart", + "entityUrn": 
"urn:li:chart:(powerbi,myPlatformInstance.charts.B8E293DC-0C83-4AA0-9BB9-0A8738DF24A0)", "changeType": "UPSERT", - "aspectName": "viewProperties", + "aspectName": "browsePaths", "aspect": { "json": { - "materialized": false, - "viewLogic": "let\n Source = Sql.Database(\"AUPRDWHDB\", \"COMMOPSDB\", [Query=\"select *,#(lf)concat((UPPER(REPLACE(CLIENT_DIRECTOR,'-',''))), MONTH_WID) as CD_AGENT_KEY,#(lf)concat((UPPER(REPLACE(CLIENT_MANAGER_CLOSING_MONTH,'-',''))), MONTH_WID) as AGENT_KEY#(lf)#(lf)from V_PS_CD_RETENTION\", CommandTimeout=#duration(0, 1, 30, 0)]),\n #\"Changed Type\" = Table.TransformColumnTypes(Source,{{\"mth_date\", type date}}),\n #\"Added Custom\" = Table.AddColumn(#\"Changed Type\", \"Month\", each Date.Month([mth_date])),\n #\"Added Custom1\" = Table.AddColumn(#\"Added Custom\", \"TPV Opening\", each if [Month] = 1 then [TPV_AMV_OPENING]\nelse if [Month] = 2 then 0\nelse if [Month] = 3 then 0\nelse if [Month] = 4 then [TPV_AMV_OPENING]\nelse if [Month] = 5 then 0\nelse if [Month] = 6 then 0\nelse if [Month] = 7 then [TPV_AMV_OPENING]\nelse if [Month] = 8 then 0\nelse if [Month] = 9 then 0\nelse if [Month] = 10 then [TPV_AMV_OPENING]\nelse if [Month] = 11 then 0\nelse if [Month] = 12 then 0\n\nelse 0)\nin\n #\"Added Custom1\"", - "viewLanguage": "m_query" + "paths": [ + "/powerbi/demo-workspace" + ] } }, "systemMetadata": { @@ -619,18 +662,15 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,hr_pbi_test.ms_sql_native_table,DEV)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,myplatforminstance.library-dataset.postgres_test_table,PROD)", "changeType": "UPSERT", - "aspectName": "datasetProperties", + "aspectName": "subTypes", "aspect": { "json": { - "customProperties": { - "datasetId": "ba0130a1-5b03-40de-9535-b34e778ea6ed" - }, - "externalUrl": "http://localhost/groups/64ED5CAD-7C10-4684-8180-826122881108/datasets/ba0130a1-5b03-40de-9535-b34e778ea6ed/details", - "name": "ms_sql_native_table", - 
"description": "hr pbi test description", - "tags": [] + "typeNames": [ + "PowerBI Dataset Table", + "View" + ] } }, "systemMetadata": { @@ -640,8 +680,49 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,hr_pbi_test.ms_sql_native_table,DEV)", + "entityType": "chart", + "entityUrn": "urn:li:chart:(powerbi,myPlatformInstance.charts.B8E293DC-0C83-4AA0-9BB9-0A8738DF24A0)", + "changeType": "UPSERT", + "aspectName": "chartKey", + "aspect": { + "json": { + "dashboardTool": "powerbi", + "chartId": "myPlatformInstance.charts.B8E293DC-0C83-4AA0-9BB9-0A8738DF24A0" + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(powerbi,myPlatformInstance.charts.B8E293DC-0C83-4AA0-9BB9-0A8738DF24A0)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:powerbi,myPlatformInstance)", + "urn": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:powerbi,myPlatformInstance)" + }, + { + "id": "demo-workspace" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(powerbi,myPlatformInstance.charts.B8E293DC-0C83-4AA0-9BB9-0A8738DF24A0)", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -657,7 +738,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,hr_pbi_test.ms_sql_native_table,DEV)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,myplatforminstance.hr_pbi_test.dbo_book_issue,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -675,13 +756,13 @@ } }, { - "entityType": "corpuser", - "entityUrn": "urn:li:corpuser:users.User1@foo.com", + "entityType": "dataset", + "entityUrn": 
"urn:li:dataset:(urn:li:dataPlatform:powerbi,myplatforminstance.hr_pbi_test.dbo_book_issue,PROD)", "changeType": "UPSERT", - "aspectName": "corpUserKey", + "aspectName": "status", "aspect": { "json": { - "username": "User1@foo.com" + "removed": false } }, "systemMetadata": { @@ -691,13 +772,19 @@ } }, { - "entityType": "corpuser", - "entityUrn": "urn:li:corpuser:users.User2@foo.com", + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,myplatforminstance.hr_pbi_test.dbo_book_issue,PROD)", "changeType": "UPSERT", - "aspectName": "corpUserKey", + "aspectName": "datasetProperties", "aspect": { "json": { - "username": "User2@foo.com" + "customProperties": { + "datasetId": "ba0130a1-5b03-40de-9535-b34e778ea6ed" + }, + "externalUrl": "http://localhost/groups/64ED5CAD-7C10-4684-8180-826122881108/datasets/ba0130a1-5b03-40de-9535-b34e778ea6ed/details", + "name": "dbo_book_issue", + "description": "hr pbi test description", + "tags": [] } }, "systemMetadata": { @@ -707,51 +794,33 @@ } }, { - "entityType": "chart", - "entityUrn": "urn:li:chart:(powerbi,charts.B8E293DC-0C83-4AA0-9BB9-0A8738DF24A0)", + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,myplatforminstance.hr_pbi_test.dbo_book_issue,PROD)", "changeType": "UPSERT", - "aspectName": "chartInfo", + "aspectName": "viewProperties", "aspect": { "json": { - "customProperties": { - "createdFrom": "Dataset", - "datasetId": "05169CD2-E713-41E6-9600-1D8066D95445", - "datasetWebUrl": "http://localhost/groups/64ED5CAD-7C10-4684-8180-826122881108/datasets/05169CD2-E713-41E6-9600-1D8066D95445/details" - }, - "title": "test_tile", - "description": "test_tile", - "lastModified": { - "created": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - } - }, - "inputs": [ - { - "string": "urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.public_issue_history,DEV)" - }, - { - "string": 
"urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.snowflake_testtable,DEV)" - }, - { - "string": "urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.snowflake_native-query,DEV)" - }, - { - "string": "urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.big-query-with-parameter,DEV)" - }, - { - "string": "urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.snowflake_native-query-with-join,DEV)" - }, - { - "string": "urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.job-history,DEV)" - }, - { - "string": "urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.postgres_test_table,DEV)" - } + "materialized": false, + "viewLogic": "let\n Source = Sql.Database(\"localhost\", \"library\"),\n dbo_book_issue = Source{[Schema=\"dbo\",Item=\"book_issue\"]}[Data]\n in dbo_book_issue", + "viewLanguage": "m_query" + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "powerbi-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,myplatforminstance.hr_pbi_test.ms_sql_native_table,PROD)", + "changeType": "UPSERT", + "aspectName": "subTypes", + "aspect": { + "json": { + "typeNames": [ + "PowerBI Dataset Table", + "View" ] } }, @@ -762,8 +831,8 @@ } }, { - "entityType": "chart", - "entityUrn": "urn:li:chart:(powerbi,charts.B8E293DC-0C83-4AA0-9BB9-0A8738DF24A0)", + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,myplatforminstance.hr_pbi_test.ms_sql_native_table,PROD)", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -778,15 +847,19 @@ } }, { - "entityType": "chart", - "entityUrn": "urn:li:chart:(powerbi,charts.B8E293DC-0C83-4AA0-9BB9-0A8738DF24A0)", + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,myplatforminstance.hr_pbi_test.ms_sql_native_table,PROD)", "changeType": "UPSERT", - "aspectName": "subTypes", + "aspectName": 
"datasetProperties", "aspect": { "json": { - "typeNames": [ - "PowerBI Tile" - ] + "customProperties": { + "datasetId": "ba0130a1-5b03-40de-9535-b34e778ea6ed" + }, + "externalUrl": "http://localhost/groups/64ED5CAD-7C10-4684-8180-826122881108/datasets/ba0130a1-5b03-40de-9535-b34e778ea6ed/details", + "name": "ms_sql_native_table", + "description": "hr pbi test description", + "tags": [] } }, "systemMetadata": { @@ -796,14 +869,15 @@ } }, { - "entityType": "chart", - "entityUrn": "urn:li:chart:(powerbi,charts.B8E293DC-0C83-4AA0-9BB9-0A8738DF24A0)", + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:powerbi,myplatforminstance.hr_pbi_test.ms_sql_native_table,PROD)", "changeType": "UPSERT", - "aspectName": "chartKey", + "aspectName": "viewProperties", "aspect": { "json": { - "dashboardTool": "powerbi", - "chartId": "charts.B8E293DC-0C83-4AA0-9BB9-0A8738DF24A0" + "materialized": false, + "viewLogic": "let\n Source = Sql.Database(\"AUPRDWHDB\", \"COMMOPSDB\", [Query=\"select *,#(lf)concat((UPPER(REPLACE(CLIENT_DIRECTOR,'-',''))), MONTH_WID) as CD_AGENT_KEY,#(lf)concat((UPPER(REPLACE(CLIENT_MANAGER_CLOSING_MONTH,'-',''))), MONTH_WID) as AGENT_KEY#(lf)#(lf)from V_PS_CD_RETENTION\", CommandTimeout=#duration(0, 1, 30, 0)]),\n #\"Changed Type\" = Table.TransformColumnTypes(Source,{{\"mth_date\", type date}}),\n #\"Added Custom\" = Table.AddColumn(#\"Changed Type\", \"Month\", each Date.Month([mth_date])),\n #\"Added Custom1\" = Table.AddColumn(#\"Added Custom\", \"TPV Opening\", each if [Month] = 1 then [TPV_AMV_OPENING]\nelse if [Month] = 2 then 0\nelse if [Month] = 3 then 0\nelse if [Month] = 4 then [TPV_AMV_OPENING]\nelse if [Month] = 5 then 0\nelse if [Month] = 6 then 0\nelse if [Month] = 7 then [TPV_AMV_OPENING]\nelse if [Month] = 8 then 0\nelse if [Month] = 9 then 0\nelse if [Month] = 10 then [TPV_AMV_OPENING]\nelse if [Month] = 11 then 0\nelse if [Month] = 12 then 0\n\nelse 0)\nin\n #\"Added Custom1\"", + "viewLanguage": "m_query" } }, 
"systemMetadata": { @@ -813,15 +887,27 @@ } }, { - "entityType": "chart", - "entityUrn": "urn:li:chart:(powerbi,charts.B8E293DC-0C83-4AA0-9BB9-0A8738DF24A0)", + "entityType": "dashboard", + "entityUrn": "urn:li:dashboard:(powerbi,myPlatformInstance.dashboards.7D668CAD-7FFC-4505-9215-655BCA5BEBAE)", "changeType": "UPSERT", - "aspectName": "browsePaths", + "aspectName": "ownership", "aspect": { "json": { - "paths": [ - "/powerbi/demo-workspace" - ] + "owners": [ + { + "owner": "urn:li:corpuser:users.User1@foo.com", + "type": "NONE" + }, + { + "owner": "urn:li:corpuser:users.User2@foo.com", + "type": "NONE" + } + ], + "ownerTypes": {}, + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + } } }, "systemMetadata": { @@ -831,17 +917,14 @@ } }, { - "entityType": "chart", - "entityUrn": "urn:li:chart:(powerbi,charts.B8E293DC-0C83-4AA0-9BB9-0A8738DF24A0)", + "entityType": "dashboard", + "entityUrn": "urn:li:dashboard:(powerbi,myPlatformInstance.dashboards.7D668CAD-7FFC-4505-9215-655BCA5BEBAE)", "changeType": "UPSERT", - "aspectName": "browsePathsV2", + "aspectName": "dashboardKey", "aspect": { "json": { - "path": [ - { - "id": "demo-workspace" - } - ] + "dashboardTool": "powerbi", + "dashboardId": "powerbi.linkedin.com/dashboards/7D668CAD-7FFC-4505-9215-655BCA5BEBAE" } }, "systemMetadata": { @@ -852,7 +935,7 @@ }, { "entityType": "chart", - "entityUrn": "urn:li:chart:(powerbi,charts.23212598-23b5-4980-87cc-5fc0ecd84385)", + "entityUrn": "urn:li:chart:(powerbi,myPlatformInstance.charts.23212598-23b5-4980-87cc-5fc0ecd84385)", "changeType": "UPSERT", "aspectName": "chartInfo", "aspect": { @@ -876,10 +959,10 @@ }, "inputs": [ { - "string": "urn:li:dataset:(urn:li:dataPlatform:powerbi,hr_pbi_test.dbo_book_issue,DEV)" + "string": "urn:li:dataset:(urn:li:dataPlatform:powerbi,myplatforminstance.hr_pbi_test.dbo_book_issue,PROD)" }, { - "string": "urn:li:dataset:(urn:li:dataPlatform:powerbi,hr_pbi_test.ms_sql_native_table,DEV)" + "string": 
"urn:li:dataset:(urn:li:dataPlatform:powerbi,myplatforminstance.hr_pbi_test.ms_sql_native_table,PROD)" } ] } @@ -892,7 +975,7 @@ }, { "entityType": "chart", - "entityUrn": "urn:li:chart:(powerbi,charts.23212598-23b5-4980-87cc-5fc0ecd84385)", + "entityUrn": "urn:li:chart:(powerbi,myPlatformInstance.charts.23212598-23b5-4980-87cc-5fc0ecd84385)", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -907,43 +990,8 @@ } }, { - "entityType": "chart", - "entityUrn": "urn:li:chart:(powerbi,charts.23212598-23b5-4980-87cc-5fc0ecd84385)", - "changeType": "UPSERT", - "aspectName": "chartKey", - "aspect": { - "json": { - "dashboardTool": "powerbi", - "chartId": "charts.23212598-23b5-4980-87cc-5fc0ecd84385" - } - }, - "systemMetadata": { - "lastObserved": 1643871600000, - "runId": "powerbi-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "chart", - "entityUrn": "urn:li:chart:(powerbi,charts.23212598-23b5-4980-87cc-5fc0ecd84385)", - "changeType": "UPSERT", - "aspectName": "subTypes", - "aspect": { - "json": { - "typeNames": [ - "PowerBI Tile" - ] - } - }, - "systemMetadata": { - "lastObserved": 1643871600000, - "runId": "powerbi-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "chart", - "entityUrn": "urn:li:chart:(powerbi,charts.23212598-23b5-4980-87cc-5fc0ecd84385)", + "entityType": "dashboard", + "entityUrn": "urn:li:dashboard:(powerbi,myPlatformInstance.dashboards.7D668CAD-7FFC-4505-9215-655BCA5BEBAE)", "changeType": "UPSERT", "aspectName": "browsePaths", "aspect": { @@ -961,15 +1009,13 @@ }, { "entityType": "chart", - "entityUrn": "urn:li:chart:(powerbi,charts.23212598-23b5-4980-87cc-5fc0ecd84385)", + "entityUrn": "urn:li:chart:(powerbi,myPlatformInstance.charts.23212598-23b5-4980-87cc-5fc0ecd84385)", "changeType": "UPSERT", - "aspectName": "browsePathsV2", + "aspectName": "subTypes", "aspect": { "json": { - "path": [ - { - "id": "demo-workspace" - } + "typeNames": [ + "PowerBI Tile" ] } }, @@ -980,15 +1026,14 @@ } }, { - 
"entityType": "dashboard", - "entityUrn": "urn:li:dashboard:(powerbi,dashboards.7D668CAD-7FFC-4505-9215-655BCA5BEBAE)", + "entityType": "chart", + "entityUrn": "urn:li:chart:(powerbi,myPlatformInstance.charts.23212598-23b5-4980-87cc-5fc0ecd84385)", "changeType": "UPSERT", - "aspectName": "browsePaths", + "aspectName": "chartKey", "aspect": { "json": { - "paths": [ - "/powerbi/demo-workspace" - ] + "dashboardTool": "powerbi", + "chartId": "myPlatformInstance.charts.23212598-23b5-4980-87cc-5fc0ecd84385" } }, "systemMetadata": { @@ -999,7 +1044,7 @@ }, { "entityType": "dashboard", - "entityUrn": "urn:li:dashboard:(powerbi,dashboards.7D668CAD-7FFC-4505-9215-655BCA5BEBAE)", + "entityUrn": "urn:li:dashboard:(powerbi,myPlatformInstance.dashboards.7D668CAD-7FFC-4505-9215-655BCA5BEBAE)", "changeType": "PATCH", "aspectName": "dashboardInfo", "aspect": { @@ -1031,13 +1076,13 @@ }, { "op": "add", - "path": "/charts/urn:li:chart:(powerbi,charts.B8E293DC-0C83-4AA0-9BB9-0A8738DF24A0)", - "value": "urn:li:chart:(powerbi,charts.B8E293DC-0C83-4AA0-9BB9-0A8738DF24A0)" + "path": "/charts/urn:li:chart:(powerbi,myPlatformInstance.charts.B8E293DC-0C83-4AA0-9BB9-0A8738DF24A0)", + "value": "urn:li:chart:(powerbi,myPlatformInstance.charts.B8E293DC-0C83-4AA0-9BB9-0A8738DF24A0)" }, { "op": "add", - "path": "/charts/urn:li:chart:(powerbi,charts.23212598-23b5-4980-87cc-5fc0ecd84385)", - "value": "urn:li:chart:(powerbi,charts.23212598-23b5-4980-87cc-5fc0ecd84385)" + "path": "/charts/urn:li:chart:(powerbi,myPlatformInstance.charts.23212598-23b5-4980-87cc-5fc0ecd84385)", + "value": "urn:li:chart:(powerbi,myPlatformInstance.charts.23212598-23b5-4980-87cc-5fc0ecd84385)" }, { "op": "add", @@ -1067,30 +1112,21 @@ } }, { - "entityType": "dashboard", - "entityUrn": "urn:li:dashboard:(powerbi,dashboards.7D668CAD-7FFC-4505-9215-655BCA5BEBAE)", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": false - } - }, - "systemMetadata": { - "lastObserved": 1643871600000, - 
"runId": "powerbi-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "dashboard", - "entityUrn": "urn:li:dashboard:(powerbi,dashboards.7D668CAD-7FFC-4505-9215-655BCA5BEBAE)", + "entityType": "chart", + "entityUrn": "urn:li:chart:(powerbi,myPlatformInstance.charts.23212598-23b5-4980-87cc-5fc0ecd84385)", "changeType": "UPSERT", - "aspectName": "dashboardKey", + "aspectName": "browsePathsV2", "aspect": { "json": { - "dashboardTool": "powerbi", - "dashboardId": "powerbi.linkedin.com/dashboards/7D668CAD-7FFC-4505-9215-655BCA5BEBAE" + "path": [ + { + "id": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:powerbi,myPlatformInstance)", + "urn": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:powerbi,myPlatformInstance)" + }, + { + "id": "demo-workspace" + } + ] } }, "systemMetadata": { @@ -1100,27 +1136,15 @@ } }, { - "entityType": "dashboard", - "entityUrn": "urn:li:dashboard:(powerbi,dashboards.7D668CAD-7FFC-4505-9215-655BCA5BEBAE)", + "entityType": "chart", + "entityUrn": "urn:li:chart:(powerbi,myPlatformInstance.charts.23212598-23b5-4980-87cc-5fc0ecd84385)", "changeType": "UPSERT", - "aspectName": "ownership", + "aspectName": "browsePaths", "aspect": { "json": { - "owners": [ - { - "owner": "urn:li:corpuser:users.User1@foo.com", - "type": "NONE" - }, - { - "owner": "urn:li:corpuser:users.User2@foo.com", - "type": "NONE" - } - ], - "ownerTypes": {}, - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - } + "paths": [ + "/powerbi/demo-workspace" + ] } }, "systemMetadata": { @@ -1131,12 +1155,16 @@ }, { "entityType": "dashboard", - "entityUrn": "urn:li:dashboard:(powerbi,dashboards.7D668CAD-7FFC-4505-9215-655BCA5BEBAE)", + "entityUrn": "urn:li:dashboard:(powerbi,myPlatformInstance.dashboards.7D668CAD-7FFC-4505-9215-655BCA5BEBAE)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ + { + "id": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:powerbi,myPlatformInstance)", + "urn": 
"urn:li:dataPlatformInstance:(urn:li:dataPlatform:powerbi,myPlatformInstance)" + }, { "id": "demo-workspace" } @@ -1150,24 +1178,8 @@ } }, { - "entityType": "corpuser", - "entityUrn": "urn:li:corpuser:users.User1@foo.com", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": false - } - }, - "systemMetadata": { - "lastObserved": 1643871600000, - "runId": "powerbi-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "corpuser", - "entityUrn": "urn:li:corpuser:users.User2@foo.com", + "entityType": "dashboard", + "entityUrn": "urn:li:dashboard:(powerbi,myPlatformInstance.dashboards.7D668CAD-7FFC-4505-9215-655BCA5BEBAE)", "changeType": "UPSERT", "aspectName": "status", "aspect": { diff --git a/metadata-ingestion/tests/integration/powerbi/test_powerbi.py b/metadata-ingestion/tests/integration/powerbi/test_powerbi.py index 6a95ec2c1dda4..23b23ecada0d4 100644 --- a/metadata-ingestion/tests/integration/powerbi/test_powerbi.py +++ b/metadata-ingestion/tests/integration/powerbi/test_powerbi.py @@ -819,6 +819,8 @@ def test_powerbi_ingest_urn_lower_case( "type": "powerbi", "config": { **default_source_config(), + "env": "PROD", + "platform_instance": "myPlatformInstance", "convert_urns_to_lowercase": True, "convert_lineage_urns_to_lowercase": True, }, From 8bea5d2a3da503b5d4381bbf6dd0f9c0d2ce8d7b Mon Sep 17 00:00:00 2001 From: sid-acryl <155424659+sid-acryl@users.noreply.github.com> Date: Wed, 7 Aug 2024 09:33:14 +0530 Subject: [PATCH 02/72] feat(ingest/lookml): CLL support for sql set in sql_table_name attribute of lookml view (#11069) --- .../ingestion/source/looker/looker_source.py | 21 -- .../source/looker/looker_template_language.py | 17 +- .../source/looker/lookml_concept_context.py | 70 ++++- .../ingestion/source/looker/view_upstream.py | 51 +++- .../looker/golden_test_ingest.json | 34 --- .../data.model.lkml | 4 + .../rent_as_employee_income_source.view.lkml | 27 ++ .../vv_lineage_liquid_template_golden.json | 248 
++++++++++++++++++ 8 files changed, 394 insertions(+), 78 deletions(-) create mode 100644 metadata-ingestion/tests/integration/lookml/vv-lineage-and-liquid-templates/rent_as_employee_income_source.view.lkml diff --git a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_source.py b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_source.py index d61458d8e924a..ef329da930dda 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_source.py +++ b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_source.py @@ -96,13 +96,11 @@ ChartTypeClass, ContainerClass, DashboardInfoClass, - DataPlatformInfoClass, InputFieldClass, InputFieldsClass, OwnerClass, OwnershipClass, OwnershipTypeClass, - PlatformTypeClass, SubTypesClass, ) from datahub.utilities.backpressure_aware_executor import BackpressureAwareExecutor @@ -1573,25 +1571,6 @@ def get_workunits_internal(self) -> Iterable[MetadataWorkUnit]: looker_dashboards_for_usage: List[looker_usage.LookerDashboardForUsage] = [] - # Emit platform instance entity - if self.source_config.platform_instance: - platform_instance_urn = builder.make_dataplatform_instance_urn( - platform=self.source_config.platform_name, - instance=self.source_config.platform_instance, - ) - - yield MetadataWorkUnit( - id=f"{platform_instance_urn}-aspect-dataplatformInfo", - mcp=MetadataChangeProposalWrapper( - entityUrn=platform_instance_urn, - aspect=DataPlatformInfoClass( - name=self.source_config.platform_instance, - type=PlatformTypeClass.OTHERS, - datasetNameDelimiter=".", - ), - ), - ) - with self.reporter.report_stage("dashboard_chart_metadata"): for job in BackpressureAwareExecutor.map( self.process_dashboard, diff --git a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_template_language.py b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_template_language.py index 2c523fcd98d08..99f83b5e922ba 100644 --- 
a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_template_language.py +++ b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_template_language.py @@ -10,9 +10,6 @@ create_template, ) from datahub.ingestion.source.looker.lookml_config import DERIVED_VIEW_PATTERN -from datahub.ingestion.source.looker.str_functions import ( - remove_extra_spaces_and_newlines, -) logger = logging.getLogger(__name__) @@ -95,6 +92,11 @@ def resolve_liquid_variable(text: str, liquid_variable: Dict[Any, Any]) -> str: return text +def _drop_derived_view_pattern(value: str) -> str: + # Drop ${ and } + return re.sub(DERIVED_VIEW_PATTERN, r"\1", value) + + def _complete_incomplete_sql(raw_view: dict, sql: str) -> str: # Looker supports sql fragments that omit the SELECT and FROM parts of the query @@ -109,8 +111,7 @@ def _complete_incomplete_sql(raw_view: dict, sql: str) -> str: # add a FROM clause at the end sql_query = f"{sql_query} FROM {raw_view['name']}" - # Drop ${ and } - return re.sub(DERIVED_VIEW_PATTERN, r"\1", sql_query) + return _drop_derived_view_pattern(sql_query) def resolve_liquid_variable_in_view_dict( @@ -122,10 +123,14 @@ def resolve_liquid_variable_in_view_dict( for view in raw_view["views"]: if "sql_table_name" in view: view["datahub_transformed_sql_table_name"] = resolve_liquid_variable( - text=remove_extra_spaces_and_newlines(view["sql_table_name"]), + text=view["sql_table_name"], liquid_variable=liquid_variable, ) # keeping original sql_table_name as is to avoid any visualization issue later + view["datahub_transformed_sql_table_name"] = _drop_derived_view_pattern( + value=view["datahub_transformed_sql_table_name"] + ) + if "derived_table" in view and "sql" in view["derived_table"]: # In sql we don't need to remove the extra spaces as sql parser takes care of extra spaces and \n # while generating URN from sql diff --git a/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_concept_context.py 
b/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_concept_context.py index a83aa2638ec96..7805b8b7b7d9a 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_concept_context.py +++ b/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_concept_context.py @@ -11,12 +11,14 @@ from datahub.ingestion.source.looker.looker_dataclasses import LookerViewFile from datahub.ingestion.source.looker.looker_file_loader import LookerViewFileLoader from datahub.ingestion.source.looker.lookml_config import ( - DERIVED_VIEW_PATTERN, DERIVED_VIEW_SUFFIX, NAME, LookMLSourceReport, ) from datahub.ingestion.source.looker.lookml_refinement import LookerRefinementResolver +from datahub.ingestion.source.looker.str_functions import ( + remove_extra_spaces_and_newlines, +) logger = logging.getLogger(__name__) @@ -56,7 +58,7 @@ def column_name_in_sql_attribute(self) -> List[str]: class LookerViewContext: """ - There are six patterns to associate the view's fields with dataset + There are seven patterns to associate the view's fields with dataset Pattern1: view: view_name { @@ -161,6 +163,36 @@ class LookerViewContext: For all possible options of "sql" attribute please refer looker doc: https://cloud.google.com/looker/docs/reference/param-field-sql + For pattern 6 i.e. view.derived.sql, The looker creates a temporary table to store the sql result, + However if we don't want to have a temporary table and want looker to always execute the sql to fetch the result then + in that case pattern 7 is useful (mentioned below). + + Pattern7: + view: customer_sales { + sql_table_name: ( + SELECT + customer_id, + SUM(sales_amount) AS total_sales + FROM + sales + GROUP BY + customer_id + ) ;; + + dimension: customer_id { + sql: ${TABLE}.customer_id ;; + } + + measure: total_sales { + type: sum + sql: ${TABLE}.total_sales ;; + } + } + + + In Pattern7 the fields' upstream dataset is the output of sql mentioned in + customer_sales.sql_table_name. 
+ """ raw_view: Dict @@ -252,6 +284,7 @@ def _get_sql_table_name_field(self) -> Optional[str]: return self.get_including_extends(field="sql_table_name") def _is_dot_sql_table_name_present(self) -> bool: + sql_table_name: Optional[str] = self._get_sql_table_name_field() if sql_table_name is None: @@ -268,7 +301,7 @@ def sql_table_name(self) -> str: if sql_table_name is None: sql_table_name = self.raw_view[NAME].lower() - return sql_table_name + return sql_table_name.lower() def datahub_transformed_sql_table_name(self) -> str: table_name: Optional[str] = self.raw_view.get( @@ -278,13 +311,13 @@ def datahub_transformed_sql_table_name(self) -> str: if not table_name: table_name = self.sql_table_name() - # sql_table_name is in the format "${view-name}.SQL_TABLE_NAME" - # remove extra characters - if self._is_dot_sql_table_name_present(): - table_name = re.sub(DERIVED_VIEW_PATTERN, r"\1", table_name) + # remove extra spaces and new lines from sql_table_name if it is not a sql + if not self.is_direct_sql_query_case(): + table_name = remove_extra_spaces_and_newlines(table_name) + # Some sql_table_name fields contain quotes like: optimizely."group", just remove the quotes + table_name = table_name.replace('"', "").replace("`", "").lower() - # Some sql_table_name fields contain quotes like: optimizely."group", just remove the quotes - return table_name.replace('"', "").replace("`", "").lower() + return table_name def derived_table(self) -> Dict[Any, Any]: """ @@ -371,6 +404,11 @@ def is_materialized_derived_view(self) -> bool: def is_regular_case(self) -> bool: # regular-case is pattern1 and 2 where upstream table is either view-name or # table name mentioned in sql_table_name attribute + + # It should not be the sql query + if self.is_direct_sql_query_case(): + return False + if ( self.is_sql_table_name_referring_to_view() or self.is_sql_based_derived_case() @@ -381,6 +419,9 @@ def is_regular_case(self) -> bool: return True def is_sql_table_name_referring_to_view(self) -> 
bool: + if self.is_direct_sql_query_case(): + return False + # It is pattern3 return self._is_dot_sql_table_name_present() @@ -413,3 +454,14 @@ def is_sql_based_derived_view_without_fields_case(self) -> bool: return True return False + + def is_direct_sql_query_case(self) -> bool: + # pattern 7 + # sqlglot doesn't have a function to validate whether text is valid SQL or not. + # Applying a simple logic to check if sql_table_name contains a sql. + # if sql_table_name contains sql then its value starts with "(" and checking if "select" is present in side the + # text + return ( + self.sql_table_name().strip().startswith("(") + and "select" in self.sql_table_name() + ) diff --git a/metadata-ingestion/src/datahub/ingestion/source/looker/view_upstream.py b/metadata-ingestion/src/datahub/ingestion/source/looker/view_upstream.py index 98646e19a7014..d5929b52aea3a 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/looker/view_upstream.py +++ b/metadata-ingestion/src/datahub/ingestion/source/looker/view_upstream.py @@ -237,7 +237,7 @@ def create_fields(self) -> List[ViewField]: return [] # it is for the special case -class SqlBasedDerivedViewUpstream(AbstractViewUpstream): +class SqlBasedDerivedViewUpstream(AbstractViewUpstream, ABC): """ Handle the case where upstream dataset is defined in derived_table.sql """ @@ -263,7 +263,7 @@ def __get_spr(self) -> Optional[SqlParsingResult]: return None spr = create_lineage_sql_parsed_result( - query=self.view_context.datahub_transformed_sql(), + query=self.get_sql_query(), default_schema=self.view_context.view_connection.default_schema, default_db=self.view_context.view_connection.default_db, platform=self.view_context.view_connection.platform, @@ -390,6 +390,28 @@ def get_upstream_column_ref( def get_upstream_dataset_urn(self) -> List[Urn]: return self._get_upstream_dataset_urn() + @abstractmethod + def get_sql_query(self) -> str: + pass + + +class DirectQueryUpstreamSource(SqlBasedDerivedViewUpstream): + """ + Pattern 7 
as per view-context documentation + """ + + def get_sql_query(self) -> str: + return self.view_context.datahub_transformed_sql_table_name() + + +class DerivedQueryUpstreamSource(SqlBasedDerivedViewUpstream): + """ + Pattern 4 as per view-context documentation + """ + + def get_sql_query(self) -> str: + return self.view_context.datahub_transformed_sql() + class NativeDerivedViewUpstream(AbstractViewUpstream): """ @@ -611,6 +633,7 @@ def create_view_upstream( ctx: PipelineContext, reporter: LookMLSourceReport, ) -> AbstractViewUpstream: + if view_context.is_regular_case(): return RegularViewUpstream( view_context=view_context, @@ -629,11 +652,23 @@ def create_view_upstream( looker_view_id_cache=looker_view_id_cache, ) - if ( - view_context.is_sql_based_derived_case() - or view_context.is_sql_based_derived_view_without_fields_case() + if any( + [ + view_context.is_sql_based_derived_case(), + view_context.is_sql_based_derived_view_without_fields_case(), + ] ): - return SqlBasedDerivedViewUpstream( + + return DerivedQueryUpstreamSource( + view_context=view_context, + config=config, + reporter=reporter, + ctx=ctx, + looker_view_id_cache=looker_view_id_cache, + ) + + if view_context.is_direct_sql_query_case(): + return DirectQueryUpstreamSource( view_context=view_context, config=config, reporter=reporter, @@ -651,9 +686,9 @@ def create_view_upstream( ) reporter.report_warning( - title="Implementation Not Found", + title="ViewUpstream Implementation Not Found", message="No implementation found to resolve upstream of the view", - context=view_context.view_file_name(), + context=f"view_name={view_context.name()} , view_file_name={view_context.view_file_name()}", ) return EmptyImplementation( diff --git a/metadata-ingestion/tests/integration/looker/golden_test_ingest.json b/metadata-ingestion/tests/integration/looker/golden_test_ingest.json index 76c8f04e8447a..9c0363e0892f0 100644 --- a/metadata-ingestion/tests/integration/looker/golden_test_ingest.json +++ 
b/metadata-ingestion/tests/integration/looker/golden_test_ingest.json @@ -1,22 +1,4 @@ [ -{ - "entityType": "dataPlatformInstance", - "entityUrn": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:looker,ap-south-1)", - "changeType": "UPSERT", - "aspectName": "dataPlatformInfo", - "aspect": { - "json": { - "name": "ap-south-1", - "type": "OTHERS", - "datasetNameDelimiter": "." - } - }, - "systemMetadata": { - "lastObserved": 1586847600000, - "runId": "looker-test", - "lastRunId": "no-run-id-provided" - } -}, { "entityType": "container", "entityUrn": "urn:li:container:e7fe6fc9c3ca70e78694dcc5dd9c05b7", @@ -805,22 +787,6 @@ "lastRunId": "no-run-id-provided" } }, -{ - "entityType": "dataPlatformInstance", - "entityUrn": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:looker,ap-south-1)", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": false - } - }, - "systemMetadata": { - "lastObserved": 1586847600000, - "runId": "looker-test", - "lastRunId": "no-run-id-provided" - } -}, { "entityType": "tag", "entityUrn": "urn:li:tag:Dimension", diff --git a/metadata-ingestion/tests/integration/lookml/vv-lineage-and-liquid-templates/data.model.lkml b/metadata-ingestion/tests/integration/lookml/vv-lineage-and-liquid-templates/data.model.lkml index 6eb92d749c9f7..2cc6ae994d245 100644 --- a/metadata-ingestion/tests/integration/lookml/vv-lineage-and-liquid-templates/data.model.lkml +++ b/metadata-ingestion/tests/integration/lookml/vv-lineage-and-liquid-templates/data.model.lkml @@ -6,6 +6,7 @@ include: "employee_total_income.view.lkml" include: "top_10_employee_income_source.view.lkml" include: "employee_tax_report.view.lkml" include: "employee_salary_rating.view.lkml" +include: "rent_as_employee_income_source.view.lkml" explore: activity_logs { } @@ -23,4 +24,7 @@ explore: employee_tax_report { } explore: employee_salary_rating { +} + +explore: rent_as_employee_income_source { } \ No newline at end of file diff --git 
a/metadata-ingestion/tests/integration/lookml/vv-lineage-and-liquid-templates/rent_as_employee_income_source.view.lkml b/metadata-ingestion/tests/integration/lookml/vv-lineage-and-liquid-templates/rent_as_employee_income_source.view.lkml new file mode 100644 index 0000000000000..40b6e3642f3b3 --- /dev/null +++ b/metadata-ingestion/tests/integration/lookml/vv-lineage-and-liquid-templates/rent_as_employee_income_source.view.lkml @@ -0,0 +1,27 @@ +view: rent_as_employee_income_source { + sql_table_name: ( + SELECT id, + name, + source + FROM ${employee_income_source.SQL_TABLE_NAME} + WHERE source = "RENT" + ORDER BY source desc + LIMIT 10 + );; + + + dimension: id { + type: number + sql: ${TABLE}.id ;; + } + + dimension: name { + type: string + sql: ${TABLE}.name ;; + } + + dimension: source { + type: string + sql: ${TABLE}.source ;; + } +} \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/lookml/vv_lineage_liquid_template_golden.json b/metadata-ingestion/tests/integration/lookml/vv_lineage_liquid_template_golden.json index d12ced5e42506..2e55971b65bd4 100644 --- a/metadata-ingestion/tests/integration/lookml/vv_lineage_liquid_template_golden.json +++ b/metadata-ingestion/tests/integration/lookml/vv_lineage_liquid_template_golden.json @@ -1580,6 +1580,254 @@ "lastRunId": "no-run-id-provided" } }, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.rent_as_employee_income_source,PROD)", + "changeType": "UPSERT", + "aspectName": "subTypes", + "aspect": { + "json": { + "typeNames": [ + "View" + ] + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "lookml-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.rent_as_employee_income_source,PROD)", + "changeType": "UPSERT", + "aspectName": "viewProperties", + "aspect": { + "json": { + "materialized": false, + 
"viewLogic": "view: rent_as_employee_income_source {\n sql_table_name: (\n SELECT id,\n name,\n source\n FROM ${employee_income_source.SQL_TABLE_NAME}\n WHERE source = \"RENT\"\n ORDER BY source desc\n LIMIT 10\n );;\n\n\n dimension: id {\n type: number\n sql: ${TABLE}.id ;;\n }\n\n dimension: name {\n type: string\n sql: ${TABLE}.name ;;\n }\n\n dimension: source {\n type: string\n sql: ${TABLE}.source ;;\n }\n}", + "viewLanguage": "lookml" + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "lookml-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.rent_as_employee_income_source,PROD)", + "changeType": "UPSERT", + "aspectName": "container", + "aspect": { + "json": { + "container": "urn:li:container:78f22c19304954b15e8adb1d9809975e" + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "lookml-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "proposedSnapshot": { + "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { + "urn": "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.rent_as_employee_income_source,PROD)", + "aspects": [ + { + "com.linkedin.pegasus2avro.common.BrowsePaths": { + "paths": [ + "/Develop/lkml_samples/" + ] + } + }, + { + "com.linkedin.pegasus2avro.common.Status": { + "removed": false + } + }, + { + "com.linkedin.pegasus2avro.dataset.UpstreamLineage": { + "upstreams": [ + { + "auditStamp": { + "time": 1586847600000, + "actor": "urn:li:corpuser:datahub" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.employee_income_source,PROD)", + "type": "VIEW" + } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.employee_income_source,PROD),id)" + ], + "downstreamType": "FIELD", + "downstreams": [ + 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.rent_as_employee_income_source,PROD),id)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.employee_income_source,PROD),name)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.rent_as_employee_income_source,PROD),name)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.employee_income_source,PROD),source)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.rent_as_employee_income_source,PROD),source)" + ], + "confidenceScore": 1.0 + } + ] + } + }, + { + "com.linkedin.pegasus2avro.schema.SchemaMetadata": { + "schemaName": "rent_as_employee_income_source", + "platform": "urn:li:dataPlatform:looker", + "version": 0, + "created": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "hash": "", + "platformSchema": { + "com.linkedin.pegasus2avro.schema.OtherSchema": { + "rawSchema": "" + } + }, + "fields": [ + { + "fieldPath": "id", + "nullable": false, + "description": "", + "label": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "number", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:Dimension" + } + ] + }, + "isPartOfKey": false + }, + { + "fieldPath": "name", + "nullable": false, + "description": "", + "label": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": 
"urn:li:tag:Dimension" + } + ] + }, + "isPartOfKey": false + }, + { + "fieldPath": "source", + "nullable": false, + "description": "", + "label": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:Dimension" + } + ] + }, + "isPartOfKey": false + } + ], + "primaryKeys": [] + } + }, + { + "com.linkedin.pegasus2avro.dataset.DatasetProperties": { + "customProperties": { + "looker.file.path": "rent_as_employee_income_source.view.lkml", + "looker.model": "data" + }, + "name": "rent_as_employee_income_source", + "tags": [] + } + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "lookml-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.rent_as_employee_income_source,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "Develop" + }, + { + "id": "urn:li:container:78f22c19304954b15e8adb1d9809975e", + "urn": "urn:li:container:78f22c19304954b15e8adb1d9809975e" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "lookml-test", + "lastRunId": "no-run-id-provided" + } +}, { "entityType": "tag", "entityUrn": "urn:li:tag:Dimension", From 40e61f9d6e96cc2c741be654ca9f7adbc868e9c7 Mon Sep 17 00:00:00 2001 From: Hyejin Yoon <0327jane@gmail.com> Date: Wed, 7 Aug 2024 13:12:02 +0900 Subject: [PATCH 03/72] docs: update graphql docs on forms & structured properties (#11100) --- docs/api/tutorials/forms.md | 134 +++++++++++++++++- docs/api/tutorials/structured-properties.md | 117 ++++++++++++++- .../feature-guides/documentation-forms.md | 2 +- 3 files changed, 244 insertions(+), 9 deletions(-) diff --git a/docs/api/tutorials/forms.md b/docs/api/tutorials/forms.md index 3f28353595be7..eb555910f18eb 100644 --- 
a/docs/api/tutorials/forms.md +++ b/docs/api/tutorials/forms.md @@ -9,16 +9,16 @@ Documentation Forms are a way for end-users to fill out all mandatory attributes Learn more about forms in the [Documentation Forms Feature Guide](../../../docs/features/feature-guides/documentation-forms.md). - ### Goal Of This Guide -This guide will show you how to create and read forms. +This guide will show you how to +- Create, Update, Read, and Delete a form +- Assign and Remove a form from entities ## Prerequisites For this tutorial, you need to deploy DataHub Quickstart and ingest sample data. For detailed information, please refer to [Datahub Quickstart Guide](/docs/quickstart.md). - @@ -29,14 +29,45 @@ Connect to your instance via [init](https://datahubproject.io/docs/cli/#init): 2. Set the server to your sandbox instance, `https://{your-instance-address}/gms` 3. Set the token to your access token - - ## Create a Form + + +```graphql +mutation createForm { + createForm( + input: { + id: "metadataInitiative2024", + name: "Metadata Initiative 2024", + description: "How we want to ensure the most important data assets in our organization have all of the most important and expected pieces of metadata filled out", + type: VERIFICATION, + prompts: [ + { + id: "123", + title: "retentionTime", + description: "Apply Retention Time structured property to form", + type: STRUCTURED_PROPERTY, + structuredPropertyParams: { + urn: "urn:li:structuredProperty:retentionTime" + } + } + ], + actors: { + users: ["urn:li:corpuser:jane@email.com", "urn:li:corpuser:john@email.com"], + groups: ["urn:li:corpGroup:team@email.com"] + } + } + ) { + urn + } +} +``` + + Create a yaml file representing the forms you’d like to load. 
@@ -111,8 +142,42 @@ If successful, you should see `Created form urn:li:form:...` -## Read Property Definition +## Update Form + + + +```graphql +mutation updateForm { + updateForm( + input: { + urn: "urn:li:form:metadataInitiative2024", + name: "Metadata Initiative 2024", + description: "How we want to ensure the most important data assets in our organization have all of the most important and expected pieces of metadata filled out", + type: VERIFICATION, + promptsToAdd: [ + { + id: "456", + title: "deprecationDate", + description: "Deprecation date for dataset", + type: STRUCTURED_PROPERTY, + structuredPropertyParams: { + urn: "urn:li:structuredProperty:deprecationDate" + } + } + ] + promptsToRemove: ["123"] + } + ) { + urn + } +} +``` + + + + +## Read Property Definition @@ -146,3 +211,60 @@ If successful, you should see metadata about your form returned like below. + +## Delete Form + + + + +```graphql +mutation deleteForm { + deleteForm( + input: { + urn: "urn:li:form:metadataInitiative2024" + } + ) +} +``` + + + +## Assign Form to Entities + +For assigning a form to a given list of entities: + + + + +```graphql +mutation batchAssignForm { + batchAssignForm( + input: { + formUrn: "urn:li:form:myform", + entityUrns: ["urn:li:dataset:mydataset1", "urn:li:dataset:mydataset2"] + } + ) +} +``` + + + +## Remove Form from Entities + +For removing a form from a given list of entities: + + + + +```graphql +mutation batchRemoveForm { + batchRemoveForm( + input: { + formUrn: "urn:li:form:myform", + entityUrns: ["urn:li:dataset:mydataset1", "urn:li:dataset:mydataset2"] + } + ) +} +``` + + diff --git a/docs/api/tutorials/structured-properties.md b/docs/api/tutorials/structured-properties.md index c56a2848638fc..6f6c6541554d9 100644 --- a/docs/api/tutorials/structured-properties.md +++ b/docs/api/tutorials/structured-properties.md @@ -56,7 +56,33 @@ Requirements for OpenAPI are: The following code will create a structured property `io.acryl.privacy.retentionTime`. 
- + + +```graphql +mutation createStructuredProperty { + createStructuredProperty( + input: { + id: "retentionTime", + qualifiedName:"retentionTime", + displayName: "Retention Time", + description: "Retention Time is used to figure out how long to retain records in a dataset", + valueType: "urn:li:dataType:number", + allowedValues: [ + {numberValue: 30, description: "30 days, usually reserved for datasets that are ephemeral and contain pii"}, + {numberValue: 90, description:"description: Use this for datasets that drive monthly reporting but contain pii"}, + {numberValue: 365, description:"Use this for non-sensitive data that can be retained for longer"} + ], + cardinality: SINGLE, + entityTypes: ["urn:li:entityType:dataset", "urn:li:entityType:dataFlow"], + } + ) { + urn + } +} +``` + + + Create a yaml file representing the properties you’d like to load. For example, below file represents a property `io.acryl.privacy.retentionTime`. You can see the full example [here](https://github.com/datahub-project/datahub/blob/example-yaml-sp/metadata-ingestion/examples/structured_properties/struct_props.yaml). @@ -355,7 +381,37 @@ Example Response: This action will set/replace all structured properties on the entity. See PATCH operations to add/remove a single property. - + + +```graphql +mutation upsertStructuredProperties { + upsertStructuredProperties( + input: { + assetUrn: "urn:li:mydataset1", + structuredPropertyInputParams: [ + { + structuredPropertyUrn: "urn:li:structuredProperty:mystructuredproperty", + values: [ + { + stringValue: "123" + } + ] + } + ] + } + ) { + properties { + structuredProperty { + urn + } + } + } +} + +``` + + + You can set structured properties to a dataset by creating a dataset yaml file with structured properties. For example, below is a dataset yaml file with structured properties in both the field and dataset level. 
@@ -466,6 +522,31 @@ Or you can run the following command to view the properties associated with the datahub dataset get --urn {urn} ``` +## Remove Structured Properties From a Dataset + +For removing a structured property or list of structured properties from a dataset: + + + + +```graphql +mutation removeStructuredProperties { + removeStructuredProperties( + input: { + assetUrn: "urn:li:mydataset1", + structuredPropertyUrns: ["urn:li:structuredProperty:mystructuredproperty"] + } + ) { + properties { + structuredProperty {urn} + } + } +} +``` + + + + ## Patch Structured Property Value This section will show you how to patch a structured property value - either by removing, adding, or upserting a single property. @@ -780,6 +861,38 @@ You can see that the first property has been removed and the second property is In this example, we'll add the property back with a different value, preserving the existing property. + + +```graphql +mutation updateStructuredProperty { + updateStructuredProperty( + input: { + urn: "urn:li:structuredProperty:retentionTime", + displayName: "Retention Time", + description: "Retention Time is used to figure out how long to retain records in a dataset", + newAllowedValues: [ + { + numberValue: 30, + description: "30 days, usually reserved for datasets that are ephemeral and contain pii" + }, + { + numberValue: 90, + description: "Use this for datasets that drive monthly reporting but contain pii" + }, + { + numberValue: 365, + description: "Use this for non-sensitive data that can be retained for longer" + } + ] + } + ) { + urn + } +} + +``` + + ```shell diff --git a/docs/features/feature-guides/documentation-forms.md b/docs/features/feature-guides/documentation-forms.md index b007892e66094..2edeb8ce302d7 100644 --- a/docs/features/feature-guides/documentation-forms.md +++ b/docs/features/feature-guides/documentation-forms.md @@ -101,7 +101,7 @@ You sure can! 
Please keep in mind that an Asset will only be considered Document ### API Tutorials -- [Create a Documentation Form](../../../docs/api/tutorials/forms.md) +- [API Guides on Documentation Form](../../../docs/api/tutorials/forms.md) :::note You must create a Structured Property before including it in a Documentation Form. From 900c25986cb36ca61d723426b4b207a2a67b93aa Mon Sep 17 00:00:00 2001 From: Kunal-kankriya <127090035+Kunal-kankriya@users.noreply.github.com> Date: Wed, 7 Aug 2024 14:54:58 +0530 Subject: [PATCH 04/72] test(search): search openAPI v3 test (#11049) --- smoke-test/tests/read_only/test_search.py | 75 +++++++++++++++++------ 1 file changed, 57 insertions(+), 18 deletions(-) diff --git a/smoke-test/tests/read_only/test_search.py b/smoke-test/tests/read_only/test_search.py index 90385c5228bc1..3b9635f3da2cd 100644 --- a/smoke-test/tests/read_only/test_search.py +++ b/smoke-test/tests/read_only/test_search.py @@ -1,10 +1,13 @@ import pytest +import requests from tests.test_result_msg import add_datahub_stats -from tests.utils import get_frontend_session, get_frontend_url +from tests.utils import get_frontend_session, get_frontend_url, get_gms_url -restli_default_headers = { - "X-RestLi-Protocol-Version": "2.0.0", +BASE_URL_V3 = f"{get_gms_url()}/openapi/v3" + +default_headers = { + "Content-Type": "application/json", } ENTITY_TO_MAP = { @@ -59,16 +62,8 @@ def _get_search_result(frontend_session, entity: str): ("chart", "chart"), ("dataset", "dataset"), ("dashboard", "dashboard"), - ( - # Task - "dataJob", - "dataJob", - ), - ( - # Pipeline - "dataFlow", - "dataFlow", - ), + ("dataJob", "dataJob"), + ("dataFlow", "dataFlow"), ("container", "container"), ("tag", "tag"), ("corpUser", "corpUser"), @@ -78,11 +73,7 @@ def _get_search_result(frontend_session, entity: str): ("mlPrimaryKey", "mlPrimaryKey"), ("corpGroup", "corpGroup"), ("mlFeatureTable", "mlFeatureTable"), - ( - # Term group - "glossaryNode", - "glossaryNode", - ), + ("glossaryNode", 
"glossaryNode"), ("mlModel", "mlModel"), ], ) @@ -112,8 +103,56 @@ def test_search_works(entity_type, api_name): """, "variables": {"input": first_urn}, } + response = frontend_session.post(f"{get_frontend_url()}/api/v2/graphql", json=json) response.raise_for_status() res_data = response.json() assert res_data["data"], f"res_data was {res_data}" assert res_data["data"][api_name]["urn"] == first_urn, f"res_data was {res_data}" + + +@pytest.mark.read_only +@pytest.mark.parametrize( + "entity_type", + [ + "chart", + "dataset", + "dashboard", + "dataJob", + "dataFlow", + "container", + "tag", + "corpUser", + "mlFeature", + "glossaryTerm", + "domain", + "mlPrimaryKey", + "corpGroup", + "mlFeatureTable", + "glossaryNode", + "mlModel", + ], +) +def test_openapi_v3_entity(entity_type): + frontend_session = get_frontend_session() + search_result = _get_search_result(frontend_session, entity_type) + num_entities = search_result["total"] + if num_entities == 0: + print(f"[WARN] No results for {entity_type}") + return + entities = search_result["searchResults"] + + first_urn = entities[0]["entity"]["urn"] + + session = requests.Session() + url = f"{BASE_URL_V3}/entity/{entity_type}/{first_urn}" + response = session.get(url, headers=default_headers) + response.raise_for_status() + actual_data = response.json() + print(f"Entity Data for URN {first_urn}: {actual_data}") + + expected_data = {"urn": first_urn} + + assert ( + actual_data["urn"] == expected_data["urn"] + ), f"Mismatch: expected {expected_data}, got {actual_data}" From edb0f19f1b594d662ed12584fe07e6fd348e8f12 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Wed, 7 Aug 2024 13:50:08 -0700 Subject: [PATCH 05/72] fix(ingest/tableau): prevent empty site content urls (#11057) Co-authored-by: coderabbitai[bot] <136622811+coderabbitai[bot]@users.noreply.github.com> --- .../src/datahub/ingestion/source/tableau.py | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git 
a/metadata-ingestion/src/datahub/ingestion/source/tableau.py b/metadata-ingestion/src/datahub/ingestion/source/tableau.py index 9cde3b1f8d3a0..510cb6c96d1f2 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/tableau.py +++ b/metadata-ingestion/src/datahub/ingestion/source/tableau.py @@ -757,6 +757,12 @@ def _re_authenticate(self): ] = self.config.get_tableau_auth(self.site.content_url) self.server.auth.sign_in(tableau_auth) + @property + def site_content_url(self) -> Optional[str]: + if self.site and self.site.content_url: + return self.site.content_url + return None + def _populate_usage_stat_registry(self) -> None: if self.server is None: return @@ -2524,7 +2530,9 @@ def emit_sheets_as_charts( last_modified = self.get_last_modified(creator, created_at, updated_at) if sheet.get(c.PATH): - site_part = f"/site/{self.site.content_url}" if self.site else "" + site_part = ( + f"/site/{self.site_content_url}" if self.site_content_url else "" + ) sheet_external_url = ( f"{self.config.connect_uri}/#{site_part}/views/{sheet.get(c.PATH)}" ) @@ -2535,7 +2543,7 @@ def emit_sheets_as_charts( and sheet[c.CONTAINED_IN_DASHBOARDS][0].get(c.PATH) ): # sheet contained in dashboard - site_part = f"/t/{self.site.content_url}" if self.site else "" + site_part = f"/t/{self.site_content_url}" if self.site_content_url else "" dashboard_path = sheet[c.CONTAINED_IN_DASHBOARDS][0][c.PATH] sheet_external_url = f"{self.config.connect_uri}{site_part}/authoring/{dashboard_path}/{quote(sheet.get(c.NAME, ''), safe='')}" else: @@ -2667,7 +2675,7 @@ def emit_workbook_as_container(self, workbook: Dict) -> Iterable[MetadataWorkUni else None ) - site_part = f"/site/{self.site.content_url}" if self.site else "" + site_part = f"/site/{self.site_content_url}" if self.site_content_url else "" workbook_uri = workbook.get("uri") workbook_part = ( workbook_uri[workbook_uri.index("/workbooks/") :] if workbook_uri else None @@ -2826,7 +2834,7 @@ def emit_dashboard( updated_at = 
dashboard.get(c.UPDATED_AT, datetime.now()) last_modified = self.get_last_modified(creator, created_at, updated_at) - site_part = f"/site/{self.site.content_url}" if self.site else "" + site_part = f"/site/{self.site_content_url}" if self.site_content_url else "" dashboard_external_url = ( f"{self.config.connect_uri}/#{site_part}/views/{dashboard.get(c.PATH, '')}" ) From c226883097d01daf2fcb18689aee72ac5bf9f1a0 Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Wed, 7 Aug 2024 15:53:36 -0500 Subject: [PATCH 06/72] feat(entity-client): implement client batch interface (#11106) --- .../entity/ebean/batch/AspectsBatchImpl.java | 2 +- .../entity/ebean/batch/ProposedItem.java | 8 ++- .../metadata/client/JavaEntityClient.java | 58 ++++++++++----- .../linkedin/entity/client/EntityClient.java | 44 +++++------- .../entity/client/RestliEntityClient.java | 27 +++---- .../tests/privileges/test_privileges.py | 8 +++ smoke-test/tests/privileges/utils.py | 70 ++++++++++++++++++- 7 files changed, 157 insertions(+), 60 deletions(-) diff --git a/metadata-io/metadata-io-api/src/main/java/com/linkedin/metadata/entity/ebean/batch/AspectsBatchImpl.java b/metadata-io/metadata-io-api/src/main/java/com/linkedin/metadata/entity/ebean/batch/AspectsBatchImpl.java index a23f6ab175046..7a1af12272ac5 100644 --- a/metadata-io/metadata-io-api/src/main/java/com/linkedin/metadata/entity/ebean/batch/AspectsBatchImpl.java +++ b/metadata-io/metadata-io-api/src/main/java/com/linkedin/metadata/entity/ebean/batch/AspectsBatchImpl.java @@ -123,7 +123,7 @@ public AspectsBatchImplBuilder one(BatchItem data, RetrieverContext retrieverCon } public AspectsBatchImplBuilder mcps( - List mcps, + Collection mcps, AuditStamp auditStamp, RetrieverContext retrieverContext) { diff --git a/metadata-io/metadata-io-api/src/main/java/com/linkedin/metadata/entity/ebean/batch/ProposedItem.java 
b/metadata-io/metadata-io-api/src/main/java/com/linkedin/metadata/entity/ebean/batch/ProposedItem.java index 452ed39ddf317..132a731d278af 100644 --- a/metadata-io/metadata-io-api/src/main/java/com/linkedin/metadata/entity/ebean/batch/ProposedItem.java +++ b/metadata-io/metadata-io-api/src/main/java/com/linkedin/metadata/entity/ebean/batch/ProposedItem.java @@ -7,6 +7,7 @@ import com.linkedin.metadata.aspect.batch.MCPItem; import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.EntitySpec; +import com.linkedin.metadata.utils.EntityKeyUtils; import com.linkedin.metadata.utils.GenericRecordUtils; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.mxe.SystemMetadata; @@ -63,7 +64,12 @@ public RecordTemplate getRecordTemplate() { @Nonnull @Override public Urn getUrn() { - return metadataChangeProposal.getEntityUrn(); + Urn urn = metadataChangeProposal.getEntityUrn(); + if (urn == null) { + urn = + EntityKeyUtils.getUrnFromProposal(metadataChangeProposal, entitySpec.getKeyAspectSpec()); + } + return urn; } @Nullable diff --git a/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java b/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java index 337288ab59c60..f8370c9efe3e6 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java @@ -12,7 +12,6 @@ import com.linkedin.common.AuditStamp; import com.linkedin.common.VersionedUrn; import com.linkedin.common.urn.Urn; -import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.DataMap; import com.linkedin.data.template.RecordTemplate; import com.linkedin.data.template.StringArray; @@ -24,6 +23,7 @@ import com.linkedin.metadata.aspect.EnvelopedAspectArray; import com.linkedin.metadata.aspect.VersionedAspect; import com.linkedin.metadata.aspect.batch.AspectsBatch; +import com.linkedin.metadata.aspect.batch.BatchItem; 
import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.browse.BrowseResultV2; import com.linkedin.metadata.entity.DeleteEntityService; @@ -48,6 +48,7 @@ import com.linkedin.metadata.search.client.CachingEntitySearchService; import com.linkedin.metadata.service.RollbackService; import com.linkedin.metadata.timeseries.TimeseriesAspectService; +import com.linkedin.metadata.utils.AuditStampUtils; import com.linkedin.metadata.utils.metrics.MetricUtils; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.mxe.PlatformEvent; @@ -60,6 +61,7 @@ import java.net.URISyntaxException; import java.time.Clock; import java.util.ArrayList; +import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -68,6 +70,7 @@ import java.util.Optional; import java.util.Set; import java.util.function.Supplier; +import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; import lombok.RequiredArgsConstructor; @@ -738,35 +741,54 @@ public List getTimeseriesAspectValues( return response.getValues(); } - // TODO: Factor out ingest logic into a util that can be accessed by the java client and the - // resource @Override - public String ingestProposal( + @Nonnull + public List batchIngestProposals( @Nonnull OperationContext opContext, - @Nonnull final MetadataChangeProposal metadataChangeProposal, - final boolean async) - throws RemoteInvocationException { + @Nonnull Collection metadataChangeProposals, + boolean async) { String actorUrnStr = opContext.getSessionAuthentication().getActor() != null ? 
opContext.getSessionAuthentication().getActor().toUrnStr() : Constants.UNKNOWN_ACTOR; - final AuditStamp auditStamp = - new AuditStamp().setTime(_clock.millis()).setActor(UrnUtils.getUrn(actorUrnStr)); + final AuditStamp auditStamp = AuditStampUtils.createAuditStamp(actorUrnStr); AspectsBatch batch = AspectsBatchImpl.builder() - .mcps( - List.of(metadataChangeProposal), auditStamp, opContext.getRetrieverContext().get()) + .mcps(metadataChangeProposals, auditStamp, opContext.getRetrieverContext().get()) .build(); - Optional one = - entityService.ingestProposal(opContext, batch, async).stream().findFirst(); + Map> resultMap = + entityService.ingestProposal(opContext, batch, async).stream() + .collect(Collectors.groupingBy(IngestResult::getRequest)); + + // Update runIds + batch.getItems().stream() + .filter(resultMap::containsKey) + .forEach( + requestItem -> { + List results = resultMap.get(requestItem); + Optional resultUrn = + results.stream().map(IngestResult::getUrn).filter(Objects::nonNull).findFirst(); + resultUrn.ifPresent( + urn -> tryIndexRunId(opContext, urn, requestItem.getSystemMetadata())); + }); - Urn urn = one.map(IngestResult::getUrn).orElse(metadataChangeProposal.getEntityUrn()); - if (one.isPresent()) { - tryIndexRunId(opContext, urn, metadataChangeProposal.getSystemMetadata()); - } - return urn.toString(); + // Preserve ordering + return batch.getItems().stream() + .map( + requestItem -> { + if (resultMap.containsKey(requestItem)) { + List results = resultMap.get(requestItem); + return results.stream() + .filter(r -> r.getUrn() != null) + .findFirst() + .map(r -> r.getUrn().toString()) + .orElse(null); + } + return null; + }) + .collect(Collectors.toList()); } @SneakyThrows diff --git a/metadata-service/restli-client-api/src/main/java/com/linkedin/entity/client/EntityClient.java b/metadata-service/restli-client-api/src/main/java/com/linkedin/entity/client/EntityClient.java index 8821143cde6cc..cb5c691d0cb61 100644 --- 
a/metadata-service/restli-client-api/src/main/java/com/linkedin/entity/client/EntityClient.java +++ b/metadata-service/restli-client-api/src/main/java/com/linkedin/entity/client/EntityClient.java @@ -38,7 +38,6 @@ import java.util.Map; import java.util.Optional; import java.util.Set; -import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; @@ -519,27 +518,17 @@ default String ingestProposal( return ingestProposal(opContext, metadataChangeProposal, false); } - String ingestProposal( + /** + * Ingest a MetadataChangeProposal event. + * + * @return the urn string ingested + */ + default String ingestProposal( @Nonnull OperationContext opContext, @Nonnull final MetadataChangeProposal metadataChangeProposal, final boolean async) - throws RemoteInvocationException; - - @Deprecated - default String wrappedIngestProposal( - @Nonnull OperationContext opContext, @Nonnull MetadataChangeProposal metadataChangeProposal) { - return wrappedIngestProposal(opContext, metadataChangeProposal, false); - } - - default String wrappedIngestProposal( - @Nonnull OperationContext opContext, - @Nonnull MetadataChangeProposal metadataChangeProposal, - final boolean async) { - try { - return ingestProposal(opContext, metadataChangeProposal, async); - } catch (RemoteInvocationException e) { - throw new RuntimeException(e); - } + throws RemoteInvocationException { + return batchIngestProposals(opContext, List.of(metadataChangeProposal), async).get(0); } @Deprecated @@ -550,15 +539,20 @@ default List batchIngestProposals( return batchIngestProposals(opContext, metadataChangeProposals, false); } - default List batchIngestProposals( + /** + * Ingest a list of proposals in a batch. 
+ * + * @param opContext operation context + * @param metadataChangeProposals list of proposals + * @param async async or sync ingestion path + * @return ingested urns + */ + @Nonnull + List batchIngestProposals( @Nonnull OperationContext opContext, @Nonnull final Collection metadataChangeProposals, final boolean async) - throws RemoteInvocationException { - return metadataChangeProposals.stream() - .map(proposal -> wrappedIngestProposal(opContext, proposal, async)) - .collect(Collectors.toList()); - } + throws RemoteInvocationException; @Deprecated Optional getVersionedAspect( diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/RestliEntityClient.java b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/RestliEntityClient.java index fe1ca571efea5..2a3ae5d006ae0 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/RestliEntityClient.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/RestliEntityClient.java @@ -12,7 +12,7 @@ import com.linkedin.data.template.RecordTemplate; import com.linkedin.data.template.StringArray; import com.linkedin.entity.AspectsDoGetTimeseriesAspectValuesRequestBuilder; -import com.linkedin.entity.AspectsDoIngestProposalRequestBuilder; +import com.linkedin.entity.AspectsDoIngestProposalBatchRequestBuilder; import com.linkedin.entity.AspectsGetRequestBuilder; import com.linkedin.entity.AspectsRequestBuilders; import com.linkedin.entity.EntitiesBatchGetRequestBuilder; @@ -67,6 +67,7 @@ import com.linkedin.metadata.search.ScrollResult; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; +import com.linkedin.mxe.MetadataChangeProposalArray; import com.linkedin.mxe.PlatformEvent; import com.linkedin.mxe.SystemMetadata; import com.linkedin.parseq.retry.backoff.BackoffPolicy; @@ -1047,23 +1048,23 @@ public List getTimeseriesAspectValues( .getValues(); } - /** - * Ingest a 
MetadataChangeProposal event. - * - * @return the urn string ingested - */ + @Nonnull @Override - public String ingestProposal( + public List batchIngestProposals( @Nonnull OperationContext opContext, - @Nonnull final MetadataChangeProposal metadataChangeProposal, - final boolean async) + @Nonnull Collection metadataChangeProposals, + boolean async) throws RemoteInvocationException { - final AspectsDoIngestProposalRequestBuilder requestBuilder = + final AspectsDoIngestProposalBatchRequestBuilder requestBuilder = ASPECTS_REQUEST_BUILDERS - .actionIngestProposal() - .proposalParam(metadataChangeProposal) + .actionIngestProposalBatch() + .proposalsParam(new MetadataChangeProposalArray(metadataChangeProposals)) .asyncParam(String.valueOf(async)); - return sendClientRequest(requestBuilder, opContext.getSessionAuthentication()).getEntity(); + String result = + sendClientRequest(requestBuilder, opContext.getSessionAuthentication()).getEntity(); + return metadataChangeProposals.stream() + .map(proposal -> "success".equals(result) ? 
proposal.getEntityUrn().toString() : null) + .collect(Collectors.toList()); } @Override diff --git a/smoke-test/tests/privileges/test_privileges.py b/smoke-test/tests/privileges/test_privileges.py index c9a0b62159314..bce7b8a238c38 100644 --- a/smoke-test/tests/privileges/test_privileges.py +++ b/smoke-test/tests/privileges/test_privileges.py @@ -4,11 +4,13 @@ from tests.privileges.utils import ( assign_role, assign_user_to_group, + clear_polices, create_group, create_user, create_user_policy, remove_group, remove_policy, + remove_secret, remove_user, set_base_platform_privileges_policy_status, set_view_dataset_sensitive_info_policy_status, @@ -65,6 +67,12 @@ def privileges_and_test_user_setup(admin_session): # Remove test user remove_user(admin_session, "urn:li:corpuser:user") + # Remove secret + remove_secret(admin_session, "urn:li:dataHubSecret:TestSecretName") + + # Remove test policies + clear_polices(admin_session) + # Restore All users privileges set_base_platform_privileges_policy_status("ACTIVE", admin_session) set_view_dataset_sensitive_info_policy_status("ACTIVE", admin_session) diff --git a/smoke-test/tests/privileges/utils.py b/smoke-test/tests/privileges/utils.py index 1e58ec4085b70..72ad94a42a462 100644 --- a/smoke-test/tests/privileges/utils.py +++ b/smoke-test/tests/privileges/utils.py @@ -246,8 +246,8 @@ def create_user_policy(user_urn, privileges, session): "variables": { "input": { "type": "PLATFORM", - "name": "Policy Name", - "description": "Policy Description", + "name": "Test Policy Name", + "description": "Test Policy Description", "state": "ACTIVE", "resources": {"filter": {"criteria": []}}, "privileges": privileges, @@ -288,3 +288,69 @@ def remove_policy(urn, session): assert res_data["data"] assert res_data["data"]["deletePolicy"] assert res_data["data"]["deletePolicy"] == urn + + +def clear_polices(session): + list_policy_json = { + "query": """query listPolicies($input: ListPoliciesInput!) 
{ + listPolicies(input: $input) { + start + count + total + policies { + urn + editable + name + description + __typename + } + __typename + } + }""", + "variables": { + "input": { + "count": 100, + "start": 0, + "orFilters": [ + { + "and": [ + { + "field": "state", + "values": ["ACTIVE"], + "condition": "EQUAL", + }, + { + "field": "editable", + "values": ["true"], + "condition": "EQUAL", + }, + ] + } + ], + } + }, + } + + response = session.post( + f"{get_frontend_url()}/api/v2/graphql", json=list_policy_json + ) + response.raise_for_status() + res_data = response.json() + + assert res_data + assert res_data["data"] + assert res_data["data"]["listPolicies"] + for policy in res_data["data"]["listPolicies"]["policies"]: + if "test" in policy["name"].lower() or "test" in policy["description"].lower(): + remove_policy(policy["urn"], session) + + +def remove_secret(session, urn): + remove_secret = { + "query": """mutation deleteSecret($urn: String!) {\n + deleteSecret(urn: $urn)\n}""", + "variables": {"urn": urn}, + } + + response = session.post(f"{get_frontend_url()}/api/v2/graphql", json=remove_secret) + response.raise_for_status() From a25df8e6a0ab5c36605e674380721edc8f72e95f Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Wed, 7 Aug 2024 14:04:18 -0700 Subject: [PATCH 07/72] fix(snowflake): avoid reporting warnings/info for sys tables (#11114) --- .../src/datahub/ingestion/api/source.py | 2 +- .../source/snowflake/snowflake_schema_gen.py | 2 +- .../source/snowflake/snowflake_utils.py | 20 +++++++++++++------ 3 files changed, 16 insertions(+), 8 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/api/source.py b/metadata-ingestion/src/datahub/ingestion/api/source.py index a4de8b382430c..3dea3d36f41f1 100644 --- a/metadata-ingestion/src/datahub/ingestion/api/source.py +++ b/metadata-ingestion/src/datahub/ingestion/api/source.py @@ -47,7 +47,7 @@ logger = logging.getLogger(__name__) -_MAX_CONTEXT_STRING_LENGTH = 300 +_MAX_CONTEXT_STRING_LENGTH = 
1000 class SourceCapability(Enum): diff --git a/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_schema_gen.py b/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_schema_gen.py index 1d4a5b377da14..a64589bcfed02 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_schema_gen.py +++ b/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_schema_gen.py @@ -440,7 +440,7 @@ def _process_schema( yield from self._process_tag(tag) if not snowflake_schema.views and not snowflake_schema.tables: - self.structured_reporter.warning( + self.structured_reporter.info( title="No tables/views found in schema", message="If tables exist, please grant REFERENCES or SELECT permissions on them.", context=f"{db_name}.{schema_name}", diff --git a/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_utils.py b/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_utils.py index a1878963d3798..0177d59ef6b21 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_utils.py +++ b/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_utils.py @@ -127,6 +127,8 @@ def is_dataset_pattern_allowed( SnowflakeObjectDomain.MATERIALIZED_VIEW, ): return False + if _is_sys_table(dataset_name): + return False if len(dataset_params) != 3: self.structured_reporter.info( @@ -176,6 +178,11 @@ def _combine_identifier_parts( return f"{db_name}.{schema_name}.{table_name}" +def _is_sys_table(table_name: str) -> bool: + # Often will look like `SYS$_UNPIVOT_VIEW1737` or `sys$_pivot_view19`. 
+ return table_name.lower().startswith("sys$") + + # Qualified Object names from snowflake audit logs have quotes for for snowflake quoted identifiers, # For example "test-database"."test-schema".test_table # whereas we generate urns without quotes even for quoted identifiers for backward compatibility @@ -186,12 +193,13 @@ def _cleanup_qualified_name( ) -> str: name_parts = qualified_name.split(".") if len(name_parts) != 3: - structured_reporter.info( - title="Unexpected dataset pattern", - message="We failed to parse a Snowflake qualified name into its constituent parts. " - "DB/schema/table filtering may not work as expected on these entities.", - context=f"{qualified_name} has {len(name_parts)} parts", - ) + if not _is_sys_table(qualified_name): + structured_reporter.info( + title="Unexpected dataset pattern", + message="We failed to parse a Snowflake qualified name into its constituent parts. " + "DB/schema/table filtering may not work as expected on these entities.", + context=f"{qualified_name} has {len(name_parts)} parts", + ) return qualified_name.replace('"', "") return _combine_identifier_parts( db_name=name_parts[0].strip('"'), From d6e46b9bcf3b8e8b1e8719fb352f5837bf6b402c Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Wed, 7 Aug 2024 14:57:05 -0700 Subject: [PATCH 08/72] fix(ingest): downgrade column type mapping warning to info (#11115) --- .../datahub/ingestion/source/abs/source.py | 74 +---------------- .../ingestion/source/dbt/dbt_common.py | 7 +- .../src/datahub/ingestion/source/s3/source.py | 79 +------------------ .../ingestion/source/sql/sql_common.py | 7 +- 4 files changed, 15 insertions(+), 152 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/abs/source.py b/metadata-ingestion/src/datahub/ingestion/source/abs/source.py index 39ebd79c2e226..66f268799b2f1 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/abs/source.py +++ b/metadata-ingestion/src/datahub/ingestion/source/abs/source.py @@ -8,29 +8,10 @@ 
from collections import OrderedDict from datetime import datetime from pathlib import PurePath -from typing import Any, Dict, Iterable, List, Optional, Tuple +from typing import Dict, Iterable, List, Optional, Tuple import smart_open.compression as so_compression from more_itertools import peekable -from pyspark.sql.types import ( - ArrayType, - BinaryType, - BooleanType, - ByteType, - DateType, - DecimalType, - DoubleType, - FloatType, - IntegerType, - LongType, - MapType, - NullType, - ShortType, - StringType, - StructField, - StructType, - TimestampType, -) from smart_open import open as smart_open from datahub.emitter.mce_builder import ( @@ -48,7 +29,7 @@ platform_name, support_status, ) -from datahub.ingestion.api.source import MetadataWorkUnitProcessor, SourceReport +from datahub.ingestion.api.source import MetadataWorkUnitProcessor from datahub.ingestion.api.workunit import MetadataWorkUnit from datahub.ingestion.source.abs.config import DataLakeSourceConfig, PathSpec from datahub.ingestion.source.abs.report import DataLakeSourceReport @@ -72,22 +53,14 @@ StatefulIngestionSourceBase, ) from datahub.metadata.com.linkedin.pegasus2avro.schema import ( - BooleanTypeClass, - BytesTypeClass, - DateTypeClass, - NullTypeClass, - NumberTypeClass, - RecordTypeClass, SchemaField, SchemaFieldDataType, SchemaMetadata, StringTypeClass, - TimeTypeClass, ) from datahub.metadata.schema_classes import ( DataPlatformInstanceClass, DatasetPropertiesClass, - MapTypeClass, OperationClass, OperationTypeClass, OtherSchemaClass, @@ -100,55 +73,12 @@ logging.getLogger("py4j").setLevel(logging.ERROR) logger: logging.Logger = logging.getLogger(__name__) -# for a list of all types, see https://spark.apache.org/docs/3.0.3/api/python/_modules/pyspark/sql/types.html -_field_type_mapping = { - NullType: NullTypeClass, - StringType: StringTypeClass, - BinaryType: BytesTypeClass, - BooleanType: BooleanTypeClass, - DateType: DateTypeClass, - TimestampType: TimeTypeClass, - DecimalType: 
NumberTypeClass, - DoubleType: NumberTypeClass, - FloatType: NumberTypeClass, - ByteType: BytesTypeClass, - IntegerType: NumberTypeClass, - LongType: NumberTypeClass, - ShortType: NumberTypeClass, - ArrayType: NullTypeClass, - MapType: MapTypeClass, - StructField: RecordTypeClass, - StructType: RecordTypeClass, -} PAGE_SIZE = 1000 # Hack to support the .gzip extension with smart_open. so_compression.register_compressor(".gzip", so_compression._COMPRESSOR_REGISTRY[".gz"]) -def get_column_type( - report: SourceReport, dataset_name: str, column_type: str -) -> SchemaFieldDataType: - """ - Maps known Spark types to datahub types - """ - TypeClass: Any = None - - for field_type, type_class in _field_type_mapping.items(): - if isinstance(column_type, field_type): - TypeClass = type_class - break - - # if still not found, report the warning - if TypeClass is None: - report.report_warning( - dataset_name, f"unable to map type {column_type} to metadata schema" - ) - TypeClass = NullTypeClass - - return SchemaFieldDataType(type=TypeClass()) - - # config flags to emit telemetry for config_options_to_report = [ "platform", diff --git a/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_common.py b/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_common.py index ead86acc299ca..e2b5f8378732c 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_common.py +++ b/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_common.py @@ -849,8 +849,11 @@ def get_column_type( # if still not found, report the warning if TypeClass is None: if column_type: - report.report_warning( - dataset_name, f"unable to map type {column_type} to metadata schema" + report.info( + title="Unable to map column types to DataHub types", + message="Got an unexpected column type. 
The column's parsed field type will not be populated.", + context=f"{dataset_name} - {column_type}", + log=False, ) TypeClass = NullTypeClass diff --git a/metadata-ingestion/src/datahub/ingestion/source/s3/source.py b/metadata-ingestion/src/datahub/ingestion/source/s3/source.py index b8c7fd5aa88fc..f81d06c35e3b0 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/s3/source.py +++ b/metadata-ingestion/src/datahub/ingestion/source/s3/source.py @@ -8,32 +8,13 @@ from collections import OrderedDict from datetime import datetime from pathlib import PurePath -from typing import Any, Dict, Iterable, List, Optional, Tuple +from typing import Dict, Iterable, List, Optional, Tuple import smart_open.compression as so_compression from more_itertools import peekable from pyspark.conf import SparkConf from pyspark.sql import SparkSession from pyspark.sql.dataframe import DataFrame -from pyspark.sql.types import ( - ArrayType, - BinaryType, - BooleanType, - ByteType, - DateType, - DecimalType, - DoubleType, - FloatType, - IntegerType, - LongType, - MapType, - NullType, - ShortType, - StringType, - StructField, - StructType, - TimestampType, -) from pyspark.sql.utils import AnalysisException from smart_open import open as smart_open @@ -52,7 +33,7 @@ platform_name, support_status, ) -from datahub.ingestion.api.source import MetadataWorkUnitProcessor, SourceReport +from datahub.ingestion.api.source import MetadataWorkUnitProcessor from datahub.ingestion.api.workunit import MetadataWorkUnit from datahub.ingestion.source.aws.s3_boto_utils import get_s3_tags, list_folders from datahub.ingestion.source.aws.s3_util import ( @@ -72,22 +53,13 @@ StatefulIngestionSourceBase, ) from datahub.metadata.com.linkedin.pegasus2avro.schema import ( - BooleanTypeClass, - BytesTypeClass, - DateTypeClass, - NullTypeClass, - NumberTypeClass, - RecordTypeClass, SchemaField, - SchemaFieldDataType, SchemaMetadata, StringTypeClass, - TimeTypeClass, ) from datahub.metadata.schema_classes import ( 
DataPlatformInstanceClass, DatasetPropertiesClass, - MapTypeClass, OperationClass, OperationTypeClass, OtherSchemaClass, @@ -101,55 +73,12 @@ logging.getLogger("py4j").setLevel(logging.ERROR) logger: logging.Logger = logging.getLogger(__name__) -# for a list of all types, see https://spark.apache.org/docs/3.0.3/api/python/_modules/pyspark/sql/types.html -_field_type_mapping = { - NullType: NullTypeClass, - StringType: StringTypeClass, - BinaryType: BytesTypeClass, - BooleanType: BooleanTypeClass, - DateType: DateTypeClass, - TimestampType: TimeTypeClass, - DecimalType: NumberTypeClass, - DoubleType: NumberTypeClass, - FloatType: NumberTypeClass, - ByteType: BytesTypeClass, - IntegerType: NumberTypeClass, - LongType: NumberTypeClass, - ShortType: NumberTypeClass, - ArrayType: NullTypeClass, - MapType: MapTypeClass, - StructField: RecordTypeClass, - StructType: RecordTypeClass, -} PAGE_SIZE = 1000 # Hack to support the .gzip extension with smart_open. so_compression.register_compressor(".gzip", so_compression._COMPRESSOR_REGISTRY[".gz"]) -def get_column_type( - report: SourceReport, dataset_name: str, column_type: str -) -> SchemaFieldDataType: - """ - Maps known Spark types to datahub types - """ - TypeClass: Any = None - - for field_type, type_class in _field_type_mapping.items(): - if isinstance(column_type, field_type): - TypeClass = type_class - break - - # if still not found, report the warning - if TypeClass is None: - report.report_warning( - dataset_name, f"unable to map type {column_type} to metadata schema" - ) - TypeClass = NullTypeClass - - return SchemaFieldDataType(type=TypeClass()) - - # config flags to emit telemetry for config_options_to_report = [ "platform", @@ -490,9 +419,7 @@ def add_partition_columns_to_schema( if not is_fieldpath_v2 else f"[version=2.0].[type=string].{partition_key}", nativeDataType="string", - type=SchemaFieldDataType(StringTypeClass()) - if not is_fieldpath_v2 - else SchemaFieldDataTypeClass(type=StringTypeClass()), + 
type=SchemaFieldDataTypeClass(StringTypeClass()), isPartitioningKey=True, nullable=True, recursive=False, diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_common.py b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_common.py index 1fa308eae6b76..2ab1e6bb41af1 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_common.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_common.py @@ -263,8 +263,11 @@ def get_column_type( break if TypeClass is None: - sql_report.report_warning( - dataset_name, f"unable to map type {column_type!r} to metadata schema" + sql_report.info( + title="Unable to map column types to DataHub types", + message="Got an unexpected column type. The column's parsed field type will not be populated.", + context=f"{dataset_name} - {column_type!r}", + log=False, ) TypeClass = NullTypeClass From e08412e513215405902a61713895bbefa2ed624e Mon Sep 17 00:00:00 2001 From: Ajoy Majumdar Date: Thu, 8 Aug 2024 08:00:38 -0700 Subject: [PATCH 09/72] feat(api): add AuditStamp to the V3 API entity/aspect response (#11118) --- .../openapi/v3/models/AspectItem.java | 15 +++++++ .../openapi/v3/models/GenericAspectV3.java | 1 + .../openapi/v3/models/GenericEntityV3.java | 19 ++++---- .../openapi/v3/OpenAPIV3Generator.java | 26 +++++++---- .../v3/controller/EntityController.java | 44 ++++++++++++++----- 5 files changed, 77 insertions(+), 28 deletions(-) create mode 100644 metadata-service/openapi-servlet/models/src/main/java/io/datahubproject/openapi/v3/models/AspectItem.java diff --git a/metadata-service/openapi-servlet/models/src/main/java/io/datahubproject/openapi/v3/models/AspectItem.java b/metadata-service/openapi-servlet/models/src/main/java/io/datahubproject/openapi/v3/models/AspectItem.java new file mode 100644 index 0000000000000..ec5dff7817231 --- /dev/null +++ b/metadata-service/openapi-servlet/models/src/main/java/io/datahubproject/openapi/v3/models/AspectItem.java @@ -0,0 +1,15 @@ +package 
io.datahubproject.openapi.v3.models; + +import com.linkedin.common.AuditStamp; +import com.linkedin.data.template.RecordTemplate; +import com.linkedin.mxe.SystemMetadata; +import lombok.Builder; +import lombok.Value; + +@Builder(toBuilder = true) +@Value +public class AspectItem { + RecordTemplate aspect; + SystemMetadata systemMetadata; + AuditStamp auditStamp; +} diff --git a/metadata-service/openapi-servlet/models/src/main/java/io/datahubproject/openapi/v3/models/GenericAspectV3.java b/metadata-service/openapi-servlet/models/src/main/java/io/datahubproject/openapi/v3/models/GenericAspectV3.java index 4db2c3288d154..70bf2182c29f4 100644 --- a/metadata-service/openapi-servlet/models/src/main/java/io/datahubproject/openapi/v3/models/GenericAspectV3.java +++ b/metadata-service/openapi-servlet/models/src/main/java/io/datahubproject/openapi/v3/models/GenericAspectV3.java @@ -19,4 +19,5 @@ public class GenericAspectV3 implements GenericAspect { @Nonnull Map value; @Nullable Map systemMetadata; @Nullable Map headers; + @Nullable Map auditStamp; } diff --git a/metadata-service/openapi-servlet/models/src/main/java/io/datahubproject/openapi/v3/models/GenericEntityV3.java b/metadata-service/openapi-servlet/models/src/main/java/io/datahubproject/openapi/v3/models/GenericEntityV3.java index 3af3b25028fad..54d6ac2c1736f 100644 --- a/metadata-service/openapi-servlet/models/src/main/java/io/datahubproject/openapi/v3/models/GenericEntityV3.java +++ b/metadata-service/openapi-servlet/models/src/main/java/io/datahubproject/openapi/v3/models/GenericEntityV3.java @@ -5,9 +5,6 @@ import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.linkedin.common.urn.Urn; -import com.linkedin.data.template.RecordTemplate; -import com.linkedin.mxe.SystemMetadata; -import com.linkedin.util.Pair; import io.datahubproject.openapi.models.GenericEntity; import java.io.IOException; import java.nio.charset.StandardCharsets; @@ -42,9 +39,7 @@ 
public Map getAspects() { public static class GenericEntityV3Builder { public GenericEntityV3 build( - ObjectMapper objectMapper, - @Nonnull Urn urn, - Map> aspects) { + ObjectMapper objectMapper, @Nonnull Urn urn, Map aspects) { Map jsonObjectMap = aspects.entrySet().stream() .map( @@ -53,13 +48,18 @@ public GenericEntityV3 build( String aspectName = entry.getKey(); Map aspectValue = objectMapper.readValue( - RecordUtils.toJsonString(entry.getValue().getFirst()) + RecordUtils.toJsonString(entry.getValue().getAspect()) .getBytes(StandardCharsets.UTF_8), new TypeReference<>() {}); Map systemMetadata = - entry.getValue().getSecond() != null + entry.getValue().getSystemMetadata() != null ? objectMapper.convertValue( - entry.getValue().getSecond(), new TypeReference<>() {}) + entry.getValue().getSystemMetadata(), new TypeReference<>() {}) + : null; + Map auditStamp = + entry.getValue().getAuditStamp() != null + ? objectMapper.convertValue( + entry.getValue().getAuditStamp().data(), new TypeReference<>() {}) : null; return Map.entry( @@ -67,6 +67,7 @@ public GenericEntityV3 build( GenericAspectV3.builder() .value(aspectValue) .systemMetadata(systemMetadata) + .auditStamp(auditStamp) .build()); } catch (IOException ex) { throw new RuntimeException(ex); diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v3/OpenAPIV3Generator.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v3/OpenAPIV3Generator.java index f26ad6821c583..f6f248be77c67 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v3/OpenAPIV3Generator.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v3/OpenAPIV3Generator.java @@ -40,7 +40,7 @@ public class OpenAPIV3Generator { private static final String NAME_QUERY = "query"; private static final String NAME_PATH = "path"; private static final String NAME_SYSTEM_METADATA = "systemMetadata"; - private static final String NAME_ASYNC = 
"async"; + private static final String NAME_AUDIT_STAMP = "auditStamp"; private static final String NAME_VERSION = "version"; private static final String NAME_SCROLL_ID = "scrollId"; private static final String NAME_INCLUDE_SOFT_DELETE = "includeSoftDelete"; @@ -77,9 +77,6 @@ public static OpenAPI generateOpenApiSpec(EntityRegistry entityRegistry) { // Components final Components components = new Components(); // --> Aspect components - // TODO: Correct handling of SystemMetadata and SortOrder - components.addSchemas( - "SystemMetadata", new Schema().type(TYPE_OBJECT).additionalProperties(true)); components.addSchemas("SortOrder", new Schema()._enum(List.of("ASCENDING", "DESCENDING"))); components.addSchemas("AspectPatch", buildAspectPatchSchema()); components.addSchemas( @@ -167,6 +164,10 @@ public static OpenAPI generateOpenApiSpec(EntityRegistry entityRegistry) { buildSingleEntityAspectPath( e, a.getName(), a.getPegasusSchema().getName()))); }); + // TODO: Correct handling of SystemMetadata and AuditStamp + components.addSchemas( + "SystemMetadata", new Schema().type(TYPE_OBJECT).additionalProperties(true)); + components.addSchemas("AuditStamp", new Schema().type(TYPE_OBJECT).additionalProperties(true)); return new OpenAPI().openapi("3.0.1").info(info).paths(paths).components(components); } @@ -185,7 +186,7 @@ private static PathItem buildSingleEntityPath(final EntitySpec entity) { .schema(new Schema().type(TYPE_STRING)), new Parameter() .in(NAME_QUERY) - .name("systemMetadata") + .name(NAME_SYSTEM_METADATA) .description("Include systemMetadata with response.") .schema(new Schema().type(TYPE_BOOLEAN)._default(false)), new Parameter() @@ -424,7 +425,7 @@ private static PathItem buildBatchGetEntityPath(final EntitySpec entity) { List.of( new Parameter() .in(NAME_QUERY) - .name("systemMetadata") + .name(NAME_SYSTEM_METADATA) .description("Include systemMetadata with response.") .schema(new Schema().type(TYPE_BOOLEAN)._default(false)))) .requestBody( @@ -575,12 
+576,19 @@ private static Schema buildAspectRefResponseSchema(final String aspectName) { .required(List.of(PROPERTY_VALUE)) .addProperty(PROPERTY_VALUE, new Schema<>().$ref(PATH_DEFINITIONS + aspectName)); result.addProperty( - "systemMetadata", + NAME_SYSTEM_METADATA, new Schema<>() .type(TYPE_OBJECT) .anyOf(List.of(new Schema().$ref(PATH_DEFINITIONS + "SystemMetadata"))) .description("System metadata for the aspect.") .nullable(true)); + result.addProperty( + NAME_AUDIT_STAMP, + new Schema<>() + .type(TYPE_OBJECT) + .anyOf(List.of(new Schema().$ref(PATH_DEFINITIONS + "AuditStamp"))) + .description("Audit stamp for the aspect.") + .nullable(true)); return result; } @@ -592,7 +600,7 @@ private static Schema buildAspectRefRequestSchema(final String aspectName) { .required(List.of(PROPERTY_VALUE)) .addProperty(PROPERTY_VALUE, new Schema<>().$ref(PATH_DEFINITIONS + aspectName)); result.addProperty( - "systemMetadata", + NAME_SYSTEM_METADATA, new Schema<>() .type(TYPE_OBJECT) .anyOf(List.of(new Schema().$ref(PATH_DEFINITIONS + "SystemMetadata"))) @@ -867,7 +875,7 @@ private static PathItem buildSingleEntityAspectPath( List.of( new Parameter() .in(NAME_QUERY) - .name("systemMetadata") + .name(NAME_SYSTEM_METADATA) .description("Include systemMetadata with response.") .schema(new Schema().type(TYPE_BOOLEAN)._default(false)))) .summary(String.format("Patch aspect %s on %s ", aspect, upperFirstEntity)) diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v3/controller/EntityController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v3/controller/EntityController.java index 9ca34934e4c65..a0478c9af1609 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v3/controller/EntityController.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v3/controller/EntityController.java @@ -13,7 +13,6 @@ import com.fasterxml.jackson.databind.node.ObjectNode; 
import com.linkedin.common.urn.Urn; import com.linkedin.data.ByteString; -import com.linkedin.data.template.RecordTemplate; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.metadata.aspect.batch.AspectsBatch; import com.linkedin.metadata.aspect.batch.BatchItem; @@ -28,12 +27,12 @@ import com.linkedin.metadata.utils.AuditStampUtils; import com.linkedin.metadata.utils.GenericRecordUtils; import com.linkedin.mxe.SystemMetadata; -import com.linkedin.util.Pair; import io.datahubproject.metadata.context.OperationContext; import io.datahubproject.metadata.context.RequestContext; import io.datahubproject.openapi.controller.GenericEntitiesController; import io.datahubproject.openapi.exception.InvalidUrnException; import io.datahubproject.openapi.exception.UnauthorizedException; +import io.datahubproject.openapi.v3.models.AspectItem; import io.datahubproject.openapi.v3.models.GenericAspectV3; import io.datahubproject.openapi.v3.models.GenericEntityScrollResultV3; import io.datahubproject.openapi.v3.models.GenericEntityV3; @@ -143,11 +142,27 @@ protected List buildEntityVersionedAspectList( .map( u -> GenericEntityV3.builder() - .build(objectMapper, u, toAspectMap(u, aspects.get(u), withSystemMetadata))) + .build( + objectMapper, u, toAspectItemMap(u, aspects.get(u), withSystemMetadata))) .collect(Collectors.toList()); } } + private Map toAspectItemMap( + Urn urn, List aspects, boolean withSystemMetadata) { + return aspects.stream() + .map( + a -> + Map.entry( + a.getName(), + AspectItem.builder() + .aspect(toRecordTemplate(lookupAspectSpec(urn, a.getName()), a)) + .systemMetadata(withSystemMetadata ? a.getSystemMetadata() : null) + .auditStamp(withSystemMetadata ? 
a.getCreated() : null) + .build())) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + } + @Override protected List buildEntityList( Set ingestResults, boolean withSystemMetadata) { @@ -156,15 +171,21 @@ protected List buildEntityList( Map> entityMap = ingestResults.stream().collect(Collectors.groupingBy(IngestResult::getUrn)); for (Map.Entry> urnAspects : entityMap.entrySet()) { - Map> aspectsMap = + Map aspectsMap = urnAspects.getValue().stream() .map( ingest -> Map.entry( ingest.getRequest().getAspectName(), - Pair.of( - ingest.getRequest().getRecordTemplate(), - withSystemMetadata ? ingest.getRequest().getSystemMetadata() : null))) + AspectItem.builder() + .aspect(ingest.getRequest().getRecordTemplate()) + .systemMetadata( + withSystemMetadata + ? ingest.getRequest().getSystemMetadata() + : null) + .auditStamp( + withSystemMetadata ? ingest.getRequest().getAuditStamp() : null) + .build())) .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); responseList.add( GenericEntityV3.builder().build(objectMapper, urnAspects.getKey(), aspectsMap)); @@ -183,9 +204,12 @@ protected GenericEntityV3 buildGenericEntity( updateAspectResult.getUrn(), Map.of( aspectName, - Pair.of( - updateAspectResult.getNewValue(), - withSystemMetadata ? updateAspectResult.getNewSystemMetadata() : null))); + AspectItem.builder() + .aspect(updateAspectResult.getNewValue()) + .systemMetadata( + withSystemMetadata ? updateAspectResult.getNewSystemMetadata() : null) + .auditStamp(withSystemMetadata ? 
updateAspectResult.getAuditStamp() : null) + .build())); } private List toRecordTemplates( From a4a887c866c362ecdd9ccb9e4df2591a01b90a3f Mon Sep 17 00:00:00 2001 From: AndreasHegerNuritas <163423418+AndreasHegerNuritas@users.noreply.github.com> Date: Thu, 8 Aug 2024 16:38:16 +0100 Subject: [PATCH 10/72] =?UTF-8?q?fix(ingest/redshift):=20replace=20r'\n'?= =?UTF-8?q?=20with=20'\n'=20to=20avoid=20token=20error=20redshift=20server?= =?UTF-8?q?less=E2=80=A6=20(#11111)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../datahub/ingestion/source/redshift/redshift_schema.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/redshift/redshift_schema.py b/metadata-ingestion/src/datahub/ingestion/source/redshift/redshift_schema.py index 6e88a50f898a5..2e628269edbc3 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/redshift/redshift_schema.py +++ b/metadata-ingestion/src/datahub/ingestion/source/redshift/redshift_schema.py @@ -504,7 +504,11 @@ def get_alter_table_commands( yield AlterTableRow( transaction_id=row[field_names.index("transaction_id")], session_id=session_id, - query_text=row[field_names.index("query_text")], + # See https://docs.aws.amazon.com/redshift/latest/dg/r_STL_QUERYTEXT.html + # for why we need to replace the \n with a newline. 
+ query_text=row[field_names.index("query_text")].replace( + r"\n", "\n" + ), start_time=row[field_names.index("start_time")], ) rows = cursor.fetchmany() From 3d9a9541f1ff37ea80dd9d7d44fe501909269495 Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Thu, 8 Aug 2024 13:54:14 -0500 Subject: [PATCH 11/72] fix(entiy-client): handle null entityUrn case for restli (#11122) --- .../entity/client/RestliEntityClient.java | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/RestliEntityClient.java b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/RestliEntityClient.java index 2a3ae5d006ae0..780c6c6a007c2 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/RestliEntityClient.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/RestliEntityClient.java @@ -50,6 +50,7 @@ import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.browse.BrowseResultV2; import com.linkedin.metadata.graph.LineageDirection; +import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.query.AutoCompleteResult; import com.linkedin.metadata.query.LineageFlags; import com.linkedin.metadata.query.ListResult; @@ -66,6 +67,7 @@ import com.linkedin.metadata.search.LineageSearchResult; import com.linkedin.metadata.search.ScrollResult; import com.linkedin.metadata.search.SearchResult; +import com.linkedin.metadata.utils.EntityKeyUtils; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.mxe.MetadataChangeProposalArray; import com.linkedin.mxe.PlatformEvent; @@ -1063,7 +1065,20 @@ public List batchIngestProposals( String result = sendClientRequest(requestBuilder, opContext.getSessionAuthentication()).getEntity(); return metadataChangeProposals.stream() - .map(proposal -> "success".equals(result) ? 
proposal.getEntityUrn().toString() : null) + .map( + proposal -> { + if ("success".equals(result)) { + if (proposal.getEntityUrn() != null) { + return proposal.getEntityUrn().toString(); + } else { + EntitySpec entitySpec = + opContext.getEntityRegistry().getEntitySpec(proposal.getEntityType()); + return EntityKeyUtils.getUrnFromProposal(proposal, entitySpec.getKeyAspectSpec()) + .toString(); + } + } + return null; + }) .collect(Collectors.toList()); } From 840b15083a17c5347c63b0e74b079e7b5ea70a1e Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Thu, 8 Aug 2024 14:05:55 -0700 Subject: [PATCH 12/72] fix(sql-parser): prevent bad urns from alter table lineage (#11092) --- .../goldens/v2_sqlite_operator.json | 112 +++++++++--------- .../v2_sqlite_operator_no_dag_listener.json | 64 +++++----- .../datahub/sql_parsing/sqlglot_lineage.py | 62 ++++++---- .../testing/check_sql_parser_result.py | 1 - .../test_bigquery_alter_table_column.json | 14 +++ .../goldens/test_snowflake_drop_schema.json | 12 ++ .../goldens/test_sqlite_drop_table.json | 14 +++ .../goldens/test_sqlite_drop_view.json | 14 +++ .../unit/sql_parsing/test_sqlglot_lineage.py | 51 ++++++++ 9 files changed, 231 insertions(+), 113 deletions(-) create mode 100644 metadata-ingestion/tests/unit/sql_parsing/goldens/test_bigquery_alter_table_column.json create mode 100644 metadata-ingestion/tests/unit/sql_parsing/goldens/test_snowflake_drop_schema.json create mode 100644 metadata-ingestion/tests/unit/sql_parsing/goldens/test_sqlite_drop_table.json create mode 100644 metadata-ingestion/tests/unit/sql_parsing/goldens/test_sqlite_drop_view.json diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_sqlite_operator.json b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_sqlite_operator.json index e7902d165051b..4bc34b7b0d3ce 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_sqlite_operator.json +++ 
b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_sqlite_operator.json @@ -350,8 +350,8 @@ "json": { "timestampMillis": 1717179743558, "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" + "partition": "FULL_TABLE_SNAPSHOT", + "type": "FULL_TABLE" }, "status": "STARTED", "attempt": 1 @@ -367,8 +367,8 @@ "json": { "timestampMillis": 1717179743932, "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" + "partition": "FULL_TABLE_SNAPSHOT", + "type": "FULL_TABLE" }, "actor": "urn:li:corpuser:airflow", "operationType": "CREATE", @@ -552,8 +552,8 @@ "json": { "timestampMillis": 1717179743960, "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" + "partition": "FULL_TABLE_SNAPSHOT", + "type": "FULL_TABLE" }, "status": "COMPLETE", "result": { @@ -742,8 +742,8 @@ "json": { "timestampMillis": 1717179748679, "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" + "partition": "FULL_TABLE_SNAPSHOT", + "type": "FULL_TABLE" }, "status": "STARTED", "attempt": 1 @@ -759,8 +759,8 @@ "json": { "timestampMillis": 1717179749258, "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" + "partition": "FULL_TABLE_SNAPSHOT", + "type": "FULL_TABLE" }, "actor": "urn:li:corpuser:airflow", "operationType": "CREATE", @@ -875,8 +875,8 @@ "json": { "timestampMillis": 1717179749324, "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" + "partition": "FULL_TABLE_SNAPSHOT", + "type": "FULL_TABLE" }, "status": "COMPLETE", "result": { @@ -1161,8 +1161,8 @@ "json": { "timestampMillis": 1717179757397, "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" + "partition": "FULL_TABLE_SNAPSHOT", + "type": "FULL_TABLE" }, "status": "STARTED", "attempt": 1 @@ -1178,8 +1178,8 @@ "json": { "timestampMillis": 1717179758424, "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" + 
"partition": "FULL_TABLE_SNAPSHOT", + "type": "FULL_TABLE" }, "actor": "urn:li:corpuser:airflow", "operationType": "CREATE", @@ -1420,8 +1420,8 @@ "json": { "timestampMillis": 1717179758496, "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" + "partition": "FULL_TABLE_SNAPSHOT", + "type": "FULL_TABLE" }, "status": "COMPLETE", "result": { @@ -1483,10 +1483,10 @@ "aspectName": "dataJobInputOutput", "aspect": { "json": { - "inputDatasets": [ + "inputDatasets": [], + "outputDatasets": [ "urn:li:dataset:(urn:li:dataPlatform:sqlite,public.costs,PROD)" ], - "outputDatasets": [], "inputDatajobs": [ "urn:li:dataJob:(urn:li:dataFlow:(airflow,sqlite_operator,prod),transform_cost_table)" ], @@ -1555,6 +1555,19 @@ } } }, +{ + "entityType": "dataProcessInstance", + "entityUrn": "urn:li:dataProcessInstance:07285de22276959612189d51336cc21a", + "changeType": "UPSERT", + "aspectName": "dataProcessInstanceOutput", + "aspect": { + "json": { + "outputs": [ + "urn:li:dataset:(urn:li:dataPlatform:sqlite,public.costs,PROD)" + ] + } + } +}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:sqlite,public.processed_costs,PROD)", @@ -1640,19 +1653,6 @@ } } }, -{ - "entityType": "dataProcessInstance", - "entityUrn": "urn:li:dataProcessInstance:07285de22276959612189d51336cc21a", - "changeType": "UPSERT", - "aspectName": "dataProcessInstanceInput", - "aspect": { - "json": { - "inputs": [ - "urn:li:dataset:(urn:li:dataPlatform:sqlite,public.costs,PROD)" - ] - } - } -}, { "entityType": "dataProcessInstance", "entityUrn": "urn:li:dataProcessInstance:07285de22276959612189d51336cc21a", @@ -1662,8 +1662,8 @@ "json": { "timestampMillis": 1718733767964, "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" + "partition": "FULL_TABLE_SNAPSHOT", + "type": "FULL_TABLE" }, "status": "STARTED", "attempt": 1 @@ -1679,8 +1679,8 @@ "json": { "timestampMillis": 1718733768638, "partitionSpec": { - "type": "FULL_TABLE", - 
"partition": "FULL_TABLE_SNAPSHOT" + "partition": "FULL_TABLE_SNAPSHOT", + "type": "FULL_TABLE" }, "status": "COMPLETE", "result": { @@ -1697,10 +1697,10 @@ "aspectName": "dataJobInputOutput", "aspect": { "json": { - "inputDatasets": [ + "inputDatasets": [], + "outputDatasets": [ "urn:li:dataset:(urn:li:dataPlatform:sqlite,public.processed_costs,PROD)" ], - "outputDatasets": [], "inputDatajobs": [ "urn:li:dataJob:(urn:li:dataFlow:(airflow,sqlite_operator,prod),transform_cost_table)" ], @@ -1809,19 +1809,6 @@ } } }, -{ - "entityType": "dataProcessInstance", - "entityUrn": "urn:li:dataProcessInstance:bab908abccf3cd6607b50fdaf3003372", - "changeType": "UPSERT", - "aspectName": "dataProcessInstanceInput", - "aspect": { - "json": { - "inputs": [ - "urn:li:dataset:(urn:li:dataPlatform:sqlite,public.processed_costs,PROD)" - ] - } - } -}, { "entityType": "dataProcessInstance", "entityUrn": "urn:li:dataProcessInstance:bab908abccf3cd6607b50fdaf3003372", @@ -1843,8 +1830,8 @@ "json": { "timestampMillis": 1718733773354, "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" + "partition": "FULL_TABLE_SNAPSHOT", + "type": "FULL_TABLE" }, "status": "STARTED", "attempt": 1 @@ -1860,8 +1847,8 @@ "json": { "timestampMillis": 1718733774147, "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" + "partition": "FULL_TABLE_SNAPSHOT", + "type": "FULL_TABLE" }, "status": "COMPLETE", "result": { @@ -1870,5 +1857,18 @@ } } } +}, +{ + "entityType": "dataProcessInstance", + "entityUrn": "urn:li:dataProcessInstance:bab908abccf3cd6607b50fdaf3003372", + "changeType": "UPSERT", + "aspectName": "dataProcessInstanceOutput", + "aspect": { + "json": { + "outputs": [ + "urn:li:dataset:(urn:li:dataPlatform:sqlite,public.processed_costs,PROD)" + ] + } + } } ] \ No newline at end of file diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_sqlite_operator_no_dag_listener.json 
b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_sqlite_operator_no_dag_listener.json index a9af068e2e4e9..99bda0e0f2569 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_sqlite_operator_no_dag_listener.json +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_sqlite_operator_no_dag_listener.json @@ -336,8 +336,8 @@ "json": { "timestampMillis": 1717180072004, "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" + "partition": "FULL_TABLE_SNAPSHOT", + "type": "FULL_TABLE" }, "status": "STARTED", "attempt": 1 @@ -382,8 +382,8 @@ "json": { "timestampMillis": 1719864194882, "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" + "partition": "FULL_TABLE_SNAPSHOT", + "type": "FULL_TABLE" }, "actor": "urn:li:corpuser:airflow", "operationType": "CREATE", @@ -435,8 +435,8 @@ "json": { "timestampMillis": 1717180072275, "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" + "partition": "FULL_TABLE_SNAPSHOT", + "type": "FULL_TABLE" }, "status": "COMPLETE", "result": { @@ -641,8 +641,8 @@ "json": { "timestampMillis": 1717180078196, "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" + "partition": "FULL_TABLE_SNAPSHOT", + "type": "FULL_TABLE" }, "status": "STARTED", "attempt": 1 @@ -722,8 +722,8 @@ "json": { "timestampMillis": 1717180078619, "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" + "partition": "FULL_TABLE_SNAPSHOT", + "type": "FULL_TABLE" }, "status": "COMPLETE", "result": { @@ -1000,8 +1000,8 @@ "json": { "timestampMillis": 1717180084642, "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" + "partition": "FULL_TABLE_SNAPSHOT", + "type": "FULL_TABLE" }, "status": "STARTED", "attempt": 1 @@ -1081,8 +1081,8 @@ "json": { "timestampMillis": 1717180085266, "partitionSpec": { - "type": "FULL_TABLE", - "partition": 
"FULL_TABLE_SNAPSHOT" + "partition": "FULL_TABLE_SNAPSHOT", + "type": "FULL_TABLE" }, "status": "COMPLETE", "result": { @@ -1186,10 +1186,10 @@ "aspectName": "dataJobInputOutput", "aspect": { "json": { - "inputDatasets": [ + "inputDatasets": [], + "outputDatasets": [ "urn:li:dataset:(urn:li:dataPlatform:sqlite,public.costs,PROD)" ], - "outputDatasets": [], "inputDatajobs": [ "urn:li:dataJob:(urn:li:dataFlow:(airflow,sqlite_operator,prod),transform_cost_table)" ], @@ -1287,8 +1287,8 @@ "json": { "timestampMillis": 1717180091148, "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" + "partition": "FULL_TABLE_SNAPSHOT", + "type": "FULL_TABLE" }, "status": "STARTED", "attempt": 1 @@ -1368,8 +1368,8 @@ "json": { "timestampMillis": 1717180091923, "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" + "partition": "FULL_TABLE_SNAPSHOT", + "type": "FULL_TABLE" }, "status": "COMPLETE", "result": { @@ -1499,10 +1499,10 @@ "aspectName": "dataJobInputOutput", "aspect": { "json": { - "inputDatasets": [ + "inputDatasets": [], + "outputDatasets": [ "urn:li:dataset:(urn:li:dataPlatform:sqlite,public.processed_costs,PROD)" ], - "outputDatasets": [], "inputDatajobs": [ "urn:li:dataJob:(urn:li:dataFlow:(airflow,sqlite_operator,prod),transform_cost_table)" ], @@ -1613,8 +1613,8 @@ "json": { "timestampMillis": 1717180096108, "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" + "partition": "FULL_TABLE_SNAPSHOT", + "type": "FULL_TABLE" }, "status": "STARTED", "attempt": 1 @@ -1630,8 +1630,8 @@ "json": { "timestampMillis": 1719864203487, "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" + "partition": "FULL_TABLE_SNAPSHOT", + "type": "FULL_TABLE" }, "actor": "urn:li:corpuser:airflow", "operationType": "CREATE", @@ -1712,8 +1712,8 @@ "json": { "timestampMillis": 1717180096993, "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" + "partition": 
"FULL_TABLE_SNAPSHOT", + "type": "FULL_TABLE" }, "status": "COMPLETE", "result": { @@ -1727,10 +1727,10 @@ "entityType": "dataProcessInstance", "entityUrn": "urn:li:dataProcessInstance:bab908abccf3cd6607b50fdaf3003372", "changeType": "UPSERT", - "aspectName": "dataProcessInstanceInput", + "aspectName": "dataProcessInstanceOutput", "aspect": { "json": { - "inputs": [ + "outputs": [ "urn:li:dataset:(urn:li:dataPlatform:sqlite,public.processed_costs,PROD)" ] } @@ -1740,10 +1740,10 @@ "entityType": "dataProcessInstance", "entityUrn": "urn:li:dataProcessInstance:07285de22276959612189d51336cc21a", "changeType": "UPSERT", - "aspectName": "dataProcessInstanceInput", + "aspectName": "dataProcessInstanceOutput", "aspect": { "json": { - "inputs": [ + "outputs": [ "urn:li:dataset:(urn:li:dataPlatform:sqlite,public.costs,PROD)" ] } diff --git a/metadata-ingestion/src/datahub/sql_parsing/sqlglot_lineage.py b/metadata-ingestion/src/datahub/sql_parsing/sqlglot_lineage.py index 976ff8bcc9b3f..0146343002171 100644 --- a/metadata-ingestion/src/datahub/sql_parsing/sqlglot_lineage.py +++ b/metadata-ingestion/src/datahub/sql_parsing/sqlglot_lineage.py @@ -189,35 +189,49 @@ def _table_level_lineage( statement: sqlglot.Expression, dialect: sqlglot.Dialect ) -> Tuple[Set[_TableName], Set[_TableName]]: # Generate table-level lineage. - modified = { - _TableName.from_sqlglot_table(expr.this) - for expr in statement.find_all( - sqlglot.exp.Create, - sqlglot.exp.Insert, - sqlglot.exp.Update, - sqlglot.exp.Delete, - sqlglot.exp.Merge, - ) - # In some cases like "MERGE ... then INSERT (col1, col2) VALUES (col1, col2)", - # the `this` on the INSERT part isn't a table. - if isinstance(expr.this, sqlglot.exp.Table) - } | { - # For statements that include a column list, like - # CREATE DDL statements and `INSERT INTO table (col1, col2) SELECT ...` - # the table name is nested inside a Schema object. 
- _TableName.from_sqlglot_table(expr.this.this) - for expr in statement.find_all( - sqlglot.exp.Create, - sqlglot.exp.Insert, - ) - if isinstance(expr.this, sqlglot.exp.Schema) - and isinstance(expr.this.this, sqlglot.exp.Table) - } + modified = ( + { + _TableName.from_sqlglot_table(expr.this) + for expr in statement.find_all( + sqlglot.exp.Create, + sqlglot.exp.Insert, + sqlglot.exp.Update, + sqlglot.exp.Delete, + sqlglot.exp.Merge, + sqlglot.exp.AlterTable, + ) + # In some cases like "MERGE ... then INSERT (col1, col2) VALUES (col1, col2)", + # the `this` on the INSERT part isn't a table. + if isinstance(expr.this, sqlglot.exp.Table) + } + | { + # For statements that include a column list, like + # CREATE DDL statements and `INSERT INTO table (col1, col2) SELECT ...` + # the table name is nested inside a Schema object. + _TableName.from_sqlglot_table(expr.this.this) + for expr in statement.find_all( + sqlglot.exp.Create, + sqlglot.exp.Insert, + ) + if isinstance(expr.this, sqlglot.exp.Schema) + and isinstance(expr.this.this, sqlglot.exp.Table) + } + | { + # For drop statements, we only want it if a table/view is being dropped. + # Other "kinds" will not have table.name populated. 
+ _TableName.from_sqlglot_table(expr.this) + for expr in ([statement] if isinstance(statement, sqlglot.exp.Drop) else []) + if isinstance(expr.this, sqlglot.exp.Table) + and expr.this.this + and expr.this.name + } + ) tables = ( { _TableName.from_sqlglot_table(table) for table in statement.find_all(sqlglot.exp.Table) + if not isinstance(table.parent, sqlglot.exp.Drop) } # ignore references created in this query - modified diff --git a/metadata-ingestion/src/datahub/testing/check_sql_parser_result.py b/metadata-ingestion/src/datahub/testing/check_sql_parser_result.py index 39c0dddd31400..72b5f6c5e26e4 100644 --- a/metadata-ingestion/src/datahub/testing/check_sql_parser_result.py +++ b/metadata-ingestion/src/datahub/testing/check_sql_parser_result.py @@ -15,7 +15,6 @@ logger = logging.getLogger(__name__) -# TODO: Hook this into the standard --update-golden-files mechanism. UPDATE_FILES = os.environ.get("UPDATE_SQLPARSER_FILES", "false").lower() == "true" diff --git a/metadata-ingestion/tests/unit/sql_parsing/goldens/test_bigquery_alter_table_column.json b/metadata-ingestion/tests/unit/sql_parsing/goldens/test_bigquery_alter_table_column.json new file mode 100644 index 0000000000000..3c6c9737e8e19 --- /dev/null +++ b/metadata-ingestion/tests/unit/sql_parsing/goldens/test_bigquery_alter_table_column.json @@ -0,0 +1,14 @@ +{ + "query_type": "UNKNOWN", + "query_type_props": {}, + "query_fingerprint": "7d04253c3add0194c557942ef9b7485f38e68762d300dad364b9cec8656035b3", + "in_tables": [], + "out_tables": [ + "urn:li:dataset:(urn:li:dataPlatform:bigquery,my-bq-project.covid_data.covid_deaths,PROD)" + ], + "column_lineage": null, + "debug_info": { + "confidence": 0.2, + "generalized_statement": "ALTER TABLE `my-bq-project.covid_data.covid_deaths` DROP COLUMN patient_name" + } +} \ No newline at end of file diff --git a/metadata-ingestion/tests/unit/sql_parsing/goldens/test_snowflake_drop_schema.json 
b/metadata-ingestion/tests/unit/sql_parsing/goldens/test_snowflake_drop_schema.json new file mode 100644 index 0000000000000..2784b8e9543b2 --- /dev/null +++ b/metadata-ingestion/tests/unit/sql_parsing/goldens/test_snowflake_drop_schema.json @@ -0,0 +1,12 @@ +{ + "query_type": "UNKNOWN", + "query_type_props": {}, + "query_fingerprint": "4eefab57619a812a94030acce0071857561265945e79d798563adb53bd0b9646", + "in_tables": [], + "out_tables": [], + "column_lineage": null, + "debug_info": { + "confidence": 0.9, + "generalized_statement": "DROP SCHEMA my_schema" + } +} \ No newline at end of file diff --git a/metadata-ingestion/tests/unit/sql_parsing/goldens/test_sqlite_drop_table.json b/metadata-ingestion/tests/unit/sql_parsing/goldens/test_sqlite_drop_table.json new file mode 100644 index 0000000000000..ae8b3f99897dc --- /dev/null +++ b/metadata-ingestion/tests/unit/sql_parsing/goldens/test_sqlite_drop_table.json @@ -0,0 +1,14 @@ +{ + "query_type": "UNKNOWN", + "query_type_props": {}, + "query_fingerprint": "d1c29ad73325b08bb66e62ec00ba1d5be4412ec72b4bbc9c094f1272b9da4f86", + "in_tables": [], + "out_tables": [ + "urn:li:dataset:(urn:li:dataPlatform:sqlite,my_schema.my_table,PROD)" + ], + "column_lineage": null, + "debug_info": { + "confidence": 0.2, + "generalized_statement": "DROP TABLE my_schema.my_table" + } +} \ No newline at end of file diff --git a/metadata-ingestion/tests/unit/sql_parsing/goldens/test_sqlite_drop_view.json b/metadata-ingestion/tests/unit/sql_parsing/goldens/test_sqlite_drop_view.json new file mode 100644 index 0000000000000..6650ef396a570 --- /dev/null +++ b/metadata-ingestion/tests/unit/sql_parsing/goldens/test_sqlite_drop_view.json @@ -0,0 +1,14 @@ +{ + "query_type": "UNKNOWN", + "query_type_props": {}, + "query_fingerprint": "35a3c60e7ed98884dde3f1f5fe9079f844832430589a3326b97d617b8303f191", + "in_tables": [], + "out_tables": [ + "urn:li:dataset:(urn:li:dataPlatform:sqlite,my_schema.my_view,PROD)" + ], + "column_lineage": null, + "debug_info": 
{ + "confidence": 0.2, + "generalized_statement": "DROP VIEW my_schema.my_view" + } +} \ No newline at end of file diff --git a/metadata-ingestion/tests/unit/sql_parsing/test_sqlglot_lineage.py b/metadata-ingestion/tests/unit/sql_parsing/test_sqlglot_lineage.py index e5b669329f16c..3096c9b8269a1 100644 --- a/metadata-ingestion/tests/unit/sql_parsing/test_sqlglot_lineage.py +++ b/metadata-ingestion/tests/unit/sql_parsing/test_sqlglot_lineage.py @@ -2,11 +2,22 @@ import pytest +import datahub.testing.check_sql_parser_result as checker from datahub.testing.check_sql_parser_result import assert_sql_result RESOURCE_DIR = pathlib.Path(__file__).parent / "goldens" +@pytest.fixture(autouse=True) +def set_update_sql_parser( + pytestconfig: pytest.Config, monkeypatch: pytest.MonkeyPatch +) -> None: + update_golden = pytestconfig.getoption("--update-golden-files") + + if update_golden: + monkeypatch.setattr(checker, "UPDATE_FILES", True) + + def test_invalid_sql(): assert_sql_result( """ @@ -1202,3 +1213,43 @@ def test_bigquery_information_schema_query() -> None: dialect="bigquery", expected_file=RESOURCE_DIR / "test_bigquery_information_schema_query.json", ) + + +def test_bigquery_alter_table_column() -> None: + assert_sql_result( + """\ +ALTER TABLE `my-bq-project.covid_data.covid_deaths` drop COLUMN patient_name + """, + dialect="bigquery", + expected_file=RESOURCE_DIR / "test_bigquery_alter_table_column.json", + ) + + +def test_sqlite_drop_table() -> None: + assert_sql_result( + """\ +DROP TABLE my_schema.my_table +""", + dialect="sqlite", + expected_file=RESOURCE_DIR / "test_sqlite_drop_table.json", + ) + + +def test_sqlite_drop_view() -> None: + assert_sql_result( + """\ +DROP VIEW my_schema.my_view +""", + dialect="sqlite", + expected_file=RESOURCE_DIR / "test_sqlite_drop_view.json", + ) + + +def test_snowflake_drop_schema() -> None: + assert_sql_result( + """\ +DROP SCHEMA my_schema +""", + dialect="snowflake", + expected_file=RESOURCE_DIR / 
"test_snowflake_drop_schema.json", + ) From 78336c9f58fb89a25f4228b2e8b5c0322d66807f Mon Sep 17 00:00:00 2001 From: Mayuri Nehate <33225191+mayurinehate@users.noreply.github.com> Date: Fri, 9 Aug 2024 09:18:51 +0530 Subject: [PATCH 13/72] fix(ingest/bigquery): use small batch size if use_tables_list_query_v2 is set (#11121) --- .../datahub/ingestion/source/bigquery_v2/bigquery_schema_gen.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_schema_gen.py b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_schema_gen.py index 46ec75edb9734..c6a50a1c977f4 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_schema_gen.py +++ b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_schema_gen.py @@ -985,7 +985,7 @@ def get_tables_for_dataset( # https://cloud.google.com/bigquery/docs/information-schema-partitions max_batch_size: int = ( self.config.number_of_datasets_process_in_batch - if not self.config.is_profiling_enabled() + if not self.config.have_table_data_read_permission else self.config.number_of_datasets_process_in_batch_if_profiling_enabled ) From aa07e2a9371e8a5c90c87217dfc136dda38f60f2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20L=C3=BCdin?= <13187726+Masterchen09@users.noreply.github.com> Date: Fri, 9 Aug 2024 17:59:38 +0200 Subject: [PATCH 14/72] fix(graphql): add missing entities to EntityTypeMapper and EntityTypeUrnMapper (#10366) --- .../types/entitytype/EntityTypeMapper.java | 33 ++++++++------ .../types/entitytype/EntityTypeUrnMapper.java | 43 ++++++++++++++++--- 2 files changed, 57 insertions(+), 19 deletions(-) diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeMapper.java index 26835f9e57dcd..77457a814bd67 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeMapper.java @@ -15,40 +15,49 @@ public class EntityTypeMapper { static final Map ENTITY_TYPE_TO_NAME = ImmutableMap.builder() + .put(EntityType.DOMAIN, Constants.DOMAIN_ENTITY_NAME) .put(EntityType.DATASET, Constants.DATASET_ENTITY_NAME) - .put(EntityType.ROLE, Constants.ROLE_ENTITY_NAME) .put(EntityType.CORP_USER, Constants.CORP_USER_ENTITY_NAME) .put(EntityType.CORP_GROUP, Constants.CORP_GROUP_ENTITY_NAME) .put(EntityType.DATA_PLATFORM, Constants.DATA_PLATFORM_ENTITY_NAME) + .put(EntityType.ER_MODEL_RELATIONSHIP, Constants.ER_MODEL_RELATIONSHIP_ENTITY_NAME) .put(EntityType.DASHBOARD, Constants.DASHBOARD_ENTITY_NAME) + .put(EntityType.NOTEBOOK, Constants.NOTEBOOK_ENTITY_NAME) .put(EntityType.CHART, Constants.CHART_ENTITY_NAME) - .put(EntityType.TAG, Constants.TAG_ENTITY_NAME) .put(EntityType.DATA_FLOW, Constants.DATA_FLOW_ENTITY_NAME) .put(EntityType.DATA_JOB, Constants.DATA_JOB_ENTITY_NAME) - .put(EntityType.DATA_PROCESS_INSTANCE, Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME) + .put(EntityType.TAG, Constants.TAG_ENTITY_NAME) .put(EntityType.GLOSSARY_TERM, Constants.GLOSSARY_TERM_ENTITY_NAME) .put(EntityType.GLOSSARY_NODE, Constants.GLOSSARY_NODE_ENTITY_NAME) + .put(EntityType.CONTAINER, Constants.CONTAINER_ENTITY_NAME) .put(EntityType.MLMODEL, Constants.ML_MODEL_ENTITY_NAME) .put(EntityType.MLMODEL_GROUP, Constants.ML_MODEL_GROUP_ENTITY_NAME) .put(EntityType.MLFEATURE_TABLE, Constants.ML_FEATURE_TABLE_ENTITY_NAME) .put(EntityType.MLFEATURE, Constants.ML_FEATURE_ENTITY_NAME) .put(EntityType.MLPRIMARY_KEY, Constants.ML_PRIMARY_KEY_ENTITY_NAME) - .put(EntityType.CONTAINER, Constants.CONTAINER_ENTITY_NAME) - .put(EntityType.DOMAIN, Constants.DOMAIN_ENTITY_NAME) - .put(EntityType.NOTEBOOK, Constants.NOTEBOOK_ENTITY_NAME) + .put(EntityType.INGESTION_SOURCE, 
Constants.INGESTION_SOURCE_ENTITY_NAME) + .put(EntityType.EXECUTION_REQUEST, Constants.EXECUTION_REQUEST_ENTITY_NAME) + .put(EntityType.ASSERTION, Constants.ASSERTION_ENTITY_NAME) + .put(EntityType.DATA_PROCESS_INSTANCE, Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME) .put(EntityType.DATA_PLATFORM_INSTANCE, Constants.DATA_PLATFORM_INSTANCE_ENTITY_NAME) + .put(EntityType.ACCESS_TOKEN, Constants.ACCESS_TOKEN_ENTITY_NAME) .put(EntityType.TEST, Constants.TEST_ENTITY_NAME) - .put(EntityType.ER_MODEL_RELATIONSHIP, Constants.ER_MODEL_RELATIONSHIP_ENTITY_NAME) + .put(EntityType.DATAHUB_POLICY, Constants.POLICY_ENTITY_NAME) + .put(EntityType.DATAHUB_ROLE, Constants.DATAHUB_ROLE_ENTITY_NAME) + .put(EntityType.POST, Constants.POST_ENTITY_NAME) + .put(EntityType.SCHEMA_FIELD, Constants.SCHEMA_FIELD_ENTITY_NAME) .put(EntityType.DATAHUB_VIEW, Constants.DATAHUB_VIEW_ENTITY_NAME) + .put(EntityType.QUERY, Constants.QUERY_ENTITY_NAME) .put(EntityType.DATA_PRODUCT, Constants.DATA_PRODUCT_ENTITY_NAME) - .put(EntityType.SCHEMA_FIELD, Constants.SCHEMA_FIELD_ENTITY_NAME) + .put(EntityType.CUSTOM_OWNERSHIP_TYPE, Constants.OWNERSHIP_TYPE_ENTITY_NAME) + .put(EntityType.INCIDENT, Constants.INCIDENT_ENTITY_NAME) + .put(EntityType.ROLE, Constants.ROLE_ENTITY_NAME) .put(EntityType.STRUCTURED_PROPERTY, Constants.STRUCTURED_PROPERTY_ENTITY_NAME) - .put(EntityType.ASSERTION, Constants.ASSERTION_ENTITY_NAME) + .put(EntityType.FORM, Constants.FORM_ENTITY_NAME) + .put(EntityType.DATA_TYPE, Constants.DATA_TYPE_ENTITY_NAME) + .put(EntityType.ENTITY_TYPE, Constants.ENTITY_TYPE_ENTITY_NAME) .put(EntityType.RESTRICTED, Constants.RESTRICTED_ENTITY_NAME) .put(EntityType.BUSINESS_ATTRIBUTE, Constants.BUSINESS_ATTRIBUTE_ENTITY_NAME) - .put(EntityType.QUERY, Constants.QUERY_ENTITY_NAME) - .put(EntityType.POST, Constants.POST_ENTITY_NAME) - .put(EntityType.FORM, Constants.FORM_ENTITY_NAME) .build(); private static final Map ENTITY_NAME_TO_TYPE = diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeUrnMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeUrnMapper.java index 9e9bf86e5fe7f..334faf753cb8b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeUrnMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeUrnMapper.java @@ -20,34 +20,63 @@ public class EntityTypeUrnMapper { static final Map ENTITY_NAME_TO_ENTITY_TYPE_URN = ImmutableMap.builder() + .put(Constants.DOMAIN_ENTITY_NAME, "urn:li:entityType:datahub.domain") .put(Constants.DATASET_ENTITY_NAME, "urn:li:entityType:datahub.dataset") - .put(Constants.ROLE_ENTITY_NAME, "urn:li:entityType:datahub.role") .put(Constants.CORP_USER_ENTITY_NAME, "urn:li:entityType:datahub.corpuser") .put(Constants.CORP_GROUP_ENTITY_NAME, "urn:li:entityType:datahub.corpGroup") .put(Constants.DATA_PLATFORM_ENTITY_NAME, "urn:li:entityType:datahub.dataPlatform") + .put( + Constants.ER_MODEL_RELATIONSHIP_ENTITY_NAME, + "urn:li:entityType:datahub.erModelRelationship") .put(Constants.DASHBOARD_ENTITY_NAME, "urn:li:entityType:datahub.dashboard") + .put(Constants.NOTEBOOK_ENTITY_NAME, "urn:li:entityType:datahub.notebook") .put(Constants.CHART_ENTITY_NAME, "urn:li:entityType:datahub.chart") - .put(Constants.TAG_ENTITY_NAME, "urn:li:entityType:datahub.tag") .put(Constants.DATA_FLOW_ENTITY_NAME, "urn:li:entityType:datahub.dataFlow") .put(Constants.DATA_JOB_ENTITY_NAME, "urn:li:entityType:datahub.dataJob") + .put(Constants.TAG_ENTITY_NAME, "urn:li:entityType:datahub.tag") .put(Constants.GLOSSARY_TERM_ENTITY_NAME, "urn:li:entityType:datahub.glossaryTerm") .put(Constants.GLOSSARY_NODE_ENTITY_NAME, "urn:li:entityType:datahub.glossaryNode") + .put(Constants.CONTAINER_ENTITY_NAME, "urn:li:entityType:datahub.container") .put(Constants.ML_MODEL_ENTITY_NAME, "urn:li:entityType:datahub.mlModel") 
.put(Constants.ML_MODEL_GROUP_ENTITY_NAME, "urn:li:entityType:datahub.mlModelGroup") .put(Constants.ML_FEATURE_TABLE_ENTITY_NAME, "urn:li:entityType:datahub.mlFeatureTable") .put(Constants.ML_FEATURE_ENTITY_NAME, "urn:li:entityType:datahub.mlFeature") .put(Constants.ML_PRIMARY_KEY_ENTITY_NAME, "urn:li:entityType:datahub.mlPrimaryKey") - .put(Constants.CONTAINER_ENTITY_NAME, "urn:li:entityType:datahub.container") - .put(Constants.DOMAIN_ENTITY_NAME, "urn:li:entityType:datahub.domain") - .put(Constants.NOTEBOOK_ENTITY_NAME, "urn:li:entityType:datahub.notebook") + .put( + Constants.INGESTION_SOURCE_ENTITY_NAME, + "urn:li:entityType:datahub.dataHubIngestionSource") + .put( + Constants.EXECUTION_REQUEST_ENTITY_NAME, + "urn:li:entityType:datahub.dataHubExecutionRequest") + .put(Constants.ASSERTION_ENTITY_NAME, "urn:li:entityType:datahub.assertion") + .put( + Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME, + "urn:li:entityType:datahub.dataProcessInstance") .put( Constants.DATA_PLATFORM_INSTANCE_ENTITY_NAME, "urn:li:entityType:datahub.dataPlatformInstance") + .put(Constants.ACCESS_TOKEN_ENTITY_NAME, "urn:li:entityType:datahub.dataHubAccessToken") .put(Constants.TEST_ENTITY_NAME, "urn:li:entityType:datahub.test") + .put(Constants.POLICY_ENTITY_NAME, "urn:li:entityType:datahub.dataHubPolicy") + .put(Constants.DATAHUB_ROLE_ENTITY_NAME, "urn:li:entityType:datahub.dataHubRole") + .put(Constants.POST_ENTITY_NAME, "urn:li:entityType:datahub.post") + .put(Constants.SCHEMA_FIELD_ENTITY_NAME, "urn:li:entityType:datahub.schemaField") .put(Constants.DATAHUB_VIEW_ENTITY_NAME, "urn:li:entityType:datahub.dataHubView") + .put(Constants.QUERY_ENTITY_NAME, "urn:li:entityType:datahub.query") .put(Constants.DATA_PRODUCT_ENTITY_NAME, "urn:li:entityType:datahub.dataProduct") - .put(Constants.ASSERTION_ENTITY_NAME, "urn:li:entityType:datahub.assertion") - .put(Constants.SCHEMA_FIELD_ENTITY_NAME, "urn:li:entityType:datahub.schemaField") + .put(Constants.OWNERSHIP_TYPE_ENTITY_NAME, 
"urn:li:entityType:datahub.ownershipType") + .put(Constants.INCIDENT_ENTITY_NAME, "urn:li:entityType:datahub.incident") + .put(Constants.ROLE_ENTITY_NAME, "urn:li:entityType:datahub.role") + .put( + Constants.STRUCTURED_PROPERTY_ENTITY_NAME, + "urn:li:entityType:datahub.structuredProperty") + .put(Constants.FORM_ENTITY_NAME, "urn:li:entityType:datahub.form") + .put(Constants.DATA_TYPE_ENTITY_NAME, "urn:li:entityType:datahub.dataType") + .put(Constants.ENTITY_TYPE_ENTITY_NAME, "urn:li:entityType:datahub.entityType") + .put(Constants.RESTRICTED_ENTITY_NAME, "urn:li:entityType:datahub.restricted") + .put( + Constants.BUSINESS_ATTRIBUTE_ENTITY_NAME, + "urn:li:entityType:datahub.businessAttribute") .build(); private static final Map ENTITY_TYPE_URN_TO_NAME = From 3a38415d6b1497f439cb8fffa2b69032e35cf04a Mon Sep 17 00:00:00 2001 From: jayasimhankv <145704974+jayasimhankv@users.noreply.github.com> Date: Fri, 9 Aug 2024 11:02:17 -0500 Subject: [PATCH 15/72] feat(ui): Changes to allow editable dataset name (#10608) Co-authored-by: Jay Kadambi --- .../graphql/featureflags/FeatureFlags.java | 1 + .../resolvers/config/AppConfigResolver.java | 1 + .../resolvers/mutate/UpdateNameResolver.java | 36 +++++++++++++++++++ .../types/dataset/mappers/DatasetMapper.java | 4 +++ .../mappers/DatasetUpdateInputMapper.java | 9 +++-- .../src/main/resources/app.graphql | 5 +++ .../src/main/resources/entity.graphql | 9 +++++ .../src/app/entity/dataset/DatasetEntity.tsx | 7 ++-- .../profile/header/EntityHeader.tsx | 10 ++++-- datahub-web-react/src/app/useAppConfig.ts | 5 +++ datahub-web-react/src/appConfigContext.tsx | 1 + datahub-web-react/src/graphql/app.graphql | 1 + datahub-web-react/src/graphql/browse.graphql | 1 + .../src/graphql/fragments.graphql | 1 + datahub-web-react/src/graphql/preview.graphql | 1 + datahub-web-react/src/graphql/search.graphql | 1 + .../dataset/EditableDatasetProperties.pdl | 9 +++++ ...com.linkedin.entity.entities.snapshot.json | 9 +++++ 
...m.linkedin.platform.platform.snapshot.json | 9 +++++ 19 files changed, 113 insertions(+), 7 deletions(-) diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/featureflags/FeatureFlags.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/featureflags/FeatureFlags.java index 85a2c09ed79a7..167515a13c4da 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/featureflags/FeatureFlags.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/featureflags/FeatureFlags.java @@ -21,5 +21,6 @@ public class FeatureFlags { private boolean schemaFieldEntityFetchEnabled = false; private boolean businessAttributeEntityEnabled = false; private boolean dataContractsEnabled = false; + private boolean editableDatasetNameEnabled = false; private boolean showSeparateSiblings = false; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/config/AppConfigResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/config/AppConfigResolver.java index fb1672d54dc97..259d05c631557 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/config/AppConfigResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/config/AppConfigResolver.java @@ -186,6 +186,7 @@ public CompletableFuture get(final DataFetchingEnvironment environmen .setNestedDomainsEnabled(_featureFlags.isNestedDomainsEnabled()) .setPlatformBrowseV2(_featureFlags.isPlatformBrowseV2()) .setDataContractsEnabled(_featureFlags.isDataContractsEnabled()) + .setEditableDatasetNameEnabled(_featureFlags.isEditableDatasetNameEnabled()) .setShowSeparateSiblings(_featureFlags.isShowSeparateSiblings()) .build(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateNameResolver.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateNameResolver.java index 1d90720fc6902..ad6dbbe635ed1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateNameResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateNameResolver.java @@ -4,9 +4,11 @@ import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.persistAspect; import com.linkedin.businessattribute.BusinessAttributeInfo; +import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; +import com.linkedin.data.template.SetMode; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; @@ -20,6 +22,7 @@ import com.linkedin.datahub.graphql.resolvers.mutate.util.DomainUtils; import com.linkedin.datahub.graphql.resolvers.mutate.util.GlossaryUtils; import com.linkedin.dataproduct.DataProductProperties; +import com.linkedin.dataset.EditableDatasetProperties; import com.linkedin.domain.DomainProperties; import com.linkedin.domain.Domains; import com.linkedin.entity.client.EntityClient; @@ -70,6 +73,8 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw return updateDataProductName(targetUrn, input, context); case Constants.BUSINESS_ATTRIBUTE_ENTITY_NAME: return updateBusinessAttributeName(targetUrn, input, environment.getContext()); + case Constants.DATASET_ENTITY_NAME: + return updateDatasetName(targetUrn, input, environment.getContext()); default: throw new RuntimeException( String.format( @@ -236,6 +241,37 @@ private Boolean updateGroupName(Urn targetUrn, UpdateNameInput input, QueryConte "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } + // udpates editable dataset properties aspect's name field + private Boolean updateDatasetName(Urn targetUrn, UpdateNameInput input, QueryContext context) { + if (AuthorizationUtils.canEditProperties(targetUrn, context)) { + try { + if (input.getName() != null) { + final EditableDatasetProperties editableDatasetProperties = + new EditableDatasetProperties(); + editableDatasetProperties.setName(input.getName()); + final AuditStamp auditStamp = new AuditStamp(); + Urn actor = UrnUtils.getUrn(context.getActorUrn()); + auditStamp.setActor(actor, SetMode.IGNORE_NULL); + auditStamp.setTime(System.currentTimeMillis()); + editableDatasetProperties.setLastModified(auditStamp); + persistAspect( + context.getOperationContext(), + targetUrn, + Constants.EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, + editableDatasetProperties, + actor, + _entityService); + } + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } + private Boolean updateDataProductName( Urn targetUrn, UpdateNameInput input, QueryContext context) { try { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java index 89d5aa8621bf0..a7b5f6de0c183 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java @@ -222,6 +222,7 @@ private void mapDatasetProperties( properties.setQualifiedName(gmsProperties.getQualifiedName()); dataset.setProperties(properties); dataset.setDescription(properties.getDescription()); + dataset.setName(properties.getName()); if (gmsProperties.getUri() != null) { dataset.setUri(gmsProperties.getUri().toString()); } @@ -248,6 +249,9 @@ private void mapEditableDatasetProperties(@Nonnull Dataset dataset, @Nonnull Dat new EditableDatasetProperties(dataMap); final DatasetEditableProperties editableProperties = new DatasetEditableProperties(); editableProperties.setDescription(editableDatasetProperties.getDescription()); + if (editableDatasetProperties.getName() != null) { + editableProperties.setName(editableDatasetProperties.getName()); + } dataset.setEditableProperties(editableProperties); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetUpdateInputMapper.java index 122298bcab654..104dc0e104341 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetUpdateInputMapper.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetUpdateInputMapper.java @@ -111,8 +111,13 @@ public Collection apply( if (datasetUpdateInput.getEditableProperties() != null) { final EditableDatasetProperties editableDatasetProperties = new EditableDatasetProperties(); - editableDatasetProperties.setDescription( - datasetUpdateInput.getEditableProperties().getDescription()); + if (datasetUpdateInput.getEditableProperties().getDescription() != null) { + editableDatasetProperties.setDescription( + datasetUpdateInput.getEditableProperties().getDescription()); + } + if (datasetUpdateInput.getEditableProperties().getName() != null) { + editableDatasetProperties.setName(datasetUpdateInput.getEditableProperties().getName()); + } editableDatasetProperties.setLastModified(auditStamp); editableDatasetProperties.setCreated(auditStamp); proposals.add( diff --git a/datahub-graphql-core/src/main/resources/app.graphql b/datahub-graphql-core/src/main/resources/app.graphql index 024a7a989f9db..262d2384d84ad 100644 --- a/datahub-graphql-core/src/main/resources/app.graphql +++ b/datahub-graphql-core/src/main/resources/app.graphql @@ -508,6 +508,11 @@ type FeatureFlagsConfig { """ dataContractsEnabled: Boolean! + """ + Whether dataset names are editable + """ + editableDatasetNameEnabled: Boolean! 
+ """ If turned on, all siblings will be separated with no way to get to a "combined" sibling view """ diff --git a/datahub-graphql-core/src/main/resources/entity.graphql b/datahub-graphql-core/src/main/resources/entity.graphql index 941a6a28ceb2c..609597beee51b 100644 --- a/datahub-graphql-core/src/main/resources/entity.graphql +++ b/datahub-graphql-core/src/main/resources/entity.graphql @@ -3482,6 +3482,11 @@ type DatasetEditableProperties { Description of the Dataset """ description: String + + """ + Editable name of the Dataset + """ + name: String } """ @@ -4850,6 +4855,10 @@ input DatasetEditablePropertiesUpdate { Writable description aka documentation for a Dataset """ description: String! + """ + Editable name of the Dataset + """ + name: String } """ diff --git a/datahub-web-react/src/app/entity/dataset/DatasetEntity.tsx b/datahub-web-react/src/app/entity/dataset/DatasetEntity.tsx index c30fee7abc0b6..21ae085832cb3 100644 --- a/datahub-web-react/src/app/entity/dataset/DatasetEntity.tsx +++ b/datahub-web-react/src/app/entity/dataset/DatasetEntity.tsx @@ -220,6 +220,7 @@ export class DatasetEntity implements Entity { }, ]} sidebarSections={this.getSidebarSections()} + isNameEditable /> ); @@ -283,7 +284,7 @@ export class DatasetEntity implements Entity { return ( { return ( { }; displayName = (data: Dataset) => { - return data?.properties?.name || data.name || data.urn; + return data?.editableProperties?.name || data?.properties?.name || data.name || data.urn; }; platformLogoUrl = (data: Dataset) => { diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityHeader.tsx b/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityHeader.tsx index 09fa23dbc9f57..11335d0378760 100644 --- a/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityHeader.tsx +++ b/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityHeader.tsx @@ -17,6 +17,7 @@ import { capitalizeFirstLetterOnly } from 
'../../../../../shared/textUtil'; import { useUserContext } from '../../../../../context/useUserContext'; import { useEntityRegistry } from '../../../../../useEntityRegistry'; import EntityHeaderLoadingSection from './EntityHeaderLoadingSection'; +import { useIsEditableDatasetNameEnabled } from '../../../../../useAppConfig'; const TitleWrapper = styled.div` display: flex; @@ -71,6 +72,8 @@ export function getCanEditName( return true; // TODO: add permissions for data products case EntityType.BusinessAttribute: return privileges?.manageBusinessAttributes; + case EntityType.Dataset: + return entityData?.privileges?.canEditProperties; default: return false; } @@ -94,8 +97,11 @@ export const EntityHeader = ({ headerDropdownItems, headerActionItems, isNameEdi const entityName = entityData?.name; const subType = capitalizeFirstLetterOnly(entityData?.subTypes?.typeNames?.[0]) || undefined; + const isEditableDatasetNameEnabled = useIsEditableDatasetNameEnabled(); const canEditName = - isNameEditable && getCanEditName(entityType, entityData, me?.platformPrivileges as PlatformPrivileges); + isEditableDatasetNameEnabled && + isNameEditable && + getCanEditName(entityType, entityData, me?.platformPrivileges as PlatformPrivileges); const entityRegistry = useEntityRegistry(); return ( @@ -106,7 +112,7 @@ export const EntityHeader = ({ headerDropdownItems, headerActionItems, isNameEdi <> - + {entityData?.deprecation?.deprecated && ( Date: Sat, 10 Aug 2024 01:18:11 +0900 Subject: [PATCH 16/72] fix: remove saxo (#11127) --- README.md | 1 - docs-website/adoptionStoriesIndexes.json | 11 ----- .../src/pages/_components/Logos/index.js | 42 +++++++++--------- .../logos/scrollingCompanies/saxo_bank.webp | Bin 3592 -> 0 bytes 4 files changed, 21 insertions(+), 33 deletions(-) delete mode 100644 docs-website/static/img/logos/scrollingCompanies/saxo_bank.webp diff --git a/README.md b/README.md index b3c2e2d545941..3ac0668918f70 100644 --- a/README.md +++ b/README.md @@ -138,7 +138,6 @@ Here 
are the companies that have officially adopted DataHub. Please feel free to - [Peloton](https://www.onepeloton.com) - [PITS Global Data Recovery Services](https://www.pitsdatarecovery.net/) - [Razer](https://www.razer.com) -- [Saxo Bank](https://www.home.saxo) - [Showroomprive](https://www.showroomprive.com/) - [SpotHero](https://spothero.com) - [Stash](https://www.stash.com) diff --git a/docs-website/adoptionStoriesIndexes.json b/docs-website/adoptionStoriesIndexes.json index 3fe666ccf1c13..9697bdfcf39a9 100644 --- a/docs-website/adoptionStoriesIndexes.json +++ b/docs-website/adoptionStoriesIndexes.json @@ -77,17 +77,6 @@ "category": "B2B & B2C", "description": "“We looked around for data catalog tool, and DataHub was a clear winner.”

Zynga levels up data management using DataHub, highlighting its role in enhancing data management, tracing data lineage, and ensuring data quality." }, - { - "name": "Saxo Bank", - "slug": "saxo-bank", - "imageUrl": "/img/logos/companies/saxobank.svg", - "imageSize": "default", - "link": "https://blog.datahubproject.io/enabling-data-discovery-in-a-data-mesh-the-saxo-journey-451b06969c8f", - "linkType": "blog", - "tagline": "Enabling Data Discovery in a Data Mesh", - "category": "Financial & Fintech", - "description": "Saxo Bank adopted DataHub to enhance data quality and streamline governance, facilitating efficient data management through self-service capabilities.

By integrating Apache Kafka and Snowflake with DataHub, the bank embraced Data Mesh principles to democratize data, support rapid growth, and improve business processes." - }, { "name": "MediaMarkt Saturn", "slug": "mediamarkt-saturn", diff --git a/docs-website/src/pages/_components/Logos/index.js b/docs-website/src/pages/_components/Logos/index.js index 565f6e9a46fee..b17c072d02d57 100644 --- a/docs-website/src/pages/_components/Logos/index.js +++ b/docs-website/src/pages/_components/Logos/index.js @@ -168,36 +168,36 @@ export const CompanyLogos = () => ( modules={[Pagination]} className={clsx("mySwiper", styles.companyWrapper)} > - {companies.map((company, idx) => ( - - {company.link ? ( - + {companies + .filter((company) => company.imageUrl) // Filter companies with imageUrl + .map((company, idx) => ( + + {company.link ? ( + + {company.name} + + ) : ( {company.name} - - ) : ( - {company.name} - )} - - ))} + )} + + ))} ); diff --git a/docs-website/static/img/logos/scrollingCompanies/saxo_bank.webp b/docs-website/static/img/logos/scrollingCompanies/saxo_bank.webp deleted file mode 100644 index a4c1aae73fe48b88946801f5713f15b10df3c177..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 3592 zcmeHEc|6qZ7XPUz>)0y0y75L4Dq8Hs7)#lg7(&Q0b|Yd)*0NPHcIgcfrbcG$%eXIU z>@#*o$U7LcV5Aw`pZeU}`^S6lfA_!6bIy6b&pF@E^PJ}#bC|w9zykm_dOD`orWb8F z002+~trP&N0RX0}Yud#E9s!oFJ%0d9A$)@a&7nHSZ0)WeJNO=80oQkNb`A3Xf&SKm zEc);dE=m4x8UH)Te$y?;6{LIuO)3z49IQzY#DX3_u*@EI`GM8;aA>f9Fvv08!-1CO zx*)y@Vi}LWV3)sOSO360elo~WL->U3y|pKMzvFWAwXy*BL!gNPL4Y{`1$6fQ51v8s zdjkU--UuCf8)?fqn( z-?&Ljushnl)>z>lZFjc|t<)(dFg zf&X&{-hXDERltY9|D>gP3e7D>XcYEQKk9v@23=9!Xv0Mp+{dbIN7!eEG(~o!4^`av zbsPzS|4^#ry>-5UDG8%2$Lq^RkfAqkqDvl)D&S?1C;FA0*I#*PN)MXozVm5=drkAJ zGY8t%a-u6AOidJ7=shoN;6HsRYMv~hJP>Mjhy6>$x!$B%POYd)Z$8p+hZc7E1Ul54w(=i6P4Undr)n@#D`fC{(?T zTQPy3F0W0|5Qj91dx<+-va7wndP!-leU0sQ((Khp)ozEE__;oH$fUV@SJOCMXlA)A-GFhl2-KWk35!2 
za;H7NScB>+X@LOU*ZqugRI)!g9deiAr)ZoJt=e`UQiV2!yw7ZIAsbs@OqioJ&SICE?46IRxL(R-y~+&_dR=)1LL!dNYr|4p|_z zcM57!7cdeXj@4ol-pTQ*zdJUB9`u3fNYKk|4Y-CScEwlDk$P{%Zsye4*|5MS6&z2- z=Ty)$($vF;?8jBCRhz1?Mky+k1LX*~n?Pl~C346y&Bsy5ii-#3ddgx@f#lTijqDp(+#Ijb|;wM3*_0%0BAEb>KRehP3j9-S2%Hkxy}gDPAS5P0UB^ zY@H4D+AaF5WAAW2dN7e3u%tZ<#e683*6?i_{LODopDpBx$Q#R}9os#XAwJ$M=vCo7 z!BnfJuuqFmW{aDjWA0bHUa?e#Ray#pq;j7prG>{b3fLGbHAiTqGYSvH|L9*Et{Fk! zovf$!Py}UdTvQ0Mh@Lc;!qFMGxnz`7Z!6?&{?&Rz)@!U&^GXVO@v6+ck(a9vDhM(R zEO}ny$|%qDsZcbrI0M}ieG1|CI`vk0eBFsnI;qRxJ!2kGK*iJM8Llj(^CqCWk<+9T zE!Z9TEUe?;RcmT}36s01o#csjUB*EP2?-V6%_GGT@dEKyk`r07IP;!QOZF<*>s3YD z*PEd?M4!9nVAU=>Z6}urhavI<7w~D&ZjD|@g7Mh-4&^8{Io`y6&fajr^`93}pI+t$T{^@es8 zw@J`cspiPM(fVkB*K>`jhgE1|V{gh>3;Yb47-X0PTj@AlRyT>st=r^L*S_=315QO_ zAj+P9lrRS3;gb+zmaYjYgrhK~s(A=m zp@IkCN{7UV}r&iw-9VL zaVHsuR!Dxs7`!-rz_D382os-Ql6g^T@~U*_*vwOU^YX11xjG4pLOKcQ6rp!So*JlG zpHsWjWN;{CT%FeJr+SUFSf7x5Y$Hgl@yUXavm|-8P8!R)Ktd}VjNc5|+MdA^H7>}R zWpyYQEp9D(iXIuBrGJ%0UV{84E7jc^YQRlFPG(cvKH(B)FckT1wJfeUV%V#7xs3?p z!@2F*2!ikpF-X}P^B#pZ6ps4x#H&M#iR*mtSWan0Mn!3|YF9-??yO0$O|^ZQo`xKl d;llDM>8QJxp`)}5Z|-22yB*Ay-Ca=u;9raMP3Qms From 080f2a2100d2f42d9913a7bc85b6efb7d8e5f5b3 Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Fri, 9 Aug 2024 11:40:03 -0500 Subject: [PATCH 17/72] feat(mcl-processor): Update mcl processor hooks (#11134) --- docs/how/kafka-config.md | 21 +++ .../kafka/MaeConsumerApplication.java | 7 +- .../metadata/kafka/MCLKafkaListener.java | 103 +++++++++++++ .../kafka/MCLKafkaListenerRegistrar.java | 120 +++++++++++++++ .../kafka/MetadataChangeLogProcessor.java | 140 ------------------ .../kafka/hook/MetadataChangeLogHook.java | 8 + .../kafka/hook/UpdateIndicesHook.java | 17 ++- .../event/EntityChangeEventGeneratorHook.java | 34 +++-- .../kafka/hook/form/FormAssignmentHook.java | 26 +++- .../hook/incident/IncidentsSummaryHook.java | 45 ++++-- .../ingestion/IngestionSchedulerHook.java | 30 ++-- 
.../hook/siblings/SiblingAssociationHook.java | 20 ++- .../kafka/hook/spring/MCLGMSSpringTest.java | 16 +- .../kafka/hook/spring/MCLMAESpringTest.java | 16 +- .../MCLSpringCommonTestConfiguration.java | 9 +- .../datahub/event/PlatformEventProcessor.java | 9 +- .../src/main/resources/application.yaml | 10 ++ .../kafka/KafkaEventConsumerFactory.java | 2 +- .../linkedin/gms/CommonApplicationConfig.java | 5 +- 19 files changed, 421 insertions(+), 217 deletions(-) create mode 100644 metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/MCLKafkaListener.java create mode 100644 metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/MCLKafkaListenerRegistrar.java delete mode 100644 metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeLogProcessor.java diff --git a/docs/how/kafka-config.md b/docs/how/kafka-config.md index 2f20e8b548f83..06c7418f16713 100644 --- a/docs/how/kafka-config.md +++ b/docs/how/kafka-config.md @@ -116,6 +116,27 @@ We've included an environment variable to customize the consumer group id, if yo - `KAFKA_CONSUMER_GROUP_ID`: The name of the kafka consumer's group id. +#### datahub-mae-consumer MCL Hooks + +By default, all MetadataChangeLog processing hooks execute as part of the same kafka consumer group based on the +previously mentioned `KAFKA_CONSUMER_GROUP_ID`. + +The various MCL Hooks could alsp be separated into separate groups which allows for controlling parallelization and +prioritization of the hooks. + +For example, the `UpdateIndicesHook` and `SiblingsHook` processing can be delayed by other hooks. Separating these +hooks into their own group can reduce latency from these other hooks. The `application.yaml` configuration +includes options for assigning a suffix to the consumer group, see `consumerGroupSuffix`. 
+ +| Environment Variable | Default | Description | +|------------------------------------------------|---------|---------------------------------------------------------------------------------------------| +| SIBLINGS_HOOK_CONSUMER_GROUP_SUFFIX | '' | Siblings processing hook. Considered one of the primary hooks in the `datahub-mae-consumer` | +| UPDATE_INDICES_CONSUMER_GROUP_SUFFIX | '' | Primary processing hook. | +| INGESTION_SCHEDULER_HOOK_CONSUMER_GROUP_SUFFIX | '' | Scheduled ingestion hook. | +| INCIDENTS_HOOK_CONSUMER_GROUP_SUFFIX | '' | Incidents hook. | +| ECE_CONSUMER_GROUP_SUFFIX | '' | Entity Change Event hook which publishes to the Platform Events topic. | +| FORMS_HOOK_CONSUMER_GROUP_SUFFIX | '' | Forms processing. | + ## Applying Configurations ### Docker diff --git a/metadata-jobs/mae-consumer-job/src/main/java/com/linkedin/metadata/kafka/MaeConsumerApplication.java b/metadata-jobs/mae-consumer-job/src/main/java/com/linkedin/metadata/kafka/MaeConsumerApplication.java index f6533a6ac1d8a..617bc8e0b7303 100644 --- a/metadata-jobs/mae-consumer-job/src/main/java/com/linkedin/metadata/kafka/MaeConsumerApplication.java +++ b/metadata-jobs/mae-consumer-job/src/main/java/com/linkedin/metadata/kafka/MaeConsumerApplication.java @@ -18,8 +18,6 @@ "com.linkedin.metadata.service", "com.datahub.event", "com.linkedin.gms.factory.kafka", - "com.linkedin.gms.factory.kafka.common", - "com.linkedin.gms.factory.kafka.schemaregistry", "com.linkedin.metadata.boot.kafka", "com.linkedin.metadata.kafka", "com.linkedin.metadata.dao.producer", @@ -34,7 +32,10 @@ "com.linkedin.gms.factory.context", "com.linkedin.gms.factory.timeseries", "com.linkedin.gms.factory.assertion", - "com.linkedin.gms.factory.plugins" + "com.linkedin.gms.factory.plugins", + "com.linkedin.gms.factory.change", + "com.datahub.event.hook", + "com.linkedin.gms.factory.notifications" }, excludeFilters = { @ComponentScan.Filter( diff --git 
a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/MCLKafkaListener.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/MCLKafkaListener.java new file mode 100644 index 0000000000000..70b452722abc7 --- /dev/null +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/MCLKafkaListener.java @@ -0,0 +1,103 @@ +package com.linkedin.metadata.kafka; + +import com.codahale.metrics.Histogram; +import com.codahale.metrics.MetricRegistry; +import com.codahale.metrics.Timer; +import com.linkedin.metadata.EventUtils; +import com.linkedin.metadata.kafka.hook.MetadataChangeLogHook; +import com.linkedin.metadata.utils.metrics.MetricUtils; +import com.linkedin.mxe.MetadataChangeLog; +import io.datahubproject.metadata.context.OperationContext; +import java.util.List; +import java.util.stream.Collectors; +import lombok.extern.slf4j.Slf4j; +import org.apache.avro.generic.GenericRecord; +import org.apache.kafka.clients.consumer.ConsumerRecord; + +@Slf4j +public class MCLKafkaListener { + private static final Histogram kafkaLagStats = + MetricUtils.get() + .histogram( + MetricRegistry.name( + "com.linkedin.metadata.kafka.MetadataChangeLogProcessor", "kafkaLag")); + + private final String consumerGroupId; + private final List hooks; + + public MCLKafkaListener( + OperationContext systemOperationContext, + String consumerGroup, + List hooks) { + this.consumerGroupId = consumerGroup; + this.hooks = hooks; + this.hooks.forEach(hook -> hook.init(systemOperationContext)); + + log.info( + "Enabled MCL Hooks - Group: {} Hooks: {}", + consumerGroup, + hooks.stream().map(hook -> hook.getClass().getSimpleName()).collect(Collectors.toList())); + } + + public void consume(final ConsumerRecord consumerRecord) { + try (Timer.Context i = MetricUtils.timer(this.getClass(), "consume").time()) { + kafkaLagStats.update(System.currentTimeMillis() - consumerRecord.timestamp()); + final GenericRecord record = consumerRecord.value(); + log.debug( 
+ "Got MCL event consumer: {} key: {}, topic: {}, partition: {}, offset: {}, value size: {}, timestamp: {}", + consumerGroupId, + consumerRecord.key(), + consumerRecord.topic(), + consumerRecord.partition(), + consumerRecord.offset(), + consumerRecord.serializedValueSize(), + consumerRecord.timestamp()); + MetricUtils.counter(this.getClass(), consumerGroupId + "_received_mcl_count").inc(); + + MetadataChangeLog event; + try { + event = EventUtils.avroToPegasusMCL(record); + } catch (Exception e) { + MetricUtils.counter( + this.getClass(), consumerGroupId + "_avro_to_pegasus_conversion_failure") + .inc(); + log.error("Error deserializing message due to: ", e); + log.error("Message: {}", record.toString()); + return; + } + + log.info( + "Invoking MCL hooks for consumer: {} urn: {}, aspect name: {}, entity type: {}, change type: {}", + consumerGroupId, + event.getEntityUrn(), + event.hasAspectName() ? event.getAspectName() : null, + event.hasEntityType() ? event.getEntityType() : null, + event.hasChangeType() ? event.getChangeType() : null); + + // Here - plug in additional "custom processor hooks" + for (MetadataChangeLogHook hook : this.hooks) { + log.info( + "Invoking MCL hook {} for urn: {}", + hook.getClass().getSimpleName(), + event.getEntityUrn()); + try (Timer.Context ignored = + MetricUtils.timer(this.getClass(), hook.getClass().getSimpleName() + "_latency") + .time()) { + hook.invoke(event); + } catch (Exception e) { + // Just skip this hook and continue. - Note that this represents "at most once"// + // processing. + MetricUtils.counter(this.getClass(), hook.getClass().getSimpleName() + "_failure").inc(); + log.error( + "Failed to execute MCL hook with name {}", hook.getClass().getCanonicalName(), e); + } + } + // TODO: Manually commit kafka offsets after full processing. 
+ MetricUtils.counter(this.getClass(), consumerGroupId + "_consumed_mcl_count").inc(); + log.info( + "Successfully completed MCL hooks for consumer: {} urn: {}", + consumerGroupId, + event.getEntityUrn()); + } + } +} diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/MCLKafkaListenerRegistrar.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/MCLKafkaListenerRegistrar.java new file mode 100644 index 0000000000000..fb2880f617d30 --- /dev/null +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/MCLKafkaListenerRegistrar.java @@ -0,0 +1,120 @@ +package com.linkedin.metadata.kafka; + +import com.linkedin.metadata.kafka.config.MetadataChangeLogProcessorCondition; +import com.linkedin.metadata.kafka.hook.MetadataChangeLogHook; +import com.linkedin.mxe.Topics; +import io.datahubproject.metadata.context.OperationContext; +import java.util.Comparator; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import lombok.SneakyThrows; +import lombok.extern.slf4j.Slf4j; +import org.apache.avro.generic.GenericRecord; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.springframework.beans.factory.InitializingBean; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Conditional; +import org.springframework.kafka.annotation.EnableKafka; +import org.springframework.kafka.config.KafkaListenerContainerFactory; +import org.springframework.kafka.config.KafkaListenerEndpoint; +import org.springframework.kafka.config.KafkaListenerEndpointRegistry; +import org.springframework.kafka.config.MethodKafkaListenerEndpoint; +import org.springframework.messaging.handler.annotation.support.DefaultMessageHandlerMethodFactory; +import 
org.springframework.stereotype.Component; + +@Slf4j +@EnableKafka +@Component +@Conditional(MetadataChangeLogProcessorCondition.class) +public class MCLKafkaListenerRegistrar implements InitializingBean { + + @Autowired + @Qualifier("systemOperationContext") + private OperationContext systemOperationContext; + + @Autowired private KafkaListenerEndpointRegistry kafkaListenerEndpointRegistry; + + @Autowired + @Qualifier("kafkaEventConsumer") + private KafkaListenerContainerFactory kafkaListenerContainerFactory; + + @Value("${METADATA_CHANGE_LOG_KAFKA_CONSUMER_GROUP_ID:generic-mae-consumer-job-client}") + private String consumerGroupBase; + + @Value("${METADATA_CHANGE_LOG_VERSIONED_TOPIC_NAME:" + Topics.METADATA_CHANGE_LOG_VERSIONED + "}") + private String mclVersionedTopicName; + + @Value( + "${METADATA_CHANGE_LOG_TIMESERIES_TOPIC_NAME:" + Topics.METADATA_CHANGE_LOG_TIMESERIES + "}") + private String mclTimeseriesTopicName; + + @Autowired private List metadataChangeLogHooks; + + @Override + public void afterPropertiesSet() { + Map> hookGroups = + getMetadataChangeLogHooks().stream() + .collect(Collectors.groupingBy(MetadataChangeLogHook::getConsumerGroupSuffix)); + + log.info( + "MetadataChangeLogProcessor Consumer Groups: {}", + hookGroups.keySet().stream().map(this::buildConsumerGroupName).collect(Collectors.toSet())); + + hookGroups.forEach( + (key, hooks) -> { + KafkaListenerEndpoint kafkaListenerEndpoint = + createListenerEndpoint( + buildConsumerGroupName(key), + List.of(mclVersionedTopicName, mclTimeseriesTopicName), + hooks); + registerMCLKafkaListener(kafkaListenerEndpoint, true); + }); + } + + public List getMetadataChangeLogHooks() { + return metadataChangeLogHooks.stream() + .filter(MetadataChangeLogHook::isEnabled) + .sorted(Comparator.comparing(MetadataChangeLogHook::executionOrder)) + .toList(); + } + + @SneakyThrows + public void registerMCLKafkaListener( + KafkaListenerEndpoint kafkaListenerEndpoint, boolean startImmediately) { + 
kafkaListenerEndpointRegistry.registerListenerContainer( + kafkaListenerEndpoint, kafkaListenerContainerFactory, startImmediately); + } + + private KafkaListenerEndpoint createListenerEndpoint( + String consumerGroupId, List topics, List hooks) { + MethodKafkaListenerEndpoint kafkaListenerEndpoint = + new MethodKafkaListenerEndpoint<>(); + kafkaListenerEndpoint.setId(consumerGroupId); + kafkaListenerEndpoint.setGroupId(consumerGroupId); + kafkaListenerEndpoint.setAutoStartup(true); + kafkaListenerEndpoint.setTopics(topics.toArray(new String[topics.size()])); + kafkaListenerEndpoint.setMessageHandlerMethodFactory(new DefaultMessageHandlerMethodFactory()); + kafkaListenerEndpoint.setBean( + new MCLKafkaListener(systemOperationContext, consumerGroupId, hooks)); + try { + kafkaListenerEndpoint.setMethod( + MCLKafkaListener.class.getMethod("consume", ConsumerRecord.class)); + } catch (NoSuchMethodException e) { + throw new RuntimeException(e); + } + + return kafkaListenerEndpoint; + } + + private String buildConsumerGroupName(@Nonnull String suffix) { + if (suffix.isEmpty()) { + return consumerGroupBase; + } else { + return String.join("-", consumerGroupBase, suffix); + } + } +} diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeLogProcessor.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeLogProcessor.java deleted file mode 100644 index 6112ad798d73d..0000000000000 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeLogProcessor.java +++ /dev/null @@ -1,140 +0,0 @@ -package com.linkedin.metadata.kafka; - -import com.codahale.metrics.Histogram; -import com.codahale.metrics.MetricRegistry; -import com.codahale.metrics.Timer; -import com.linkedin.gms.factory.kafka.KafkaEventConsumerFactory; -import com.linkedin.metadata.EventUtils; -import com.linkedin.metadata.kafka.config.MetadataChangeLogProcessorCondition; -import 
com.linkedin.metadata.kafka.hook.MetadataChangeLogHook; -import com.linkedin.metadata.kafka.hook.UpdateIndicesHook; -import com.linkedin.metadata.kafka.hook.event.EntityChangeEventGeneratorHook; -import com.linkedin.metadata.kafka.hook.form.FormAssignmentHook; -import com.linkedin.metadata.kafka.hook.incident.IncidentsSummaryHook; -import com.linkedin.metadata.kafka.hook.ingestion.IngestionSchedulerHook; -import com.linkedin.metadata.kafka.hook.siblings.SiblingAssociationHook; -import com.linkedin.metadata.utils.metrics.MetricUtils; -import com.linkedin.mxe.MetadataChangeLog; -import com.linkedin.mxe.Topics; -import io.datahubproject.metadata.context.OperationContext; -import java.util.Comparator; -import java.util.List; -import java.util.stream.Collectors; -import lombok.Getter; -import lombok.extern.slf4j.Slf4j; -import org.apache.avro.generic.GenericRecord; -import org.apache.kafka.clients.consumer.ConsumerRecord; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.context.annotation.Conditional; -import org.springframework.context.annotation.Import; -import org.springframework.kafka.annotation.EnableKafka; -import org.springframework.kafka.annotation.KafkaListener; -import org.springframework.stereotype.Component; - -@Slf4j -@Component -@Conditional(MetadataChangeLogProcessorCondition.class) -@Import({ - UpdateIndicesHook.class, - IngestionSchedulerHook.class, - EntityChangeEventGeneratorHook.class, - KafkaEventConsumerFactory.class, - SiblingAssociationHook.class, - FormAssignmentHook.class, - IncidentsSummaryHook.class, -}) -@EnableKafka -public class MetadataChangeLogProcessor { - - @Getter private final List hooks; - private final Histogram kafkaLagStats = - MetricUtils.get().histogram(MetricRegistry.name(this.getClass(), "kafkaLag")); - - @Autowired - public MetadataChangeLogProcessor( - @Qualifier("systemOperationContext") OperationContext 
systemOperationContext, - List metadataChangeLogHooks) { - this.hooks = - metadataChangeLogHooks.stream() - .filter(MetadataChangeLogHook::isEnabled) - .sorted(Comparator.comparing(MetadataChangeLogHook::executionOrder)) - .collect(Collectors.toList()); - log.info( - "Enabled hooks: {}", - this.hooks.stream() - .map(hook -> hook.getClass().getSimpleName()) - .collect(Collectors.toList())); - this.hooks.forEach(hook -> hook.init(systemOperationContext)); - } - - @KafkaListener( - id = "${METADATA_CHANGE_LOG_KAFKA_CONSUMER_GROUP_ID:generic-mae-consumer-job-client}", - topics = { - "${METADATA_CHANGE_LOG_VERSIONED_TOPIC_NAME:" + Topics.METADATA_CHANGE_LOG_VERSIONED + "}", - "${METADATA_CHANGE_LOG_TIMESERIES_TOPIC_NAME:" + Topics.METADATA_CHANGE_LOG_TIMESERIES + "}" - }, - containerFactory = "kafkaEventConsumer") - public void consume(final ConsumerRecord consumerRecord) { - try (Timer.Context i = MetricUtils.timer(this.getClass(), "consume").time()) { - kafkaLagStats.update(System.currentTimeMillis() - consumerRecord.timestamp()); - final GenericRecord record = consumerRecord.value(); - log.info( - "Got MCL event key: {}, topic: {}, partition: {}, offset: {}, value size: {}, timestamp: {}", - consumerRecord.key(), - consumerRecord.topic(), - consumerRecord.partition(), - consumerRecord.offset(), - consumerRecord.serializedValueSize(), - consumerRecord.timestamp()); - MetricUtils.counter(this.getClass(), "received_mcl_count").inc(); - - MetadataChangeLog event; - try { - event = EventUtils.avroToPegasusMCL(record); - log.debug( - "Successfully converted Avro MCL to Pegasus MCL. 
urn: {}, key: {}", - event.getEntityUrn(), - event.getEntityKeyAspect()); - } catch (Exception e) { - MetricUtils.counter(this.getClass(), "avro_to_pegasus_conversion_failure").inc(); - log.error("Error deserializing message due to: ", e); - log.error("Message: {}", record.toString()); - return; - } - - log.info( - "Invoking MCL hooks for urn: {}, aspect name: {}, entity type: {}, change type: {}", - event.getEntityUrn(), - event.hasAspectName() ? event.getAspectName() : null, - event.hasEntityType() ? event.getEntityType() : null, - event.hasChangeType() ? event.getChangeType() : null); - - // Here - plug in additional "custom processor hooks" - for (MetadataChangeLogHook hook : this.hooks) { - if (!hook.isEnabled()) { - log.info(String.format("Skipping disabled hook %s", hook.getClass())); - continue; - } - log.info( - "Invoking MCL hook {} for urn: {}", - hook.getClass().getSimpleName(), - event.getEntityUrn()); - try (Timer.Context ignored = - MetricUtils.timer(this.getClass(), hook.getClass().getSimpleName() + "_latency") - .time()) { - hook.invoke(event); - } catch (Exception e) { - // Just skip this hook and continue. - Note that this represents "at most once"// - // processing. - MetricUtils.counter(this.getClass(), hook.getClass().getSimpleName() + "_failure").inc(); - log.error( - "Failed to execute MCL hook with name {}", hook.getClass().getCanonicalName(), e); - } - } - // TODO: Manually commit kafka offsets after full processing. 
- MetricUtils.counter(this.getClass(), "consumed_mcl_count").inc(); - log.info("Successfully completed MCL hooks for urn: {}", event.getEntityUrn()); - } - } -} diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/MetadataChangeLogHook.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/MetadataChangeLogHook.java index 145d1ded724cc..06a184c9f89f9 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/MetadataChangeLogHook.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/MetadataChangeLogHook.java @@ -18,6 +18,14 @@ default MetadataChangeLogHook init(@Nonnull OperationContext systemOperationCont return this; } + /** + * Suffix for the consumer group + * + * @return suffix + */ + @Nonnull + String getConsumerGroupSuffix(); + /** * Return whether the hook is enabled or not. If not enabled, the below invoke method is not * triggered diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/UpdateIndicesHook.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/UpdateIndicesHook.java index a0e304b26ea60..bd804b0f4424c 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/UpdateIndicesHook.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/UpdateIndicesHook.java @@ -2,6 +2,7 @@ import static com.linkedin.metadata.Constants.*; +import com.google.common.annotations.VisibleForTesting; import com.linkedin.gms.factory.common.GraphServiceFactory; import com.linkedin.gms.factory.common.SystemMetadataServiceFactory; import com.linkedin.gms.factory.entityregistry.EntityRegistryFactory; @@ -12,7 +13,9 @@ import com.linkedin.mxe.MetadataChangeLog; import io.datahubproject.metadata.context.OperationContext; import javax.annotation.Nonnull; +import lombok.Getter; import lombok.extern.slf4j.Slf4j; +import 
org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Import; import org.springframework.stereotype.Component; @@ -34,15 +37,27 @@ public class UpdateIndicesHook implements MetadataChangeLogHook { private final boolean isEnabled; private final boolean reprocessUIEvents; private OperationContext systemOperationContext; + @Getter private final String consumerGroupSuffix; + @Autowired public UpdateIndicesHook( UpdateIndicesService updateIndicesService, @Nonnull @Value("${updateIndices.enabled:true}") Boolean isEnabled, @Nonnull @Value("${featureFlags.preProcessHooks.reprocessEnabled:false}") - Boolean reprocessUIEvents) { + Boolean reprocessUIEvents, + @Nonnull @Value("${updateIndices.consumerGroupSuffix}") String consumerGroupSuffix) { this.updateIndicesService = updateIndicesService; this.isEnabled = isEnabled; this.reprocessUIEvents = reprocessUIEvents; + this.consumerGroupSuffix = consumerGroupSuffix; + } + + @VisibleForTesting + public UpdateIndicesHook( + UpdateIndicesService updateIndicesService, + @Nonnull Boolean isEnabled, + @Nonnull Boolean reprocessUIEvents) { + this(updateIndicesService, isEnabled, reprocessUIEvents, ""); } @Override diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/event/EntityChangeEventGeneratorHook.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/event/EntityChangeEventGeneratorHook.java index 8dc98d77233ce..59d068a46d8c6 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/event/EntityChangeEventGeneratorHook.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/event/EntityChangeEventGeneratorHook.java @@ -1,5 +1,6 @@ package com.linkedin.metadata.kafka.hook.event; +import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableSet; import 
com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; @@ -29,6 +30,7 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; +import lombok.Getter; import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; @@ -78,10 +80,11 @@ public class EntityChangeEventGeneratorHook implements MetadataChangeLogHook { private static final Set SUPPORTED_OPERATIONS = ImmutableSet.of("CREATE", "UPSERT", "DELETE"); - private final EntityChangeEventGeneratorRegistry _entityChangeEventGeneratorRegistry; + private final EntityChangeEventGeneratorRegistry entityChangeEventGeneratorRegistry; private final OperationContext systemOperationContext; - private final SystemEntityClient _entityClient; - private final Boolean _isEnabled; + private final SystemEntityClient entityClient; + private final Boolean isEnabled; + @Getter private final String consumerGroupSuffix; @Autowired public EntityChangeEventGeneratorHook( @@ -89,17 +92,28 @@ public EntityChangeEventGeneratorHook( @Nonnull @Qualifier("entityChangeEventGeneratorRegistry") final EntityChangeEventGeneratorRegistry entityChangeEventGeneratorRegistry, @Nonnull final SystemEntityClient entityClient, - @Nonnull @Value("${entityChangeEvents.enabled:true}") Boolean isEnabled) { + @Nonnull @Value("${entityChangeEvents.enabled:true}") Boolean isEnabled, + @Nonnull @Value("${entityChangeEvents.consumerGroupSuffix}") String consumerGroupSuffix) { this.systemOperationContext = systemOperationContext; - _entityChangeEventGeneratorRegistry = + this.entityChangeEventGeneratorRegistry = Objects.requireNonNull(entityChangeEventGeneratorRegistry); - _entityClient = Objects.requireNonNull(entityClient); - _isEnabled = isEnabled; + this.entityClient = Objects.requireNonNull(entityClient); + this.isEnabled = isEnabled; + this.consumerGroupSuffix = consumerGroupSuffix; + } + + 
@VisibleForTesting + public EntityChangeEventGeneratorHook( + @Nonnull OperationContext systemOperationContext, + @Nonnull final EntityChangeEventGeneratorRegistry entityChangeEventGeneratorRegistry, + @Nonnull final SystemEntityClient entityClient, + @Nonnull Boolean isEnabled) { + this(systemOperationContext, entityChangeEventGeneratorRegistry, entityClient, isEnabled, ""); } @Override public boolean isEnabled() { - return _isEnabled; + return isEnabled; } @Override @@ -166,7 +180,7 @@ private List generateChangeEvents( @Nonnull final Aspect to, @Nonnull AuditStamp auditStamp) { final List> entityChangeEventGenerators = - _entityChangeEventGeneratorRegistry.getEntityChangeEventGenerators(aspectName).stream() + entityChangeEventGeneratorRegistry.getEntityChangeEventGenerators(aspectName).stream() // Note: Assumes that correct types have been registered for the aspect. .map(changeEventGenerator -> (EntityChangeEventGenerator) changeEventGenerator) .collect(Collectors.toList()); @@ -186,7 +200,7 @@ private boolean isEligibleForProcessing(final MetadataChangeLog log) { private void emitPlatformEvent( @Nonnull final PlatformEvent event, @Nonnull final String partitioningKey) throws Exception { - _entityClient.producePlatformEvent( + entityClient.producePlatformEvent( systemOperationContext, Constants.CHANGE_EVENT_PLATFORM_EVENT_NAME, partitioningKey, event); } diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/form/FormAssignmentHook.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/form/FormAssignmentHook.java index 8d093fe0b8a12..063fa6de92c83 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/form/FormAssignmentHook.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/form/FormAssignmentHook.java @@ -2,6 +2,7 @@ import static com.linkedin.metadata.Constants.*; +import com.google.common.annotations.VisibleForTesting; import 
com.google.common.collect.ImmutableSet; import com.linkedin.events.metadata.ChangeType; import com.linkedin.form.DynamicFormAssignment; @@ -15,6 +16,7 @@ import java.util.Objects; import java.util.Set; import javax.annotation.Nonnull; +import lombok.Getter; import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; @@ -53,17 +55,25 @@ public class FormAssignmentHook implements MetadataChangeLogHook { ImmutableSet.of( ChangeType.UPSERT, ChangeType.CREATE, ChangeType.CREATE_ENTITY, ChangeType.RESTATE); - private final FormService _formService; - private final boolean _isEnabled; + private final FormService formService; + private final boolean isEnabled; private OperationContext systemOperationContext; + @Getter private final String consumerGroupSuffix; @Autowired public FormAssignmentHook( @Nonnull final FormService formService, - @Nonnull @Value("${forms.hook.enabled:true}") Boolean isEnabled) { - _formService = Objects.requireNonNull(formService, "formService is required"); - _isEnabled = isEnabled; + @Nonnull @Value("${forms.hook.enabled:true}") Boolean isEnabled, + @Nonnull @Value("${forms.hook.consumerGroupSuffix}") String consumerGroupSuffix) { + this.formService = Objects.requireNonNull(formService, "formService is required"); + this.isEnabled = isEnabled; + this.consumerGroupSuffix = consumerGroupSuffix; + } + + @VisibleForTesting + public FormAssignmentHook(@Nonnull final FormService formService, @Nonnull Boolean isEnabled) { + this(formService, isEnabled, ""); } @Override @@ -74,12 +84,12 @@ public FormAssignmentHook init(@Nonnull OperationContext systemOperationContext) @Override public boolean isEnabled() { - return _isEnabled; + return isEnabled; } @Override public void invoke(@Nonnull final MetadataChangeLog event) { - if (_isEnabled && isEligibleForProcessing(event)) { + if (isEnabled && isEligibleForProcessing(event)) { if 
(isFormDynamicFilterUpdated(event)) { handleFormFilterUpdated(event); } @@ -96,7 +106,7 @@ private void handleFormFilterUpdated(@Nonnull final MetadataChangeLog event) { DynamicFormAssignment.class); // 2. Register a automation to assign it. - _formService.upsertFormAssignmentRunner( + formService.upsertFormAssignmentRunner( systemOperationContext, event.getEntityUrn(), formFilters); } diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/incident/IncidentsSummaryHook.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/incident/IncidentsSummaryHook.java index 7c03a11a81f7a..5483fed9116e1 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/incident/IncidentsSummaryHook.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/incident/IncidentsSummaryHook.java @@ -2,6 +2,7 @@ import static com.linkedin.metadata.Constants.*; +import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableSet; import com.linkedin.common.IncidentSummaryDetails; import com.linkedin.common.IncidentSummaryDetailsArray; @@ -27,6 +28,7 @@ import java.util.Objects; import java.util.Set; import javax.annotation.Nonnull; +import lombok.Getter; import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; @@ -57,20 +59,31 @@ public class IncidentsSummaryHook implements MetadataChangeLogHook { ImmutableSet.of(INCIDENT_INFO_ASPECT_NAME, STATUS_ASPECT_NAME); private OperationContext systemOperationContext; - private final IncidentService _incidentService; - private final boolean _isEnabled; + private final IncidentService incidentService; + private final boolean isEnabled; + @Getter private final String consumerGroupSuffix; /** Max number of incidents to allow in incident summary, limited to prevent HTTP errors */ - private final int 
_maxIncidentHistory; + private final int maxIncidentHistory; @Autowired public IncidentsSummaryHook( @Nonnull final IncidentService incidentService, - @Nonnull @Value("${incidents.hook.enabled:true}") Boolean isEnabled, - @Nonnull @Value("${incidents.hook.maxIncidentHistory:100}") Integer maxIncidentHistory) { - _incidentService = Objects.requireNonNull(incidentService, "incidentService is required"); - _isEnabled = isEnabled; - _maxIncidentHistory = maxIncidentHistory; + @Nonnull @Value("${incidents.hook.enabled}") Boolean isEnabled, + @Nonnull @Value("${incidents.hook.maxIncidentHistory}") Integer maxIncidentHistory, + @Nonnull @Value("${incidents.hook.consumerGroupSuffix}") String consumerGroupSuffix) { + this.incidentService = Objects.requireNonNull(incidentService, "incidentService is required"); + this.isEnabled = isEnabled; + this.maxIncidentHistory = maxIncidentHistory; + this.consumerGroupSuffix = consumerGroupSuffix; + } + + @VisibleForTesting + public IncidentsSummaryHook( + @Nonnull final IncidentService incidentService, + @Nonnull Boolean isEnabled, + @Nonnull Integer maxIncidentHistory) { + this(incidentService, isEnabled, maxIncidentHistory, ""); } @Override @@ -81,12 +94,12 @@ public IncidentsSummaryHook init(@Nonnull OperationContext systemOperationContex @Override public boolean isEnabled() { - return _isEnabled; + return isEnabled; } @Override public void invoke(@Nonnull final MetadataChangeLog event) { - if (_isEnabled && isEligibleForProcessing(event)) { + if (isEnabled && isEligibleForProcessing(event)) { log.debug("Urn {} received by Incident Summary Hook.", event.getEntityUrn()); final Urn urn = HookUtils.getUrnFromEvent(event, systemOperationContext.getEntityRegistry()); // Handle the deletion case. @@ -104,7 +117,7 @@ public void invoke(@Nonnull final MetadataChangeLog event) { private void handleIncidentSoftDeleted(@Nonnull final Urn incidentUrn) { // 1. Fetch incident info. 
IncidentInfo incidentInfo = - _incidentService.getIncidentInfo(systemOperationContext, incidentUrn); + incidentService.getIncidentInfo(systemOperationContext, incidentUrn); // 2. Retrieve associated urns. if (incidentInfo != null) { @@ -127,7 +140,7 @@ private void handleIncidentSoftDeleted(@Nonnull final Urn incidentUrn) { private void handleIncidentUpdated(@Nonnull final Urn incidentUrn) { // 1. Fetch incident info + status IncidentInfo incidentInfo = - _incidentService.getIncidentInfo(systemOperationContext, incidentUrn); + incidentService.getIncidentInfo(systemOperationContext, incidentUrn); // 2. Retrieve associated urns. if (incidentInfo != null) { @@ -179,14 +192,14 @@ private void addIncidentToSummary( IncidentsSummaryUtils.removeIncidentFromResolvedSummary(incidentUrn, summary); // Then, add to active. - IncidentsSummaryUtils.addIncidentToActiveSummary(details, summary, _maxIncidentHistory); + IncidentsSummaryUtils.addIncidentToActiveSummary(details, summary, maxIncidentHistory); } else if (IncidentState.RESOLVED.equals(status.getState())) { // First, ensure this isn't in any summaries anymore. IncidentsSummaryUtils.removeIncidentFromActiveSummary(incidentUrn, summary); // Then, add to resolved. - IncidentsSummaryUtils.addIncidentToResolvedSummary(details, summary, _maxIncidentHistory); + IncidentsSummaryUtils.addIncidentToResolvedSummary(details, summary, maxIncidentHistory); } // 3. Emit the change back! @@ -196,7 +209,7 @@ private void addIncidentToSummary( @Nonnull private IncidentsSummary getIncidentsSummary(@Nonnull final Urn entityUrn) { IncidentsSummary maybeIncidentsSummary = - _incidentService.getIncidentsSummary(systemOperationContext, entityUrn); + incidentService.getIncidentsSummary(systemOperationContext, entityUrn); return maybeIncidentsSummary == null ? 
new IncidentsSummary() .setResolvedIncidentDetails(new IncidentSummaryDetailsArray()) @@ -260,7 +273,7 @@ private boolean isIncidentUpdate(@Nonnull final MetadataChangeLog event) { private void updateIncidentSummary( @Nonnull final Urn entityUrn, @Nonnull final IncidentsSummary newSummary) { try { - _incidentService.updateIncidentsSummary(systemOperationContext, entityUrn, newSummary); + incidentService.updateIncidentsSummary(systemOperationContext, entityUrn, newSummary); } catch (Exception e) { log.error( String.format( diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/ingestion/IngestionSchedulerHook.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/ingestion/IngestionSchedulerHook.java index c13f0f75708f7..5569fade7e6eb 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/ingestion/IngestionSchedulerHook.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/ingestion/IngestionSchedulerHook.java @@ -15,6 +15,7 @@ import com.linkedin.mxe.MetadataChangeLog; import io.datahubproject.metadata.context.OperationContext; import javax.annotation.Nonnull; +import lombok.Getter; import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; @@ -29,27 +30,36 @@ @Component @Import({EntityRegistryFactory.class, IngestionSchedulerFactory.class}) public class IngestionSchedulerHook implements MetadataChangeLogHook { - private final IngestionScheduler _scheduler; - private final boolean _isEnabled; + private final IngestionScheduler scheduler; + private final boolean isEnabled; private OperationContext systemOperationContext; + @Getter private final String consumerGroupSuffix; @Autowired public IngestionSchedulerHook( @Nonnull final IngestionScheduler scheduler, - @Nonnull @Value("${ingestionScheduler.enabled:true}") Boolean isEnabled) { - _scheduler = 
scheduler; - _isEnabled = isEnabled; + @Nonnull @Value("${ingestionScheduler.enabled:true}") Boolean isEnabled, + @Nonnull @Value("${ingestionScheduler.consumerGroupSuffix}") String consumerGroupSuffix) { + this.scheduler = scheduler; + this.isEnabled = isEnabled; + this.consumerGroupSuffix = consumerGroupSuffix; + } + + @VisibleForTesting + public IngestionSchedulerHook( + @Nonnull final IngestionScheduler scheduler, @Nonnull Boolean isEnabled) { + this(scheduler, isEnabled, ""); } @Override public boolean isEnabled() { - return _isEnabled; + return isEnabled; } @Override public IngestionSchedulerHook init(@Nonnull OperationContext systemOperationContext) { this.systemOperationContext = systemOperationContext; - _scheduler.init(); + scheduler.init(); return this; } @@ -66,11 +76,11 @@ public void invoke(@Nonnull MetadataChangeLog event) { final Urn urn = getUrnFromEvent(event); if (ChangeType.DELETE.equals(event.getChangeType())) { - _scheduler.unscheduleNextIngestionSourceExecution(urn); + scheduler.unscheduleNextIngestionSourceExecution(urn); } else { // Update the scheduler to reflect the latest changes. 
final DataHubIngestionSourceInfo info = getInfoFromEvent(event); - _scheduler.scheduleNextIngestionSourceExecution(urn, info); + scheduler.scheduleNextIngestionSourceExecution(urn, info); } } } @@ -138,6 +148,6 @@ private DataHubIngestionSourceInfo getInfoFromEvent(final MetadataChangeLog even @VisibleForTesting IngestionScheduler scheduler() { - return _scheduler; + return scheduler; } } diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHook.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHook.java index f068679da7757..bbe0feed7de11 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHook.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHook.java @@ -41,6 +41,7 @@ import java.util.List; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import lombok.Getter; import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Autowired; @@ -70,17 +71,28 @@ public class SiblingAssociationHook implements MetadataChangeLogHook { private final SystemEntityClient systemEntityClient; private final EntitySearchService entitySearchService; - private final boolean _isEnabled; + private final boolean isEnabled; private OperationContext systemOperationContext; + @Getter private final String consumerGroupSuffix; @Autowired public SiblingAssociationHook( @Nonnull final SystemEntityClient systemEntityClient, @Nonnull final EntitySearchService searchService, - @Nonnull @Value("${siblings.enabled:true}") Boolean isEnabled) { + @Nonnull @Value("${siblings.enabled:true}") Boolean isEnabled, + @Nonnull @Value("${siblings.consumerGroupSuffix}") String consumerGroupSuffix) { this.systemEntityClient = systemEntityClient; entitySearchService = searchService; - _isEnabled = 
isEnabled; + this.isEnabled = isEnabled; + this.consumerGroupSuffix = consumerGroupSuffix; + } + + @VisibleForTesting + public SiblingAssociationHook( + @Nonnull final SystemEntityClient systemEntityClient, + @Nonnull final EntitySearchService searchService, + @Nonnull Boolean isEnabled) { + this(systemEntityClient, searchService, isEnabled, ""); } @Value("${siblings.enabled:false}") @@ -99,7 +111,7 @@ public SiblingAssociationHook init(@Nonnull OperationContext systemOperationCont @Override public boolean isEnabled() { - return _isEnabled; + return isEnabled; } @Override diff --git a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLGMSSpringTest.java b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLGMSSpringTest.java index c2a8de161eafe..10f149e606295 100644 --- a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLGMSSpringTest.java +++ b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLGMSSpringTest.java @@ -3,7 +3,7 @@ import static org.testng.AssertJUnit.*; import com.linkedin.gms.factory.config.ConfigurationProvider; -import com.linkedin.metadata.kafka.MetadataChangeLogProcessor; +import com.linkedin.metadata.kafka.MCLKafkaListenerRegistrar; import com.linkedin.metadata.kafka.hook.UpdateIndicesHook; import com.linkedin.metadata.kafka.hook.event.EntityChangeEventGeneratorHook; import com.linkedin.metadata.kafka.hook.incident.IncidentsSummaryHook; @@ -35,23 +35,23 @@ public class MCLGMSSpringTest extends AbstractTestNGSpringContextTests { @Test public void testHooks() { - MetadataChangeLogProcessor metadataChangeLogProcessor = - applicationContext.getBean(MetadataChangeLogProcessor.class); + MCLKafkaListenerRegistrar registrar = + applicationContext.getBean(MCLKafkaListenerRegistrar.class); assertTrue( - metadataChangeLogProcessor.getHooks().stream() + registrar.getMetadataChangeLogHooks().stream() .noneMatch(hook -> hook 
instanceof IngestionSchedulerHook)); assertTrue( - metadataChangeLogProcessor.getHooks().stream() + registrar.getMetadataChangeLogHooks().stream() .anyMatch(hook -> hook instanceof UpdateIndicesHook)); assertTrue( - metadataChangeLogProcessor.getHooks().stream() + registrar.getMetadataChangeLogHooks().stream() .anyMatch(hook -> hook instanceof SiblingAssociationHook)); assertTrue( - metadataChangeLogProcessor.getHooks().stream() + registrar.getMetadataChangeLogHooks().stream() .anyMatch(hook -> hook instanceof EntityChangeEventGeneratorHook)); assertEquals( 1, - metadataChangeLogProcessor.getHooks().stream() + registrar.getMetadataChangeLogHooks().stream() .filter(hook -> hook instanceof IncidentsSummaryHook) .count()); } diff --git a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLMAESpringTest.java b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLMAESpringTest.java index 23de7707cc571..2049e974999b1 100644 --- a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLMAESpringTest.java +++ b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLMAESpringTest.java @@ -4,7 +4,7 @@ import static org.testng.AssertJUnit.assertTrue; import com.linkedin.gms.factory.config.ConfigurationProvider; -import com.linkedin.metadata.kafka.MetadataChangeLogProcessor; +import com.linkedin.metadata.kafka.MCLKafkaListenerRegistrar; import com.linkedin.metadata.kafka.hook.UpdateIndicesHook; import com.linkedin.metadata.kafka.hook.event.EntityChangeEventGeneratorHook; import com.linkedin.metadata.kafka.hook.incident.IncidentsSummaryHook; @@ -33,23 +33,23 @@ public class MCLMAESpringTest extends AbstractTestNGSpringContextTests { @Test public void testHooks() { - MetadataChangeLogProcessor metadataChangeLogProcessor = - applicationContext.getBean(MetadataChangeLogProcessor.class); + MCLKafkaListenerRegistrar registrar = + 
applicationContext.getBean(MCLKafkaListenerRegistrar.class); assertTrue( - metadataChangeLogProcessor.getHooks().stream() + registrar.getMetadataChangeLogHooks().stream() .noneMatch(hook -> hook instanceof IngestionSchedulerHook)); assertTrue( - metadataChangeLogProcessor.getHooks().stream() + registrar.getMetadataChangeLogHooks().stream() .anyMatch(hook -> hook instanceof UpdateIndicesHook)); assertTrue( - metadataChangeLogProcessor.getHooks().stream() + registrar.getMetadataChangeLogHooks().stream() .anyMatch(hook -> hook instanceof SiblingAssociationHook)); assertTrue( - metadataChangeLogProcessor.getHooks().stream() + registrar.getMetadataChangeLogHooks().stream() .anyMatch(hook -> hook instanceof EntityChangeEventGeneratorHook)); assertEquals( 1, - metadataChangeLogProcessor.getHooks().stream() + registrar.getMetadataChangeLogHooks().stream() .filter(hook -> hook instanceof IncidentsSummaryHook) .count()); } diff --git a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLSpringCommonTestConfiguration.java b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLSpringCommonTestConfiguration.java index f6f71a12a6951..68768051eccad 100644 --- a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLSpringCommonTestConfiguration.java +++ b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLSpringCommonTestConfiguration.java @@ -34,10 +34,13 @@ @ComponentScan( basePackages = { "com.linkedin.metadata.kafka", - "com.linkedin.gms.factory.kafka.common", - "com.linkedin.gms.factory.kafka.schemaregistry", + "com.linkedin.gms.factory.kafka", "com.linkedin.gms.factory.entity.update.indices", - "com.linkedin.gms.factory.timeline.eventgenerator" + "com.linkedin.gms.factory.timeline.eventgenerator", + "com.linkedin.metadata.dao.producer", + "com.linkedin.gms.factory.change", + "com.datahub.event.hook", + "com.linkedin.gms.factory.notifications" }) 
public class MCLSpringCommonTestConfiguration { diff --git a/metadata-jobs/pe-consumer/src/main/java/com/datahub/event/PlatformEventProcessor.java b/metadata-jobs/pe-consumer/src/main/java/com/datahub/event/PlatformEventProcessor.java index c4116b314254c..358a2ac0c2ee3 100644 --- a/metadata-jobs/pe-consumer/src/main/java/com/datahub/event/PlatformEventProcessor.java +++ b/metadata-jobs/pe-consumer/src/main/java/com/datahub/event/PlatformEventProcessor.java @@ -3,9 +3,7 @@ import com.codahale.metrics.Histogram; import com.codahale.metrics.MetricRegistry; import com.codahale.metrics.Timer; -import com.datahub.event.hook.BusinessAttributeUpdateHook; import com.datahub.event.hook.PlatformEventHook; -import com.linkedin.gms.factory.kafka.KafkaEventConsumerFactory; import com.linkedin.metadata.EventUtils; import com.linkedin.metadata.utils.metrics.MetricUtils; import com.linkedin.mxe.PlatformEvent; @@ -21,7 +19,6 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.context.annotation.Conditional; -import org.springframework.context.annotation.Import; import org.springframework.kafka.annotation.EnableKafka; import org.springframework.kafka.annotation.KafkaListener; import org.springframework.stereotype.Component; @@ -29,7 +26,6 @@ @Slf4j @Component @Conditional(PlatformEventProcessorCondition.class) -@Import({BusinessAttributeUpdateHook.class, KafkaEventConsumerFactory.class}) @EnableKafka public class PlatformEventProcessor { @@ -49,6 +45,11 @@ public PlatformEventProcessor( platformEventHooks.stream() .filter(PlatformEventHook::isEnabled) .collect(Collectors.toList()); + log.info( + "Enabled platform hooks: {}", + this.hooks.stream() + .map(hook -> hook.getClass().getSimpleName()) + .collect(Collectors.toList())); this.hooks.forEach(PlatformEventHook::init); } diff --git a/metadata-service/configuration/src/main/resources/application.yaml 
b/metadata-service/configuration/src/main/resources/application.yaml index 2514060ff2d61..5b3673ddca52c 100644 --- a/metadata-service/configuration/src/main/resources/application.yaml +++ b/metadata-service/configuration/src/main/resources/application.yaml @@ -296,10 +296,18 @@ metadataTests: siblings: enabled: ${ENABLE_SIBLING_HOOK:true} # enable to turn on automatic sibling associations for dbt + consumerGroupSuffix: ${SIBLINGS_HOOK_CONSUMER_GROUP_SUFFIX:} updateIndices: enabled: ${ENABLE_UPDATE_INDICES_HOOK:true} + consumerGroupSuffix: ${UPDATE_INDICES_CONSUMER_GROUP_SUFFIX:} ingestionScheduler: enabled: ${ENABLE_INGESTION_SCHEDULER_HOOK:true} # enable to execute ingestion scheduling + consumerGroupSuffix: ${INGESTION_SCHEDULER_HOOK_CONSUMER_GROUP_SUFFIX:} +incidents: + hook: + enabled: ${ENABLE_INCIDENTS_HOOK:true} + maxIncidentHistory: ${MAX_INCIDENT_HISTORY:100} + consumerGroupSuffix: ${INCIDENTS_HOOK_CONSUMER_GROUP_SUFFIX:} bootstrap: upgradeDefaultBrowsePaths: @@ -376,6 +384,7 @@ featureFlags: entityChangeEvents: enabled: ${ENABLE_ENTITY_CHANGE_EVENTS_HOOK:true} + consumerGroupSuffix: ${ECE_CONSUMER_GROUP_SUFFIX:} views: enabled: ${VIEWS_ENABLED:true} @@ -460,6 +469,7 @@ springdoc.api-docs.groups.enabled: true forms: hook: enabled: { $FORMS_HOOK_ENABLED:true } + consumerGroupSuffix: ${FORMS_HOOK_CONSUMER_GROUP_SUFFIX:} businessAttribute: fetchRelatedEntitiesCount: ${BUSINESS_ATTRIBUTE_RELATED_ENTITIES_COUNT:20000} diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/KafkaEventConsumerFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/KafkaEventConsumerFactory.java index 9501b03482d04..aecb4f0afb12c 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/KafkaEventConsumerFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/KafkaEventConsumerFactory.java @@ -96,7 +96,7 @@ private static Map buildCustomizedProperties( } 
@Bean(name = "kafkaEventConsumer") - protected KafkaListenerContainerFactory createInstance( + protected KafkaListenerContainerFactory kafkaEventConsumer( @Qualifier("kafkaConsumerFactory") DefaultKafkaConsumerFactory kafkaConsumerFactory, @Qualifier("configurationProvider") ConfigurationProvider configurationProvider) { diff --git a/metadata-service/war/src/main/java/com/linkedin/gms/CommonApplicationConfig.java b/metadata-service/war/src/main/java/com/linkedin/gms/CommonApplicationConfig.java index bc623c3cc983c..e47a2b4e278e4 100644 --- a/metadata-service/war/src/main/java/com/linkedin/gms/CommonApplicationConfig.java +++ b/metadata-service/war/src/main/java/com/linkedin/gms/CommonApplicationConfig.java @@ -37,7 +37,10 @@ "com.linkedin.gms.factory.search", "com.linkedin.gms.factory.secret", "com.linkedin.gms.factory.timeseries", - "com.linkedin.gms.factory.plugins" + "com.linkedin.gms.factory.plugins", + "com.linkedin.gms.factory.change", + "com.datahub.event.hook", + "com.linkedin.gms.factory.notifications" }) @PropertySource(value = "classpath:/application.yaml", factory = YamlPropertySourceFactory.class) @Configuration From 573c1cb8407c2a5d152e5abb6b7d9f012eea75cb Mon Sep 17 00:00:00 2001 From: David Leifker Date: Fri, 9 Aug 2024 12:38:44 -0500 Subject: [PATCH 18/72] fix(openapi): fix openapi v2 endpoints & v3 documentation update --- docs/api/tutorials/structured-properties.md | 204 ++++++++++-------- .../controller/GenericEntitiesController.java | 23 +- .../v2/controller/EntityController.java | 25 +++ .../v3/controller/EntityController.java | 27 +++ 4 files changed, 169 insertions(+), 110 deletions(-) diff --git a/docs/api/tutorials/structured-properties.md b/docs/api/tutorials/structured-properties.md index 6f6c6541554d9..00e992f2bd0bb 100644 --- a/docs/api/tutorials/structured-properties.md +++ b/docs/api/tutorials/structured-properties.md @@ -158,29 +158,37 @@ curl -X 'POST' -v \ -H 'accept: application/json' \ -H 'Content-Type: application/json' \ -d '{ 
+ "value": { "qualifiedName": "io.acryl.privacy.retentionTime", - "valueType": "urn:li:dataType:datahub.number", - "description": "Retention Time is used to figure out how long to retain records in a dataset", - "displayName": "Retention Time", - "cardinality": "MULTIPLE", - "entityTypes": [ - "urn:li:entityType:datahub.dataset", - "urn:li:entityType:datahub.dataFlow" - ], - "allowedValues": [ - { - "value": {"double": 30}, - "description": "30 days, usually reserved for datasets that are ephemeral and contain pii" - }, - { - "value": {"double": 60}, - "description": "Use this for datasets that drive monthly reporting but contain pii" - }, - { - "value": {"double": 365}, - "description": "Use this for non-sensitive data that can be retained for longer" - } - ] + "valueType": "urn:li:dataType:datahub.number", + "description": "Retention Time is used to figure out how long to retain records in a dataset", + "displayName": "Retention Time", + "cardinality": "MULTIPLE", + "entityTypes": [ + "urn:li:entityType:datahub.dataset", + "urn:li:entityType:datahub.dataFlow" + ], + "allowedValues": [ + { + "value": { + "double": 30 + }, + "description": "30 days, usually reserved for datasets that are ephemeral and contain pii" + }, + { + "value": { + "double": 60 + }, + "description": "Use this for datasets that drive monthly reporting but contain pii" + }, + { + "value": { + "double": 365 + }, + "description": "Use this for non-sensitive data that can be retained for longer" + } + ] + } }' | jq ``` @@ -474,14 +482,16 @@ curl -X 'POST' -v \ -H 'accept: application/json' \ -H 'Content-Type: application/json' \ -d '{ - "properties": [ - { - "propertyUrn": "urn:li:structuredProperty:io.acryl.privacy.retentionTime", - "values": [ - {"double": 60.0} - ] - } - ] + "value": { + "properties": [ + { + "propertyUrn": "urn:li:structuredProperty:io.acryl.privacy.retentionTime", + "values": [ + {"double": 60.0} + ] + } + ] + } }' | jq ``` Example Response: @@ -627,23 +637,25 @@ curl -X 
'POST' -v \ -H 'accept: application/json' \ -H 'Content-Type: application/json' \ -d '{ - "qualifiedName": "io.acryl.privacy.retentionTime02", - "displayName": "Retention Time 02", - "valueType": "urn:li:dataType:datahub.string", - "allowedValues": [ - { - "value": {"string": "foo2"}, - "description": "test foo2 value" - }, - { - "value": {"string": "bar2"}, - "description": "test bar2 value" - } - ], - "cardinality": "SINGLE", - "entityTypes": [ - "urn:li:entityType:datahub.dataset" - ] + "value": { + "qualifiedName": "io.acryl.privacy.retentionTime02", + "displayName": "Retention Time 02", + "valueType": "urn:li:dataType:datahub.string", + "allowedValues": [ + { + "value": {"string": "foo2"}, + "description": "test foo2 value" + }, + { + "value": {"string": "bar2"}, + "description": "test bar2 value" + } + ], + "cardinality": "SINGLE", + "entityTypes": [ + "urn:li:entityType:datahub.dataset" + ] + } }' | jq ``` @@ -686,24 +698,26 @@ Specically, this will set `io.acryl.privacy.retentionTime` as `60.0` and `io.acr ```shell curl -X 'POST' -v \ - 'http://localhost:8080/openapi/v3/entity/dataset/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Ahive%2CSampleHiveDataset%2CPROD%29/structuredProperties' \ + 'http://localhost:8080/openapi/v3/entity/dataset/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Ahive%2CSampleHiveDataset%2CPROD%29/structuredProperties?createIfNotExists=false' \ -H 'accept: application/json' \ -H 'Content-Type: application/json' \ -d '{ - "properties": [ - { - "propertyUrn": "urn:li:structuredProperty:io.acryl.privacy.retentionTime", - "values": [ - {"double": 60.0} - ] - }, - { - "propertyUrn": "urn:li:structuredProperty:io.acryl.privacy.retentionTime02", - "values": [ - {"string": "bar2"} - ] - } - ] + "value": { + "properties": [ + { + "propertyUrn": "urn:li:structuredProperty:io.acryl.privacy.retentionTime", + "values": [ + {"double": 60.0} + ] + }, + { + "propertyUrn": "urn:li:structuredProperty:io.acryl.privacy.retentionTime02", + "values": 
[ + {"string": "bar2"} + ] + } + ] + } }' | jq ``` @@ -1111,7 +1125,9 @@ curl -X 'POST' \ -H 'accept: application/json' \ -H 'Content-Type: application/json' \ -d '{ -"removed": true + "value": { + "removed": true + } }' | jq ``` @@ -1132,11 +1148,13 @@ If you want to **remove the soft delete**, you can do so by either hard deleting ```shell curl -X 'POST' \ - 'http://localhost:8080/openapi/v3/entity/structuredProperty/urn%3Ali%3AstructuredProperty%3Aio.acryl.privacy.retentionTime/status?systemMetadata=false' \ + 'http://localhost:8080/openapi/v3/entity/structuredProperty/urn%3Ali%3AstructuredProperty%3Aio.acryl.privacy.retentionTime/status?systemMetadata=false&createIfNotExists=false' \ -H 'accept: application/json' \ -H 'Content-Type: application/json' \ -d '{ -"removed": false + "value": { + "removed": true + } }' | jq ``` @@ -1271,34 +1289,42 @@ Change the cardinality to `SINGLE` and add a `version`. ```shell curl -X 'POST' -v \ - 'http://localhost:8080/openapi/v3/entity/structuredProperty/urn%3Ali%3AstructuredProperty%3Aio.acryl.privacy.retentionTime/propertyDefinition' \ + 'http://localhost:8080/openapi/v3/entity/structuredProperty/urn%3Ali%3AstructuredProperty%3Aio.acryl.privacy.retentionTime/propertyDefinition?createIfNotExists=false' \ -H 'accept: application/json' \ -H 'Content-Type: application/json' \ -d '{ + "value": { "qualifiedName": "io.acryl.privacy.retentionTime", - "valueType": "urn:li:dataType:datahub.number", - "description": "Retention Time is used to figure out how long to retain records in a dataset", - "displayName": "Retention Time", - "cardinality": "SINGLE", - "version": "20240614080000", - "entityTypes": [ - "urn:li:entityType:datahub.dataset", - "urn:li:entityType:datahub.dataFlow" - ], - "allowedValues": [ - { - "value": {"double": 30}, - "description": "30 days, usually reserved for datasets that are ephemeral and contain pii" - }, - { - "value": {"double": 60}, - "description": "Use this for datasets that drive monthly reporting but 
contain pii" - }, - { - "value": {"double": 365}, - "description": "Use this for non-sensitive data that can be retained for longer" - } - ] + "valueType": "urn:li:dataType:datahub.number", + "description": "Retention Time is used to figure out how long to retain records in a dataset", + "displayName": "Retention Time", + "cardinality": "SINGLE", + "version": "20240614080000", + "entityTypes": [ + "urn:li:entityType:datahub.dataset", + "urn:li:entityType:datahub.dataFlow" + ], + "allowedValues": [ + { + "value": { + "double": 30 + }, + "description": "30 days, usually reserved for datasets that are ephemeral and contain pii" + }, + { + "value": { + "double": 60 + }, + "description": "Use this for datasets that drive monthly reporting but contain pii" + }, + { + "value": { + "double": 365 + }, + "description": "Use this for non-sensitive data that can be retained for longer" + } + ] + } }' | jq ``` diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/controller/GenericEntitiesController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/controller/GenericEntitiesController.java index de5d2ae1118d4..f415a4f47c9dc 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/controller/GenericEntitiesController.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/controller/GenericEntitiesController.java @@ -13,14 +13,11 @@ import com.datahub.authorization.AuthorizerChain; import com.datahub.util.RecordUtils; import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; -import com.linkedin.data.ByteString; import com.linkedin.data.template.RecordTemplate; import com.linkedin.entity.EnvelopedAspect; -import com.linkedin.events.metadata.ChangeType; import 
com.linkedin.metadata.aspect.AspectRetriever; import com.linkedin.metadata.aspect.batch.AspectsBatch; import com.linkedin.metadata.aspect.batch.ChangeMCP; @@ -41,7 +38,6 @@ import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchService; import com.linkedin.metadata.utils.AuditStampUtils; -import com.linkedin.metadata.utils.GenericRecordUtils; import com.linkedin.metadata.utils.SearchUtil; import com.linkedin.mxe.SystemMetadata; import com.linkedin.util.Pair; @@ -57,7 +53,6 @@ import jakarta.servlet.http.HttpServletRequest; import java.lang.reflect.InvocationTargetException; import java.net.URISyntaxException; -import java.nio.charset.StandardCharsets; import java.util.*; import java.util.stream.Collectors; import javax.annotation.Nonnull; @@ -726,28 +721,14 @@ protected RecordTemplate toRecordTemplate( aspectSpec.getDataTemplateClass(), envelopedAspect.getValue().data()); } - protected ChangeMCP toUpsertItem( + protected abstract ChangeMCP toUpsertItem( @Nonnull AspectRetriever aspectRetriever, Urn entityUrn, AspectSpec aspectSpec, Boolean createIfNotExists, String jsonAspect, Actor actor) - throws JsonProcessingException { - JsonNode jsonNode = objectMapper.readTree(jsonAspect); - String aspectJson = jsonNode.get("value").toString(); - return ChangeItemImpl.builder() - .urn(entityUrn) - .aspectName(aspectSpec.getName()) - .changeType(Boolean.TRUE.equals(createIfNotExists) ? 
ChangeType.CREATE : ChangeType.UPSERT) - .auditStamp(AuditStampUtils.createAuditStamp(actor.toUrnStr())) - .recordTemplate( - GenericRecordUtils.deserializeAspect( - ByteString.copyString(aspectJson, StandardCharsets.UTF_8), - GenericRecordUtils.JSON, - aspectSpec)) - .build(aspectRetriever); - } + throws URISyntaxException, JsonProcessingException; protected ChangeMCP toUpsertItem( @Nonnull AspectRetriever aspectRetriever, diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/EntityController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/EntityController.java index 54a7724cadd34..1207eb331b795 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/EntityController.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/EntityController.java @@ -13,8 +13,11 @@ import com.linkedin.data.ByteString; import com.linkedin.data.template.RecordTemplate; import com.linkedin.entity.EnvelopedAspect; +import com.linkedin.events.metadata.ChangeType; +import com.linkedin.metadata.aspect.AspectRetriever; import com.linkedin.metadata.aspect.batch.AspectsBatch; import com.linkedin.metadata.aspect.batch.BatchItem; +import com.linkedin.metadata.aspect.batch.ChangeMCP; import com.linkedin.metadata.entity.EntityApiUtils; import com.linkedin.metadata.entity.IngestResult; import com.linkedin.metadata.entity.UpdateAspectResult; @@ -260,4 +263,26 @@ protected List buildEntityList( } return responseList; } + + @Override + protected ChangeMCP toUpsertItem( + @Nonnull AspectRetriever aspectRetriever, + Urn entityUrn, + AspectSpec aspectSpec, + Boolean createIfNotExists, + String jsonAspect, + Actor actor) + throws URISyntaxException { + return ChangeItemImpl.builder() + .urn(entityUrn) + .aspectName(aspectSpec.getName()) + .changeType(Boolean.TRUE.equals(createIfNotExists) ? 
ChangeType.CREATE : ChangeType.UPSERT) + .auditStamp(AuditStampUtils.createAuditStamp(actor.toUrnStr())) + .recordTemplate( + GenericRecordUtils.deserializeAspect( + ByteString.copyString(jsonAspect, StandardCharsets.UTF_8), + GenericRecordUtils.JSON, + aspectSpec)) + .build(aspectRetriever); + } } diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v3/controller/EntityController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v3/controller/EntityController.java index a0478c9af1609..fbc9bf2956cfd 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v3/controller/EntityController.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v3/controller/EntityController.java @@ -14,8 +14,11 @@ import com.linkedin.common.urn.Urn; import com.linkedin.data.ByteString; import com.linkedin.entity.EnvelopedAspect; +import com.linkedin.events.metadata.ChangeType; +import com.linkedin.metadata.aspect.AspectRetriever; import com.linkedin.metadata.aspect.batch.AspectsBatch; import com.linkedin.metadata.aspect.batch.BatchItem; +import com.linkedin.metadata.aspect.batch.ChangeMCP; import com.linkedin.metadata.entity.EntityApiUtils; import com.linkedin.metadata.entity.IngestResult; import com.linkedin.metadata.entity.UpdateAspectResult; @@ -348,4 +351,28 @@ protected AspectsBatch toMCPBatch( .retrieverContext(opContext.getRetrieverContext().get()) .build(); } + + @Override + protected ChangeMCP toUpsertItem( + @Nonnull AspectRetriever aspectRetriever, + Urn entityUrn, + AspectSpec aspectSpec, + Boolean createIfNotExists, + String jsonAspect, + Actor actor) + throws JsonProcessingException { + JsonNode jsonNode = objectMapper.readTree(jsonAspect); + String aspectJson = jsonNode.get("value").toString(); + return ChangeItemImpl.builder() + .urn(entityUrn) + .aspectName(aspectSpec.getName()) + .changeType(Boolean.TRUE.equals(createIfNotExists) ? 
ChangeType.CREATE : ChangeType.UPSERT) + .auditStamp(AuditStampUtils.createAuditStamp(actor.toUrnStr())) + .recordTemplate( + GenericRecordUtils.deserializeAspect( + ByteString.copyString(aspectJson, StandardCharsets.UTF_8), + GenericRecordUtils.JSON, + aspectSpec)) + .build(aspectRetriever); + } } From 469654ced75c3340276028068a6ca201eadc0cdf Mon Sep 17 00:00:00 2001 From: David Leifker Date: Fri, 9 Aug 2024 12:40:34 -0500 Subject: [PATCH 19/72] Revert "fix(openapi): fix openapi v2 endpoints & v3 documentation update" This reverts commit 573c1cb8407c2a5d152e5abb6b7d9f012eea75cb. --- docs/api/tutorials/structured-properties.md | 204 ++++++++---------- .../controller/GenericEntitiesController.java | 23 +- .../v2/controller/EntityController.java | 25 --- .../v3/controller/EntityController.java | 27 --- 4 files changed, 110 insertions(+), 169 deletions(-) diff --git a/docs/api/tutorials/structured-properties.md b/docs/api/tutorials/structured-properties.md index 00e992f2bd0bb..6f6c6541554d9 100644 --- a/docs/api/tutorials/structured-properties.md +++ b/docs/api/tutorials/structured-properties.md @@ -158,37 +158,29 @@ curl -X 'POST' -v \ -H 'accept: application/json' \ -H 'Content-Type: application/json' \ -d '{ - "value": { "qualifiedName": "io.acryl.privacy.retentionTime", - "valueType": "urn:li:dataType:datahub.number", - "description": "Retention Time is used to figure out how long to retain records in a dataset", - "displayName": "Retention Time", - "cardinality": "MULTIPLE", - "entityTypes": [ - "urn:li:entityType:datahub.dataset", - "urn:li:entityType:datahub.dataFlow" - ], - "allowedValues": [ - { - "value": { - "double": 30 - }, - "description": "30 days, usually reserved for datasets that are ephemeral and contain pii" - }, - { - "value": { - "double": 60 - }, - "description": "Use this for datasets that drive monthly reporting but contain pii" - }, - { - "value": { - "double": 365 - }, - "description": "Use this for non-sensitive data that can be retained 
for longer" - } - ] - } + "valueType": "urn:li:dataType:datahub.number", + "description": "Retention Time is used to figure out how long to retain records in a dataset", + "displayName": "Retention Time", + "cardinality": "MULTIPLE", + "entityTypes": [ + "urn:li:entityType:datahub.dataset", + "urn:li:entityType:datahub.dataFlow" + ], + "allowedValues": [ + { + "value": {"double": 30}, + "description": "30 days, usually reserved for datasets that are ephemeral and contain pii" + }, + { + "value": {"double": 60}, + "description": "Use this for datasets that drive monthly reporting but contain pii" + }, + { + "value": {"double": 365}, + "description": "Use this for non-sensitive data that can be retained for longer" + } + ] }' | jq ``` @@ -482,16 +474,14 @@ curl -X 'POST' -v \ -H 'accept: application/json' \ -H 'Content-Type: application/json' \ -d '{ - "value": { - "properties": [ - { - "propertyUrn": "urn:li:structuredProperty:io.acryl.privacy.retentionTime", - "values": [ - {"double": 60.0} - ] - } - ] - } + "properties": [ + { + "propertyUrn": "urn:li:structuredProperty:io.acryl.privacy.retentionTime", + "values": [ + {"double": 60.0} + ] + } + ] }' | jq ``` Example Response: @@ -637,25 +627,23 @@ curl -X 'POST' -v \ -H 'accept: application/json' \ -H 'Content-Type: application/json' \ -d '{ - "value": { - "qualifiedName": "io.acryl.privacy.retentionTime02", - "displayName": "Retention Time 02", - "valueType": "urn:li:dataType:datahub.string", - "allowedValues": [ - { - "value": {"string": "foo2"}, - "description": "test foo2 value" - }, - { - "value": {"string": "bar2"}, - "description": "test bar2 value" - } - ], - "cardinality": "SINGLE", - "entityTypes": [ - "urn:li:entityType:datahub.dataset" - ] - } + "qualifiedName": "io.acryl.privacy.retentionTime02", + "displayName": "Retention Time 02", + "valueType": "urn:li:dataType:datahub.string", + "allowedValues": [ + { + "value": {"string": "foo2"}, + "description": "test foo2 value" + }, + { + "value": {"string": 
"bar2"}, + "description": "test bar2 value" + } + ], + "cardinality": "SINGLE", + "entityTypes": [ + "urn:li:entityType:datahub.dataset" + ] }' | jq ``` @@ -698,26 +686,24 @@ Specically, this will set `io.acryl.privacy.retentionTime` as `60.0` and `io.acr ```shell curl -X 'POST' -v \ - 'http://localhost:8080/openapi/v3/entity/dataset/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Ahive%2CSampleHiveDataset%2CPROD%29/structuredProperties?createIfNotExists=false' \ + 'http://localhost:8080/openapi/v3/entity/dataset/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Ahive%2CSampleHiveDataset%2CPROD%29/structuredProperties' \ -H 'accept: application/json' \ -H 'Content-Type: application/json' \ -d '{ - "value": { - "properties": [ - { - "propertyUrn": "urn:li:structuredProperty:io.acryl.privacy.retentionTime", - "values": [ - {"double": 60.0} - ] - }, - { - "propertyUrn": "urn:li:structuredProperty:io.acryl.privacy.retentionTime02", - "values": [ - {"string": "bar2"} - ] - } - ] - } + "properties": [ + { + "propertyUrn": "urn:li:structuredProperty:io.acryl.privacy.retentionTime", + "values": [ + {"double": 60.0} + ] + }, + { + "propertyUrn": "urn:li:structuredProperty:io.acryl.privacy.retentionTime02", + "values": [ + {"string": "bar2"} + ] + } + ] }' | jq ``` @@ -1125,9 +1111,7 @@ curl -X 'POST' \ -H 'accept: application/json' \ -H 'Content-Type: application/json' \ -d '{ - "value": { - "removed": true - } +"removed": true }' | jq ``` @@ -1148,13 +1132,11 @@ If you want to **remove the soft delete**, you can do so by either hard deleting ```shell curl -X 'POST' \ - 'http://localhost:8080/openapi/v3/entity/structuredProperty/urn%3Ali%3AstructuredProperty%3Aio.acryl.privacy.retentionTime/status?systemMetadata=false&createIfNotExists=false' \ + 'http://localhost:8080/openapi/v3/entity/structuredProperty/urn%3Ali%3AstructuredProperty%3Aio.acryl.privacy.retentionTime/status?systemMetadata=false' \ -H 'accept: application/json' \ -H 'Content-Type: application/json' \ -d '{ - 
"value": { - "removed": true - } +"removed": false }' | jq ``` @@ -1289,42 +1271,34 @@ Change the cardinality to `SINGLE` and add a `version`. ```shell curl -X 'POST' -v \ - 'http://localhost:8080/openapi/v3/entity/structuredProperty/urn%3Ali%3AstructuredProperty%3Aio.acryl.privacy.retentionTime/propertyDefinition?createIfNotExists=false' \ + 'http://localhost:8080/openapi/v3/entity/structuredProperty/urn%3Ali%3AstructuredProperty%3Aio.acryl.privacy.retentionTime/propertyDefinition' \ -H 'accept: application/json' \ -H 'Content-Type: application/json' \ -d '{ - "value": { "qualifiedName": "io.acryl.privacy.retentionTime", - "valueType": "urn:li:dataType:datahub.number", - "description": "Retention Time is used to figure out how long to retain records in a dataset", - "displayName": "Retention Time", - "cardinality": "SINGLE", - "version": "20240614080000", - "entityTypes": [ - "urn:li:entityType:datahub.dataset", - "urn:li:entityType:datahub.dataFlow" - ], - "allowedValues": [ - { - "value": { - "double": 30 - }, - "description": "30 days, usually reserved for datasets that are ephemeral and contain pii" - }, - { - "value": { - "double": 60 - }, - "description": "Use this for datasets that drive monthly reporting but contain pii" - }, - { - "value": { - "double": 365 - }, - "description": "Use this for non-sensitive data that can be retained for longer" - } - ] - } + "valueType": "urn:li:dataType:datahub.number", + "description": "Retention Time is used to figure out how long to retain records in a dataset", + "displayName": "Retention Time", + "cardinality": "SINGLE", + "version": "20240614080000", + "entityTypes": [ + "urn:li:entityType:datahub.dataset", + "urn:li:entityType:datahub.dataFlow" + ], + "allowedValues": [ + { + "value": {"double": 30}, + "description": "30 days, usually reserved for datasets that are ephemeral and contain pii" + }, + { + "value": {"double": 60}, + "description": "Use this for datasets that drive monthly reporting but contain pii" + 
}, + { + "value": {"double": 365}, + "description": "Use this for non-sensitive data that can be retained for longer" + } + ] }' | jq ``` diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/controller/GenericEntitiesController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/controller/GenericEntitiesController.java index f415a4f47c9dc..de5d2ae1118d4 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/controller/GenericEntitiesController.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/controller/GenericEntitiesController.java @@ -13,11 +13,14 @@ import com.datahub.authorization.AuthorizerChain; import com.datahub.util.RecordUtils; import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; +import com.linkedin.data.ByteString; import com.linkedin.data.template.RecordTemplate; import com.linkedin.entity.EnvelopedAspect; +import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.aspect.AspectRetriever; import com.linkedin.metadata.aspect.batch.AspectsBatch; import com.linkedin.metadata.aspect.batch.ChangeMCP; @@ -38,6 +41,7 @@ import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchService; import com.linkedin.metadata.utils.AuditStampUtils; +import com.linkedin.metadata.utils.GenericRecordUtils; import com.linkedin.metadata.utils.SearchUtil; import com.linkedin.mxe.SystemMetadata; import com.linkedin.util.Pair; @@ -53,6 +57,7 @@ import jakarta.servlet.http.HttpServletRequest; import java.lang.reflect.InvocationTargetException; import java.net.URISyntaxException; +import java.nio.charset.StandardCharsets; import java.util.*; import java.util.stream.Collectors; import javax.annotation.Nonnull; @@ 
-721,14 +726,28 @@ protected RecordTemplate toRecordTemplate( aspectSpec.getDataTemplateClass(), envelopedAspect.getValue().data()); } - protected abstract ChangeMCP toUpsertItem( + protected ChangeMCP toUpsertItem( @Nonnull AspectRetriever aspectRetriever, Urn entityUrn, AspectSpec aspectSpec, Boolean createIfNotExists, String jsonAspect, Actor actor) - throws URISyntaxException, JsonProcessingException; + throws JsonProcessingException { + JsonNode jsonNode = objectMapper.readTree(jsonAspect); + String aspectJson = jsonNode.get("value").toString(); + return ChangeItemImpl.builder() + .urn(entityUrn) + .aspectName(aspectSpec.getName()) + .changeType(Boolean.TRUE.equals(createIfNotExists) ? ChangeType.CREATE : ChangeType.UPSERT) + .auditStamp(AuditStampUtils.createAuditStamp(actor.toUrnStr())) + .recordTemplate( + GenericRecordUtils.deserializeAspect( + ByteString.copyString(aspectJson, StandardCharsets.UTF_8), + GenericRecordUtils.JSON, + aspectSpec)) + .build(aspectRetriever); + } protected ChangeMCP toUpsertItem( @Nonnull AspectRetriever aspectRetriever, diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/EntityController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/EntityController.java index 1207eb331b795..54a7724cadd34 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/EntityController.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/EntityController.java @@ -13,11 +13,8 @@ import com.linkedin.data.ByteString; import com.linkedin.data.template.RecordTemplate; import com.linkedin.entity.EnvelopedAspect; -import com.linkedin.events.metadata.ChangeType; -import com.linkedin.metadata.aspect.AspectRetriever; import com.linkedin.metadata.aspect.batch.AspectsBatch; import com.linkedin.metadata.aspect.batch.BatchItem; -import com.linkedin.metadata.aspect.batch.ChangeMCP; 
import com.linkedin.metadata.entity.EntityApiUtils; import com.linkedin.metadata.entity.IngestResult; import com.linkedin.metadata.entity.UpdateAspectResult; @@ -263,26 +260,4 @@ protected List buildEntityList( } return responseList; } - - @Override - protected ChangeMCP toUpsertItem( - @Nonnull AspectRetriever aspectRetriever, - Urn entityUrn, - AspectSpec aspectSpec, - Boolean createIfNotExists, - String jsonAspect, - Actor actor) - throws URISyntaxException { - return ChangeItemImpl.builder() - .urn(entityUrn) - .aspectName(aspectSpec.getName()) - .changeType(Boolean.TRUE.equals(createIfNotExists) ? ChangeType.CREATE : ChangeType.UPSERT) - .auditStamp(AuditStampUtils.createAuditStamp(actor.toUrnStr())) - .recordTemplate( - GenericRecordUtils.deserializeAspect( - ByteString.copyString(jsonAspect, StandardCharsets.UTF_8), - GenericRecordUtils.JSON, - aspectSpec)) - .build(aspectRetriever); - } } diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v3/controller/EntityController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v3/controller/EntityController.java index fbc9bf2956cfd..a0478c9af1609 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v3/controller/EntityController.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v3/controller/EntityController.java @@ -14,11 +14,8 @@ import com.linkedin.common.urn.Urn; import com.linkedin.data.ByteString; import com.linkedin.entity.EnvelopedAspect; -import com.linkedin.events.metadata.ChangeType; -import com.linkedin.metadata.aspect.AspectRetriever; import com.linkedin.metadata.aspect.batch.AspectsBatch; import com.linkedin.metadata.aspect.batch.BatchItem; -import com.linkedin.metadata.aspect.batch.ChangeMCP; import com.linkedin.metadata.entity.EntityApiUtils; import com.linkedin.metadata.entity.IngestResult; import com.linkedin.metadata.entity.UpdateAspectResult; @@ -351,28 +348,4 @@ 
protected AspectsBatch toMCPBatch( .retrieverContext(opContext.getRetrieverContext().get()) .build(); } - - @Override - protected ChangeMCP toUpsertItem( - @Nonnull AspectRetriever aspectRetriever, - Urn entityUrn, - AspectSpec aspectSpec, - Boolean createIfNotExists, - String jsonAspect, - Actor actor) - throws JsonProcessingException { - JsonNode jsonNode = objectMapper.readTree(jsonAspect); - String aspectJson = jsonNode.get("value").toString(); - return ChangeItemImpl.builder() - .urn(entityUrn) - .aspectName(aspectSpec.getName()) - .changeType(Boolean.TRUE.equals(createIfNotExists) ? ChangeType.CREATE : ChangeType.UPSERT) - .auditStamp(AuditStampUtils.createAuditStamp(actor.toUrnStr())) - .recordTemplate( - GenericRecordUtils.deserializeAspect( - ByteString.copyString(aspectJson, StandardCharsets.UTF_8), - GenericRecordUtils.JSON, - aspectSpec)) - .build(aspectRetriever); - } } From 3dfbbd5094803e95bd3dbae703e3a57dc9cdd99b Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Fri, 9 Aug 2024 14:19:36 -0500 Subject: [PATCH 20/72] docs(policies): updates to policies documentation (#11073) --- docs/authorization/access-policies-guide.md | 26 +- docs/authorization/policies.md | 359 ++++++++++++-------- 2 files changed, 220 insertions(+), 165 deletions(-) diff --git a/docs/authorization/access-policies-guide.md b/docs/authorization/access-policies-guide.md index 2040d7ff79e99..0f741a95282bd 100644 --- a/docs/authorization/access-policies-guide.md +++ b/docs/authorization/access-policies-guide.md @@ -15,7 +15,9 @@ There are 2 types of Access Policy within DataHub:

-**Platform** Policies determine who has platform-level Privileges on DataHub. These include: +## Platform + +Policies determine who has platform-level Privileges on DataHub. These include: - Managing Users & Groups - Viewing the DataHub Analytics Page @@ -31,7 +33,9 @@ A few Platform Policies in plain English include: - The Data Platform team should be allowed to manage users & groups, view platform analytics, & manage policies themselves - John from IT should be able to invite new users -**Metadata** policies determine who can do what to which Metadata Entities. For example: +## Metadata + +Metadata policies determine who can do what to which Metadata Entities. For example: - Who can edit Dataset Documentation & Links? - Who can add Owners to a Chart? @@ -51,17 +55,14 @@ A few **Metadata** Policies in plain English include: Each of these can be implemented by constructing DataHub Access Policies. -## Access Policies Setup, Prerequisites, and Permissions - -What you need to manage Access Policies on DataHub: +## Using Access Policies +:::note Required Access * **Manage Policies** Privilege This Platform Privilege allows users to create, edit, and remove all Access Policies on DataHub. Therefore, it should only be given to those users who will be serving as Admins of the platform. The default `Admin` role has this Privilege. - - -## Using Access Policies +::: Policies can be created by first navigating to **Settings > Permissions > Policies**. @@ -270,10 +271,5 @@ Policies only affect REST APIs when the environment variable `REST_API_AUTHORIZA Policies are the lowest level primitive for granting Privileges to users on DataHub. Roles are built for convenience on top of Policies. Roles grant Privileges to actors indirectly, driven by Policies -behind the scenes. Both can be used in conjunction to grant Privileges to end users. - - - -### Related Features - -- [Roles](./roles.md) \ No newline at end of file +behind the scenes. 
Both can be used in conjunction to grant Privileges to end users. For more information on roles +please refer to [Authorization > Roles](./roles.md). diff --git a/docs/authorization/policies.md b/docs/authorization/policies.md index 91b0241c7d514..b393c8ffa3757 100644 --- a/docs/authorization/policies.md +++ b/docs/authorization/policies.md @@ -49,14 +49,23 @@ and so on. A Metadata Policy can be broken down into 3 parts: -1. **Actors**: The 'who'. Specific users, groups that the policy applies to. +1. **Resources**: The 'which'. Resources that the policy applies to, e.g. "All Datasets". 2. **Privileges**: The 'what'. What actions are being permitted by a policy, e.g. "Add Tags". -3. **Resources**: The 'which'. Resources that the policy applies to, e.g. "All Datasets". +3. **Actors**: The 'who'. Specific users, groups that the policy applies to. -#### Actors +#### Resources + +Resources can be associated with the policy in a number of ways. -We currently support 3 ways to define the set of actors the policy applies to: a) list of users b) list of groups, and -c) owners of the entity. You also have the option to apply the policy to all users or groups. +1. List of resource types - The entity's type for example: dataset, chart, dashboard +2. List of resource URNs +3. List of tags +4. List of domains + +:::note Important Note +The associations in the list above are an *intersection* or an _AND_ operation. For example, if the policy targets +`1. resource type: dataset` and `3. resources tagged: 'myTag'`, it will apply to datasets that are tagged with tag 'myTag'. +::: #### Privileges @@ -64,55 +73,162 @@ Check out the list of privileges [here](https://github.com/datahub-project/datahub/blob/master/metadata-utils/src/main/java/com/linkedin/metadata/authorization/PoliciesConfig.java) . Note, the privileges are semantic by nature, and does not tie in 1-to-1 with the aspect model. 
-All edits on the UI are covered by a privilege, to make sure we have the ability to restrict write access.
+All edits on the UI are covered by a privilege, to make sure we have the ability to restrict write access. See the
+[Reference](#Reference) section below.
+
+#### Actors
+
+We currently support 3 ways to define the set of actors the policy applies to:
+
+1. list of users (or all users)
+2. list of groups (or all groups)
+3. owners of the entity
+
+:::note Important Note
+Unlike resources, the definitions for actors are a union of the actors. For example, if user `1. Alice` is associated
+with the policy as well as `3. owners of the entity`, then Alice _OR_ any owner of
+the targeted resource(s) will be included in the policy.
+:::
+
+## Managing Policies
+
+Policies can be managed on the **Settings > Permissions > Policies** page. The `Policies` tab will only
+be visible to those users having the `Manage Policies` privilege.
 
-We currently support the following:
+Out of the box, DataHub is deployed with a set of pre-baked Policies. The set of default policies is created at deploy
+time and can be found inside the `policies.json` file within `metadata-service/war/src/main/resources/boot`. This set of policies serves the
+following purposes:
+
+1. Assigns immutable super-user privileges for the root `datahub` user account (Immutable)
+2. Assigns all Platform privileges for all Users by default (Editable)
+
+The reason for #1 is to prevent people from accidentally deleting all policies and getting locked out (`datahub` super user account can be a backup)
+The reason for #2 is to permit administrators to log in via OIDC or another means outside of the `datahub` root account
+when they are bootstrapping with DataHub. This way, those setting up DataHub can start managing policies without friction.
+Note that these privileges *can* and likely *should* be altered inside the **Policies** page of the UI.
+
+:::note Pro-Tip
+To log in using the `datahub` account, simply navigate to `/login` and enter `datahub`, `datahub`. Note that the password can be customized for your
+deployment by changing the `user.props` file within the `datahub-frontend` module. Notice that JaaS authentication must be enabled.
+:::
+
+## Configuration
+
+By default, the Policies feature is *enabled*. This means that the deployment will support creating, editing, removing, and
+most importantly enforcing fine-grained access policies.
+
+In some cases, these capabilities are not desirable. For example, if your company's users are already used to having free rein, you
+may want to keep it that way. Or perhaps it is only your Data Platform team who actively uses DataHub, in which case Policies may be overkill.
+
+For these scenarios, we've provided a back door to disable Policies in your deployment of DataHub. This will completely hide
+the policies management UI and by default will allow all actions on the platform. It will be as though
+each user has *all* privileges, both of the **Platform** & **Metadata** flavor.
+
+To disable Policies, you can simply set the `AUTH_POLICIES_ENABLED` environment variable for the `datahub-gms` service container
+to `false`. For example in your `docker/datahub-gms/docker.env`, you'd place
+
+```
+AUTH_POLICIES_ENABLED=false
+```
+
+### REST API Authorization
+
+Policies only affect REST APIs when the environment variable `REST_API_AUTHORIZATION` is set to `true` for GMS. Some policies only apply when this setting is enabled, marked above, and other Metadata and Platform policies apply to the APIs where relevant, also specified in the table above.
+
+## Reference
+
+For a complete list of privileges see the
+privileges [here](https://github.com/datahub-project/datahub/blob/master/metadata-utils/src/main/java/com/linkedin/metadata/authorization/PoliciesConfig.java).
+ +### Platform-level privileges -##### Platform-level privileges These privileges are for DataHub operators to access & manage the administrative functionality of the system. -| Platform Privileges | Description | -|-----------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| Generate Personal Access Tokens | Allow actor to generate personal access tokens for use with DataHub APIs. | -| Manage Domains | Allow actor to create and remove Asset Domains. | -| Manage Home Page Posts | Allow actor to create and delete home page posts | -| Manage Glossaries | Allow actor to create, edit, and remove Glossary Entities | -| Manage Tags | Allow actor to create and remove Tags. | -| Manage Business Attribute | Allow actor to create, update, delete Business Attribute | -| Manage Documentation Forms | Allow actor to manage forms assigned to assets to assist in documentation efforts. | -| Manage Policies | Allow actor to create and remove access control policies. Be careful - Actors with this privilege are effectively super users. | -| Manage Metadata Ingestion | Allow actor to create, remove, and update Metadata Ingestion sources. | -| Manage Secrets | Allow actor to create & remove Secrets stored inside DataHub. | -| Manage Users & Groups | Allow actor to create, remove, and update users and groups on DataHub. | -| View Analytics | Allow actor to view the DataHub analytics dashboard. | -| Manage All Access Tokens | Allow actor to create, list and revoke access tokens on behalf of users in DataHub. Be careful - Actors with this privilege are effectively super users that can impersonate other users. 
| -| Manage User Credentials | Allow actor to manage credentials for native DataHub users, including inviting new users and resetting passwords | -| Manage Public Views | Allow actor to create, update, and delete any Public (shared) Views. | -| Manage Ownership Types | Allow actor to create, update and delete Ownership Types. | -| Create Business Attribute | Allow actor to create new Business Attribute. | -| Manage Connections | Allow actor to manage connections to external DataHub platforms. | -| Restore Indices API[^1] | Allow actor to use the Restore Indices API. | -| Get Timeseries index sizes API[^1] | Allow actor to use the get Timeseries indices size API. | -| Truncate timeseries aspect index size API[^1] | Allow actor to use the API to truncate a timeseries index. | -| Get ES task status API[^1] | Allow actor to use the get task status API for an ElasticSearch task. | -| Enable/Disable Writeability API[^1] | Allow actor to enable or disable GMS writeability for data migrations. | -| Apply Retention API[^1] | Allow actor to apply retention using the API. | -| Analytics API access[^1] | Allow actor to use API read access to raw analytics data. | -| Manage Tests[^2] | Allow actor to create and remove Asset Tests. | -| View Metadata Proposals[^2] | Allow actor to view the requests tab for viewing metadata proposals. | -| Create metadata constraints[^2] | Allow actor to create metadata constraints. | -| Manage Platform Settings[^2] | Allow actor to view and change platform-level settings, like integrations & notifications. | -| Manage Monitors[^2] | Allow actor to create, update, and delete any data asset monitors, including Custom SQL monitors. Grant with care. 
| +#### Access & Credentials + +| Platform Privileges | Description | +|--------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| Generate Personal Access Tokens | Allow actor to generate personal access tokens for use with DataHub APIs. | +| Manage Policies | Allow actor to create and remove access control policies. Be careful - Actors with this privilege are effectively super users. | +| Manage Secrets | Allow actor to create & remove Secrets stored inside DataHub. | +| Manage Users & Groups | Allow actor to create, remove, and update users and groups on DataHub. | +| Manage All Access Tokens | Allow actor to create, list and revoke access tokens on behalf of users in DataHub. Be careful - Actors with this privilege are effectively super users that can impersonate other users. | +| Manage User Credentials | Allow actor to manage credentials for native DataHub users, including inviting new users and resetting passwords | | +| Manage Connections | Allow actor to manage connections to external DataHub platforms. | + +#### Product Features + +| Platform Privileges | Description | +|-------------------------------------|--------------------------------------------------------------------------------------------------------------------| +| Manage Home Page Posts | Allow actor to create and delete home page posts | +| Manage Business Attribute | Allow actor to create, update, delete Business Attribute | +| Manage Documentation Forms | Allow actor to manage forms assigned to assets to assist in documentation efforts. | +| Manage Metadata Ingestion | Allow actor to create, remove, and update Metadata Ingestion sources. | +| Manage Features | Umbrella privilege to manage all features. | +| View Analytics | Allow actor to view the DataHub analytics dashboard. 
| +| Manage Public Views | Allow actor to create, update, and delete any Public (shared) Views. | +| Manage Ownership Types | Allow actor to create, update and delete Ownership Types. | +| Create Business Attribute | Allow actor to create new Business Attribute. | +| Manage Structured Properties | Manage structured properties in your instance. | +| View Tests | View Asset Tests. | +| Manage Tests[^2] | Allow actor to create and remove Asset Tests. | +| View Metadata Proposals[^2] | Allow actor to view the requests tab for viewing metadata proposals. | +| Create metadata constraints[^2] | Allow actor to create metadata constraints. | +| Manage Platform Settings[^2] | Allow actor to view and change platform-level settings, like integrations & notifications. | +| Manage Monitors[^2] | Allow actor to create, update, and delete any data asset monitors, including Custom SQL monitors. Grant with care. | [^1]: Only active if REST_API_AUTHORIZATION_ENABLED is true [^2]: DataHub Cloud only -##### Common metadata privileges +#### Entity Management + +| Platform Privileges | Description | +|-------------------------------------|------------------------------------------------------------------------------------| +| Manage Domains | Allow actor to create and remove Asset Domains. | +| Manage Glossaries | Allow actor to create, edit, and remove Glossary Entities | +| Manage Tags | Allow actor to create and remove Tags. | + +#### System Management + +| Platform Privileges | Description | +|-----------------------------------------------|--------------------------------------------------------------------------| +| Restore Indices API[^1] | Allow actor to use the Restore Indices API. | | +| Get Timeseries index sizes API[^1] | Allow actor to use the get Timeseries indices size API. | +| Truncate timeseries aspect index size API[^1] | Allow actor to use the API to truncate a timeseries index. 
| +| Get ES task status API[^1] | Allow actor to use the get task status API for an ElasticSearch task. | +| Enable/Disable Writeability API[^1] | Allow actor to enable or disable GMS writeability for data migrations. | +| Apply Retention API[^1] | Allow actor to apply retention using the API. | +| Analytics API access[^1] | Allow actor to use API read access to raw analytics data. | + +[^1]: Only active if REST_API_AUTHORIZATION_ENABLED is true +[^2]: DataHub Cloud only + +### Common Metadata Privileges + These privileges are to view & modify any entity within DataHub. -| Common Privileges | Description | +#### Entity Privileges + +| Entity Privileges | Description | |-------------------------------------|--------------------------------------------------------------------------------------------| | View Entity Page | Allow actor to view the entity page. | +| Edit Entity | Allow actor to edit any information about an entity. Super user privileges for the entity. | +| Delete | Allow actor to delete this entity. | +| Create Entity | Allow actor to create an entity if it doesn't exist. | +| Entity Exists | Allow actor to determine whether the entity exists. | +| Get Timeline API[^1] | Allow actor to use the GET Timeline API. | +| Get Entity + Relationships API[^1] | Allow actor to use the GET Entity and Relationships API. | +| Get Aspect/Entity Count APIs[^1] | Allow actor to use the GET Aspect/Entity Count APIs. | +| View Entity[^2] | Allow actor to view the entity in search results. | +| Share Entity[^2] | Allow actor to share an entity with another DataHub Cloud instance. | + +[^1]: Only active if REST_API_AUTHORIZATION_ENABLED is true +[^2]: DataHub Cloud only + +#### Aspect Privileges + +| Aspect Privileges | Description | +|-------------------------------------|--------------------------------------------------------------------------------------------| | Edit Tags | Allow actor to add and remove tags to an asset. 
| | Edit Glossary Terms | Allow actor to add and remove glossary terms to an asset. | | Edit Description | Allow actor to edit the description (documentation) of an entity. | @@ -122,35 +238,57 @@ These privileges are to view & modify any entity within DataHub. | Edit Data Product | Allow actor to edit the Data Product of an entity. | | Edit Deprecation | Allow actor to edit the Deprecation status of an entity. | | Edit Incidents | Allow actor to create and remove incidents for an entity. | -| Edit Entity | Allow actor to edit any information about an entity. Super user privileges for the entity. | | Edit Lineage | Allow actor to add and remove lineage edges for this entity. | | Edit Properties | Allow actor to edit the properties for an entity. | | Edit Owners | Allow actor to add and remove owners of an entity. | -| Delete | Allow actor to delete this entity. | -| Search API[^1] | Allow actor to access search APIs. | -| Get Aspect/Entity Count APIs[^1] | Allow actor to use the GET Aspect/Entity Count APIs. | | Get Timeseries Aspect API[^1] | Allow actor to use the GET Timeseries Aspect API. | -| Get Entity + Relationships API[^1] | Allow actor to use the GET Entity and Relationships API. | -| Get Timeline API[^1] | Allow actor to use the GET Timeline API. | + +[^1]: Only active if REST_API_AUTHORIZATION_ENABLED is true +[^2]: DataHub Cloud only + +#### Proposals + +| Proposals Privileges | Description | +|------------------------------------|--------------------------------------------------------------------------------------------| +| Propose Tags[^2] | Allow actor to propose adding a tag to an asset. | +| Propose Glossary Terms[^2] | Allow actor to propose adding a glossary term to an asset. | +| Propose Documentation[^2] | Allow actor to propose updates to an asset's documentation. | +| Manage Tag Proposals[^2] | Allow actor to manage a proposal to add a tag to an asset. 
| +| Manage Glossary Term Proposals[^2] | Allow actor to manage a proposal to add a glossary term to an asset. | +| Manage Documentation Proposals[^2] | Allow actor to manage a proposal update an asset's documentation | + +[^1]: Only active if REST_API_AUTHORIZATION_ENABLED is true +[^2]: DataHub Cloud only + +#### System Management + +| System Privileges | Description | +|-------------------------------------|--------------------------------------------------------------------------------------------| | Explain ElasticSearch Query API[^1] | Allow actor to use the Operations API explain endpoint. | | Produce Platform Event API[^1] | Allow actor to produce Platform Events using the API. | -| Create Entity | Allow actor to create an entity if it doesn't exist. | -| Entity Exists | Allow actor to determine whether the entity exists. | -| View Entity[^2] | Allow actor to view the entity in search results. | -| Propose Tags[^2] | Allow actor to propose adding a tag to an asset. | -| Propose Glossary Terms[^2] | Allow actor to propose adding a glossary term to an asset. | -| Propose Documentation[^2] | Allow actor to propose updates to an asset's documentation. | -| Manage Tag Proposals[^2] | Allow actor to manage a proposal to add a tag to an asset. | -| Manage Glossary Term Proposals[^2] | Allow actor to manage a proposal to add a glossary term to an asset. | -| Manage Documentation Proposals[^2] | Allow actor to manage a proposal update an asset's documentation | -| Share Entity[^2] | Allow actor to share an entity with another DataHub Cloud instance. | [^1]: Only active if REST_API_AUTHORIZATION_ENABLED is true [^2]: DataHub Cloud only -##### Specific entity-level privileges +### Specific Entity-level Privileges These privileges are not generalizable. 
+#### Users & Groups + +| Entity | Privilege | Description | +|--------------|-------------------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| Group | Edit Group Members | Allow actor to add and remove members to a group. | +| Group | Manage Group Notification Settings[^2] | Allow actor to manage notification settings for a group. | +| Group | Manage Group Subscriptions[^2] | Allow actor to manage subscriptions for a group. | +| Group | Edit Contact Information | Allow actor to change the contact information such as email & chat handles. | +| User | Edit Contact Information | Allow actor to change the contact information such as email & chat handles. | +| User | Edit User Profile | Allow actor to change the user's profile including display name, bio, title, profile image, etc. | + +[^1]: Only active if REST_API_AUTHORIZATION_ENABLED is true +[^2]: DataHub Cloud only + +#### Dataset + | Entity | Privilege | Description | |--------------|-------------------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | Dataset | View Dataset Usage | Allow actor to access dataset usage information (includes usage statistics and queries). | @@ -174,101 +312,22 @@ These privileges are not generalizable. | Domain | Manage Data Products | Allow actor to create, edit, and delete Data Products within a Domain | | GlossaryNode | Manage Direct Glossary Children | Allow actor to create and delete the direct children of this entity. | | GlossaryNode | Manage All Glossary Children | Allow actor to create and delete everything underneath this entity. | -| Group | Edit Group Members | Allow actor to add and remove members to a group. 
| -| Group | Manage Group Notification Settings[^2] | Allow actor to manage notification settings for a group. | -| Group | Manage Group Subscriptions[^2] | Allow actor to manage subscriptions for a group. | -| Group | Edit Contact Information | Allow actor to change the contact information such as email & chat handles. | -| User | Edit Contact Information | Allow actor to change the contact information such as email & chat handles. | -| User | Edit User Profile | Allow actor to change the user's profile including display name, bio, title, profile image, etc. | - -#### Resources - -Resource filter defines the set of resources that the policy applies to is defined using a list of criteria. Each -criterion defines a field type (like type, urn, domain), a list of field values to compare, and a -condition (like EQUALS). It essentially checks whether the field of a certain resource matches any of the input values. -Note, that if there are no criteria or resource is not set, policy is applied to ALL resources. - -For example, the following resource filter will apply the policy to datasets, charts, and dashboards under domain 1. - -```json -{ - "resources": { - "filter": { - "criteria": [ - { - "field": "TYPE", - "condition": "EQUALS", - "values": [ - "dataset", - "chart", - "dashboard" - ] - }, - { - "field": "DOMAIN", - "values": [ - "urn:li:domain:domain1" - ], - "condition": "EQUALS" - } - ] - } - } -} -``` -Where `resources` is inside the `info` aspect of a Policy. - -Supported fields are as follows - -| Field Type | Description | Example | -|---------------|------------------------|-------------------------| -| type | Type of the resource | dataset, chart, dataJob | -| urn | Urn of the resource | urn:li:dataset:... | -| domain | Domain of the resource | urn:li:domain:domainX | - -## Managing Policies - -Policies can be managed on the page **Settings > Permissions > Policies** page. 
The `Policies` tab will only -be visible to those users having the `Manage Policies` privilege. - -Out of the box, DataHub is deployed with a set of pre-baked Policies. The set of default policies are created at deploy -time and can be found inside the `policies.json` file within `metadata-service/war/src/main/resources/boot`. This set of policies serves the -following purposes: - -1. Assigns immutable super-user privileges for the root `datahub` user account (Immutable) -2. Assigns all Platform privileges for all Users by default (Editable) - -The reason for #1 is to prevent people from accidentally deleting all policies and getting locked out (`datahub` super user account can be a backup) -The reason for #2 is to permit administrators to log in via OIDC or another means outside of the `datahub` root account -when they are bootstrapping with DataHub. This way, those setting up DataHub can start managing policies without friction. -Note that these privilege *can* and likely *should* be altered inside the **Policies** page of the UI. - -> Pro-Tip: To login using the `datahub` account, simply navigate to `/login` and enter `datahub`, `datahub`. Note that the password can be customized for your -deployment by changing the `user.props` file within the `datahub-frontend` module. Notice that JaaS authentication must be enabled. - -## Configuration - -By default, the Policies feature is *enabled*. This means that the deployment will support creating, editing, removing, and -most importantly enforcing fine-grained access policies. - -In some cases, these capabilities are not desirable. For example, if your company's users are already used to having free reign, you -may want to keep it that way. Or perhaps it is only your Data Platform team who actively uses DataHub, in which case Policies may be overkill. -For these scenarios, we've provided a back door to disable Policies in your deployment of DataHub. 
This will completely hide -the policies management UI and by default will allow all actions on the platform. It will be as though -each user has *all* privileges, both of the **Platform** & **Metadata** flavor. -To disable Policies, you can simply set the `AUTH_POLICIES_ENABLED` environment variable for the `datahub-gms` service container -to `false`. For example in your `docker/datahub-gms/docker.env`, you'd place +[^1]: Only active if REST_API_AUTHORIZATION_ENABLED is true +[^2]: DataHub Cloud only -``` -AUTH_POLICIES_ENABLED=false -``` +#### Misc -### REST API Authorization - -Policies only affect REST APIs when the environment variable `REST_API_AUTHORIZATION` is set to `true` for GMS. Some policies only apply when this setting is enabled, marked above, and other Metadata and Platform policies apply to the APIs where relevant, also specified in the table above. +| Entity | Privilege | Description | +|--------------|-------------------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| Tag | Edit Tag Color | Allow actor to change the color of a Tag. | +| Domain | Manage Data Products | Allow actor to create, edit, and delete Data Products within a Domain | +| GlossaryNode | Manage Direct Glossary Children | Allow actor to create and delete the direct children of this entity. | +| GlossaryNode | Manage All Glossary Children | Allow actor to create and delete everything underneath this entity. | +[^1]: Only active if REST_API_AUTHORIZATION_ENABLED is true +[^2]: DataHub Cloud only ## Coming Soon @@ -278,7 +337,7 @@ The DataHub team is hard at work trying to improve the Policies feature. We are Under consideration -- Ability to define Metadata Policies against multiple reosurces scoped to particular "Containers" (e.g. 
A "schema", "database", or "collection") +- Ability to define Metadata Policies against multiple resources scoped to particular "Containers" (e.g. A "schema", "database", or "collection") ## Feedback / Questions / Concerns From 479f31d0f2368ab7376c419f4b365239da353d98 Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Fri, 9 Aug 2024 14:42:59 -0500 Subject: [PATCH 21/72] fix(openapi): fix openapi v2 and v3 docs update (#11139) --- docs/api/tutorials/structured-properties.md | 204 ++++++++++-------- .../controller/GenericEntitiesController.java | 23 +- .../v2/controller/EntityController.java | 25 +++ .../v3/controller/EntityController.java | 27 +++ 4 files changed, 169 insertions(+), 110 deletions(-) diff --git a/docs/api/tutorials/structured-properties.md b/docs/api/tutorials/structured-properties.md index 6f6c6541554d9..00e992f2bd0bb 100644 --- a/docs/api/tutorials/structured-properties.md +++ b/docs/api/tutorials/structured-properties.md @@ -158,29 +158,37 @@ curl -X 'POST' -v \ -H 'accept: application/json' \ -H 'Content-Type: application/json' \ -d '{ + "value": { "qualifiedName": "io.acryl.privacy.retentionTime", - "valueType": "urn:li:dataType:datahub.number", - "description": "Retention Time is used to figure out how long to retain records in a dataset", - "displayName": "Retention Time", - "cardinality": "MULTIPLE", - "entityTypes": [ - "urn:li:entityType:datahub.dataset", - "urn:li:entityType:datahub.dataFlow" - ], - "allowedValues": [ - { - "value": {"double": 30}, - "description": "30 days, usually reserved for datasets that are ephemeral and contain pii" - }, - { - "value": {"double": 60}, - "description": "Use this for datasets that drive monthly reporting but contain pii" - }, - { - "value": {"double": 365}, - "description": "Use this for non-sensitive data that can be retained for longer" - } - ] + "valueType": "urn:li:dataType:datahub.number", + "description": "Retention Time is used to figure out 
how long to retain records in a dataset", + "displayName": "Retention Time", + "cardinality": "MULTIPLE", + "entityTypes": [ + "urn:li:entityType:datahub.dataset", + "urn:li:entityType:datahub.dataFlow" + ], + "allowedValues": [ + { + "value": { + "double": 30 + }, + "description": "30 days, usually reserved for datasets that are ephemeral and contain pii" + }, + { + "value": { + "double": 60 + }, + "description": "Use this for datasets that drive monthly reporting but contain pii" + }, + { + "value": { + "double": 365 + }, + "description": "Use this for non-sensitive data that can be retained for longer" + } + ] + } }' | jq ``` @@ -474,14 +482,16 @@ curl -X 'POST' -v \ -H 'accept: application/json' \ -H 'Content-Type: application/json' \ -d '{ - "properties": [ - { - "propertyUrn": "urn:li:structuredProperty:io.acryl.privacy.retentionTime", - "values": [ - {"double": 60.0} - ] - } - ] + "value": { + "properties": [ + { + "propertyUrn": "urn:li:structuredProperty:io.acryl.privacy.retentionTime", + "values": [ + {"double": 60.0} + ] + } + ] + } }' | jq ``` Example Response: @@ -627,23 +637,25 @@ curl -X 'POST' -v \ -H 'accept: application/json' \ -H 'Content-Type: application/json' \ -d '{ - "qualifiedName": "io.acryl.privacy.retentionTime02", - "displayName": "Retention Time 02", - "valueType": "urn:li:dataType:datahub.string", - "allowedValues": [ - { - "value": {"string": "foo2"}, - "description": "test foo2 value" - }, - { - "value": {"string": "bar2"}, - "description": "test bar2 value" - } - ], - "cardinality": "SINGLE", - "entityTypes": [ - "urn:li:entityType:datahub.dataset" - ] + "value": { + "qualifiedName": "io.acryl.privacy.retentionTime02", + "displayName": "Retention Time 02", + "valueType": "urn:li:dataType:datahub.string", + "allowedValues": [ + { + "value": {"string": "foo2"}, + "description": "test foo2 value" + }, + { + "value": {"string": "bar2"}, + "description": "test bar2 value" + } + ], + "cardinality": "SINGLE", + "entityTypes": [ + 
"urn:li:entityType:datahub.dataset" + ] + } }' | jq ``` @@ -686,24 +698,26 @@ Specically, this will set `io.acryl.privacy.retentionTime` as `60.0` and `io.acr ```shell curl -X 'POST' -v \ - 'http://localhost:8080/openapi/v3/entity/dataset/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Ahive%2CSampleHiveDataset%2CPROD%29/structuredProperties' \ + 'http://localhost:8080/openapi/v3/entity/dataset/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Ahive%2CSampleHiveDataset%2CPROD%29/structuredProperties?createIfNotExists=false' \ -H 'accept: application/json' \ -H 'Content-Type: application/json' \ -d '{ - "properties": [ - { - "propertyUrn": "urn:li:structuredProperty:io.acryl.privacy.retentionTime", - "values": [ - {"double": 60.0} - ] - }, - { - "propertyUrn": "urn:li:structuredProperty:io.acryl.privacy.retentionTime02", - "values": [ - {"string": "bar2"} - ] - } - ] + "value": { + "properties": [ + { + "propertyUrn": "urn:li:structuredProperty:io.acryl.privacy.retentionTime", + "values": [ + {"double": 60.0} + ] + }, + { + "propertyUrn": "urn:li:structuredProperty:io.acryl.privacy.retentionTime02", + "values": [ + {"string": "bar2"} + ] + } + ] + } }' | jq ``` @@ -1111,7 +1125,9 @@ curl -X 'POST' \ -H 'accept: application/json' \ -H 'Content-Type: application/json' \ -d '{ -"removed": true + "value": { + "removed": true + } }' | jq ``` @@ -1132,11 +1148,13 @@ If you want to **remove the soft delete**, you can do so by either hard deleting ```shell curl -X 'POST' \ - 'http://localhost:8080/openapi/v3/entity/structuredProperty/urn%3Ali%3AstructuredProperty%3Aio.acryl.privacy.retentionTime/status?systemMetadata=false' \ + 'http://localhost:8080/openapi/v3/entity/structuredProperty/urn%3Ali%3AstructuredProperty%3Aio.acryl.privacy.retentionTime/status?systemMetadata=false&createIfNotExists=false' \ -H 'accept: application/json' \ -H 'Content-Type: application/json' \ -d '{ -"removed": false + "value": { + "removed": true + } }' | jq ``` @@ -1271,34 +1289,42 @@ Change 
the cardinality to `SINGLE` and add a `version`. ```shell curl -X 'POST' -v \ - 'http://localhost:8080/openapi/v3/entity/structuredProperty/urn%3Ali%3AstructuredProperty%3Aio.acryl.privacy.retentionTime/propertyDefinition' \ + 'http://localhost:8080/openapi/v3/entity/structuredProperty/urn%3Ali%3AstructuredProperty%3Aio.acryl.privacy.retentionTime/propertyDefinition?createIfNotExists=false' \ -H 'accept: application/json' \ -H 'Content-Type: application/json' \ -d '{ + "value": { "qualifiedName": "io.acryl.privacy.retentionTime", - "valueType": "urn:li:dataType:datahub.number", - "description": "Retention Time is used to figure out how long to retain records in a dataset", - "displayName": "Retention Time", - "cardinality": "SINGLE", - "version": "20240614080000", - "entityTypes": [ - "urn:li:entityType:datahub.dataset", - "urn:li:entityType:datahub.dataFlow" - ], - "allowedValues": [ - { - "value": {"double": 30}, - "description": "30 days, usually reserved for datasets that are ephemeral and contain pii" - }, - { - "value": {"double": 60}, - "description": "Use this for datasets that drive monthly reporting but contain pii" - }, - { - "value": {"double": 365}, - "description": "Use this for non-sensitive data that can be retained for longer" - } - ] + "valueType": "urn:li:dataType:datahub.number", + "description": "Retention Time is used to figure out how long to retain records in a dataset", + "displayName": "Retention Time", + "cardinality": "SINGLE", + "version": "20240614080000", + "entityTypes": [ + "urn:li:entityType:datahub.dataset", + "urn:li:entityType:datahub.dataFlow" + ], + "allowedValues": [ + { + "value": { + "double": 30 + }, + "description": "30 days, usually reserved for datasets that are ephemeral and contain pii" + }, + { + "value": { + "double": 60 + }, + "description": "Use this for datasets that drive monthly reporting but contain pii" + }, + { + "value": { + "double": 365 + }, + "description": "Use this for non-sensitive data that can be 
retained for longer" + } + ] + } }' | jq ``` diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/controller/GenericEntitiesController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/controller/GenericEntitiesController.java index de5d2ae1118d4..f415a4f47c9dc 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/controller/GenericEntitiesController.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/controller/GenericEntitiesController.java @@ -13,14 +13,11 @@ import com.datahub.authorization.AuthorizerChain; import com.datahub.util.RecordUtils; import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; -import com.linkedin.data.ByteString; import com.linkedin.data.template.RecordTemplate; import com.linkedin.entity.EnvelopedAspect; -import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.aspect.AspectRetriever; import com.linkedin.metadata.aspect.batch.AspectsBatch; import com.linkedin.metadata.aspect.batch.ChangeMCP; @@ -41,7 +38,6 @@ import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchService; import com.linkedin.metadata.utils.AuditStampUtils; -import com.linkedin.metadata.utils.GenericRecordUtils; import com.linkedin.metadata.utils.SearchUtil; import com.linkedin.mxe.SystemMetadata; import com.linkedin.util.Pair; @@ -57,7 +53,6 @@ import jakarta.servlet.http.HttpServletRequest; import java.lang.reflect.InvocationTargetException; import java.net.URISyntaxException; -import java.nio.charset.StandardCharsets; import java.util.*; import java.util.stream.Collectors; import javax.annotation.Nonnull; @@ -726,28 +721,14 @@ protected RecordTemplate toRecordTemplate( 
aspectSpec.getDataTemplateClass(), envelopedAspect.getValue().data()); } - protected ChangeMCP toUpsertItem( + protected abstract ChangeMCP toUpsertItem( @Nonnull AspectRetriever aspectRetriever, Urn entityUrn, AspectSpec aspectSpec, Boolean createIfNotExists, String jsonAspect, Actor actor) - throws JsonProcessingException { - JsonNode jsonNode = objectMapper.readTree(jsonAspect); - String aspectJson = jsonNode.get("value").toString(); - return ChangeItemImpl.builder() - .urn(entityUrn) - .aspectName(aspectSpec.getName()) - .changeType(Boolean.TRUE.equals(createIfNotExists) ? ChangeType.CREATE : ChangeType.UPSERT) - .auditStamp(AuditStampUtils.createAuditStamp(actor.toUrnStr())) - .recordTemplate( - GenericRecordUtils.deserializeAspect( - ByteString.copyString(aspectJson, StandardCharsets.UTF_8), - GenericRecordUtils.JSON, - aspectSpec)) - .build(aspectRetriever); - } + throws URISyntaxException, JsonProcessingException; protected ChangeMCP toUpsertItem( @Nonnull AspectRetriever aspectRetriever, diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/EntityController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/EntityController.java index 54a7724cadd34..1207eb331b795 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/EntityController.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/EntityController.java @@ -13,8 +13,11 @@ import com.linkedin.data.ByteString; import com.linkedin.data.template.RecordTemplate; import com.linkedin.entity.EnvelopedAspect; +import com.linkedin.events.metadata.ChangeType; +import com.linkedin.metadata.aspect.AspectRetriever; import com.linkedin.metadata.aspect.batch.AspectsBatch; import com.linkedin.metadata.aspect.batch.BatchItem; +import com.linkedin.metadata.aspect.batch.ChangeMCP; import com.linkedin.metadata.entity.EntityApiUtils; import 
com.linkedin.metadata.entity.IngestResult; import com.linkedin.metadata.entity.UpdateAspectResult; @@ -260,4 +263,26 @@ protected List buildEntityList( } return responseList; } + + @Override + protected ChangeMCP toUpsertItem( + @Nonnull AspectRetriever aspectRetriever, + Urn entityUrn, + AspectSpec aspectSpec, + Boolean createIfNotExists, + String jsonAspect, + Actor actor) + throws URISyntaxException { + return ChangeItemImpl.builder() + .urn(entityUrn) + .aspectName(aspectSpec.getName()) + .changeType(Boolean.TRUE.equals(createIfNotExists) ? ChangeType.CREATE : ChangeType.UPSERT) + .auditStamp(AuditStampUtils.createAuditStamp(actor.toUrnStr())) + .recordTemplate( + GenericRecordUtils.deserializeAspect( + ByteString.copyString(jsonAspect, StandardCharsets.UTF_8), + GenericRecordUtils.JSON, + aspectSpec)) + .build(aspectRetriever); + } } diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v3/controller/EntityController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v3/controller/EntityController.java index a0478c9af1609..fbc9bf2956cfd 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v3/controller/EntityController.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v3/controller/EntityController.java @@ -14,8 +14,11 @@ import com.linkedin.common.urn.Urn; import com.linkedin.data.ByteString; import com.linkedin.entity.EnvelopedAspect; +import com.linkedin.events.metadata.ChangeType; +import com.linkedin.metadata.aspect.AspectRetriever; import com.linkedin.metadata.aspect.batch.AspectsBatch; import com.linkedin.metadata.aspect.batch.BatchItem; +import com.linkedin.metadata.aspect.batch.ChangeMCP; import com.linkedin.metadata.entity.EntityApiUtils; import com.linkedin.metadata.entity.IngestResult; import com.linkedin.metadata.entity.UpdateAspectResult; @@ -348,4 +351,28 @@ protected AspectsBatch toMCPBatch( 
.retrieverContext(opContext.getRetrieverContext().get()) .build(); } + + @Override + protected ChangeMCP toUpsertItem( + @Nonnull AspectRetriever aspectRetriever, + Urn entityUrn, + AspectSpec aspectSpec, + Boolean createIfNotExists, + String jsonAspect, + Actor actor) + throws JsonProcessingException { + JsonNode jsonNode = objectMapper.readTree(jsonAspect); + String aspectJson = jsonNode.get("value").toString(); + return ChangeItemImpl.builder() + .urn(entityUrn) + .aspectName(aspectSpec.getName()) + .changeType(Boolean.TRUE.equals(createIfNotExists) ? ChangeType.CREATE : ChangeType.UPSERT) + .auditStamp(AuditStampUtils.createAuditStamp(actor.toUrnStr())) + .recordTemplate( + GenericRecordUtils.deserializeAspect( + ByteString.copyString(aspectJson, StandardCharsets.UTF_8), + GenericRecordUtils.JSON, + aspectSpec)) + .build(aspectRetriever); + } } From 946b9f37450a51dd12670f4b383d6970767c4129 Mon Sep 17 00:00:00 2001 From: RyanHolstien Date: Fri, 9 Aug 2024 14:55:35 -0500 Subject: [PATCH 22/72] feat(auth): grant type and acr values custom oidc parameters support (#11116) --- .../app/auth/sso/oidc/OidcConfigs.java | 12 +++++++++++- .../app/auth/sso/oidc/OidcProvider.java | 14 +++++++++++++- datahub-frontend/conf/application.conf | 2 ++ 3 files changed, 26 insertions(+), 2 deletions(-) diff --git a/datahub-frontend/app/auth/sso/oidc/OidcConfigs.java b/datahub-frontend/app/auth/sso/oidc/OidcConfigs.java index 753edaf89d988..080ca236630bf 100644 --- a/datahub-frontend/app/auth/sso/oidc/OidcConfigs.java +++ b/datahub-frontend/app/auth/sso/oidc/OidcConfigs.java @@ -41,6 +41,8 @@ public class OidcConfigs extends SsoConfigs { public static final String OIDC_EXTRACT_JWT_ACCESS_TOKEN_CLAIMS = "auth.oidc.extractJwtAccessTokenClaims"; public static final String OIDC_PREFERRED_JWS_ALGORITHM = "auth.oidc.preferredJwsAlgorithm"; + public static final String OIDC_GRANT_TYPE = "auth.oidc.grantType"; + public static final String OIDC_ACR_VALUES = "auth.oidc.acrValues"; /** Default 
values */ private static final String DEFAULT_OIDC_USERNAME_CLAIM = "email"; @@ -75,7 +77,9 @@ public class OidcConfigs extends SsoConfigs { private final Optional customParamResource; private final String readTimeout; private final Optional extractJwtAccessTokenClaims; - private Optional preferredJwsAlgorithm; + private final Optional preferredJwsAlgorithm; + private final Optional grantType; + private final Optional acrValues; public OidcConfigs(Builder builder) { super(builder); @@ -98,6 +102,8 @@ public OidcConfigs(Builder builder) { this.readTimeout = builder.readTimeout; this.extractJwtAccessTokenClaims = builder.extractJwtAccessTokenClaims; this.preferredJwsAlgorithm = builder.preferredJwsAlgorithm; + this.acrValues = builder.acrValues; + this.grantType = builder.grantType; } public static class Builder extends SsoConfigs.Builder { @@ -123,6 +129,8 @@ public static class Builder extends SsoConfigs.Builder { private String readTimeout = DEFAULT_OIDC_READ_TIMEOUT; private Optional extractJwtAccessTokenClaims = Optional.empty(); private Optional preferredJwsAlgorithm = Optional.empty(); + private Optional grantType = Optional.empty(); + private Optional acrValues = Optional.empty(); public Builder from(final com.typesafe.config.Config configs) { super.from(configs); @@ -169,6 +177,8 @@ public Builder from(final com.typesafe.config.Config configs) { getOptional(configs, OIDC_EXTRACT_JWT_ACCESS_TOKEN_CLAIMS).map(Boolean::parseBoolean); preferredJwsAlgorithm = Optional.ofNullable(getOptional(configs, OIDC_PREFERRED_JWS_ALGORITHM, null)); + grantType = Optional.ofNullable(getOptional(configs, OIDC_GRANT_TYPE, null)); + acrValues = Optional.ofNullable(getOptional(configs, OIDC_ACR_VALUES, null)); return this; } diff --git a/datahub-frontend/app/auth/sso/oidc/OidcProvider.java b/datahub-frontend/app/auth/sso/oidc/OidcProvider.java index 39a65a46cbf91..a8a3205e8299c 100644 --- a/datahub-frontend/app/auth/sso/oidc/OidcProvider.java +++ 
b/datahub-frontend/app/auth/sso/oidc/OidcProvider.java @@ -3,6 +3,8 @@ import auth.sso.SsoProvider; import auth.sso.oidc.custom.CustomOidcClient; import com.google.common.collect.ImmutableMap; +import java.util.HashMap; +import java.util.Map; import lombok.extern.slf4j.Slf4j; import org.pac4j.core.client.Client; import org.pac4j.core.http.callback.PathParameterCallbackUrlResolver; @@ -64,9 +66,19 @@ private Client createPac4jClient() { _oidcConfigs.getResponseType().ifPresent(oidcConfiguration::setResponseType); _oidcConfigs.getResponseMode().ifPresent(oidcConfiguration::setResponseMode); _oidcConfigs.getUseNonce().ifPresent(oidcConfiguration::setUseNonce); + Map customParamsMap = new HashMap<>(); _oidcConfigs .getCustomParamResource() - .ifPresent(value -> oidcConfiguration.setCustomParams(ImmutableMap.of("resource", value))); + .ifPresent(value -> customParamsMap.put("resource", value)); + _oidcConfigs + .getGrantType() + .ifPresent(value -> customParamsMap.put("grant_type", value)); + _oidcConfigs + .getAcrValues() + .ifPresent(value -> customParamsMap.put("acr_values", value)); + if (!customParamsMap.isEmpty()) { + oidcConfiguration.setCustomParams(customParamsMap); + } _oidcConfigs .getPreferredJwsAlgorithm() .ifPresent( diff --git a/datahub-frontend/conf/application.conf b/datahub-frontend/conf/application.conf index dc243ecadafd8..63ff2c9166fbc 100644 --- a/datahub-frontend/conf/application.conf +++ b/datahub-frontend/conf/application.conf @@ -186,6 +186,8 @@ auth.oidc.customParam.resource = ${?AUTH_OIDC_CUSTOM_PARAM_RESOURCE} auth.oidc.readTimeout = ${?AUTH_OIDC_READ_TIMEOUT} auth.oidc.extractJwtAccessTokenClaims = ${?AUTH_OIDC_EXTRACT_JWT_ACCESS_TOKEN_CLAIMS} # Whether to extract claims from JWT access token. Defaults to false. 
auth.oidc.preferredJwsAlgorithm = ${?AUTH_OIDC_PREFERRED_JWS_ALGORITHM} # Which jws algorithm to use +auth.oidc.acrValues = ${?AUTH_OIDC_ACR_VALUES} +auth.oidc.grantType = ${?AUTH_OIDC_GRANT_TYPE} # # By default, the callback URL that should be registered with the identity provider is computed as {$baseUrl}/callback/oidc. From 4d2af40465bc26e432285999c1250f6966997124 Mon Sep 17 00:00:00 2001 From: RyanHolstien Date: Fri, 9 Aug 2024 14:56:32 -0500 Subject: [PATCH 23/72] fix(mutator): mutator hook fixes (#11140) --- .../models/registry/ConfigEntityRegistry.java | 2 +- .../registry/SnapshotEntityRegistry.java | 21 +++++++ .../metadata/aspect/plugins/PluginsTest.java | 36 ++++++++--- .../java/com/datahub/util/RecordUtils.java | 12 ++++ .../entity/ebean/batch/AspectsBatchImpl.java | 60 +++++++++++++++---- .../entityregistry/EntityRegistryFactory.java | 18 +++++- 6 files changed, 127 insertions(+), 22 deletions(-) diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/ConfigEntityRegistry.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/ConfigEntityRegistry.java index 4238c333615ec..8dd642f63dd97 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/ConfigEntityRegistry.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/ConfigEntityRegistry.java @@ -52,7 +52,7 @@ public class ConfigEntityRegistry implements EntityRegistry { private final DataSchemaFactory dataSchemaFactory; @Getter private final PluginFactory pluginFactory; - @Nullable + @Getter @Nullable private BiFunction, PluginFactory> pluginFactoryProvider; private final Map entityNameToSpec; diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/SnapshotEntityRegistry.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/SnapshotEntityRegistry.java index c60f89c510cd7..16df2d452a619 100644 --- 
a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/SnapshotEntityRegistry.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/SnapshotEntityRegistry.java @@ -22,6 +22,8 @@ import com.linkedin.metadata.aspect.patch.template.dataset.UpstreamLineageTemplate; import com.linkedin.metadata.aspect.patch.template.form.FormInfoTemplate; import com.linkedin.metadata.aspect.patch.template.structuredproperty.StructuredPropertyDefinitionTemplate; +import com.linkedin.metadata.aspect.plugins.PluginFactory; +import com.linkedin.metadata.aspect.plugins.config.PluginConfiguration; import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.DefaultEntitySpec; import com.linkedin.metadata.models.EntitySpec; @@ -32,8 +34,11 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.function.BiFunction; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.Getter; /** * Implementation of {@link EntityRegistry} that builds {@link DefaultEntitySpec} objects from the a @@ -46,6 +51,9 @@ public class SnapshotEntityRegistry implements EntityRegistry { private final AspectTemplateEngine _aspectTemplateEngine; private final Map _aspectNameToSpec; + @Getter @Nullable + private BiFunction, PluginFactory> pluginFactoryProvider; + private static final SnapshotEntityRegistry INSTANCE = new SnapshotEntityRegistry(); public SnapshotEntityRegistry() { @@ -56,6 +64,19 @@ public SnapshotEntityRegistry() { entitySpecs = new ArrayList<>(entityNameToSpec.values()); _aspectNameToSpec = populateAspectMap(entitySpecs); _aspectTemplateEngine = populateTemplateEngine(_aspectNameToSpec); + pluginFactoryProvider = null; + } + + public SnapshotEntityRegistry( + BiFunction, PluginFactory> pluginFactoryProvider) { + entityNameToSpec = + new EntitySpecBuilder() + .buildEntitySpecs(new Snapshot().schema()).stream() + .collect(Collectors.toMap(spec -> 
spec.getName().toLowerCase(), spec -> spec)); + entitySpecs = new ArrayList<>(entityNameToSpec.values()); + _aspectNameToSpec = populateAspectMap(entitySpecs); + _aspectTemplateEngine = populateTemplateEngine(_aspectNameToSpec); + this.pluginFactoryProvider = pluginFactoryProvider; } public SnapshotEntityRegistry(UnionTemplate snapshot) { diff --git a/entity-registry/src/test/java/com/linkedin/metadata/aspect/plugins/PluginsTest.java b/entity-registry/src/test/java/com/linkedin/metadata/aspect/plugins/PluginsTest.java index cecf21849f3aa..b98df05d721dd 100644 --- a/entity-registry/src/test/java/com/linkedin/metadata/aspect/plugins/PluginsTest.java +++ b/entity-registry/src/test/java/com/linkedin/metadata/aspect/plugins/PluginsTest.java @@ -6,6 +6,7 @@ import com.datahub.test.TestEntityProfile; import com.linkedin.data.schema.annotation.PathSpecBasedSchemaAnnotationVisitor; import com.linkedin.events.metadata.ChangeType; +import com.linkedin.metadata.aspect.plugins.config.AspectPluginConfig; import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.EventSpec; import com.linkedin.metadata.models.registry.ConfigEntityRegistry; @@ -262,23 +263,42 @@ public void testUnloadedMerge() throws EntityRegistryException { mergedEntityRegistry.apply(configEntityRegistry2); assertEquals( - mergedEntityRegistry.getAllAspectPayloadValidators().stream() - .filter(p -> p.getConfig().getSupportedOperations().contains("DELETE")) + mergedEntityRegistry + .getPluginFactory() + .getPluginConfiguration() + .getAspectPayloadValidators() + .stream() + .filter(AspectPluginConfig::isEnabled) + .filter(p -> p.getSupportedOperations().contains("DELETE")) .count(), 1); + assertEquals( - mergedEntityRegistry.getAllMutationHooks().stream() - .filter(p -> p.getConfig().getSupportedOperations().contains("DELETE")) + mergedEntityRegistry.getPluginFactory().getPluginConfiguration().getMutationHooks().stream() + .filter(AspectPluginConfig::isEnabled) + .filter(p -> 
p.getSupportedOperations().contains("DELETE")) .count(), 1); + assertEquals( - mergedEntityRegistry.getAllMCLSideEffects().stream() - .filter(p -> p.getConfig().getSupportedOperations().contains("DELETE")) + mergedEntityRegistry + .getPluginFactory() + .getPluginConfiguration() + .getMclSideEffects() + .stream() + .filter(AspectPluginConfig::isEnabled) + .filter(p -> p.getSupportedOperations().contains("DELETE")) .count(), 1); + assertEquals( - mergedEntityRegistry.getAllMCPSideEffects().stream() - .filter(p -> p.getConfig().getSupportedOperations().contains("DELETE")) + mergedEntityRegistry + .getPluginFactory() + .getPluginConfiguration() + .getMcpSideEffects() + .stream() + .filter(AspectPluginConfig::isEnabled) + .filter(p -> p.getSupportedOperations().contains("DELETE")) .count(), 1); } diff --git a/li-utils/src/main/java/com/datahub/util/RecordUtils.java b/li-utils/src/main/java/com/datahub/util/RecordUtils.java index 8183ecc21ee27..2955943919e3b 100644 --- a/li-utils/src/main/java/com/datahub/util/RecordUtils.java +++ b/li-utils/src/main/java/com/datahub/util/RecordUtils.java @@ -99,6 +99,18 @@ public static T toRecordTemplate( return toRecordTemplate(type, dataMap); } + @Nonnull + public static DataMap toDataMap(@Nonnull String jsonString) { + DataMap dataMap; + try { + dataMap = DATA_TEMPLATE_CODEC.stringToMap(jsonString); + } catch (IOException e) { + throw new ModelConversionException("Failed to deserialize DataMap: " + jsonString); + } + + return dataMap; + } + /** * Creates a {@link RecordTemplate} object from a {@link DataMap}. 
* diff --git a/metadata-io/metadata-io-api/src/main/java/com/linkedin/metadata/entity/ebean/batch/AspectsBatchImpl.java b/metadata-io/metadata-io-api/src/main/java/com/linkedin/metadata/entity/ebean/batch/AspectsBatchImpl.java index 7a1af12272ac5..0808c29e8ea89 100644 --- a/metadata-io/metadata-io-api/src/main/java/com/linkedin/metadata/entity/ebean/batch/AspectsBatchImpl.java +++ b/metadata-io/metadata-io-api/src/main/java/com/linkedin/metadata/entity/ebean/batch/AspectsBatchImpl.java @@ -9,6 +9,7 @@ import com.linkedin.metadata.aspect.batch.BatchItem; import com.linkedin.metadata.aspect.batch.ChangeMCP; import com.linkedin.metadata.aspect.batch.MCPItem; +import com.linkedin.metadata.aspect.plugins.hooks.MutationHook; import com.linkedin.metadata.aspect.plugins.validation.ValidationExceptionCollection; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.util.Pair; @@ -47,7 +48,7 @@ public Pair>, List> toUpsertBatchItems( final Map> latestAspects) { // Process proposals to change items - Stream mutatedProposalsStream = + Stream mutatedProposalsStream = proposedItemsToChangeItemStream( items.stream() .filter(item -> item instanceof ProposedItem) @@ -92,21 +93,58 @@ public Pair>, List> toUpsertBatchItems( LinkedList newItems = applyMCPSideEffects(upsertBatchItems).collect(Collectors.toCollection(LinkedList::new)); - Map> newUrnAspectNames = getNewUrnAspectsMap(getUrnAspectsMap(), newItems); upsertBatchItems.addAll(newItems); + Map> newUrnAspectNames = + getNewUrnAspectsMap(getUrnAspectsMap(), upsertBatchItems); return Pair.of(newUrnAspectNames, upsertBatchItems); } - private Stream proposedItemsToChangeItemStream(List proposedItems) { - return applyProposalMutationHooks(proposedItems, retrieverContext) - .filter(mcpItem -> mcpItem.getMetadataChangeProposal() != null) - .map( - mcpItem -> - ChangeItemImpl.ChangeItemImplBuilder.build( - mcpItem.getMetadataChangeProposal(), - mcpItem.getAuditStamp(), - retrieverContext.getAspectRetriever())); + private 
Stream proposedItemsToChangeItemStream(List proposedItems) { + List mutationHooks = + retrieverContext.getAspectRetriever().getEntityRegistry().getAllMutationHooks(); + Stream unmutatedItems = + proposedItems.stream() + .filter( + proposedItem -> + mutationHooks.stream() + .noneMatch( + mutationHook -> + mutationHook.shouldApply( + proposedItem.getChangeType(), + proposedItem.getUrn(), + proposedItem.getAspectName()))) + .map( + mcpItem -> { + if (ChangeType.PATCH.equals(mcpItem.getChangeType())) { + return PatchItemImpl.PatchItemImplBuilder.build( + mcpItem.getMetadataChangeProposal(), + mcpItem.getAuditStamp(), + retrieverContext.getAspectRetriever().getEntityRegistry()); + } + return ChangeItemImpl.ChangeItemImplBuilder.build( + mcpItem.getMetadataChangeProposal(), + mcpItem.getAuditStamp(), + retrieverContext.getAspectRetriever()); + }); + List mutatedItems = + applyProposalMutationHooks(proposedItems, retrieverContext).collect(Collectors.toList()); + Stream proposedItemsToChangeItems = + mutatedItems.stream() + .filter(mcpItem -> mcpItem.getMetadataChangeProposal() != null) + // Filter on proposed items again to avoid applying builder to Patch Item side effects + .filter(mcpItem -> mcpItem instanceof ProposedItem) + .map( + mcpItem -> + ChangeItemImpl.ChangeItemImplBuilder.build( + mcpItem.getMetadataChangeProposal(), + mcpItem.getAuditStamp(), + retrieverContext.getAspectRetriever())); + Stream sideEffectItems = + mutatedItems.stream().filter(mcpItem -> !(mcpItem instanceof ProposedItem)); + Stream combinedChangeItems = + Stream.concat(proposedItemsToChangeItems, unmutatedItems); + return Stream.concat(combinedChangeItems, sideEffectItems); } public static class AspectsBatchImplBuilder { diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityregistry/EntityRegistryFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityregistry/EntityRegistryFactory.java index 2c65eeafe063b..6ef13716aaac8 100644 --- 
a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityregistry/EntityRegistryFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityregistry/EntityRegistryFactory.java @@ -1,15 +1,22 @@ package com.linkedin.gms.factory.entityregistry; +import com.datahub.plugins.metadata.aspect.SpringPluginFactory; +import com.linkedin.gms.factory.plugins.SpringStandardPluginConfiguration; +import com.linkedin.metadata.aspect.plugins.PluginFactory; +import com.linkedin.metadata.aspect.plugins.config.PluginConfiguration; import com.linkedin.metadata.models.registry.ConfigEntityRegistry; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.models.registry.EntityRegistryException; import com.linkedin.metadata.models.registry.MergedEntityRegistry; import com.linkedin.metadata.models.registry.PluginEntityRegistryLoader; import com.linkedin.metadata.models.registry.SnapshotEntityRegistry; +import java.util.List; +import java.util.function.BiFunction; import javax.annotation.Nonnull; import lombok.SneakyThrows; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.context.ApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; @@ -27,13 +34,20 @@ public class EntityRegistryFactory { @Qualifier("pluginEntityRegistry") private PluginEntityRegistryLoader pluginEntityRegistryLoader; + @Autowired private ApplicationContext applicationContext; + @SneakyThrows @Bean("entityRegistry") @Primary @Nonnull - protected EntityRegistry getInstance() throws EntityRegistryException { + protected EntityRegistry getInstance( + SpringStandardPluginConfiguration springStandardPluginConfiguration) + throws EntityRegistryException { + BiFunction, PluginFactory> pluginFactoryProvider = + 
(config, loaders) -> new SpringPluginFactory(applicationContext, config, loaders); MergedEntityRegistry baseEntityRegistry = - new MergedEntityRegistry(SnapshotEntityRegistry.getInstance()).apply(configEntityRegistry); + new MergedEntityRegistry(new SnapshotEntityRegistry(pluginFactoryProvider)) + .apply(configEntityRegistry); pluginEntityRegistryLoader.withBaseRegistry(baseEntityRegistry).start(true); return baseEntityRegistry; } From 06562f320df4b363949aae717c53f71c3770098f Mon Sep 17 00:00:00 2001 From: RyanHolstien Date: Fri, 9 Aug 2024 15:29:43 -0500 Subject: [PATCH 24/72] feat(search): support sorting on multiple fields (#10775) --- .../analytics/resolver/GetChartsResolver.java | 7 +-- .../GetMetadataAnalyticsResolver.java | 3 +- .../resolvers/auth/DebugAccessResolver.java | 3 +- .../auth/ListAccessTokensResolver.java | 11 +++-- .../container/ContainerEntitiesResolver.java | 3 +- .../domain/DomainEntitiesResolver.java | 3 +- .../resolvers/domain/ListDomainsResolver.java | 8 ++-- .../resolvers/group/ListGroupsResolver.java | 8 ++-- .../incident/EntityIncidentsResolver.java | 9 ++-- ...estionSourceExecutionRequestsResolver.java | 8 ++-- .../ingest/secret/ListSecretsResolver.java | 8 ++-- .../resolvers/jobs/DataJobRunsResolver.java | 9 ++-- .../resolvers/jobs/EntityRunsResolver.java | 9 ++-- .../ownership/ListOwnershipTypesResolver.java | 2 +- .../resolvers/post/ListPostsResolver.java | 13 +++-- .../resolvers/query/ListQueriesResolver.java | 7 +-- .../AggregateAcrossEntitiesResolver.java | 3 +- .../search/GetQuickFiltersResolver.java | 3 +- .../search/SearchAcrossEntitiesResolver.java | 26 ++++++++-- .../resolvers/search/SearchResolver.java | 3 +- .../view/ListGlobalViewsResolver.java | 2 +- .../resolvers/view/ListMyViewsResolver.java | 2 +- .../src/main/resources/search.graphql | 7 ++- .../auth/ListAccessTokensResolverTest.java | 4 +- .../ContainerEntitiesResolverTest.java | 2 +- .../domain/DomainEntitiesResolverTest.java | 2 +- 
.../domain/ListDomainsResolverTest.java | 15 +++--- .../incident/EntityIncidentsResolverTest.java | 3 +- ...onSourceExecutionRequestsResolverTest.java | 4 +- .../secret/ListSecretsResolverTest.java | 6 +-- .../query/ListQueriesResolverTest.java | 7 +-- .../AggregateAcrossEntitiesResolverTest.java | 6 +-- .../search/GetQuickFiltersResolverTest.java | 5 +- .../SearchAcrossEntitiesResolverTest.java | 8 ++-- .../resolvers/search/SearchResolverTest.java | 12 +++-- .../metadata/aspect/GraphRetriever.java | 4 +- .../java/com/datahub/util/RecordUtils.java | 8 ++++ .../metadata/client/JavaEntityClient.java | 39 ++++++++------- .../graph/dgraph/DgraphGraphService.java | 2 +- .../graph/elastic/ESGraphQueryDAO.java | 8 ++-- .../elastic/ElasticSearchGraphService.java | 4 +- .../graph/neo4j/Neo4jGraphService.java | 2 +- .../metadata/search/LineageSearchService.java | 26 +++++----- .../metadata/search/SearchService.java | 28 +++++------ .../search/SearchServiceSearchRetriever.java | 2 +- .../search/cache/CachedSearchResult.java | 23 +++++++++ .../client/CachingEntitySearchService.java | 37 ++++++++------- .../elasticsearch/ElasticSearchService.java | 40 ++++++++-------- .../elasticsearch/query/ESSearchDAO.java | 34 +++++--------- .../query/request/SearchRequestHandler.java | 14 +++--- .../metadata/search/utils/ESUtils.java | 29 +++++------- .../ElasticSearchTimeseriesAspectService.java | 8 ++-- .../metadata/graph/GraphServiceTestBase.java | 2 +- .../fixtures/SampleDataFixtureTestBase.java | 6 +-- .../v2/delegates/EntityApiDelegateImpl.java | 21 ++++++--- .../controller/GenericEntitiesController.java | 15 ++++-- .../elastic/OperationsController.java | 21 +++++++-- .../v2/controller/TimeseriesController.java | 4 +- .../v3/controller/EntityControllerTest.java | 7 ++- ...com.linkedin.entity.entities.restspec.json | 28 +++++++++++ ...com.linkedin.entity.entities.snapshot.json | 28 +++++++++++ .../linkedin/entity/client/EntityClient.java | 20 ++++---- 
.../entity/client/RestliEntityClient.java | 45 ++++++++++++------ .../restli-servlet-impl/build.gradle | 3 ++ .../resources/entity/EntityResource.java | 47 ++++++++++++++++--- .../resources/restli/RestliConstants.java | 1 + .../mock/MockTimeseriesAspectService.java | 2 +- .../metadata/search/EntitySearchService.java | 22 ++++----- .../timeseries/TimeseriesAspectService.java | 2 +- 69 files changed, 519 insertions(+), 294 deletions(-) create mode 100644 metadata-io/src/main/java/com/linkedin/metadata/search/cache/CachedSearchResult.java diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetChartsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetChartsResolver.java index 767c9b4d4e71b..4847aea224ccd 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetChartsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetChartsResolver.java @@ -159,9 +159,10 @@ private SearchResult searchForNewUsers(@Nonnull final OperationContext opContext .setValue( String.valueOf( trailingMonthDateRange.getStart())))))))), - new SortCriterion() - .setField(CORP_USER_STATUS_LAST_MODIFIED_FIELD_NAME) - .setOrder(SortOrder.DESCENDING), + Collections.singletonList( + new SortCriterion() + .setField(CORP_USER_STATUS_LAST_MODIFIED_FIELD_NAME) + .setOrder(SortOrder.DESCENDING)), 0, 100); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetMetadataAnalyticsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetMetadataAnalyticsResolver.java index 01f2e6c8462e3..6045b1e726c7a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetMetadataAnalyticsResolver.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetMetadataAnalyticsResolver.java @@ -77,7 +77,8 @@ private List getCharts(MetadataAnalyticsInput input, OperationCo } SearchResult searchResult = - _entityClient.searchAcrossEntities(opContext, entities, query, filter, 0, 0, null, null); + _entityClient.searchAcrossEntities( + opContext, entities, query, filter, 0, 0, Collections.emptyList(), null); List aggregationMetadataList = searchResult.getMetadata().getAggregations(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/DebugAccessResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/DebugAccessResolver.java index 44604e92c35de..8372b6b5126a3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/DebugAccessResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/DebugAccessResolver.java @@ -33,6 +33,7 @@ import java.net.URISyntaxException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -181,7 +182,7 @@ private Set getPoliciesFor( Constants.POLICY_ENTITY_NAME, "", buildFilterToGetPolicies(user, groups, roles), - sortCriterion, + Collections.singletonList(sortCriterion), 0, 10000) .getEntities() diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolver.java index dc57ed3c673c1..e0ecebbbc7bc2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolver.java @@ -59,10 +59,11 @@ public CompletableFuture 
get(DataFetchingEnvironment envi if (AuthorizationUtils.canManageTokens(context) || isListingSelfTokens(filters, context)) { try { - final SortCriterion sortCriterion = - new SortCriterion() - .setField(EXPIRES_AT_FIELD_NAME) - .setOrder(SortOrder.DESCENDING); + final List sortCriteria = + Collections.singletonList( + new SortCriterion() + .setField(EXPIRES_AT_FIELD_NAME) + .setOrder(SortOrder.DESCENDING)); final SearchResult searchResult = _entityClient.search( context @@ -74,7 +75,7 @@ public CompletableFuture get(DataFetchingEnvironment envi filters, Collections.emptyList(), context.getOperationContext().getAspectRetriever()), - sortCriterion, + sortCriteria, start, count); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolver.java index 15927eef236ca..5a3207633c07c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolver.java @@ -19,6 +19,7 @@ import com.linkedin.metadata.query.filter.Filter; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import java.util.Collections; import java.util.List; import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; @@ -92,7 +93,7 @@ public CompletableFuture get(final DataFetchingEnvironment enviro new CriterionArray(ImmutableList.of(filterCriterion))))), start, count, - null, + Collections.emptyList(), null)); } catch (Exception e) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolver.java index 75796f637525e..6a880503802cb 100644 
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolver.java @@ -19,6 +19,7 @@ import com.linkedin.metadata.query.filter.Filter; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import java.util.Collections; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; @@ -98,7 +99,7 @@ public CompletableFuture get(final DataFetchingEnvironment enviro new ConjunctiveCriterion().setAnd(criteria))), start, count, - null, + Collections.emptyList(), null)); } catch (Exception e) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolver.java index 0c16470c642b7..e6d4238bc7054 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolver.java @@ -22,6 +22,7 @@ import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; import java.util.ArrayList; +import java.util.Collections; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; @@ -66,9 +67,10 @@ public CompletableFuture get(final DataFetchingEnvironment en Constants.DOMAIN_ENTITY_NAME, query, filter, - new SortCriterion() - .setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME) - .setOrder(SortOrder.DESCENDING), + Collections.singletonList( + new SortCriterion() + .setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME) + .setOrder(SortOrder.DESCENDING)), start, count); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/ListGroupsResolver.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/ListGroupsResolver.java index fce404a6baa16..0632af68998dc 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/ListGroupsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/ListGroupsResolver.java @@ -21,6 +21,7 @@ import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; import java.util.ArrayList; +import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -64,9 +65,10 @@ public CompletableFuture get(final DataFetchingEnvironment env CORP_GROUP_ENTITY_NAME, query, null, - new SortCriterion() - .setField(CORP_GROUP_CREATED_TIME_INDEX_FIELD_NAME) - .setOrder(SortOrder.DESCENDING), + Collections.singletonList( + new SortCriterion() + .setField(CORP_GROUP_CREATED_TIME_INDEX_FIELD_NAME) + .setOrder(SortOrder.DESCENDING)), start, count); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/incident/EntityIncidentsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/incident/EntityIncidentsResolver.java index 2d4b24243073a..d79634c27d881 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/incident/EntityIncidentsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/incident/EntityIncidentsResolver.java @@ -21,6 +21,7 @@ import graphql.schema.DataFetchingEnvironment; import java.net.URISyntaxException; import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -60,13 +61,13 @@ public CompletableFuture get(DataFetchingEnvironment envi // Index! // We use the search index so that we can easily sort by the last updated time. 
final Filter filter = buildIncidentsEntityFilter(entityUrn, maybeState); - final SortCriterion sortCriterion = buildIncidentsSortCriterion(); + final List sortCriteria = buildIncidentsSortCriteria(); final SearchResult searchResult = _entityClient.filter( context.getOperationContext(), Constants.INCIDENT_ENTITY_NAME, filter, - sortCriterion, + sortCriteria, start, count); @@ -118,10 +119,10 @@ private Filter buildIncidentsEntityFilter( return QueryUtils.newFilter(criterionMap); } - private SortCriterion buildIncidentsSortCriterion() { + private List buildIncidentsSortCriteria() { final SortCriterion sortCriterion = new SortCriterion(); sortCriterion.setField(CREATED_TIME_SEARCH_INDEX_FIELD_NAME); sortCriterion.setOrder(SortOrder.DESCENDING); - return sortCriterion; + return Collections.singletonList(sortCriterion); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolver.java index 4a3b75deddc45..a4c2ab42227d9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolver.java @@ -23,6 +23,7 @@ import com.linkedin.metadata.search.SearchResult; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import java.util.Collections; import java.util.Map; import java.util.Objects; import java.util.Set; @@ -76,9 +77,10 @@ public CompletableFuture get( new ConjunctiveCriterion() .setAnd( new CriterionArray(ImmutableList.of(filterCriterion))))), - new SortCriterion() - .setField(REQUEST_TIME_MS_FIELD_NAME) - .setOrder(SortOrder.DESCENDING), + Collections.singletonList( + new SortCriterion() + 
.setField(REQUEST_TIME_MS_FIELD_NAME) + .setOrder(SortOrder.DESCENDING)), start, count); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolver.java index 106a2d0d1e18e..bf8d7c800ccae 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolver.java @@ -26,6 +26,7 @@ import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; import java.util.ArrayList; +import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -73,9 +74,10 @@ public CompletableFuture get(final DataFetchingEnvironment en Constants.SECRETS_ENTITY_NAME, query, null, - new SortCriterion() - .setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME) - .setOrder(SortOrder.DESCENDING), + Collections.singletonList( + new SortCriterion() + .setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME) + .setOrder(SortOrder.DESCENDING)), start, count); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/DataJobRunsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/DataJobRunsResolver.java index 591712ef3f55b..09039e530631d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/DataJobRunsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/DataJobRunsResolver.java @@ -26,6 +26,7 @@ import graphql.schema.DataFetchingEnvironment; import java.net.URISyntaxException; import java.util.ArrayList; +import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -61,13 +62,13 @@ public CompletableFuture 
get(DataFetchingEnvironment // Index! // We use the search index so that we can easily sort by the last updated time. final Filter filter = buildTaskRunsEntityFilter(entityUrn); - final SortCriterion sortCriterion = buildTaskRunsSortCriterion(); + final List sortCriteria = buildTaskRunsSortCriteria(); final SearchResult gmsResult = _entityClient.filter( context.getOperationContext(), Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME, filter, - sortCriterion, + sortCriteria, start, count); final List dataProcessInstanceUrns = @@ -123,10 +124,10 @@ private Filter buildTaskRunsEntityFilter(final String entityUrn) { return filter; } - private SortCriterion buildTaskRunsSortCriterion() { + private List buildTaskRunsSortCriteria() { final SortCriterion sortCriterion = new SortCriterion(); sortCriterion.setField(CREATED_TIME_SEARCH_INDEX_FIELD_NAME); sortCriterion.setOrder(SortOrder.DESCENDING); - return sortCriterion; + return Collections.singletonList(sortCriterion); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/EntityRunsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/EntityRunsResolver.java index 163fc30fb6e6c..82c5b73d87152 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/EntityRunsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/EntityRunsResolver.java @@ -27,6 +27,7 @@ import graphql.schema.DataFetchingEnvironment; import java.net.URISyntaxException; import java.util.ArrayList; +import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -67,13 +68,13 @@ public CompletableFuture get(DataFetchingEnvironment // Index! // We use the search index so that we can easily sort by the last updated time. 
final Filter filter = buildTaskRunsEntityFilter(entityUrn, direction); - final SortCriterion sortCriterion = buildTaskRunsSortCriterion(); + final List sortCriteria = buildTaskRunsSortCriteria(); final SearchResult gmsResult = _entityClient.filter( context.getOperationContext(), Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME, filter, - sortCriterion, + sortCriteria, start, count); final List dataProcessInstanceUrns = @@ -133,10 +134,10 @@ private Filter buildTaskRunsEntityFilter( return filter; } - private SortCriterion buildTaskRunsSortCriterion() { + private List buildTaskRunsSortCriteria() { final SortCriterion sortCriterion = new SortCriterion(); sortCriterion.setField(CREATED_TIME_SEARCH_INDEX_FIELD_NAME); sortCriterion.setOrder(SortOrder.DESCENDING); - return sortCriterion; + return Collections.singletonList(sortCriterion); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolver.java index 9f6951e44dd73..da0d5dd07a94f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolver.java @@ -67,7 +67,7 @@ public CompletableFuture get(DataFetchingEnvironment e filters, Collections.emptyList(), context.getOperationContext().getAspectRetriever()), - DEFAULT_SORT_CRITERION, + Collections.singletonList(DEFAULT_SORT_CRITERION), start, count); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolver.java index 12e4047c2dc4e..dc7797882371b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolver.java 
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolver.java @@ -18,7 +18,9 @@ import com.linkedin.metadata.search.SearchResult; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import java.util.Collections; import java.util.HashSet; +import java.util.List; import java.util.Map; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; @@ -49,10 +51,11 @@ public CompletableFuture get(final DataFetchingEnvironment envi return GraphQLConcurrencyUtils.supplyAsync( () -> { try { - final SortCriterion sortCriterion = - new SortCriterion() - .setField(LAST_MODIFIED_FIELD_NAME) - .setOrder(SortOrder.DESCENDING); + final List sortCriteria = + Collections.singletonList( + new SortCriterion() + .setField(LAST_MODIFIED_FIELD_NAME) + .setOrder(SortOrder.DESCENDING)); // First, get all Post Urns. final SearchResult gmsResult = @@ -61,7 +64,7 @@ public CompletableFuture get(final DataFetchingEnvironment envi POST_ENTITY_NAME, query, null, - sortCriterion, + sortCriteria, start, count); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolver.java index 95be3a68e895c..aa411f019a4c0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolver.java @@ -61,8 +61,9 @@ public CompletableFuture get(final DataFetchingEnvironment en return GraphQLConcurrencyUtils.supplyAsync( () -> { try { - final SortCriterion sortCriterion = - new SortCriterion().setField(CREATED_AT_FIELD).setOrder(SortOrder.DESCENDING); + final List sortCriteria = + Collections.singletonList( + new SortCriterion().setField(CREATED_AT_FIELD).setOrder(SortOrder.DESCENDING)); // First, get all 
Query Urns. final SearchResult gmsResult = @@ -74,7 +75,7 @@ public CompletableFuture get(final DataFetchingEnvironment en QUERY_ENTITY_NAME, query, buildFilters(input, context.getOperationContext().getAspectRetriever()), - sortCriterion, + sortCriteria, start, count); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolver.java index 04a72b14eeb02..19bccaf265086 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolver.java @@ -22,6 +22,7 @@ import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; import java.util.ArrayList; +import java.util.Collections; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; @@ -94,7 +95,7 @@ public CompletableFuture get(DataFetchingEnvironment environme : inputFilter, 0, 0, // 0 entity count because we don't want resolved entities - null, + Collections.emptyList(), facets)); } catch (Exception e) { log.error( diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolver.java index a61d9111321ca..b07e3fa912641 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolver.java @@ -25,6 +25,7 @@ import graphql.schema.DataFetchingEnvironment; import io.datahubproject.metadata.context.OperationContext; import java.util.ArrayList; +import 
java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.Optional; @@ -107,7 +108,7 @@ private SearchResult getSearchResults( : null, 0, 0, - null, + Collections.emptyList(), null); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolver.java index 287e339ddee50..0dbed92b7d58e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolver.java @@ -18,8 +18,10 @@ import com.linkedin.view.DataHubViewInfo; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import java.util.Collections; import java.util.List; import java.util.concurrent.CompletableFuture; +import java.util.stream.Collectors; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; @@ -65,10 +67,24 @@ public CompletableFuture get(DataFetchingEnvironment environment) context.getOperationContext().getAspectRetriever()); SearchFlags searchFlags = mapInputFlags(context, input.getSearchFlags()); - SortCriterion sortCriterion = - input.getSortInput() != null - ? mapSortCriterion(input.getSortInput().getSortCriterion()) - : null; + List sortCriteria; + if (input.getSortInput() != null) { + if (input.getSortInput().getSortCriteria() != null) { + sortCriteria = + input.getSortInput().getSortCriteria().stream() + .map(SearchUtils::mapSortCriterion) + .collect(Collectors.toList()); + } else { + sortCriteria = + input.getSortInput().getSortCriterion() != null + ? 
Collections.singletonList( + mapSortCriterion(input.getSortInput().getSortCriterion())) + : Collections.emptyList(); + } + + } else { + sortCriteria = Collections.emptyList(); + } try { log.debug( @@ -100,7 +116,7 @@ public CompletableFuture get(DataFetchingEnvironment environment) : baseFilter, start, count, - sortCriterion)); + sortCriteria)); } catch (Exception e) { log.error( "Failed to execute search for multiple entities: entity types {}, query {}, filters: {}, start: {}, count: {}", diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolver.java index 5fb2f8f14b293..7a48e305dbfe4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolver.java @@ -20,6 +20,7 @@ import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; import io.opentelemetry.extension.annotations.WithSpan; +import java.util.Collections; import java.util.concurrent.CompletableFuture; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; @@ -89,7 +90,7 @@ public CompletableFuture get(DataFetchingEnvironment environment) input.getFilters(), input.getOrFilters(), context.getOperationContext().getAspectRetriever()), - null, + Collections.emptyList(), start, count)); } catch (Exception e) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolver.java index 952e55ca117f2..265f4d5f5d56e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolver.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolver.java @@ -74,7 +74,7 @@ public CompletableFuture get(final DataFetchingEnvironment envi Constants.DATAHUB_VIEW_ENTITY_NAME, query, buildFilters(context.getOperationContext().getAspectRetriever()), - DEFAULT_SORT_CRITERION, + Collections.singletonList(DEFAULT_SORT_CRITERION), start, count); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolver.java index 32eb0e46bb616..abfdeb2d60869 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolver.java @@ -79,7 +79,7 @@ public CompletableFuture get(final DataFetchingEnvironment envi viewType, context.getActorUrn(), context.getOperationContext().getAspectRetriever()), - DEFAULT_SORT_CRITERION, + Collections.singletonList(DEFAULT_SORT_CRITERION), start, count); diff --git a/datahub-graphql-core/src/main/resources/search.graphql b/datahub-graphql-core/src/main/resources/search.graphql index c7b5e61e9831c..09a7217073527 100644 --- a/datahub-graphql-core/src/main/resources/search.graphql +++ b/datahub-graphql-core/src/main/resources/search.graphql @@ -1372,7 +1372,12 @@ input SearchSortInput { """ A criterion to sort search results on """ - sortCriterion: SortCriterion! + sortCriterion: SortCriterion @deprecated(reason: "Use sortCriteria instead") + + """ + A list of values to sort search results on + """ + sortCriteria: [SortCriterion!] 
} """ diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolverTest.java index 6c876226a45e6..020f74475ea60 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolverTest.java @@ -12,11 +12,11 @@ import com.linkedin.datahub.graphql.generated.ListAccessTokenResult; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; import graphql.schema.DataFetchingEnvironment; import java.util.Collections; +import java.util.List; import org.mockito.Mockito; import org.testng.annotations.Test; @@ -47,7 +47,7 @@ public void testGetSuccess() throws Exception { Mockito.eq(Constants.ACCESS_TOKEN_ENTITY_NAME), Mockito.eq(""), Mockito.eq(buildFilter(filters, Collections.emptyList(), null)), - Mockito.any(SortCriterion.class), + Mockito.any(List.class), Mockito.eq(input.getStart()), Mockito.eq(input.getCount()))) .thenReturn( diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolverTest.java index c63c9bccab68b..48732727762ee 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolverTest.java @@ -62,7 +62,7 @@ public void testGetSuccess() throws Exception { new 
CriterionArray(ImmutableList.of(filterCriterion)))))), Mockito.eq(0), Mockito.eq(20), - Mockito.eq(null), + Mockito.eq(Collections.emptyList()), Mockito.eq(null))) .thenReturn( new SearchResult() diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolverTest.java index f970f9e2ea431..ad5d7f1ef6b06 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolverTest.java @@ -68,7 +68,7 @@ public void testGetSuccess() throws Exception { new CriterionArray(ImmutableList.of(filterCriterion)))))), Mockito.eq(0), Mockito.eq(20), - Mockito.eq(null), + Mockito.eq(Collections.emptyList()), Mockito.eq(null))) .thenReturn( new SearchResult() diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolverTest.java index 53a16ed5f6cc8..c3b1a8c564855 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolverTest.java @@ -20,6 +20,7 @@ import com.linkedin.metadata.search.SearchResult; import com.linkedin.r2.RemoteInvocationException; import graphql.schema.DataFetchingEnvironment; +import java.util.Collections; import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; @@ -47,9 +48,10 @@ public void testGetSuccess() throws Exception { Mockito.eq(""), Mockito.eq(DomainUtils.buildParentDomainFilter(TEST_PARENT_DOMAIN_URN)), Mockito.eq( - new SortCriterion() - 
.setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME) - .setOrder(SortOrder.DESCENDING)), + Collections.singletonList( + new SortCriterion() + .setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME) + .setOrder(SortOrder.DESCENDING))), Mockito.eq(0), Mockito.eq(20))) .thenReturn( @@ -90,9 +92,10 @@ public void testGetSuccessNoParentDomain() throws Exception { Mockito.eq(""), Mockito.eq(DomainUtils.buildParentDomainFilter(null)), Mockito.eq( - new SortCriterion() - .setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME) - .setOrder(SortOrder.DESCENDING)), + Collections.singletonList( + new SortCriterion() + .setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME) + .setOrder(SortOrder.DESCENDING))), Mockito.eq(0), Mockito.eq(20))) .thenReturn( diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/incident/EntityIncidentsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/incident/EntityIncidentsResolverTest.java index 4be7eeba1d018..4750143b8add8 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/incident/EntityIncidentsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/incident/EntityIncidentsResolverTest.java @@ -37,6 +37,7 @@ import com.linkedin.metadata.search.utils.QueryUtils; import graphql.schema.DataFetchingEnvironment; import io.datahubproject.metadata.context.OperationContext; +import java.util.Collections; import java.util.HashMap; import java.util.Map; import org.mockito.Mockito; @@ -92,7 +93,7 @@ public void testGetSuccess() throws Exception { Mockito.any(), Mockito.eq(Constants.INCIDENT_ENTITY_NAME), Mockito.eq(expectedFilter), - Mockito.eq(expectedSort), + Mockito.eq(Collections.singletonList(expectedSort)), Mockito.eq(0), Mockito.eq(10))) .thenReturn( diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolverTest.java 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolverTest.java index c96dfe89adc5e..fe4fe00454a26 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolverTest.java @@ -22,7 +22,6 @@ import com.linkedin.execution.ExecutionRequestResult; import com.linkedin.metadata.Constants; import com.linkedin.metadata.query.filter.Filter; -import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; @@ -30,6 +29,7 @@ import graphql.schema.DataFetchingEnvironment; import io.datahubproject.metadata.context.OperationContext; import java.util.HashSet; +import java.util.List; import org.mockito.Mockito; import org.testng.annotations.Test; @@ -46,7 +46,7 @@ public void testGetSuccess() throws Exception { any(), Mockito.eq(Constants.EXECUTION_REQUEST_ENTITY_NAME), Mockito.any(Filter.class), - Mockito.any(SortCriterion.class), + Mockito.any(List.class), Mockito.eq(0), Mockito.eq(10))) .thenReturn( diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolverTest.java index 82b8d895384ca..96a12dc3be5a7 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolverTest.java @@ -15,7 +15,6 @@ import com.linkedin.entity.EnvelopedAspectMap; import 
com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; @@ -24,6 +23,7 @@ import graphql.schema.DataFetchingEnvironment; import io.datahubproject.metadata.context.OperationContext; import java.util.HashSet; +import java.util.List; import org.mockito.Mockito; import org.testng.annotations.Test; @@ -44,7 +44,7 @@ public void testGetSuccess() throws Exception { Mockito.eq(Constants.SECRETS_ENTITY_NAME), Mockito.eq(""), Mockito.eq(null), - Mockito.any(SortCriterion.class), + Mockito.any(List.class), Mockito.eq(0), Mockito.eq(20))) .thenReturn( @@ -112,7 +112,7 @@ public void testGetUnauthorized() throws Exception { Mockito.any(), Mockito.eq(""), Mockito.eq(null), - Mockito.any(SortCriterion.class), + Mockito.any(List.class), Mockito.anyInt(), Mockito.anyInt()); } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolverTest.java index 70b427a1606f1..ee728b17e8c62 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolverTest.java @@ -69,9 +69,10 @@ public void testGetSuccess(final ListQueriesInput input) throws Exception { : input.getQuery()), Mockito.eq(buildFilter(input.getSource(), input.getDatasetUrn())), Mockito.eq( - new SortCriterion() - .setField(ListQueriesResolver.CREATED_AT_FIELD) - .setOrder(SortOrder.DESCENDING)), + Collections.singletonList( + new SortCriterion() + .setField(ListQueriesResolver.CREATED_AT_FIELD) + .setOrder(SortOrder.DESCENDING))), Mockito.eq(input.getStart()), 
Mockito.eq(input.getCount()))) .thenReturn( diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolverTest.java index 40062ed08977a..d32eb9fcf120c 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolverTest.java @@ -324,7 +324,7 @@ public static void testErrorFetchingResults() throws Exception { Mockito.any(), Mockito.anyInt(), Mockito.anyInt(), - Mockito.eq(null), + Mockito.eq(Collections.emptyList()), Mockito.eq(null))) .thenThrow(new RemoteInvocationException()); @@ -397,7 +397,7 @@ private static EntityClient initMockEntityClient( Mockito.eq(filter), Mockito.eq(start), Mockito.eq(limit), - Mockito.eq(null), + Mockito.eq(Collections.emptyList()), Mockito.eq(facets))) .thenReturn(result); return client; @@ -420,7 +420,7 @@ private static void verifyMockEntityClient( Mockito.eq(filter), Mockito.eq(start), Mockito.eq(limit), - Mockito.eq(null), + Mockito.eq(Collections.emptyList()), Mockito.eq(facets)); } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolverTest.java index 25e374c766deb..64042e82bbfe8 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolverTest.java @@ -24,6 +24,7 @@ import com.linkedin.r2.RemoteInvocationException; import graphql.schema.DataFetchingEnvironment; import java.util.ArrayList; +import 
java.util.Collections; import java.util.List; import java.util.concurrent.CompletionException; import java.util.stream.Collectors; @@ -114,7 +115,7 @@ public static void testGetQuickFiltersFailure() throws Exception { Mockito.any(), Mockito.anyInt(), Mockito.anyInt(), - Mockito.eq(null), + Mockito.eq(Collections.emptyList()), Mockito.eq(null))) .thenThrow(new RemoteInvocationException()); @@ -300,7 +301,7 @@ private static EntityClient initMockEntityClient( Mockito.eq(filter), Mockito.eq(start), Mockito.eq(limit), - Mockito.eq(null), + Mockito.eq(Collections.emptyList()), Mockito.eq(null))) .thenReturn(result); return client; diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolverTest.java index bcbfda6c71bba..30d6f2dc6f283 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolverTest.java @@ -437,8 +437,8 @@ public static void testApplyViewErrorFetchingView() throws Exception { Mockito.any(), Mockito.anyInt(), Mockito.anyInt(), - Mockito.eq(null), - Mockito.eq(null))) + Mockito.eq(Collections.emptyList()), + Mockito.eq(Collections.emptyList()))) .thenThrow(new RemoteInvocationException()); final SearchAcrossEntitiesResolver resolver = @@ -485,7 +485,7 @@ private static EntityClient initMockEntityClient( Mockito.eq(filter), Mockito.eq(start), Mockito.eq(limit), - Mockito.eq(null))) + Mockito.eq(Collections.emptyList()))) .thenReturn(result); return client; } @@ -506,7 +506,7 @@ private static void verifyMockEntityClient( Mockito.eq(filter), Mockito.eq(start), Mockito.eq(limit), - Mockito.eq(null)); + Mockito.eq(Collections.emptyList())); } private static void verifyMockViewService(ViewService 
mockService, Urn viewUrn) { diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolverTest.java index a5310a052f613..fbbf5cf314eda 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolverTest.java @@ -18,6 +18,8 @@ import com.linkedin.metadata.search.SearchResult; import com.linkedin.metadata.search.SearchResultMetadata; import graphql.schema.DataFetchingEnvironment; +import java.util.Collections; +import java.util.List; import org.mockito.Mockito; import org.testng.annotations.Test; @@ -56,7 +58,7 @@ public void testDefaultSearchFlags() throws Exception { Constants.DATASET_ENTITY_NAME, // Verify that merged entity types were used. "", null, - null, + Collections.emptyList(), 0, 10, setConvertSchemaFieldsToDatasets( @@ -97,7 +99,7 @@ public void testOverrideSearchFlags() throws Exception { Constants.DATASET_ENTITY_NAME, // Verify that merged entity types were used. "", null, - null, + Collections.emptyList(), 1, 11, setConvertSchemaFieldsToDatasets( @@ -129,7 +131,7 @@ public void testNonWildCardSearchFlags() throws Exception { Constants.DATASET_ENTITY_NAME, // Verify that merged entity types were used. "not a wildcard", null, // Verify that view filter was used. 
- null, + Collections.emptyList(), 0, 10, setConvertSchemaFieldsToDatasets( @@ -170,7 +172,7 @@ private void verifyMockSearchEntityClient( String entityName, String query, Filter filter, - SortCriterion sortCriterion, + List sortCriteria, int start, int limit, com.linkedin.metadata.query.SearchFlags searchFlags) @@ -181,7 +183,7 @@ private void verifyMockSearchEntityClient( Mockito.eq(entityName), Mockito.eq(query), Mockito.eq(filter), - Mockito.eq(sortCriterion), + Mockito.eq(sortCriteria), Mockito.eq(start), Mockito.eq(limit)); } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/GraphRetriever.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/GraphRetriever.java index 9757a10e3f3c2..cedaac25ffee9 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/aspect/GraphRetriever.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/GraphRetriever.java @@ -20,7 +20,7 @@ public interface GraphRetriever { * @param destinationEntityFilter * @param relationshipTypes * @param relationshipFilter - * @param sortCriterion + * @param sortCriteria * @param scrollId * @param count * @param startTimeMillis @@ -35,7 +35,7 @@ RelatedEntitiesScrollResult scrollRelatedEntities( @Nonnull Filter destinationEntityFilter, @Nonnull List relationshipTypes, @Nonnull RelationshipFilter relationshipFilter, - @Nonnull List sortCriterion, + @Nonnull List sortCriteria, @Nullable String scrollId, int count, @Nullable Long startTimeMillis, diff --git a/li-utils/src/main/java/com/datahub/util/RecordUtils.java b/li-utils/src/main/java/com/datahub/util/RecordUtils.java index 2955943919e3b..2d0881b6984e4 100644 --- a/li-utils/src/main/java/com/datahub/util/RecordUtils.java +++ b/li-utils/src/main/java/com/datahub/util/RecordUtils.java @@ -78,6 +78,14 @@ public static String toJsonString(@Nonnull RecordTemplate recordTemplate) { } } + public static String toJsonString(@Nonnull List recordTemplates) { + StringBuilder json = new 
StringBuilder(); + for (RecordTemplate recordTemplate : recordTemplates) { + json.append(toJsonString(recordTemplate)); + } + return json.toString(); + } + /** * Creates a {@link RecordTemplate} object from a serialized JSON string. * diff --git a/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java b/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java index f8370c9efe3e6..60a991c19ae8b 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java @@ -62,6 +62,7 @@ import java.time.Clock; import java.util.ArrayList; import java.util.Collection; +import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -375,7 +376,13 @@ public SearchResult search( return ValidationUtils.validateSearchResult( opContext, entitySearchService.search( - opContext, List.of(entity), input, newFilter(requestFilters), null, start, count), + opContext, + List.of(entity), + input, + newFilter(requestFilters), + Collections.emptyList(), + start, + count), entityService); } @@ -406,7 +413,7 @@ public ListResult list( opContext.withSearchFlags(flags -> flags.setFulltext(false)), entity, newFilter(requestFilters), - null, + Collections.emptyList(), start, count)), entityService); @@ -417,7 +424,7 @@ public ListResult list( * * @param input search query * @param filter search filters - * @param sortCriterion sort criterion + * @param sortCriteria sort criteria * @param start start offset for search results * @param count max number of search results requested * @return Snapshot key @@ -429,14 +436,14 @@ public SearchResult search( @Nonnull String entity, @Nonnull String input, @Nullable Filter filter, - @Nullable SortCriterion sortCriterion, + List sortCriteria, int start, int count) throws RemoteInvocationException { return ValidationUtils.validateSearchResult( opContext, 
entitySearchService.search( - opContext, List.of(entity), input, filter, sortCriterion, start, count), + opContext, List.of(entity), input, filter, sortCriteria, start, count), entityService); } @@ -449,10 +456,10 @@ public SearchResult searchAcrossEntities( @Nullable Filter filter, int start, int count, - @Nullable SortCriterion sortCriterion) + List sortCriteria) throws RemoteInvocationException { return searchAcrossEntities( - opContext, entities, input, filter, start, count, sortCriterion, null); + opContext, entities, input, filter, start, count, sortCriteria, null); } /** @@ -464,7 +471,7 @@ public SearchResult searchAcrossEntities( * @param start start offset for search results * @param count max number of search results requested * @param facets list of facets we want aggregations for - * @param sortCriterion sorting criterion + * @param sortCriteria sorting criteria * @return Snapshot key * @throws RemoteInvocationException when unable to execute request */ @@ -476,7 +483,7 @@ public SearchResult searchAcrossEntities( @Nullable Filter filter, int start, int count, - @Nullable SortCriterion sortCriterion, + List sortCriteria, @Nullable List facets) throws RemoteInvocationException { @@ -487,7 +494,7 @@ public SearchResult searchAcrossEntities( entities, input, filter, - sortCriterion, + sortCriteria, start, count, facets), @@ -529,7 +536,7 @@ public LineageSearchResult searchAcrossLineage( @Nullable String input, @Nullable Integer maxHops, @Nullable Filter filter, - @Nullable SortCriterion sortCriterion, + List sortCriteria, int start, int count) throws RemoteInvocationException { @@ -543,7 +550,7 @@ public LineageSearchResult searchAcrossLineage( input, maxHops, filter, - sortCriterion, + sortCriteria, start, count), entityService); @@ -559,7 +566,7 @@ public LineageScrollResult scrollAcrossLineage( @Nullable String input, @Nullable Integer maxHops, @Nullable Filter filter, - @Nullable SortCriterion sortCriterion, + List sortCriteria, @Nullable String 
scrollId, @Nonnull String keepAlive, int count) @@ -577,7 +584,7 @@ public LineageScrollResult scrollAcrossLineage( input, maxHops, filter, - sortCriterion, + sortCriteria, scrollId, keepAlive, count), @@ -645,7 +652,7 @@ public SearchResult filter( @Nonnull OperationContext opContext, @Nonnull String entity, @Nonnull Filter filter, - @Nullable SortCriterion sortCriterion, + List sortCriteria, int start, int count) throws RemoteInvocationException { @@ -655,7 +662,7 @@ public SearchResult filter( opContext.withSearchFlags(flags -> flags.setFulltext(true)), entity, filter, - sortCriterion, + sortCriteria, start, count), entityService); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/dgraph/DgraphGraphService.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/dgraph/DgraphGraphService.java index 27b603244d3b3..6703e07bfd915 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/dgraph/DgraphGraphService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/dgraph/DgraphGraphService.java @@ -788,7 +788,7 @@ public RelatedEntitiesScrollResult scrollRelatedEntities( @Nonnull Filter destinationEntityFilter, @Nonnull List relationshipTypes, @Nonnull RelationshipFilter relationshipFilter, - @Nonnull List sortCriterion, + @Nonnull List sortCriteria, @Nullable String scrollId, int count, @Nullable Long startTimeMillis, diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphQueryDAO.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphQueryDAO.java index e21c4e15b4fc9..50e5aa6ba893d 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphQueryDAO.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphQueryDAO.java @@ -1323,7 +1323,7 @@ public SearchResponse getSearchResponse( @Nullable final Filter destinationEntityFilter, @Nonnull final List relationshipTypes, @Nonnull final RelationshipFilter relationshipFilter, - @Nonnull 
List sortCriterion, + @Nonnull List sortCriteria, @Nullable String scrollId, int count) { @@ -1336,12 +1336,12 @@ public SearchResponse getSearchResponse( relationshipTypes, relationshipFilter); - return executeScrollSearchQuery(finalQuery, sortCriterion, scrollId, count); + return executeScrollSearchQuery(finalQuery, sortCriteria, scrollId, count); } private SearchResponse executeScrollSearchQuery( @Nonnull final QueryBuilder query, - @Nonnull List sortCriterion, + @Nonnull List sortCriteria, @Nullable String scrollId, final int count) { @@ -1357,7 +1357,7 @@ private SearchResponse executeScrollSearchQuery( searchSourceBuilder.size(count); searchSourceBuilder.query(query); - ESUtils.buildSortOrder(searchSourceBuilder, sortCriterion, List.of(), false); + ESUtils.buildSortOrder(searchSourceBuilder, sortCriteria, List.of(), false); searchRequest.source(searchSourceBuilder); ESUtils.setSearchAfter(searchSourceBuilder, sort, null, null); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ElasticSearchGraphService.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ElasticSearchGraphService.java index adcc64fd2bea7..5b0fb554a4f48 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ElasticSearchGraphService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ElasticSearchGraphService.java @@ -315,7 +315,7 @@ public RelatedEntitiesScrollResult scrollRelatedEntities( @Nullable Filter destinationEntityFilter, @Nonnull List relationshipTypes, @Nonnull RelationshipFilter relationshipFilter, - @Nonnull List sortCriterion, + @Nonnull List sortCriteria, @Nullable String scrollId, int count, @Nullable Long startTimeMillis, @@ -331,7 +331,7 @@ public RelatedEntitiesScrollResult scrollRelatedEntities( destinationEntityFilter, relationshipTypes, relationshipFilter, - sortCriterion, + sortCriteria, scrollId, count); diff --git 
a/metadata-io/src/main/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphService.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphService.java index 16c0804538dd7..9fe9c242fe48c 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphService.java @@ -921,7 +921,7 @@ public RelatedEntitiesScrollResult scrollRelatedEntities( @Nonnull Filter destinationEntityFilter, @Nonnull List relationshipTypes, @Nonnull RelationshipFilter relationshipFilter, - @Nonnull List sortCriterion, + @Nonnull List sortCriteria, @Nullable String scrollId, int count, @Nullable Long startTimeMillis, diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/LineageSearchService.java b/metadata-io/src/main/java/com/linkedin/metadata/search/LineageSearchService.java index 95c8eb13beb93..d07882963e281 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/LineageSearchService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/LineageSearchService.java @@ -118,7 +118,7 @@ public class LineageSearchService { * @param maxHops the maximum number of hops away to search for. 
If null, defaults to 1000 * @param inputFilters the request map with fields and values as filters to be applied to search * hits - * @param sortCriterion {@link SortCriterion} to be applied to search results + * @param sortCriteria list of {@link SortCriterion} to be applied to search results * @param from index to start the search from * @param size the number of search hits to return * @return a {@link LineageSearchResult} that contains a list of matched documents and related @@ -134,7 +134,7 @@ public LineageSearchResult searchAcrossLineage( @Nullable String input, @Nullable Integer maxHops, @Nullable Filter inputFilters, - @Nullable SortCriterion sortCriterion, + List sortCriteria, int from, int size) { @@ -255,7 +255,7 @@ public LineageSearchResult searchAcrossLineage( SearchUtils.removeCriteria( inputFilters, criterion -> criterion.getField().equals(DEGREE_FILTER_INPUT)); - if (canDoLightning(lineageRelationships, finalInput, reducedFilters, sortCriterion)) { + if (canDoLightning(lineageRelationships, finalInput, reducedFilters, sortCriteria)) { codePath = "lightning"; // use lightning approach to return lineage search results LineageSearchResult lineageSearchResult = @@ -276,7 +276,7 @@ public LineageSearchResult searchAcrossLineage( lineageRelationships, finalInput, reducedFilters, - sortCriterion, + sortCriteria, from, size); if (!lineageSearchResult.getEntities().isEmpty()) { @@ -303,7 +303,7 @@ boolean canDoLightning( List lineageRelationships, String input, Filter inputFilters, - SortCriterion sortCriterion) { + List sortCriteria) { boolean simpleFilters = inputFilters == null || inputFilters.getOr() == null @@ -318,7 +318,7 @@ boolean canDoLightning( return (lineageRelationships.size() > cacheConfiguration.getLightningThreshold()) && input.equals("*") && simpleFilters - && sortCriterion == null; + && CollectionUtils.isEmpty(sortCriteria); } @VisibleForTesting @@ -533,7 +533,7 @@ private LineageSearchResult getSearchResultInBatches( List 
lineageRelationships, @Nonnull String input, @Nullable Filter inputFilters, - @Nullable SortCriterion sortCriterion, + List sortCriteria, int from, int size) { @@ -566,7 +566,7 @@ private LineageSearchResult getSearchResultInBatches( entitiesToQuery, input, finalFilter, - sortCriterion, + sortCriteria, queryFrom, querySize), urnToRelationship); @@ -761,7 +761,7 @@ private LineageSearchEntity buildLineageSearchEntity( * @param maxHops the maximum number of hops away to search for. If null, defaults to 1000 * @param inputFilters the request map with fields and values as filters to be applied to search * hits - * @param sortCriterion {@link SortCriterion} to be applied to search results + * @param sortCriteria list of {@link SortCriterion} to be applied to search results * @param scrollId opaque scroll identifier to pass to search service * @param size the number of search hits to return * @return a {@link LineageSearchResult} that contains a list of matched documents and related @@ -777,7 +777,7 @@ public LineageScrollResult scrollAcrossLineage( @Nullable String input, @Nullable Integer maxHops, @Nullable Filter inputFilters, - @Nullable SortCriterion sortCriterion, + List sortCriteria, @Nullable String scrollId, @Nonnull String keepAlive, int size) { @@ -831,7 +831,7 @@ public LineageScrollResult scrollAcrossLineage( lineageRelationships, input != null ? 
input : "*", reducedFilters, - sortCriterion, + sortCriteria, scrollId, keepAlive, size); @@ -843,7 +843,7 @@ private LineageScrollResult getScrollResultInBatches( List lineageRelationships, @Nonnull String input, @Nullable Filter inputFilters, - @Nullable SortCriterion sortCriterion, + List sortCriteria, @Nullable String scrollId, @Nonnull String keepAlive, int size) { @@ -878,7 +878,7 @@ private LineageScrollResult getScrollResultInBatches( entitiesToQuery, input, finalFilter, - sortCriterion, + sortCriteria, scrollId, keepAlive, querySize), diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/SearchService.java b/metadata-io/src/main/java/com/linkedin/metadata/search/SearchService.java index c8525f829d206..1cd738656d972 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/SearchService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/SearchService.java @@ -65,7 +65,7 @@ public Map docCountPerEntity( * @param input the search input text * @param postFilters the request map with fields and values as filters to be applied to search * hits - * @param sortCriterion {@link SortCriterion} to be applied to search results + * @param sortCriteria list of {@link SortCriterion} to be applied to search results * @param from index to start the search from * @param size the number of search hits to return * @return a {@link SearchResult} that contains a list of matched documents and related search @@ -77,7 +77,7 @@ public SearchResult search( @Nonnull List entityNames, @Nonnull String input, @Nullable Filter postFilters, - @Nullable SortCriterion sortCriterion, + List sortCriteria, int from, int size) { List entitiesToSearch = getEntitiesToSearch(opContext, entityNames, size); @@ -87,7 +87,7 @@ public SearchResult search( } SearchResult result = _cachingEntitySearchService.search( - opContext, entitiesToSearch, input, postFilters, sortCriterion, from, size, null); + opContext, entitiesToSearch, input, postFilters, sortCriteria, 
from, size, null); try { return result @@ -105,11 +105,11 @@ public SearchResult searchAcrossEntities( @Nonnull List entities, @Nonnull String input, @Nullable Filter postFilters, - @Nullable SortCriterion sortCriterion, + List sortCriteria, int from, int size) { return searchAcrossEntities( - opContext, entities, input, postFilters, sortCriterion, from, size, null); + opContext, entities, input, postFilters, sortCriteria, from, size, null); } /** @@ -120,7 +120,7 @@ public SearchResult searchAcrossEntities( * @param input the search input text * @param postFilters the request map with fields and values as filters to be applied to search * hits - * @param sortCriterion {@link SortCriterion} to be applied to search results + * @param sortCriteria list of {@link SortCriterion} to be applied to search results * @param from index to start the search from * @param size the number of search hits to return * @param facets list of facets we want aggregations for @@ -133,14 +133,14 @@ public SearchResult searchAcrossEntities( @Nonnull List entities, @Nonnull String input, @Nullable Filter postFilters, - @Nullable SortCriterion sortCriterion, + List sortCriteria, int from, int size, @Nullable List facets) { log.debug( String.format( "Searching Search documents entities: %s, input: %s, postFilters: %s, sortCriterion: %s, from: %s, size: %s", - entities, input, postFilters, sortCriterion, from, size)); + entities, input, postFilters, sortCriteria, from, size)); // DEPRECATED // This is the legacy version of `_entityType`-- it operates as a special case and does not // support ORs, Unions, etc. 
@@ -160,7 +160,7 @@ public SearchResult searchAcrossEntities( } SearchResult result = _cachingEntitySearchService.search( - opContext, nonEmptyEntities, input, postFilters, sortCriterion, from, size, facets); + opContext, nonEmptyEntities, input, postFilters, sortCriteria, from, size, facets); if (facets == null || facets.contains("entity") || facets.contains("_entityType")) { Optional entityTypeAgg = result.getMetadata().getAggregations().stream() @@ -238,7 +238,7 @@ public List getEntitiesToSearch( * @param input the search input text * @param postFilters the request map with fields and values as filters to be applied to search * hits - * @param sortCriterion {@link SortCriterion} to be applied to search results + * @param sortCriteria list of {@link SortCriterion} to be applied to search results * @param scrollId opaque scroll identifier for passing to search backend * @param size the number of search hits to return * @return a {@link ScrollResult} that contains a list of matched documents and related search @@ -250,21 +250,21 @@ public ScrollResult scrollAcrossEntities( @Nonnull List entities, @Nonnull String input, @Nullable Filter postFilters, - @Nullable SortCriterion sortCriterion, + List sortCriteria, @Nullable String scrollId, @Nullable String keepAlive, int size) { log.debug( String.format( - "Searching Search documents entities: %s, input: %s, postFilters: %s, sortCriterion: %s, from: %s, size: %s", - entities, input, postFilters, sortCriterion, scrollId, size)); + "Searching Search documents entities: %s, input: %s, postFilters: %s, sortCriteria: %s, from: %s, size: %s", + entities, input, postFilters, sortCriteria, scrollId, size)); List entitiesToSearch = getEntitiesToSearch(opContext, entities, size); if (entitiesToSearch.isEmpty()) { // No indices with non-zero entries: skip querying and return empty result return getEmptyScrollResult(size); } return _cachingEntitySearchService.scroll( - opContext, entitiesToSearch, input, postFilters, 
sortCriterion, scrollId, keepAlive, size); + opContext, entitiesToSearch, input, postFilters, sortCriteria, scrollId, keepAlive, size); } private static SearchResult getEmptySearchResult(int from, int size) { diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/SearchServiceSearchRetriever.java b/metadata-io/src/main/java/com/linkedin/metadata/search/SearchServiceSearchRetriever.java index a5ef1c8fa58b1..8d7548e0ba90a 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/SearchServiceSearchRetriever.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/SearchServiceSearchRetriever.java @@ -43,7 +43,7 @@ public ScrollResult scroll( entities, "*", filters, - urnSort, + List.of(urnSort), scrollId, null, count); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/cache/CachedSearchResult.java b/metadata-io/src/main/java/com/linkedin/metadata/search/cache/CachedSearchResult.java new file mode 100644 index 0000000000000..7fa93be62fd97 --- /dev/null +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/cache/CachedSearchResult.java @@ -0,0 +1,23 @@ +package com.linkedin.metadata.search.cache; + +import static com.datahub.util.RecordUtils.*; +import static com.linkedin.metadata.search.utils.GZIPUtil.*; + +import com.linkedin.metadata.search.SearchResult; +import java.io.Serializable; +import lombok.Data; + +@Data +public class CachedSearchResult implements Serializable { + private final byte[] searchResult; + private final long timestamp; + + public CachedSearchResult(SearchResult lineageResult, long timestamp) { + this.searchResult = gzipCompress(toJsonString(lineageResult)); + this.timestamp = timestamp; + } + + public SearchResult getSearchResult() { + return toRecordTemplate(SearchResult.class, gzipDecompress(searchResult)); + } +} diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/client/CachingEntitySearchService.java 
b/metadata-io/src/main/java/com/linkedin/metadata/search/client/CachingEntitySearchService.java index 5db427fa90148..cb062e0e3f448 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/client/CachingEntitySearchService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/client/CachingEntitySearchService.java @@ -20,6 +20,7 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; import lombok.RequiredArgsConstructor; +import org.apache.commons.collections.CollectionUtils; import org.javatuples.Septet; import org.javatuples.Sextet; import org.springframework.cache.Cache; @@ -47,7 +48,7 @@ public class CachingEntitySearchService { * @param entityNames the names of the entity to search * @param query the search query * @param filters the filters to include - * @param sortCriterion the sort criterion + * @param sortCriteria the sort criteria * @param from the start offset * @param size the count * @param facets list of facets we want aggregations for @@ -58,12 +59,12 @@ public SearchResult search( @Nonnull List entityNames, @Nonnull String query, @Nullable Filter filters, - @Nullable SortCriterion sortCriterion, + List sortCriteria, int from, int size, @Nullable List facets) { return getCachedSearchResults( - opContext, entityNames, query, filters, sortCriterion, from, size, facets); + opContext, entityNames, query, filters, sortCriteria, from, size, facets); } /** @@ -115,7 +116,7 @@ public BrowseResult browse( * @param entities the names of the entities to search * @param query the search query * @param filters the filters to include - * @param sortCriterion the sort criterion + * @param sortCriteria the sort criteria * @param scrollId opaque scroll identifier for a scroll request * @param keepAlive the string representation of how long to keep point in time alive * @param size the count @@ -126,12 +127,12 @@ public ScrollResult scroll( @Nonnull List entities, @Nonnull String query, @Nullable Filter filters, - @Nullable 
SortCriterion sortCriterion, + List sortCriteria, @Nullable String scrollId, @Nullable String keepAlive, int size) { return getCachedScrollResults( - opContext, entities, query, filters, sortCriterion, scrollId, keepAlive, size); + opContext, entities, query, filters, sortCriteria, scrollId, keepAlive, size); } /** @@ -145,7 +146,7 @@ public SearchResult getCachedSearchResults( @Nonnull List entityNames, @Nonnull String query, @Nullable Filter filters, - @Nullable SortCriterion sortCriterion, + List sortCriteria, int from, int size, @Nullable List facets) { @@ -158,7 +159,7 @@ public SearchResult getCachedSearchResults( entityNames, query, filters, - sortCriterion, + sortCriteria, querySize.getFrom(), querySize.getSize(), facets), @@ -168,7 +169,7 @@ public SearchResult getCachedSearchResults( entityNames, query, filters != null ? toJsonString(filters) : null, - sortCriterion != null ? toJsonString(sortCriterion) : null, + CollectionUtils.isNotEmpty(sortCriteria) ? toJsonString(sortCriteria) : null, facets, querySize), enableCache) @@ -269,7 +270,7 @@ public ScrollResult getCachedScrollResults( @Nonnull List entities, @Nonnull String query, @Nullable Filter filters, - @Nullable SortCriterion sortCriterion, + List sortCriteria, @Nullable String scrollId, @Nullable String keepAlive, int size) { @@ -291,7 +292,7 @@ public ScrollResult getCachedScrollResults( entities, query, filters != null ? toJsonString(filters) : null, - sortCriterion != null ? toJsonString(sortCriterion) : null, + CollectionUtils.isNotEmpty(sortCriteria) ? 
toJsonString(sortCriteria) : null, scrollId, size); String json = cache.get(cacheKey, String.class); @@ -305,7 +306,7 @@ public ScrollResult getCachedScrollResults( entities, query, filters, - sortCriterion, + sortCriteria, scrollId, keepAlive, size, @@ -321,7 +322,7 @@ public ScrollResult getCachedScrollResults( entities, query, filters, - sortCriterion, + sortCriteria, scrollId, keepAlive, size, @@ -337,12 +338,12 @@ private SearchResult getRawSearchResults( final List entityNames, final String input, final Filter filters, - final SortCriterion sortCriterion, + final List sortCriteria, final int start, final int count, @Nullable final List facets) { return entitySearchService.search( - opContext, entityNames, input, filters, sortCriterion, start, count, facets); + opContext, entityNames, input, filters, sortCriteria, start, count, facets); } /** Executes the expensive autocomplete query using the {@link EntitySearchService} */ @@ -373,17 +374,17 @@ private ScrollResult getRawScrollResults( final List entities, final String input, final Filter filters, - final SortCriterion sortCriterion, + final List sortCriteria, @Nullable final String scrollId, @Nullable final String keepAlive, final int count, final boolean fulltext) { if (fulltext) { return entitySearchService.fullTextScroll( - opContext, entities, input, filters, sortCriterion, scrollId, keepAlive, count); + opContext, entities, input, filters, sortCriteria, scrollId, keepAlive, count); } else { return entitySearchService.structuredScroll( - opContext, entities, input, filters, sortCriterion, scrollId, keepAlive, count); + opContext, entities, input, filters, sortCriteria, scrollId, keepAlive, count); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchService.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchService.java index 578c34611a75a..4d5fe8d0b8e60 100644 --- 
a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchService.java @@ -142,10 +142,10 @@ public SearchResult search( @Nonnull List entityNames, @Nonnull String input, @Nullable Filter postFilters, - @Nullable SortCriterion sortCriterion, + List sortCriteria, int from, int size) { - return search(opContext, entityNames, input, postFilters, sortCriterion, from, size, null); + return search(opContext, entityNames, input, postFilters, sortCriteria, from, size, null); } @Nonnull @@ -154,14 +154,14 @@ public SearchResult search( @Nonnull List entityNames, @Nonnull String input, @Nullable Filter postFilters, - @Nullable SortCriterion sortCriterion, + List sortCriteria, int from, int size, @Nullable List facets) { log.debug( String.format( - "Searching FullText Search documents entityName: %s, input: %s, postFilters: %s, sortCriterion: %s, from: %s, size: %s", - entityNames, input, postFilters, sortCriterion, from, size)); + "Searching FullText Search documents entityName: %s, input: %s, postFilters: %s, sortCriteria: %s, from: %s, size: %s", + entityNames, input, postFilters, sortCriteria, from, size)); return esSearchDAO.search( opContext.withSearchFlags( @@ -169,7 +169,7 @@ public SearchResult search( entityNames, input, postFilters, - sortCriterion, + sortCriteria, from, size, facets); @@ -181,20 +181,20 @@ public SearchResult filter( @Nonnull OperationContext opContext, @Nonnull String entityName, @Nullable Filter filters, - @Nullable SortCriterion sortCriterion, + List sortCriteria, int from, int size) { log.debug( String.format( - "Filtering Search documents entityName: %s, filters: %s, sortCriterion: %s, from: %s, size: %s", - entityName, filters, sortCriterion, from, size)); + "Filtering Search documents entityName: %s, filters: %s, sortCriteria: %s, from: %s, size: %s", + entityName, filters, sortCriteria, from, size)); return 
esSearchDAO.filter( opContext.withSearchFlags( flags -> applyDefaultSearchFlags(flags, null, DEFAULT_SERVICE_SEARCH_FLAGS)), entityName, filters, - sortCriterion, + sortCriteria, from, size); } @@ -330,14 +330,14 @@ public ScrollResult fullTextScroll( @Nonnull List entities, @Nonnull String input, @Nullable Filter postFilters, - @Nullable SortCriterion sortCriterion, + List sortCriteria, @Nullable String scrollId, @Nullable String keepAlive, int size) { log.debug( String.format( - "Scrolling Structured Search documents entities: %s, input: %s, postFilters: %s, sortCriterion: %s, scrollId: %s, size: %s", - entities, input, postFilters, sortCriterion, scrollId, size)); + "Scrolling Structured Search documents entities: %s, input: %s, postFilters: %s, sortCriteria: %s, scrollId: %s, size: %s", + entities, input, postFilters, sortCriteria, scrollId, size)); return esSearchDAO.scroll( opContext.withSearchFlags( @@ -347,7 +347,7 @@ public ScrollResult fullTextScroll( entities, input, postFilters, - sortCriterion, + sortCriteria, scrollId, keepAlive, size); @@ -360,14 +360,14 @@ public ScrollResult structuredScroll( @Nonnull List entities, @Nonnull String input, @Nullable Filter postFilters, - @Nullable SortCriterion sortCriterion, + List sortCriteria, @Nullable String scrollId, @Nullable String keepAlive, int size) { log.debug( String.format( - "Scrolling FullText Search documents entities: %s, input: %s, postFilters: %s, sortCriterion: %s, scrollId: %s, size: %s", - entities, input, postFilters, sortCriterion, scrollId, size)); + "Scrolling FullText Search documents entities: %s, input: %s, postFilters: %s, sortCriteria: %s, scrollId: %s, size: %s", + entities, input, postFilters, sortCriteria, scrollId, size)); return esSearchDAO.scroll( opContext.withSearchFlags( @@ -377,7 +377,7 @@ public ScrollResult structuredScroll( entities, input, postFilters, - sortCriterion, + sortCriteria, scrollId, keepAlive, size); @@ -400,7 +400,7 @@ public ExplainResponse explain( 
@Nonnull String documentId, @Nonnull String entityName, @Nullable Filter postFilters, - @Nullable SortCriterion sortCriterion, + List sortCriteria, @Nullable String scrollId, @Nullable String keepAlive, int size, @@ -413,7 +413,7 @@ public ExplainResponse explain( documentId, entityName, postFilters, - sortCriterion, + sortCriteria, scrollId, keepAlive, size, diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESSearchDAO.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESSearchDAO.java index b537a39634027..cb342794aff58 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESSearchDAO.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESSearchDAO.java @@ -228,7 +228,7 @@ private ScrollResult executeAndExtract( * @param input the search input text * @param postFilters the request map with fields and values as filters to be applied to search * hits - * @param sortCriterion {@link SortCriterion} to be applied to search results + * @param sortCriteria list of {@link SortCriterion} to be applied to search results * @param from index to start the search from * @param size the number of search hits to return * @param facets list of facets we want aggregations for @@ -241,7 +241,7 @@ public SearchResult search( @Nonnull List entityNames, @Nonnull String input, @Nullable Filter postFilters, - @Nullable SortCriterion sortCriterion, + List sortCriteria, int from, int size, @Nullable List facets) { @@ -257,7 +257,7 @@ public SearchResult search( final SearchRequest searchRequest = SearchRequestHandler.getBuilder(entitySpecs, searchConfiguration, customSearchConfiguration) .getSearchRequest( - opContext, finalInput, transformedFilters, sortCriterion, from, size, facets); + opContext, finalInput, transformedFilters, sortCriteria, from, size, facets); searchRequest.indices( 
entityNames.stream().map(indexConvention::getEntityIndexName).toArray(String[]::new)); searchRequestTimer.stop(); @@ -270,7 +270,7 @@ public SearchResult search( * * @param filters the request map with fields and values to be applied as filters to the search * query - * @param sortCriterion {@link SortCriterion} to be applied to search results + * @param sortCriteria list of {@link SortCriterion} to be applied to search results * @param from index to start the search from * @param size number of search hits to return * @return a {@link SearchResult} that contains a list of filtered documents and related search @@ -281,7 +281,7 @@ public SearchResult filter( @Nonnull OperationContext opContext, @Nonnull String entityName, @Nullable Filter filters, - @Nullable SortCriterion sortCriterion, + List sortCriteria, int from, int size) { IndexConvention indexConvention = opContext.getSearchContext().getIndexConvention(); @@ -289,7 +289,7 @@ public SearchResult filter( Filter transformedFilters = transformFilterForEntities(filters, indexConvention); final SearchRequest searchRequest = SearchRequestHandler.getBuilder(entitySpec, searchConfiguration, customSearchConfiguration) - .getFilterRequest(opContext, transformedFilters, sortCriterion, from, size); + .getFilterRequest(opContext, transformedFilters, sortCriteria, from, size); searchRequest.indices(indexConvention.getIndexName(entitySpec)); return executeAndExtract( @@ -401,7 +401,7 @@ public Map aggregateByValue( * @param input the search input text * @param postFilters the request map with fields and values as filters to be applied to search * hits - * @param sortCriterion {@link SortCriterion} to be applied to search results + * @param sortCriteria list of {@link SortCriterion} to be applied to search results * @param scrollId opaque scroll Id to convert to a PIT ID and Sort array to pass to ElasticSearch * @param keepAlive string representation of the time to keep a point in time alive * @param size the number of 
search hits to return @@ -414,7 +414,7 @@ public ScrollResult scroll( @Nonnull List entities, @Nonnull String input, @Nullable Filter postFilters, - @Nullable SortCriterion sortCriterion, + List sortCriteria, @Nullable String scrollId, @Nullable String keepAlive, int size) { @@ -439,7 +439,7 @@ public ScrollResult scroll( transformedFilters, entitySpecs, finalInput, - sortCriterion, + sortCriteria, null); // PIT specifies indices in creation so it doesn't support specifying indices on the request, so @@ -462,7 +462,7 @@ private SearchRequest getScrollRequest( @Nullable Filter postFilters, List entitySpecs, String finalInput, - @Nullable SortCriterion sortCriterion, + List sortCriteria, @Nullable List facets) { String pitId = null; Object[] sort = null; @@ -483,15 +483,7 @@ private SearchRequest getScrollRequest( return SearchRequestHandler.getBuilder( entitySpecs, searchConfiguration, customSearchConfiguration) .getSearchRequest( - opContext, - finalInput, - postFilters, - sortCriterion, - sort, - pitId, - keepAlive, - size, - facets); + opContext, finalInput, postFilters, sortCriteria, sort, pitId, keepAlive, size, facets); } public Optional raw( @@ -544,7 +536,7 @@ public ExplainResponse explain( @Nonnull String documentId, @Nonnull String entityName, @Nullable Filter postFilters, - @Nullable SortCriterion sortCriterion, + List sortCriteria, @Nullable String scrollId, @Nullable String keepAlive, int size, @@ -564,7 +556,7 @@ public ExplainResponse explain( transformedFilters, Collections.singletonList(entitySpec), finalQuery, - sortCriterion, + sortCriteria, facets); ; diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchRequestHandler.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchRequestHandler.java index 66ad1e3be363f..6e4210de6ef80 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchRequestHandler.java +++ 
b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchRequestHandler.java @@ -187,7 +187,7 @@ public SearchRequest getSearchRequest( @Nonnull OperationContext opContext, @Nonnull String input, @Nullable Filter filter, - @Nullable SortCriterion sortCriterion, + List sortCriteria, int from, int size, @Nullable List facets) { @@ -213,7 +213,7 @@ public SearchRequest getSearchRequest( if (Boolean.FALSE.equals(searchFlags.isSkipHighlighting())) { searchSourceBuilder.highlighter(highlights); } - ESUtils.buildSortOrder(searchSourceBuilder, sortCriterion, entitySpecs); + ESUtils.buildSortOrder(searchSourceBuilder, sortCriteria, entitySpecs); if (Boolean.TRUE.equals(searchFlags.isGetSuggestions())) { ESUtils.buildNameSuggestions(searchSourceBuilder, input); @@ -243,7 +243,7 @@ public SearchRequest getSearchRequest( @Nonnull OperationContext opContext, @Nonnull String input, @Nullable Filter filter, - @Nullable SortCriterion sortCriterion, + List sortCriteria, @Nullable Object[] sort, @Nullable String pitId, @Nullable String keepAlive, @@ -272,7 +272,7 @@ public SearchRequest getSearchRequest( if (Boolean.FALSE.equals(searchFlags.isSkipHighlighting())) { searchSourceBuilder.highlighter(highlights); } - ESUtils.buildSortOrder(searchSourceBuilder, sortCriterion, entitySpecs); + ESUtils.buildSortOrder(searchSourceBuilder, sortCriteria, entitySpecs); searchRequest.source(searchSourceBuilder); log.debug("Search request is: " + searchRequest); searchRequest.indicesOptions(null); @@ -285,7 +285,7 @@ public SearchRequest getSearchRequest( * to be applied to search results. 
* * @param filters {@link Filter} list of conditions with fields and values - * @param sortCriterion {@link SortCriterion} to be applied to the search results + * @param sortCriteria list of {@link SortCriterion} to be applied to the search results * @param from index to start the search from * @param size the number of search hits to return * @return {@link SearchRequest} that contains the filtered query @@ -294,7 +294,7 @@ public SearchRequest getSearchRequest( public SearchRequest getFilterRequest( @Nonnull OperationContext opContext, @Nullable Filter filters, - @Nullable SortCriterion sortCriterion, + List sortCriteria, int from, int size) { SearchRequest searchRequest = new SearchRequest(); @@ -303,7 +303,7 @@ public SearchRequest getFilterRequest( final SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); searchSourceBuilder.query(filterQuery); searchSourceBuilder.from(from).size(size); - ESUtils.buildSortOrder(searchSourceBuilder, sortCriterion, entitySpecs); + ESUtils.buildSortOrder(searchSourceBuilder, sortCriteria, entitySpecs); searchRequest.source(searchSourceBuilder); return searchRequest; diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESUtils.java b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESUtils.java index e299dde62b184..c4060bbb0928b 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESUtils.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESUtils.java @@ -299,17 +299,14 @@ public static String getElasticTypeForFieldType(SearchableAnnotation.FieldType f * * @param searchSourceBuilder {@link SearchSourceBuilder} that needs to be populated with sort * order - * @param sortCriterion {@link SortCriterion} to be applied to the search results + * @param sortCriteria list of {@link SortCriterion} to be applied to the search results */ public static void buildSortOrder( @Nonnull SearchSourceBuilder searchSourceBuilder, - @Nullable SortCriterion 
sortCriterion, + List sortCriteria, List entitySpecs) { buildSortOrder( - searchSourceBuilder, - sortCriterion == null ? List.of() : List.of(sortCriterion), - entitySpecs, - true); + searchSourceBuilder, sortCriteria == null ? List.of() : sortCriteria, entitySpecs, true); } /** @@ -321,20 +318,20 @@ public static void buildSortOrder( */ public static void buildSortOrder( @Nonnull SearchSourceBuilder searchSourceBuilder, - @Nonnull List sortCriterion, + @Nonnull List sortCriteria, List entitySpecs, boolean enableDefaultSort) { - if (sortCriterion.isEmpty() && enableDefaultSort) { + if (sortCriteria.isEmpty() && enableDefaultSort) { searchSourceBuilder.sort(new ScoreSortBuilder().order(SortOrder.DESC)); } else { - for (SortCriterion sortCriteria : sortCriterion) { + for (SortCriterion sortCriterion : sortCriteria) { Optional fieldTypeForDefault = Optional.empty(); for (EntitySpec entitySpec : entitySpecs) { List fieldSpecs = entitySpec.getSearchableFieldSpecs(); for (SearchableFieldSpec fieldSpec : fieldSpecs) { SearchableAnnotation annotation = fieldSpec.getSearchableAnnotation(); - if (annotation.getFieldName().equals(sortCriteria.getField()) - || annotation.getFieldNameAliases().contains(sortCriteria.getField())) { + if (annotation.getFieldName().equals(sortCriterion.getField()) + || annotation.getFieldNameAliases().contains(sortCriterion.getField())) { fieldTypeForDefault = Optional.of(fieldSpec.getSearchableAnnotation().getFieldType()); break; } @@ -346,15 +343,15 @@ public static void buildSortOrder( if (fieldTypeForDefault.isEmpty() && !entitySpecs.isEmpty()) { log.warn( "Sort criterion field " - + sortCriteria.getField() + + sortCriterion.getField() + " was not found in any entity spec to be searched"); } final SortOrder esSortOrder = - (sortCriteria.getOrder() == com.linkedin.metadata.query.filter.SortOrder.ASCENDING) + (sortCriterion.getOrder() == com.linkedin.metadata.query.filter.SortOrder.ASCENDING) ? 
SortOrder.ASC : SortOrder.DESC; FieldSortBuilder sortBuilder = - new FieldSortBuilder(sortCriteria.getField()).order(esSortOrder); + new FieldSortBuilder(sortCriterion.getField()).order(esSortOrder); if (fieldTypeForDefault.isPresent()) { String esFieldtype = getElasticTypeForFieldType(fieldTypeForDefault.get()); if (esFieldtype != null) { @@ -365,8 +362,8 @@ public static void buildSortOrder( } } if (enableDefaultSort - && (sortCriterion.isEmpty() - || sortCriterion.stream() + && (sortCriteria.isEmpty() + || sortCriteria.stream() .noneMatch(c -> c.getField().equals(DEFAULT_SEARCH_RESULTS_SORT_BY_FIELD)))) { searchSourceBuilder.sort( new FieldSortBuilder(DEFAULT_SEARCH_RESULTS_SORT_BY_FIELD).order(SortOrder.ASC)); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/ElasticSearchTimeseriesAspectService.java b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/ElasticSearchTimeseriesAspectService.java index ce4ff53eba91b..9b4d373d25d8f 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/ElasticSearchTimeseriesAspectService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/ElasticSearchTimeseriesAspectService.java @@ -551,7 +551,7 @@ public TimeseriesScrollResult scrollAspects( @Nonnull String entityName, @Nonnull String aspectName, @Nullable Filter filter, - @Nonnull List sortCriterion, + @Nonnull List sortCriteria, @Nullable String scrollId, int count, @Nullable Long startTimeMillis, @@ -592,7 +592,7 @@ public TimeseriesScrollResult scrollAspects( entityName, aspectName, filterQueryBuilder, - sortCriterion, + sortCriteria, scrollId, count); int totalCount = (int) response.getHits().getTotalHits().value; @@ -615,7 +615,7 @@ private SearchResponse executeScrollSearchQuery( @Nonnull final String entityName, @Nonnull final String aspectName, @Nonnull final QueryBuilder query, - @Nonnull List sortCriterion, + @Nonnull List sortCriteria, @Nullable String scrollId, final int 
count) { @@ -631,7 +631,7 @@ private SearchResponse executeScrollSearchQuery( searchSourceBuilder.size(count); searchSourceBuilder.query(query); - ESUtils.buildSortOrder(searchSourceBuilder, sortCriterion, List.of(), false); + ESUtils.buildSortOrder(searchSourceBuilder, sortCriteria, List.of(), false); searchRequest.source(searchSourceBuilder); ESUtils.setSearchAfter(searchSourceBuilder, sort, null, null); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/graph/GraphServiceTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/graph/GraphServiceTestBase.java index b430313f5904b..5d9a5079f2a3b 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/graph/GraphServiceTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/graph/GraphServiceTestBase.java @@ -2196,7 +2196,7 @@ public void testHighlyConnectedGraphWalk() throws Exception { relationships.stream() .flatMap(relationship -> relationship.getDegrees().stream()) .reduce(0, Math::max); - assertTrue(maxDegree > 1); + assertTrue(maxDegree >= 1); EntityLineageResult lineageResultMulti = getGraphService(true) diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/SampleDataFixtureTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/SampleDataFixtureTestBase.java index a3f6f39e6387c..7e434bf93329e 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/SampleDataFixtureTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/SampleDataFixtureTestBase.java @@ -216,7 +216,7 @@ public void testGetSortOrder() { SearchSourceBuilder builder = new SearchSourceBuilder(); SortCriterion sortCriterion = new SortCriterion().setOrder(SortOrder.DESCENDING).setField(dateFieldName); - ESUtils.buildSortOrder(builder, sortCriterion, entitySpecs); + ESUtils.buildSortOrder(builder, Collections.singletonList(sortCriterion), entitySpecs); List> sorts = builder.sorts(); assertEquals(sorts.size(), 2); // sort 
by last modified and then by urn for (SortBuilder sort : sorts) { @@ -235,7 +235,7 @@ public void testGetSortOrder() { SearchSourceBuilder nameBuilder = new SearchSourceBuilder(); SortCriterion nameCriterion = new SortCriterion().setOrder(SortOrder.ASCENDING).setField(entityNameField); - ESUtils.buildSortOrder(nameBuilder, nameCriterion, entitySpecs); + ESUtils.buildSortOrder(nameBuilder, Collections.singletonList(nameCriterion), entitySpecs); sorts = nameBuilder.sorts(); assertEquals(sorts.size(), 2); for (SortBuilder sort : sorts) { @@ -1959,7 +1959,7 @@ public void testSortOrdering() { SEARCHABLE_ENTITIES, query, null, - criterion, + Collections.singletonList(criterion), 0, 100, null); diff --git a/metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/v2/delegates/EntityApiDelegateImpl.java b/metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/v2/delegates/EntityApiDelegateImpl.java index 9e671bfb7e01e..c5f87c3f1dced 100644 --- a/metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/v2/delegates/EntityApiDelegateImpl.java +++ b/metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/v2/delegates/EntityApiDelegateImpl.java @@ -61,6 +61,7 @@ import io.datahubproject.openapi.v1.entities.EntitiesController; import jakarta.servlet.http.HttpServletRequest; import java.net.URISyntaxException; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Objects; @@ -628,12 +629,18 @@ public ResponseEntity scroll( authentication, true); - // TODO multi-field sort - SortCriterion sortCriterion = new SortCriterion(); - sortCriterion.setField(Optional.ofNullable(sort).map(s -> s.get(0)).orElse("urn")); - sortCriterion.setOrder( - com.linkedin.metadata.query.filter.SortOrder.valueOf( - Optional.ofNullable(sortOrder).map(Enum::name).orElse("ASCENDING"))); + List sortCriteria = + 
Optional.ofNullable(sort).orElse(Collections.singletonList("urn")).stream() + .map( + sortField -> { + SortCriterion sortCriterion = new SortCriterion(); + sortCriterion.setField(sortField); + sortCriterion.setOrder( + com.linkedin.metadata.query.filter.SortOrder.valueOf( + Optional.ofNullable(sortOrder).map(Enum::name).orElse("ASCENDING"))); + return sortCriterion; + }) + .collect(Collectors.toList()); ScrollResult result = _searchService.scrollAcrossEntities( @@ -641,7 +648,7 @@ public ResponseEntity scroll( List.of(entitySpec.getName()), query, null, - sortCriterion, + sortCriteria, scrollId, null, count); diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/controller/GenericEntitiesController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/controller/GenericEntitiesController.java index f415a4f47c9dc..8d89417b29215 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/controller/GenericEntitiesController.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/controller/GenericEntitiesController.java @@ -61,6 +61,7 @@ import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.http.MediaType; import org.springframework.http.ResponseEntity; +import org.springframework.util.CollectionUtils; import org.springframework.web.bind.annotation.DeleteMapping; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PatchMapping; @@ -166,6 +167,7 @@ public ResponseEntity getEntities( @RequestParam(value = "query", defaultValue = "*") String query, @RequestParam(value = "scrollId", required = false) String scrollId, @RequestParam(value = "sort", required = false, defaultValue = "urn") String sortField, + @RequestParam(value = "sortCriteria", required = false) List sortFields, @RequestParam(value = "sortOrder", required = false, defaultValue = "ASCENDING") String 
sortOrder, @RequestParam(value = "systemMetadata", required = false, defaultValue = "false") @@ -194,8 +196,15 @@ public ResponseEntity getEntities( authentication, true); - // TODO: support additional and multiple sort params - SortCriterion sortCriterion = SearchUtil.sortBy(sortField, SortOrder.valueOf(sortOrder)); + List sortCriteria; + if (!CollectionUtils.isEmpty(sortFields)) { + sortCriteria = new ArrayList<>(); + sortFields.forEach( + field -> sortCriteria.add(SearchUtil.sortBy(field, SortOrder.valueOf(sortOrder)))); + } else { + sortCriteria = + Collections.singletonList(SearchUtil.sortBy(sortField, SortOrder.valueOf(sortOrder))); + } ScrollResult result = searchService.scrollAcrossEntities( @@ -206,7 +215,7 @@ public ResponseEntity getEntities( List.of(entitySpec.getName()), query, null, - sortCriterion, + sortCriteria, scrollId, null, count); diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/operations/elastic/OperationsController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/operations/elastic/OperationsController.java index ddbc8004081eb..ea72bac73edf3 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/operations/elastic/OperationsController.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/operations/elastic/OperationsController.java @@ -20,6 +20,7 @@ import com.linkedin.metadata.entity.restoreindices.RestoreIndicesResult; import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; +import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.search.EntitySearchService; import com.linkedin.metadata.systemmetadata.SystemMetadataService; import com.linkedin.metadata.timeseries.TimeseriesAspectService; @@ -214,6 +215,13 @@ public ResponseEntity explainSearchQuery( @RequestParam(value = "filters", required = false) @Nullable String filters, + @Parameter( + 
name = "sortCriteria", + required = false, + description = "Criteria to sort results on.") + @RequestParam("sortCriteria") + @Nullable + List sortCriteria, @Parameter(name = "searchFlags", description = "Optional configuration flags.") @RequestParam(value = "searchFlags", required = false) @Nullable @@ -253,7 +261,7 @@ public ResponseEntity explainSearchQuery( encodeValue(documentId), entityName, filters == null ? null : objectMapper.readValue(filters, Filter.class), - null, + sortCriteria, scrollId, keepAlive, size, @@ -315,6 +323,13 @@ public ResponseEntity explainSearchQueryDiff( @RequestParam(value = "filters", required = false) @Nullable String filters, + @Parameter( + name = "sortCriteria", + required = false, + description = "Criteria to sort results on.") + @RequestParam("sortCriteria") + @Nullable + List sortCriteria, @Parameter(name = "searchFlags", description = "Optional configuration flags.") @RequestParam(value = "searchFlags", required = false) @Nullable @@ -354,7 +369,7 @@ public ResponseEntity explainSearchQueryDiff( encodeValue(documentIdA), entityName, filters == null ? null : objectMapper.readValue(filters, Filter.class), - null, + sortCriteria, scrollId, keepAlive, size, @@ -367,7 +382,7 @@ public ResponseEntity explainSearchQueryDiff( encodeValue(documentIdB), entityName, filters == null ? 
null : objectMapper.readValue(filters, Filter.class), - null, + sortCriteria, scrollId, keepAlive, size, diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/TimeseriesController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/TimeseriesController.java index bb10719bacd3f..4e8c0abcb0c22 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/TimeseriesController.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/TimeseriesController.java @@ -90,7 +90,7 @@ public ResponseEntity> getAspects( throw new IllegalArgumentException("Only timeseries aspects are supported."); } - List sortCriterion = + List sortCriteria = List.of( SearchUtil.sortBy("timestampMillis", SortOrder.DESCENDING), SearchUtil.sortBy("messageId", SortOrder.DESCENDING)); @@ -101,7 +101,7 @@ public ResponseEntity> getAspects( entityName, aspectName, null, - sortCriterion, + sortCriteria, scrollId, count, startTimeMillis, diff --git a/metadata-service/openapi-servlet/src/test/java/io/datahubproject/openapi/v3/controller/EntityControllerTest.java b/metadata-service/openapi-servlet/src/test/java/io/datahubproject/openapi/v3/controller/EntityControllerTest.java index 3c7e93621f5cc..60425fc7e756e 100644 --- a/metadata-service/openapi-servlet/src/test/java/io/datahubproject/openapi/v3/controller/EntityControllerTest.java +++ b/metadata-service/openapi-servlet/src/test/java/io/datahubproject/openapi/v3/controller/EntityControllerTest.java @@ -37,6 +37,7 @@ import io.datahubproject.metadata.context.OperationContext; import io.datahubproject.openapi.config.SpringWebConfig; import io.datahubproject.test.metadata.context.TestOperationContexts; +import java.util.Collections; import java.util.List; import java.util.Map; import org.springframework.beans.factory.annotation.Autowired; @@ -95,7 +96,7 @@ public void 
testSearchOrderPreserved() throws Exception { eq(List.of("dataset")), anyString(), nullable(Filter.class), - eq(SearchUtil.sortBy("urn", SortOrder.valueOf("ASCENDING"))), + eq(Collections.singletonList(SearchUtil.sortBy("urn", SortOrder.valueOf("ASCENDING")))), nullable(String.class), nullable(String.class), anyInt())) @@ -113,7 +114,9 @@ public void testSearchOrderPreserved() throws Exception { eq(List.of("dataset")), anyString(), nullable(Filter.class), - eq(SearchUtil.sortBy("urn", SortOrder.valueOf("DESCENDING"))), + eq( + Collections.singletonList( + SearchUtil.sortBy("urn", SortOrder.valueOf("DESCENDING")))), nullable(String.class), nullable(String.class), anyInt())) diff --git a/metadata-service/restli-api/src/main/idl/com.linkedin.entity.entities.restspec.json b/metadata-service/restli-api/src/main/idl/com.linkedin.entity.entities.restspec.json index fe53b43ccd1da..935a429383928 100644 --- a/metadata-service/restli-api/src/main/idl/com.linkedin.entity.entities.restspec.json +++ b/metadata-service/restli-api/src/main/idl/com.linkedin.entity.entities.restspec.json @@ -199,6 +199,10 @@ "name" : "sort", "type" : "com.linkedin.metadata.query.filter.SortCriterion", "optional" : true + }, { + "name" : "sortCriteria", + "type" : "{ \"type\" : \"array\", \"items\" : \"com.linkedin.metadata.query.filter.SortCriterion\" }", + "optional" : true }, { "name" : "start", "type" : "int" @@ -248,6 +252,10 @@ "name" : "sort", "type" : "com.linkedin.metadata.query.filter.SortCriterion", "optional" : true + }, { + "name" : "sortCriteria", + "type" : "{ \"type\" : \"array\", \"items\" : \"com.linkedin.metadata.query.filter.SortCriterion\" }", + "optional" : true }, { "name" : "start", "type" : "int" @@ -288,6 +296,10 @@ "name" : "sort", "type" : "com.linkedin.metadata.query.filter.SortCriterion", "optional" : true + }, { + "name" : "sortCriteria", + "type" : "{ \"type\" : \"array\", \"items\" : \"com.linkedin.metadata.query.filter.SortCriterion\" }", + "optional" : true }, { 
"name" : "scrollId", "type" : "string", @@ -333,6 +345,10 @@ "name" : "sort", "type" : "com.linkedin.metadata.query.filter.SortCriterion", "optional" : true + }, { + "name" : "sortCriteria", + "type" : "{ \"type\" : \"array\", \"items\" : \"com.linkedin.metadata.query.filter.SortCriterion\" }", + "optional" : true }, { "name" : "scrollId", "type" : "string", @@ -374,6 +390,10 @@ "name" : "sort", "type" : "com.linkedin.metadata.query.filter.SortCriterion", "optional" : true + }, { + "name" : "sortCriteria", + "type" : "{ \"type\" : \"array\", \"items\" : \"com.linkedin.metadata.query.filter.SortCriterion\" }", + "optional" : true }, { "name" : "start", "type" : "int" @@ -411,6 +431,10 @@ "name" : "sort", "type" : "com.linkedin.metadata.query.filter.SortCriterion", "optional" : true + }, { + "name" : "sortCriteria", + "type" : "{ \"type\" : \"array\", \"items\" : \"com.linkedin.metadata.query.filter.SortCriterion\" }", + "optional" : true }, { "name" : "start", "type" : "int" @@ -452,6 +476,10 @@ "name" : "sort", "type" : "com.linkedin.metadata.query.filter.SortCriterion", "optional" : true + }, { + "name" : "sortCriteria", + "type" : "{ \"type\" : \"array\", \"items\" : \"com.linkedin.metadata.query.filter.SortCriterion\" }", + "optional" : true }, { "name" : "start", "type" : "int" diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entities.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entities.snapshot.json index b6074228a00a5..5b6f7a290fd1a 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entities.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entities.snapshot.json @@ -6820,6 +6820,10 @@ "name" : "sort", "type" : "com.linkedin.metadata.query.filter.SortCriterion", "optional" : true + }, { + "name" : "sortCriteria", + "type" : "{ \"type\" : \"array\", \"items\" : \"com.linkedin.metadata.query.filter.SortCriterion\" }", + "optional" 
: true }, { "name" : "start", "type" : "int" @@ -6869,6 +6873,10 @@ "name" : "sort", "type" : "com.linkedin.metadata.query.filter.SortCriterion", "optional" : true + }, { + "name" : "sortCriteria", + "type" : "{ \"type\" : \"array\", \"items\" : \"com.linkedin.metadata.query.filter.SortCriterion\" }", + "optional" : true }, { "name" : "start", "type" : "int" @@ -6909,6 +6917,10 @@ "name" : "sort", "type" : "com.linkedin.metadata.query.filter.SortCriterion", "optional" : true + }, { + "name" : "sortCriteria", + "type" : "{ \"type\" : \"array\", \"items\" : \"com.linkedin.metadata.query.filter.SortCriterion\" }", + "optional" : true }, { "name" : "scrollId", "type" : "string", @@ -6954,6 +6966,10 @@ "name" : "sort", "type" : "com.linkedin.metadata.query.filter.SortCriterion", "optional" : true + }, { + "name" : "sortCriteria", + "type" : "{ \"type\" : \"array\", \"items\" : \"com.linkedin.metadata.query.filter.SortCriterion\" }", + "optional" : true }, { "name" : "scrollId", "type" : "string", @@ -6995,6 +7011,10 @@ "name" : "sort", "type" : "com.linkedin.metadata.query.filter.SortCriterion", "optional" : true + }, { + "name" : "sortCriteria", + "type" : "{ \"type\" : \"array\", \"items\" : \"com.linkedin.metadata.query.filter.SortCriterion\" }", + "optional" : true }, { "name" : "start", "type" : "int" @@ -7032,6 +7052,10 @@ "name" : "sort", "type" : "com.linkedin.metadata.query.filter.SortCriterion", "optional" : true + }, { + "name" : "sortCriteria", + "type" : "{ \"type\" : \"array\", \"items\" : \"com.linkedin.metadata.query.filter.SortCriterion\" }", + "optional" : true }, { "name" : "start", "type" : "int" @@ -7073,6 +7097,10 @@ "name" : "sort", "type" : "com.linkedin.metadata.query.filter.SortCriterion", "optional" : true + }, { + "name" : "sortCriteria", + "type" : "{ \"type\" : \"array\", \"items\" : \"com.linkedin.metadata.query.filter.SortCriterion\" }", + "optional" : true }, { "name" : "start", "type" : "int" diff --git 
a/metadata-service/restli-client-api/src/main/java/com/linkedin/entity/client/EntityClient.java b/metadata-service/restli-client-api/src/main/java/com/linkedin/entity/client/EntityClient.java index cb5c691d0cb61..5f086e79a387a 100644 --- a/metadata-service/restli-client-api/src/main/java/com/linkedin/entity/client/EntityClient.java +++ b/metadata-service/restli-client-api/src/main/java/com/linkedin/entity/client/EntityClient.java @@ -235,7 +235,7 @@ ListResult list( * * @param input search query * @param filter search filters - * @param sortCriterion sort criterion + * @param sortCriteria sort criteria * @param start start offset for search results * @param count max number of search results requested * @return Snapshot key @@ -246,7 +246,7 @@ SearchResult search( @Nonnull String entity, @Nonnull String input, @Nullable Filter filter, - SortCriterion sortCriterion, + List sortCriteria, int start, int count) throws RemoteInvocationException; @@ -270,7 +270,7 @@ SearchResult searchAcrossEntities( @Nullable Filter filter, int start, int count, - @Nullable SortCriterion sortCriterion) + List sortCriteria) throws RemoteInvocationException; /** @@ -292,7 +292,7 @@ SearchResult searchAcrossEntities( @Nullable Filter filter, int start, int count, - @Nullable SortCriterion sortCriterion, + List sortCriteria, List facets) throws RemoteInvocationException; @@ -328,7 +328,7 @@ ScrollResult scrollAcrossEntities( * @param input the search input text * @param maxHops the max number of hops away to search for. If null, searches all hops. 
* @param filter the request map with fields and values as filters to be applied to search hits - * @param sortCriterion {@link SortCriterion} to be applied to search results + * @param sortCriteria list of {@link SortCriterion} to be applied to search results * @param start index to start the search from * @param count the number of search hits to return * @return a {@link SearchResult} that contains a list of matched documents and related search @@ -342,7 +342,7 @@ LineageSearchResult searchAcrossLineage( @Nonnull String input, @Nullable Integer maxHops, @Nullable Filter filter, - @Nullable SortCriterion sortCriterion, + List sortCriteria, int start, int count) throws RemoteInvocationException; @@ -356,7 +356,7 @@ LineageSearchResult searchAcrossLineage( * @param input the search input text * @param maxHops the max number of hops away to search for. If null, searches all hops. * @param filter the request map with fields and values as filters to be applied to search hits - * @param sortCriterion {@link SortCriterion} to be applied to search results + * @param sortCriteria list of {@link SortCriterion} to be applied to search results * @param scrollId opaque scroll ID indicating offset * @param keepAlive string representation of time to keep point in time alive, ex: 5m * @param count the number of search hits to return of roundtrips for UI visualizations. 
@@ -372,7 +372,7 @@ LineageScrollResult scrollAcrossLineage( @Nonnull String input, @Nullable Integer maxHops, @Nullable Filter filter, - @Nullable SortCriterion sortCriterion, + List sortCriteria, @Nullable String scrollId, @Nonnull String keepAlive, int count) @@ -426,7 +426,7 @@ void deleteEntityReferences(@Nonnull OperationContext opContext, @Nonnull final * * @param entity filter entity * @param filter search filters - * @param sortCriterion sort criterion + * @param sortCriteria sort criteria * @param start start offset for search results * @param count max number of search results requested * @return a set of {@link SearchResult}s @@ -436,7 +436,7 @@ SearchResult filter( @Nonnull OperationContext opContext, @Nonnull String entity, @Nonnull Filter filter, - @Nullable SortCriterion sortCriterion, + List sortCriteria, int start, int count) throws RemoteInvocationException; diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/RestliEntityClient.java b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/RestliEntityClient.java index 780c6c6a007c2..bc5b9e439d293 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/RestliEntityClient.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/RestliEntityClient.java @@ -63,6 +63,7 @@ import com.linkedin.metadata.query.filter.CriterionArray; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.filter.SortCriterion; +import com.linkedin.metadata.query.filter.SortCriterionArray; import com.linkedin.metadata.search.LineageScrollResult; import com.linkedin.metadata.search.LineageSearchResult; import com.linkedin.metadata.search.ScrollResult; @@ -101,6 +102,7 @@ import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.NotImplementedException; +import org.opensearch.core.common.util.CollectionUtils; @Slf4j public class RestliEntityClient extends 
BaseClient implements EntityClient { @@ -592,7 +594,7 @@ public ListResult list( * * @param input search query * @param filter search filters - * @param sortCriterion sort criterion + * @param sortCriteria sort criteria * @param start start offset for search results * @param count max number of search results requested * @return Snapshot key @@ -605,7 +607,7 @@ public SearchResult search( @Nonnull String entity, @Nonnull String input, @Nullable Filter filter, - SortCriterion sortCriterion, + List sortCriteria, int start, int count) throws RemoteInvocationException { @@ -623,8 +625,9 @@ public SearchResult search( requestBuilder.filterParam(filter); } - if (sortCriterion != null) { - requestBuilder.sortParam(sortCriterion); + if (!CollectionUtils.isEmpty(sortCriteria)) { + requestBuilder.sortParam(sortCriteria.get(0)); + requestBuilder.sortCriteriaParam(new SortCriterionArray(sortCriteria)); } if (searchFlags != null) { @@ -646,10 +649,10 @@ public SearchResult searchAcrossEntities( @Nullable Filter filter, int start, int count, - @Nullable SortCriterion sortCriterion) + List sortCriteria) throws RemoteInvocationException { return searchAcrossEntities( - opContext, entities, input, filter, start, count, sortCriterion, null); + opContext, entities, input, filter, start, count, sortCriteria, null); } /** @@ -673,7 +676,7 @@ public SearchResult searchAcrossEntities( @Nullable Filter filter, int start, int count, - @Nullable SortCriterion sortCriterion, + List sortCriteria, @Nullable List facets) throws RemoteInvocationException { @@ -695,8 +698,9 @@ public SearchResult searchAcrossEntities( requestBuilder.searchFlagsParam(searchFlags); } - if (sortCriterion != null) { - requestBuilder.sortParam(sortCriterion); + if (!CollectionUtils.isEmpty(sortCriteria)) { + requestBuilder.sortParam(sortCriteria.get(0)); + requestBuilder.sortCriteriaParam(new SortCriterionArray(sortCriteria)); } return sendClientRequest(requestBuilder, opContext.getAuthentication()).getEntity(); @@ 
-746,7 +750,7 @@ public LineageSearchResult searchAcrossLineage( @Nonnull String input, @Nullable Integer maxHops, @Nullable Filter filter, - @Nullable SortCriterion sortCriterion, + List sortCriteria, int start, int count) throws RemoteInvocationException { @@ -773,6 +777,12 @@ public LineageSearchResult searchAcrossLineage( if (lineageFlags.getEndTimeMillis() != null) { requestBuilder.endTimeMillisParam(lineageFlags.getEndTimeMillis()); } + + if (!CollectionUtils.isEmpty(sortCriteria)) { + requestBuilder.sortParam(sortCriteria.get(0)); + requestBuilder.sortCriteriaParam(new SortCriterionArray(sortCriteria)); + } + requestBuilder.searchFlagsParam(opContext.getSearchContext().getSearchFlags()); return sendClientRequest(requestBuilder, opContext.getAuthentication()).getEntity(); @@ -788,7 +798,7 @@ public LineageScrollResult scrollAcrossLineage( @Nonnull String input, @Nullable Integer maxHops, @Nullable Filter filter, - @Nullable SortCriterion sortCriterion, + List sortCriteria, @Nullable String scrollId, @Nonnull String keepAlive, int count) @@ -818,6 +828,12 @@ public LineageScrollResult scrollAcrossLineage( if (lineageFlags.getEndTimeMillis() != null) { requestBuilder.endTimeMillisParam(lineageFlags.getEndTimeMillis()); } + + if (!CollectionUtils.isEmpty(sortCriteria)) { + requestBuilder.sortParam(sortCriteria.get(0)); + requestBuilder.sortCriteriaParam(new SortCriterionArray(sortCriteria)); + } + requestBuilder.searchFlagsParam(opContext.getSearchContext().getSearchFlags()); return sendClientRequest(requestBuilder, opContext.getAuthentication()).getEntity(); @@ -906,7 +922,7 @@ public SearchResult filter( @Nonnull OperationContext opContext, @Nonnull String entity, @Nonnull Filter filter, - @Nullable SortCriterion sortCriterion, + List sortCriteria, int start, int count) throws RemoteInvocationException { @@ -917,8 +933,9 @@ public SearchResult filter( .filterParam(filter) .startParam(start) .countParam(count); - if (sortCriterion != null) { - 
requestBuilder.sortParam(sortCriterion); + if (!CollectionUtils.isEmpty(sortCriteria)) { + requestBuilder.sortParam(sortCriteria.get(0)); + requestBuilder.sortCriteriaParam(new SortCriterionArray(sortCriteria)); } return sendClientRequest(requestBuilder, opContext.getAuthentication()).getEntity(); } diff --git a/metadata-service/restli-servlet-impl/build.gradle b/metadata-service/restli-servlet-impl/build.gradle index c1484f00efe59..6b68abfe7fb15 100644 --- a/metadata-service/restli-servlet-impl/build.gradle +++ b/metadata-service/restli-servlet-impl/build.gradle @@ -106,3 +106,6 @@ pegasus.main.idlOptions.addIdlItem([ ]) ext.apiProject = project(':metadata-service:restli-api') + +spotlessJava.dependsOn generateTestDataTemplate +spotlessJava.dependsOn generateIntegTestDataTemplate diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityResource.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityResource.java index f2e025c8ed2b4..8a5473da95ba2 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityResource.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityResource.java @@ -367,6 +367,7 @@ public Task search( @ActionParam(PARAM_INPUT) @Nonnull String input, @ActionParam(PARAM_FILTER) @Optional @Nullable Filter filter, @ActionParam(PARAM_SORT) @Optional @Nullable SortCriterion sortCriterion, + @ActionParam(PARAM_SORT_CRITERIA) @Optional @Nullable SortCriterion[] sortCriteria, @ActionParam(PARAM_START) int start, @ActionParam(PARAM_COUNT) int count, @Optional @Deprecated @Nullable @ActionParam(PARAM_FULLTEXT) Boolean fulltext, @@ -386,6 +387,8 @@ public Task search( RequestContext.builder().buildRestli(auth.getActor().toUrnStr(), getContext(), ACTION_SEARCH, entityName), authorizer, auth, true) .withSearchFlags(flags -> searchFlags != null ? 
searchFlags : new SearchFlags().setFulltext(Boolean.TRUE.equals(fulltext))); + List sortCriterionList = getSortCriteria(sortCriteria, sortCriterion); + log.info("GET SEARCH RESULTS for {} with query {}", entityName, input); // TODO - change it to use _searchService once we are confident on it's latency return RestliUtil.toTask( @@ -394,7 +397,7 @@ public Task search( // This API is not used by the frontend for search bars so we default to structured result = entitySearchService.search(opContext, - List.of(entityName), input, filter, sortCriterion, start, count); + List.of(entityName), input, filter, sortCriterionList, start, count); if (!isAPIAuthorizedResult( auth, @@ -417,6 +420,7 @@ public Task searchAcrossEntities( @ActionParam(PARAM_INPUT) @Nonnull String input, @ActionParam(PARAM_FILTER) @Optional @Nullable Filter filter, @ActionParam(PARAM_SORT) @Optional @Nullable SortCriterion sortCriterion, + @ActionParam(PARAM_SORT_CRITERIA) @Optional @Nullable SortCriterion[] sortCriteria, @ActionParam(PARAM_START) int start, @ActionParam(PARAM_COUNT) int count, @ActionParam(PARAM_SEARCH_FLAGS) @Optional SearchFlags searchFlags) { @@ -436,10 +440,12 @@ public Task searchAcrossEntities( HttpStatus.S_403_FORBIDDEN, "User is unauthorized to search."); } + List sortCriterionList = getSortCriteria(sortCriteria, sortCriterion); + log.info("GET SEARCH RESULTS ACROSS ENTITIES for {} with query {}", entityList, input); return RestliUtil.toTask( () -> { - SearchResult result = searchService.searchAcrossEntities(opContext, entityList, input, filter, sortCriterion, start, count); + SearchResult result = searchService.searchAcrossEntities(opContext, entityList, input, filter, sortCriterionList, start, count); if (!isAPIAuthorizedResult( auth, authorizer, @@ -452,6 +458,18 @@ public Task searchAcrossEntities( }); } + private List getSortCriteria(@Nullable SortCriterion[] sortCriteria, @Nullable SortCriterion sortCriterion) { + List sortCriterionList; + if (sortCriteria != null) { + 
sortCriterionList = Arrays.asList(sortCriteria); + } else if (sortCriterion != null) { + sortCriterionList = Collections.singletonList(sortCriterion); + } else { + sortCriterionList = Collections.emptyList(); + } + return sortCriterionList; + } + @Action(name = ACTION_SCROLL_ACROSS_ENTITIES) @Nonnull @WithSpan @@ -460,6 +478,7 @@ public Task scrollAcrossEntities( @ActionParam(PARAM_INPUT) @Nonnull String input, @ActionParam(PARAM_FILTER) @Optional @Nullable Filter filter, @ActionParam(PARAM_SORT) @Optional @Nullable SortCriterion sortCriterion, + @ActionParam(PARAM_SORT_CRITERIA) @Optional @Nullable SortCriterion[] sortCriteria, @ActionParam(PARAM_SCROLL_ID) @Optional @Nullable String scrollId, @ActionParam(PARAM_KEEP_ALIVE) String keepAlive, @ActionParam(PARAM_COUNT) int count, @@ -479,6 +498,8 @@ public Task scrollAcrossEntities( HttpStatus.S_403_FORBIDDEN, "User is unauthorized to search."); } + List sortCriterionList = getSortCriteria(sortCriteria, sortCriterion); + log.info( "GET SCROLL RESULTS ACROSS ENTITIES for {} with query {} and scroll ID: {}", entityList, @@ -492,7 +513,7 @@ public Task scrollAcrossEntities( entityList, input, filter, - sortCriterion, + sortCriterionList, scrollId, keepAlive, count); @@ -520,6 +541,7 @@ public Task searchAcrossLineage( @ActionParam(PARAM_MAX_HOPS) @Optional @Nullable Integer maxHops, @ActionParam(PARAM_FILTER) @Optional @Nullable Filter filter, @ActionParam(PARAM_SORT) @Optional @Nullable SortCriterion sortCriterion, + @ActionParam(PARAM_SORT_CRITERIA) @Optional @Nullable SortCriterion[] sortCriteria, @ActionParam(PARAM_START) int start, @ActionParam(PARAM_COUNT) int count, @ActionParam(PARAM_START_TIME_MILLIS) @Optional @Nullable Long startTimeMillis, @@ -535,6 +557,8 @@ public Task searchAcrossLineage( HttpStatus.S_403_FORBIDDEN, "User is unauthorized to search."); } + List sortCriterionList = getSortCriteria(sortCriteria, sortCriterion); + OperationContext opContext = OperationContext.asSession( 
systemOperationContext, RequestContext.builder().buildRestli(auth.getActor().toUrnStr(), getContext(), ACTION_SEARCH_ACROSS_LINEAGE, entities), authorizer, auth, true) .withSearchFlags(flags -> (searchFlags != null ? searchFlags : new SearchFlags().setFulltext(true)) @@ -559,7 +583,7 @@ public Task searchAcrossLineage( input, maxHops, filter, - sortCriterion, + sortCriterionList, start, count), entityService), @@ -577,6 +601,7 @@ public Task scrollAcrossLineage( @ActionParam(PARAM_MAX_HOPS) @Optional @Nullable Integer maxHops, @ActionParam(PARAM_FILTER) @Optional @Nullable Filter filter, @ActionParam(PARAM_SORT) @Optional @Nullable SortCriterion sortCriterion, + @ActionParam(PARAM_SORT_CRITERIA) @Optional @Nullable SortCriterion[] sortCriteria, @ActionParam(PARAM_SCROLL_ID) @Optional @Nullable String scrollId, @ActionParam(PARAM_KEEP_ALIVE) String keepAlive, @ActionParam(PARAM_COUNT) int count, @@ -611,6 +636,8 @@ public Task scrollAcrossLineage( entityList, input); + List sortCriterionList = getSortCriteria(sortCriteria, sortCriterion); + return RestliUtil.toTask( () -> validateLineageScrollResult(opContext, @@ -622,7 +649,7 @@ public Task scrollAcrossLineage( input, maxHops, filter, - sortCriterion, + sortCriterionList, scrollId, keepAlive, count), @@ -637,6 +664,7 @@ public Task list( @ActionParam(PARAM_ENTITY) @Nonnull String entityName, @ActionParam(PARAM_FILTER) @Optional @Nullable Filter filter, @ActionParam(PARAM_SORT) @Optional @Nullable SortCriterion sortCriterion, + @ActionParam(PARAM_SORT_CRITERIA) @Optional @Nullable SortCriterion[] sortCriteria, @ActionParam(PARAM_START) int start, @ActionParam(PARAM_COUNT) int count) { @@ -653,10 +681,12 @@ public Task list( systemOperationContext, RequestContext.builder().buildRestli(auth.getActor().toUrnStr(), getContext(), ACTION_LIST, entityName), authorizer, auth, true) .withSearchFlags(flags -> new SearchFlags().setFulltext(false)); + List sortCriterionList = getSortCriteria(sortCriteria, sortCriterion); + 
log.info("GET LIST RESULTS for {} with filter {}", entityName, filter); return RestliUtil.toTask( () -> { - SearchResult result = entitySearchService.filter(opContext, entityName, filter, sortCriterion, start, count); + SearchResult result = entitySearchService.filter(opContext, entityName, filter, sortCriterionList, start, count); if (!AuthUtil.isAPIAuthorizedResult( auth, authorizer, @@ -1148,6 +1178,7 @@ public Task filter( @ActionParam(PARAM_ENTITY) @Nonnull String entityName, @ActionParam(PARAM_FILTER) Filter filter, @ActionParam(PARAM_SORT) @Optional @Nullable SortCriterion sortCriterion, + @ActionParam(PARAM_SORT_CRITERIA) @Optional @Nullable SortCriterion[] sortCriteria, @ActionParam(PARAM_START) int start, @ActionParam(PARAM_COUNT) int count) { @@ -1161,10 +1192,12 @@ public Task filter( } OperationContext opContext = OperationContext.asSession( systemOperationContext, RequestContext.builder().buildRestli(auth.getActor().toUrnStr(), getContext(), ACTION_FILTER, entityName), authorizer, auth, true); + + List sortCriterionList = getSortCriteria(sortCriteria, sortCriterion); log.info("FILTER RESULTS for {} with filter {}", entityName, filter); return RestliUtil.toTask( () -> { - SearchResult result = entitySearchService.filter(opContext.withSearchFlags(flags -> flags.setFulltext(true)), entityName, filter, sortCriterion, start, count); + SearchResult result = entitySearchService.filter(opContext.withSearchFlags(flags -> flags.setFulltext(true)), entityName, filter, sortCriterionList, start, count); if (!isAPIAuthorizedResult( auth, authorizer, diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/restli/RestliConstants.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/restli/RestliConstants.java index af6efb1ad8093..ef79a404c2145 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/restli/RestliConstants.java +++ 
b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/restli/RestliConstants.java @@ -24,6 +24,7 @@ private RestliConstants() {} public static final String PARAM_FILTER = "filter"; public static final String PARAM_GROUP = "group"; public static final String PARAM_SORT = "sort"; + public static final String PARAM_SORT_CRITERIA = "sortCriteria"; public static final String PARAM_QUERY = "query"; public static final String PARAM_FIELD = "field"; public static final String PARAM_PATH = "path"; diff --git a/metadata-service/restli-servlet-impl/src/test/java/mock/MockTimeseriesAspectService.java b/metadata-service/restli-servlet-impl/src/test/java/mock/MockTimeseriesAspectService.java index aaf90d279e0bd..7ed183e975f3b 100644 --- a/metadata-service/restli-servlet-impl/src/test/java/mock/MockTimeseriesAspectService.java +++ b/metadata-service/restli-servlet-impl/src/test/java/mock/MockTimeseriesAspectService.java @@ -137,7 +137,7 @@ public TimeseriesScrollResult scrollAspects( @Nonnull String entityName, @Nonnull String aspectName, @Nullable Filter filter, - @Nonnull List sortCriterion, + @Nonnull List sortCriteria, @Nullable String scrollId, int count, @Nullable Long startTimeMillis, diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/search/EntitySearchService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/search/EntitySearchService.java index a3db4b029b68b..1b32ddc7c3ecb 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/search/EntitySearchService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/search/EntitySearchService.java @@ -80,7 +80,7 @@ void appendRunId( * @param input the search input text * @param postFilters the request map with fields and values as filters to be applied to search * hits - * @param sortCriterion {@link SortCriterion} to be applied to search results + * @param sortCriteria list of {@link SortCriterion} to be applied to 
search results * @param from index to start the search from * @param size the number of search hits to return * @return a {@link SearchResult} that contains a list of matched documents and related search @@ -92,7 +92,7 @@ SearchResult search( @Nonnull List entityNames, @Nonnull String input, @Nullable Filter postFilters, - @Nullable SortCriterion sortCriterion, + List sortCriteria, int from, int size); @@ -108,7 +108,7 @@ SearchResult search( * @param input the search input text * @param postFilters the request map with fields and values as filters to be applied to search * hits - * @param sortCriterion {@link SortCriterion} to be applied to search results + * @param sortCriteria list of {@link SortCriterion} to be applied to search results * @param from index to start the search from * @param size the number of search hits to return * @param facets list of facets we want aggregations for @@ -121,7 +121,7 @@ SearchResult search( @Nonnull List entityNames, @Nonnull String input, @Nullable Filter postFilters, - @Nullable SortCriterion sortCriterion, + List sortCriteria, int from, int size, @Nullable List facets); @@ -132,7 +132,7 @@ SearchResult search( * @param entityName name of the entity * @param filters the request map with fields and values to be applied as filters to the search * query - * @param sortCriterion {@link SortCriterion} to be applied to search results + * @param sortCriteria list of {@link SortCriterion} to be applied to search results * @param from index to start the search from * @param size number of search hits to return * @return a {@link SearchResult} that contains a list of filtered documents and related search @@ -143,7 +143,7 @@ SearchResult filter( @Nonnull OperationContext opContext, @Nonnull String entityName, @Nullable Filter filters, - @Nullable SortCriterion sortCriterion, + List sortCriteria, int from, int size); @@ -265,7 +265,7 @@ List getBrowsePaths( * @param input the search input text * @param postFilters the request map with 
fields and values as filters to be applied to search * hits - * @param sortCriterion {@link SortCriterion} to be applied to search results + * @param sortCriteria list of {@link SortCriterion} to be applied to search results * @param scrollId opaque scroll identifier to pass to search service * @param size the number of search hits to return * @return a {@link ScrollResult} that contains a list of matched documents and related search @@ -277,7 +277,7 @@ ScrollResult fullTextScroll( @Nonnull List entities, @Nonnull String input, @Nullable Filter postFilters, - @Nullable SortCriterion sortCriterion, + List sortCriteria, @Nullable String scrollId, @Nullable String keepAlive, int size); @@ -290,7 +290,7 @@ ScrollResult fullTextScroll( * @param input the search input text * @param postFilters the request map with fields and values as filters to be applied to search * hits - * @param sortCriterion {@link SortCriterion} to be applied to search results + * @param sortCriteria list of {@link SortCriterion} to be applied to search results * @param scrollId opaque scroll identifier to pass to search service * @param size the number of search hits to return * @return a {@link ScrollResult} that contains a list of matched documents and related search @@ -302,7 +302,7 @@ ScrollResult structuredScroll( @Nonnull List entities, @Nonnull String input, @Nullable Filter postFilters, - @Nullable SortCriterion sortCriterion, + List sortCriteria, @Nullable String scrollId, @Nullable String keepAlive, int size); @@ -316,7 +316,7 @@ ExplainResponse explain( @Nonnull String documentId, @Nonnull String entityName, @Nullable Filter postFilters, - @Nullable SortCriterion sortCriterion, + List sortCriteria, @Nullable String scrollId, @Nullable String keepAlive, int size, diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/timeseries/TimeseriesAspectService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/timeseries/TimeseriesAspectService.java index 
6b1f484ac0a51..68c82f0ef2e0d 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/timeseries/TimeseriesAspectService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/timeseries/TimeseriesAspectService.java @@ -226,7 +226,7 @@ TimeseriesScrollResult scrollAspects( @Nonnull final String entityName, @Nonnull final String aspectName, @Nullable Filter filter, - @Nonnull List sortCriterion, + @Nonnull List sortCriteria, @Nullable String scrollId, int count, @Nullable Long startTimeMillis, From 17868cb06991dfded163ca5df49f23f7341ae690 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Fri, 9 Aug 2024 14:48:53 -0700 Subject: [PATCH 25/72] feat(ingest): various logging improvements (#11126) --- .../src/datahub/ingestion/sink/datahub_rest.py | 2 ++ .../src/datahub/telemetry/telemetry.py | 14 +++++++++++--- .../src/datahub/utilities/logging_manager.py | 5 +++++ 3 files changed, 18 insertions(+), 3 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/sink/datahub_rest.py b/metadata-ingestion/src/datahub/ingestion/sink/datahub_rest.py index a9f788acf66d3..6d7105bd26441 100644 --- a/metadata-ingestion/src/datahub/ingestion/sink/datahub_rest.py +++ b/metadata-ingestion/src/datahub/ingestion/sink/datahub_rest.py @@ -202,6 +202,8 @@ def _write_done_callback( record_urn = _get_urn(record_envelope) if record_urn: e.info["urn"] = record_urn + if workunit_id := record_envelope.metadata.get("workunit_id"): + e.info["workunit_id"] = workunit_id if not self.treat_errors_as_warnings: self.report.report_failure({"error": e.message, "info": e.info}) diff --git a/metadata-ingestion/src/datahub/telemetry/telemetry.py b/metadata-ingestion/src/datahub/telemetry/telemetry.py index 69a790b3d9bc7..4faf04ee2d2c7 100644 --- a/metadata-ingestion/src/datahub/telemetry/telemetry.py +++ b/metadata-ingestion/src/datahub/telemetry/telemetry.py @@ -283,7 +283,7 @@ def init_tracking(self) -> None: if not self.enabled or self.mp is None or 
self.tracking_init is True: return - logger.debug("Sending init Telemetry") + logger.debug("Sending init telemetry") try: self.mp.people_set( self.client_id, @@ -310,13 +310,21 @@ def ping( if not self.enabled or self.mp is None: return + properties = properties or {} + # send event try: - logger.debug(f"Sending telemetry for {event_name}") + if event_name == "function-call": + logger.debug( + f"Sending telemetry for {event_name} {properties.get('function')}, status {properties.get('status')}" + ) + else: + logger.debug(f"Sending telemetry for {event_name}") + properties = { **_default_telemetry_properties(), **self._server_props(server), - **(properties or {}), + **properties, } self.mp.track(self.client_id, event_name, properties) except Exception as e: diff --git a/metadata-ingestion/src/datahub/utilities/logging_manager.py b/metadata-ingestion/src/datahub/utilities/logging_manager.py index 64383745eb2d1..1eb763394094a 100644 --- a/metadata-ingestion/src/datahub/utilities/logging_manager.py +++ b/metadata-ingestion/src/datahub/utilities/logging_manager.py @@ -35,6 +35,8 @@ "acryl_datahub_cloud", ] IN_MEMORY_LOG_BUFFER_SIZE = 2000 # lines +IN_MEMORY_LOG_BUFFER_MAX_LINE_LENGTH = 2000 # characters + NO_COLOR = os.environ.get("NO_COLOR", False) @@ -159,6 +161,9 @@ def __init__(self, maxlen: Optional[int] = None) -> None: self._buffer: Deque[str] = collections.deque(maxlen=maxlen) def write(self, line: str) -> None: + if len(line) > IN_MEMORY_LOG_BUFFER_MAX_LINE_LENGTH: + line = line[:IN_MEMORY_LOG_BUFFER_MAX_LINE_LENGTH] + "[truncated]" + self._buffer.append(line) def clear(self) -> None: From b1f16f9b111b400fa8613a770f7c81bb56a75293 Mon Sep 17 00:00:00 2001 From: sid-acryl <155424659+sid-acryl@users.noreply.github.com> Date: Sat, 10 Aug 2024 03:36:42 +0530 Subject: [PATCH 26/72] fix(ingestion/lookml): fix for sql parsing error (#11079) Co-authored-by: Harshal Sheth --- metadata-ingestion/setup.py | 2 +- .../goldens/test_bigquery_information_schema_query.json | 4 
++-- .../tests/unit/sql_parsing/goldens/test_merge_from_union.json | 4 ++-- .../tests/unit/sql_parsing/test_sqlglot_lineage.py | 2 +- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index 445600b8abd48..aef22dd145978 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -99,7 +99,7 @@ sqlglot_lib = { # Using an Acryl fork of sqlglot. # https://github.com/tobymao/sqlglot/compare/main...hsheth2:sqlglot:main?expand=1 - "acryl-sqlglot[rs]==25.3.1.dev3", + "acryl-sqlglot[rs]==25.8.2.dev9", } classification_lib = { diff --git a/metadata-ingestion/tests/unit/sql_parsing/goldens/test_bigquery_information_schema_query.json b/metadata-ingestion/tests/unit/sql_parsing/goldens/test_bigquery_information_schema_query.json index 4b9bbd06ecba6..f5f573f3d5113 100644 --- a/metadata-ingestion/tests/unit/sql_parsing/goldens/test_bigquery_information_schema_query.json +++ b/metadata-ingestion/tests/unit/sql_parsing/goldens/test_bigquery_information_schema_query.json @@ -1,7 +1,7 @@ { "query_type": "SELECT", "query_type_props": {}, - "query_fingerprint": "772187d1c6ce8dbed2dd1ba79975b108d4e733015ffb7bcbf9b7146e64cf9914", + "query_fingerprint": "c721ce16410601b36e5f32bd9c5c28488500a93e617363739faebfe71496f163", "in_tables": [ "urn:li:dataset:(urn:li:dataPlatform:bigquery,acryl-staging-2.smoke_test_db_4.INFORMATION_SCHEMA.COLUMNS,PROD)", "urn:li:dataset:(urn:li:dataPlatform:bigquery,acryl-staging-2.smoke_test_db_4.INFORMATION_SCHEMA.COLUMN_FIELD_PATHS,PROD)" @@ -178,6 +178,6 @@ ], "debug_info": { "confidence": 0.2, - "generalized_statement": "SELECT c.table_catalog AS table_catalog, c.table_schema AS table_schema, c.table_name AS table_name, c.column_name AS column_name, c.ordinal_position AS ordinal_position, cfp.field_path AS field_path, c.is_nullable AS is_nullable, CASE WHEN CONTAINS_SUBSTR(field_path, ?) 
THEN NULL ELSE c.data_type END AS data_type, description AS comment, c.is_hidden AS is_hidden, c.is_partitioning_column AS is_partitioning_column, c.clustering_ordinal_position AS clustering_ordinal_position FROM `acryl-staging-2`.`smoke_test_db_4`.INFORMATION_SCHEMA.COLUMNS AS c JOIN `acryl-staging-2`.`smoke_test_db_4`.INFORMATION_SCHEMA.COLUMN_FIELD_PATHS AS cfp ON cfp.table_name = c.table_name AND cfp.column_name = c.column_name ORDER BY table_catalog, table_schema, table_name, ordinal_position ASC, data_type DESC" + "generalized_statement": "SELECT c.table_catalog AS table_catalog, c.table_schema AS table_schema, c.table_name AS table_name, c.column_name AS column_name, c.ordinal_position AS ordinal_position, cfp.field_path AS field_path, c.is_nullable AS is_nullable, CASE WHEN CONTAINS_SUBSTR(cfp.field_path, ?) THEN NULL ELSE c.data_type END AS data_type, description AS comment, c.is_hidden AS is_hidden, c.is_partitioning_column AS is_partitioning_column, c.clustering_ordinal_position AS clustering_ordinal_position FROM `acryl-staging-2`.`smoke_test_db_4`.INFORMATION_SCHEMA.COLUMNS AS c JOIN `acryl-staging-2`.`smoke_test_db_4`.INFORMATION_SCHEMA.COLUMN_FIELD_PATHS AS cfp ON cfp.table_name = c.table_name AND cfp.column_name = c.column_name ORDER BY table_catalog, table_schema, table_name, ordinal_position ASC, data_type DESC" } } \ No newline at end of file diff --git a/metadata-ingestion/tests/unit/sql_parsing/goldens/test_merge_from_union.json b/metadata-ingestion/tests/unit/sql_parsing/goldens/test_merge_from_union.json index 4ba44d9e54c9d..1a75dde4c634f 100644 --- a/metadata-ingestion/tests/unit/sql_parsing/goldens/test_merge_from_union.json +++ b/metadata-ingestion/tests/unit/sql_parsing/goldens/test_merge_from_union.json @@ -1,7 +1,7 @@ { "query_type": "MERGE", "query_type_props": {}, - "query_fingerprint": "38a78af8cc48333df0e4de7d6af5b9507a87dd8a2f129ef97c9b06dce2ca7b9f", + "query_fingerprint": 
"8001b852498d94a7f0f532dcd8cfa05328981ba437df6314466c764cc408969c", "in_tables": [ "urn:li:dataset:(urn:li:dataPlatform:bigquery,demo-pipelines-stg.referrer.prep_from_ios,PROD)", "urn:li:dataset:(urn:li:dataPlatform:bigquery,demo-pipelines-stg.referrer.prep_from_web,PROD)" @@ -12,6 +12,6 @@ "column_lineage": null, "debug_info": { "confidence": 0.2, - "generalized_statement": "MERGE INTO `demo-pipelines-stg`.`referrer`.`base_union` AS DBT_INTERNAL_DEST USING (SELECT * FROM `demo-pipelines-stg`.`referrer`.`prep_from_ios` WHERE partition_time = ? UNION ALL SELECT * FROM `demo-pipelines-stg`.`referrer`.`prep_from_web` WHERE partition_time = ?) AS DBT_INTERNAL_SOURCE ON FALSE WHEN NOT MATCHED BY SOURCE AND timestamp_trunc(DBT_INTERNAL_DEST.partition_time, DAY) IN (TIMESTAMP(?)) THEN delete WHEN NOT MATCHED THEN INSERT (`platform`, `pageview_id`, `query`, `referrer`, `partition_time`) VALUES (`platform`, `pageview_id`, `query`, `referrer`, `partition_time`)" + "generalized_statement": "MERGE INTO `demo-pipelines-stg`.`referrer`.`base_union` AS DBT_INTERNAL_DEST USING (SELECT * FROM `demo-pipelines-stg`.`referrer`.`prep_from_ios` WHERE partition_time = ? UNION ALL SELECT * FROM `demo-pipelines-stg`.`referrer`.`prep_from_web` WHERE partition_time = ?) 
AS DBT_INTERNAL_SOURCE ON FALSE WHEN NOT MATCHED BY SOURCE AND timestamp_trunc(DBT_INTERNAL_DEST.partition_time, DAY) IN (timestamp(?)) THEN delete WHEN NOT MATCHED THEN INSERT (`platform`, `pageview_id`, `query`, `referrer`, `partition_time`) VALUES (`platform`, `pageview_id`, `query`, `referrer`, `partition_time`)" } } \ No newline at end of file diff --git a/metadata-ingestion/tests/unit/sql_parsing/test_sqlglot_lineage.py b/metadata-ingestion/tests/unit/sql_parsing/test_sqlglot_lineage.py index 3096c9b8269a1..1bd634e9d10ec 100644 --- a/metadata-ingestion/tests/unit/sql_parsing/test_sqlglot_lineage.py +++ b/metadata-ingestion/tests/unit/sql_parsing/test_sqlglot_lineage.py @@ -1199,7 +1199,7 @@ def test_bigquery_information_schema_query() -> None: c.ordinal_position as ordinal_position, cfp.field_path as field_path, c.is_nullable as is_nullable, - CASE WHEN CONTAINS_SUBSTR(field_path, ".") THEN NULL ELSE c.data_type END as data_type, + CASE WHEN CONTAINS_SUBSTR(cfp.field_path, ".") THEN NULL ELSE c.data_type END as data_type, description as comment, c.is_hidden as is_hidden, c.is_partitioning_column as is_partitioning_column, From 3f4b8ea4dc437d14b1171aaa5b5dad3a85d2b0c2 Mon Sep 17 00:00:00 2001 From: Jay <159848059+jayacryl@users.noreply.github.com> Date: Fri, 9 Aug 2024 19:43:59 -0400 Subject: [PATCH 27/72] feat(docs-site) cloud page spacing and content polishes (#11141) --- .../src/pages/cloud/CompanyLogos/logos.module.scss | 5 +++-- docs-website/src/pages/cloud/index.js | 7 ++++--- docs-website/src/pages/cloud/styles.module.scss | 1 + 3 files changed, 8 insertions(+), 5 deletions(-) diff --git a/docs-website/src/pages/cloud/CompanyLogos/logos.module.scss b/docs-website/src/pages/cloud/CompanyLogos/logos.module.scss index a6a9dba9d8d41..de404627cf039 100644 --- a/docs-website/src/pages/cloud/CompanyLogos/logos.module.scss +++ b/docs-website/src/pages/cloud/CompanyLogos/logos.module.scss @@ -21,6 +21,7 @@ .scrollingCustomers { position: relative; overflow: 
hidden; + opacity: 0.5; } @@ -29,7 +30,7 @@ padding: 1.25rem 0; position: relative; align-items: center; - animation: scrollingCustomerAnimate 15s linear infinite; + animation: scrollingCustomerAnimate 60s linear infinite; } .scrollingCustomers__inner img { @@ -46,7 +47,7 @@ .animateScrollingCustomers { display: flex; - animation: scrollingCustomerAnimate 15s linear infinite; + animation: scrollingCustomerAnimate 60s linear infinite; } @media (max-width: 767px) { diff --git a/docs-website/src/pages/cloud/index.js b/docs-website/src/pages/cloud/index.js index 5166d80bf3b7b..00437c8a7640a 100644 --- a/docs-website/src/pages/cloud/index.js +++ b/docs-website/src/pages/cloud/index.js @@ -51,7 +51,7 @@ function Home() { -
+
@@ -73,12 +73,13 @@ function Home() { Product Tour -
+
+ {/*
An extension of the DataHub Core project.
View Cloud Docs. -
+
*/}
diff --git a/docs-website/src/pages/cloud/styles.module.scss b/docs-website/src/pages/cloud/styles.module.scss index d1ac31f3ef8cc..b805063750dd4 100644 --- a/docs-website/src/pages/cloud/styles.module.scss +++ b/docs-website/src/pages/cloud/styles.module.scss @@ -10,6 +10,7 @@ } .hero { + margin-top: 80px; :global { .button { margin-right: 1rem; From 796483bac837c0fb78336929d12e78fc666fc89d Mon Sep 17 00:00:00 2001 From: Chris Collins Date: Mon, 12 Aug 2024 11:33:14 -0400 Subject: [PATCH 28/72] feat(ui) Enable editing structured props on fields (#11042) --- .../SchemaFieldDrawer/FieldProperties.tsx | 47 ++++++++++++------- .../tabs/Properties/Edit/EditColumn.tsx | 17 ++++--- .../Edit/EditStructuredPropertyModal.tsx | 37 +++++++++++---- .../shared/tabs/Properties/PropertiesTab.tsx | 7 ++- 4 files changed, 75 insertions(+), 33 deletions(-) diff --git a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/SchemaFieldDrawer/FieldProperties.tsx b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/SchemaFieldDrawer/FieldProperties.tsx index 8c88cdce95f06..689a191f469f5 100644 --- a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/SchemaFieldDrawer/FieldProperties.tsx +++ b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/SchemaFieldDrawer/FieldProperties.tsx @@ -4,6 +4,8 @@ import { SchemaField, StdDataType } from '../../../../../../../../types.generate import { SectionHeader, StyledDivider } from './components'; import { mapStructuredPropertyValues } from '../../../../Properties/useStructuredProperties'; import StructuredPropertyValue from '../../../../Properties/StructuredPropertyValue'; +import { EditColumn } from '../../../../Properties/Edit/EditColumn'; +import { useGetEntityWithSchema } from '../../useGetEntitySchema'; const PropertyTitle = styled.div` font-size: 14px; @@ -13,6 +15,8 @@ const PropertyTitle = styled.div` const PropertyWrapper = styled.div` margin-bottom: 12px; + 
display: flex; + justify-content: space-between; `; const PropertiesWrapper = styled.div` @@ -29,6 +33,7 @@ interface Props { export default function FieldProperties({ expandedField }: Props) { const { schemaFieldEntity } = expandedField; + const { refetch } = useGetEntityWithSchema(true); if (!schemaFieldEntity?.structuredProperties?.properties?.length) return null; @@ -43,23 +48,33 @@ export default function FieldProperties({ expandedField }: Props) { const hasMultipleValues = valuesData.length > 1; return ( - - {structuredProp.structuredProperty.definition.displayName} - {hasMultipleValues ? ( - - {valuesData.map((value) => ( -
  • + +
    + + {structuredProp.structuredProperty.definition.displayName} + + {hasMultipleValues ? ( + + {valuesData.map((value) => ( +
  • + +
  • + ))} +
    + ) : ( + <> + {valuesData.map((value) => ( - - ))} - - ) : ( - <> - {valuesData.map((value) => ( - - ))} - - )} + ))} + + )} + + v.value) || []} + refetch={refetch} + />
    ); })} diff --git a/datahub-web-react/src/app/entity/shared/tabs/Properties/Edit/EditColumn.tsx b/datahub-web-react/src/app/entity/shared/tabs/Properties/Edit/EditColumn.tsx index ac50df6a5381e..6a0599c0cdb33 100644 --- a/datahub-web-react/src/app/entity/shared/tabs/Properties/Edit/EditColumn.tsx +++ b/datahub-web-react/src/app/entity/shared/tabs/Properties/Edit/EditColumn.tsx @@ -1,16 +1,19 @@ import { Button } from 'antd'; import React, { useState } from 'react'; -import { PropertyRow } from '../types'; import EditStructuredPropertyModal from './EditStructuredPropertyModal'; +import { StructuredPropertyEntity } from '../../../../../../types.generated'; interface Props { - propertyRow: PropertyRow; + structuredProperty?: StructuredPropertyEntity; + associatedUrn?: string; + values?: (string | number | null)[]; + refetch?: () => void; } -export function EditColumn({ propertyRow }: Props) { +export function EditColumn({ structuredProperty, associatedUrn, values, refetch }: Props) { const [isEditModalVisible, setIsEditModalVisible] = useState(false); - if (!propertyRow.structuredProperty || propertyRow.structuredProperty?.definition.immutable) { + if (!structuredProperty || structuredProperty?.definition.immutable) { return null; } @@ -21,9 +24,11 @@ export function EditColumn({ propertyRow }: Props) { setIsEditModalVisible(false)} + refetch={refetch} /> ); diff --git a/datahub-web-react/src/app/entity/shared/tabs/Properties/Edit/EditStructuredPropertyModal.tsx b/datahub-web-react/src/app/entity/shared/tabs/Properties/Edit/EditStructuredPropertyModal.tsx index 73a280031ebd0..c8def8bef5e19 100644 --- a/datahub-web-react/src/app/entity/shared/tabs/Properties/Edit/EditStructuredPropertyModal.tsx +++ b/datahub-web-react/src/app/entity/shared/tabs/Properties/Edit/EditStructuredPropertyModal.tsx @@ -1,7 +1,6 @@ import { Button, Modal, message } from 'antd'; -import React from 'react'; +import React, { useEffect, useMemo } from 'react'; import styled from 
'styled-components'; -import { PropertyRow } from '../types'; import StructuredPropertyInput from '../../../components/styled/StructuredProperty/StructuredPropertyInput'; import { PropertyValueInput, StructuredPropertyEntity } from '../../../../../../types.generated'; import { useUpsertStructuredPropertiesMutation } from '../../../../../../graphql/structuredProperties.generated'; @@ -17,19 +16,33 @@ const Description = styled.div` interface Props { isOpen: boolean; - propertyRow: PropertyRow; structuredProperty: StructuredPropertyEntity; + associatedUrn?: string; + values?: (string | number | null)[]; closeModal: () => void; + refetch?: () => void; } -export default function EditStructuredPropertyModal({ isOpen, propertyRow, structuredProperty, closeModal }: Props) { - const { refetch } = useEntityContext(); - const urn = useMutationUrn(); - const initialValues = propertyRow.values?.map((v) => v.value) || []; - const { selectedValues, selectSingleValue, toggleSelectedValue, updateSelectedValues } = +export default function EditStructuredPropertyModal({ + isOpen, + structuredProperty, + associatedUrn, + values, + closeModal, + refetch, +}: Props) { + const { refetch: entityRefetch } = useEntityContext(); + const mutationUrn = useMutationUrn(); + const urn = associatedUrn || mutationUrn; + const initialValues = useMemo(() => values || [], [values]); + const { selectedValues, selectSingleValue, toggleSelectedValue, updateSelectedValues, setSelectedValues } = useEditStructuredProperty(initialValues); const [upsertStructuredProperties] = useUpsertStructuredPropertiesMutation(); + useEffect(() => { + setSelectedValues(initialValues); + }, [isOpen, initialValues, setSelectedValues]); + function upsertProperties() { message.loading('Updating...'); upsertStructuredProperties({ @@ -51,7 +64,11 @@ export default function EditStructuredPropertyModal({ isOpen, propertyRow, struc }, }) .then(() => { - refetch(); + if (refetch) { + refetch(); + } else { + entityRefetch(); + } 
message.destroy(); message.success('Successfully updated structured property!'); closeModal(); @@ -67,7 +84,7 @@ export default function EditStructuredPropertyModal({ isOpen, propertyRow, struc return ( { propertyTableColumns.push({ title: '', width: '10%', - render: (propertyRow: PropertyRow) => , + render: (propertyRow: PropertyRow) => ( + v.value) || []} + /> + ), } as any); } From c9cc9e183cfcfcec338cb4cb89884ddc8a5ca751 Mon Sep 17 00:00:00 2001 From: RyanHolstien Date: Mon, 12 Aug 2024 11:21:10 -0500 Subject: [PATCH 29/72] feat(tests): add md5 and last computed to testResult model (#11117) --- .../main/pegasus/com/linkedin/test/TestResult.pdl | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/metadata-models/src/main/pegasus/com/linkedin/test/TestResult.pdl b/metadata-models/src/main/pegasus/com/linkedin/test/TestResult.pdl index 11c4aa944fa4f..5f90f038d4475 100644 --- a/metadata-models/src/main/pegasus/com/linkedin/test/TestResult.pdl +++ b/metadata-models/src/main/pegasus/com/linkedin/test/TestResult.pdl @@ -1,6 +1,7 @@ namespace com.linkedin.test import com.linkedin.common.Urn +import com.linkedin.common.AuditStamp /** * Information about a Test Result @@ -24,4 +25,15 @@ record TestResult { */ FAILURE } -} \ No newline at end of file + + /** + * The md5 of the test definition that was used to compute this result. + * See TestInfo.testDefinition.md5 for more information. + */ + testDefinitionMd5: optional string + + /** + * The audit stamp of when the result was computed, including the actor who computed it. 
+ */ + lastComputed: optional AuditStamp +} From 31559147f2e6a579e871eb21d76d4dc3ca35c40a Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Mon, 12 Aug 2024 11:21:40 -0500 Subject: [PATCH 30/72] test(openapi): openapi regression smoke tests (#11143) --- smoke-test/requirements.txt | 1 + smoke-test/tests/openapi/README.md | 33 ++ smoke-test/tests/openapi/__init__.py | 0 smoke-test/tests/openapi/test_openapi.py | 111 ++++ smoke-test/tests/openapi/v1/__init__.py | 0 smoke-test/tests/openapi/v1/timeline.json | 526 ++++++++++++++++++ smoke-test/tests/openapi/v2/__init__.py | 0 .../openapi/v2/structured_properties.json | 332 +++++++++++ smoke-test/tests/openapi/v2/timeline.json | 526 ++++++++++++++++++ smoke-test/tests/openapi/v3/__init__.py | 0 .../openapi/v3/structured_properties.json | 331 +++++++++++ 11 files changed, 1860 insertions(+) create mode 100644 smoke-test/tests/openapi/README.md create mode 100644 smoke-test/tests/openapi/__init__.py create mode 100644 smoke-test/tests/openapi/test_openapi.py create mode 100644 smoke-test/tests/openapi/v1/__init__.py create mode 100644 smoke-test/tests/openapi/v1/timeline.json create mode 100644 smoke-test/tests/openapi/v2/__init__.py create mode 100644 smoke-test/tests/openapi/v2/structured_properties.json create mode 100644 smoke-test/tests/openapi/v2/timeline.json create mode 100644 smoke-test/tests/openapi/v3/__init__.py create mode 100644 smoke-test/tests/openapi/v3/structured_properties.json diff --git a/smoke-test/requirements.txt b/smoke-test/requirements.txt index 861c69f354fe5..952e8ed355d05 100644 --- a/smoke-test/requirements.txt +++ b/smoke-test/requirements.txt @@ -18,3 +18,4 @@ types-requests>=2.28.11.6,<=2.31.0.3 types-PyYAML # https://github.com/docker/docker-py/issues/3256 requests<=2.31.0 +deepdiff \ No newline at end of file diff --git a/smoke-test/tests/openapi/README.md b/smoke-test/tests/openapi/README.md new file mode 100644 index 
0000000000000..68e2a75a6509b --- /dev/null +++ b/smoke-test/tests/openapi/README.md @@ -0,0 +1,33 @@ + +# Goal + +This test is configuration driven by json files which contain request/response sequences intended to +detect unexpected regressions between releases. + +Files can be executed in parallel but each request within the file is sequential. + +## Adding a test + +Create a file for a given OpenAPI version which contains a list of request/response pairs in the following +format. + +The request json object is translated into the python request arguments and the response object is the +expected status code and optional body. + +```json +[ + { + "request": { + "urn": "", + "description": "", + "method": "", + "json": {} + }, + "response": { + "status_codes": [200], + "exclude_regex_paths": [], + "json": {} + } + } +] +``` \ No newline at end of file diff --git a/smoke-test/tests/openapi/__init__.py b/smoke-test/tests/openapi/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/smoke-test/tests/openapi/test_openapi.py b/smoke-test/tests/openapi/test_openapi.py new file mode 100644 index 0000000000000..6561ee6d5c5cc --- /dev/null +++ b/smoke-test/tests/openapi/test_openapi.py @@ -0,0 +1,111 @@ +import concurrent.futures +import glob +import json +import logging + +import pytest +from deepdiff import DeepDiff + +import requests_wrapper as requests +from tests.utils import get_gms_url + +logger = logging.getLogger(__name__) + + +@pytest.mark.dependency() +def test_healthchecks(wait_for_healthchecks): + # Call to wait_for_healthchecks fixture will do the actual functionality. 
+ pass + + +def load_tests(fixture_glob="tests/openapi/**/*.json"): + for test_fixture in glob.glob(fixture_glob): + with open(test_fixture) as f: + yield (test_fixture, json.load(f)) + + +def execute_request(request): + session = requests.Session() + if "method" in request: + method = request.pop("method") + else: + method = "post" + + url = get_gms_url() + request.pop("url") + + return getattr(session, method)(url, **request) + + +def evaluate_test(test_name, test_data): + try: + for idx, req_resp in enumerate(test_data): + if "description" in req_resp["request"]: + description = req_resp["request"].pop("description") + else: + description = None + url = req_resp["request"]["url"] + actual_resp = execute_request(req_resp["request"]) + try: + if "response" in req_resp and "status_codes" in req_resp["response"]: + assert ( + actual_resp.status_code in req_resp["response"]["status_codes"] + ) + else: + assert actual_resp.status_code in [200, 202, 204] + if "response" in req_resp: + if "json" in req_resp["response"]: + if "exclude_regex_paths" in req_resp["response"]: + exclude_regex_paths = req_resp["response"][ + "exclude_regex_paths" + ] + else: + exclude_regex_paths = [] + diff = DeepDiff( + actual_resp.json(), + req_resp["response"]["json"], + exclude_regex_paths=exclude_regex_paths, + ) + assert not diff + else: + logger.warning("No expected response json found") + except Exception as e: + logger.error( + f"Error executing step: {idx}, url: {url}, test: {test_name}" + ) + if description: + logger.error(f"Step {idx} Description: {description}") + logger.error(f"Response content: {actual_resp.content}") + raise e + except Exception as e: + logger.error(f"Error executing test: {test_name}") + raise e + + +def run_tests(fixture_glob, num_workers=3): + with concurrent.futures.ThreadPoolExecutor(max_workers=num_workers) as executor: + futures = [] + for test_fixture, test_data in load_tests(fixture_glob=fixture_glob): + futures.append(executor.submit(evaluate_test, 
test_fixture, test_data)) + + for future in concurrent.futures.as_completed(futures): + logger.info(future.result()) + + +@pytest.mark.dependency(depends=["test_healthchecks"]) +def test_openapi_all(): + run_tests(fixture_glob="tests/openapi/**/*.json", num_workers=10) + + +# @pytest.mark.dependency(depends=["test_healthchecks"]) +# def test_openapi_v1(): +# run_tests(fixture_glob="tests/openapi/v1/*.json", num_workers=4) +# +# +# @pytest.mark.dependency(depends=["test_healthchecks"]) +# def test_openapi_v2(): +# run_tests(fixture_glob="tests/openapi/v2/*.json", num_workers=4) +# +# +# @pytest.mark.dependency(depends=["test_healthchecks"]) +# def test_openapi_v3(): +# run_tests(fixture_glob="tests/openapi/v3/*.json", num_workers=4) diff --git a/smoke-test/tests/openapi/v1/__init__.py b/smoke-test/tests/openapi/v1/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/smoke-test/tests/openapi/v1/timeline.json b/smoke-test/tests/openapi/v1/timeline.json new file mode 100644 index 0000000000000..36459d1b9e824 --- /dev/null +++ b/smoke-test/tests/openapi/v1/timeline.json @@ -0,0 +1,526 @@ +[ + { + "request": { + "url": "/openapi/v2/entity/dataset/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Atest%2CdatasetTimelineV1%2CPROD%29", + "description": "Remove test dataset", + "method": "delete" + } + }, + { + "request": { + "url": "/openapi/v2/entity/dataset/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Atest%2CdatasetTimelineV1%2CPROD%29/schemaMetadata?createIfNotExists=false", + "description": "Schema version 1", + "json": { + "schemaName": "db1.nested_struct_test", + "platform": "urn:li:dataPlatform:hive", + "version": 0, + "created": { + "time": 0, + "actor": "urn:li:corpuser:unknown", + "impersonator": "urn:li:corpuser:jdoe" + }, + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown", + "impersonator": "urn:li:corpuser:jdoe" + }, + "hash": "", + "platformSchema": { + "com.linkedin.schema.MySqlDDL": { + "tableSchema": "" + } 
+ }, + "fields": [ + { + "fieldPath": "property_id", + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "int", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.RecordType": {} + } + }, + "nativeDataType": "struct>", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"struct>\"}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service.[type=string].type", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"string\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service.[type=struct].provider", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.RecordType": {} + } + }, + "nativeDataType": "struct", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"struct\"}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service.[type=struct].provider.[type=string].name", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "varchar(50)", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"varchar(50)\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service.[type=struct].provider.[type=int].id", + "nullable": true, + "description": "Service provider id", + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "tinyint", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"tinyint\", \"_nullable\": true}" + } + ] + } + } + }, + { + "request": { + "url": 
"/openapi/v2/entity/dataset/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Atest%2CdatasetTimelineV1%2CPROD%29/schemaMetadata?createIfNotExists=false", + "description": "Schema version 2", + "json": { + "schemaName": "db1.nested_struct_test", + "platform": "urn:li:dataPlatform:hive", + "version": 0, + "created": { + "time": 0, + "actor": "urn:li:corpuser:unknown", + "impersonator": "urn:li:corpuser:jdoe" + }, + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown", + "impersonator": "urn:li:corpuser:jdoe" + }, + "hash": "", + "platformSchema": { + "com.linkedin.schema.MySqlDDL": { + "tableSchema": "" + } + }, + "fields": [ + { + "fieldPath": "property_id", + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "int", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.RecordType": {} + } + }, + "nativeDataType": "struct>", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"struct>\"}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service.[type=string].type", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"string\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service.[type=struct].provider", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.RecordType": {} + } + }, + "nativeDataType": "struct", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"struct\"}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service.[type=struct].provider.[type=int].id3", + "description": "Service provider name", + "nullable": true, + "type": { + "type": { + 
"com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "tinyint", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"tinyint\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service.[type=struct].provider.[type=int].id2", + "nullable": true, + "description": "Service provider id", + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "tinyint", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"tinyint\", \"_nullable\": true}" + } + ] + } + } + }, + { + "request": { + "url": "/openapi/v2/entity/dataset/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Atest%2CdatasetTimelineV1%2CPROD%29/schemaMetadata?createIfNotExists=false", + "description": "Schema version 3", + "json": { + "schemaName": "db1.nested_struct_test", + "platform": "urn:li:dataPlatform:hive", + "version": 0, + "created": { + "time": 0, + "actor": "urn:li:corpuser:unknown", + "impersonator": "urn:li:corpuser:jdoe" + }, + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown", + "impersonator": "urn:li:corpuser:jdoe" + }, + "hash": "", + "platformSchema": { + "com.linkedin.schema.MySqlDDL": { + "tableSchema": "" + } + }, + "fields": [ + { + "fieldPath": "property_id", + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "int", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.RecordType": {} + } + }, + "nativeDataType": "struct>", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"struct>\"}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service.[type=string].type", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + 
"recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"string\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service.[type=struct].provider", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.RecordType": {} + } + }, + "nativeDataType": "struct", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"struct\"}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service.[type=struct].provider.[type=string].name", + "description": "Service provider name", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "varchar(50)", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"varchar(50)\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service.[type=struct].provider.[type=int].id", + "nullable": true, + "description": "Service provider id", + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "tinyint", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"tinyint\", \"_nullable\": true}" + } + ] + } + } + }, + { + "request": { + "url": "/openapi/timeline/v1/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Atest%2CdatasetTimelineV1%2CPROD%29?startTime=-1&endTime=0&raw=false&categories=TECHNICAL_SCHEMA", + "method": "get", + "description": "Get timeline response" + }, + "response": { + "exclude_regex_paths": [ + "root\\[.+?\\]\\['timestamp'\\]" + ], + "json": [ + { + "timestamp": 1723245258298, + "actor": "urn:li:corpuser:__datahub_system", + "semVer": "0.0.0-computed", + "semVerChange": "MINOR", + "changeEvents": [ + { + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV1,PROD)", + "category": "TECHNICAL_SCHEMA", + "operation": "ADD", + "modifier": 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV1,PROD),service)", + "parameters": { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service", + "fieldUrn": "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV1,PROD),service)", + "nullable": true + }, + "semVerChange": "MINOR", + "description": "A forwards & backwards compatible change due to the newly added field 'service'." + }, + { + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV1,PROD)", + "category": "TECHNICAL_SCHEMA", + "operation": "ADD", + "modifier": "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV1,PROD),service.type)", + "parameters": { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service.[type=string].type", + "fieldUrn": "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV1,PROD),service.type)", + "nullable": true + }, + "semVerChange": "MINOR", + "description": "A forwards & backwards compatible change due to the newly added field 'service.type'." + }, + { + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV1,PROD)", + "category": "TECHNICAL_SCHEMA", + "operation": "ADD", + "modifier": "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV1,PROD),service.provider)", + "parameters": { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service.[type=struct].provider", + "fieldUrn": "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV1,PROD),service.provider)", + "nullable": true + }, + "semVerChange": "MINOR", + "description": "A forwards & backwards compatible change due to the newly added field 'service.provider'." 
+ }, + { + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV1,PROD)", + "category": "TECHNICAL_SCHEMA", + "operation": "ADD", + "modifier": "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV1,PROD),service.provider.id)", + "parameters": { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service.[type=struct].provider.[type=int].id", + "fieldUrn": "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV1,PROD),service.provider.id)", + "nullable": true + }, + "semVerChange": "MINOR", + "description": "A forwards & backwards compatible change due to the newly added field 'service.provider.id'." + }, + { + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV1,PROD)", + "category": "TECHNICAL_SCHEMA", + "operation": "ADD", + "modifier": "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV1,PROD),service.provider.name)", + "parameters": { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service.[type=struct].provider.[type=string].name", + "fieldUrn": "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV1,PROD),service.provider.name)", + "nullable": true + }, + "semVerChange": "MINOR", + "description": "A forwards & backwards compatible change due to the newly added field 'service.provider.name'." + }, + { + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV1,PROD)", + "category": "TECHNICAL_SCHEMA", + "operation": "ADD", + "modifier": "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV1,PROD),property_id)", + "parameters": { + "fieldPath": "property_id", + "fieldUrn": "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV1,PROD),property_id)", + "nullable": false + }, + "semVerChange": "MINOR", + "description": "A forwards & backwards compatible change due to the newly added field 'property_id'." 
+ } + ], + "versionStamp": "browsePathsV2:0;dataPlatformInstance:0;datasetKey:0;schemaMetadata:1" + }, + { + "timestamp": 1723245269788, + "actor": "urn:li:corpuser:__datahub_system", + "semVer": "1.0.0-computed", + "semVerChange": "MAJOR", + "changeEvents": [ + { + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV1,PROD)", + "category": "TECHNICAL_SCHEMA", + "operation": "MODIFY", + "modifier": "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV1,PROD),service.provider.id)", + "parameters": { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service.[type=struct].provider.[type=int].id", + "fieldUrn": "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV1,PROD),service.provider.id)", + "nullable": true + }, + "semVerChange": "MINOR", + "description": "A forwards & backwards compatible change due to renaming of the field 'service.provider.id to service.provider.id2'." + }, + { + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV1,PROD)", + "category": "TECHNICAL_SCHEMA", + "operation": "ADD", + "modifier": "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV1,PROD),service.provider.id3)", + "parameters": { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service.[type=struct].provider.[type=int].id3", + "fieldUrn": "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV1,PROD),service.provider.id3)", + "nullable": true + }, + "semVerChange": "MINOR", + "description": "A forwards & backwards compatible change due to the newly added field 'service.provider.id3'." 
+ }, + { + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV1,PROD)", + "category": "TECHNICAL_SCHEMA", + "operation": "REMOVE", + "modifier": "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV1,PROD),service.provider.name)", + "parameters": { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service.[type=struct].provider.[type=string].name", + "fieldUrn": "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV1,PROD),service.provider.name)", + "nullable": true + }, + "semVerChange": "MAJOR", + "description": "A backwards incompatible change due to removal of field: 'service.provider.name'." + } + ], + "versionStamp": "browsePathsV2:0;dataPlatformInstance:0;datasetKey:0;schemaMetadata:2" + }, + { + "timestamp": 1723245279320, + "actor": "urn:li:corpuser:__datahub_system", + "semVer": "2.0.0-computed", + "semVerChange": "MAJOR", + "changeEvents": [ + { + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV1,PROD)", + "category": "TECHNICAL_SCHEMA", + "operation": "MODIFY", + "modifier": "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV1,PROD),service.provider.id2)", + "parameters": { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service.[type=struct].provider.[type=int].id2", + "fieldUrn": "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV1,PROD),service.provider.id2)", + "nullable": true + }, + "semVerChange": "MINOR", + "description": "A forwards & backwards compatible change due to renaming of the field 'service.provider.id2 to service.provider.id'." 
+ }, + { + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV1,PROD)", + "category": "TECHNICAL_SCHEMA", + "operation": "REMOVE", + "modifier": "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV1,PROD),service.provider.id3)", + "parameters": { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service.[type=struct].provider.[type=int].id3", + "fieldUrn": "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV1,PROD),service.provider.id3)", + "nullable": true + }, + "semVerChange": "MAJOR", + "description": "A backwards incompatible change due to removal of field: 'service.provider.id3'." + }, + { + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV1,PROD)", + "category": "TECHNICAL_SCHEMA", + "operation": "ADD", + "modifier": "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV1,PROD),service.provider.name)", + "parameters": { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service.[type=struct].provider.[type=string].name", + "fieldUrn": "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV1,PROD),service.provider.name)", + "nullable": true + }, + "semVerChange": "MINOR", + "description": "A forwards & backwards compatible change due to the newly added field 'service.provider.name'." 
+ } + ], + "versionStamp": "browsePathsV2:0;dataPlatformInstance:0;datasetKey:0;schemaMetadata:0" + } + ] + } + } +] \ No newline at end of file diff --git a/smoke-test/tests/openapi/v2/__init__.py b/smoke-test/tests/openapi/v2/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/smoke-test/tests/openapi/v2/structured_properties.json b/smoke-test/tests/openapi/v2/structured_properties.json new file mode 100644 index 0000000000000..7eb67ffc8c320 --- /dev/null +++ b/smoke-test/tests/openapi/v2/structured_properties.json @@ -0,0 +1,332 @@ +[ + { + "request": { + "url": "/openapi/v2/entity/dataset/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Atest%2CdatasetStructPropV2%2CPROD%29", + "description": "Remove test dataset", + "method": "delete" + } + }, + { + "request": { + "url": "/openapi/v2/entity/structuredProperty/urn%3Ali%3AstructuredProperty%3Aio.acryl.privacy.v2.retentionTime", + "description": "Remove test structured property", + "method": "delete" + } + }, + { + "request": { + "url": "/openapi/v2/entity/structuredProperty/urn%3Ali%3AstructuredProperty%3Aio.acryl.privacy.v2.retentionTime02", + "description": "Remove test structured property #2", + "method": "delete" + } + }, + { + "request": { + "url": "/openapi/v2/entity/structuredProperty/urn%3Ali%3AstructuredProperty%3Aio.acryl.privacy.v2.retentionTime/propertyDefinition", + "description": "Create structured property definition", + "params": { + "createIfNotExists": "false" + }, + "json": { + "qualifiedName": "io.acryl.privacy.v2.retentionTime", + "valueType": "urn:li:dataType:datahub.number", + "description": "Retention Time is used to figure out how long to retain records in a dataset", + "displayName": "Retention Time", + "cardinality": "MULTIPLE", + "entityTypes": [ + "urn:li:entityType:datahub.dataset", + "urn:li:entityType:datahub.dataFlow" + ], + "allowedValues": [ + { + "value": { + "double": 30 + }, + "description": "30 days, usually reserved for datasets that are 
ephemeral and contain pii" + }, + { + "value": { + "double": 60 + }, + "description": "Use this for datasets that drive monthly reporting but contain pii" + }, + { + "value": { + "double": 365 + }, + "description": "Use this for non-sensitive data that can be retained for longer" + } + ] + } + }, + "response": { + "json": { + "urn": "urn:li:structuredProperty:io.acryl.privacy.v2.retentionTime", + "aspects": { + "propertyDefinition": { + "value": { + "allowedValues": [ + { + "value": { + "double": 30.0 + }, + "description": "30 days, usually reserved for datasets that are ephemeral and contain pii" + }, + { + "value": { + "double": 60.0 + }, + "description": "Use this for datasets that drive monthly reporting but contain pii" + }, + { + "value": { + "double": 365.0 + }, + "description": "Use this for non-sensitive data that can be retained for longer" + } + ], + "qualifiedName": "io.acryl.privacy.v2.retentionTime", + "displayName": "Retention Time", + "valueType": "urn:li:dataType:datahub.number", + "description": "Retention Time is used to figure out how long to retain records in a dataset", + "entityTypes": [ + "urn:li:entityType:datahub.dataset", + "urn:li:entityType:datahub.dataFlow" + ], + "cardinality": "MULTIPLE" + } + } + } + } + } + }, + { + "request": { + "url": "/openapi/v2/entity/dataset?createIfNotExists=false&createEntityIfNotExists=false", + "description": "Create dataset", + "json": [ + { + "urn": "urn:li:dataset:(urn:li:dataPlatform:test,datasetStructPropV2,PROD)", + "aspects": { + "status": { + "value": { + "removed": false + } + } + } + } + ] + }, + "response": { + "json": [ + { + "urn": "urn:li:dataset:(urn:li:dataPlatform:test,datasetStructPropV2,PROD)" + } + ] + } + }, + { + "request": { + "url": "/openapi/v2/entity/dataset/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Atest%2CdatasetStructPropV2%2CPROD%29/structuredProperties?createIfNotExists=false", + "description": "Add structured property to dataset", + "json": { + "properties": [ + { + 
"propertyUrn": "urn:li:structuredProperty:io.acryl.privacy.v2.retentionTime", + "values": [ + { + "double": 60.0 + } + ] + } + ] + } + }, + "response": { + "json": { + "urn": "urn:li:dataset:(urn:li:dataPlatform:test,datasetStructPropV2,PROD)", + "aspects": { + "structuredProperties": { + "value": { + "properties": [ + { + "values": [ + { + "double": 60.0 + } + ], + "propertyUrn": "urn:li:structuredProperty:io.acryl.privacy.v2.retentionTime" + } + ] + } + } + } + } + } + }, + { + "request": { + "url": "/openapi/v2/entity/structuredProperty/urn%3Ali%3AstructuredProperty%3Aio.acryl.privacy.v2.retentionTime02/propertyDefinition?createIfNotExists=false", + "description": "Create structured property definition #2", + "params": { + "createIfNotExists": "false" + }, + "json": { + "qualifiedName": "io.acryl.privacy.v2.retentionTime02", + "displayName": "Retention Time 02", + "valueType": "urn:li:dataType:datahub.string", + "allowedValues": [ + { + "value": { + "string": "foo2" + }, + "description": "test foo2 value" + }, + { + "value": { + "string": "bar2" + }, + "description": "test bar2 value" + } + ], + "cardinality": "SINGLE", + "entityTypes": [ + "urn:li:entityType:datahub.dataset" + ] + } + }, + "response": { + "json": { + "urn": "urn:li:structuredProperty:io.acryl.privacy.v2.retentionTime02", + "aspects": { + "propertyDefinition": { + "value": { + "allowedValues": [ + { + "description": "test foo2 value", + "value": { + "string": "foo2" + } + }, + { + "description": "test bar2 value", + "value": { + "string": "bar2" + } + } + ], + "entityTypes": [ + "urn:li:entityType:datahub.dataset" + ], + "cardinality": "SINGLE", + "displayName": "Retention Time 02", + "qualifiedName": "io.acryl.privacy.v2.retentionTime02", + "valueType": "urn:li:dataType:datahub.string" + } + } + } + } + } + }, + { + "request": { + "url": "/openapi/v2/entity/dataset/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Atest%2CdatasetStructPropV2%2CPROD%29/structuredProperties", + "description": 
"Patch ADD structured property", + "method": "patch", + "json": { + "patch": [ + { + "op": "add", + "path": "/properties/urn:li:structuredProperty:io.acryl.privacy.v2.retentionTime02", + "value": { + "propertyUrn": "urn:li:structuredProperty:io.acryl.privacy.v2.retentionTime02", + "values": [ + { + "string": "bar2" + } + ] + } + } + ], + "arrayPrimaryKeys": { + "properties": [ + "propertyUrn" + ] + } + } + }, + "response": { + "json": { + "urn": "urn:li:dataset:(urn:li:dataPlatform:test,datasetStructPropV2,PROD)", + "aspects": { + "structuredProperties": { + "value": { + "properties": [ + { + "values": [ + { + "double": 60.0 + } + ], + "propertyUrn": "urn:li:structuredProperty:io.acryl.privacy.v2.retentionTime" + }, + { + "values": [ + { + "string": "bar2" + } + ], + "propertyUrn": "urn:li:structuredProperty:io.acryl.privacy.v2.retentionTime02" + } + ] + } + } + } + } + } + }, + { + "request": { + "url": "/openapi/v2/entity/dataset/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Atest%2CdatasetStructPropV2%2CPROD%29/structuredProperties", + "description": "Patch REMOVE structured property", + "method": "patch", + "json": { + "patch": [ + { + "op": "remove", + "path": "/properties/urn:li:structuredProperty:io.acryl.privacy.v2.retentionTime02", + "value": { + "propertyUrn": "urn:li:structuredProperty:io.acryl.privacy.v2.retentionTime02" + } + } + ], + "arrayPrimaryKeys": { + "properties": [ + "propertyUrn" + ] + } + } + }, + "response": { + "json": { + "urn": "urn:li:dataset:(urn:li:dataPlatform:test,datasetStructPropV2,PROD)", + "aspects": { + "structuredProperties": { + "value": { + "properties": [ + { + "values": [ + { + "double": 60.0 + } + ], + "propertyUrn": "urn:li:structuredProperty:io.acryl.privacy.v2.retentionTime" + } + ] + } + } + } + } + } + } +] \ No newline at end of file diff --git a/smoke-test/tests/openapi/v2/timeline.json b/smoke-test/tests/openapi/v2/timeline.json new file mode 100644 index 0000000000000..ceee67b39a6d0 --- /dev/null +++ 
b/smoke-test/tests/openapi/v2/timeline.json @@ -0,0 +1,526 @@ +[ + { + "request": { + "url": "/openapi/v2/entity/dataset/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Atest%2CdatasetTimelineV2%2CPROD%29", + "description": "Remove test dataset", + "method": "delete" + } + }, + { + "request": { + "url": "/openapi/v2/entity/dataset/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Atest%2CdatasetTimelineV2%2CPROD%29/schemaMetadata?createIfNotExists=false", + "description": "Schema version 1", + "json": { + "schemaName": "db1.nested_struct_test", + "platform": "urn:li:dataPlatform:hive", + "version": 0, + "created": { + "time": 0, + "actor": "urn:li:corpuser:unknown", + "impersonator": "urn:li:corpuser:jdoe" + }, + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown", + "impersonator": "urn:li:corpuser:jdoe" + }, + "hash": "", + "platformSchema": { + "com.linkedin.schema.MySqlDDL": { + "tableSchema": "" + } + }, + "fields": [ + { + "fieldPath": "property_id", + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "int", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.RecordType": {} + } + }, + "nativeDataType": "struct>", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"struct>\"}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service.[type=string].type", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"string\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service.[type=struct].provider", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.RecordType": {} + } + }, + "nativeDataType": "struct", + 
"recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"struct\"}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service.[type=struct].provider.[type=string].name", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "varchar(50)", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"varchar(50)\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service.[type=struct].provider.[type=int].id", + "nullable": true, + "description": "Service provider id", + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "tinyint", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"tinyint\", \"_nullable\": true}" + } + ] + } + } + }, + { + "request": { + "url": "/openapi/v2/entity/dataset/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Atest%2CdatasetTimelineV2%2CPROD%29/schemaMetadata?createIfNotExists=false", + "description": "Schema version 2", + "json": { + "schemaName": "db1.nested_struct_test", + "platform": "urn:li:dataPlatform:hive", + "version": 0, + "created": { + "time": 0, + "actor": "urn:li:corpuser:unknown", + "impersonator": "urn:li:corpuser:jdoe" + }, + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown", + "impersonator": "urn:li:corpuser:jdoe" + }, + "hash": "", + "platformSchema": { + "com.linkedin.schema.MySqlDDL": { + "tableSchema": "" + } + }, + "fields": [ + { + "fieldPath": "property_id", + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "int", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.RecordType": {} + } + }, + "nativeDataType": "struct>", + "recursive": false, + "isPartOfKey": false, + "jsonProps": 
"{\"native_data_type\": \"struct>\"}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service.[type=string].type", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"string\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service.[type=struct].provider", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.RecordType": {} + } + }, + "nativeDataType": "struct", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"struct\"}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service.[type=struct].provider.[type=int].id3", + "description": "Service provider name", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "tinyint", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"tinyint\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service.[type=struct].provider.[type=int].id2", + "nullable": true, + "description": "Service provider id", + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "tinyint", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"tinyint\", \"_nullable\": true}" + } + ] + } + } + }, + { + "request": { + "url": "/openapi/v2/entity/dataset/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Atest%2CdatasetTimelineV2%2CPROD%29/schemaMetadata?createIfNotExists=false", + "description": "Schema version 3", + "json": { + "schemaName": "db1.nested_struct_test", + "platform": "urn:li:dataPlatform:hive", + "version": 0, + "created": { + "time": 0, + "actor": "urn:li:corpuser:unknown", + "impersonator": "urn:li:corpuser:jdoe" + }, + "lastModified": { + "time": 0, + 
"actor": "urn:li:corpuser:unknown", + "impersonator": "urn:li:corpuser:jdoe" + }, + "hash": "", + "platformSchema": { + "com.linkedin.schema.MySqlDDL": { + "tableSchema": "" + } + }, + "fields": [ + { + "fieldPath": "property_id", + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "int", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.RecordType": {} + } + }, + "nativeDataType": "struct>", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"struct>\"}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service.[type=string].type", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"string\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service.[type=struct].provider", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.RecordType": {} + } + }, + "nativeDataType": "struct", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"struct\"}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service.[type=struct].provider.[type=string].name", + "description": "Service provider name", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "varchar(50)", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"varchar(50)\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service.[type=struct].provider.[type=int].id", + "nullable": true, + "description": "Service provider id", + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + 
"nativeDataType": "tinyint", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"tinyint\", \"_nullable\": true}" + } + ] + } + } + }, + { + "request": { + "url": "/openapi/v2/timeline/v1/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Atest%2CdatasetTimelineV2%2CPROD%29?startTime=-1&endTime=0&raw=false&categories=TECHNICAL_SCHEMA", + "method": "get", + "description": "Get timeline response" + }, + "response": { + "exclude_regex_paths": [ + "root\\[.+?\\]\\['timestamp'\\]" + ], + "json": [ + { + "timestamp": 1723245258298, + "actor": "urn:li:corpuser:__datahub_system", + "semVer": "0.0.0-computed", + "semVerChange": "MINOR", + "changeEvents": [ + { + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV2,PROD)", + "category": "TECHNICAL_SCHEMA", + "operation": "ADD", + "modifier": "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV2,PROD),service)", + "parameters": { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service", + "fieldUrn": "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV2,PROD),service)", + "nullable": true + }, + "semVerChange": "MINOR", + "description": "A forwards & backwards compatible change due to the newly added field 'service'." + }, + { + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV2,PROD)", + "category": "TECHNICAL_SCHEMA", + "operation": "ADD", + "modifier": "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV2,PROD),service.type)", + "parameters": { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service.[type=string].type", + "fieldUrn": "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV2,PROD),service.type)", + "nullable": true + }, + "semVerChange": "MINOR", + "description": "A forwards & backwards compatible change due to the newly added field 'service.type'." 
+ }, + { + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV2,PROD)", + "category": "TECHNICAL_SCHEMA", + "operation": "ADD", + "modifier": "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV2,PROD),service.provider)", + "parameters": { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service.[type=struct].provider", + "fieldUrn": "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV2,PROD),service.provider)", + "nullable": true + }, + "semVerChange": "MINOR", + "description": "A forwards & backwards compatible change due to the newly added field 'service.provider'." + }, + { + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV2,PROD)", + "category": "TECHNICAL_SCHEMA", + "operation": "ADD", + "modifier": "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV2,PROD),service.provider.id)", + "parameters": { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service.[type=struct].provider.[type=int].id", + "fieldUrn": "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV2,PROD),service.provider.id)", + "nullable": true + }, + "semVerChange": "MINOR", + "description": "A forwards & backwards compatible change due to the newly added field 'service.provider.id'." 
+ }, + { + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV2,PROD)", + "category": "TECHNICAL_SCHEMA", + "operation": "ADD", + "modifier": "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV2,PROD),service.provider.name)", + "parameters": { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service.[type=struct].provider.[type=string].name", + "fieldUrn": "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV2,PROD),service.provider.name)", + "nullable": true + }, + "semVerChange": "MINOR", + "description": "A forwards & backwards compatible change due to the newly added field 'service.provider.name'." + }, + { + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV2,PROD)", + "category": "TECHNICAL_SCHEMA", + "operation": "ADD", + "modifier": "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV2,PROD),property_id)", + "parameters": { + "fieldPath": "property_id", + "fieldUrn": "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV2,PROD),property_id)", + "nullable": false + }, + "semVerChange": "MINOR", + "description": "A forwards & backwards compatible change due to the newly added field 'property_id'." 
+ } + ], + "versionStamp": "browsePathsV2:0;dataPlatformInstance:0;datasetKey:0;schemaMetadata:1" + }, + { + "timestamp": 1723245269788, + "actor": "urn:li:corpuser:__datahub_system", + "semVer": "1.0.0-computed", + "semVerChange": "MAJOR", + "changeEvents": [ + { + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV2,PROD)", + "category": "TECHNICAL_SCHEMA", + "operation": "MODIFY", + "modifier": "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV2,PROD),service.provider.id)", + "parameters": { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service.[type=struct].provider.[type=int].id", + "fieldUrn": "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV2,PROD),service.provider.id)", + "nullable": true + }, + "semVerChange": "MINOR", + "description": "A forwards & backwards compatible change due to renaming of the field 'service.provider.id to service.provider.id2'." + }, + { + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV2,PROD)", + "category": "TECHNICAL_SCHEMA", + "operation": "ADD", + "modifier": "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV2,PROD),service.provider.id3)", + "parameters": { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service.[type=struct].provider.[type=int].id3", + "fieldUrn": "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV2,PROD),service.provider.id3)", + "nullable": true + }, + "semVerChange": "MINOR", + "description": "A forwards & backwards compatible change due to the newly added field 'service.provider.id3'." 
+ }, + { + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV2,PROD)", + "category": "TECHNICAL_SCHEMA", + "operation": "REMOVE", + "modifier": "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV2,PROD),service.provider.name)", + "parameters": { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service.[type=struct].provider.[type=string].name", + "fieldUrn": "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV2,PROD),service.provider.name)", + "nullable": true + }, + "semVerChange": "MAJOR", + "description": "A backwards incompatible change due to removal of field: 'service.provider.name'." + } + ], + "versionStamp": "browsePathsV2:0;dataPlatformInstance:0;datasetKey:0;schemaMetadata:2" + }, + { + "timestamp": 1723245279320, + "actor": "urn:li:corpuser:__datahub_system", + "semVer": "2.0.0-computed", + "semVerChange": "MAJOR", + "changeEvents": [ + { + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV2,PROD)", + "category": "TECHNICAL_SCHEMA", + "operation": "MODIFY", + "modifier": "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV2,PROD),service.provider.id2)", + "parameters": { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service.[type=struct].provider.[type=int].id2", + "fieldUrn": "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV2,PROD),service.provider.id2)", + "nullable": true + }, + "semVerChange": "MINOR", + "description": "A forwards & backwards compatible change due to renaming of the field 'service.provider.id2 to service.provider.id'." 
+ }, + { + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV2,PROD)", + "category": "TECHNICAL_SCHEMA", + "operation": "REMOVE", + "modifier": "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV2,PROD),service.provider.id3)", + "parameters": { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service.[type=struct].provider.[type=int].id3", + "fieldUrn": "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV2,PROD),service.provider.id3)", + "nullable": true + }, + "semVerChange": "MAJOR", + "description": "A backwards incompatible change due to removal of field: 'service.provider.id3'." + }, + { + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV2,PROD)", + "category": "TECHNICAL_SCHEMA", + "operation": "ADD", + "modifier": "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV2,PROD),service.provider.name)", + "parameters": { + "fieldPath": "[version=2.0].[type=struct].[type=struct].service.[type=struct].provider.[type=string].name", + "fieldUrn": "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:test,datasetTimelineV2,PROD),service.provider.name)", + "nullable": true + }, + "semVerChange": "MINOR", + "description": "A forwards & backwards compatible change due to the newly added field 'service.provider.name'." 
+ } + ], + "versionStamp": "browsePathsV2:0;dataPlatformInstance:0;datasetKey:0;schemaMetadata:0" + } + ] + } + } +] \ No newline at end of file diff --git a/smoke-test/tests/openapi/v3/__init__.py b/smoke-test/tests/openapi/v3/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/smoke-test/tests/openapi/v3/structured_properties.json b/smoke-test/tests/openapi/v3/structured_properties.json new file mode 100644 index 0000000000000..b000c5da0a283 --- /dev/null +++ b/smoke-test/tests/openapi/v3/structured_properties.json @@ -0,0 +1,331 @@ +[ + { + "request": { + "url": "/openapi/v3/entity/dataset/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Atest%2CdatasetStructPropV3%2CPROD%29", + "description": "Remove test dataset", + "method": "delete" + } + }, + { + "request": { + "url": "/openapi/v3/entity/structuredProperty/urn%3Ali%3AstructuredProperty%3Aio.acryl.privacy.v3.retentionTime", + "description": "Remove test structured property", + "method": "delete" + } + }, + { + "request": { + "url": "/openapi/v3/entity/structuredProperty/urn%3Ali%3AstructuredProperty%3Aio.acryl.privacy.v3.retentionTime02", + "description": "Remove test structured property #2", + "method": "delete" + } + }, + { + "request": { + "url": "/openapi/v3/entity/structuredProperty/urn%3Ali%3AstructuredProperty%3Aio.acryl.privacy.v3.retentionTime/propertyDefinition", + "description": "Create structured property definition", + "params": { + "createIfNotExists": "false" + }, + "json": { + "value": { + "qualifiedName": "io.acryl.privacy.v3.retentionTime", + "valueType": "urn:li:dataType:datahub.number", + "description": "Retention Time is used to figure out how long to retain records in a dataset", + "displayName": "Retention Time", + "cardinality": "MULTIPLE", + "entityTypes": [ + "urn:li:entityType:datahub.dataset", + "urn:li:entityType:datahub.dataFlow" + ], + "allowedValues": [ + { + "value": { + "double": 30 + }, + "description": "30 days, usually reserved for datasets 
that are ephemeral and contain pii" + }, + { + "value": { + "double": 60 + }, + "description": "Use this for datasets that drive monthly reporting but contain pii" + }, + { + "value": { + "double": 365 + }, + "description": "Use this for non-sensitive data that can be retained for longer" + } + ] + } + } + }, + "response": { + "json": { + "urn": "urn:li:structuredProperty:io.acryl.privacy.v3.retentionTime", + "propertyDefinition": { + "value": { + "allowedValues": [ + { + "value": { + "double": 30.0 + }, + "description": "30 days, usually reserved for datasets that are ephemeral and contain pii" + }, + { + "value": { + "double": 60.0 + }, + "description": "Use this for datasets that drive monthly reporting but contain pii" + }, + { + "value": { + "double": 365.0 + }, + "description": "Use this for non-sensitive data that can be retained for longer" + } + ], + "qualifiedName": "io.acryl.privacy.v3.retentionTime", + "displayName": "Retention Time", + "valueType": "urn:li:dataType:datahub.number", + "description": "Retention Time is used to figure out how long to retain records in a dataset", + "entityTypes": [ + "urn:li:entityType:datahub.dataset", + "urn:li:entityType:datahub.dataFlow" + ], + "cardinality": "MULTIPLE" + } + } + } + } + }, + { + "request": { + "url": "/openapi/v3/entity/dataset?createIfNotExists=false&createEntityIfNotExists=false", + "description": "Create dataset", + "json": [ + { + "urn": "urn:li:dataset:(urn:li:dataPlatform:test,datasetStructPropV3,PROD)", + "status": { + "value": { + "removed": false + } + } + } + ] + }, + "response": { + "json": [ + { + "urn": "urn:li:dataset:(urn:li:dataPlatform:test,datasetStructPropV3,PROD)", + "status": { + "value": { + "removed": false + } + } + } + ] + } + }, + { + "request": { + "url": "/openapi/v3/entity/dataset/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Atest%2CdatasetStructPropV3%2CPROD%29/structuredProperties?createIfNotExists=false", + "description": "Add structured property to dataset", + 
"json": { + "value": { + "properties": [ + { + "propertyUrn": "urn:li:structuredProperty:io.acryl.privacy.v3.retentionTime", + "values": [ + { + "double": 60.0 + } + ] + } + ] + } + } + }, + "response": { + "json": { + "urn": "urn:li:dataset:(urn:li:dataPlatform:test,datasetStructPropV3,PROD)", + "structuredProperties": { + "value": { + "properties": [ + { + "values": [ + { + "double": 60.0 + } + ], + "propertyUrn": "urn:li:structuredProperty:io.acryl.privacy.v3.retentionTime" + } + ] + } + } + } + } + }, + { + "request": { + "url": "/openapi/v3/entity/structuredProperty/urn%3Ali%3AstructuredProperty%3Aio.acryl.privacy.v3.retentionTime02/propertyDefinition?createIfNotExists=false", + "description": "Create structured property definition #2", + "params": { + "createIfNotExists": "false" + }, + "json": { + "value": { + "qualifiedName": "io.acryl.privacy.v3.retentionTime02", + "displayName": "Retention Time 02", + "valueType": "urn:li:dataType:datahub.string", + "allowedValues": [ + { + "value": { + "string": "foo2" + }, + "description": "test foo2 value" + }, + { + "value": { + "string": "bar2" + }, + "description": "test bar2 value" + } + ], + "cardinality": "SINGLE", + "entityTypes": [ + "urn:li:entityType:datahub.dataset" + ] + } + } + }, + "response": { + "json": { + "urn": "urn:li:structuredProperty:io.acryl.privacy.v3.retentionTime02", + "propertyDefinition": { + "value": { + "allowedValues": [ + { + "description": "test foo2 value", + "value": { + "string": "foo2" + } + }, + { + "description": "test bar2 value", + "value": { + "string": "bar2" + } + } + ], + "entityTypes": [ + "urn:li:entityType:datahub.dataset" + ], + "cardinality": "SINGLE", + "displayName": "Retention Time 02", + "qualifiedName": "io.acryl.privacy.v3.retentionTime02", + "valueType": "urn:li:dataType:datahub.string" + } + } + } + } + }, + { + "request": { + "url": 
"/openapi/v3/entity/dataset/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Atest%2CdatasetStructPropV3%2CPROD%29/structuredProperties", + "description": "Patch ADD structured property", + "method": "patch", + "json": { + "patch": [ + { + "op": "add", + "path": "/properties/urn:li:structuredProperty:io.acryl.privacy.v3.retentionTime02", + "value": { + "propertyUrn": "urn:li:structuredProperty:io.acryl.privacy.v3.retentionTime02", + "values": [ + { + "string": "bar2" + } + ] + } + } + ], + "arrayPrimaryKeys": { + "properties": [ + "propertyUrn" + ] + } + } + }, + "response": { + "json": { + "urn": "urn:li:dataset:(urn:li:dataPlatform:test,datasetStructPropV3,PROD)", + "structuredProperties": { + "value": { + "properties": [ + { + "values": [ + { + "double": 60.0 + } + ], + "propertyUrn": "urn:li:structuredProperty:io.acryl.privacy.v3.retentionTime" + }, + { + "values": [ + { + "string": "bar2" + } + ], + "propertyUrn": "urn:li:structuredProperty:io.acryl.privacy.v3.retentionTime02" + } + ] + } + } + } + } + }, + { + "request": { + "url": "/openapi/v3/entity/dataset/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Atest%2CdatasetStructPropV3%2CPROD%29/structuredProperties", + "description": "Patch REMOVE structured property", + "method": "patch", + "json": { + "patch": [ + { + "op": "remove", + "path": "/properties/urn:li:structuredProperty:io.acryl.privacy.v3.retentionTime02", + "value": { + "propertyUrn": "urn:li:structuredProperty:io.acryl.privacy.v3.retentionTime02" + } + } + ], + "arrayPrimaryKeys": { + "properties": [ + "propertyUrn" + ] + } + } + }, + "response": { + "json": { + "urn": "urn:li:dataset:(urn:li:dataPlatform:test,datasetStructPropV3,PROD)", + "structuredProperties": { + "value": { + "properties": [ + { + "values": [ + { + "double": 60.0 + } + ], + "propertyUrn": "urn:li:structuredProperty:io.acryl.privacy.v3.retentionTime" + } + ] + } + } + } + } + } +] \ No newline at end of file From 3d4b3b900d6024645b0de7bd8c1b46998dd569a4 Mon Sep 17 00:00:00 
2001 From: Harshal Sheth Date: Mon, 12 Aug 2024 10:15:42 -0700 Subject: [PATCH 31/72] fix(airflow): fix tox tests + update docs (#11125) --- docs/lineage/airflow.md | 4 +-- .../datahub_listener.py | 35 +++++++++++++------ .../datahub_airflow_plugin/datahub_plugin.py | 6 +++- .../datahub_plugin_v22.py | 2 +- .../airflow-plugin/tox.ini | 4 ++- 5 files changed, 36 insertions(+), 15 deletions(-) diff --git a/docs/lineage/airflow.md b/docs/lineage/airflow.md index 2d7707637e2d1..65da1fd5251dc 100644 --- a/docs/lineage/airflow.md +++ b/docs/lineage/airflow.md @@ -18,7 +18,7 @@ There's two actively supported implementations of the plugin, with different Air | Approach | Airflow Version | Notes | | --------- | --------------- | --------------------------------------------------------------------------- | | Plugin v2 | 2.3.4+ | Recommended. Requires Python 3.8+ | -| Plugin v1 | 2.1+ | No automatic lineage extraction; may not extract lineage if the task fails. | +| Plugin v1 | 2.1 - 2.8 | No automatic lineage extraction; may not extract lineage if the task fails. | If you're using Airflow older than 2.1, it's possible to use the v1 plugin with older versions of `acryl-datahub-airflow-plugin`. See the [compatibility section](#compatibility) for more details. @@ -84,7 +84,7 @@ enabled = True # default ### Installation -The v1 plugin requires Airflow 2.1+ and Python 3.8+. If you're on older versions, it's still possible to use an older version of the plugin. See the [compatibility section](#compatibility) for more details. +The v1 plugin requires Airflow 2.1 - 2.8 and Python 3.8+. If you're on older versions, it's still possible to use an older version of the plugin. See the [compatibility section](#compatibility) for more details. If you're using Airflow 2.3+, we recommend using the v2 plugin instead. If you need to use the v1 plugin with Airflow 2.3+, you must also set the environment variable `DATAHUB_AIRFLOW_PLUGIN_USE_V1_PLUGIN=true`. 
diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_listener.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_listener.py index c87f7f8fb1a8e..d3727e41bb378 100644 --- a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_listener.py +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_listener.py @@ -143,6 +143,20 @@ def wrapper(*args, **kwargs): return cast(_F, wrapper) +def _render_templates(task_instance: "TaskInstance") -> "TaskInstance": + # Render templates in a copy of the task instance. + # This is necessary to get the correct operator args in the extractors. + try: + task_instance_copy = copy.deepcopy(task_instance) + task_instance_copy.render_templates() + return task_instance_copy + except Exception as e: + logger.info( + f"Error rendering templates in DataHub listener. Jinja-templated variables will not be extracted correctly: {e}" + ) + return task_instance + + class DataHubListener: __name__ = "DataHubListener" @@ -360,15 +374,7 @@ def on_task_instance_running( f"DataHub listener got notification about task instance start for {task_instance.task_id}" ) - # Render templates in a copy of the task instance. - # This is necessary to get the correct operator args in the extractors. - try: - task_instance = copy.deepcopy(task_instance) - task_instance.render_templates() - except Exception as e: - logger.info( - f"Error rendering templates in DataHub listener. Jinja-templated variables will not be extracted correctly: {e}" - ) + task_instance = _render_templates(task_instance) # The type ignore is to placate mypy on Airflow 2.1.x. 
dagrun: "DagRun" = task_instance.dag_run # type: ignore[attr-defined] @@ -459,8 +465,17 @@ def on_task_instance_finish( self, task_instance: "TaskInstance", status: InstanceRunResult ) -> None: dagrun: "DagRun" = task_instance.dag_run # type: ignore[attr-defined] - task = self._task_holder.get_task(task_instance) or task_instance.task + + task_instance = _render_templates(task_instance) + + # We must prefer the task attribute, in case modifications to the task's inlets/outlets + # were made by the execute() method. + if getattr(task_instance, "task", None): + task = task_instance.task + else: + task = self._task_holder.get_task(task_instance) assert task is not None + dag: "DAG" = task.dag # type: ignore[assignment] datajob = AirflowGenerator.generate_datajob( diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_plugin.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_plugin.py index 437c42713ea01..137cf97f69280 100644 --- a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_plugin.py +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_plugin.py @@ -4,6 +4,7 @@ from airflow.plugins_manager import AirflowPlugin +from datahub_airflow_plugin import __package_name__ from datahub_airflow_plugin._airflow_compat import AIRFLOW_PATCHED from datahub_airflow_plugin._airflow_shims import ( HAS_AIRFLOW_LISTENER_API, @@ -23,7 +24,10 @@ from openlineage.airflow.utils import try_import_from_string # noqa: F401 except ImportError: # If v2 plugin dependencies are not installed, we fall back to v1. - logger.debug("Falling back to v1 plugin due to missing dependencies.") + logger.warning( + "Falling back to the v1 DataHub plugin due to missing dependencies. " + f"Please install {__package_name__}[plugin-v2] to fix this." 
+ ) _USE_AIRFLOW_LISTENER_INTERFACE = False diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_plugin_v22.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_plugin_v22.py index ace7669bfa998..db47f37bed562 100644 --- a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_plugin_v22.py +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_plugin_v22.py @@ -7,7 +7,6 @@ from airflow.lineage import PIPELINE_OUTLETS from airflow.models.baseoperator import BaseOperator from airflow.utils.module_loading import import_string -from cattr import structure from datahub.api.entities.dataprocess.dataprocess_instance import InstanceRunResult from datahub.telemetry import telemetry @@ -52,6 +51,7 @@ def get_task_inlets_advanced(task: BaseOperator, context: Any) -> Iterable[Any]: ) from airflow.lineage import AUTO + from cattr import structure # pick up unique direct upstream task_ids if AUTO is specified if AUTO.upper() in task_inlets or AUTO.lower() in task_inlets: diff --git a/metadata-ingestion-modules/airflow-plugin/tox.ini b/metadata-ingestion-modules/airflow-plugin/tox.ini index 4d66dbc860aa9..9e0a30df6fcbd 100644 --- a/metadata-ingestion-modules/airflow-plugin/tox.ini +++ b/metadata-ingestion-modules/airflow-plugin/tox.ini @@ -23,7 +23,9 @@ deps = # Respect the Airflow constraints files. # We can't make ourselves work with the constraints of Airflow < 2.3. - py310-airflow24: -c https://raw.githubusercontent.com/apache/airflow/constraints-2.4.3/constraints-3.10.txt + # The Airflow 2.4 constraints file requires a version of the sqlite provider whose + # hook type is missing the `conn_name_attr` property. 
+ ; py310-airflow24: -c https://raw.githubusercontent.com/apache/airflow/constraints-2.4.3/constraints-3.10.txt py310-airflow26: -c https://raw.githubusercontent.com/apache/airflow/constraints-2.6.3/constraints-3.10.txt py310-airflow27: -c https://raw.githubusercontent.com/apache/airflow/constraints-2.7.3/constraints-3.10.txt py310-airflow28: -c https://raw.githubusercontent.com/apache/airflow/constraints-2.8.1/constraints-3.10.txt From 9dc85cb5bc07d001a82caf226fa1577118adb33b Mon Sep 17 00:00:00 2001 From: Hyejin Yoon <0327jane@gmail.com> Date: Tue, 13 Aug 2024 19:51:54 +0900 Subject: [PATCH 32/72] docs: add chime to adoption stories (#11142) --- docs-website/adoptionStoriesIndexes.json | 22 ++++++++++++++++++ .../adoption-stories-checkout-com.png | Bin 0 -> 64754 bytes .../adoption-stories-chime.png | Bin 0 -> 39983 bytes .../img/logos/companies/checkout-com.svg | 22 ++++++++++++++++++ .../static/img/logos/companies/chime.png | Bin 0 -> 4843 bytes 5 files changed, 44 insertions(+) create mode 100644 docs-website/static/img/adoption-stories/adoption-stories-checkout-com.png create mode 100644 docs-website/static/img/adoption-stories/adoption-stories-chime.png create mode 100644 docs-website/static/img/logos/companies/checkout-com.svg create mode 100644 docs-website/static/img/logos/companies/chime.png diff --git a/docs-website/adoptionStoriesIndexes.json b/docs-website/adoptionStoriesIndexes.json index 9697bdfcf39a9..d54dd6bcfa4f0 100644 --- a/docs-website/adoptionStoriesIndexes.json +++ b/docs-website/adoptionStoriesIndexes.json @@ -77,6 +77,28 @@ "category": "B2B & B2C", "description": "“We looked around for data catalog tool, and DataHub was a clear winner.”

    Zynga levels up data management using DataHub, highlighting its role in enhancing data management, tracing data lineage, and ensuring data quality." }, + { + "name": "Chime", + "slug": "chime", + "imageUrl": "/img/logos/companies/chime.png", + "imageSize": "default", + "link": "https://www.youtube.com/watch?v=GktS-XJhK30", + "linkType": "video", + "tagline": "A Story of schema, contracts, and data discovery", + "category": "Financial & Fintech", + "description": "“At Chime, DataHub serves as our guide for navigating the complexities of data cataloging and discovery.”

    Chime uses Datahub (DataHub Cloud) for ingesting metadata from diverse infrastructure components like Snowflake, Looker, Terraform, and so many others." + }, + { + "name": "Checkout.com", + "slug": "checkout-com", + "imageUrl": "/img/logos/companies/checkout-com.svg", + "imageSize": "small", + "link": "https://www.youtube.com/watch?v=emkpKO0bTkI", + "linkType": "video", + "tagline": "Self-Serve Data Governance with DataHub Action Framework", + "category": "Financial & Fintech", + "description": "Discover how Checkout leverage DataHub for advanced data management and compliance, especially in managing sensitive data types." + }, { "name": "MediaMarkt Saturn", "slug": "mediamarkt-saturn", diff --git a/docs-website/static/img/adoption-stories/adoption-stories-checkout-com.png b/docs-website/static/img/adoption-stories/adoption-stories-checkout-com.png new file mode 100644 index 0000000000000000000000000000000000000000..f6b574e1c37910c816a0aa8c9b918a284f205353 GIT binary patch literal 64754 zcmYg%WmFtn({2a}?(XjHF2UU$!eGHY=%B&fgS)$1fWZmw?gV#tzms#`yT0>d=tWP@ z?%Gwm?5V0SWko4OI6SxyA3h+;NQ(jkrPyPun|>B^RaFyk0t^`MdgAFu8uae1O|?5xF{#|i^><3rA`0u zv~@HNiA)&f4sC&&;gt%_1u|xj+54jz-`G-jGwyB9d9S@55frFWM^o!T=T2Go%@l9- zwj|7~xy;mc1=NCDkB5~yxC!B712=)#P0_1LYI=2SxwN3yh5~9+g5JKGeJalVQD|Ia zpuKFKKHHN$dOt)Yv{7x7u%(bJtY`XG-|(r7PPUBZi;gv# zlZfOrENP%Pj1?p&-6{7G$N~j3Q%Zp|UUp?RV{{~g>qvk(XPg}Bu>eIBh7>gQ38<^e3cf+9VaiG>>>+9LDQkJg86%I3)Y2*HUtWbG%!|LR7LFX zJv60~YiUXgLsU_iFaiut@Ix@9mw|jS>H+^gCi0)h7^hu*XcMrP(iEiUAGj00@GHkP zeRH>UclrEW%AFkhX3TAPLfzm*>CtnY7JGESm!&d`vV|zvf4io4a-!I-ntAm)YjyA` znsw?2Urg+g@_t;2>P*)WyW&CPtIQ!6^!ui{Y&|Q4yw&hfAeG`LoDb_b(!;HO!_StY z%ZplLyrdqJCIsPC9Y1Y`q6uEz0SS^)`bqo~3Ex3g*#MQ>c2SX9HWOqF{ zkFP2*j+#uOt#(!vjFT6TxjL`+6*=rd90?eQltE!Zy=p}!luY*_J|PZZCG!nhEHL1IpzUGJPJ zXb`^0(clEkzKshO=b3U#$1~o| zI?+>e642D>jC$M6^E-i;GX$S~A!KH=F~19uB>`BDUrXGJ7~CwRjq(P>CoM?Y>o)ux zShcwE9fS`*K%G|km_HK@3`d7jQXrK?np;1=5>J2+hGWd|?b_Uidql#bQajQey4~5Y 
z#X@977b3F5R`!f9(k3}<$Q5V#ntoUkq4|1Exf{V4k{!Dno=yhx1JzNTv~ZA zML$|JO@JeYNPF3plltj-N%q2wUcIPvhF^C%Tb78!L#hxYqz<>r_HTD_>B*9>8FVbouqYAGpT4E05JS4KY>1}u> zQv{qVKIf>-qTiv5X6Oh^6RqtXr3CgTRdjsJLa9C&FN=!%#RW&8^&73hs+T_F7`(hW$AMtVHQy?5tHF!p0E-( zg=|g7=P-UjRN2HrpoFx8xTOsY`ca_8jM8~{AHfmmX%ZBUBp~}&;ULE7!Ab+@btV

    _7!b4hkNFvd&Nml9EHl&qptIZD5Jaj};r z9whV|gPZV(@39ndSY=I#kXA4q=P)>$57~tGI63auOx*ZLD})N!IA3)VP$ILK`Ebd$ z7hcQg;(ev5fydN~?`7O0^aK67fREp6!!PPn_qssRp0GMLAJ=EGxbfZg@d%V?8P%Wh z50FL)N7#jz3?;1G7FMZR534g$rZ_J0WRN%EGj^jq6Rcc;3bC0#a?zV;X2Wx<>O-RW zV)n=_Ke0$|3T5)yhwAQOx@4_K@-XnUWV&W_TT^jz&wo;W7#(Lmuw<-oCWgZKl?msF z0Elu0B=n2B2proKf!U3rJVlg{hNo2R+{9xL8Kn5f9YtsosCaGwKIG)xKtq@`t*N4| z-0n|8q_)0avaPO$R|k*|n9bqSUl~zaT>l9CqhY3^+ij&IL+r z!8C--9D9nYZ69JSLH0e00=fud4MWxJYdB zev(|K6@EGMK0OpmmPj8lc#ZtGe_Y(odCM#*=hLu{?@Mx;%9xl$nIA*NYxIt83uH}! z$Q+>-+?>dmt)W#lW?(BrpTZC;v>bYq6mfKOg*Dhu(VoNh5&_zL$Y})E+MLP^9%-yu z&5i=ph*13%>K|`w@SKQpfSAZuSZzH+5zCq>pFEsk51H2|Be{@Hc(xy>O1=L4o_FqB zRvPRKVKqK2)~sL0^yz;sfXDe+jJtPI@OAeZk9#*Vo;*rEKiZt5TAKWWlbJjnySsO9 zi6jX1cGv->bTV#y@X0UG**sw{is(+wsrJu(bgDDhM!OqGO#{vMpkOualr)gb=gv5Z?75j1>{;xNj1X&@t z!+`y0A5ogt|9sk>#&%b)G}u?pPlCWs5$x}%IjZsCiDbd6)c%LGIrj?(H1M~S{~@hE zFytrr1t`t`zCcHWNJt5dCfFvTox zt$)X~`3@Ax8olf}upfjXI*9UrZ=?TcVQAT(1QC~zt$<6VnRg+cXK3Lk+>wRzB(XiR z*1xwaXg{28hV@3cLJ^2E9s8f{gnmY37<2a#GadweglUjfe|tJlN8dB)9`3n5=ajF= zcO6eP!^&#{a~btNTZvQs`@%gxe<@A|k(%Hzji+0ucNv_RpO2NMW`vFCYKX9;m;d3A z#PH_kd?a#vq4Kt@)uZC$lR@ujtD%f6($|#u6-_Erg=#RbD*pQdS(cA-?EM^Jt94qM zi zpCX{Vt=X8jr-4f>vHyQ%6{|xKO9-rhlv;P zecv%nJey(=uvaYA0nuziy@zT};Nv(~l8C1bAC4z_GGl(gXZ@SsRDQ`;W_}bb?k!<( zd@$4X9nOUaf)kM;@V1@x2f9rGKU{9TiJBr6hss4D#3&<ImI9TtD z5yMaCjby&BF@sv>;qtbqrch($@tv8~TeffB)}l-gp0ajpz3v-}f*I!u^l{tbKW1+QUUH;C@{6H$2-`ZO?gmY{mB|CM?#N@_l`uk|fkro~)g zXwFW;6f@!*sqE3lip2PvL7bYK%wV#c3z3R>$HY4(S#`y--mQBFKWTG&($56fJy~Yb zf-ZbWtKmJK>PRYm0RY|7*d?ZKaj?P*6W9sz% zY7v_4DmTouN%eANnx+)rKw;SJJ&WVmnm3o@iq7EfenjRc0g|{as%WT$lAl7F|<$l4KB-_s~PTP*n{0aZP9QSrSm6wDcobNk4Nx(W1xR<&Do|-@uvp-+v7KoMOP|Tr1+2zr)T2wX?h2g7nexv8z?>C(}Y+FHb)~NuA_su z)fBs2^iM-i^_1y;{uk7pvGIEA`Wt<03MK1(7cp_n&#nDZUh5azsG^c+;ugGQ!p;%0q+s$$QGr z;H2-5$axE?zb-`efXD9(aSZO;BPxs0BM`ygaa~VH3s<9s3HYVE=?Y7d0JgMgQ zMnhv*Pgc6^%e*pJ5B=SL`0)sMTO3%=)_2i4#2SBG352k5|7G=SMO=E-QX%~NvQquT z0+|G&rML$RG?&alg<3^S$jJ{xT-Z+sAJa?|r&QEb_EFIU?3@Ac;5>7y)UOayTjEfw 
zB7eI4p;eX2=9dQ5%RW|bmoCr4sspdup&;CyN_U&Ay1Rx>c$S7YA@gE~ESj%yAxhLP zNuxt{7+G6m#LIzc+}sLX@iS>%SymWlk?x1=L|GcK^sk5c_Cu7WK>O3zXpIHNmOrgz zA~Xx-5m+}|sM9cpb9#kBA4lnh^L=TpU|_N5mZ>KLUeORtRCne4J}u%32h6JOV|o|v){863X5+S$17Jn)S!{gl%_Q=kp!0RP0RNLI+e}m z_=TUvdx6@i>mIsLq|9P8)|iFI&n|$Q3yzQF8UasQA_3~% znQym@xb?TNxB%s0JRG=ptdB*qh^1DLQkEsEJ`Xd!9XM}pSG49qYjTe}D+ugEJm|5i z7O$30o)H^8eSF)$MJeVNsaVG<-~qB44C{dF?VmQRS#Q2b+_<7Wbacnq`bf|$UB;CrTCWmj@bNTZ^&>DMXVDhM5_N?+!I z^iLV~GgjN2Gm3n``PQDgHV=Jk>g^}Z>R z3f4>(PC6vDW-~Z5ibMD2(Q_;wB3EZR1peAQ7z%na?fk0a9@XLrBROtIrRl2G@o z9d~S?Fla{qQr|zo#Mv$D8@#_RFzh~&*~f~)MMHBerA1MJQ6_$oJ6NfhnFdn|eS|!F zJR6?Pcr25;-;~XbzUy3Yz4yF?iosItO1P`!m#&)!Wo*@Y*&$C)sCOU2rCVY}C+tF`nylUgdl;B-Jqx7ZTbu*3#QV?{=58w zwpZ!it~4vaFPD5MNd>|uk79N>U(8^vR@|yigDaK>zP+%M7s^Y2HMb$WXA&bHmFBoM>!Gso`iuw*aUITvUN`!hO4 z8tuZ7;nUZ2nslnA+%#=tXI1tgex?}4`JiED9D(*ej`nn^uUg>3;fJ4vTvY*L3d~qOZe|cWMyja3&dq0KJ?2IjO-8qr=?519&Q%V){w?$V=yFQV##G-u>DF?VQ$t zbBh|k$CAk{5Q^oo)|f;%9-5Rx{bcsUl49Zyq}f9=LT*ibwsC5og0csrKoTp`3OqKI z?x^nl+ILPGZ;@dZM~$97 zxk+*^{k3bi(i+hHOZV2c6vvq3!M-Old|0Vt@WD;^dTDgtg3~iFEQn9*zCs4cr@*Tv z{j+Rjd8TC8S1xvIJay|P?7YSbJe`L|a0;?3hJIDl{rPQ%yw;z-mV&NAE7Z8_F-m%v2YfF8oCOa?Ls8x_q^|S&U@doLY*aL)S>Rmm zcm%XYCT1uOMv>$J5fVp+ig;=cIbsh2|Frbwp!F26290Gt$zBN`$@z7ok$fI)M3}zL zPUNy}v+@ot%s8sN!SPU`Wl;V;g3n-uk$MEIn7d$=Lk0JUJdFN3SZYT%~> zUu73AK9=i*6QaWtmM?y#{K{f?2Zl`1X#OP~*+neCfh2zs2)QCy%*c)-p=n78L+^y* zbgOxQHljq!S}pnUnhtpeZck%5U%fjP*95_E0mM#E?8(QNuEJlv+Q+tNTQS(^#9pUX z&I9!f(lZMB&|hXiUuL3aMGKdM%I^?=Fm-NBa~^itP6BqJ6b?*E(IqXAYH-)5%YN#= zFXb0t>LkTA_8~e)$qJ;w_K|0s>MD*;Nm@Kq(=PoZ3Y)gds;=tH$Z&O|NgxG4aD}DX zE^+wx65{kWMdwH&8M+c0vy9Dg+|JPg#l}2c3vDJE6iZPRP)k$Mr>f^$OwTSP5I5f) z%E$!+GHk}LiF|kCzzXZ~+)e1=c-k)kMfft8APh@+76pw_Io(=Rtv?crh7l7&Vc&cW zV+%nelY4!DPk={?rHWsF6s(jgex+^XV3`xwa@Ty}Q(>Xb*qcw|tX6JVds>&p23f8*jMTL$C6AujCy&~lvk;41u5<717SQsx*lM7q zt1bpe7}XY0gY!V@F$~dS3{9C#%#{I>ZS!V18+5pijynBpTe3d6G=2F=M-Xj62`NQu zSEOMSz4w&PelAQW(~5X#aziT)x*phYid(=y$_4ecuE@fH4}JAjq5hHNMB+^J 
z+WR`mSo7<$*T`36ntA%itM8VNZI@k1ARx#__{d>qa99C{*(V_H%-4(D63dmFR)#vT zmeco2`IVkJXVGWgOsgOV3Ku66h0e7JR@6cZ$%{gigb{MTau|+KGiUqLUkLsoBARc4 zj#xBPY70i5Mk8|}S4WMjS$Q9fX*9{&7*Jjv@v1hikc`q3lU0BDz9cN4&k$l!{B_gF zJ97l{$hLdO>UF7-?Jfhp@HBcu`ui)?CHidd(~+G{>9f=W zJ0vD<>1GXPqw4CSc^}22WNmSTttWyY{u1&jpo-6aK?Uq#d`gZ4{+OpG<$xu}E#QnP z%^>uJqP}(~RkT&YVZ{F;B-dO^fn9+{kWs?6NR+eDP(&_vh3` zn`~OPkLJsc82a_q8F%7pSCUs4O~n{w9>S4M&L1DSppU5~23+l&_zxuc;IMb=GYXfY z8ti4-uZ;7*UlRD?Ec>D(do1i+~(fR-{+M z1BwAHxaTYpP2_OphT20psit^3EX@_Z5vaPMgI3Q=mD8>>l^v2Mq=_}ExU9`8+KrR7 zXGrjU@ic1EVS~$9+$fxmrSJa~Fg#G3iptbkz=@zZZpO?c`z){*%mU;JKMlCZa*Yip z+W9P$PXL{u6?n@4wLbeNW+Az0WI?+kbSlkus-di`+*UQ%p}KWp-|3>{U6N)xhA z=kKwV2Cw1FjkU^?uY&#CkM9(c9Ht=KLJ!7@8u!Zb^0RZ$RQk`3RR_uh7O2N5^!~xc z+EdY&;uoGMTjJxxjtvkn!DhW-Y2jYEu)K$NdFil&o>nu1HO%d{xU7jcMvvbE3;}>4 zE{>0k68ey9^`vS#tAC3oqLAgE*q>x&`4N*+wA4C?LV>1rV(H(6e10L=CL0-7p2Xku zm{UGp3Bv^^)*6-2+M9CeLTPP9A+kN?oewrudrQFk0-pw49%19CVQ9q;uV~W<7LAYWm+j|7Bno^JzDp+Hc!v*bsrKO=908erD=(wNv0C7lEPeo>SJN7 zi5F(d%*aXX_Wq=}w8Vnl;dKYp#UXC|VdQkJz>Va2mJ@)S3$c)+j>b%D!UA?1|G(;q{8q^kY4xJ+)F{5r`20zs*0 z?=}7Wq}M&_VJFt3!Fpd3px*2>As}|=y}=(Nku^F81R2pJ5DNr z0>fjnI0g8Kjh&EJJ!!%Wx>g8s>*vN~+4lh?W*p6=xWt6+t@FA{Dc6THjd+%~TQqSa z#Q9db`9F1|6`u1~KmFre7X64h$TML`H^2N}y$%n%2n7!u9qGLc%Mvp9r)hzGMCK5I z(=Tbga*nmT^jvB=^8EEhl6)W6^qVYTn){Fn|L$nd>E24>Nz=drTrkWrjBw`!WuZG)cOYz$3Q+5zlE{vO*t z^dnGzFccOT<8G|(QkHObCL8SrO|v|eWsK$$$G;%iPTy?n!Ry1-_X`gNQiKPst1*LU zEVtu;_e5Km&*v%J2o>?v;Z*ZZ@3aM|lBM{$pZO-*eXH9;1e?W_ti-WTV7#sG#6QqvjZVk zhPzk2R>qtgZ}>`Tup9hB%qB*@NnC^sKCch>tfg=k=qUh}Qsu68f&%ocC(HKy2beJU z|I|r85N$2Qv6=05VMb!tw-Jq&!-yLs`b$^%i3xF7=<0CReuu&GFbOCi27U#7dk=dw z!cE5V-I&&fO8T7|?58o*_jFyal&+#1i1MEEzw<{u3+&6QRfAz)BTLU>@I;%^0-jnM zXplc&_EJ2)M3|kH_r+41$NL;O4iSJlQ|%5upL?q*o&gy(9*QVJ5!YbwF;lcz%HOU| z9D0wAh`*AaZyW<0Kjt;Ry_~4+b?R~1$19|B$$36&AKIVB%CwEe(YB2x5xQmByxCP7 zsovot6GEMF1N0b2vOHy*-mDV~JX)cfJx3Q^Q(2vo9v54j>QL+#LBM)}>rwWrX*s#E z>bTa{>+#HKA=iqQ>icDD<}m($ShV$_;j4kq&NbhmfT({kU(CQy+XI2Q!m%O3kF!~hmV~I 
z-)qU~JEV9!UvCM`M4oh4FISGBR=KwA-}d&o2Lqa4#G7oFC~kk$UL8(k(hYcRxrcq^ z@zF8>{;t`td4(qQDk-VC+NJzHQfR>5sffFvpvT5c%zwut^$b}wr=g`!J0bKeKelmv za`H3A_eGoc4t)Tx->A3`@gL*um^1G+pCC6FpLGM1DJ8uMj-7Tn8vGIuh;Y44r-@Xq z%cOnw_$k_+S14Ofho5Pn+eJ;_Ijvdw4$+i)hvRk?&BtTN*<)&vyBhUrd&vkbPLXu&_0=3#tmh zy~N?Z`uwL{hofKqsbu#v*f{k@%C{ZpCDY-Hh+OI}9HBm=4QONH_%I9J&(`d(YYnV2 z>~2Lc$EVvprB-g7vPIKJR&BiOF+C1zjmIhBNNRrl=fI*tw?4(!+2yw9>hfx?;P_AVqnS9`THQt_h{igX(=jY+IK+xGDn?{)9L-FpN||@3WuW2eT%t|3b1XA z>^_@U#*T^oS=t4O@8HAAo(3&T-8kRz$W;LGS6as(e~qj(!4F5%0Z?7n>O!XF>UY8d zR&b?n&Jb=h$5Dd~^@K$$4+%`6tu2|LpeZpkY1lqi7|7h<(jS~1!E;s*?@e*Gn_8wK zLXlPWN81u7RhdihCKfxcy&m(AN*7q2uoUU9gRZ|Vbw79-?$%Li%-s0c;%G}#+pIg; zr_Ll{3i}luA&XV)lR7JxG-(2Gc@Gnf+q3`eYsn3L-ItDSc*9$ zD02#&ZMa&N#Ug(0bG+(~$s$4D;I!|XKyke7Vib-Gv3nf~%_c=V9(b=`jO@{^b$t!Z zCf7in0u<(WZlz-K(P%=hAkLJcl?>fxK_VppJvN^g%1HaNG;S-^O1`}DDMNM1h?t_i`}1odco{`@z&sWx{NqXgdD~Dyq7Q1=({`2asl;nsLi+6GR(?xX+RbaJo?>k?0 zy!Q?$b-tCSaV8bG*_Vf+3)D3lqa)&7%Y za~pe&At0BejnL3x&Fh+xJWGntxbT2MKewb5IsAjzpn7X*x7F)#hkw_h$*BPtSofC) ztOXv8Om&f6`tel@-8vKbti$kC8>zvdFBv1|zFc7Ryp+(Q;kyzTlms+Zn%7JxV>-;& z`Ly3Cq>$Jxm2xr{&Q)Fg?(8)Ef=4v#OlLlJG#6lyFN8IX@P0Kq6QH6kjW7N)UZNh+ z*8isth1}@GFLT&p|SUwQ+7i3tBw1m8;e}v)0RV zc+p%Z_z$|MbfBeW&QDS_d$3HzGP%Wj@w6ZP>|_w4pT#T#!&KC^H9WjRG>7a@abE)G zJ4o_SX}*%JfSY+pWU3U%gQIHI#zuoZfE(Wq+VQVG>U=T*s)S2Oe% zpg`*ScBN^W8RjBsq67(6o&t<`Uhy%eCEboP1V+2)PM4+W>XVH(U1qyQ^%+t$3k9w& z$iFWZD`FDnmedaD2Bks62#1htz0E|JnbhsEk`_3o#Ez%NVbV2Bcafon@J$ebvo1$d zL!*J4oym_}q_kqrjg*u(b~sJVmfA;uCKq70J9!kKAB_D>>xLxx*z^Mr17A2>{1OY-)E&jC#gC?M3B2eVDk$d!wmlJ85*p zZx`L<2o(|&f|Z4Sh3O?C&Da<#bB^Q3S``8QbG&)j;HJIYqR+P}nPYJKJYNC_4%C0? 
zGpOvFm)9zI&$~zGWy_B&0ckYN_KSrLJYA>>*dcXM=ze65bAw?AT*U zk3nR^75KZLC_~-??#?hDk+G?C8p5?s;V~r$X}9?Xsqi6D+{jcJKjLB78gn!?)6G?r z8jfm0IGHJKAE0lNra}MSA^!HdY_-%RaV}lR9ck;e>HAIrjm>F5`2lnd7 z?&wJ3E$&lqBYxjwuZ$$cvoZP4*9|#D7(p@7A0w60D^6s8zoX-VrZJV!IVwf!s+p6S zC9(0e^%X7Woyg1nyc7ogrC~d*Tpb5%|KXDf>1Xayk&jdBbIE1PmbQ70YewnLUn@xa zU=#Gd*HI=n2xgfOt=5JG2!}Dv0`CsZ>w{5wxN0=*l!l-1%0Fq2o7yu`XEJspR5cQ; zPTMZWj}0}b{I-?Nhu3bYC;xpGcuD$PeH$;0K~{@KseoP2-w=+CfMsp|ltWDtbOsLB zVmk${BX!V6;CrcI00#B4MBWcZz&B*%G|BXAs1<}>Q0sQ{{rqdYsP6WK2yr>h?&BX{ zpFK<7w-j5+ku#7Qi` ziWWa;6CU**@7Y)$E2?b_NCjuEW9#Y;8(*zZex_)fq!nzIV$7+Pj$?jtM&rBs8$(da zGU&C6GqpS%2QS?6HQ@G0R|Dk{+%-UyBBo-qtmH?w z^BaDlA-VkdV;gxjzxFiw7O9-|(kvO|SPmf7|=TU<#vdj>_1P9BQ9{x~6! zil8-&KWq8gUYGr>aVEkIvxO{Yrf!NNgHO>e+1S=PD;ehM?n^V&PBoF#Ddh?4hM`QT zWS0Fp*xn%`Ia|Qpp>&o95G&58gk&Rof_rwd*=~Zy*{QY1%bYE0Z`a35 z1p}gXWEt}m-ClwIAdz+H7)7viu}Z-kHZoew`bv7*JE3uai3G3P+vmTIU}BnqQZ14; z9at0ztkj9a9)pE#f>^(@1tXhMT-E6Sp}Q-I!2#lFoAXNJj>*trB` zKR%kag1B9Yida#ZKAV@tWow+=pW`l;yA5y0<=+u!M~j8ih)=6Ie8S~=51WA96x=qQ zzx<(V!ij(zqiiKm!%@;G?$@MbZ#as9W2>|!gg1_QD#e#fZnTx7QvUM0Lr*UTfhLJd z`|VGIXDkxarH;WyjkOOpQ$9n^_-@G&2yISP;+cM(ulcb;G`#@@Xin>-fZC?!2Z@e? 
z+WcAKco?n;Gu`^%;(^L|h@Qdbh zseMJ(J-dw9T<(|Hgff#Y^IsG=VH3vEXUEQne&ZQcx8J4x%2oZjfn+Ey_8|cU#fY$Hy7>cj` ziY!dO^qp zBAOv#KC#B49ggPy>qGL@US@{NkS8zM+IzpI^qO9iy!VR6v7$>02VJseC|pL<3Il1c zWBtW6zGySfYJ=G7Vw9{T^>{PeTG^(f8cq{6y_u(DwPU@)WSPm<5^DyEXlTf#J$XykWTsK}tG-9%ZQ8`K1(y{p(eUAQrQ{Xs$M zJ)s}3t^QQM<8V}ce@(zJ0|AVTp{P>m&Gt=&*x5lH9UaG+?l=7wpsTl!E!a2Q;A26x*fm}#{t zdUYqxf0I%KpD8X2N7e<75S%y2Oz!+dzKJ_h4_ zQ4`F(b}N;k$l4L>>Pn)^RfONC9nV$#lyqG=(vnkwb)|Cgm+XHRl(4To8LSKX*U6@)P z+bU(UHo%DFQ2;x0`$914#5OyZub1y-{J?u+MT*d`mBoIHIF{;h!RrWcgs(nlF637B072F=tcI%L zFaui?U#bz)=IcEWsxH39d+|q1N&vck+7e^lzUnpW*Je?=PnmkddqsVRz*dWr*Oqp@ zqi_ij)s%+=LK_vlCIvsNtckjwOsg+*dw6>r!OtK$Ois`eg=N`I|t1583 zUGB8TQB`qQSNRdq&CP|;C=D?btXwr=;THn4sVbva#KDcDK8E0kI_PuefW_T~BSoVd zVPYfW!KJ5~-U~BoINYpFV!WYEs_>GA9utT?OjdJoo>>RH8tf>rJEW^x zkA_i&#cmR1#~LLexTjN?PsV{L9FAuX;K7k2%f%?O<|^1u1znp2eKEL|2I~+1GUa5e zYsC=S+}mN69!b>5%vw_Op3+pT=xHh#rw0Nl?*~p3EN_kly-s-}1>KdBZE1}HOZB>k z>Qn{YtC1+0{(?-$wVXtNoPi<0w(Vp-TzF*vdfIC+Yuc;C4GbF-&ujAh3bwyp=iWCh z<_e==fhkwpz3I~r|d=)5pwzpa*jzeNtlXEk`I7Y&k2k4U;$Cz)c69_yg=}~9!p zVm#8vF*0sHe?4c7D)|cprkdguYVwgB>AyZLxDDC+I3+*2@MZ=({yr#wYQVr2WNB7U zzh;3p8aMd_6Gq1Za3rec9>08}D`rK|Wsrq3#n|)^We;kEq_u4d0|QwgaDx(6Gy=nq z03KU)MXJ5;brt42BSus~Dt%bgnDS+^D}jTxy1=SOSSdWSxkfe+xlwKkf7XOjo3wM` ze%eNgWdK&_ui<2jTiFg%P=n9>%0W4&AlZEd6JW=^T)g|KH~E=c`^k{fkj(4xNw?@G8 zv2~}pkXeTt@%6Kr3BkdNs(0JvJ<1dYI9o!MGZ1?I+Oo4%t@YBfW^(ZtpnQAoJI^RY zKWo_skgGx%sc#x#r)aso@3}>G)WP;4?tvUrL7+N8&aM4}r-3RTilZ-LxzV%2kxn3H z?w7!R{~bkA7`LCE2h!#{yX$c)fk^TRxR>A-?> zwPWJj!z3axpxF|#7^-+^8oW!)!%yve730c&$u!!-#4!UzSthV9`26Om1?@cgQU?RQ zlYpOvZsz;3ex7q9q(T$xS+rR^M{zU9X1Z0`P;@Pd_Ms6yw8p~*-ao#@RG}Al;gsL5 z-vT@b?9a=ebe$J_x9xqaI@WIUW}Nrfv-!v7ntDNeub#qKm?*Zly~F^8wWQ12w$D5fP4)p& zdmntT%}`XKGJh<9<00GcQ_W168p`Q%yX}?t1J=j_4=7UA0o@_57#i2ZAE8$7eVr@G zAyM{+eD$4E>wu;Z=*JVjKIBF+AS4lf4TxQ+N!q z1%z+dd#Qe+4bC)|#@Tsm6oGnckbZi<$+I-nFm~UWLkb4EBo(Z8)Vq%fmZQ}b6@Py% z&!>{2e>$4U=C8Dyy5fe4kEhl1TDM%qSvpiOLA*Au(l0F-!u(Ifz}xiVyb)+ z!!vq6fZbN_=ueT^8oayuyV|1Myr*^ZEboq&$ED((sSskKCM6VG&VLgVoy$hycA%fm 
zhE;c_+c5lu5TktsuFfC<)x`7Na`BdHuQ>hM+11C(KY8{Be8%aZ0x$fo=lj*-vttKP zg^BCEzS}T3EyqQ*4K}$G+O{Wj9whx9>v+->eH7dHdk>oUKm9i(^&1QTk61QFo zGytxNvR4Gp0X)x4-{7y|sIaDCd*zNvH`6#vT%a)bvQM7X8yG=K_Rg&W?mDM4AbEk? z%W8rb+PKN9iJl!KS8EE)zh%PD?!wet0JK*=v2QibeP)M&ihrD3{PFJqBkYnHj>1Z~ z*$mJ2*R;ny)?C}>MukE{f-d(z*gSUW+?YbXg(jM%OMzM|f%j)ex{Rk10TXAuYL@BB zoi>W;@A}`$YmG=q>7OK_Y&$(0+F1J$_3B`C*2@>T;wqg8M;!G zWfxb}kzxu3^BfplNDve>kcBxD=~SlF!5LoZzX~F8g^2HGie+m63>)l{#MQjEy0DHj z3f2@N4KvM7d}({5ndXyt`F3s2#F<|eY+H>$&(E2s&2Qnc&yR3@#32)TNgZRaeWQ`O z(d-+r&~b%_gB*$wj>nI)H-sbs2_BYWHjxVA<_(63*3%9qTzA_WJ;-GLTPgn17_Q=TO1WN5~GbCJ`cBR^HJzqlFhQm1NYpK<*$^i%Ns|BEYxzFV{kv$aVQ7e7qjw=LA>|O#_T{_eo7Z+i{KLMv{IzB1RcLvh3a3@sj$3Ex#i#*t z$wgcLxsATf8^e2*u)Ccx=Q+RDk&gBO!{YbBEI%AA!!fSjE)QDX(PKG`fEmNDnH~7~ z|4#G#eccxE6gZPn39=|#sN(Dl&SV>Yc=wDPNMa?9PRYOYdL1XZM!6@57uxA zmsl?L&Y!eoSyOpVE-2OZS#0N zG2Z>@lx)VA1c+lm4C`ho+QD(0 z>XYp^=a~7mN!g5H$%?FqjMXgpRqKz@%46mG>(Usi(1kY0Se?Ay_x(pKQh8$i;9?}b zGw76k^rRzJK+8r=Dd--_Dg7U*=LJWnl`V6bmR`eL{v5>B9>PXukqnLceI5)~MJ;=* zz8mp*5jvYrftVpp(OyXO$`gzluxZCMd-rnGPTtyoo4ViHcwVTCe=smvnO~=J{NPf} zd0q;pAjKy3#b{bp92V=z?GamE*JCoWt+bv zBcjr?lPYQ=PE5y@AcZ9X7a~#sS}8s4<&$l^RP9~&_zz0`%L0@j&Kvo>f-{YLX;&mcRa_j3KgG28Lbk2kXxQk}< zAmbyFDb&z_s*Zw|WsUQDe>60fS-@IWU84ehliKP(dZ7MMSB6`maeWdvf_PmfECsI2 zqKF^UJuTR3w%l%^i=D~l;e;g;$L(o7ls>+Wn@hj7PeeqnDE<7xw>g@QuwCx`;JGu> zRP2^$^v!#oqYf=NZf><%)ypWS?cxGu2IK*<06>h;Gi*-Zf7AJuX#uU-nUSLUpr0b= z;~(pYW97OOd+GZ>`A8~_r;fvms)_7ZI+tAIgh_<*=ec~gHPEkc)7W#^k06>hY%l*F z0GwRa@%CUQ!>~G6|D=cg#!KtFky5gQnnrSU)NXGE=2;A;_jm|< zXlO-D@%3=W&za36u6?5Nx&}mJgWuG|+ zk6_>qbq^^SW!5SGo(aCc)Qw(cJ;Cxc)~@>Oq`}F^*~m5QAsUhJT(xBYE!F?ALJ}qPo>q-aNxOVPLg@Z zb(V(6ZnZ6nyT0ad;3nBoHA+HX#EA7qE9q$qpkQ;Y;`zKccb|o+(^yJjt1P=trBdSQ za`?PPtL_4iST~^lZ3W#6S{CljI}PNanER3oqVNI_JQtG8 zxwb5I%NRq5BMQZ2av+@$yZ%3_-ZG%7u8S5{I;FcqS~?Hi4bmZvbi)DZ?nY9YLnGbN z4blw)f^>s)$~O`+2-n-H=xnKq%BUP*|B&)M+mG0($8i3$7`4ewgaJMn zaf}g@L>KdbvZtz^lE*K#RH+XUq10{K3UOm6`FI?8;sBv9Gx7e@(WNW`#gw?T;@pu5 zgOw7%cYYAr<=Z#-VHiCS39X&`Zoy+pj)HUO^w~xB@z`I?*o7e5UkjmaIJE$O$Lx4~ 
zEoqA1S8ia|={s!<={+bR9;0ab%1<$!X2GwWQH#HD5~?($&ez`eoA;O+`-$+4+=q!M z_9CymZ~EqESKfDC$%SybrQYU>?+6|h?&eEBr@04jeG(A+II5RG^N@wJfSTNvsWW zXp$N^l#W0ux)`K4Ey*{1c#Jy+&?tD5J5Hpug~EjGJkfc9_WAZO;Ip(KQ%&R-TDg|j z=@a-nbN#jE#ZhK0F$_GXJ~jqQ=~;=(Db!!3B%YqhWI5&>pGSL6m(1iJ;77yy(-f75 z3XYHnW;(^r>7Ioq-$fkJ^fDha+j~saTW2vL;v0vG!p;nrZ)$LsXTI^uh{WuZ0e zUc;*O$PIMSIEVk_l{nP!Fx@td6Rc{_1^3pbrw<98W=aYU3W}#dNt5=~C}Ss_+nn9T z8MTrK#)NObr;U?NmJ8B=5(2-0MKC%^X~BhVV$7UbIkAQ!?8WdzjjyVUW()CnA#b)` z3AJ0^IFaln3ezhrn<~HX$3t$}Hn*7dacurqggYACKPia#W0S5NgmfCCe@=WiOs?t> zrZr73G;bkSMe4lS4Q6turzq}Zq?K;(y?X`2@wLj#kU(wvZSG^$)>nb597eY0yO}3!K5;NplU{)m?vJHTHKZ?2K(K0n@Xp~00v z_2mhF3Z962davwgoZ*IQnIYrtI0IXnU<^+Oyu{*^2NBAJxx5lt7JLOx{rGbK?U9?S z!TWS;Ez=Bxnkj|8+0qJt)kiSPNEOi-pfOxRPl-S3*~b-L<~i|!1K7RxTSyQRRBu;; z=zmx#81PY!$Q!l%ypZcrz|*UH#|A0aENszou*c!s)~5_R>SL6UzCQ?0S1ljM;(*4) z!hQFBm{w~NXlHk3V>E$@0X^XelIYr$I}A=_+)d_G$~$Z$Vi~I%MQWk?YV>l@kN$X!e29wuQ)>i&u9Ia}z z`j}U#`JR0EU{&k$`QRLT`G5wc3Ki3bnUs5DeDwVH?j)rou*@=w;kF2lQU$G6>N!u zlJ*XX%buJyuriDbN6flaXajCfdlQal+zy3|mUK#!>As3xiJYBvcw12A$48yG3*{~; zM4)XVK!lV>qs`no8>B3)2qf#a6@-8s`Yt%DZT&6F*GRLt6iR{Y)&=#S^*|=LLgyMg zxRzqrD$>~8l`!s8J0C@>Yk0IBZk?mRXr){gyx$*X3NFXH&|sbLr#nB?xLWq8YWWbE zuSokrbmKyPQc9~$$kOw4)si(L9GX5aklOlv{2rY>LIgkSS0_8Z*ZU|=;bLNCL!hwZ zLRzl753|6gcr=}*v9LL*iw_+Hi220-0yYYv)yXdBSJO~&p3+4|AD^m>KiauoI`0#p zd)&qJY))-+)0RuboXQ7Q!YNN+&I}(gSrTf6%5wYNWLg=lAL2DmcSS-8^0~rJ=2ThQ z+`s8ZrRZiaTKqC>@8wj-p(bC=Bj{^fh_pfo(bJ6s>7O14(A`fIzOx~hjM7!h^t4yZjhb{g+vC!JC8Jt;)${E`D7zQ z`(D_ijAD;)8|Gpy=UrA2L~e8Bef;Gu4p;sN=~L+uih^&xx}V&FpC;@-kpX%TrKo^g z(Y+zDs~D%Ebs-0e!_a1~rmW5Wqb2712W_^UA(P&t3jXMGNtw3?R{X|I;gsgDcB~6_ zKXWA^&M$S+N%UZRwbmIFuk9#Ca4-yVFe;6E`J~->5|Ge4jlAr+YBHjUJfp|wL=~{mgv{5pUo@X|!d^#jgvJ|nM}ElT&`@GGhaZ=y zdGdIq2m3S3HVtD7v-1mMh=GfGSr-;i!!ZFo&Pf_me!*(sR#3-T-3HUYsuKNF^68_Q z3t40p?N%Hk)_G57;`w>YeN`3FHw|;WKvNyAwU_3^y9XH%b_2-*jk zp8~^6fpZ+lneg33+!8+2ZjRgdtA6#7H@xxFw6VehhzIDAcyc2=F)_+leR9P$OY?f0 zk^K|oDBzRY{>p#qR=ojk8M@ND-M2r?uz|@^P?HxRDI+05jk^TVEVBzqZYHSsA_LCO 
zZT=fGAs7z^fgIdE3>=k|@dMV;gS&KtQp<^Op z^Za$3UWByJH{dy|5h(IRo&G<)EkG>jZ#9Phr`p@U)x`gwYNCKO2<-n;4fdtllF0vE z%^qkH!W2>@(cch{JCJdN-~|10y1Kpp*Ruchi!7Y<4Z;FbPn3c0rqMz zZvY%FVsi3wVo}<}&ey15U(<8l`HVNCth&n%E*U6Jb-wObQDf;Mhzd}yoVHVf zCGsq5(NW*)djrD=iL`#A6Xu#5h;A9*)FUNU;iz%9kFU0zeTc3{$Al_#eZy)jDg!QP z&-tdV$n{EvGcV#(r08oWs^vlx>%h16MNQ^!aeC5Xzi**#7P*LdV;(r7lsfVJ0jxgR5vRMHlOMJw_QspE=BY+SG zlJN!~I%l$~zN65_j*UFG8Nx}Mg5CNZ9;dza9&1mkL;{m5sgrDy>uTjdqsw}LyCPQ> zyrVsL=hI^{Wl|v^k{N_e zQ33K@oj~xWEg0m0K&VZPE!E0<@#viNvpmT$`K}&bx4FRP z>pBWJf`|-02w!(VphTP|s;hx~Ac!RrNUV~AFG^C&YN8xa97q$B5i`zY!>}9T?W|5u z3?ViY7HJQ0h2FpyZi-m!RZ8RY9jwapw(Gp;slN5GJ?*2(@d`~`%%9dkHr~T;(j}T4 z6jQ7QcOh>sE-*+|e2B>~FY@|K2gn2pfzaOXhRCbClI|?Yjsc*Mgdyv_iUVOB?}5O} zE0DvGtKO5u?kA?#Zs0A_Bxgihe-)$FK8VOL6TGjGFf#eFNF7W!lZaJVsrxJW_)I_R z>)>okrbiG7Hfxy^)h3uL8;&P};qPQDEW2n{(1nhN^9<&)d>c<5` z1$WisT9{)Kf9&LE1Z1|b=+Q0obf&^|jdwsj_XvIO+OY@a$~iST7SLd516ZrT-253A zi(H>2vh=0-_;-UQ)#>by)OXvHLCQpsq6j8?|`dvLXLQTYZD z!n^UOr!8|1|FNSS5C7b3YzMQE(jgEM!yuZW&6@PTFDpiUTZydrJ{?yx#`K)Cv^Tc~GLlG|6npGj=8Dm?%?0lgH%wndHsp z_48{*Br))kG1;?79JJI*_0?TXjGDNIaxlqC;ondaG%OnotjtI+kbT2SgQ%HvPiCVF zd_iaf*sccPh|l9T#?Y_C1VVs)iKIMKk%I;z`z@1z=0_?^W|}dsbX9{QU75USzOV{9 z4-=C7%Ji_y5Et+QnlK0lx_f^&IbbZ&Ah{R1Rt6ZU(M8PouThyGsOVTN6{>s9ihV-p zJe$zr?fSy;CC!l^;?8VeiRewIFiBvQ%&OI8EMeiXV&_(X3n?fpv5*~oDPrJUNzc-A z!KCRY=V5AEmNSRsKh5ie)$F`V$BZ9%Rm!8p zKh`B6X|0^kCisPGx#G|SAe=j2Nvi&jSN`rW(R8$5+hSG(8w~PebCFxI0OB`UvQq6{<7&@-u=}g%_p~kV zPC#*~fTEZCD@ZBG4m@Ye>^2*1D#L<^1}?9o(kxdpjA1IX`upoh;OwkfuSa4Rl>Dmj zRFO@UT^u7M3gA%~F<1UbeXgdh&(ntN6S)Hex8Q93m~aP?MR0*j?KtnBVKrSqtV(IT zehw-nq8}^<9C2>+$7MQ|7mMwmZ)4`ZyW0gF0t*Lt0}4$Gs)Wo-9Aukf%p*&Qz`U5< z96QUuzb{uhKC(4MPOajdjV#gU=brO&u!?yHnS=%-PITT+_5l$c2$Dh&IMC%CVwlvN z2h+L8%XLX;2v^O&DEez0f)_kMNSOX2vI)@E#JK>mGN48n4IrlpFuqpbNBq?zLs0y8 zhm)0uMDb|lAd;26`&$F_UjZJ&-vV@^{|eB`{ye4?{qwe{4P4-y`H89zBrblX!8?A)foP>f{ZUNK zpr>!z!v|+z`icO6BQm^z;|)lO&IZAyD2o(ysBTgoa|bJZqIvfc7tL2`0(cWgk%UpG z1B%)$$^~^Ho7hu9o;33n9K2`7i?6P*P%`0RTbleWQrAQF?l)1Rweg>ZDAtH#+N^%8o%|oM6+Na 
z11>Ojjtw2rF?1u%h)KqnMaj{~ICKkUz64AIHtOjPF|CdZUT3VW~$ zk~nq5gw|r9H1!_3Ljl_dI6$OARdJVsgm5qp6!%JRc^EXoI|vGiU!Uwc$Da#-r2vIq z<%>XqZK2gYVOJ*I0D6-~&Zc7jz3Ahyf6cUDWWS6nvty1WP8SKoM=RZ{F1%IN`c=vd1_$b-7|8B!}ftv2|&;?*o&l z?rB?19#ssos<`@~;_M>1g!Ia27X9j>m81(yY+;5D=ia5jLE9cg7j4{1Py~~O zb)bY$rKDI&hOazeo2^1tC6X>+=5@Am2jr!Y#r z8)LjFR3hx_Q)kPM+_Z1wm)Nf?w_JG2j5_+cc6T2`J1f!yCvYs}PKM zTXwazVU=Y$_qS`PA{A_h`ML9HxN53dRG?i@518)x)=DY~Im+G(tMenmS=F}ktp zzVw%Xn?y1q1E}gt7L%-*yRJJ5vr;i0T^}^FYUleOG{1JaH7KJ&D1H2k7_rU93n~D` zmX*7v4#O#EjRsdC&Kx!b=dw-y06M0qmKQNmkPZU5(7s?_sum)13R24|Cd3xoKtp9B zOI!NvO0CBa?U+X5peFyj@>4o8e=sLgvn&udoFA?q27lIfB;7AwqAI0grd0NAs~F1UJ>I&(W^~} zClUa%$%yyU`^pU8SUQ7k(2Yjy?ttxLj(ldhQ8Z6Tfs6jm9c+OaG$6(hum?2{N4aYH zMca3_uV<}y&Bx5EtO)m&Hs^+Q-RduG2%K8G$%ZkBd|e|Eupwd0-W{*AO6r;H3q-5U zYr|$O6v^7DL4-N0y`e&t46VQl}YBIIl7rXb{OqKyTC~))R5e! z(#*2~?(*YL9j{6DGj*hkRh(uZ^Ni%7JRdWwCe~MiemTID+r-VnZ>ogtxhQ@xhlyZr zJ}aAL>iUE=!iw|$(cP4rZ*-_6*X`UJj!NSkj>3nyr^)!0h$d*v4xd>*@6#Vp+OE8 zF1I%)iME)xrgV7K1Dl8r=U2SXi#`)my8&5Bjt-8?$uf~;C68oz`y(p`gj<)9nTD*FkDK)lIB3fIIQrL%xU zxr$#J1(PIMCT}hNm1lq9O6_* z5cyI-MCFWBNe#~UL<_~eQ6bABJ+;kt7m!aDA8coWF}H|z^y1Eun)%M}5+FXY3p^bcK_#p0VJub$2YznF9n7AR~ z{=a4r5h-9DQDcEtQ}fbrpkg7H=Dq|xaskJ&9u{k>UL_63B?xvm1FM_}6V~yl9D(t0 zQ8XTcK)-B>B?k5fkjlDP7M_0Y{)=yFECL-R0ktY3fk(ya%kEmw#{y`~w1Fmq4cfUm)~9STRNL?+FUY z|DHhn_e4DAe+BUWD`5Yx08lS|u^IpW1EBxe(D)BC{f(snsL5L7AF87E{wrNh5j-34 z4~NM#|CKx!L#TPJ_|MqyTp&6Ew9EwSf6#3mc=4B)>wEpbrU(9&L2bw)~iOe)f32vZ_a$%I0Teu zfvaZ0|Fx|-H#(Z?Mma6{Kbw{8e&9ycO&}u9mwfqP1R0)9TQ_&rH*%Bc36WyOW+Pv58Sf&OD zOa#YM+(&yrUG!_3u2wS@>`3H|&!(vx=tAk%_O8rkjC@(%fJxJGA1m0&M4zS;*{v9K zwaL6D*d(-+eZe^h1QCTV4&A$M#K?4tWK{;w=p0uIBv)}@OM`P$gA5m)*#D?6U4Cq7 zZx>(U2fcL6C-BuT zFkPV`=6BagI(P-Rw|u@V3Qo{2*55ZIYr)?sb=s{9&$Qot)vDq|`KmZ==52_KualKn zoK`gdadYjvTG3EBpNm=JS(<=Z88D2c6Mxn7O9c^)I=aT3{e! 
zPpAhZWMAG1G1WOs8am1{j6}uMI3nXqg^n!j`#-p$b83FNbjn0=@MO)Mm9iSA4(qS8 znI8y7E81^4tjO%D{BJ4W2o~ApY|`aqB(@&{u_l@RHkR z1}-Q#XfW2>cMk@HhfO#wR@;60%2wOxHlxPV11hl8_1(%ui5w4b5YUypg+fbX;t zU$QbZYjU__fA#ltYC2Ed8P8e5-FRONDeS%Z`4aJa;)``^pCzbYFCR5Dl1 zmhk&)un8EMgkThrG?LHJZw$XVM9Dw?kRzn~26X+A?`$P%`(0=ruwxVmJ?%in`lTD0 zsEwEZm^^hfFpW$S%+=7n&#&nuZ3Bb}YND_v7tL}!9wO8Z_S@`xXAi~;h%{dYBSHuj zE1;o`$1F67yDY-Y&7197bY9BFfl-3r(kbzclZyS+jPg*j;d8wel4OkZX-mKNsPJwE z`x&~LEuCs1l55j30k^uR9 zQZLa;u?^Ak0?c5;hVP5)HDDPPWN5Hsjj4f-$O;U}4r(_UIE+9-|7i3g*ArLmR&F^{zXrI~71gDjqPkkXLfEKD${FyMJS2*B52=j%m~yJ~_mnZQpzA z+QE?YBI$y%nl&kqAt^RpyyAN_UyhqaB9*nX34iVf?dEpSq|GE{2;F&taGV%*${INr zD+9=(%}MbE!&ESU;`%aTbPD>WyJE2b$;>2c5K9VuFlS9D^i(;N{d3)&Q9i_tX&%){ z*S&Mb_@G}+nK|ihkc=v$<*Lec8#_%q=XvPa`5AUC{(_}VCa}v&S45hDb~EF#{h8b4 zO*Dyb4u_NITg76X^MsYo54Gb_g~|xc??F37R{6iP`8beP(zP}paW7E|dc3tRpeL6(e<@)yGd zFF}%?1VCP(uKl&)8(v?)={W4g5R)-gmEG<41m#9(kXr}PVP%{*IvBDCxZ=+TG83F zCP_-;9|eh<&h|4sR=CxN90p@#ERbU}7HMQI%KjYz@Dy<5aP)3EmH+A}n?=>Bbx&~E!9gkW zx&EnqozA0dKJkUI&SHXVjFEP^(ep~qWAFK4Yo9iUv-6rTO?z>Cro$b$NRnBpZb9EP zp8EndcI1gGZ)t)GPZMV?C8`G|1y0mkRT*KHy|z%+Y7wdoZLH|zT~vbILl-+x6FKlD`wq*cfNM2@EO z4_;kkiiMA}LR#2zBe-B%^ec(b2m2&nwht>}!m{@w`SX2&bnh|XoG^Uo7Ed(FNCArR zMtk#7PVKMCRv)$?8s#sPsrpXIZMWyb+bu+X%!#zhI8|wWM{EHF?*IjR3g6yzZCKzg z@}I@ZX|w!vZ6$s#zGEQs2`}?gqebXg9vR;>XXSY3FQQmgn4gY45wu^V<_onRyErFS z0`ET*=Xm~A)OYfy(F_3MzB3+2N7BIDi{#vzoeQT)(b#GCr5={`1l}&C#T!Ol zFr=~%aH{_`-n#9!iIL=_F^;G7=QES{HS8BIi^}HhePM!!?G_tA^By4rnzIJ!9u>$w zq)sWk!1n<*UBL3ul048(ATiFIQ}gY6WMkm&t^5lPe207bgA5fIL+|$J!g*0THmO?F zdOa~TVq22J4cpa%0f;8XH;Z6>ZPxywJL!>pExz)1*aq_Rg2ITnvZ{3%oywQ`3lB%O zQ}s8Naz%MWB?J|-*)_l$wIfyRhmPL@N`qhaQ8>kNs7a6gKA8$UTHqp$qJYin_UJ~6 zt7ZeGoAxoYB8*MzV=wIL(L7UkgHQ5&lAKuZjdA%%=T#}}R%UW~2`bk*DY=1+t z@){2~z93Y85o-Kx0xo;FnUMx2XG#Ptn|f6u86sNKlvb+28T7HW!QAF`>q*Mf>K{^_ zcF)3+-;BLopy$3sY9!l(la#li`;9N4s@5{WcBx)A1>Z2JvNqi!kw!UrzJ{bW=xot! 
zUhhlu1?LyRPn8P7Mr0BEC4a7GbFw*6hVcO9vW#80YYnaB7DwP9PLG>99_UE_7Iq}$ zRS9mh(A=to)yO=lQZntVZ)(aQk+YciVwygb7;}NTixa+|h@6bCx4Oues58{2X|D#NZp_avpkE1yJ5C2T@0j$>=i;RU)=f2#*{POnB0u zK>kmM-@mCkL)&IC&Nmd!@CHv@-yeP`>{M9gvd!nhf)eKTMrWzU1(2LnbO#yI z3tzZY0481ZTJuQIN9gJBSYrKzeExbK$(#{y1mCtC^N@M0{AuFp-*lqoRXNJN5Q&N=ltg4%-CFKEb}6X0=@tdkKSu9Oe!#g`$(G{q<7suR23I(UAjkY>w^n0+-MSJ7(?vrq?`jnNol`{EGMw4 z{jj;xQqzY^Lo16+E9`nqex3SyURA@YvxunAzwCecKAN&>Ttqk38HVmFcv*G?_6<9n zonTTcgmZ=GDkvKhNi>w6vdmBGJd`=iu1_%sJUMA?D^e2fu=d0h)UmunN_Wg6+|7GD-xf%B zJ<^7>smGSD;87X;#>S*SLxWN4&G-!+55|^&|CxkER)CMwU191FDaV@qNd_QD`YHxb z5>+#*Q|Ku;)8!I=JA;<&_?ri;qiywslE5|o_O}fHsGk>7-P|Zze4we}i!=CCfxf|& zS!yGK#Rwwr7k4F>zMMm3L-^Dm8I~af;l~YTBqh(C#9>3&EF&Hnn*N#AE!4vuS`#N% z6C808$T8|thTZlFJ4U!FfC7A1cyfSE3H96Cw7c8Jy%HU;>1(^xJjH3TYL&)eJg$>6 zp60hQ*wE6*#{6MG<9(3>=!~UkEZsp9`=2LI8~>XY=Ou2g&3M?`8s|s2N~{O3r^l-^glr`L8%s-K1d%8JiGr zfRyS}hGzRYYhj8jdteecL78fW68*Oq(3vdu8MdR@f?CY28w_WJ9QWUw8~-&`=+SB& zt9EVm)K&1>{d>^OnkgX~>s$|vDf`YT^7w_=mmEI7aZcpR_%p;S#c3 ztN`DPgZ(HpYCOpp>rcJcvMLO+;q>KwZ)2qPN z6I_-W&C#)px2RPW(=Ko2cwAl%_shHQt_wp6VWcI;iW~|J)sPya=+mZ!R`D3_ijZ$U zg!?_Z*tkZ$KQ%Mu8xHU~o|nqHAeCjt`>)cq`;dA0mxF0iI+|t4R`^5h43!@?2pWCS z^xKqfw`867afDf;R5ma;OTwT{rFcdoG=qjHZ8Vf`=ulkvQELd?DxlNQwN>WnKNk@$ z4tQW-?Ad<-hF8x=DO$sT=SOuPSbaRXWM#A;FH@I` zHLLdELWnw?e-rz{^;)mju?$5v{7E_Vy?3e0ki~~RqJXT-_nu)%2&DCVRCDK|fN)PO zqnh17+xYFbK=dLCCC`W~*x*Hvxk0BbL=NZuZPWDD)uhgS zeY1qTom6Rg7l9+YwcteoLMzpF@0#uNV7EF~KZBkXMCy?_qm)#HH%b~Ptmaq1Ex&Gk zjZw?G(cW`v!zfdZje_fTgUe5YJB5bNspWmcpCCt64yRDgl6}v27Rh@|D z(5o$?c&$GA_24V8x^HcV;Ne9fnFp)3c3{YTohG7~Y`%qepJM3`U zZg_GxSFwSkJr&sedqnT|aCCP^;BUd>&W^32R{&Bb-7&2akNs?J+P3uA88?D6fLco= z8sZZ=u`OK(vm&7P%Q%&uSZg+x|MEAL-CX#GN58Mz0e?JHNn0;8)%-nR`}kD-@EmeJ8qRk{(QD8NxN7@q9~@fIy22D~+Fri!sg#nY%xIS25oM#IErXWu$MKHgRCyso;5!6Gpac&hvOJ*wzz zT?2{g*&H1?5^&?V{;W6*hs8Z4MITzG^4U>U2_A_vQj%U-o&7-P!}-`kyZX?E%u-fn zS5e~QxVRqTnN;F}-9hvUjf-3#^4R2004CEu1LAe@;h{IF2muA_Ng6A76dkwMiqd+#$Hbw}K>Sugp zpwvZwck<`@E)+qu#<#%WTZ}=?^^+^l^GchnCajf{Z+VZ{mPbOQwU3fO_1aOImUi{t 
z+L@g>=fI8MAK~*v;b+Bbp}q{UOHXc4_PZ(ddH;ur;-?#_I{Rb>gA*bNXy)cadufpE?}mZr0Mgdg zNemWdul}Pz!5`31Bw#z$Vx#iTCqJ#UKPSJ)&oLfGmb4OmI+VYtM}r4?_j{Pe@9+$t zi)o{1<5pdxb}@zx{9#LaD5Z|skS@9+k28@2L%mk0EKNM%`{X*ud>ZU#XjF`Ie0CA1 z@X9YKxTMTX-kF)~s(#Z1P0bIyyrXE zg`Y~r3R%|aXg5NSX|y}gLKz2(SL$r{Ki=&?7DuX3q*j|>@1Tr|Mi(03`?RFrhj^Q{65rA?r9woq2gF0UYQ31{%b5hC&F2sm+2t+rYlBU6{$!3 zr_e0kvIIH$Iq!P$Hm5f;R2LLY`xw=*mwKq>m~E}r&U+HY=4enT zf4cT=GUXU%|Jb(ZV6Ryikl|8JOY7>;=9t}`#CwxZ#(79u>fG*>P2xG1 zQeYCgl4;zHDOaV$c|R+7M8PLa*o&$YaE>qeo}qlDDTq8N8us?tium zYY(YfAuQMvGlfsXT>F#N8ZI0>Cwwz2R(K`?JF3WV>e?+UJ)(;jqE|8ZOU0C5fCMY` zoes61m+litQXPBxF>vY6F5&5RQGCVR$H3d5EEy&V+=Tm*6=JA8Vm-s<7@`(mb}NuJmN56?Wt-`*c8gEyL|z<7g&_521nEy@@1pf*H1P&Kh-{$5E_C z6haBZpJyYm5V=A=XEVd;lyPxgI?gi6}`txEXc5{@YA>8h_ct=OQvNHs6~C^H*2&E?l`M!pB(}5wFL@kJ|)!N)BGrm zX>?bZF%J9Yp`VK6ssxWM31AnyJaj+pDB`h!yX|?sIMuU1B~VyVt_pMh11@U(h*f%z z(Yn-CIH)*qtmE(1CJ@u$cXGUH^v+rb?e;Y}D}Fo229F=QRm((7)0Ls38hPs7KQT(; z%)UoL?#!=(TiZ<>AJ1{#IobA$ksy|y8;@uyKzFu{gR6{eUH}9dno+NooJN!}WY>1CBV(!JC9f{@J_n!M7+|$UUJ(-@%DZ z_;NchVwMtPpa6*$cl3(tjV4hlI*DfoI)sH}z@tM+v=@wj|9jiwS^C@a{vay!ucuZw zIqf;mPW$M0ar0?6Y-q%8w-~hxz7OB)(R|g@iJ2Q)=&cRydyHZ~bc^R>CpKyxFt0FoUl*N!J%3Vrlj6TYiJgO`5ky^)v1C{zxj&PAA}Gk4_J&G1VDB zCv(Evcgq9&@pjg>&7$22BMVO0r!P#8YtrA?=LwZZwpH1b6{XRSu88s%js4CtP1N#J zXFoo97H%Mk!%F4{(D#^cu?xT8Xru4lgBM?e(5$SKT>Hiqb?(;nt*y4gJ3vpJ9r$I} zUNiOIciMCa-)h*A(#>YpJ{1efk}<(A|iX)`plo#F|%vtmQrG@A4!AbFi~ zD1Yxt7vgWW6PRVCWZ^xvb&4d?Uw;dvF%00Mp&!-uIa}QtB;q-h-QM2Y$!XuDf?&Wpm)4t^O|?xptQc8mO^87u z`kqd7Lt*zz0;;Uun)MZ}G5csX(Tbw?TO&xG6gp*0TQ+=kd;#cMLoUE&hQ57J5|4)` zUEy1GCvtU#C^K$JcCS6if#0`^UJVfH+U1jd8@%5zLbuuq7h#dd{1Smtu!Tfba1fyJ zgGI`N3Fu#)1%AQE$$iD?!b1>RiK~6^x1HoK;hV=>#9s^qq~#Dt z^k}IOGWP@ypP^jBA_LxR?loiWM7{adB9#wUwfmZ9!TarI2TLD2j!dyuw|$Kl3*u6_ zN*0#+*cU2M%qs+=k@#A{uqN{~D_qv>6MG=i#pp;nW;uc~4?_<>p=@iO?Sx+-IC}cV zQ?(-aK=Eq^<;kC$&*_UO9J&>lW&NNW&*>j>H5n(Iz7pl4C8KNuw9#tGFmL+e_(BOF zMepDsaIPcFotMiSp(B;IqU5D7Tnt>C>+D@S>P#~e3a)BZ!L(_Yg7n`S?FnA@^jCD8 
zIJLI?O4!x;7n5gGEmhhrTaqefrwEcQAlUt&2|B5wZuHWEfR#OgCPEA@ObyYX1ad@A z(Hai8DMgj;*l}?XSPyJ4>!=U?m3Y37(qPKGWl3k|Qer7xgT)wJHrc;Zb%c$9WZ&DnnlO%e1 zX6_c}xJ5R<{#g+Z+T>wP)=(pUqU%G>;;_S3ieBH9u^C?F1$m8=LIQEN@n^^80NRd6 z9Gi5V7nYNpV3x(}L@6#G^r>XyVzm1)HD*;t#@82)KmF;Z4xEXrckGfSd;WZAp;NWG z6P;QWQVJ}XY2H~$BASeln)FcV!y}4F(C(9W#PUBer0a-~jvD#ylt`1s42k>^F_tgN zKh*YU-HiBJ>r1YokE~C~dmfBR-9Oz{Exj$wTy?A^2*Tk$Hr_ozURKphsJ+g;l|frk zSU6V-)qdLYG4>6kp!5-;p#8LAmK0wL1yu-SMv$+}0}@J-)g-~z;+z7(Aby*|5bHA+ z?G>$62CCAP$ih<3Z#6{ag<@o=|5T6cB(3LapnfI*H4+H4(?MO_%F5`Qc!Rj~TQKWK zCM=qgIfz{bcLWma(ql9I&v@B1{UA9jw%9XsvJ84#=Vr7G%ol5Qv3DEcY#ez9IP zFq8)O3~TPtOLKGvUaOV$E1QxQbjY<=-8T$my*bN@Nkt8uxyyav3p4bq3G#VErO1DC?lU6Jlw zI&+8f)-m6`+rtopC)d&oUq!L3C`QTLldo)fz5%p|-J=TFsdBLVglJi{La5KNh!t{cYU(Jt@d=zmFSyQYrWY6Tq=9sN{R$QsEF|qqu zucAKrBWV(cWm?_X09K>^CnJz`r7>8f2@SQOulDUHWn`8?#M zTHj6#Sl0xYl6~1CUzMkTl|8dsIj=*t2h|j;&Dz%fe~5d_pg5ZM3v_|t!8N!f5Zv88 zxWhtl2=4CgP6+Pq?(PtRyE|cV0*mWi-rv9O=lktawNtaT(>vQuch4i|ImdtF>$e5e z;OJh*$>%-6gK~4e)s!#`y)sGSIWXA+@pwu(swhqCx(v@lr#S`HczW7`(L>a z`Jk-m(VG-jlzWJ!O;C|6q-d0Um{mKBo!j;VTF-L`Ej4nj_kS?aZ|K2B22k%w1U2wJ zhDEOeBnPk3YxCzXo}$2z`XhOS;Quh1yIgsnfDizy!`SB2IdwU6Onm%5^9zId3A(=e;#2c6g&M^?~)>-6KUU{NTqaflOoq>>M zX^sEw&*S*N3~9NK9gU_T?6}N07k>L_^;=I{1=awFTrVWWIo^ZiO#N+rzg&FL(uZ=X z%CFq(OCo?O*-&%zPxpLyjuzFTvvIVA?7@&MX9K%1y)v<^z0F=r-IduVN_d%T4h8riI3Uxa5!4oz^Q z5@UlrggJQSK@L+fZJaZ}&x5UE2oX%$DZ&x>UK2`rGxGWO3p?K}hZ_J~l>~JN z7@fPVcaynpAlS9O60G$gC)h($vgs~Xn~h>vd~9&N;u2z}M?dk3FwzeDd&_?u{9GN+ zX47Qs+J)bROa1!}#Q6|nDPA_2fKblmNPAFFZm2Z{`1ogV_Zi1dt>%I;rpAsbyr8Yb zva-wOfCK`Y5z?mGhb(gikr-+mGWQGMHvfx~W;|vY?YH>sVQ0EG<(RMaD!aR=v!66m zir68c<5TwISS$s8>B*jd6b>*SUFP?Zp%ABCQ_T6)MrWX2AzW&en2}8C$C>&lr7Mv%{{b`i3VIKKtglB=x zczv+jBzljzw*Fzq_L<<}rFn1BoQ!NDlf5ujfs*q4ecopt+ALBoshqzlr}9U&Ckk?jCRJdtjKdc1)0Z zrVeZ+$(H>_6Q&EgPm!g8;olc#yr#i$A9VS~Afvh??vg>n_46vsfJ;sZ<;a(zMvA-l z(BnIbHIN}vw<$w9*I}-0MMv)M&)v!QGpA&hx9^n&YyABDqO101M*kueUtnqG3 zZ~@0*PT`xXk8gbMGW|{@ z;w)hob4|{rY~>(>ftq;Y=b7jjnLUK3_)>Yn_0yp(VW7EGGNg@1;S5PvMIL;p=vV*p 
z1d0A>8|*krE3UG)(ANFJNU4)-GK}-^9qv?)$*|IEF_lDX4-Zw3S)|(BRiDcf74Jr8 z4g3boD5t_5oUB+=6(tDTQ{NC>d} zg*@AsiSG}&6L5yh(E)Rhf)9O|kP9Q3>-C;@2C9d5uRWsuIY|=PSfj)m=U}=sl9;&$ zKSY97`L6PJDzzfHE*cF91nn0Xz#(|HJ}9cJ#MgxN?YeLOtXKm%5~ZTpI(YO%m+tRl zP0p9{T=$Ohub(YXeZoC?7l~U=oDxy$JRtUfgL*~G0iQVes@AJfT zt*nEHv!ntGiUVMmuq4atq~B+wd1=`H3&e4R2sjKF2Z<2K8}dwi2X!Jm1;(PF;ei_>4h7Hws7}yNj^S`KB1vrlb-;t5lB`Id zBp23U+S%D0jn}6(WCgB%9Y(F4H`+qPQTXF5A+#|@J4!A!HS5{z(G@204gu=mA6-Yi z3XJM_xHZP8HpbhWNMlokbV99H(TZd3)k*nCwyw7;DT%)Lty&Dulvkb5lIj-hsb3~# zCo17><`)lJ+7KM4>yEP|yEtDK@$eihC1Q8X(7D60%aXFKH+};#v|AJUx(7H<>S^^* zhofa{?gLt(KvDX}RTZA$1cg64+PbY=5{KHt>yer!*K|$G-7ZyF02`In-)j~|=kJgR zfJuGJ1?}HH6^qyqm~U12>J|hQbD*8F?43Ez^KDUW@jsY*Tu*-gyHLawNgV4oBTcZR zw0s~iEU<8be-u<66&S46Up*P5*&iFxr|J;5CtTU`smY{C@RkpA>}+dlf56FJy0$f@ zzdvRnA~bwjS6J_%NO6oUZk^J$^?W!ZICx+t=c&FFvw2y(a2ijsViRHvHPEXb3>wF= z^weM*&ZpP6v@@@^7H?pi3v5Uv2B|RPRh>34Y1p=%aY|WWymvn^g5#|ROBfwO z_BvXp=LoO_Y-WxbK3D_)X~DvVnqE#2{7=B~Z;E0vSg^UrdQHbKV?#VAka@zZf$;w3 zSlK|1G@5B5Vw+M4lAdvu`ApmI%~~>qVQr&Iy-ij_8n&8j#=j_3HkIT#Y>3V_4L36U>gl!&{8wgM;#sm0I4p?E2=H>Ip zR2}`r_=@;%6DvxCH5_}MA&nKB4_r@zI}u+?w* z@4tSRm#c-uU$;;4BhxlXen!ChSQWQh{nUwiWBsd7o^8K}eE{bI7hq+Q3Y0l(?t!@du1U-nEd9!l0X z$Z|{R`OreqKBh@X36axoKl8tUOMag*o?!66Vo=zw+7^VW@L**)HsgvM;Ld$en=>_( z>s?N*0-oX`^DC%i+ulEFRjYV|e6QJ51f`x}5A2ifcwcub{;u>eCP+;2lb0K0zV4eT zCe-LXAzwzq2s$M@QVZb^gb{n^K3`wEHC z$F0C&QZ9={Q;@^2+rf^kTjd+^b8P)6lIn4<@fnjz>ju{%(WiY6c=?@jcfN@VU=Wq9 z$gUblt2aluiym(MgIGON8117N+CpGDx4o|$5=0M!FM?0S8f!XS=b&B^*bY0ttEe!|aus z9hJe=GHimTt%^4qq)a8vAl2osQ)5;bQtE*ECURVq-9y*OBPH6FN5#7HLrYRi-ZRVC z-=-$h8<~e$Wzt`7&URPU8RY_;_!j|p+E$$^yBYx<$rLk>sI-yDJ z;onX;Bo`$x$312ZASWE4Nu@e=1D#bCf0A|3Vgn%dLh} zseR_>azLfHnX2+oXRyIhu73{>rU6@(bM|)+RwFCbFb-WQ{rwzgwBCz|u(`(FJoG*w z&KlM{zr3|_)~^l3k6%J4o7*9G%H=)1$+BoSf(mxQxam@LhtwHWE4gwcN(HL;%B-B)8d&Fb^o zkb<=Ml5B6$COr^|D!WU3;s~Ev_W-!v+_s>$S7V^G?eWBHbSWBElu5q%Jce+B zirRJ}t>x>Y>y^=#|9fRbnOhep8Oxhz9T}Xw!{Smm`DZ;KuibH}eW0Ssbq$4xHveaY z0&^qP&B~@9C3;BXbdvfj@Wj}hCZAn(OxD$Wj$tkw)uW2BQ$M(mr?a(~$4`LiiP4I2 
zD8$wmKa_rmeA>JV7rg2GjsDF|qKkw}Vr3#77`ptNg_w$9Ja-N zetQd7d;_glyTiOPGnz%GtX60|cj@}6XImTp5+UGD;^@miQL9wUZU)$GJdCM)(?e}TagAB~of}Z7kiPe}IiHKZH69BnN%IFMr9ew;1+88Kf z6^}H_RG{ulS>z0JYd6WuLe7wz0}CXXz<|_b{ZSPE=dJ8jgKq> z_ve2Jt_~aIIdTkw=R)211TsSv?Hwr$uaiQB!gMLLXA&0eUe&k8$Psoyk5S*RWIa|%`Qrd9j6`xz0wKvY` zF7j;~9sew^x!k$O;k*)OjdM{LxaD`1i$@(tL|wquXy_>Ti$2eQImb{O`av zaNa<94qcIfoYaRuIXT*g4^1~OIqURxIm{ZzSxgFi5_@oui6Cz=T%T*iOK7Vd_@!k- zQl9=yYFYVP#NpljRJGytNtCqx?VG0iw2-Uofje>9-Ez!#;F|j9g?DVRAagS0pOHw! zpk$y=v3kgmSQ6P99zWg@KQF)|bi7ZsKiJS?7~OZ*%E?*f0a$W0+~n`izehy$q5UHe<%w^>4aG*cG#O6e?*GJfWt*ymThai=9{l8 zhh0?fL*y)Au&RdZbFWiqoS|_g`b^nJ%OOW!cy~`8Y-qaUAhiO{QXiZ$6_bP0>Vd%! z4D;FlM*irSYRK+UXJiSrWce=fxkLJQsE-7au-$NJ!S@7q^M!QtNjNtuifs3O!Y5zG>JD=?ie zwOslZwf9lgrY>v=*EX57>jW1JRh~u1tl6Pab7fOG;^J}-e}}^diu$v={|Y|vz&rx} znH$RRK|bxMI~$Nss!qjW^>X#wu@XIXiE5{8KuC$@$4M|>&foSpAZ!E4{z4mvjAA`HYGgeCaNkVDD8m*WJ zg@GS@eEZEiZ-+DXAu0U&A+6D^$am<|Rn}O|ubLdc*!w=AB_4o|vngmlrWUr@=e zrY;)Q@v-zcED;hR<(pq!X}Y;z8+0TUwR9=FzJB(8%Ib+3Y6?LJhIQLKJLJ$v{J=;2 zmLAoPT9d}=Ql5DtB63JZ6PG*#RfA-eh=l*-`nrNQ7Cyr zp$`!W?3FgN2D^P{lOz6^S}w6(<@tf`Nd*VIScT4qD;-C*Hy4A`w3tI$-lIeifio0X z>+0{XZ-7t!G`Kz7JyWZJuYA93a~8=Q;BnmCk*{m;m{*Yi(ajJ+U-~MYImR5bo;{Wv z(e!TxPbUp4JTT(12^K8F0o})1SIMjTS9r;#(9smWbe5vM70UNM;0Y#3Xg$mUNSy2ip5~Xw{wTY#SB%&y5qUSMJc6`EHJ~xnO zuD;a7Ag|$TiFX!!zO4~#6%7Iu^J!f!Si|_4_MR*RWSHGpaugIC!7zm$2)~g6kq4TI z9^Zw`E$E=>!h@Y%M5@I*6fC-Oh7&r6)g!Z1WhC~Rd^FoehX#@_GoggmnSO@W4UkN2 zKRNB-MrVDk9A!(Ht|@3~Du=?hu6e}9(>R{Yb7ZWJm;kjMH61lBme+`=EchMf*i4g} z>{0aO9`WM7meqr?D^NViX3II%^@#T$NT<}_0|2Vt0#a%Gnnh$W3ofETsOw!vxEM%h z8sVCOMBQ5f@X?u@iLtBS2lTa2KX0S!yMd7aL|;i_XSKgS7&Royg`;lT z24xK=DMJT)v6~YxkdMIvAxYHh{&!Z8DiEX(j{1%B5b~|#ZAep#kZf4kzPt4lhha)yHu&UM zb9sX>llUzER{YWlVrbuXh+*(#s9sUup?5WqCJ2~14Z9&npDLor57yZ~7Vvd^ErCdY zw6dR0BdUU8{*0PUJr%FtKEGMCaq|qN(mB|KsXzUz9f)J!($}2B0}u8Kv1Rz9t3aqo z3U`Eq0(opcHFOgOqasC}FaJuk2(5FuHB(@xc4?qYJ?Ov6HCE)5=Ms-UV|LAqcMl3r zcn90+xb00f@o=GW$l<55@Zyp!$>*M!Uxz?_EO;*(ZvA^T)_s;15#sb?Cnx95q@xo6 
ziu8cFJ4MzeU}LU(Z9XNqKL?o?@8ae(5)fSP4W~&>5Fc{jQHA#8MH0=McP|<_D`>KEOIvk`FhDbj7*^^$Y^IP`goT$ga zDD^~5mn~haG%*)+!@)RcfQ7N4 zL#wZa4_}8WY_z*9lRWeUz2AHedh{ACM&x(%!u$sL(e|hq>*ADT5wp(hiM^n1XEwnH zqnpD$*n5{ypg$0x8=d0Yk#shlOYdtNr1(r=MHAoecEv24i0iEo18lIYmL7&Nqg$pN zO}m)d?R*(EY+RSeq48;|s`7#05dM^UUU9(qV7;xFChBFxQ-2cZ5f6U?>wg8s;ZW$SY)IpezQQa%i#R@*+fF$a@ony4vU=e_gpU$fB<-D zs_NE4ucgw8c+NWbW2t=MKwRJ5y8Mw>)*$xGJVn7+rG<{w!eFIf=3}KJy_3rkMP^FH zct(y4$YdBlRC~nr_h44ZAM?EfkWIsFd{#90Y7pkuBDFN{$_k$=3h-2kS+sOs+2(~^ z{e90%Qfny3a?hvA3AR`i^lS8hJ8lSZV;sNd>xL#1gEo6>+@BG0E7GQx7O7!-L`a%U zqAlUaCrxqUk|oYbO>u_@2b+^pFo}<0#Iw&3tq0_{>|N$tudOq5WpMquWuSDaZnAss zE{6PC$eM}x#9kZd4~Kh+NdUYxcZL~bC6snv@?=3VLAQLnMY-kLd=7-MZcrBU zZH2S9Y!QC(6CBRR5YK2ukjd)TSo1HKwtlI`DSsY_}As$QPqlYa~U=QBfm`FYua2g4rDN3nbC}uSu>W*Ak;1ayi-p$ zv&s<0?ok;jUSL*BAM=97H40iOggqF8YU*eK9$o9lNGCKT$u*ZmT}a>~F{5Kw4|h*2 zcj{XMSi?atTHC+-lelMdEDu{x(7Ya63dL=@T%1ir$%aTSPW@<|u1QwvkvbSAm~nt( zZju%DyQ1`jA72{6dD0p*88KpzEAE+#WniR^Bt`w9^0a|Eoq^dRMQYw1<<-}3a)q<% zR5l`qdxHuAk+@~F`avGTFo2H~8qmOFw*eS&G(y9XZ)&f(bl&VS$_MKiZbOq-Z{zH< zFy}wwWV-aAz+=kW3zns3oUpwv?D$I~&n?j06Zr^M@wTl{2~8N!i#tSay^Ej@3YNE{ z!r>OweiKl;8H1B|$<}{_%~2o1UQ`Ad@d8>h5^(1Iv7?8`#*Fb z!g4A*<*-Rmqy}UI+nKxWk}^tJ+xCn$?9~Z~@|*f-8(J<6O>6i3SU#mv(q=cmP#bCDhsIjy2J}*OZzOw}$4s$)#wt8V%z|EO$0OU^d)fm!!6(%V+&v=%YoyK) z9zpJ$W`Yr8Ck-JR1H9DdugN-|^3M26gQ*C*SmlSF3@&EZ8DY}i_623NK%TnMCn6rM z1Ekr#=UKJ#`=YXS2(?h?CoU?SHv)+4-k_B|KZTjW&IfG^*Y9+s%$Qfh!uLaL8~Yqj z9EXXAkbF7qdas9{;7uzAr@Kv}du+zbzNT90S{>OraKrP@$kn-=Onc$EReyJG~$gHyfomIToMRSql7WMCLLcGr`@^^QE7$VCD!kTXyvx z#eskf2@wVtJw2hUr7gffAaPb0kNlEH8m}T#F1S6CrXzv1meysA7M|2n%Z~S#t)BnR zZm>9VkNAg?z&|FDL&qnGCJmgqmwdeGs0$N6$tyj{ULM@jQF$?7ZIqBZhJIdtBG0`1 zvfP=sqC z$2^h^W4*|_ceeInDR@>iOA~Qi*;Q4L<*ut;S18M@<%B+W5}YE#%fyNSVf}d0%AyX` zD-&0^;DvRV&io~;8NyQ5!=l8~$aYSiU|RC+Bj-v&4lzw8g2lq$h7KO;XBjDp$-8za z<^CF*Sr;McoUZ}tTn--shb=HPI^hyi7?(^mdm3y_3|##37GbT3JQdX%ML6tj`Ks~) ztx73HUAoFKlB_H$SxD@Dmu?t@@Fc5()E7Obk@+mISIWvx1FXtqSRu zJF|`MoOSb4YZ(+k#WGb63?+$%2X`#1?^0wR=%)`lPegs@>X^EFWrB!hU7Nv 
zwfC{s_BAnvMe}5uY>=LYv6sR~oy2l>M?_N}f{Xv5z(;3oL=uPT%~*%9po&Wqsu`d8 zucE9V8HaRy*>!4=Q8p#I$UO-upBym6-E64z zY{>OevMRc&F>`VtsN>F^?e?sB`n-bG04(spR?JIUall`i1!q0pa~uAkb3hOjBC$wj z>lD7u2)v2GN>vk-y{&PT> z*|G`TIdsmTda#a^X6`_pb)mMn9_BD`+nFSL6>}5ic-1I{s$-j9hYLM! z4XT7Ho{<+#Syq7~X^W~lauWPqb!XGka?pi@odnreQ!>mRadK>iOhp%=06NoGoFOk< zmqFQo6bi;ci2^>cQRWd@P)wFWM{1n`KVD6vo&1AKX;R>5^$Itf6}*c2z_?+kkpyFD zk_DvF-3kiSx^<8QYs;#Qsin1{@0AhA9!ZwHsuHrGt_zWB#uO|IpQo5Q)aE%FhC zNN)6+EHwa^>{nhUdikP)$$^)|qB7?h4+P2F zNvALpFsq9m_P0326TjSA#(c}}U`XH)CY>=a|6`OI%n_1J`bTr_oOse|F_9A$B5JuWKT`68~ z-I6{2kEP&wgmxBThEnjUSe99wXw{Ene!f!9GkotVbS8-U>+GE55f_xEk%Qu@Ll`Qy zq!}M1KM~salRL4*E5th+ED}n(zMUR$EBN z09K8lpdtT_`9MDVt;L>zHim$QZ7rV|7m`Z8$ zT~l2)0I7YbIcH>vAz5PU!t@{Z<8my13X)D4P+)@)K-Gndmj4p_ zjx2eFu`AR{<>koAoh*jl%O3X!7~59Lm4Z*7ohg$mtCfeqgqY234ueh4Ads_4PWGdsQ==pg9>gaA#vqAOT$aLp%$zjEUWn2Jx^&qqDIIo-+>r zyso~uchY;zJfFO^WtvOVGc^nSMEipmwb_!YA*q>eW{c;<9L^JCx&KLIA}TEKsQ1{_Gh|DDwmz|#Nk3>X8A`@geF zNhsw1ooQGR006^>C^1K9RaZ*7e|s)b+4lvN>JSNi^xO;3E|wo$v*Y+5kP!o_Mp#Rtpn9`^r{X) zs_a<+RJ=e|*v5_#C_OgiqRO=GtOT1X!TCgtmkq5}QfB#{xa51`-(=_pJPISit2GZ$_j0m-YkJ3mWS^HK;M62h2^ABNJUa z=1g<|RT>9q;d6R{6j!!CowZkb=0Wjp?4A8NVokVuQk%3b@i3iUh_hIcp82{OlraB3 zznV1L8I&&EMAS6#=e5n(%Sv>4(-GxdZ^{@v6%{dqRrZT-1dfFR#3Pqrm@fx&_$K zd||XBE40mxoqZ`8{-a?a8}_39(ZVz0^q0)cG?IK_2WwiRv+LQc=ew8D$%%fIIx#w& zW{bBhiAgb>m2|MD7!t4))l8FIPPD5Qf%kklj)yc}cC!=F%ATs9kGRSF`!;F_At=8VM|g zOZNp9IX#Oro_pidDr^c?KE1(P7aBhnAV=!L)vBa|=d*zs;IDuJQQUm^S30kwtj` zBjSsVADMZt8Ng?uw1i}=SdB}F>&yUITc6Kk91Y8+rmeYm9v!DTM$NU=2WOQqv%Ie9 zrOk%&t>TKyxVZ(r8x^(49qQrt`S(i?Gw&vT@zfk9namyV3v7P|^>I$P@z1kWe_Pwo zr?y@xj)}4LzzapGVYQu`-Ta{xzGas5Q7t$Y#E!*Ihfr4GF~cR?{(5?ASvMsTLZAU1 zLbvL|7X`Gi{VMKMrnd7u*TgtsBx;pR@0+(N-aZ*ueMqdZ4zcDv+7&24c{0tQ-yRY* zpDRvg4kL5szJeGYyJ$}L)f&okmtvs=5SqcE$H^I9uLpOs-uv`2&yBA^?@3cEV<$io9!~P zzWxC}qVz%UrRD;k&UyrG+7z@pDINe49NfS*g=f##zwSX6I?ocf4}+t993qSOAczszBgIFLk%7B;m>(q5G~J8nF{ zzr5ClJ8!auu;L{GYeGt2^MLi#1Cf;T*eQqebhYQw(&Esu?cv)1oEz44_95xrVtd^m 
z@!$LWy{I#pB_u`195RzZbeWlhx;tm13-6KfQs`>`+)!n#{2DHJ)49O&n`56OQ&b{( zwX$vNxk0dmGFqJ1#=hNpvJa})A^&Wq!Vin7y8cvokYE6uy2*D8*_9vK)id_7^(A7z z6K}&j{cPRsR+-RmPV74UbY*Wgr@V3kPz2o1i|}{5^k>&YIVnKobUQco^y;G??bV!G zIC!WIo;actP<3@C7g$wokb!6X%_%3S&)#K*L-O-uftp>1tlY0ijI}Q{8v9$;u5z>| zDDAeIeH8-6-H+N6PqS|)uWvVIcc@1`+oA1s#>KG2OrsFbFLw1Z0Bhw0QuYEo>T|4vIQwS-K55@yz^-Ty~Nm+xb8b4TtH zhaYOTEAuakYT2q|lrsOP{qsmnSj*&Y!z(P4S8Q*>TB8_nTuSyMk zFPQId%$kF&Z)%r@z3cix7)Fm1)rokK?_iyRP%-bIRVNF$sNzXr z>1A(1qrBz{vgsh=Z7wBK@R))Ld#0q#Du>-3({k5^5B(~BHf}6Cn+}xYX4q8)`i7EY0_X16kxfyPbB;~>b;{)VV4rO>0uv%mSNI)K>VXTsl z1g&ciGqpkOGWf*ybClx}=ve3&#@FWYFetg*sfGqjev4QvTgH1VY^-@*(H$iJF+J|e zG(5e^G{qj^T@(<~ja1Ra%1pP(9B?F|&oYDjP?Y6K4JJl3gM34x-m{Pr`AxdV+!syK^?9OnlO84P%2R zyqCm!%lTY+eDf#AJsVX|U>zvoq*AgsD}9^PnoiDt$XaN4&LzT=L1V>#`8SZe7oPYW zN#apv-C_eRy{<|Cc=UQ&7{Z==p`z@Tp-Ox4c&ITCOK;P;?y$MD?Cg&v-OK-KJOI0& za=@Yc{A6OE|7F@RqP#E`IIzH@0+wm$^?YtPetz{o_Ij-~75s;WT?YW%=K8AxaEeTE z>YkG{R)fTc7y-g8{ku3|T_}hE8sq}bJb!g`oMQG@kM(tpN46JYtS2acpRamQK$G{6 zn<4GVVxnwg+TJYQ?*f#@YF6!pu02#6d~BKGe72$h0Dk28!vcCgXxqf^;sEf$NlTUo zdv0wm0Kl&>4FrHci{Oh<%mN$~*{+|*(%_V}>O4H)SC9!Rpci4{MpO|7z%9?#_6k~a z3B`m4$cXU50}KXB^i2-Q0MP{xwXgA;WdMK7=|=znUF3oekg5Pn?`a4Hh+Jd`0Jc#W zacX>>=TfC&zh6ytnT#(6h{&X9L_c)f@jSdx>od2__2qjc<6oy)u8EJ7#HzL`jwD9{h5UXP003W+-c41C?%MC;(|tm( zBo_w|T6n)*Jiz9ICUwu-1R?cMfWG_c(-R0qWw!hNj9Sq*#|`S_1uxET5`2UNq}6#@ zS2q@P=D5G%?r|GQjq?@oAwNm!*+|uYxHWl`bo%Twr4Kt&K3RCD%kNS1_Vi$c1bpNt z(c*pRvI^1lvCZM_78Dm!Tr2gznZtv}iW33=>WA?W0f1PGzaj_SfoE#W_j?y8k+O4< z@U-?Xe>*1naqxYgS&RkWg8G(9x|hhPy}EXhUI74v_nyT1n66csDtEE=*JyUYIl2Em zyarh`yxCLYbK>8e>P;tFHHIa69==Q_7Goa^TwCrtEz~FzulC3tpaQ;PX8A$^xHUBF z0;kUK!F4rLz8i;1wK?8@$S7%F$Jy}ImDS=sw}uk8tg0)}UN?^NGxWglc0x3cFNel` zI%?pLTdDzyLe#N`&EETTeR$IBIN!&CZPNl>QUbux;m1I5 zwK8EkRRO#bM$T4M5+tIL_?@yE;$})lY36Ue?Qg*e)Tj}?WTXU&F7fLfIYjT$qfn}0 zoMfHs|Dz=bLDeQD!d9LYq^}$;Oz6E`M`~AI{d3b6h(eWs|4ZhybGo&7UoxN_w%rhiSF!BWLVzk$rA^DY^>lIhuzgy*oD-SXh^#~te8)?r;y*H!Ic@IK(TFQ)@#S% z@y|Sq3ijBwmL1Ck=_q$C-f@qYj5vfX$iFx%oIy>KNs*qoWx(Lc}N 
zoHOLM)2UN|a+N1<<|sWCbai0eRqQl=qz%?Cg1|Tmc0$`PUy?4k`PQkuW2j)4PKt5# zufdJlTZK5-96ItsZmUS{GiQtCk?k}&jk@XK>le!G#vRvoHv+!rf3DAF%l$~(JX||` zPwVC?(0xzdh~IonyWZ@F1iJ3H2Sh96%zkOGMpuj8@02L^NDhz>B%$7<3H-$=^*ynn z9+CQWyW_OU^6!L85s%}>DSyOs{%?{&|5K^`kDeLh?Wdj}S5v5Q5--m=o|diwtYa5n zc&kgYIvz9kAj8!Ld>*m5>*DmQVC#0c_z9*pQ+6UL?@Jp{9jf@Q8mXz=jB&*+jF?4vq}~*Ka#<~IUE?_7*eUl;gnU13aiMOS;#!^`A?T&zD>!4z zg0+O?+sxUgrH{u1XU%}2QNhfqy_BI0y`*jPl^<%+QdnYuev`67uZ#@ByEd!pf? z6B5+0XkPdhs71={5#8*p!SZ4$&9{oa!yS`ZosvCEciEhs(h|tZP?w5bXOCBO)Nh>6 z9rhh+*n~?J7S8H|J~fKAumctB)>i9Y^Uq48_kL6fGZg7U^p``43pf3DgtNEBpJ+K_ zLZgnH(1eU}5Vycfw>6pgBZue;#08JmH=vn48BLlS+sBCC#SsbjOo_XoP=@l6 z#ZpZQI*Qk*Z%oWHcWP*&eyKZ`Cu;W}drCrSU;FqJe0_g;>Phry-s|=6iFc7LB+@_X zIqO+u*GoS!pZ0Nj#zvDY(HgxB$24@UUo~r9?)oK_F)027KMs)5tpmS++78bRH>~eW zFD}Xt_|?SNCMRT8oa@$)zSMl!;w$%prDv>VRGNwl3V0dW&5r{lSHR%tHAb66UsLD%rLgJ`LT=6-`CONYn+2gI5?(1^AQMO;*!)rf zHZd+1d+z$RY2!E#Yqt4^R<8=iK3cWiN4=ApBE7^W~IA7n! zmK6P%a2AWZ@$A0)zR-yzaBHJKt6~ia!fJHeGh{tGVBA@RfMQ9!dg-QsZBJ`%RyL%o5U z?eW_x%5}e*K?Hw4YM6gQ^O_L%!ucT~B({ciTRwOD9C6p}yIb}g3EVsQ89AFI44do` zvtxfl{*U=iY2^j2Zc5WNUoB=8FJGT4doZ%E3^)==>Kk{g&O_8Q2*M`Zg z-_=YpwWCN!nmEX%m&gf832gc9$d=qSygp-X;XG>(90;b`FPkL%TJ(LJ-0wjP!wC)P z;C4!oFQD6s6OYZ&0)?CZ^uVGiwPd8nLAmM57l69aE5=E_>h~V2cGTkz(ed1EzltAVx5L;FInDM3EbEEV6 zwdca!wz%;^hz5r?KPLdmVaB7)7i7~H-&lK>Da3!%I&J*KueG`~=NV5kdvsACFm>R< zQs3iDKS@BvqaCj%!m)l3m2md3F54hv_u`Z3gE{HJF?z`3$P+a6g$}#M|0)5j^d*VO zfP-7VO8l~X)m>vRc{3iY#JM_b?IuUV-UpWIHge>sQ2N}ijEW3IYZ>kt&XIeGWVz`1 zH4$kxHH?2%>=DffSy(ehqM6W(ood_D<@R2g@ZBs!343u@Jq1Jg*!0 z>i+7!WC${2qB23a5I?K%3YI<;Z8Z!s^i_*I7u(qjm{@JLO1eM^g%CO%DJo?sfJYEdEZ`7_uKO| zWFn7-8aMtP9W_&q)QTbsn%TQd3T$!O+}+`4q|bR_i`4voDtqr}xW4Xxd=Ny7BwBRI zTkoQd5<({IA-+Dgx zuUUJ}*>~T2+H3FqI_Hi*#}H^IubDC%5$T(z+Fm%9VhATBsv`~g7M?WM4dlnwHOv+x z(pen1tMM&({c2OLup`Vb_!HqBlte%N1bd$3 z4)iEu#GgR`61r!xC)b3ZwW!1wj&E?_UxGEM$)+xEl``pSdNb_$c-FypV{cyd=p8)z z;B0jN#Pv5TMV9y(96^KZg?g)I;8<)36%jxia-i$}A!HF)f?jh^C&0+m#o{DB>$Wtt zj)-cMBPSY$7*g~3k{pd~{GK$~cBT^{gwG_SHn(jGYQ 
z+Rjf7rLg^UzfXgdCuJr8*>F#2t|;5BSb1qmoRrgn{my4n^&zm}0 z3lH%QUQY5Cd&S{^X&&LkHgXO?d965d&K)9?DxyVW)4i6>EjBVd(9>IWn+_qGQd1w6 zlV&ASLR>|+Hll!&8B_cVHHc}sHqxinD)u}V40fc?GZ!?b9^JWCT^Z-W?VL#N5*|G{ zee))U>vGPayV~`R(4qF(S}a1SMuShH=)u>?3Y2}U=p{ML_K)GK)1Pf}&C9tld&)9D z^x4u6;9jOjL63T3*DB6_^LW6D>BlNwGRtROKeZ0r`Z>CIUYt3Kye!~ZR7V{Io;&|| zzr53aeD-ngvF0metxq{64+PBbg25gC10FQywd6=MK}3Qn$A67+sL)N6_7k-IMLN13 zD3<2LVNq1LvQoM0TcOO{5dDuGXo`38#lcL@7oK-YTnjv25!xz7H(!L`f(S>8UUrQ6vs#7l!*Mjd1Va8Cz}}~*NNqQN)ECX( zGQ}w=-UwhXW*{W%{nA0;Nha4vXL^C5_2V<#uwt&?1Hct5l@~JbbP?;0a!}?uj5{;S z|8tO2+P>8jP}t_SvO4wG->MA0z?!$H&c|@I<^#WM(Qi51dnbSDVea)jD!Tl{CDJhI zE*Dm=Qf80K#Q}oL^ z0}me~b0Jgz@JfFZQ9m)nKzFMB=LQYoZ1>v^&+MPTvz62JUIREXmkiPWj2@qoD zI~%zoX`5`w$qYIOaT}>e(nlK7JjIl@RzwuQ>PKZ{K3qRqm2DV2jae0o^v?Cw5BQY6 zNy>VEU3T}5)AQf3=gtjxv88@g1L*{IIu0(y>wly!{pM~h0qA0#hWKik_+`$lg!F}z z=R!$QY(T(~nQ>m={zi2|UabH1(ycw{;WNf#h|Nre2FfCBf~r_;j9|;!g3Rw_Xl_rZ z{Z@h1n3kMZlVM^xLBbPV)D!;Tv?A6#y#hkldV%Oedu-rZh#xS3}7aqxF6JnMpi z#_o{*>dzNz*^Og&FN+qa^F>wy4kXEM1sLKegbI8HMJf2)VYEf~oRPYfgT)L}N-J}T zyZNT9<{D|gYzXa{jcy9%@qJH3X-GK@HdkIOA<_>z(jPHBE)t@tJ2*rf?`$*{J~W|= zt=EAsQbvM}X-E$)7rk0SU<=Ky$gqf{{j1J^oa^veQRkp_uc$hP*EPpUPZA7c3WJC? 
z0}hi{R%9+-ihpR0G*CTun@Zqo3K|I%$|&->oHC7`w$HlqaA%3MvMyw(*APHT9jx+# zDSLb4@D18a%)aOP5@u)vAH;hSSic*D2g00T=|Yg)K>25QWJW4VBjCqR_em9!E(W?k zK;KV;K+oUSe1RVK!XT@%AccF}mako?ks!V3wT`oKCs`4~V4Y1U4YRZgh5$qzichp|fJgQj=m-@mdM#(H z*ZD8*k$qQuwZA!8o;BdF3|Nr}SaCncm(cQ_P8WVf1P}@Yy;Z~`tGubx%WVb34O0wn zgxfbl;AX+u5ry^?EgVjbIKLQU2s&inlB#Jb2jgFb#b-l>)X=zew&L0V|dx_55 zc2;yxpSmE5JuCy0fHGCRYjH!?Lr*mNZiuf}(8pY4;DPj5{QWNOZPaxqnxoeh~8F{DNpEDHuy z*A*{a?ibc;(3u=;x3$)D@&_ukfOlY0Pdi&uz=xyee;5$I4N!XU8pw64#`5ybY6%~0G15x}n;WxLJLPmAT#fgvSI?$U*X zD4IQ|#dEoCz6Qnl>J?h#GbDxoli!URZXXSFWxD7k!9!>3)AkQ{Y-X%yfdsMQLh&UPp53kMz+!S?CFY)Mgz=8d(IOPvolg z243xb^AF!P19cCvZ}S=j%&KYEjHCoIQqTCY67XtOc1k6U7rhUJ`e!#d z+AL?2aUGwrVC)hpsS%mVQ=oN9-XOECVuYmVswgEKFqK2CiZfq%q7`$?sd&l5v}pCa zOx}@Sp0YQz6LF3Tzt7|{&3wZZ>Eg)3G@iyv=rMYjaihW~28Q_3P?;F;6bgqSaKL$4 zFocej|0Td+t`JQ9w!-IyH%k!0u530du(P~wB*|%b_u={g%QsB=ob`Y zIV!LrJPAE&@fk(Qt$ohL$O|asAfjJNP`*qlJT6$DtF8@Jme~%g0c*YOg&v%AOW{^q zS{{$fo>p2|VKT%*2UIcERnRGcQT${Ejm*2!CwVI$CR-eL8*MB5C#+kWUt3iF=@%D@M>XJ0@q= zY{(Tm_@3zxkxWnfNlHOcqg?AF#>vmB2cEJ}xW=k81+Xvh*dX(F$h;(~Iat1Jb+*eHJ>!T17Paayq;) z29a^TuwOGGbnyA%_Dc9{j4B7x`o#xTIjRwuq;mLSjgscUyMt2>3%c9f7I3Ac5%~F1 zn?MGq+|~0hN#@NxR1>Alg%LL4<^O<%jMgpB+RInel?B~d5~CQ*n}J3F?+xE~U+|91 z*#SDC{Vsko{40Co{sxPDt*-T_Wi-{7sN3V2D_riXa~or5s)hY7;jr$403_G5l^+LM zdC$Q}Z6+7pk&ZYxa0uEMmfqysqsPBW_?$J~#a)it%unTmk)HG1&3E&?efiqG)F<%{ z$8wqLl=}Y$3^5Xcky(~kY^bZB<19Us_|+o1=xPmR`6504MDJP`AYUu$gU7eQE0dd` zvNCT-TR~?qZ6iatVxAy~M}4ovN7u}+JfBbTI%~+M#b$Wh20k*TQXF`wYwI~H1S!C2&a$iXHM8p74`9e4>VVGQeoW1`Dp$LZrC_@C-j zl~_Hkr5!r7G82?tsDelJx@MmA9o-s>Hu(Bb%cQDh`}FDf9Uq7A%tC9G9kW-AXsbxz z9s~ajKt~@Zp@F{fm6qLpVjUH+nOaJmEWr1hjb8tHMV9AwnI!Sm=xWRK1O=`QoMI7Yrtlns0GRTJCQtA zgo{q=0fgOwAImut>$haWi$93kM0GVZ<5e zdFoF}=w(udhifm}eVZB}rkdXG*L@$2fpBDl@&X6KetDxsnfz%1HfdmgZHm|ToL7H+ z?4s0-*}-q)alfht3?2g6w7Gp!;^xay^TBcLjXjA65)r9l1D*v7xLCw+LAG%t33tp6 z?hs_09pOd0Bm8%jt}?lY)ne(>D3gW4RWH<416V+G26?^I#cUPC={%xXw9?<#$k+J% z<^%=La$998%Y2cImT3)<$A&S0gdr{;PsdKyqvAgDi^Zlu)_n0ntD)dZ&OGMl@QxK= 
zjR#pW*yiDbKvK`|0d&Fdn(V?LQ1I=@8)D(n!vKIO_$51n5CpQM`$`A={op2h4cdOX z3DigpegsergPB##0FkbC7a-zhbpVtAAoZsD??(DvpyIp#rr0__xZNsMT9Q6@q`OOgk{V zu|itll-8XHtffD)74tTf`&!VM5RBHzW*vq7ko{90_+T3!d-pgDr$PL-v8Y3t_!QgOCRlwlk{Z#6Zgea zG84P(jL;v9-7#(l5F!6_N7T>ady~w2CpyA0(%67NLzFeA&`fwe<<`4fqi!YcbF@DX zx9BZP1CY^~dR<@ncJP$>)z=w(k)|0lt?O&2RrP}m;?{ER3l6I-t3Q6r(zxlv2BVIa zn1vGfJKRj!-C395T(b9i)~}EVg|n*Y8z8YZ_T%Znaed2=(v?58FK5e-g$2eU z7D_`{I;FP@5YOAAEe~U7AI)pH%FS-N27*+DVM7#4ht3A)RimX^0xC0#V69ntF6_Ll z{a2i2s4!Nb<5D&L>uQTkIE=M|>ctw9r+X=%_t(jHtkbxn@*65(c}2YI_%}psUR}$j z@<;ouUmK1WIL&i2Eo+g?pa3;jIbBeb)(P?boCuqG*C}JOO?|tS&Y#+;I9r+(McyCt z(lu?|l0OYnV1|SAGa)SIuWY$cb(q%|Dop(E85oDMj?vsM$D(0V zScA6!NGwRLx@(=vPLtnU9$X)pQfOEFIDT!g#_e{3ivqCD(Ix3KGeUfXZ?8#eC}Oin zS)q46mOe&nB^hGW{pNUA9Ii$AJ!P4<96aDvqQ9R`!(khQx=Ie6(FddF(Onc> zxx>1DxeWa$&~7>3hvc4os&Q_5$UEB5c<*hVwuf_CoOFy|*x!TuQC(rDg|_I=gg#T! z7UbY}67CRLK>$EwSKZSh{p*=olnI`Z8;4fhA}zC_s~B4e(H6~c;g{}AH!e#G3cpw0 z<$k^XmXck=3i|rt|kh2oqKmg@QH0Y%TXyc9#aWFT3VlxfD7Kr&;r9zlLL`xu?`*eokoD);sBEZsqrz z2c6jbRFNUDqfQMLdg2ZdpK@p6UD4==o&jNxI7q_J-I3AJk$e*`Ycwf6Bl>9xk+v4O znJHQ$fvymWoH4W^QjklU-6$}naqw`^o~nCScV=r{oXT4GTpn$7<$?a2<;h{(VT5<6 ziO}io!Zp$68waRQR`(0IG%XRKKh}%CuEaO>vzj;B5b0`rmcA;5rYI=m%PK%a9fiy@ zoKzdx(3`)mNS#(&(t4r$_OmN+k3MKUWt1_Ys56G@w>E%%N8T8p9*nuNQxGxInwcq? 
z92rZFs&=Hi7>z45NE|^Vi2$&BB}=So=9pyqYkmiDo?WYDaTMvTOgLn@8(|fgOj@KG z)g$fFe`kfXShcg36iMm^wLs9k9DxiPRWXzs;RG2WP^;z03<>JWvAQ}6te)}M8kc#z zep>GYf2IVsIMYi2>%h8>;>19A+&uvHCjSoAp{82LIN(GglCp6NM}UVni4WMSbUg}Q z&v)TCxn&rm^g&?e>WH0hhh7;KWAXeFt$mtT39uLKt$S%z1<=Q` zgKg@4z;t&v+~t0G4OhLt1Uc6;sONij=+IQvg>-`oSZidpr7ApgXD}XD(iBp0^s~Ub zYGvX#ETxbb!k`)pFz?6Nw;fbz6P36Jp5D&5?eyRB@U_4Zk43pK}gK`60lrS=`Y}gG*42*d4UqW8s2YT(} zL1M_SdDceXP?Yjp))IPee|^KXb!D+__Y#+NbYxA!lK(s-Y}c{;xFo7c<9rMAy3_x0 z!tO^yx@~lUBi&L*0Ql^n@5jnf6tckL;IOPTAn5dW?#pI)qm(46a7+x&p|jglv`uH< ziJNnDKc~*7hO4H$8{6_I)~V6n1T*So?p#Qn=hC**AqGCfL33;xwxz~J~)*)+TTD4^Iit}e>A*#P5T+Hpvp%IVwp z_PA@8+mHtNaaGp;q}1Gp0xU!{dt4Cdn3>plyWj5}5QC0TWdTJMue&|$R*RrZ?lYIW zXIF+GzilJ5Poc-`VTH#{21ZJcthFRbgDF))o)HFH z9e%#cLd0ju34|T3=>KxxbB#!DaEW!8s(D0~^Mhit1(d$F!K1YvY&AqhrT{pZyJY^O zwf7~c0QMt`!NXg@Av{wsr#l0XHt!aG&p0(h#xq3l&@1@#gUH!FYv|CPA21Cj^-EqR7DV5>*EGt{E0_%px=2Bgeh@2 zkPon=RHqXOKTU0%EIYqu^};l}LhLDCW?WDhy;%Ce#85pi;~x^Xw05SpDCS6{psCmO zxNwiAl)?HMqWtZb7Xd-y2N-b!;H|)(>k`TjtuIRoj`ha)w7<7)bJ)e*qOZ6Qw8!6J zDzduB4BUY<0KKtzT=vDZunx7k?^VhtMBkJf@cMwDa-Hf|;C~had~e|W;)UI{&0jIs z8e4B$lbhdUvXC`EkylJSI5V$y-cBxa);z0m?)SgUTR6|j;I(9JO0$kH_W9Y5N{4bh zM#j#(u5EXWigFC6*EFJ#4r5^=RN6g?_0gYg_AVa_$B@^nC*TgwDBM{rfjvO9#zyaQ zf|KwLUosqbIU-qaIbLRVUl#ECbJ8)fe&Gbb#7 zT(4*p_0&=6y45g-9v)nrNHdNDD0pXmA`3y!HH^ONGjEO{e)@!{R2elRTWq zs*z_mPKMCIT+2(KcHAWkmO=zXq5r|Yj{mYI3l|!Aif1SD^q&35rjX4P!koyRN>I1e zgPLEzTv$hJP3`-m3KX-gCVxFUGhmfNsWtX$)PISAay5_c=m$F%uaO}IxBemNYe>7d z&}sK(44o(3toGP}IrxCu-ZN)@Ba_)L_u)&rWTGC_vp+Ie`+K>g>zga?7&%I0S$QkLeR9AN)!;3qd2g${Kn^NwYSdMwn!sql zrhBudGX}yLxdxz=F=3&!(Y;V3vHXJjiNk2mM1w(x$qhr&H#wqt05LDajxhU?#+L0J z{@ly^(u#u&nzwzX>l6ZQ9~ zZ5Go)yCX(OUbpqjmp)s5Z zT|IY|m{epFO%J!L{p6vx zWQ&q~1GSMVTS1le*35_QNtgbY=G}&M$GmaoHXAgaq`2ip(Cs)Lu zhkA(3A?5ttvIcGS{euU ziMd2~W5rwST2q0Vy^?qPHQrqM#RT!*Xm|=Uk%YnuFod=Ng>4&~{mzAfTebV$O zwBIK-@CV`+t}kFP#DYosi&z-(YB;bb^UX7@b+O5qFe(;u$3lOYMa|!76Qv-@Hy9w6 z0U`#bgTFe!VoF}zfAU)bX9|Qd^eDiT+;n*!z)#@%pZ9v+81(N|3h4XyNANhe<$siX zcl9s$G*c#!-l{*KH3wEZGz=ui? 
z%{+bu6*{x@P#2b*hgg$??!Ex_;TasD7A0Gd0E&9Yk}&hj3rPV%PzWaOKqGG69k9r) z0?g{b3h*KJD#!_9eb?+lsD?3 zf3Y?))ELk%LIg^+d4);11l+tbW9L7~Br}!)4=n^&ACejY!K2W0({PGW=CQ(77lfgt z|AJGw&@qXm$lTV%CDP$+AtAR+bN*U>iBsO~C^Y7}OcP!FAC;l7lvpWe*AwX+3QQ~? zG-8w#^!P5<*rx+&x%N)a*%!4&Ih^^EyGP4A1nj!w|JI542unew`2U=>JiM!TG_v~e zGUKu9Eg+h;>reNm0VuwUU1`c*=|WLI{C($x2%W=Gw}<)pKhI~ZzYS7zfhvUSbk|y{ zT>h9>COzURa#du6p~Rr}rKXxJj!&w!ri3~RCcb}@?3X(idul4xT5DdvqGq`wF}+rI zd}}>;q1s@hKvr-3wuQw8Bco@`N8>yRF%7c6$sraa72J+04wF?WwifG)_VP2*IXkjF zQ)q%15tw(N!>_?>TPk;WMlbXT;CqR|T&+LDBr&qie?}pg>Locn)tP{t#>Ges6paZ+ zS%g7Uo$CU336LGqGv`WlqGf-yC-OgTwpY!Tn$~L6jdX|L>KKmH7h;!qrJDmh9IJll z?-eRaS~GGid_j-OMB_SH7+mZ;@c0B`M81%g=lo{fs#}vid^Y_M+pUNxjj8fjXf?cz zT68u_!>ygZJ%Fr-+JrxLXGv%figQSy+*Hi|{g_5D((L!sS!!8_D!Yu}+{Eb<&5RoFIiI1U?_SjhbO)Z>1k^t$`UUrs?vjnoQ0kXY*tTjmkZ}_#Y9%sw0lpn=Fidr zEhRl96b$A3+3$4tWtWMnbw|fD+-4%iP)9BIi+;+THK{BMf!9XiPx9D1TMkS(@i0BV zy~U3SdmfZn!kzpfN=IuyW6xJ*jco6^FF;5%7@aHR%x37QD`>yVg4vL&mrc|d9T8On z?WR>4f-zj4J?@*I(T~edaE!UdZnvZMApUhJlo)yd^6F-Rt++MJKmoXiGd zc2_Oj=^w3XUN}gRZ?_zG*FLnEq0{A4kLns{{$QA!h|ENHBR5Rc23>hb#GGg$v2)Ksn8t%FV z#)r39w{C|W=Ya9`r*ZM0v_xqhbP{usS0M)u0*J|z3I751Mft>?3?~pfGz=&NESPF`Ps_nb&OANSC>-;OBsGqNygK23aC% z(l*s%)Q6q?M}Ut>SO=KlXy~EcE z3ch?h05#Non;?Kv02vnigr=DAUy}F$(Zstuel1CaV~2VuSY*R%3PYI)-9%o;j9}%v zK3+_rYzFV~KWfmrZ+<1cp)|e87_UqqqW1-p^lR~#uhxq1{X3xDh_x2CG_}r8p3XwjSaS9>(fMc5HSpb^kQ;*7EPV;YlFgEkqF{bMKcM|FdPD!}DSjUSfNW?+Eid54aQ6hjX zWY4e@dC}E!l|b%@UW4dA;NZUwhxi{?`2Y2+iT|@YlTnbu9X};&Z$I(sNo2;A@JV&j z*^ulE(+zgen})R6LXH;veT-xd7bH`N3enaCdfyvx&H7}Ry~9FAtdB1oKveH8ru{rD zCs12dSRT+z!%%NF0f4xTS;J-E(*_UBZ%v>t{}JD;)eR~qx^9p6!8>3$-Ri&W6aSm} zLBvOf;yNIPbiscfhI5G%swXbg5e-h*_3zmSQZTsL)O0%UZTU7t7J{Ps_vN*s)2|zP znmdYFB?=a!3%0x&~+GZh|!sh3&vH)dGbq3F!k}lTzXXilj3a)2}!_SGhxQX_uRZ;fpo0 z!lhnkZun5YxT?DamU*i9<`e2r=&*lV>xg6>NfQxn^)A;4%L?fpsg%S3VO5Gdca&eG zL?dx!ACNJdHhVOQ2ZT~_V)?tZ0q$^(Ug!fZ7s|t;$L^47-JHsw3BpA5y^FFqCjJgD zd!`I*n9*23$TrVhBFYMV^_@M?k7e_PL0SlNNE})dFY*~iJqBozY#Oj6>HDItyNaN+ 
zb$6lo2a?@Jx8%rTaAI0Ccp$D`=uaN34!7d?p+;qo6!3CyuE!JKbWRDX0IHz^r$IwH zdtJvI!0~e?E2j`}VFOtLnUGZzFPXLATYvQq%vR83Ws%>U9Yb8w^^JzfU58Dq?Y(th z{wV(ukhgKN%!V50B62Dn5}U@#)gD(lU0`a6`qIkmqun z>^Eraqzf~<1-d(I3(3IDNJ?I@xDYt+NySnnF<(=3w{}v?8_t`$AzKiv&rX!98*4Xl$n0j&I@Z`aeTIC?ZGom2+Oc) z;M_Pxy{g|TMwW}K8Y+!N(*a{EzaQ#I+zSn61P)!HqJTOlCE}#YKyRK)hNoDCF@}DS z6yPFLlxQAf>AEA?G&P#ZSUwE)FJKm$TQB@Q<32d`T++fV%19bf>bgP*GA5b*(11j> zT3N`%(E?sKV!_XlYPDsH<9ACTUNE_HNs3aQ-L5x-H8x{dStr_Wgv(e^yJ5I%`#6Td zy7^ZaPDA7kW%;Y8c4(P4aWmgJn2~gwm&+`}Ty20~M?-+#3NWasJG?DRXCKILCGY%a zG)T*>HtS?%;s6g4{q^~=?sxn+uvc1Z>&P*-K<2?MDvLTNB_~c#OhU-~k5_|1gTxC; zeHdZiJxcL|#=G{vo@6c>rl2ws+P}-U$#k;4T)!}=5aJtRzXwO~R&8-U8@zkSBBh0A zS?4Y1`{3?ci@i)m5m+msV75XEpa{%*00#kp8ixtBB|mQ2?lX7>jl8^WCwxsbP7Pla z#J#KeI@nJJ5+1iF2I8kVn{VaX(g^;)Q+)oh Iik2b&5Buf@B>(^b literal 0 HcmV?d00001 diff --git a/docs-website/static/img/adoption-stories/adoption-stories-chime.png b/docs-website/static/img/adoption-stories/adoption-stories-chime.png new file mode 100644 index 0000000000000000000000000000000000000000..4c17b1628a36f6e419f30cd7f647f11452ad9cdd GIT binary patch literal 39983 zcmcG$bx>SQ5HGp}0tAAE0KqMRAi*U-@C0{)yIXMACBfZ-1x;}G;4Fb)!9BRU%VNv! 
zzC*ryZ`FHs>-}+Gy;DVTcJ`d^nV#vM?wm}Nfy;e{SkQ0hWXgN>IbG* z6;^BI8D-^cIA43MYNfBTeGm{^cRa!mDd_LGSd`HJ%|hgW_M z44wFmq$g!6STeYd+zWvXl_|f8CYd8L1ug_mF5m$jHGPeY&mMuODzx!aQ(6X=`W=_c zpMx-4d*_}A*gN~JU%Y`a=exdU0O3EL0)zh0sL}!>0j|I}0%Meacb&7CfG{4eRXa58 z|L#tCAF=*-cSD9k{NJ5l3<&$byAG*9VEhl45q;;Qe>Z_u0J+B>)sy2((|`sVx?&_K zm!}j>%#h5AF~2~BcDO<6EHOH`Z=G(bo73i`Dt_DzFc9`>ZlRhB zv*D*czMkQn2dW&Z4{rceshXG!KULrygC8auAqK|)ZG8xQ>{aa)3!`0|2c{VW`t|;Q zv^M4A2$l)Axs-Y}0PSecpwytR)U6*oUttjd;+lqko40#Jym+f@QG0LdVyJBIj|sUb|2*Ghglu?Znye$gqAZh zt%W8C>;D(CHX)AUmgx=oDuP|Id5_uiE z<`x%C%{lT#1tKQ;y%rZ8SbJpaG`M0-!0f-~FT{W3^h)n+JwIgV&vui5E(+*NWg(ZZ z*OuE40Wua^5I!-$;XV+AEjDrqVoP1`(G+E&t#EaPh=6XAk72g@(R#D^*r2Y!JG?+6 zd}EY;9r<>cX@0|JRZP$qMZ9G{h!X{H( z3gXsT|59U|lFpfy2r6MUix!=jkRJJ*N=p)vgIhRfXLni;Dd|&$)Mqt;!E$=#JTVm05PSPENb#P{iEBvhY zGv(`~{GD?aDN=EK(__dUw_})1tgo>yuF`s(ViFtJSBiwei;o$ldztg2#xcKJ9(A`J z!q70E?~CS3!o!Ml|KmhTRd-&>WEmFyJJvprV+d8i| z7R9jBq^R3cZ-0@9qdl*SR2MDscy!+&6KxD`G3xYc!8;sEis2}TRW`p><8zIPagB+) zFKR9U4||=9z(!|g$?@*ggaIu|iNs0L7i}E&SR5d$P>g)Apubec2(-)M zUhwbq(u>+Fa^5{)u#EymmXPUDP*`pxU`@{XTH-M(482KaU&QWk*G~(SpWWo{o$wTDwsyF!E_0ERN zYPAgVd)grx@x{h0?9$X9(ZUzw`iUjtYv5pZs5Q8~`zGBH`CTGlg7(}fwQmdZhW%b! 
zdPk04FZ%RoIEb!$K;^D@j*f9EV#=9|;q+2qxw?_djv!Cn-s8oBb)T45j+1XlVW`fN zi>J|wqbmD;UYgdCDWUt;G1RjqFXi_OD^&_%=u3$1Ho-Wm&Y0M~;n@I&C3?=e+A4|O zw@gvt#s$sRxrjRRGUVm^unr6w!D6JG5t6|8_V1!?`^)@>k-K|T>0NL|%m>tpoffn6 z!I89gvxIe0c5}}L>zu*%fC{j^U<^ffV& z#pP;6R*Ut{t}WBvmkQ;vO19bD4(M-9-mEu>9Z=L3l1-7HjQh|OjY5=a_NZdhk&nb) zs3M2;k?s_^h^S}4G|YlOU{|}{_^pVG6IAec{9sA4`s{8#)4srBo>XaSecdX@eZ-Nl zxb60SMmoD9o@L>g%k;Q&*@>pw#-{-rFQ-zNN8cu+F9Whrpkllx#;zD@97o`S6Im{# zmfx(4?<6=;Y*(!I<0W7Prv$b55nlO`X&i^w&`-9m6-|7CR54@g^*!8GBYAP(i3QkC zN-*uyxXEgmzBs5LAE(aV#U*e52)kqF-iH|fKn#v3Lyg8CvA#L}c^EYlTaoET;ypb+ z)5m@taX*2X`17e?r7?yqQ(o<{P27N4Z~9kuYO^-$B=W6(^WBc->O{KaB##oYmVD5Koi+$E+`O_}##B zM4z**9JI*mLcdO8Vqs!6LUT-C()|d$V<#&{PF(20`&yO4-|!?(D6`jnDfi?l^y6+; zc8^V*0Lg2y9*VNomK4$L2((*z@j_=Ag!+{s4nt>TvwOi}@z&i+jh6~aM0G5WK8&Vq z>;B#iP|KJP&El(o*3QRgaC?CM$orBsmmq)CPEd%!NpPmXleaa9ya?Y2jb51f$SzB- zMcxqAf=Pe~>=ZZMLSK!t)PYd(bEs7g9Y$HofYUJZhD&6$7EjAv2AO$<9^3hXzm)Zq z4Ya>DC_fmUv$+E~{8>mgKzn|g+Qs4nNl4#W06DbMc-m9j>JDQQ|A!;aWNJ9c@3 zHkm2h#c^8{xgFlm-x%Sy^&(+>Q;uHHZH&#l-f53CLUu;7K1u=W0PO zv?BQlN~a^RGr8>tR`OXvBJ9?Fz#y-mk~eBNXJWk%buDOCfEqo=T?D-+gs!|4odKh1 ze=rTx#J)RD*nbcHHZC*bus^diQ|e!`G^75~bbGf#fS%mKWToAu+9&l^CCcSID`zGxKnfoOK}BfY=is?0IxSZD7X z)@5+U#~$_WvNQ!{*YcZwtSVD3VI{fdfUe_|Yz}Fg26saXr4Mc!!WS`4``QV4wv1p4 zW=fp`+7wEc0`-iN%9^5Qg?=JOJ9Cc+XqM=YJ$thV1m%NH$=JMM8!!nrWIv=G8RR$ zuY8YdCm9ZQ`k34J6DOELQtK7qgd=(dv@`ZX??ZbTMUh5Q#n537X=K?Hfo8;0qtt;F zc&rb~qYqlvE_LQjmF5yT_`xAcjQbSk?o=SLdKDz?pLL^&f!`C&um>Pp7i&uc`#K{p%^X5YzI^2I*t^1*c<5+^;7!up5xcl=?ctb-(ZD zIzk%~uS6pOmqB`u=$@(|0n7mtjQ;uG0zZ z9M8JZZj*?$kF?VNE^B`Tcn86wDR3gLlyG@FNxBX9%b|tBZ0m=q2}cK}COu~#aH;TY zWAOL>F4-l~SDMqw)aJ+5KD%%=cvIaNP;ohJ^a)+7*~!px^xl0Nq9K*5QH8UPLB)s4 z25L+4Tn%hn%1?^3mmT%rC*}=I=rgJ#Z{MGD$_13FV?bQ$`@3vW9?6FF z;8Iocc&t0SCNu0jtD{8KTQKNNX&&ARz4f*fHY$((gngiPr#ex zf=mH=#I_e+M(^Xm^(Ei1CW(xt%>`Gdwxbf|t*la^XvWsd z!nzQ?NjZx4^;A(h!j-)&WLp&>{QcR5T?%_FMamRR6P$Flt}loA)g@SQ+C-|yzL6EX zxp&JGKlw*QOdQ{M%h=Y7H!D<&O!tVdt4uuYhk=iue(i3-UbY|EG)5gYxu(oleL!yFBfaf0^&8aRVZJd 
zCv5_s<;}+T7hGQ|bTwr${t@}mi&@TEzmv>o-#DlK_{|f6kL?PGu{-mTGOX?We&kJZ zy)YvDV_fVDFpr6pg}VPY>}&u&Pe`KuMC^vS(59nl&w-ih4ee^)6-H*a2&^hR+=vm)(T)KtER~uuPZ7J|FPja4_if>&{us$*C6}a<6f`6h*@*FexofNIDNRn$bx{v&a)l;bz+e{aRr5q z(vh;=C4(tSV4km6eG@=v_HgBDV^&YR{k}m_5_v7yVOqgcjH^LyAFKso(yhB1zr~Kp zWi~+1QJhWGsVSVKS-oR}##2N4LCs*SK~^gHwy_5WH~4RbM|%5yDGg+{1|E?XCz`2-X*5 zw!1=537oBTI2uL@kl9sSwa6(TTVlI>GFAV}k>twke%{Je$7nd2Hm|!%n~VD6u!!iR zQzTbt*t?h=Bsc)Sy7-Au-Cy31-ydIN@-HgKGmEXx}zGnBoy8Ch2t2=JY z>%PMEy5u%n0PEo1GNAM^eTA^diuRz?gKK!mo$ovxTg} zZR1yUjHNLedC780N`kQXRkv=E<+_yqFiHX%>FWi6gS9}4&~_lJt~ofFwQefRf$Nk(p$r{CJEq&4pk zmTLF@KoTWFV3srbea^VgFOje#y-u{1M6kxy)b5S-*FS=^$|(F7vLvwsQe{(u^=; zQtFq_k|wd@ymyVT1(Q`Wnn%Yg`Z(tY5jR^R5zlejdGd9ySCE$T*KYYU!&2e@S*Erq zzrco03V(~y&vjo|?PV(0H1MU21da?{D}i*;+NXW0M#OYUuzb#4^79EHvYuw`jbMK1 zSBY+qyCAn2d$cLBV-x?Gq*I0obxf8G{{aGRCBZvp6D8`WKVn58GT#jSKiEchGFw`? zka3Wz7NBc8lA@$u4KbrphCmnmnT!;)Co%JY@fPI-=w^Fwr!yHL2Mksc8zC2}&XT@% z_K5arGdr(bqfm5ML1jR;z;w;gP12S%psnF~za78$vOnGN8<`*QT9+q*pU8sm zSI7}+-(rl{*+pPmHYhTc>Ld+XTAsL{bp*)*mRv8i-Yp%2xh1}rb=)Y9imR0DnkC(7 zXT!x$)-psiOCTq|JvTc7J0XkbjptDq$A9)(d_6tW#Ah7?&ggGYabMr#+q&k zwamjUPa(FgR@&rMKH%-4EmZ;wX5BgcFf94rtCGN$s5Tv@T2e=lgts%&1&jY<0 z^|v)4YNBUn$AtxT|0rT^=A%TROO)gVM=Bzhk#HDf=)mJZE(RHZr2Mj|4;$wd2-8v` zGhYhm-r1CvO8d9$#R-oHv;phN)fB1d=kImJ{GyqcnIZ++ICX<6_ybb273C^PfcO2K@z0X%>OAaD47!!zC zYDU!ZIR7j8f3`)OU=CzABJO*coYnS{0YO?R@S!UgHZ)zkISb+HX7SBTzv}r0>!rpf zHs0frI3OAP8dkfL`+m7~M&DSe%GKxST6s_))+S(nXMcq&`rE`_;5-SFL2;h=-@NM; zldCMb0{R-nZH?A&9JuzojQ zQ|MNY?FaF`dpEu}Nae)mYZOPp(~(oPt&?Q%d&nr~9^P&C%v-wXiECzFMLqRbCGN%h ziAl@X`EK*Zm9+6kU*e3oE4+OX%uqx?g{2x;&}+Lbvo&`5 zTeu!sK|w{JL-RPniEYz5K%8bzdhXC2jfV_v&rPu4dvvIh;kUGdlksX+cOTQ5{N-bk z9KTO;MpgPYtJ8}vU)?;FL>NZmxW_wX$ycPOrG@#7W>xj(@dio%wqH$fZ_BXa^gNh| z&QV|?aU@k+u$whBBnekj_*XNBg%zw~R=%55)e=nnbF|B%x8SS}W=l;_2+3{K%7C;| z`djGLyX}wSpnu}|C}4fz&d%fYN$5D=#t0k$hy0WFje)tevEsKDKSPDC9^9&ze9&y- zkNc*vCcgOoGScJLG6nRxZXah4*dOQJHZRYZZ2Vkk^mUB*^0RsjWSh`et&-(Dl9Q2F znP~`vbGwXNk3K^h`qr)e^T|aWnAyb5pAOb6`&gavmaf1 
zgD7U*TA1ys`ukj%hg{mnqa#id?J0bgZEZJaKc$N2U1LoO2sI1-c~imfj2>V{`*?&8 z7YOSSPPyA-aRUw2?p)Jl2b?Omo^FZpzS~}9qu1hN9`~||ac=FEq3E~|lMModf?8f9 zhf|$sGx+}Rv!qh5s^Aa9Be4hCM@0ec6^; zLU)nDAKaBkH!sCA&Te;WyDHxDy;;{N$M%b9&`F zYo#J?2~Nt}kNfztj@QEc;GXHxzm2}CgR0wR-ywM!BH99j{GAUbG%z>PDX>AId|az$ z6U53AQW6PHC`+0d>CGwbP$1kB9Ag$mz|((ah?bNR*V>X=2hh4|@tt zDWEQtJdeuHA{k0D)3~gA*poN>xvVaU9t`-G$}1@UkOrnY$zuw^H7};G5um|8j~H1q z-y{@%SFq*C`#!P|!||tZ5`8j6%%`1~x^3ubVCL-T-L;@R$)i1)DuqVaD0{1v((Gz! zpyCYN_H7);z`^KttF2iJC|cD3=m6*o%|sbK?rddZ==UOX+rG2lA<6*eQQL4HRv-p9AyzZC2Ah_0jcA=vuXlEN(VLtRL=%iZM6&0$xO`aPM?cm z{;d@xe5J_WK>DLV0pF)tCJ#fPCz2j1uQ35LU(){b(&L~s(7~}0QJJd{ArI`Q-b3wI zEWSqtRxhcK9P^)4$GTD*+pJCSvD22N#>&TR@W^ztCQM65XRH8PB<7`p{DntHX$pZ%A z>m!d`wu4+#nYN#9UK&I5J=^pai#faYCW-A5o+6NCB9C39~Y3WzQj}y4G8N-&H&Rdw`~dCTsYErFeev(>0iIF#t1SC4@f9yKhQTcg12kf^E z8pOttz#>4~nnYWcH#>vIy;GS;Fbi?I`-WZUf3!l~)D3-Io@1`l&Pf9CT4hs?yS3-P zht^lMpZthxArqceOYJZ5s^J}#mD%_dngq-y8WEKG{^j|bS)Z&)OtU2RdKxAAPkxuP z6E}aWUM?M*ej1n9gRJT^<-fs5q|Oi4F5;EVw+?g$!m0#iBlqUZYh^s{O5_!MvLnUe z`II_a&}$q{#0d_V$)UhbrZTU|<%#J;jKutXVk^xbegCt{dx=b!xA4q8loS!mm6koQ zOl3VvK*#4i#4r*XqTGTt65uu~nosEu1w13!MQyd6ju+pl0d){_Qbqn!rlU3n0?{st z2nz;w!Yb~ApjR-*q}(*vW?k)RHnlR$ht*P1Vi`$7{Z+!p2GW0O(nmr8E6$UA_ej2* zqz_4ef#JnvxV+Q zC@nvDZG$_M&c4@PiLz$Hk|){lM4sBes%XK$;m)r>#9?1r^e#a^;C5^BE?Z!Zb_cSg zleVgqQG+}xV$XJdnSxjXH*Yy^^+^RL%SjtGXDQ8p+yLq_(mtR7!hfjCI89=Xc#Biz zvu(f>b-ltI%eT4R7j?bFU~n<}3Y86M>vc|QgD7g9jBS&9`HahH#CAiiX<17SwR{h( zu!%;$p#4_wk*s8LT&8Z*j&64ur0H&u6jG(Q;F(`;eho$<*oPOKuQ1r<$=R$ymw!<` z6%KqGdYd{j5`-B$HiRc77~%Im^!gJeE+uQ<&|h8oj~5<^%94r~n0yNm1WzBanyq}P9EL#Mx3*D4us5k;Mz+TPn&LQFZn?{ldU)8~nuq6rkJU^48B-Y9OO zvQ95l&tO)&)hD}tkSI%U1@c~Hx9DeOk5`a20`_Bt^p($I3k}Vpi2D(cCK;>}SZ@C$ zkbB+DiPRa-3Al+3SEN?3I$mr(Pj@#;;bt2aX;((gS*ZYj(hV=jbGxh^;+&Ta!o~SH z@tBItaPP#0(AePg%x;J&s)XL_($QKhW$(%orUUa3l4i1KT_QW(a}RoMyw@+#z9jgK zIV

    ATAn-uZ5nxh++8Vc6f+(N6m!w20Y-@5 zJTLTfOPz)B$lMI$6WW(Jx2L>cp09BRe$Cny*#5IqWO`$u}YBTGIFM;cW{K*ya>X z>umBHL;gO@stzEXD<^D?oHMtzu(D*?M)$lk7U3Bir`fZ@6Q+xMcOFjR{_%J%Fm`vG zqn|1vi3Nv-=f36q&M{I;uxoH|Mf2(?KUXC9K>#q@oT`mBDpv)sl@+#jw@qF=ML!RH`QIwdQt?&%RMQZO( z{b_asR|j|o*$bQ1ua!?=^&Q$ITT}{rJfd(IdnI`2aLGI)sjX@-R%njGdvnX_itny4`fm-vpN+_%4p zrs9#UX&8K)W?Y&GKtIvtVrvjk5~1cC`77Kg^x_27N%`8dWu>m+x-T00vEB2>4G2`M z&qt!)lnkEdSUXhGf6c~mZkL2(6miXhBY+f%Ag|Nyv9fIUm`}R(wt1~+_S5RiZqa(U z6#D*5KHLC09T8#a?vJrlG^Io&+0At_Zj^4KF&v8(0-Y~z< z%R&8aX)sYY0W2a+8x(fYdLpjbf3$K;S#XG7f{*f^^J9;jw52@~4u{uTA2+0vC= zB0Kb#BqwRq*bgBzZS@sc-qpR(hg(obJk%)u?F!62k^q_mb^iJjS?q|aidFp_x&oNw zE$nK#h2I8RFW#A+D5?>i5V6G6pt8Ws2!xsv)1c6w23n-FGf6JiGh7#~EgKG58Syir zH}%QHf*2KyE^hkzcJTGtAyqA3f}#BTJ^qg%b(;Y z!{%j5Q*+%uMJWfrxtCQaHB+AReWA05#QgTFLbi(5reA;2$%gD#2Awu?Gx!~|f#~vG z%&J?4jZnwU=kyL)I}?}vA468_^?@t3qKiF)2{=!5L>%urAf7yy$H*UUG~~QJJ}1&n z@4d-oV9V?ha1z?NpP!3oz^Y%G@==v}rv0z7^u6&juUm}tTmJc7hY$7@ay?}N_=1H)%FA+3s#N|HD00`b_? z!V5n0>**`7mEDSXHr#cIk`CQSHRR)83McR>@R~H7#C{4q1rESZ2bH0qWy96B0NLd_ zyjrTASNVy|P%*kAaiI6;Zp<%#_${^$KlJX7q3{DDDl0i5ON~I&XPZ_Xs1RJ}lrxh? 
znvw`Rca`?NeS7d^4rN}!4WcP(w|%f@XU=L!ONsiN(}Yvq%VP2xK}fP2zKJ#RJ2ct> zb*_sgBlT|dWU=j99T`FhrT#PpPFF<|8NI$@737kW)*L4Fe|2;X^>thc_(tERHd6A1 z>+J0NK40Bzvf+dgzsY@8OyUh3_DZBp((=cLp&G?zNDy9Dz<%`XxQLeEy6q0-N+KNW ztUBIZc09fbg!!iDRIe7d~x=mwXy$P#l4?K zl6aG2968D)MR1<&*=3ckbsnewv+=}FQnDVm1< zrjMl5LX2}ZdC*4UWf~v{GoWfPD?7O=nKX$qS=(`~c@b0Vxh6}9gULv!zFb2}NzhPg z*}*_=FQbY<)M==`z6h_7+<$9oK39_IF>Q_0$fdvvw+XqJcGH_)Xv2gWwUtE&{+Pta zqR{5MbmoEEc51|q1-TP&ww=BlN0uu6wCy?!8#pD7z8f`qL~1-8>+sdgvm$vetlUIg zQ?zhkT@0p|#c?UZ9bU@URWWd)XcOcRK*f#l1a|j{fXyY#)vbEpahz?4{PptN+l%e0 zVh+(60&PiUDi&TxsJ0l)`RUfwz(EKgSME&KX`*v`;4=%$~m^5%VVetEH1^5m@QYwD$~cM zp90cE6Q!AJ(Tr}Zb(*ND{{ zi9dGN>?w?!^nivv~BoL%>1Y|4lR7J&k%^=su@J-V|s{^0~| zNl3s#J&oOIJX&wOs*~SE@@8oe6Y9cOCCWAji{t70-M8%b)h|xp5(P4$VG33aes%?s z;09%1`+tcKKj-6Eb$<0>A99+Dko>5Myj8A#RF3dXtVzf!RVZYs*_0{&U2hXG;|X=8 zv~RezXU|YvZXd%hgZG~ddTq6pfay7*SHGn_$i!!g%y;=U;|tWWTcxp(Dq6AE>8TPE zr~7&yUImBcV`~d#8#?5TLO3r%7GATjj_p#o7=>SwEj*LAq2KLYZ+X+Yf)EOOAxVWE z)Qi%Vs;b@Nbt3J3Nfmv}Hzu$fumV_7HlSLA8L0hzGDRRo!gTOuU>xer>1i#F8wF+`U#{Zr z5>+Qq(f7X;xdCep*v=?&6e)pXw)eYF8`Zv4KJu|ESKQ8NP~3K3e2KAg({g6@EXlvI z5UzOOixZ@rPQt4>_nN+;Y*TEBOyKQ-wE&M@dNjGwWK>e98rl3XAEJVj4ca?8D$>+_ zSGcr2*t9V}h6t|M=`VA1B&iC;jar0gY!+mf@GmvrtqKuGwplq3`KiuqS8we`Wsx}E zeA+LGS0{68FqKzc;kqtqLc}0JYcG+g(9--zoqB^-m0H zJRP^sEn%>%WPMAxgg%cKY(@UJc4f&|6l!z>R4J7fBky0Spv?T*&x^sgD^gHeszVaG z25Tk7EL9w6=fr7j^PSs95v<;uuqEGo*6Qg{ z70lwK^(fw2^ww)^wQXZ31@?(nu;#vdA#(A(X)=X*N?M!mGj2&X$|YWX-^eB)>}li! 
zX3ovedB2+x+wQ%H{})~geWR~)%bk}Yt=(P;u2*j||Dxkt2tL|ydS~*S^DLTWX!r9$ zVYZyqN^m^Y@3`2$I^r5Zd#M~GCmAFxQ3uJ}Q_w(r!8a!SEC~P+NHpM_F5~?~c~1(> z0i!W@l9dSbkcA{awP#Jo3O5h@`qVGfPIExv7_7LNy1*zpi*L8o`u^KaKBObt;HX%a zt*_(2!sf793)ddr@;hvS(X}CBAyrV2MbYXq;-tVv@D-D&$dRW`j_+RBv@qTIaB7lp zl^R!_b_kI%?zg z5dF@IN#rd8>s_y%UU=g>+);P}uCM7^>2+(Tf|W_`22B1s1bJEqjvL+q%!svq0dxX4 zCKvk(7y-%L^|Q*`gK|96y)QJ*$tPQegxTrR107MZ_G0<8^q3@;avGb z)MxS6G`{99pw}y_JpYNZ4Ex3RP_@qj0W2t)1kR8Sx5fVI6bEB7@pnqhSE%WY#7i6- zC%r|$G^cPEf%-luViggCWVnKyRc?mZrRHZ2mkBeqGG?KKRN<-ZjsZOlsYniz!2 zwJrp`;lZgrF|ZYmuIAMDp?LClC?=*Mnn-#{um8^0RhU12lPI1~U4FxO=#?|K zx=6Z}Mx7D#^;s-a=##njU8y~rHq+jf)@qOQ4Q++O5INu&Hm%|6W=I_?3wK19;i$qh zgW9gDtPl}q0TjVbaGZYiZsjqTf2oE{5&oi(7_)W+BbE=aUSfnV!DH+!kZn;G`Bz^v zVZKaRTsQ1hdcbj1EWW%*u@5yw;(zqXkH+ai=Pb^{08e>iVI@^RnIqvIJXAxL8%|`n z6hFDk(qQ_%A0tI?5|cY7{LVg%;Qp|#vM5!5Z%Sn6QUs^|aQbn>S<6*rDcK1>;wbB$ zb|Ziy6R0KR>teX#>RaJC&T%GHn$;e0&WUrt*J(^VeE^mLhYgae zk63{k$Fe^A)DZvcKkNRDWVB;a2jo(_+z!D)?XK)8*}W_lxyPUQA(|oqiT#=Thm()p zpQkJx!V@<>EH5xuHXmsuSEXK&HQvTCO}$MgaR41U(H8=PVZZa)3tP#ZujNX0|jap;siUwj^wU;lL(bR%coJGV)$FR)i!N`!4`_*rFdKLp%rxH*!_vW0yTIVi0; zG?mcaLjF@T{pcLvBb^h&Ajf<{t@bo z!>qP6zHV+A#w_%>8ujzXsl7bkRI+;oA&ka92!1>hUKt`#ZkWcB)m5yHiZn*kVn1>Y z)*Knv93jHZQpf1gbwA@~ByWsS)%uU#4raAW?yt#Nz=9lQ(BXBMgh)4!?S(Bl>WUV* zeVrV?c(1#oi?j{TH!3SrqIQ@ATkW8*#|>m5N=P2ClOx~LfTT4sblSA-S`obBSfc#G zh@5}+~9L)Sk`pEd_~T<`gsDOxD}JS>tI*H%RWC`Zr_N#(`zPgm0L zo!iEP__Op#61{fk(EZKrrr;2f&vTI^jO~VYlHtt!AhDNo^bVyi#T1vH7HeYGyjS{O z?u!DjIx#nEcM}9f8gXZaZGKUdKHZZvPFuAVQKRU$y%1lJ%DqAezphYc3Lv zpqVFa*CBXNqhkQi2om3Eyc=~5oL1Wr(U}w?bkhx1N;h_jzovdT_QPBiY zt2JH<=(rJ=l;%icODNR}kC zQf*yQvCaY`_n|v4x9H@?uJV};n5RbcIHQRKe2414Kqra~04!uL$F^58a1(z?M+*~P z4y$oS3GA%ZqLeof^jwDxBsW?wQYx8vA*B$-$VH&r_~ujK@$J5AM`e^0h6=;weqTwB z-%h9TjW7net677AgP2-A6}w1L8l=b1w(gA-!&N#E(yGO`cdFBj!0y@ioEJfy8a@wiZ< zx3?vTL0=Oy#ZA7$im!J%Pi!g`Up%RA7{>*2g(uc&T5J=rS>BIK0OqWMh5Mt%%V-!m zH+JiBg42p8sNXWU#O!;)iSq*ntw?^?H>cy|z;~rzoAvSpmi8Vl4S;Y`i5{rLWDG$(Zow>5OO$lG>u}*T}xC8)stV%1;qJf5FGPq-7HPYbtw%l#5 
z!S2(XfW-&c2CV@2S&++*?k;yIf$!EEJ1BwObWMk9!msY9W#6FwAs4mN$~@Zzy}`p4?OO0Bki&z+ZKV=FAszM6D7>ND((voc}npI&brv2 zCNmokt9yV;oMJrCVh3pDDGC!=ns5=ekA$`-KM27X(9*zj+RjJs$P|egvUEnu-6_Hc zfGqy-0r2(1$^fwE+W5r%LBtGtR~j^liAHg<*D}#Wb^v{e&8n>GJDjtAdc8X$Ppv5Y<3p&Y)|Z~fF~RLn)Hu|JQ*J0Y!UrfN zrV8>i08yvLH~y7uma(oT%F=j1Wu?#W?#20~%HD91RmJ7k!*i#q|MNLot2?PFwHz2} zGPBu%ZVe3^Cf&cex6)fKxaJYt=!&p2LY+5>aG z`U8Oq{L%p=H^Mj84MSg)#pF2J{id9ciQ?V!q~nC$!}No)9zy>`>s?Y@oVrC44j$B;fz;^ReUyKrOcsi4y2y_$o-9 zVx$;6>=QssuB~Ax*WR!zvoFV{ufl5M66VuhO-_1o#m3Y7{AO&m`8~tc{`)x}^WAGV zA{MgNN2fyj5ZNj4!xvNF*SwF)Suck&Dw`6p+YMdqo1)S>g4Ul%hIQx7c7FuD@kx`w z53n>z>`pVZONXARWG7P|o+JskdZeZPLiK<2pv#5-1a#jgV zt8m?`RdsMceaRdu1kGZ~MZgv1?>0WIVCXtfhxMSjslPj$^tIT)K3KXB&qRJ_@h3G_ zMJoP4WJF#gP66=-^Af|3H#F#<}5 z^bkWxDH5VmBF)e+beAZg2n^jNJ#-B*#Jqd_zvnui&xcd0l@hJ;U5Zm#n)QqRXtAh#a9}y~W@Cbj^?~4e(tlcU2R&^I zme6n`XwxRM0mmpDH=y~BR%bX+R~tWh+CN}jZeY^y@?qGf2ew-3_0!chPIp+M;}0)l zc|wWx{|i>}_;X2)ma)Y|M>2W&2$mbBVBnv1NfE$^fTM}Jn4eU(Jwxh?;aR~ zR?eMVsvGkr_%~2n;Ue)=ayXEz2)q93ze@)Fl@esk@6nb8=bOY-Wgake8^={?_zlfN zp(gE!{wMD#>0gD#*?yQ5d*i4p?XYKAX<*`0UisiK+R<~ZY06?rvg75Smi-qplOHd> zH@bX2Yw^{oakp)Cq+d?RvF%4Ds(232O6($!_&#wr^P# zlaZ9bDWPr6N+;%GxpQ^w4?ncwJ~DT6Ig583y}a)!&X~xQGdx||6AsK-z}D3si4|;S z9|_#m1W6TrN=0~cYDmYmHw=e~VN)JSBVs{sObj;yw5E0F_lI08HF#V_u<}$#4dJRL zO>6l@QY`_graKi^>)B?!Q)pgZ7^Qf`?*`cZUC4#i`kXRkOoMgwkv0xNABv?q-8077 z1)1 zW@x{`iEg(!WOo;K@1}<_fxUS`pKX8yi7+=dAxd))kRCCe5p>+7-Tv|jmw(^-{OuflpH??b(-4ju8@jE02T3!c^SdM+3_Mw$gGYN>a;fz9 zWUrpH%`KSLAjtNhVN!M6AoCd_OnuDICvW8O-KGj!sh{hZlUZ@t_({Dn0U;3Iy#p6D zeFByW3!hRPC9y57B7}@ru&Ctl`oTSOd&G6*`ihupv#U`BP$qV%rgTy<(2rn7V?K$- z2wJ~G*vwCh+E;I}M*VKDCxpm@#fPJC6N}n9niDo`LSuk~(ML|1>|ve6Gfh)e3#~h8 z4EnO_G(IcLfbsdi1x@tdx;pE%z_T*uhJe?J{_;I~AMtAOnilQd;X!IOr7E>jW3I58 zY7|^pwy!Ffr|)c+wqn2Mp>x2`=}`z|8F-m+4`Fu>@79wVvXELn+49EwAqlM6^X3UK zrk59F9;uBFEB)x!85|*TX~5p#xg%2bfG|R>)YRrOCDtmcO---}1$780B}gcZI_de3 zie9v*VR2)ud!M#wzoogrZgZFTZVvB0?R_#l!nSa+D>u2^DaFPIyOl%7RfHmDu#^3?9^M1{H&e50BDFY6@c^ziLP+A~h<@6P 
z4CZ>ucA4jDpU!uLch?w_*AP~XZK}3@<9XV^WTrvW6A{xhz`pSn8KAPxxQ|Pj4B1H7 zK!U*b`7L~%v02ZOgw@pCu*^fnb=LBh*;(jUTi^r)ksiGLB$2Gfj^Kx?9-?;d6tnW8 z$=}YWTD4Lda{`)QK- z(IXF=?!TfcRP0BGPDb#usow>Gw!9%ew;_C{LWd^H_P2$3{vpyzaO$2{uc9(&;Yh%1 z77{=ZD*q7}K0ye1=5~Rg=2QYq^4mWf4w%O9mBiGq{+(uEz(DJK(|qqf_uqI8rD8ps zVj5(PQ@|5e)Z!kv3rKf~DoB-*{8gZKfjq64rmw##yk)q@OkstqY0y~QA*%#V$>1rq z?T!G&OpVh7_f&LXFRHf5GC&Y>xp83VawX1_BHFFzeNhx@YAK1f=+#KF!xhA;|MtFLKMrq3fq7ouHZZ<%WNYholSqD$(l=u@=kAlr1p<;Tpl2&chl3sB7e@&v=e3 z9rjUDtV%dMT9y@ivUY$#nIy3>L4sQU4aeQN0fR*I>OT4+iUtJf2WZo&bN!{&>N|Qb zLB3WJDc`o@?4N1ybB=?W6;-N=<2!O*m)~Ma{c885k+L|;Iq5m`c9mwfFmkU$Vb%u% zVV6_QAny3QGso5He`p~>mW*~8k5yVfXxfi@=yn-_6up#bV z;(wLzvq)r+rYRtX$S@THi&hy$V%V)V9VOysvr}m#`g(HkxXj9vbnnz~qiq`7hTAz) ztJ~*PW_BcrVHtlvr9ckBe_o$&?bw`O#;`~&k;%I3I6$Ub+PeAeZG=IAN7XO>F(^+W&WNeG zjr`@VJaYQ7)#a0S%R%aXH4i9k?8d1 zpUe#F22ANG{v|RgKh`Q0%b@O9anJOv#0yH0(c#noU<-`x1c$lp8_&y#5MiC$Oa}o= z!VB5c1Tiu1>cO%Uf2U6MHQS4gr`&R+lmSQG?poas>C^7bogHZqQ;?4o{ZNKU*48_V^ z@A9edIoI(oC}Xfe0TB~#QnMJ6CMzr_8`Zt{=#?VuI)wWQu&o+X##Is6FK-OW-@~)&50oKTy=GPIN4Cl3 zN@0bj!BAKJHxB&*mGpYVU)NIb@E`^MM(rc&PyuNXn{MVd((tV`kD|Pq=42$$yJ-5D zZEFQFd>C}u=W>GdX<@N+6-o^YH-KWlFi3xyk`Hw0+*PVAcO!AsTqZ9UZ5r(A63i}r zFd(F(5@E+C4S`IvgQfRMd*^EU4P@W7U-vDXi>2M8haZY+o;8r@HwxtopqRE3@45%%YwyN0D^sIuhtf zI}j}>tyk9ndrK@MxTg_i!uuzBDu2kulhGOe05Ur7)=&1kwz)++O~hr@m*+~V>dB9( zR{dSO9;_cui-1TP?&yKtGg>Wu$d+=q(>;-&xt4x}d)R&b8yz`B{vO`1ac(H&u3*BM zf9Q?e!PRvlwuakz=H`s%Yrp7Zf&$KM!W^!Y=w8JfhnKq;3!D1yb?l#_0?ipUAE^My zA{P!-D(hEC7Z)746w^ksMd@|w=b!kfr1zR7x^N-D5TA7EPqkO!9khOowI zmfbvushJ3vxQwQ-!3H;1;~fMAFIrfo1NW>K+fG5e0z&$ynQMh!znmv3Uhi)}amE2h zmFd4HY|^*ZZBV$~_)hnl<_wh4Jx0@u{%Kif588e{XD5-`lEBN3KR#M1LjyW;f-eIf zrb;HcEc(LvN>A6ld&Xi&yiUU%M%^c!V=`20CG7=nSnD1y99;+O0Mox&5>9#ef&Sa? zWAQU~Al&y) zXKFT}aM~Z|vOimD4*x!N`({DU$5p1&FlkA*br6aCx@%y@8|L*}Z|pZqtS{9CJ`#N! 
zlYa#gdhOq-FPB`&|7IanuAM)ldUFjL73PEcmOB!@PDB zwc3v&#pvCN9d}ud1es_dD2Oju-6x9;iuxqG{G)GXGc@ueyZ&r4bdo)V$ou}A8V3!8 ze?Q%3&G{}RWqZF z#i&0bTGD){{R0NaGnWtdR8>CDz`9UsgY?hYkW5jn%V9AAuWo{OmIW-cLCEEyn@NyA z>5(#oj|~zU_ex0*uuLJp@9iZWzDLbH&spWSKHqLVW~P*|d*gQ6(3)u3KLd zuOnqA!T)BVi}o#B2#6R;KU&%)jV;MAS9C57I(v{EuE=6&XGahFMAr z7~p1V)%F|Is*k!IWTL~X42Xy!ZTLWxTJ-}1d|Aq?5~E|}*~xgYKug;HSwF2McMVzK z-_C*H6hPtL>?w-HYxUQ_Kh&BBsOF70i7K)P_O%|W;?Wo$(o`?35mk1XXqn&kdle8y za{U_@pVS9G)_8ICV24*NfG-&DG=C%?fk~~2hH{TqN#~I)K0Mn#XeQ-ntcY*_ZF8Os z`|D%Uon7uzkk;rZ`uOOEy6oP8-~T?EJokt=^~=-W3pb)-o^j1-8=4!mUH`nym8SER zCR%yCV$kYQjzX^w8#d?5+cA0Teybn^#eB1-jm*vuqQ?9nwPQpi*Mt_z7Q zWzI%A;t`9c$(FmVol_QXdaj<9tgUr+OSgB{rW$^oVh|B9SBdmt&<0ezcJsL@k5Y5; zH}8V8smviVG&s$4q_zs##DuBy&&?)&A-+mW7gi@mHDGT+@SUInBv{&tDQZcssl0J;f*a2M)m zY;|`!w9#PhZA+S!w*0(yo6o30UFf$in;56CqPM#edm7gIEv;3MqvttTQr*nPWWCWQXV3NIO6G> zSiDnf&3)lLpZIbWrvf!H@;?rctlq$@<=rlEv2W9{^%^>4dctul61uH5Wnvr;;|Xs@ zwWJ7H_QS5vPI3gLgB*YP2N@sa%`++;1HN}^y1m1ePp;j%@ST!g-1PPiyUrSObw3gH zi=LwVe3bN_{rPZ8K6Q3}jN{W^`#!D*nC)R{F5u*fSd>?vJOf6gRtz^G*|E1YuHgWZ z-W^M02$_EixR%-|Enf3n(>zt=e~Bx`F)y!mIPA{bo%)21E^x*s_*eC+0YTT$58gb0 z!vRO*X=ViRlEcScl3n1_+QtSRU|ZM_U961^%yY2PRh{Slb>t9PbLl*B&LEn@>0m*^ ztTsS@dyKdO?BuZo=Kx&aB?WKiuQ=AW(Yhp9NtmVs5ovkYn;vqPqH!myD|F2J=-_3s zkArcBQSF35^8i;m2V@4st?H*zNMC|)cpQDZTP(q7dJXkU3!Ry1pM@|@hJwBRhgFC) zqFWF$V9MDhXS+aoGdRt2Ok5yUIZNrCO4k#tvelr2>7mt?&$8*xXrsEXg`rU~!Z$hD z)Ql6~(@%-FT8cpgUUPbzr9M4_jt6(GB|SZUs*a_sptzz3udXGE!x|B?D*{9ln8DU( zT=G&7@@2GD{dQ?6xF>mT=>3X?h_*9~#fVx@#Jbv1Kh3oUhKQav0BB@nNsIB)crI*` z1Sy$X?Dyy%AWN#;Iilka4R;3ZL8bWi$YXQwH^xis zuqp_~((&8dhfrV0qUYWA)iju4q{at&r`@kko>?mrR}Un^95);A**kH^!}Yy~(#qwL z(jyPAa5nMeHbAZby6dkpFi-8)!#!c(EDItW$htEYz-7F2owZOXBL4OxU#iX=M7P{*c}JwY#bL=KY@2Y_?gi z&yM~U%|1bSv8Zm5v0Jz_{;qFpB{sJPhj`@84<9%|dVR1kXb>~*Mt>Y2Zc!xc|oZAs%^$|?`+@{QQ+ zv6l?OeJu`DDULRVLl^2)qy@h|$H)RdzmZ<22gd~50_&~71yxCMxe%9CI!}-wCg7nv z9~Rf=&LZ6t&T!Yf#>f4UkCn8Cjm_lQkF)R+ja$`$PaKq-wZ5joHx@JsU+SmlWnV45 
zA?U$qQBKGwhzy(ww=dlH+iN{p`^?aqmzf%)Ur8j#5{FP2C1x|_wMoCQSvZ1^%eoNA%6X1~-u}B)tt}_@SMuqvt~3dR&w+W! z@gJEo@Vpd@obH~Xui7QhH~V$e$+~9$Ur5B=!*9GL`2$M~>)fSP{cGj6W1RsOL$iXJ zxugia@{_1PoDO>x1ze^!hM0#)q_>#`qHu71C))yEpl?v9rTMv1g{3}Ip%29U+%Q2} z8R~+)UCqK+s}|njizVDl!ht6SVkk&8>vW;6r4=hG))2^7$SG|rKeq@-`H!FJI}u*t ztNsoyd~VrDay^_8K| zuz*BRBpdVK8XZj%y0L_|k*tOdE3Pcd#1~fu^$*6~Y9=kIyeJ^8h=@+?C=E`M@bEA8 z2qB2n`~cjG7VP>O43VdOndI3p{>$$lG~XZGcuB#!CZbsExBRJS^w}+G%&=~S>!R}B z!e%Av=%)Gi@fgaEDC(j?|E;`hj=m}-+GKpm(dyGB`ScS2k?#pwWdN7Nl2t8#C*-LyMF>{jEjbH1rn!VehgASm3a! zsx$vQWUW$uNiXL)hNY zOJcK~Fn9O&SzUIfI9U^KP;tVyJ#+Nt6yD#N32$~*nOM8o{#|LoxZGLm9&%Hf9f4oU z3BK$Jbr<0iFUW^~5{UKIvVD__RqrF1 z{rfX>88_`vAIpZ=kDXk9*ViKl3+`F0#SPxb+c8T*D|}!w-t?fmCOGFmOc++k>2P@?(E;~*?$oI_X=@MGQ8NU%SYy{;JTa`BMHCU#F_8q_wfoClQ z0vw4VB%Ri}%@fYZF^Ro;QDJH3TK#;+*Y_#?`!*bE$BT;A-i4~kd6vNN$wW7}{5xS) zz$-To?diKR)B8O}H<8A4=9FUR*l(G^p(ln^f;OSmuy$V1V~_QM|QIK32=m z4ZSrG_ydqC7K>w8RII8t&f-4nlcnOC?y0`w`-&#X_x2{6M2BM3z0F%U3PmGX%zbxT z?b%#%CHXryxcNN)U<{IjpJ%QSyl<%y!36db(aW?A&~7#z$_&I=jX}L)mO=&$gCf5!?0rJKBa}`8ZMap^NQQXsdo{{qBqUbJXM#<>F#A3YjPbmgS_A`pbh9%dx(E{@-d@WLbCl7e9(gl>_&hwKnY`L7!Cw)y~ z7Zi&X74`jRGj^1`NFf{orqF*K=D$VBEYMq%w!_UQ6V!lSS0f+f?0j~O)@ifep{>ET zHN+p}xOvj%fd?~$>=~BA630sEH<2DU^<_CV4@z*7Qj^OqQ>_Vmz72w-Z#1>;ul*`k z+&t(8afia{F&2d+H^Xtr5b`Y*5B(^!fiK1k+TWB*)$Jg0u(H#ESKoCYVPHmw)UP4&Hq z{1iT#XzE$A8X}p(DuhlBlnXPI?AZGN3g32iRYQ1vp#qZwV+4d4-^5pZ7Sa6wB=mqm!B> z${7YciuWBTqa}PF%ose~<~EeuIk|K)LO-h3B7i%Oj%p!PTxgP3(UpZAdrR*uf*L)U zGr!(8bptrY{@KO@e8NcZ+*biSPK_&uZ=3-A?5^@~NR@Z8j#7TtPJ4fIyE_W$BlFUy zx-rXF_65k7*gp^ zb>gVba-|(zd@#1sj2yb?;X;{j^W>)&T>8y7)^Q`L;8#v=%1=kX(}6cpB! 
zQ(D}n$}3+-krP#XIOK2{-pIzx}ztSaXwwFYk_si{IPH$xu z>s>ierb@LY)>1ZmZZ;+6^2N!-oX7BG{5m#e=Ux+`qPhwfnAEL;zsTW?!y8YI8H!Mi z80VMs9b}$( z@r(M8HSu%gyK_hEoJJQVQuT>n@WA0Mves$?b6%!&9F-4XBy*@e)(-)xU~T>-W@)%A zU=lMssXcu0HQe2RK0gm+q5rbi%&ctENCrzpW!`m-IbFNfTU*2=?B|;Wp;NLas@g|# zjL&H;@4gThZE85ZoCBUn$^AqC@{sg;M;l+@0X>gsjD~C|6Hv2I9DMC*`edeFIlEM zDu`?D_3FjbovGAV=ZpzqYF7oyaxx?Y;*2Mf6g8;Uoi7JITgT>?)g2akTEjX#m>}Jh z<%18+k9nJWz0Vh3xAQ{e`|xRbT!u{6_F3$ae1xpz>)p;q)-QNG_*57&-Xp(Oo=8M9 z3dp4!B2BkLNg;I#0MvMQUB+;bdgFum97`P$mkjqsrt6eJdaXW$y*Y_t6!(MZ@|#Zn z_p^u4i?7M z94tUFwnN=UstF-M^dKbb5V_6HwH zapdvGX!*CihK6}icHAKcPB&_5GfVdlQ^a|R2inD``L!qK&;+x9Q}|>MP1J&)*{}=* zsVCzLmP*U`Z=mKNRA0XAicetvS)~nYpkbQd0>|m+@<1lZey-EOkt+tNt~K>?CY~#d z3U%7JCu(0}z5nuFRR*K?F`P9Dy*#K@A|f|NBrVfFb%a%(IMZ+R*qXEpaC*~L_e$c# zq{1Uvg19VTTiU3qN+W^Ur|Q%%nT{;O)B~4&Cgz9B4Q>uM#!I5)`f6N?#NQrSO(y*Q zMC`u&MK`M7e&oJH*}Nzr3?e^(e_S7lWFDE|aXLbXg(c}x5enrLud6%<7}el=+y4ZD z|DP2X{$Ibkc_TG5#jn;}x&B+@oprlkojgLE?>S`z!-g*MGozze9 zzj%x9y<=b0l&Vpb-eL3zNDJA4WNse+wz%nc0KV^^6AR=dtY{{VlBlHM_A}Bf`&T_v zhP%REOz+s?t*@|G@a=hkIz)~gIFxrhw33pA@0zUM`_y}O?^v-pTNuq4`D~RMMM+Hy zC#}v4E@BgoWR3f-w8;gui0hxkN}ZD8lO9>gNLn}B)Epc9TC0NugO*Qi$`_=sR4xo8 zjfjfBsyh50_9)V{wUTYF>iyS!NQ<1B0H7ysuYT0MXuQ=}AmzkPUiD@G@k1BwXRFps zb7eR-1sQP^K<;~DB7lK9D5;xo<#Lna^{l_5u*j+h+eQ_%g1V&!ugzjD+~iN9J7#F_ z5Jmiy)};zH%UyVl#Vm9N%emj5ht`hcrqNg6df;~^ig_Tfc%dti?#Q*6+d`;Dcl?%U$jUR`1heyzi^WXy*@Vu6O~6|fRf z9~>|MZ9!<2N7gxW<=*MhZb#IjfvE(Sg02?(?W#u;Wwjtj9WyXFyS^3nK(bR7GO%zI z1hpF&(ovxy3IwgNgn-DqP4U8jRWHnxZUSqz*%ZV*&_V_$^_k)P>XvFBe`$jb3wi^P zKG1XoZ%bex%SZJ;3UCOtNL4(-C?Zs>=f)TbWn4QEs4PLrJIuw}C+3A5+K^CPo#zEJ z3ep3H*T2w%(*3^Eg3qMovAf8bG7p%E1H#sHw}r>x)D0w771gl{{kx#d9s;q(pTv$$ zCKWZkQE?0Xi2Ac|t~(**_PxZ{9jS+#CSB|u*UrJkK!T>9w=J;?+W6t!A7PEOy~#6bog59AjG25P6b>~@!52Q)bWSpBP9;_d|a zHw&dl;NkzfqQSq~z^e&-{&ijZ_pkq3MbZDx@c;0ts(%*($(W~PjZ!;q(ME#~2)>jI zY;0dbB087b+AIiybdsi?JDFP>82pM;b3ph6L^J#RWCok;JACZGO-)hak=n{HVJD2+g6qK*V5^8H+}X7lUpT=^#_Be?stD&D z8e#{9)C5tjRa!BQ$(%MVR>2ZFpw3PX&swx`zU74|hpUh%)Q2TXDz{_5T+s2I!O++rtI$sz5JiB7x5 
zypum@e)E}Btb_t01N0svhRDYp&TrGl?uZ%CUkNH33^G?q4mfGo&%H?q@edig531$l ze-x$5PAb5N*>7yZS%-26{^y5n*B^Z(qJFY@1`ec%KM);r8^OB=N5K$C>%aRXavJXo zW(vx$Jj0=7X&^yvK<^J>Co*&T2+CN0ZsPLJ9)rC!3`BlGqGGKBE~P(-A!Xx5{}Do- zQ{u^poPKQs{J20Mx-yt)a|zfRQ!wEFO7V0Y0~R5=qy+2~Gvczxs_8%iW{>~LBg1Uu z8rZLs&le_S85j|nQjmJ)kB%PL_Y`$#jJUa(Z2eqq z^*s%``9ap@kBM~(o6C+gH_GK`RyV&7c8T#uZ$V#~ioO_c@f$vG4j{XwX)dcmCvLpF zlGY+TQ3W@o_7CZ-!Mvlfgm1zRh#^5b9BtjYpj-hE3hJlE4k2_=w1W~iKbI9g*f!pn z4i)W9^PMy3iB;>8S0|y0IBTgL=E?!4f%f;GDE}Yuy|Ud^8e8nN7;x>g(r7<7#IQs? zI~GDYec4=Rj?+0A%`@o@@e@C~MKF?gX>s;*HNE~#9D`}T3ob`DKqF{5S(bhb4*UIc zwc~nO-vh#8_c>t{R!R`#M_wjcXMDDOul_{lpf`5N=Oo#@9%mqv7E=6YwpCs>U|Z2Z zE?e(=-sQ2^#FUNGht)JO?`}B%lORNF#DVaH187W{gWr4Psp4kpR?6Py3rxA{YKkeZ6$1!ha?_QRQt;#zU_&^WU*sy~6vK8+c>AnVUP zqLFZa0!qyuDgp&3!FNR}J}S{B$>!J6PO#-10u+dAdbK4jGLJi&v_u}KOG>JtI>X+_ zR7sWEVVS0fH-0XUEBh*5(I`c2q19l{Kb%ec$`T^RXpD}+0z^+4m>JUdVc6uDCN&CV zjb}`^dkX3D$QK$-NyVLOY<6*O10O@2M4LPqGeLdC+dE^cN5fB~YlEOdgnvS6I)6qH z#AqP%vdhIbDOB|H|LW<*ct1h?#IUG`THZ&bSqzDnncD7&Nu~)!4F14POV>&QnfS0! 
znpvOTdW)bsLG$E@QQD=nWXbv^bSGucw}7C{pk<7M`+90;qLyHx&VYP(+NZp#xskEJ zIzLsMNORwuSwJYCOSz%|OBiBkU_ZGfPhnVm+mAPILo%-szBACgLj1RibHbrl;`L`x z+bKwfKdEgnR&e>?<47PGB_UtpVTV1Yt~+)6u*x8z4?+20W?-yt+VZe`otgZSRbWi~ z(JDrYl(+YiY*)M%RYI4)b#bG9b#(hUQF6bkmwU~4y@caPLC$cZ+Ru=Z^*+i-pCwh% z-C^l=wTrPm4B_F2-S?JSYKW~$nu^yv7|h^h3;D6=n4P2?qG2Y~Ru6fbb@|^3GY0CM z1xzpuL1nZ(@WeqPcGVF;A~!ch9tm6)hj8DPzp zYRzl@?a*<>u<3T$5SLf8-q#V*Yv>&T=vPl##jW^PvE!%v$C6987Tb<1-03wUfn2cS4wW{>pt8J z(Ql4^UQ15_@mxx3hCf2P=TE(r4eUdwHFUW;Z)mDT%^j4Wm>(`X9CZg2Oa+AtC12Jw z^LsG_;&??IKZISHsTzFiIhwpk+5>LhDUfVug1nyju zW}p0>d?!i|sNxH?}a9dN7dOHlyT%x>{ z6?dmI%Q|liz#>K~SDjH}_VYz*TBPM{?ZE_rNa;kuRf1LtIL`#?X&$FU%G;EpQ8?_q z!~#z|$%DQSHx`O)-Na38Hp%tJxEMIgg&|d6rG^VoOmwH+wvJutPnY36l{xdWlgG$^ z)z&B|+DGqTM|RF$HM+iZ(xUk0#U?j+8|p35A@Go0C{LU*79sOVWvBIEXKl+uZVn4CQTM2tT3GU0 z@n=0eo6{J29Pmr~+qV>Ef=8##b<&6G$Wy~9REcxf(N3{=O>pbQ&%yVtJ2^wFsjYun z1dqJ3hL}+Pem}m94dltyJ=&I0J%v zQ8Bu>*Eg21RNobYmURFfuK&)jIs1E{@G3CQHun5)B^6e!(QL9HsOYV(#G&1Z-9F&D5hv28%Rx>MyihfJ+B$o6@=Px@9P}CjprcW(h*PnBl z-to|V_2GB@dQLP}Yu?>_y;X=AwS3zi1AUHuC!V@)YVw-+UBleKh5k4_BYT^hT@OrV z<9v2Be7ZJjR{QD`k*?BA^2LDI6v0oX=t;2b?p!vDW*dJY zd8&1bbkpJU&?#N7P_!y~N5i{ORJ6b4>Csk2il5XOTZu-pSWJew18W&$apJ%zT6OUfCy82F-qE^xFYwFXhgmfYv5oG2O(!!4c~=Y>cM;*= zlG`gy0tb0Ow8@aj$mT=RqY~=G?YR>8EkUIq!_66aNuPg zuhP0b5^X%=!Tc`yY`%=d=!zlh8cBJXSS`DXH)!-q8pvRQ_&bvTrU_-T3z5V{K?8vtI-B9-}^*Ebo2%w6V3;*{KXe zd)SLUqOD{s1f8Gim>B`ar!2Sj5142kd?~Xvj=Jc>owX_jKl`st^|OKS^P@-3N|}B) z5HXM4E>pq8e7(=YdCOH6h4pNtpXmQp=YJ8=*nvDWJNj$mClU~->t>wEGMze{5Xe?s zsjuEgvoRS6>l}EClu%Y29nnu}F5B$iy@-hm@`YA7@nSDrrfa9J!)(YetQ*W?b9|!D zjk~$jG3lf3&dTfm=inf+OEsNiY-9wSfH&ug#Nz&bT6de)vStM24-thMzNTK6{xua4 zK~E0rG8Cq!ruo3C8{17EJe;D0exPsO0r8(-CC%(|1!LFfY|>%WnvezYNs?|2Kc3f23b}sSPMz+X%svTJGrIg=1BZe?^eWKB$iMm zgx~jmo%G3oyI%Bkl*m=8?dxZYrH>QpaL?>-XMLS-qXY(do{N+-I%`q!uInh)m=YHI z+qGq8n|av3EHLm_+d4+*UiIwGA))JIwLXgUJkGO&?ZnT7@G!CbjgS4#9MF={yKeQM zyqULh)i}ZK6Z%nz`KrgnU&5n|uV~j1rDQKFlQN=WiMz7`DNIF5W<!jbVo(gE4|pW4mAP%XZ_e3p{&> 
z<*jS6Luq$DukO0AOMgnda;j{tWQ7jOaM*wewZpejAFYE5@tD?3@2~u#z~U+Cr+?a~ zgAVqi){h$s&}EEymBp#h;7j#;_o;t(VZ>>7Rkx8q_D^z9bpRD|3X|YEyX?Brfrc;p zt7BZy9`Y9kA;qmN@UAZfVMkl%dYQf)cYo4P+47`(UTtTvl&tOY{~=vV^5>+k=d14P zmcVcZ3>1d197JFRxjp8i64TNAm-Wq`fO!rzg|lee`z-ERZ+?A}URrBz<96ja|LQju zj6qL6i1Vvf87--Ug6Lr9>8YldH6A$aBN_5a`k zY`(c;BPd7jDE8s4^5N|L^Os(pbw7By{)xg;?yG_=Lv*wbl#Dj~UFU2c=}iT=z{hyH zyPQ^6-6!8Pv;hO#`}#*ZRuV{Clt)7qFt>lg?C~Do_FmBCP(KMurL;FF-MY(Z-TqS|Ydsi>c_q^i zt#1+~tzwjYl=mgo)AAxp&V8>j#oBQ=RaNP#4R<_$lTHUCe8ur3745R-=^^>|sBfTH zj*WIAH+fycZuKBZ4JtB2Ip`#ZZ8Y~hFYP!evYDnlJhLgyzjn?o!O~35{tdtQ6Bj8N z-vT8(zmh3Ss5W-7D`5|<*@{CV@~N#qmCVKX1hqDKa6U+b70hk+A`W-f@9Nflnf#%~ zl;OwVT4@uw!v0i6n%3b;LglmO%V?44Hi4PKNXB*eX(P*i7|5~4%14;~Mv;gHY;fwS zdsa{E93Y{xLc*#VY-~>bxEa1{_t&0EH@5&w^Q|?{t+@;1vtK_kmcKUw?*8F!-jHp2 zWohEm2V-q@oin*1+JJg>g^oJFq7?U5F;7Um{+n`s^K^U7+k1G`tR>4kyLZ(NwZ}Y< zEpkcGM{8dyA6((`6)GJXQ$@$w_yQ#}X5>#|w|8K_6snMqTb5{OOq&Ap1Z9 z10^X{jwm`+J2C|QS@|H1?_`jfp9MlT7wflKrdEp)SX^k*T;Catd)FYdw&Nx{i>(Yi ze$SbEB^A{w%dX(@>e^pg$QQ-qcR6PKB1}bzMs>=+{}DqT>(2nC&LDkw7;peFpCH{s zF_6yqLXe~jU`PMc0Xo?S%uE>Mj~}|&J+dHGVH7pBxgy1=zonrm>?q=FvcBc>=G69z8%`GqG!Mch ze){&p_`acnWue&Vahahim8r3vDSp2*$`oCk^MzgFwDT3eBaG+NfRhnZ)y5H!Z>?KR zzVc(?mq0@8QZ{VSuttRLvqi|?oaJtyIVb;Qumf16aI^}i36}-m~gh}Oj)gtrkSYw zJH$nW80N*-B8PAa{M35JXrAip_E{P?DEs{dqdJ=0JH0-`sL$U|X>!B%KL-kzZs7`* zyg~HbNQ+YPTi#|k)h&ydycPpH{S!5W?zt%6eXrW9XD#N3+sYETcMtsEdK??%>4d|{ zGkqUP5DQHm&F5s79G+JDnIf`VwepB$&kups{!jAL#`CCI1ITk@#;0IvTVBFC(V@VJ z-sJ55>)nr`I-7f2SG`l)K)sVN=x%XAS|d!(dyV3XhjE%_#9)X^^Y{?@Tt3ehEh^j$ zw|ALKVpzF(ziMC(c~QE#GVi6d<2D_5R^K1EHT8Mxw14A?(?i8()RAf+Hpw=&*byYf zF<9n}BP+SwNqJ}pPc5ok=F^|kbJ&Ghz(GMQe_}nwcXI_A$lF3-FaCA)PELy?ht*Yz zTFXHhW|1@VFhjtiL4t0gT8Dpd1CjE4k2jx?0H|jQx}h41ECPleK9ZQFQ|mx_NZSkR zoub7xI5C79sBs_=9b3WTN&~?g;1816$bt|6?EZs$Da=^SnHq>7e0gcZKY=YvP{se? 
zy<2;OU`K(-52J?_u;v7iZoH;F=(@-KzoaOrsvI9AF88dBDzHq?8nps?xjdI`x(2w{ zup{3AB5_4Ni1w=pgTrP*;B10_)FSLZ38|li0^>Tle@7<`HH2j4_ykaZH?_^cGS)@w z4nTXCmgY0-q;Wd2j^o3;5NhRy$qX^ayjAcOl^b}N7*=)~d!hWXT8r{JD;t~S)n!s?qUB1-VzUg zH5^BGm-bnR^bE!slah2^fv8$IE5*|o>U|B4_bd<=4AqAOLH-p#{9|D5KDxly^MVib z|IfB{_y>Uh{J&+)Q*?u!6jUSNJ+^>JlYfb~=sh^8)!Uv!SBJFEAHUw;aAjiokQ_U; zR(C}Qp!lLa54H`$JOfFnjE-&iILL%&0#Y2f(vLG-x;i=+iP-_<-2@y4ZaqiU{aDmv zykeL%@XwZC(7EYWUO+Q&l7U%7Hcgs=rUyK)zE!=kb5MWkpd`I4HA-3HE_83}f;M;} zbL2&2u_1N3PMdKt3e!?`TuBkytB&&PB5;t(4XWOS5#X(^j)9VkWLRW-7Z$INUt3sJ zU*$YeooYIX8k+*XKn>wABHf1nq@4EZ;@WLm)93<$mB%5RMi=TdlU~Shk9lLFF7dd` zo${La)E?f+K5*8j2@-l0lkPVh_?9{&%{8K{Xz9jbo1$_(o#6UoPS4T59n?k>(g&LA zXwRPZwl2@~?Jtr)zajuQqPsWcnJk1t5^mMp-OMN}JCuXK?Jy6R?SY%ageF8pe6 zGgjg8$W2FSm$J~UO zo`5d_Yi|6Psh3IT@XuPp9AAZ#q`a-wFFH@S&K#)`p#w>b=MHHP>k z1I2_lQvu}ypi@M8))hnS&&MTHkH>9k8p^z;M>HMhKVnE&UpLG&^1qynUO@cGLnd~D zHie2cGkG0`JhlY?g~Op5dOeszof#Tg+mY4jtwqdAP$2a9m&Rfe^-Dy?pHJYmAY_`-`g&@YKTPMn#t6Fn@SK^FV9 zUvEXs$34erw%coeU>qb7qF^925C>5=USC%JH&m2 z+^Y%6YENGcHl)!yw@487)67P0Y_FAWWM)Zdn0v2!zIxl1YW~Oi3qtdNVVshBI8kpSkU^C{~?r*16;4^Pye zgQXHl%!^maJ0RHyBK!EA)PD}{xkeheicpe3OTBG)pDx^0R`sJJW;XA6kbDQC*i5Qdd-1>#I(^_@1t+#w-^40ASxgQVWDdXnS<|`s zz+bN-r(c7hi{gV#ZrO0>+r}1?4nfLSijYBcwy#`H5z-?ok=vR&^S~H$py@PyMzETG zg3ZS;EF0Cb{ls(Eulgr;+1x;*}v#TfEP@ogpO`&TZ}!vontjGppTxRJ~? zAUr5~^OW`CWsPrnU-s?6%AAL~H=Q3O=KaxigiCnb23eRV+%L5C? 
z$YpwImPDtY?8l69KBGiO=neiWu)#;O5bU71HQ{gT@TApD+IfXg+cOAC?b3q@w>G*H zDMZ$nb|_8I5b!pRBsc>mC7~->u;a#@*E~*-q(b41F4&PeN6xZ~UVo7}w{Obm!$W~& zk|I|ypq5x)6>u=*EHipk5~e#9vL1yYjuBKY)oZA|S3KP$)?}AwGnR)&-h%hjsy5#5 zFKk*+(D{rw8!>v32!6Z2tZ?C>?qt;&iM*?2A(&9AqvF#a%e8)wHc<%h`+Z@mOZ9Q- zM}Ut{OLM$xm-*+XP$LE%munBVUUseD&yz-~fMUv?=Y`())rCA#7^JSiJxPs7*~r{v z?zFEref0Z#ve+eHGheTnM`#Y9)Lm12rC9Hdz&EmOzD|mOaz_ zt~Xc4Q$A;&yZZQh+`GFw45imG?&T@5d0hdaAT672}a_8p`p?K1-@h zt5r7rC++rtQ@#zQw-vl$ zC1h#w*pCHRQz1*17l()w%($Obl0!MBlGQ>%)=OKZelgn7u!Mz&aK3g zOiBL6Y@0^JK?`M23~?e(Kn*w7^q=F*k1tXB`m-BArgh`&%ox8u!&p4NyDQtkJ5-P; z6mb7IWQfE$pTb4BvCLL55%C5*>G0y|1aSAe?5Spk%~b9~|I*ckFg>e@uZ!Gv!%nU~sGfnUR)+Y@A?5gt=Ve$2 z_mAKWy8Rbz&2Nwap;~I@PRkj;$tLF?%{udMS-QWe%+-nKrvj55{sE1-bU~`nR4$P)qgiP$TN+_~2e$*>PpA)5P{gjrmG916Fg+c|SN_ZZbT$)+fHa zr?9-PgyB%!E(0Bgy`2-X0(dI$*j|4}x{=|#>^w|ybD3UFo`wqBXMTG(W27gNO4#Un z(^V*CXH2R)=Pan%L#%fj{z`VrT*`6w>&~+)gni%6E8HgJsz~3 z_7T%4Cs>I3dDRrwi7v*3C~F_wJVylQ62S0XlWE`=cS@0p`|*KBs?FI*2#H^kD3CPP zg8X>YDhYkdUi#QlY@SdJ02ZJCiHFf)r4@s7)HSjf@p9NbLj**B09KLeg6At-tnf*HL1SN(~6lQ~w{WY%C6AB*~BMrA$S1HG_I+n*pQ(g<^8# z>pV}A8kAsAp4A}@TsX|L7P?o;?gC2lj%KY6^f5~UJS1X(70C7r3Y1m1+zegry`0lr z-?)YjqZg;xnBp1y9X*PI9v?IFu60Hdpix;ozd)Qe&Waa@yCV*J8My`CAs==CZAGZ! 
zwtc9UQN2WgCgb_>#__3=V4>eYB+*n3Fdk{fXEql%<6B9;$r_vc>zuOsDtn~hh4tcg z0?kUkW0$Ir9m*GT^07GJXelIuO;MuPxG85+(ksY&OlFqIbNj;<565fv4(h0MfaWKm zZwZKlCWHiOiKF4s@;aUuK9*f<*O}XLI}kG`1(c+Up2h9IH6d>=$&NAlk()ubyzyi@ z$j@lCd%Yke!kDf-RL$J&g>ltJ^M@JJs0^jPvVcswWtiI-p_6uph37H`LKto&TFn(` z4DRB=`&7VYyxvp;(oqhD*|Jm{qYhp9ss9(hQ7XJu^Fv_>#nW+p?a_DyAH8WWU!Fn) zftEv{3rff@@tsuKYrD15g?`V%OpuyvJ^*_V^kiH1(@Z8OqoS@@LD%!sV|H$UV(6>Y8}fn z2ppX~^`4!dKgd-hK#AD`GJLxd*w$Fet@i4k^wl5wBwZZydO#w;Tw5^~C1@V?VmWJT zd{xVKU3w$7VpX_&oxBBZ5H%(F+@l9Kq1|3-pZHUP3d`UUn((8C!7kLwn7nT?fH!L?>$~|r!6{{2`_75-RDi*GSd7#l86<;WlU6sKC^Be-bL*C zq_kwGrw{Ai@@YX_>Zu?NT$WJYk7#_c1k)>>-xhkKE9D z(v`GAZ4*vTpLQmrk;R15w+T1#x2_`sA_bBG51^5yh+m3$asH7~rLo2Om9;d(S10ayhYC zJ%ovp?V=>!sk}T!#VSeUr4A)M3@JZL-1(WntSXasZ_QRv1g4(GFE6_kvAZyTw;iev ZiuIxFws`;0QWZ_6=gh567nojq_#g3^))xQ( literal 0 HcmV?d00001 diff --git a/docs-website/static/img/logos/companies/checkout-com.svg b/docs-website/static/img/logos/companies/checkout-com.svg new file mode 100644 index 0000000000000..1eae8d3dbd406 --- /dev/null +++ b/docs-website/static/img/logos/companies/checkout-com.svg @@ -0,0 +1,22 @@ + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs-website/static/img/logos/companies/chime.png b/docs-website/static/img/logos/companies/chime.png new file mode 100644 index 0000000000000000000000000000000000000000..c94f9742eb6d5599a749921b609dc719aaa4e3e4 GIT binary patch literal 4843 zcmZ{oc{J4D|HtjJjD0t#kSK$Qu@pLIdO#MQ+4`1oK> zCmr4R_(aC}_ypAW1$UG?1D9fUg2>s^9>-C@X-#_cn0xWv9fkjvn~OW&(dm(3K0die zPLB2-F=O)+_6)9p9N#5vrSKCR0c}tLVJ-*HERQ*VW3Hv?*TFlLyuK;?_nNYQ8{|)~ zMNjMzD}fykQ2!K3>G--d9NoNim^p29u;C)nH?R64v%ay~jrhki>V*O?F1P3aZOa#V z9zS5AX3>RQj%GKUB5u51^w33YcHGK^N5Zm1M8PJ}Td^(nj+;G4BLWUQ2%2koG z>H4kJ0lnt?#(`NL^Fpdhlja_YyjSata3IDiXkDXc4zcIU*E}>BLmA82oQOWC!V3Ld zb=6~2ME?}kesoylXFWFAt=C#7A0}Fn7yXW?E=AAGWrtIT5(*8dt=L+A3oq<4Y#4y8 zS~5QYtK?K{81!Fb<0iVW|1kPB_S24MC4ZnpbQlvbu`8rQWEitHwJW4u#KHqR;GaCF zzF>!3IShHIl4{kIulaeMT}hYO)`iy32Y9}XBhBV!d)8m5yWI>9&2@S9L8R&L62)#g 
z-dc>|H&@bXW1EzHmd$NFe`tEHwEItFo=AE%tf_$D8Gx^*w+vVAC4FVvUU4r%#uvtX z`VER;m)hN!^<lc&W!__kXuMWS1&P5p9iGrPX?e9{@V((Mopz7TxG z_cWv5UBwE;9q_@WRrH1H!NPYBEZPrBofC4A6+O3PQFQTo>qE=PIYZ29=H=|B!G&_6 ztZUnOQ-jPlifygd?$lMII%(8puE5f(&MBSX9wH2?qk4|9&2fP&j>3|8^UIdOv7*d( zc2XPJlWd#VF92~*R%t*<#@49HT6e&(!7F65A%kYgNx1X7XXKtOxZftB(1}=)Eu1Cc zIOR8DPg$5#|5JBUozOC+Iuo3@!_+OPIr=+XGU>(3QEj_vi73o83;xl| z*7`&YRO2-E^(e7V6Di5Vx3|3%oxTzfZKX{oqY#Gx$6OD@$t!Je<~O23)z3Ns^10;t z0MVn*dJ}!kPJ$UJBIK)Or_7a~-Sm|Trx6rz#95PUQ^#DJWweZ<%e+zs*P*>f8;SDJ z2gX)6$vqdaR8R-?D1nx;y;SFMgjrPjd_~n$O0mr05(o8iWCmeey zLEcpdGQl-?SW)3xW^Ik#Gxd)1Q@1uhtOPs{o zhNMNg8}nnD>_J^f7pFbRd%R@i&ejpn+lzh%rpJ`g>%w6=Bv0W|8mS=5NLx1)o zgclV0{+ELCz}s$1&OfGCHwm@eNA5ke-BTPdUg;4NJ#%*N7Ztn2YtLncphjV~WE4mXIyTcUQEG;k4)Gu1(nhogOj-BY``|6a4C=lH`G?ZFPg|8z zB4$#xJ8F}t*QOk7sMU$ z7Fq=s2Lq5nv;AFtUntjN{`q_|t}#)ZcKn1c4s z8`{vx8*-c`NQa^u)F+v7tR*yn^Kvv+$SxwOJuT*!%=5>XWSp&@l*Ij4uKzHe_h3s2PyX*xFSZg@+|0uN}v zA`9M9y+0C*!%?F?@ja5Av7$XD|4H3d*U|5rBqaD(XCe$~jKTtf4MD~amdECUrN!b! z9^&R61Y0_YN)~+ot%`n{tedL+*<=yWAK*wg2_MW2dGf&p9emr^fuOAsiU-DI+(>bdg-dLLZ-Sn9X9hQQZOqbXj2;)a7Ol%W0f#Aq9Mlvk@yHVbhh1P zB*!UPJJ~u>{S^5OlJSUK)VWeZR)s73Q7k{IxNu%5x=ZT|73}Q;G+R@Ji$iaE?*o>@kXx}tzhR>^xtuUyq{uV$ zU1j{*S1aN}PFpmG?#lf;b`ObHx2hRmW$Da+ho-idLNu#x5ayyi8x%-Cp4*gF)lZ3o z6ZS+a>6T*YYH7~zUSX~AUl}Tlm!N(GxGn&{Z)@v94@}lo!ub1Vb2(LD_Rj$mG$f@Q zb!?wR$Sx-1a;yB(c6-g0>aWdE?CNRhOoGQ71kA$LI-7ON61uv*>oWn_v=DUBTpvldv8nwAun>z}2B%Y$z=vL!#H`%f=7<0E+RgX^pvUXI-49}H zRF#h;5A7{zy*_rRZj5kH%v{Q z5gBID)e!ycyZXX)sP8Xm#BoC*es35f-eL)`Xm_-i2;xQiwhj}m<5H4bsJ2iEH3T!y znv>$ihLy4EjIN3Tu;CeBv%S&%@PEV|7o^YF#c?7-%82i@bnt`G_W5M>F#5{TE4;mc9@-^an8RrE zGLBYcO{4Q)A7)%>LyIBf6{<{qNhu*jX)fz)u&e#{XXLS#e;)#$^wNT3DlWYc4taN8 zEIcb#4-U+J$^>t3C8}wWox3i`Vp@KL_^UAL#23L2KQu|*vk7Ly<1DDd!d;0J$Vs!^ zBSFCrE6&fFu~+^K{uVak#xZ`645e-OkHgzYN10YBF=|yhs=3{-xmzG!DL5rS}3so+#~81xhaA3H4plphucF#ar;BK)6(3^??((e>s<2QinrWV zX&GB3G@Nmb*JEC@wvtp9LN>i4nWEDPu0~!-?)@bY8tbe@xBVev^97{V0R!&4kb+gp zqv0z8%z+TQxOzTXtbLoe5@!1wkB!AzYbZ-`8e3mp42mdjVap{&pumB*!17fn9*tY} 
zVz;GQ8m{pQL6dAB7wT564AC~d{FLi3n|Thcz+U_`I{__7qFK86l?ucF9tEG-q|BMu zMK;seJr#FMcb;P|Vfjta3?ZLOQ3C`kXSga|A0u&G;!ifku)HKEJy2DV4 z>xfa8egBy(IQ)qO&JaVtp`VlFzJ2wF-_3DsB42_OSvcQDAe&Nt4mEJ@u8?7~an~#R z0{Z8>8fR5SRu0K1*rpRz<1QTlOY35868KdOxUbg`i`$>6suOv=0*5wvc`MH?=-a2tca?e6fO>_VSC z!QFeo!Wqi6REeZ(3<`{%M89M`433(ZG^%%lB)4ueE4wD1Xvhe)$c;~HgGcl7CkL31J>r@vO4T0tI4Bmlsabw4r>^qUHmtBPDsA%nAb~G9}V2FX{mr_h`vr zYE8Q%vuJY0|AAy1$JIt5zgj?;oM@BTql2NYLRgNR=hHN#S=nHAuXZdKw{5MUYzXZ; zk>kR#`rCO7Of)zf#B?bR@oD&Z>3Fmsj*|da+eJE+OfrUGbGgZ2=Kixm@Iy0WRlPl} zgVZ8A=bf4iIhP^bHloVpy-M3sLzfmGrBC4foHiAHVz&kwpwl!nAw5rxOqzzbf;z!@ zu=glX-L!Sc4YyhR`fjVZa`hsqb7ILa*6z&lABLH<2SdP@{N@9Kw@b=OezHjbRa~T!>g=dkO4+(I zlxV%VcpW(#BZdw8rG|;bmt$T~x2|V@8eQg}!q%?+BSiI_T{4jS>B~B#xoM0(XZunQ zo%45dFDbT@->#$!7)`200E%^$#^^>?x&+!`7*+<{4U~gFbtd@JuV_I@eYPh5X`(c` z^K*&`+#C2Px=<&ov3LyZc>RH24Qh;W2*>cGqrj#^>@f*7;v%#Z~-} zI>I0#e;*n#2|Ru8o1Ta4)F6H1;$;xg@laXsjxe&xZnP`j#lE?MBeXFqLd6zwg7<9L zKAgq0u-O<2DrZ8Q6gnqgs*BMCosN7N$@8}1B)ziRG4N$0KlDXEOKaTd^1Ln{;=`Gi z&tEx~brnz3LtJZu56g+JbtRhjf`$!uXo}bK0xxo=74v@`%L*4;OqUpQJlfJVVVHHW z=B2f;<>&_G%Dwt8@pV*f#LOMtNxA4ng7&&@Trwri2)Pd1@QY2nt-LEGuv(g#< z+k9sHlxKkj`qN;phvtz^i>!?rwT5=)8*ZbSXo!_bOn2g6nr%m0-x0)B6m$9cz-G>J zOMC&squ8zcx(J#TQayNQG9cZx@l9s9iaf&Zg1o+#Q8E4Z(n9g%xKZ=jYdw Date: Tue, 13 Aug 2024 16:57:31 +0200 Subject: [PATCH 33/72] fix(ingest/databricks): Updating code to work with Databricks sdk 0.30 (#11158) --- docs-website/sidebars.js | 1 + metadata-ingestion/setup.py | 2 ++ .../ingestion/source/unity/proxy_profiling.py | 13 +++++-------- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/docs-website/sidebars.js b/docs-website/sidebars.js index 75fc1f2dcd0c5..a3aa54657d067 100644 --- a/docs-website/sidebars.js +++ b/docs-website/sidebars.js @@ -924,6 +924,7 @@ module.exports = { // "docs/_api-guide-template" // - "metadata-service/services/README" // "metadata-ingestion/examples/structured_properties/README" + // "smoke-test/tests/openapi/README" // ], ], }; diff --git 
a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index aef22dd145978..abb716d2434ac 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -104,6 +104,8 @@ classification_lib = { "acryl-datahub-classify==0.0.11", + # schwifty is needed for the classify plugin but in 2024.08.0 they broke the python 3.8 compatibility + "schwifty<2024.08.0", # This is a bit of a hack. Because we download the SpaCy model at runtime in the classify plugin, # we need pip to be available. "pip", diff --git a/metadata-ingestion/src/datahub/ingestion/source/unity/proxy_profiling.py b/metadata-ingestion/src/datahub/ingestion/source/unity/proxy_profiling.py index 5d6d2bec6d2fc..51546a79e05c3 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/unity/proxy_profiling.py +++ b/metadata-ingestion/src/datahub/ingestion/source/unity/proxy_profiling.py @@ -1,15 +1,14 @@ import logging import time -from typing import Optional, Union +from typing import Optional from databricks.sdk import WorkspaceClient from databricks.sdk.core import DatabricksError from databricks.sdk.service._internal import Wait from databricks.sdk.service.catalog import TableInfo from databricks.sdk.service.sql import ( - ExecuteStatementResponse, - GetStatementResponse, GetWarehouseResponse, + StatementResponse, StatementState, StatementStatus, ) @@ -125,7 +124,7 @@ def _should_retry_unsupported_column( def _analyze_table( self, ref: TableReference, include_columns: bool - ) -> ExecuteStatementResponse: + ) -> StatementResponse: statement = f"ANALYZE TABLE {ref.schema}.{ref.table} COMPUTE STATISTICS" if include_columns: statement += " FOR ALL COLUMNS" @@ -139,7 +138,7 @@ def _analyze_table( return response def _check_analyze_table_statement_status( - self, execute_response: ExecuteStatementResponse, max_wait_secs: int + self, execute_response: StatementResponse, max_wait_secs: int ) -> bool: if not execute_response.statement_id or not execute_response.status: return False @@ -230,9 
+229,7 @@ def _get_int(self, table_info: TableInfo, field: str) -> Optional[int]: return None @staticmethod - def _raise_if_error( - response: Union[ExecuteStatementResponse, GetStatementResponse], key: str - ) -> None: + def _raise_if_error(response: StatementResponse, key: str) -> None: if response.status and response.status.state in [ StatementState.FAILED, StatementState.CANCELED, From d36edcace9f8525a80859c0a7674446779e0727b Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Tue, 13 Aug 2024 15:53:23 -0500 Subject: [PATCH 34/72] test(smoke-test): updates to smoke-tests (#11152) --- docker/profiles/docker-compose.frontend.yml | 1 + docker/profiles/docker-compose.gms.yml | 1 + .../profiles/docker-compose.prerequisites.yml | 2 +- smoke-test/build.gradle | 35 +++++++++---------- smoke-test/cypress-dev.sh | 6 ++-- smoke-test/run-quickstart.sh | 6 ++-- smoke-test/set-cypress-creds.sh | 2 +- smoke-test/set-test-env-vars.sh | 3 ++ smoke-test/smoke.sh | 15 +++++--- smoke-test/test_e2e.py | 14 ++++++-- smoke-test/tests/consistency_utils.py | 27 +++++++++++--- 11 files changed, 76 insertions(+), 36 deletions(-) create mode 100644 smoke-test/set-test-env-vars.sh diff --git a/docker/profiles/docker-compose.frontend.yml b/docker/profiles/docker-compose.frontend.yml index b43db8297cb1e..b5b2d50143927 100644 --- a/docker/profiles/docker-compose.frontend.yml +++ b/docker/profiles/docker-compose.frontend.yml @@ -10,6 +10,7 @@ x-datahub-frontend-service: &datahub-frontend-service - ${DATAHUB_LOCAL_FRONTEND_ENV:-empty2.env} environment: &datahub-frontend-service-env KAFKA_BOOTSTRAP_SERVER: broker:29092 + DATAHUB_GMS_HOST: ${DATAHUB_GMS_HOST:-datahub-gms} volumes: - ${HOME}/.datahub/plugins:/etc/datahub/plugins diff --git a/docker/profiles/docker-compose.gms.yml b/docker/profiles/docker-compose.gms.yml index 8cfff2280e2fe..c9448fa34c687 100644 --- a/docker/profiles/docker-compose.gms.yml +++ 
b/docker/profiles/docker-compose.gms.yml @@ -40,6 +40,7 @@ x-kafka-env: &kafka-env # KAFKA_SCHEMAREGISTRY_URL=http://schema-registry:8081 SCHEMA_REGISTRY_TYPE: INTERNAL KAFKA_SCHEMAREGISTRY_URL: http://datahub-gms:8080/schema-registry/api/ + SPRING_KAFKA_CONSUMER_AUTO_OFFSET_RESET: ${SPRING_KAFKA_CONSUMER_AUTO_OFFSET_RESET:-earliest} x-datahub-quickstart-telemetry-env: &datahub-quickstart-telemetry-env DATAHUB_SERVER_TYPE: ${DATAHUB_SERVER_TYPE:-quickstart} diff --git a/docker/profiles/docker-compose.prerequisites.yml b/docker/profiles/docker-compose.prerequisites.yml index 7cd9c9039539c..eed23a749628f 100644 --- a/docker/profiles/docker-compose.prerequisites.yml +++ b/docker/profiles/docker-compose.prerequisites.yml @@ -234,7 +234,7 @@ services: env_file: kafka-broker/env/docker.env environment: KAFKA_NODE_ID: 1 - KAFKA_ADVERTISED_LISTENERS: BROKER://broker:29092,EXTERNAL://broker:9092 + KAFKA_ADVERTISED_LISTENERS: BROKER://broker:29092,EXTERNAL://localhost:9092 KAFKA_LISTENERS: BROKER://broker:29092,EXTERNAL://broker:9092,CONTROLLER://broker:39092 KAFKA_INTER_BROKER_LISTENER_NAME: BROKER KAFKA_CONTROLLER_LISTENER_NAMES: CONTROLLER diff --git a/smoke-test/build.gradle b/smoke-test/build.gradle index 95f3ba8ed56d6..a9e5a8942b71e 100644 --- a/smoke-test/build.gradle +++ b/smoke-test/build.gradle @@ -44,12 +44,19 @@ task yarnInstall(type: YarnTask) { environment = ['NODE_OPTIONS': '--openssl-legacy-provider'] args = ['install', '--cwd', "${project.rootDir}/smoke-test/tests/cypress"] } + task cypressLint(type: YarnTask, dependsOn: yarnInstall) { environment = ['NODE_OPTIONS': '--openssl-legacy-provider'] // TODO: Run a full lint instead of just format. args = ['--cwd', "${project.rootDir}/smoke-test/tests/cypress", 'run', 'format'] } +task cypressLintFix(type: YarnTask, dependsOn: yarnInstall) { + environment = ['NODE_OPTIONS': '--openssl-legacy-provider'] + // TODO: Run a full lint instead of just format. 
+ args = ['--cwd', "${project.rootDir}/smoke-test/tests/cypress", 'run', 'format', '--write'] +} + task installDev(type: Exec) { inputs.file file('pyproject.toml') inputs.file file('requirements.txt') @@ -86,10 +93,7 @@ task pythonLintFix(type: Exec, dependsOn: installDev) { */ task noCypressSuite0(type: Exec, dependsOn: [installDev, ':metadata-ingestion:installDev']) { environment 'RUN_QUICKSTART', 'false' - environment 'DATAHUB_KAFKA_SCHEMA_REGISTRY_URL', 'http://localhost:8080/schema-registry/api/' - environment 'KAFKA_BROKER_CONTAINER', 'datahub-kafka-broker-1' environment 'TEST_STRATEGY', 'no_cypress_suite0' - environment "ELASTIC_ID_HASH_ALGO", "MD5" workingDir = project.projectDir commandLine 'bash', '-c', @@ -99,10 +103,7 @@ task noCypressSuite0(type: Exec, dependsOn: [installDev, ':metadata-ingestion:in task noCypressSuite1(type: Exec, dependsOn: [installDev, ':metadata-ingestion:installDev']) { environment 'RUN_QUICKSTART', 'false' - environment 'DATAHUB_KAFKA_SCHEMA_REGISTRY_URL', 'http://localhost:8080/schema-registry/api/' - environment 'KAFKA_BROKER_CONTAINER', 'datahub-kafka-broker-1' environment 'TEST_STRATEGY', 'no_cypress_suite1' - environment "ELASTIC_ID_HASH_ALGO", "MD5" workingDir = project.projectDir commandLine 'bash', '-c', @@ -112,10 +113,7 @@ task noCypressSuite1(type: Exec, dependsOn: [installDev, ':metadata-ingestion:in task cypressSuite1(type: Exec, dependsOn: [installDev, ':metadata-ingestion:installDev']) { environment 'RUN_QUICKSTART', 'false' - environment 'DATAHUB_KAFKA_SCHEMA_REGISTRY_URL', 'http://localhost:8080/schema-registry/api/' - environment 'KAFKA_BROKER_CONTAINER', 'datahub-kafka-broker-1' environment 'TEST_STRATEGY', 'cypress_suite1' - environment "ELASTIC_ID_HASH_ALGO", "MD5" workingDir = project.projectDir commandLine 'bash', '-c', @@ -125,10 +123,7 @@ task cypressSuite1(type: Exec, dependsOn: [installDev, ':metadata-ingestion:inst task cypressRest(type: Exec, dependsOn: [installDev, ':metadata-ingestion:installDev']) 
{ environment 'RUN_QUICKSTART', 'false' - environment 'DATAHUB_KAFKA_SCHEMA_REGISTRY_URL', 'http://localhost:8080/schema-registry/api/' - environment 'KAFKA_BROKER_CONTAINER', 'datahub-kafka-broker-1' environment 'TEST_STRATEGY', 'cypress_rest' - environment "ELASTIC_ID_HASH_ALGO", "MD5" workingDir = project.projectDir commandLine 'bash', '-c', @@ -141,9 +136,6 @@ task cypressRest(type: Exec, dependsOn: [installDev, ':metadata-ingestion:instal */ task cypressDev(type: Exec, dependsOn: [installDev, ':metadata-ingestion:installDev']) { environment 'RUN_QUICKSTART', 'false' - environment 'DATAHUB_KAFKA_SCHEMA_REGISTRY_URL', 'http://localhost:8080/schema-registry/api/' - environment 'KAFKA_BROKER_CONTAINER', 'datahub-kafka-broker-1' - environment "ELASTIC_ID_HASH_ALGO", "MD5" workingDir = project.projectDir commandLine 'bash', '-c', @@ -156,13 +148,18 @@ task cypressDev(type: Exec, dependsOn: [installDev, ':metadata-ingestion:install */ task cypressData(type: Exec, dependsOn: [installDev, ':metadata-ingestion:installDev']) { environment 'RUN_QUICKSTART', 'false' - environment 'DATAHUB_KAFKA_SCHEMA_REGISTRY_URL', 'http://localhost:8080/schema-registry/api/' - environment 'KAFKA_BROKER_CONTAINER', 'datahub-kafka-broker-1' environment 'RUN_UI', 'false' - environment "ELASTIC_ID_HASH_ALGO", "MD5" workingDir = project.projectDir commandLine 'bash', '-c', "source ${venv_name}/bin/activate && set -x && " + "./cypress-dev.sh" -} \ No newline at end of file +} + +task lint { + dependsOn pythonLint, cypressLint +} + +task lintFix { + dependsOn pythonLintFix +} diff --git a/smoke-test/cypress-dev.sh b/smoke-test/cypress-dev.sh index 59346b2606905..bce2d794b1869 100755 --- a/smoke-test/cypress-dev.sh +++ b/smoke-test/cypress-dev.sh @@ -10,9 +10,9 @@ fi source venv/bin/activate -export KAFKA_BROKER_CONTAINER="datahub-kafka-broker-1" -export KAFKA_BOOTSTRAP_SERVER="broker:9092" -export ELASTIC_ID_HASH_ALGO="MD5" +# set environment variables for the test +source 
./set-test-env-vars.sh + python -c 'from tests.cypress.integration_test import ingest_data; ingest_data()' cd tests/cypress diff --git a/smoke-test/run-quickstart.sh b/smoke-test/run-quickstart.sh index 1923d42eb5e93..2bf5cdf8ca9c4 100755 --- a/smoke-test/run-quickstart.sh +++ b/smoke-test/run-quickstart.sh @@ -10,6 +10,7 @@ source venv/bin/activate mkdir -p ~/.datahub/plugins/frontend/auth/ echo "test_user:test_pass" >> ~/.datahub/plugins/frontend/auth/user.props +echo "DATAHUB_VERSION = $DATAHUB_VERSION" DATAHUB_SEARCH_IMAGE="${DATAHUB_SEARCH_IMAGE:=opensearchproject/opensearch}" DATAHUB_SEARCH_TAG="${DATAHUB_SEARCH_TAG:=2.9.0}" XPACK_SECURITY_ENABLED="${XPACK_SECURITY_ENABLED:=plugins.security.disabled=true}" @@ -17,10 +18,11 @@ ELASTICSEARCH_USE_SSL="${ELASTICSEARCH_USE_SSL:=false}" USE_AWS_ELASTICSEARCH="${USE_AWS_ELASTICSEARCH:=true}" ELASTIC_ID_HASH_ALGO="${ELASTIC_ID_HASH_ALGO:=MD5}" -echo "DATAHUB_VERSION = $DATAHUB_VERSION" + DATAHUB_TELEMETRY_ENABLED=false \ DOCKER_COMPOSE_BASE="file://$( dirname "$DIR" )" \ DATAHUB_SEARCH_IMAGE="$DATAHUB_SEARCH_IMAGE" DATAHUB_SEARCH_TAG="$DATAHUB_SEARCH_TAG" \ XPACK_SECURITY_ENABLED="$XPACK_SECURITY_ENABLED" ELASTICSEARCH_USE_SSL="$ELASTICSEARCH_USE_SSL" \ USE_AWS_ELASTICSEARCH="$USE_AWS_ELASTICSEARCH" \ -datahub docker quickstart --version ${DATAHUB_VERSION} --standalone_consumers --dump-logs-on-failure --kafka-setup +DATAHUB_VERSION=${DATAHUB_VERSION} \ +docker compose --project-directory ../docker/profiles --profile quickstart-consumers up -d --quiet-pull --wait --wait-timeout 900 diff --git a/smoke-test/set-cypress-creds.sh b/smoke-test/set-cypress-creds.sh index 82fe736b0a7e1..fc6e7dd42f5de 100644 --- a/smoke-test/set-cypress-creds.sh +++ b/smoke-test/set-cypress-creds.sh @@ -2,4 +2,4 @@ export CYPRESS_ADMIN_USERNAME=${ADMIN_USERNAME:-datahub} export CYPRESS_ADMIN_PASSWORD=${ADMIN_PASSWORD:-datahub} -export CYPRESS_ADMIN_DISPLAYNAME=${ADMIN_DISPLAYNAME:-DataHub} \ No newline at end of file +export 
CYPRESS_ADMIN_DISPLAYNAME=${ADMIN_DISPLAYNAME:-DataHub} diff --git a/smoke-test/set-test-env-vars.sh b/smoke-test/set-test-env-vars.sh new file mode 100644 index 0000000000000..dee3af2b68747 --- /dev/null +++ b/smoke-test/set-test-env-vars.sh @@ -0,0 +1,3 @@ +export DATAHUB_KAFKA_SCHEMA_REGISTRY_URL=http://localhost:8080/schema-registry/api +export DATAHUB_GMS_URL=http://localhost:8080 +export ELASTIC_ID_HASH_ALGO="MD5" \ No newline at end of file diff --git a/smoke-test/smoke.sh b/smoke-test/smoke.sh index c16865fe1e71e..5b3e8a9377a6c 100755 --- a/smoke-test/smoke.sh +++ b/smoke-test/smoke.sh @@ -16,16 +16,23 @@ cd "$DIR" if [ "${RUN_QUICKSTART:-true}" == "true" ]; then source ./run-quickstart.sh +else + mkdir -p ~/.datahub/plugins/frontend/auth/ + echo "test_user:test_pass" >> ~/.datahub/plugins/frontend/auth/user.props + echo "datahub:datahub" > ~/.datahub/plugins/frontend/auth/user.props + + python3 -m venv venv + source venv/bin/activate + python -m pip install --upgrade pip uv>=0.1.10 wheel setuptools + uv pip install -r requirements.txt fi -source venv/bin/activate - (cd ..; ./gradlew :smoke-test:yarnInstall) source ./set-cypress-creds.sh -export DATAHUB_GMS_URL=http://localhost:8080 -export ELASTIC_ID_HASH_ALGO="MD5" +# set environment variables for the test +source ./set-test-env-vars.sh # no_cypress_suite0, no_cypress_suite1, cypress_suite1, cypress_rest if [[ -z "${TEST_STRATEGY}" ]]; then diff --git a/smoke-test/test_e2e.py b/smoke-test/test_e2e.py index abb4841314c4a..74d64a8193173 100644 --- a/smoke-test/test_e2e.py +++ b/smoke-test/test_e2e.py @@ -21,6 +21,7 @@ get_frontend_session, get_admin_credentials, get_root_urn, + wait_for_writes_to_sync, ) bootstrap_sample_data = "../metadata-ingestion/examples/mce_files/bootstrap_mce.json" @@ -150,11 +151,13 @@ def _ensure_group_not_present(urn: str, frontend_session) -> Any: def test_ingestion_via_rest(wait_for_healthchecks): ingest_file_via_rest(bootstrap_sample_data) 
_ensure_user_present(urn=get_root_urn()) + wait_for_writes_to_sync() @pytest.mark.dependency(depends=["test_healthchecks"]) def test_ingestion_usage_via_rest(wait_for_healthchecks): ingest_file_via_rest(usage_sample_data) + wait_for_writes_to_sync() @pytest.mark.dependency(depends=["test_healthchecks"]) @@ -185,6 +188,7 @@ def test_ingestion_via_kafka(wait_for_healthchecks): # Since Kafka emission is asynchronous, we must wait a little bit so that # the changes are actually processed. time.sleep(kafka_post_ingestion_wait_sec) + wait_for_writes_to_sync() @pytest.mark.dependency( @@ -196,6 +200,7 @@ def test_ingestion_via_kafka(wait_for_healthchecks): ) def test_run_ingestion(wait_for_healthchecks): # Dummy test so that future ones can just depend on this one. + wait_for_writes_to_sync() pass @@ -1384,7 +1389,9 @@ def test_native_user_endpoints(frontend_session): unauthenticated_get_invite_token_response = unauthenticated_session.post( f"{get_frontend_url()}/api/v2/graphql", json=get_invite_token_json ) - assert unauthenticated_get_invite_token_response.status_code == HTTPStatus.UNAUTHORIZED + assert ( + unauthenticated_get_invite_token_response.status_code == HTTPStatus.UNAUTHORIZED + ) unauthenticated_create_reset_token_json = { "query": """mutation createNativeUserResetToken($input: CreateNativeUserResetTokenInput!) 
{\n @@ -1399,7 +1406,10 @@ def test_native_user_endpoints(frontend_session): f"{get_frontend_url()}/api/v2/graphql", json=unauthenticated_create_reset_token_json, ) - assert unauthenticated_create_reset_token_response.status_code == HTTPStatus.UNAUTHORIZED + assert ( + unauthenticated_create_reset_token_response.status_code + == HTTPStatus.UNAUTHORIZED + ) # cleanup steps json = { diff --git a/smoke-test/tests/consistency_utils.py b/smoke-test/tests/consistency_utils.py index 4335e2a874c1e..1eddc46bb220b 100644 --- a/smoke-test/tests/consistency_utils.py +++ b/smoke-test/tests/consistency_utils.py @@ -8,14 +8,31 @@ ELASTICSEARCH_REFRESH_INTERVAL_SECONDS: int = int( os.getenv("ELASTICSEARCH_REFRESH_INTERVAL_SECONDS", 5) ) -KAFKA_BROKER_CONTAINER: str = str( - os.getenv("KAFKA_BROKER_CONTAINER", "datahub-broker-1") -) KAFKA_BOOTSTRAP_SERVER: str = str(os.getenv("KAFKA_BOOTSTRAP_SERVER", "broker:29092")) logger = logging.getLogger(__name__) +def infer_kafka_broker_container() -> str: + cmd = "docker ps --format '{{.Names}}' | grep broker" + completed_process = subprocess.run( + cmd, + capture_output=True, + shell=True, + text=True, + ) + result = str(completed_process.stdout) + lines = result.splitlines() + if len(lines) == 0: + raise ValueError("No Kafka broker containers found") + return lines[0] + + +KAFKA_BROKER_CONTAINER: str = str( + os.getenv("KAFKA_BROKER_CONTAINER", infer_kafka_broker_container()) +) + + def wait_for_writes_to_sync(max_timeout_in_sec: int = 120) -> None: if USE_STATIC_SLEEP: time.sleep(ELASTICSEARCH_REFRESH_INTERVAL_SECONDS) @@ -44,7 +61,9 @@ def wait_for_writes_to_sync(max_timeout_in_sec: int = 120) -> None: if maximum_lag == 0: lag_zero = True except ValueError: - logger.warning(f"Error reading kafka lag using command: {cmd}") + logger.warning( + f"Error reading kafka lag using command: {cmd}", exc_info=True + ) if not lag_zero: logger.warning( From 897173f270e780e08f936b219ee156fd3ca5a8db Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: 
Tue, 13 Aug 2024 13:54:50 -0700 Subject: [PATCH 35/72] feat(dbt): support prefer_sql_parser_lineage with sources enabled (#11168) --- .../ingestion/source/dbt/dbt_common.py | 43 +- ...test_prefer_sql_parser_lineage_golden.json | 3031 ++++++++++++++++- .../tests/integration/dbt/test_dbt.py | 2 +- .../tests/unit/test_dbt_source.py | 1 - 4 files changed, 3022 insertions(+), 55 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_common.py b/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_common.py index e2b5f8378732c..d2b41323e5115 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_common.py +++ b/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_common.py @@ -276,6 +276,12 @@ class DBTCommonConfig( DBTEntitiesEnabled(), description="Controls for enabling / disabling metadata emission for different dbt entities (models, test definitions, test results, etc.)", ) + prefer_sql_parser_lineage: bool = Field( + default=False, + description="Normally we use dbt's metadata to generate table lineage. When enabled, we prefer results from the SQL parser when generating lineage instead. " + "This can be useful when dbt models reference tables directly, instead of using the ref() macro. " + "This requires that `skip_sources_in_lineage` is enabled.", + ) skip_sources_in_lineage: bool = Field( default=False, description="[Experimental] When enabled, dbt sources will not be included in the lineage graph. " @@ -366,13 +372,6 @@ class DBTCommonConfig( description="When enabled, includes the compiled code in the emitted metadata.", ) - prefer_sql_parser_lineage: bool = Field( - default=False, - description="Normally we use dbt's metadata to generate table lineage. When enabled, we prefer results from the SQL parser when generating lineage instead. " - "This can be useful when dbt models reference tables directly, instead of using the ref() macro. 
" - "This requires that `skip_sources_in_lineage` is enabled.", - ) - @validator("target_platform") def validate_target_platform_value(cls, target_platform: str) -> str: if target_platform.lower() == DBT_PLATFORM: @@ -438,15 +437,27 @@ def validate_include_column_lineage( return include_column_lineage - @validator("skip_sources_in_lineage") + @validator("skip_sources_in_lineage", always=True) def validate_skip_sources_in_lineage( cls, skip_sources_in_lineage: bool, values: Dict ) -> bool: - entites_enabled: Optional[DBTEntitiesEnabled] = values.get("entities_enabled") + entities_enabled: Optional[DBTEntitiesEnabled] = values.get("entities_enabled") + prefer_sql_parser_lineage: Optional[bool] = values.get( + "prefer_sql_parser_lineage" + ) + + if prefer_sql_parser_lineage and not skip_sources_in_lineage: + raise ValueError( + "`prefer_sql_parser_lineage` requires that `skip_sources_in_lineage` is enabled." + ) + if ( skip_sources_in_lineage - and entites_enabled - and entites_enabled.sources == EmitDirective.YES + and entities_enabled + and entities_enabled.sources == EmitDirective.YES + # When `prefer_sql_parser_lineage` is enabled, it's ok to have `skip_sources_in_lineage` enabled + # without also disabling sources. + and not prefer_sql_parser_lineage ): raise ValueError( "When `skip_sources_in_lineage` is enabled, `entities_enabled.sources` must be set to NO." @@ -454,16 +465,6 @@ def validate_skip_sources_in_lineage( return skip_sources_in_lineage - @validator("prefer_sql_parser_lineage") - def validate_prefer_sql_parser_lineage( - cls, prefer_sql_parser_lineage: bool, values: Dict - ) -> bool: - if prefer_sql_parser_lineage and not values.get("skip_sources_in_lineage"): - raise ValueError( - "`prefer_sql_parser_lineage` requires that `skip_sources_in_lineage` is enabled." 
- ) - return prefer_sql_parser_lineage - @dataclass class DBTColumn: diff --git a/metadata-ingestion/tests/integration/dbt/dbt_test_prefer_sql_parser_lineage_golden.json b/metadata-ingestion/tests/integration/dbt/dbt_test_prefer_sql_parser_lineage_golden.json index 81754fd6cbcac..d421fc4ba42f5 100644 --- a/metadata-ingestion/tests/integration/dbt/dbt_test_prefer_sql_parser_lineage_golden.json +++ b/metadata-ingestion/tests/integration/dbt/dbt_test_prefer_sql_parser_lineage_golden.json @@ -638,8 +638,8 @@ "json": { "timestampMillis": 1663355198240, "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" + "partition": "FULL_TABLE_SNAPSHOT", + "type": "FULL_TABLE" }, "status": "STARTED" } @@ -659,8 +659,8 @@ "json": { "timestampMillis": 1663355198242, "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" + "partition": "FULL_TABLE_SNAPSHOT", + "type": "FULL_TABLE" }, "status": "COMPLETE", "result": { @@ -1097,8 +1097,8 @@ "json": { "timestampMillis": 1663355198240, "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" + "partition": "FULL_TABLE_SNAPSHOT", + "type": "FULL_TABLE" }, "status": "STARTED" } @@ -1118,8 +1118,8 @@ "json": { "timestampMillis": 1663355198242, "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" + "partition": "FULL_TABLE_SNAPSHOT", + "type": "FULL_TABLE" }, "status": "COMPLETE", "result": { @@ -1420,8 +1420,8 @@ "json": { "timestampMillis": 1663355198240, "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" + "partition": "FULL_TABLE_SNAPSHOT", + "type": "FULL_TABLE" }, "status": "STARTED" } @@ -1441,8 +1441,8 @@ "json": { "timestampMillis": 1663355198242, "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" + "partition": "FULL_TABLE_SNAPSHOT", + "type": "FULL_TABLE" }, "status": "COMPLETE", "result": { @@ -1944,8 +1944,8 @@ "json": { "timestampMillis": 1663355198240, "partitionSpec": { 
- "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" + "partition": "FULL_TABLE_SNAPSHOT", + "type": "FULL_TABLE" }, "status": "STARTED" } @@ -1965,8 +1965,8 @@ "json": { "timestampMillis": 1663355198242, "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" + "partition": "FULL_TABLE_SNAPSHOT", + "type": "FULL_TABLE" }, "status": "COMPLETE", "result": { @@ -1982,6 +1982,2973 @@ "lastRunId": "no-run-id-provided" } }, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.actor,PROD)", + "changeType": "UPSERT", + "aspectName": "subTypes", + "aspect": { + "json": { + "typeNames": [ + "Source" + ] + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "dbt-prefer-sql-parser-lineage", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.actor,PROD)", + "changeType": "UPSERT", + "aspectName": "dataPlatformInstance", + "aspect": { + "json": { + "platform": "urn:li:dataPlatform:dbt" + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "dbt-prefer-sql-parser-lineage", + "lastRunId": "no-run-id-provided" + } +}, +{ + "proposedSnapshot": { + "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { + "urn": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.actor,PROD)", + "aspects": [ + { + "com.linkedin.pegasus2avro.dataset.DatasetProperties": { + "customProperties": { + "model_maturity": "in dev", + "owner": "@alice", + "some_other_property": "test 1", + "node_type": "source", + "dbt_file_path": "models/base.yml", + "catalog_type": "BASE TABLE", + "language": "sql", + "dbt_unique_id": "source.sample_dbt.pagila.actor", + "dbt_package_name": "sample_dbt", + "manifest_schema": "https://schemas.getdbt.com/dbt/manifest/v11.json", + "manifest_version": "1.7.3", + "manifest_adapter": "postgres", + "catalog_schema": 
"https://schemas.getdbt.com/dbt/catalog/v1.json", + "catalog_version": "1.7.3" + }, + "name": "actor", + "description": "description for actor table from dbt", + "tags": [] + } + }, + { + "com.linkedin.pegasus2avro.common.Status": { + "removed": false + } + }, + { + "com.linkedin.pegasus2avro.common.Ownership": { + "owners": [ + { + "owner": "urn:li:corpuser:@alice", + "type": "DATAOWNER" + } + ], + "ownerTypes": {}, + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + } + } + }, + { + "com.linkedin.pegasus2avro.schema.SchemaMetadata": { + "schemaName": "source.sample_dbt.pagila.actor", + "platform": "urn:li:dataPlatform:dbt", + "version": 0, + "created": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "lastModified": { + "time": 1581759273000, + "actor": "urn:li:corpuser:dbt_executor" + }, + "hash": "", + "platformSchema": { + "com.linkedin.pegasus2avro.schema.MySqlDDL": { + "tableSchema": "" + } + }, + "fields": [ + { + "fieldPath": "actor_id", + "nullable": false, + "description": "description for actor_id column from dbt", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "integer", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "first_name", + "nullable": false, + "description": "dbt comment: Actors column \u2013 from postgres\n\ndbt model description: description for first_name from dbt", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "text", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "last_name", + "nullable": false, + "description": "description for last_name from dbt", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "text", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "last_update", + "nullable": false, + "description": "description for last_update from dbt", + "type": { + "type": { + 
"com.linkedin.pegasus2avro.schema.TimeType": {} + } + }, + "nativeDataType": "timestamp with time zone", + "recursive": false, + "isPartOfKey": false + } + ] + } + }, + { + "com.linkedin.pegasus2avro.dataset.UpstreamLineage": { + "upstreams": [ + { + "auditStamp": { + "time": 1643871600000, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.actor,PROD)", + "type": "COPY" + } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.actor,PROD),actor_id)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.actor,PROD),actor_id)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.actor,PROD),first_name)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.actor,PROD),first_name)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.actor,PROD),last_name)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.actor,PROD),last_name)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.actor,PROD),last_update)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.actor,PROD),last_update)" + ], + "confidenceScore": 1.0 + } + ] + } + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "dbt-prefer-sql-parser-lineage", + 
"lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.address,PROD)", + "changeType": "UPSERT", + "aspectName": "subTypes", + "aspect": { + "json": { + "typeNames": [ + "Source" + ] + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "dbt-prefer-sql-parser-lineage", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.address,PROD)", + "changeType": "UPSERT", + "aspectName": "dataPlatformInstance", + "aspect": { + "json": { + "platform": "urn:li:dataPlatform:dbt" + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "dbt-prefer-sql-parser-lineage", + "lastRunId": "no-run-id-provided" + } +}, +{ + "proposedSnapshot": { + "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { + "urn": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.address,PROD)", + "aspects": [ + { + "com.linkedin.pegasus2avro.dataset.DatasetProperties": { + "customProperties": { + "node_type": "source", + "dbt_file_path": "models/base.yml", + "catalog_type": "BASE TABLE", + "language": "sql", + "dbt_unique_id": "source.sample_dbt.pagila.address", + "dbt_package_name": "sample_dbt", + "manifest_schema": "https://schemas.getdbt.com/dbt/manifest/v11.json", + "manifest_version": "1.7.3", + "manifest_adapter": "postgres", + "catalog_schema": "https://schemas.getdbt.com/dbt/catalog/v1.json", + "catalog_version": "1.7.3" + }, + "name": "address", + "description": "a user's address", + "tags": [] + } + }, + { + "com.linkedin.pegasus2avro.common.Status": { + "removed": false + } + }, + { + "com.linkedin.pegasus2avro.schema.SchemaMetadata": { + "schemaName": "source.sample_dbt.pagila.address", + "platform": "urn:li:dataPlatform:dbt", + "version": 0, + "created": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "lastModified": { + "time": 1581759930000, + "actor": 
"urn:li:corpuser:dbt_executor" + }, + "hash": "", + "platformSchema": { + "com.linkedin.pegasus2avro.schema.MySqlDDL": { + "tableSchema": "" + } + }, + "fields": [ + { + "fieldPath": "address", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "text", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "address2", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "text", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "address_id", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "integer", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "city_id", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "integer", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "district", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "text", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "last_update", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.TimeType": {} + } + }, + "nativeDataType": "timestamp with time zone", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "phone", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "text", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "postal_code", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "text", + "recursive": false, + "isPartOfKey": false + } + ] + } + }, + { + "com.linkedin.pegasus2avro.dataset.UpstreamLineage": { + "upstreams": 
[ + { + "auditStamp": { + "time": 1643871600000, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.address,PROD)", + "type": "COPY" + } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.address,PROD),address)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.address,PROD),address)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.address,PROD),address2)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.address,PROD),address2)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.address,PROD),address_id)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.address,PROD),address_id)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.address,PROD),city_id)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.address,PROD),city_id)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.address,PROD),district)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.address,PROD),district)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": 
"FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.address,PROD),last_update)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.address,PROD),last_update)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.address,PROD),phone)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.address,PROD),phone)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.address,PROD),postal_code)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.address,PROD),postal_code)" + ], + "confidenceScore": 1.0 + } + ] + } + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "dbt-prefer-sql-parser-lineage", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.category,PROD)", + "changeType": "UPSERT", + "aspectName": "subTypes", + "aspect": { + "json": { + "typeNames": [ + "Source" + ] + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "dbt-prefer-sql-parser-lineage", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.category,PROD)", + "changeType": "UPSERT", + "aspectName": "dataPlatformInstance", + "aspect": { + "json": { + "platform": "urn:li:dataPlatform:dbt" + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "dbt-prefer-sql-parser-lineage", + "lastRunId": "no-run-id-provided" + } +}, +{ + 
"proposedSnapshot": { + "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { + "urn": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.category,PROD)", + "aspects": [ + { + "com.linkedin.pegasus2avro.dataset.DatasetProperties": { + "customProperties": { + "node_type": "source", + "dbt_file_path": "models/base.yml", + "catalog_type": "BASE TABLE", + "language": "sql", + "dbt_unique_id": "source.sample_dbt.pagila.category", + "dbt_package_name": "sample_dbt", + "manifest_schema": "https://schemas.getdbt.com/dbt/manifest/v11.json", + "manifest_version": "1.7.3", + "manifest_adapter": "postgres", + "catalog_schema": "https://schemas.getdbt.com/dbt/catalog/v1.json", + "catalog_version": "1.7.3" + }, + "name": "category", + "description": "a user's category", + "tags": [] + } + }, + { + "com.linkedin.pegasus2avro.common.Status": { + "removed": false + } + }, + { + "com.linkedin.pegasus2avro.schema.SchemaMetadata": { + "schemaName": "source.sample_dbt.pagila.category", + "platform": "urn:li:dataPlatform:dbt", + "version": 0, + "created": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "lastModified": { + "time": 1581759987000, + "actor": "urn:li:corpuser:dbt_executor" + }, + "hash": "", + "platformSchema": { + "com.linkedin.pegasus2avro.schema.MySqlDDL": { + "tableSchema": "" + } + }, + "fields": [ + { + "fieldPath": "category_id", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "integer", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "last_update", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.TimeType": {} + } + }, + "nativeDataType": "timestamp with time zone", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "name", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "text", + "recursive": false, + "isPartOfKey": 
false + } + ] + } + }, + { + "com.linkedin.pegasus2avro.dataset.UpstreamLineage": { + "upstreams": [ + { + "auditStamp": { + "time": 1643871600000, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.category,PROD)", + "type": "COPY" + } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.category,PROD),category_id)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.category,PROD),category_id)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.category,PROD),last_update)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.category,PROD),last_update)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.category,PROD),name)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.category,PROD),name)" + ], + "confidenceScore": 1.0 + } + ] + } + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "dbt-prefer-sql-parser-lineage", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.city,PROD)", + "changeType": "UPSERT", + "aspectName": "subTypes", + "aspect": { + "json": { + "typeNames": [ + "Source" + ] + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "dbt-prefer-sql-parser-lineage", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": 
"urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.city,PROD)", + "changeType": "UPSERT", + "aspectName": "dataPlatformInstance", + "aspect": { + "json": { + "platform": "urn:li:dataPlatform:dbt" + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "dbt-prefer-sql-parser-lineage", + "lastRunId": "no-run-id-provided" + } +}, +{ + "proposedSnapshot": { + "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { + "urn": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.city,PROD)", + "aspects": [ + { + "com.linkedin.pegasus2avro.dataset.DatasetProperties": { + "customProperties": { + "node_type": "source", + "dbt_file_path": "models/base.yml", + "catalog_type": "BASE TABLE", + "language": "sql", + "dbt_unique_id": "source.sample_dbt.pagila.city", + "dbt_package_name": "sample_dbt", + "manifest_schema": "https://schemas.getdbt.com/dbt/manifest/v11.json", + "manifest_version": "1.7.3", + "manifest_adapter": "postgres", + "catalog_schema": "https://schemas.getdbt.com/dbt/catalog/v1.json", + "catalog_version": "1.7.3" + }, + "name": "city", + "description": "", + "tags": [] + } + }, + { + "com.linkedin.pegasus2avro.common.Status": { + "removed": false + } + }, + { + "com.linkedin.pegasus2avro.schema.SchemaMetadata": { + "schemaName": "source.sample_dbt.pagila.city", + "platform": "urn:li:dataPlatform:dbt", + "version": 0, + "created": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "lastModified": { + "time": 1581759925000, + "actor": "urn:li:corpuser:dbt_executor" + }, + "hash": "", + "platformSchema": { + "com.linkedin.pegasus2avro.schema.MySqlDDL": { + "tableSchema": "" + } + }, + "fields": [ + { + "fieldPath": "city", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "text", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "city_id", + "nullable": false, + "type": { + "type": { + 
"com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "integer", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "country_id", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "integer", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "last_update", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.TimeType": {} + } + }, + "nativeDataType": "timestamp with time zone", + "recursive": false, + "isPartOfKey": false + } + ] + } + }, + { + "com.linkedin.pegasus2avro.dataset.UpstreamLineage": { + "upstreams": [ + { + "auditStamp": { + "time": 1643871600000, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.city,PROD)", + "type": "COPY" + } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.city,PROD),city)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.city,PROD),city)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.city,PROD),city_id)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.city,PROD),city_id)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.city,PROD),country_id)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.city,PROD),country_id)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.city,PROD),last_update)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.city,PROD),last_update)" + ], + "confidenceScore": 1.0 + } + ] + } + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "dbt-prefer-sql-parser-lineage", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.country,PROD)", + "changeType": "UPSERT", + "aspectName": "subTypes", + "aspect": { + "json": { + "typeNames": [ + "Source" + ] + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "dbt-prefer-sql-parser-lineage", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.country,PROD)", + "changeType": "UPSERT", + "aspectName": "dataPlatformInstance", + "aspect": { + "json": { + "platform": "urn:li:dataPlatform:dbt" + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "dbt-prefer-sql-parser-lineage", + "lastRunId": "no-run-id-provided" + } +}, +{ + "proposedSnapshot": { + "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { + "urn": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.country,PROD)", + "aspects": [ + { + "com.linkedin.pegasus2avro.dataset.DatasetProperties": { + "customProperties": { + "model_maturity": "in prod", + "owner": "@bob", + "some_other_property": "test 2", + "node_type": "source", + "dbt_file_path": "models/base.yml", + "catalog_type": "BASE TABLE", + "language": "sql", + "dbt_unique_id": "source.sample_dbt.pagila.country", + "dbt_package_name": "sample_dbt", + "manifest_schema": "https://schemas.getdbt.com/dbt/manifest/v11.json", + "manifest_version": "1.7.3", + "manifest_adapter": "postgres", + "catalog_schema": 
"https://schemas.getdbt.com/dbt/catalog/v1.json", + "catalog_version": "1.7.3" + }, + "name": "country", + "description": "", + "tags": [] + } + }, + { + "com.linkedin.pegasus2avro.common.Status": { + "removed": false + } + }, + { + "com.linkedin.pegasus2avro.common.Ownership": { + "owners": [ + { + "owner": "urn:li:corpuser:@bob", + "type": "DATAOWNER" + } + ], + "ownerTypes": {}, + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + } + } + }, + { + "com.linkedin.pegasus2avro.schema.SchemaMetadata": { + "schemaName": "source.sample_dbt.pagila.country", + "platform": "urn:li:dataPlatform:dbt", + "version": 0, + "created": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "lastModified": { + "time": 1581759840000, + "actor": "urn:li:corpuser:dbt_executor" + }, + "hash": "", + "platformSchema": { + "com.linkedin.pegasus2avro.schema.MySqlDDL": { + "tableSchema": "" + } + }, + "fields": [ + { + "fieldPath": "country", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "text", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "country_id", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "integer", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "last_update", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.TimeType": {} + } + }, + "nativeDataType": "timestamp with time zone", + "recursive": false, + "isPartOfKey": false + } + ] + } + }, + { + "com.linkedin.pegasus2avro.dataset.UpstreamLineage": { + "upstreams": [ + { + "auditStamp": { + "time": 1643871600000, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.country,PROD)", + "type": "COPY" + } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.country,PROD),country)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.country,PROD),country)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.country,PROD),country_id)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.country,PROD),country_id)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.country,PROD),last_update)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.country,PROD),last_update)" + ], + "confidenceScore": 1.0 + } + ] + } + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "dbt-prefer-sql-parser-lineage", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD)", + "changeType": "UPSERT", + "aspectName": "subTypes", + "aspect": { + "json": { + "typeNames": [ + "Source" + ] + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "dbt-prefer-sql-parser-lineage", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD)", + "changeType": "UPSERT", + "aspectName": "dataPlatformInstance", + "aspect": { + "json": { + "platform": "urn:li:dataPlatform:dbt" + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "dbt-prefer-sql-parser-lineage", + "lastRunId": "no-run-id-provided" + } +}, +{ + "proposedSnapshot": { + 
"com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { + "urn": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD)", + "aspects": [ + { + "com.linkedin.pegasus2avro.dataset.DatasetProperties": { + "customProperties": { + "node_type": "source", + "dbt_file_path": "models/base.yml", + "catalog_type": "BASE TABLE", + "language": "sql", + "dbt_unique_id": "source.sample_dbt.pagila.customer", + "dbt_package_name": "sample_dbt", + "manifest_schema": "https://schemas.getdbt.com/dbt/manifest/v11.json", + "manifest_version": "1.7.3", + "manifest_adapter": "postgres", + "catalog_schema": "https://schemas.getdbt.com/dbt/catalog/v1.json", + "catalog_version": "1.7.3" + }, + "name": "customer", + "description": "description for customer table from dbt", + "tags": [] + } + }, + { + "com.linkedin.pegasus2avro.common.Status": { + "removed": false + } + }, + { + "com.linkedin.pegasus2avro.schema.SchemaMetadata": { + "schemaName": "source.sample_dbt.pagila.customer", + "platform": "urn:li:dataPlatform:dbt", + "version": 0, + "created": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "lastModified": { + "time": 1581760640000, + "actor": "urn:li:corpuser:dbt_executor" + }, + "hash": "", + "platformSchema": { + "com.linkedin.pegasus2avro.schema.MySqlDDL": { + "tableSchema": "" + } + }, + "fields": [ + { + "fieldPath": "active", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "integer", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "activebool", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.BooleanType": {} + } + }, + "nativeDataType": "boolean", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "address_id", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "integer", + "recursive": false, + "isPartOfKey": false + }, + 
{ + "fieldPath": "create_date", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.DateType": {} + } + }, + "nativeDataType": "date", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "customer_id", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "integer", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "email", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "text", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "first_name", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "text", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "last_name", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "text", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "last_update", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.TimeType": {} + } + }, + "nativeDataType": "timestamp with time zone", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "store_id", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "integer", + "recursive": false, + "isPartOfKey": false + } + ] + } + }, + { + "com.linkedin.pegasus2avro.dataset.UpstreamLineage": { + "upstreams": [ + { + "auditStamp": { + "time": 1643871600000, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.customer,PROD)", + "type": "COPY" + } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.customer,PROD),active)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),active)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.customer,PROD),activebool)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),activebool)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.customer,PROD),address_id)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),address_id)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.customer,PROD),create_date)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),create_date)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.customer,PROD),customer_id)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),customer_id)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.customer,PROD),email)" + ], + "downstreamType": "FIELD", + "downstreams": [ + 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),email)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.customer,PROD),first_name)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),first_name)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.customer,PROD),last_name)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),last_name)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.customer,PROD),last_update)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),last_update)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.customer,PROD),store_id)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),store_id)" + ], + "confidenceScore": 1.0 + } + ] + } + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "dbt-prefer-sql-parser-lineage", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD)", + "changeType": "UPSERT", + "aspectName": "subTypes", + "aspect": { + "json": { + "typeNames": [ + "Source" + ] + } + }, + "systemMetadata": { + 
"lastObserved": 1643871600000, + "runId": "dbt-prefer-sql-parser-lineage", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD)", + "changeType": "UPSERT", + "aspectName": "dataPlatformInstance", + "aspect": { + "json": { + "platform": "urn:li:dataPlatform:dbt" + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "dbt-prefer-sql-parser-lineage", + "lastRunId": "no-run-id-provided" + } +}, +{ + "proposedSnapshot": { + "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { + "urn": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD)", + "aspects": [ + { + "com.linkedin.pegasus2avro.dataset.DatasetProperties": { + "customProperties": { + "node_type": "source", + "dbt_file_path": "models/base.yml", + "catalog_type": "BASE TABLE", + "language": "sql", + "dbt_unique_id": "source.sample_dbt.pagila.payment_p2020_01", + "dbt_package_name": "sample_dbt", + "manifest_schema": "https://schemas.getdbt.com/dbt/manifest/v11.json", + "manifest_version": "1.7.3", + "manifest_adapter": "postgres", + "catalog_schema": "https://schemas.getdbt.com/dbt/catalog/v1.json", + "catalog_version": "1.7.3" + }, + "name": "payment_p2020_01", + "description": "", + "tags": [] + } + }, + { + "com.linkedin.pegasus2avro.common.Status": { + "removed": false + } + }, + { + "com.linkedin.pegasus2avro.schema.SchemaMetadata": { + "schemaName": "source.sample_dbt.pagila.payment_p2020_01", + "platform": "urn:li:dataPlatform:dbt", + "version": 0, + "created": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "lastModified": { + "time": 1580505371997, + "actor": "urn:li:corpuser:dbt_executor" + }, + "hash": "", + "platformSchema": { + "com.linkedin.pegasus2avro.schema.MySqlDDL": { + "tableSchema": "" + } + }, + "fields": [ + { + "fieldPath": "amount", + "nullable": false, + "type": { + "type": { + 
"com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "numeric(5,2)", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "customer_id", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "integer", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "payment_date", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.TimeType": {} + } + }, + "nativeDataType": "timestamp with time zone", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "payment_id", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "integer", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "rental_id", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "integer", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "staff_id", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "integer", + "recursive": false, + "isPartOfKey": false + } + ] + } + }, + { + "com.linkedin.pegasus2avro.dataset.UpstreamLineage": { + "upstreams": [ + { + "auditStamp": { + "time": 1643871600000, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_01,PROD)", + "type": "COPY" + } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_01,PROD),amount)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),amount)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + 
"upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_01,PROD),customer_id)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),customer_id)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_01,PROD),payment_date)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),payment_date)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_01,PROD),payment_id)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),payment_id)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_01,PROD),rental_id)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),rental_id)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_01,PROD),staff_id)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),staff_id)" + ], + "confidenceScore": 1.0 + } + ] + } + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "dbt-prefer-sql-parser-lineage", + "lastRunId": "no-run-id-provided" + } +}, +{ + 
"entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD)", + "changeType": "UPSERT", + "aspectName": "subTypes", + "aspect": { + "json": { + "typeNames": [ + "Source" + ] + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "dbt-prefer-sql-parser-lineage", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD)", + "changeType": "UPSERT", + "aspectName": "dataPlatformInstance", + "aspect": { + "json": { + "platform": "urn:li:dataPlatform:dbt" + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "dbt-prefer-sql-parser-lineage", + "lastRunId": "no-run-id-provided" + } +}, +{ + "proposedSnapshot": { + "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { + "urn": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD)", + "aspects": [ + { + "com.linkedin.pegasus2avro.dataset.DatasetProperties": { + "customProperties": { + "an_array_property": "['alpha', 'beta', 'charlie']", + "model_maturity": "in prod", + "owner": "@charles", + "some_other_property": "test 3", + "node_type": "source", + "dbt_file_path": "models/base.yml", + "catalog_type": "BASE TABLE", + "language": "sql", + "dbt_unique_id": "source.sample_dbt.pagila.payment_p2020_02", + "dbt_package_name": "sample_dbt", + "manifest_schema": "https://schemas.getdbt.com/dbt/manifest/v11.json", + "manifest_version": "1.7.3", + "manifest_adapter": "postgres", + "catalog_schema": "https://schemas.getdbt.com/dbt/catalog/v1.json", + "catalog_version": "1.7.3" + }, + "name": "payment_p2020_02", + "description": "", + "tags": [] + } + }, + { + "com.linkedin.pegasus2avro.common.Status": { + "removed": false + } + }, + { + "com.linkedin.pegasus2avro.common.Ownership": { + "owners": [ + { + "owner": "urn:li:corpuser:@charles", + "type": "DATAOWNER" + } + ], + "ownerTypes": {}, + 
"lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + } + } + }, + { + "com.linkedin.pegasus2avro.schema.SchemaMetadata": { + "schemaName": "source.sample_dbt.pagila.payment_p2020_02", + "platform": "urn:li:dataPlatform:dbt", + "version": 0, + "created": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "lastModified": { + "time": 1582319845997, + "actor": "urn:li:corpuser:dbt_executor" + }, + "hash": "", + "platformSchema": { + "com.linkedin.pegasus2avro.schema.MySqlDDL": { + "tableSchema": "" + } + }, + "fields": [ + { + "fieldPath": "amount", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "numeric(5,2)", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "customer_id", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "integer", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "payment_date", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.TimeType": {} + } + }, + "nativeDataType": "timestamp with time zone", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "payment_id", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "integer", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "rental_id", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "integer", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "staff_id", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "integer", + "recursive": false, + "isPartOfKey": false + } + ] + } + }, + { + "com.linkedin.pegasus2avro.dataset.UpstreamLineage": { + "upstreams": [ + { + "auditStamp": { + 
"time": 1643871600000, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_02,PROD)", + "type": "COPY" + } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_02,PROD),amount)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),amount)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_02,PROD),customer_id)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),customer_id)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_02,PROD),payment_date)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),payment_date)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_02,PROD),payment_id)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),payment_id)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_02,PROD),rental_id)" + ], + "downstreamType": "FIELD", + "downstreams": [ + 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),rental_id)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_02,PROD),staff_id)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),staff_id)" + ], + "confidenceScore": 1.0 + } + ] + } + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "dbt-prefer-sql-parser-lineage", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD)", + "changeType": "UPSERT", + "aspectName": "subTypes", + "aspect": { + "json": { + "typeNames": [ + "Source" + ] + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "dbt-prefer-sql-parser-lineage", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD)", + "changeType": "UPSERT", + "aspectName": "dataPlatformInstance", + "aspect": { + "json": { + "platform": "urn:li:dataPlatform:dbt" + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "dbt-prefer-sql-parser-lineage", + "lastRunId": "no-run-id-provided" + } +}, +{ + "proposedSnapshot": { + "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { + "urn": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD)", + "aspects": [ + { + "com.linkedin.pegasus2avro.dataset.DatasetProperties": { + "customProperties": { + "node_type": "source", + "dbt_file_path": "models/base.yml", + "catalog_type": "BASE TABLE", + "language": "sql", + "dbt_unique_id": "source.sample_dbt.pagila.payment_p2020_03", + "dbt_package_name": "sample_dbt", + "manifest_schema": 
"https://schemas.getdbt.com/dbt/manifest/v11.json", + "manifest_version": "1.7.3", + "manifest_adapter": "postgres", + "catalog_schema": "https://schemas.getdbt.com/dbt/catalog/v1.json", + "catalog_version": "1.7.3" + }, + "name": "payment_p2020_03", + "description": "", + "tags": [] + } + }, + { + "com.linkedin.pegasus2avro.common.Status": { + "removed": false + } + }, + { + "com.linkedin.pegasus2avro.schema.SchemaMetadata": { + "schemaName": "source.sample_dbt.pagila.payment_p2020_03", + "platform": "urn:li:dataPlatform:dbt", + "version": 0, + "created": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "lastModified": { + "time": 1584998318997, + "actor": "urn:li:corpuser:dbt_executor" + }, + "hash": "", + "platformSchema": { + "com.linkedin.pegasus2avro.schema.MySqlDDL": { + "tableSchema": "" + } + }, + "fields": [ + { + "fieldPath": "amount", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "numeric(5,2)", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "customer_id", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "integer", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "payment_date", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.TimeType": {} + } + }, + "nativeDataType": "timestamp with time zone", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "payment_id", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "integer", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "rental_id", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "integer", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "staff_id", + 
"nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "integer", + "recursive": false, + "isPartOfKey": false + } + ] + } + }, + { + "com.linkedin.pegasus2avro.dataset.UpstreamLineage": { + "upstreams": [ + { + "auditStamp": { + "time": 1643871600000, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_03,PROD)", + "type": "COPY" + } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_03,PROD),amount)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),amount)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_03,PROD),customer_id)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),customer_id)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_03,PROD),payment_date)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),payment_date)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_03,PROD),payment_id)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),payment_id)" + ], + "confidenceScore": 1.0 + }, + 
{ + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_03,PROD),rental_id)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),rental_id)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_03,PROD),staff_id)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),staff_id)" + ], + "confidenceScore": 1.0 + } + ] + } + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "dbt-prefer-sql-parser-lineage", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD)", + "changeType": "UPSERT", + "aspectName": "subTypes", + "aspect": { + "json": { + "typeNames": [ + "Source" + ] + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "dbt-prefer-sql-parser-lineage", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD)", + "changeType": "UPSERT", + "aspectName": "dataPlatformInstance", + "aspect": { + "json": { + "platform": "urn:li:dataPlatform:dbt" + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "dbt-prefer-sql-parser-lineage", + "lastRunId": "no-run-id-provided" + } +}, +{ + "proposedSnapshot": { + "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { + "urn": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD)", + "aspects": [ + { + "com.linkedin.pegasus2avro.dataset.DatasetProperties": { + "customProperties": { + "node_type": 
"source", + "dbt_file_path": "models/base.yml", + "catalog_type": "BASE TABLE", + "language": "sql", + "dbt_unique_id": "source.sample_dbt.pagila.payment_p2020_04", + "dbt_package_name": "sample_dbt", + "manifest_schema": "https://schemas.getdbt.com/dbt/manifest/v11.json", + "manifest_version": "1.7.3", + "manifest_adapter": "postgres", + "catalog_schema": "https://schemas.getdbt.com/dbt/catalog/v1.json", + "catalog_version": "1.7.3" + }, + "name": "payment_p2020_04", + "description": "", + "tags": [] + } + }, + { + "com.linkedin.pegasus2avro.common.Status": { + "removed": false + } + }, + { + "com.linkedin.pegasus2avro.schema.SchemaMetadata": { + "schemaName": "source.sample_dbt.pagila.payment_p2020_04", + "platform": "urn:li:dataPlatform:dbt", + "version": 0, + "created": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "lastModified": { + "time": 1588287228997, + "actor": "urn:li:corpuser:dbt_executor" + }, + "hash": "", + "platformSchema": { + "com.linkedin.pegasus2avro.schema.MySqlDDL": { + "tableSchema": "" + } + }, + "fields": [ + { + "fieldPath": "amount", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "numeric(5,2)", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "customer_id", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "integer", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "payment_date", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.TimeType": {} + } + }, + "nativeDataType": "timestamp with time zone", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "payment_id", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "integer", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "rental_id", + 
"nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "integer", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "staff_id", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "integer", + "recursive": false, + "isPartOfKey": false + } + ] + } + }, + { + "com.linkedin.pegasus2avro.dataset.UpstreamLineage": { + "upstreams": [ + { + "auditStamp": { + "time": 1643871600000, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_04,PROD)", + "type": "COPY" + } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_04,PROD),amount)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),amount)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_04,PROD),customer_id)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),customer_id)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_04,PROD),payment_date)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),payment_date)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_04,PROD),payment_id)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),payment_id)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_04,PROD),rental_id)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),rental_id)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_04,PROD),staff_id)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),staff_id)" + ], + "confidenceScore": 1.0 + } + ] + } + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "dbt-prefer-sql-parser-lineage", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD)", + "changeType": "UPSERT", + "aspectName": "subTypes", + "aspect": { + "json": { + "typeNames": [ + "Source" + ] + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "dbt-prefer-sql-parser-lineage", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD)", + "changeType": "UPSERT", + "aspectName": "dataPlatformInstance", + "aspect": { + "json": { + "platform": "urn:li:dataPlatform:dbt" + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "dbt-prefer-sql-parser-lineage", + "lastRunId": 
"no-run-id-provided" + } +}, +{ + "proposedSnapshot": { + "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { + "urn": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD)", + "aspects": [ + { + "com.linkedin.pegasus2avro.dataset.DatasetProperties": { + "customProperties": { + "node_type": "source", + "dbt_file_path": "models/base.yml", + "catalog_type": "BASE TABLE", + "language": "sql", + "dbt_unique_id": "source.sample_dbt.pagila.payment_p2020_05", + "dbt_package_name": "sample_dbt", + "manifest_schema": "https://schemas.getdbt.com/dbt/manifest/v11.json", + "manifest_version": "1.7.3", + "manifest_adapter": "postgres", + "catalog_schema": "https://schemas.getdbt.com/dbt/catalog/v1.json", + "catalog_version": "1.7.3" + }, + "name": "payment_p2020_05", + "description": "a payment", + "tags": [] + } + }, + { + "com.linkedin.pegasus2avro.common.Status": { + "removed": false + } + }, + { + "com.linkedin.pegasus2avro.schema.SchemaMetadata": { + "schemaName": "source.sample_dbt.pagila.payment_p2020_05", + "platform": "urn:li:dataPlatform:dbt", + "version": 0, + "created": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "lastModified": { + "time": 1589460269997, + "actor": "urn:li:corpuser:dbt_executor" + }, + "hash": "", + "platformSchema": { + "com.linkedin.pegasus2avro.schema.MySqlDDL": { + "tableSchema": "" + } + }, + "fields": [ + { + "fieldPath": "amount", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "numeric(5,2)", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "customer_id", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "integer", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "payment_date", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.TimeType": {} + } + }, + 
"nativeDataType": "timestamp with time zone", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "payment_id", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "integer", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "rental_id", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "integer", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "staff_id", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "integer", + "recursive": false, + "isPartOfKey": false + } + ] + } + }, + { + "com.linkedin.pegasus2avro.dataset.UpstreamLineage": { + "upstreams": [ + { + "auditStamp": { + "time": 1643871600000, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_05,PROD)", + "type": "COPY" + } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_05,PROD),amount)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),amount)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_05,PROD),customer_id)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),customer_id)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_05,PROD),payment_date)" + ], + 
"downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),payment_date)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_05,PROD),payment_id)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),payment_id)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_05,PROD),rental_id)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),rental_id)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_05,PROD),staff_id)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),staff_id)" + ], + "confidenceScore": 1.0 + } + ] + } + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "dbt-prefer-sql-parser-lineage", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD)", + "changeType": "UPSERT", + "aspectName": "subTypes", + "aspect": { + "json": { + "typeNames": [ + "Source" + ] + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "dbt-prefer-sql-parser-lineage", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD)", + "changeType": 
"UPSERT", + "aspectName": "dataPlatformInstance", + "aspect": { + "json": { + "platform": "urn:li:dataPlatform:dbt" + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "dbt-prefer-sql-parser-lineage", + "lastRunId": "no-run-id-provided" + } +}, +{ + "proposedSnapshot": { + "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { + "urn": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD)", + "aspects": [ + { + "com.linkedin.pegasus2avro.dataset.DatasetProperties": { + "customProperties": { + "node_type": "source", + "dbt_file_path": "models/base.yml", + "catalog_type": "BASE TABLE", + "language": "sql", + "dbt_unique_id": "source.sample_dbt.pagila.payment_p2020_06", + "dbt_package_name": "sample_dbt", + "manifest_schema": "https://schemas.getdbt.com/dbt/manifest/v11.json", + "manifest_version": "1.7.3", + "manifest_adapter": "postgres", + "catalog_schema": "https://schemas.getdbt.com/dbt/catalog/v1.json", + "catalog_version": "1.7.3" + }, + "name": "payment_p2020_06", + "description": "", + "tags": [] + } + }, + { + "com.linkedin.pegasus2avro.common.Status": { + "removed": false + } + }, + { + "com.linkedin.pegasus2avro.schema.SchemaMetadata": { + "schemaName": "source.sample_dbt.pagila.payment_p2020_06", + "platform": "urn:li:dataPlatform:dbt", + "version": 0, + "created": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "lastModified": { + "time": -62135596800000, + "actor": "urn:li:corpuser:dbt_executor" + }, + "hash": "", + "platformSchema": { + "com.linkedin.pegasus2avro.schema.MySqlDDL": { + "tableSchema": "" + } + }, + "fields": [ + { + "fieldPath": "amount", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "numeric(5,2)", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "customer_id", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + 
"nativeDataType": "integer", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "payment_date", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.TimeType": {} + } + }, + "nativeDataType": "timestamp with time zone", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "payment_id", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "integer", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "rental_id", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "integer", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "staff_id", + "nullable": false, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "integer", + "recursive": false, + "isPartOfKey": false + } + ] + } + }, + { + "com.linkedin.pegasus2avro.dataset.UpstreamLineage": { + "upstreams": [ + { + "auditStamp": { + "time": 1643871600000, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_06,PROD)", + "type": "COPY" + } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_06,PROD),amount)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),amount)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_06,PROD),customer_id)" + ], + "downstreamType": "FIELD", + "downstreams": [ + 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),customer_id)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_06,PROD),payment_date)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),payment_date)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_06,PROD),payment_id)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),payment_id)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_06,PROD),rental_id)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),rental_id)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_06,PROD),staff_id)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),staff_id)" + ], + "confidenceScore": 1.0 + } + ] + } + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "dbt-prefer-sql-parser-lineage", + "lastRunId": "no-run-id-provided" + } +}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.an-aliased-view-for-monthly-billing,PROD)", @@ -2344,8 +5311,8 @@ }, "assertionUrn": 
"urn:li:assertion:ba2c6ba830d407d539452f4cf46c92a6", "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" + "partition": "FULL_TABLE_SNAPSHOT", + "type": "FULL_TABLE" } } }, @@ -2426,8 +5393,8 @@ }, "assertionUrn": "urn:li:assertion:10f2a119dedcaab43afc47ff13d9cb5b", "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" + "partition": "FULL_TABLE_SNAPSHOT", + "type": "FULL_TABLE" } } }, @@ -2507,8 +5474,8 @@ }, "assertionUrn": "urn:li:assertion:c456eccf6440c6e3388c584689a74d91", "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" + "partition": "FULL_TABLE_SNAPSHOT", + "type": "FULL_TABLE" } } }, @@ -2588,8 +5555,8 @@ }, "assertionUrn": "urn:li:assertion:f812b73477d81e6af283d918cb59e7bf", "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" + "partition": "FULL_TABLE_SNAPSHOT", + "type": "FULL_TABLE" } } }, @@ -2678,8 +5645,8 @@ }, "assertionUrn": "urn:li:assertion:08c35a6481d3c37c93eaf9e424faa6d5", "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" + "partition": "FULL_TABLE_SNAPSHOT", + "type": "FULL_TABLE" } } }, @@ -2752,8 +5719,8 @@ }, "assertionUrn": "urn:li:assertion:08c35a6481d3c37c93eaf9e424faa6d5", "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" + "partition": "FULL_TABLE_SNAPSHOT", + "type": "FULL_TABLE" } } }, @@ -2833,8 +5800,8 @@ }, "assertionUrn": "urn:li:assertion:f6a1fde3ab4919abcc04bdee93144958", "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" + "partition": "FULL_TABLE_SNAPSHOT", + "type": "FULL_TABLE" } } }, @@ -2920,8 +5887,8 @@ }, "assertionUrn": "urn:li:assertion:60ce4aad7ff6dbff7004da0f2258c9df", "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" + "partition": "FULL_TABLE_SNAPSHOT", + "type": "FULL_TABLE" } } }, diff --git a/metadata-ingestion/tests/integration/dbt/test_dbt.py 
b/metadata-ingestion/tests/integration/dbt/test_dbt.py index a46da9707679c..d213cffa78045 100644 --- a/metadata-ingestion/tests/integration/dbt/test_dbt.py +++ b/metadata-ingestion/tests/integration/dbt/test_dbt.py @@ -227,7 +227,7 @@ def set_paths( source_config_modifiers={ "prefer_sql_parser_lineage": True, "skip_sources_in_lineage": True, - "entities_enabled": {"sources": "NO"}, + # "entities_enabled": {"sources": "NO"}, }, ), ], diff --git a/metadata-ingestion/tests/unit/test_dbt_source.py b/metadata-ingestion/tests/unit/test_dbt_source.py index 01d7a4809b01b..90ff78b16f652 100644 --- a/metadata-ingestion/tests/unit/test_dbt_source.py +++ b/metadata-ingestion/tests/unit/test_dbt_source.py @@ -247,7 +247,6 @@ def test_dbt_config_prefer_sql_parser_lineage(): "catalog_path": "dummy_path", "target_platform": "dummy_platform", "skip_sources_in_lineage": True, - "entities_enabled": {"sources": "NO"}, "prefer_sql_parser_lineage": True, } config = DBTCoreConfig.parse_obj(config_dict) From 18ce10448fbff2b861dc64c8c20c1dec5e0c1741 Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Tue, 13 Aug 2024 19:14:40 -0500 Subject: [PATCH 36/72] feat(actions): updates to gha workflows (#11150) --- .github/actions/ci-optimization/action.yml | 7 +-- .../docker-custom-build-and-push/action.yml | 31 +++++++---- .github/scripts/docker_helpers.sh | 12 ++++- .github/workflows/build-and-test.yml | 5 ++ .github/workflows/docker-unified.yml | 51 +++++++++---------- .github/workflows/lint-actions.yml | 5 ++ .github/workflows/metadata-ingestion.yml | 5 ++ .github/workflows/metadata-io.yml | 5 ++ .github/workflows/metadata-model.yml | 2 +- .github/workflows/publish-datahub-jars.yml | 3 ++ .github/workflows/spark-smoke-test.yml | 7 ++- .github/workflows/test-results.yml | 5 ++ 12 files changed, 93 insertions(+), 45 deletions(-) diff --git a/.github/actions/ci-optimization/action.yml b/.github/actions/ci-optimization/action.yml index 
2f677a0e552c2..ae429c8d8b9fe 100644 --- a/.github/actions/ci-optimization/action.yml +++ b/.github/actions/ci-optimization/action.yml @@ -1,5 +1,5 @@ -name: 'Identify CI Optimizations' -description: 'Determine if code changes are specific to certain modules.' +name: "Identify CI Optimizations" +description: "Determine if code changes are specific to certain modules." outputs: frontend-only: @@ -44,9 +44,10 @@ outputs: runs: using: "composite" steps: - - uses: dorny/paths-filter@v2 + - uses: dorny/paths-filter@v3 id: filter with: + token: "" # Empty token forces it to use raw git commands. filters: | frontend: - "datahub-frontend/**" diff --git a/.github/actions/docker-custom-build-and-push/action.yml b/.github/actions/docker-custom-build-and-push/action.yml index 1c4a777c14802..763cd29343f5d 100644 --- a/.github/actions/docker-custom-build-and-push/action.yml +++ b/.github/actions/docker-custom-build-and-push/action.yml @@ -26,10 +26,13 @@ inputs: build-args: description: "List of build-time variables. Same as docker/build-push-action" required: false - tags: - # e.g. latest,head,sha12345 - description: "List of tags to use for the Docker image" + image_tag: + # e.g. 
pr12345 OR head OR v0.1.2.3 + description: "Main tag to use for the Docker image" required: true + flavor: + description: 'Image flavor (e.g., slim, full)' + required: false target: description: "Sets the target stage to build" required: false @@ -45,13 +48,17 @@ runs: steps: - name: Docker meta id: docker_meta - uses: crazy-max/ghaction-docker-meta@v1 + uses: docker/metadata-action@v5 with: - # list of Docker images to use as base name for tags images: ${{ inputs.images }} - # add git short SHA as Docker tag - tag-custom: ${{ inputs.tags }} - tag-custom-only: true + flavor: | + latest=false + suffix=${{ inputs.flavor && format('-{0}', inputs.flavor) || '' }} + tags: | + type=raw,value=${{ inputs.image_tag }} + type=raw,value=head,enable=${{ github.ref == format('refs/heads/{0}', 'acryl-main') }} + type=ref,event=pr,prefix=pr + type=sha,prefix=,format=short # Code for testing the build when not pushing to Docker Hub. - name: Build and Load image for testing (if not publishing) @@ -74,11 +81,13 @@ runs: if: ${{ inputs.publish != 'true' }} shell: bash run: | + IMAGES=""" + ${{ inputs.images }} + """ TAGS=""" - ${{ steps.docker_meta.outputs.tags }} + ${{ inputs.image_tag }} """ - echo "SINGLE_TAG=$(echo $TAGS | tr '\n' ' ' | awk -F' ' '{ print $1 }')" >> $GITHUB_OUTPUT - id: single_tag + echo "SINGLE_TAG=$(echo $IMAGES | tr '\n' ' ' | awk -F' ' '{ print $1 }'):$(echo $TAGS | tr '\n' ' ' | awk -F' ' '{ print $1 }')" >> $GITHUB_OUTPUT - name: Upload image locally for testing (if not publishing) uses: ishworkh/docker-image-artifact-upload@v1 if: ${{ inputs.publish != 'true' }} diff --git a/.github/scripts/docker_helpers.sh b/.github/scripts/docker_helpers.sh index e031a6d2a4d84..421a77ce4df4c 100755 --- a/.github/scripts/docker_helpers.sh +++ b/.github/scripts/docker_helpers.sh @@ -5,14 +5,14 @@ export MAIN_BRANCH="master" export MAIN_BRANCH_TAG="head" function get_short_sha { - echo $(git rev-parse --short "$GITHUB_SHA") + echo $(git rev-parse --short "$GITHUB_SHA"|head 
-c7) } export SHORT_SHA=$(get_short_sha) echo "SHORT_SHA: $SHORT_SHA" function get_tag { - echo $(echo ${GITHUB_REF} | sed -e "s,refs/heads/${MAIN_BRANCH},${MAIN_BRANCH_TAG},g" -e 's,refs/tags/,,g' -e 's,refs/pull/\([0-9]*\).*,pr\1,g'),${SHORT_SHA} + echo $(echo ${GITHUB_REF} | sed -e "s,refs/heads/${MAIN_BRANCH},${MAIN_BRANCH_TAG},g" -e 's,refs/tags/,,g' -e 's,refs/pull/\([0-9]*\).*,pr\1,g') } function get_tag_slim { @@ -38,3 +38,11 @@ function get_unique_tag_slim { function get_unique_tag_full { echo $(echo ${GITHUB_REF} | sed -e "s,refs/heads/${MAIN_BRANCH},${SHORT_SHA}-full,g" -e 's,refs/tags/\(.*\),\1-full,g' -e 's,refs/pull/\([0-9]*\).*,pr\1-full,g') } + +function get_platforms_based_on_branch { + if [ "${{ github.event_name }}" == 'push' && "${{ github.ref }}" == "refs/heads/${MAIN_BRANCH}" ]; then + echo "linux/amd64,linux/arm64" + else + echo "linux/amd64" + fi +} diff --git a/.github/workflows/build-and-test.yml b/.github/workflows/build-and-test.yml index c93267947b65a..b0666f4a42aac 100644 --- a/.github/workflows/build-and-test.yml +++ b/.github/workflows/build-and-test.yml @@ -57,6 +57,11 @@ jobs: timeout-minutes: 60 needs: setup steps: + - name: Free up disk space + run: | + sudo apt-get remove 'dotnet-*' azure-cli || true + sudo rm -rf /usr/local/lib/android/ || true + sudo docker image prune -a -f || true - uses: szenius/set-timezone@v1.2 with: timezoneLinux: ${{ matrix.timezone }} diff --git a/.github/workflows/docker-unified.yml b/.github/workflows/docker-unified.yml index 9487e71e8da3d..c708b562864c5 100644 --- a/.github/workflows/docker-unified.yml +++ b/.github/workflows/docker-unified.yml @@ -47,7 +47,6 @@ jobs: publish: ${{ steps.publish.outputs.publish }} pr-publish: ${{ steps.pr-publish.outputs.publish }} python_release_version: ${{ steps.tag.outputs.python_release_version }} - short_sha: ${{ steps.tag.outputs.short_sha }} branch_name: ${{ steps.tag.outputs.branch_name }} repository_name: ${{ steps.tag.outputs.repository_name }} 
frontend_change: ${{ steps.ci-optimize.outputs.frontend-change == 'true' }} @@ -157,7 +156,7 @@ jobs: with: images: | ${{ env.DATAHUB_GMS_IMAGE }} - tags: ${{ needs.setup.outputs.tag }} + image_tag: ${{ needs.setup.outputs.tag }} username: ${{ secrets.ACRYL_DOCKER_USERNAME }} password: ${{ secrets.ACRYL_DOCKER_PASSWORD }} publish: ${{ needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true' }} @@ -221,7 +220,7 @@ jobs: with: images: | ${{ env.DATAHUB_MAE_CONSUMER_IMAGE }} - tags: ${{ needs.setup.outputs.tag }} + image_tag: ${{ needs.setup.outputs.tag }} username: ${{ secrets.ACRYL_DOCKER_USERNAME }} password: ${{ secrets.ACRYL_DOCKER_PASSWORD }} publish: ${{ needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true' }} @@ -285,7 +284,7 @@ jobs: with: images: | ${{ env.DATAHUB_MCE_CONSUMER_IMAGE }} - tags: ${{ needs.setup.outputs.tag }} + image_tag: ${{ needs.setup.outputs.tag }} username: ${{ secrets.ACRYL_DOCKER_USERNAME }} password: ${{ secrets.ACRYL_DOCKER_PASSWORD }} publish: ${{ needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true' }} @@ -349,7 +348,7 @@ jobs: with: images: | ${{ env.DATAHUB_UPGRADE_IMAGE }} - tags: ${{ needs.setup.outputs.tag }} + image_tag: ${{ needs.setup.outputs.tag }} username: ${{ secrets.ACRYL_DOCKER_USERNAME }} password: ${{ secrets.ACRYL_DOCKER_PASSWORD }} publish: ${{ needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true' }} @@ -394,7 +393,7 @@ jobs: name: Build and Push DataHub Frontend Docker Image runs-on: ubuntu-latest needs: setup - if: ${{ needs.setup.outputs.frontend_change == 'true' || needs.setup.outputs.publish == 'true' }} + if: ${{ needs.setup.outputs.frontend_change == 'true' || needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true'}} steps: - name: Set up JDK 17 uses: actions/setup-java@v3 @@ -415,7 +414,7 @@ jobs: with: images: | ${{ env.DATAHUB_FRONTEND_IMAGE }} - tags: ${{ 
needs.setup.outputs.tag }} + image_tag: ${{ needs.setup.outputs.tag }} username: ${{ secrets.ACRYL_DOCKER_USERNAME }} password: ${{ secrets.ACRYL_DOCKER_PASSWORD }} publish: ${{ needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true' }} @@ -469,7 +468,7 @@ jobs: with: images: | ${{ env.DATAHUB_KAFKA_SETUP_IMAGE }} - tags: ${{ needs.setup.outputs.tag }} + image_tag: ${{ needs.setup.outputs.tag }} username: ${{ secrets.ACRYL_DOCKER_USERNAME }} password: ${{ secrets.ACRYL_DOCKER_PASSWORD }} publish: ${{ needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true' }} @@ -490,7 +489,7 @@ jobs: with: images: | ${{ env.DATAHUB_MYSQL_SETUP_IMAGE }} - tags: ${{ needs.setup.outputs.tag }} + image_tag: ${{ needs.setup.outputs.tag }} username: ${{ secrets.ACRYL_DOCKER_USERNAME }} password: ${{ secrets.ACRYL_DOCKER_PASSWORD }} publish: ${{ needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true' }} @@ -511,7 +510,7 @@ jobs: with: images: | ${{ env.DATAHUB_ELASTIC_SETUP_IMAGE }} - tags: ${{ needs.setup.outputs.tag }} + image_tag: ${{ needs.setup.outputs.tag }} username: ${{ secrets.ACRYL_DOCKER_USERNAME }} password: ${{ secrets.ACRYL_DOCKER_PASSWORD }} publish: ${{ needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true' }} @@ -525,7 +524,7 @@ jobs: outputs: tag: ${{ steps.tag.outputs.tag }} needs: setup - if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' }} + if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true' }} steps: - name: Check out the repo uses: acryldata/sane-checkout-action@v3 @@ -536,7 +535,7 @@ jobs: target: base images: | ${{ env.DATAHUB_INGESTION_BASE_IMAGE }} - tags: ${{ needs.setup.outputs.tag }} + image_tag: ${{ needs.setup.outputs.tag }} username: ${{ secrets.ACRYL_DOCKER_USERNAME }} password: ${{ secrets.ACRYL_DOCKER_PASSWORD }} 
publish: ${{ needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true' }} @@ -552,7 +551,7 @@ jobs: outputs: tag: ${{ steps.tag.outputs.tag }} needs: [setup, datahub_ingestion_base_build] - if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' }} + if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true' }} steps: - name: Check out the repo uses: acryldata/sane-checkout-action@v3 @@ -574,7 +573,7 @@ jobs: target: slim-install images: | ${{ env.DATAHUB_INGESTION_BASE_IMAGE }} - tags: ${{ needs.setup.outputs.slim_tag }} + image_tag: ${{ needs.setup.outputs.slim_tag }} username: ${{ secrets.ACRYL_DOCKER_USERNAME }} password: ${{ secrets.ACRYL_DOCKER_PASSWORD }} build-args: | @@ -593,7 +592,7 @@ jobs: outputs: tag: ${{ steps.tag.outputs.tag }} needs: [setup, datahub_ingestion_base_build] - if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' }} + if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true' }} steps: - name: Check out the repo uses: acryldata/sane-checkout-action@v3 @@ -636,7 +635,7 @@ jobs: tag: ${{ steps.tag.outputs.tag }} needs_artifact_download: ${{ needs.setup.outputs.ingestion_change == 'true' && ( needs.setup.outputs.publish != 'true' && needs.setup.outputs.pr-publish != 'true') }} needs: [setup, datahub_ingestion_base_slim_build] - if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' }} + if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true' }} steps: - name: Set up JDK 17 uses: actions/setup-java@v3 @@ -647,7 +646,7 @@ jobs: - name: Check out the repo uses: acryldata/sane-checkout-action@v3 - name: Build codegen - if: ${{ 
needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' }} + if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish =='true' }} run: ./gradlew :metadata-ingestion:codegen - name: Download Base Image uses: ishworkh/docker-image-artifact-download@v1 @@ -661,7 +660,7 @@ jobs: username: ${{ secrets.ACRYL_DOCKER_USERNAME }} password: ${{ secrets.ACRYL_DOCKER_PASSWORD }} - name: Build and push Slim Image - if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' }} + if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true' }} uses: ./.github/actions/docker-custom-build-and-push with: target: final @@ -672,7 +671,7 @@ jobs: DOCKER_VERSION=${{ needs.setup.outputs.ingestion_base_change == 'true' && needs.setup.outputs.unique_slim_tag || 'head-slim' }} RELEASE_VERSION=${{ needs.setup.outputs.python_release_version }} APP_ENV=slim - tags: ${{ needs.setup.outputs.slim_tag }} + image_tag: ${{ needs.setup.outputs.slim_tag }} username: ${{ secrets.ACRYL_DOCKER_USERNAME }} password: ${{ secrets.ACRYL_DOCKER_PASSWORD }} publish: ${{ needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true' }} @@ -723,7 +722,7 @@ jobs: tag: ${{ steps.tag.outputs.tag }} needs_artifact_download: ${{ needs.setup.outputs.ingestion_change == 'true' && ( needs.setup.outputs.publish != 'true' && needs.setup.outputs.pr-publish != 'true' ) }} needs: [setup, datahub_ingestion_base_full_build] - if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' }} + if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true' }} steps: - name: Set up JDK 17 uses: actions/setup-java@v3 @@ -734,7 +733,7 @@ jobs: - name: Check out the repo uses: 
acryldata/sane-checkout-action@v3 - name: Build codegen - if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' }} + if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true' }} run: ./gradlew :metadata-ingestion:codegen - name: Download Base Image uses: ishworkh/docker-image-artifact-download@v1 @@ -748,7 +747,7 @@ jobs: username: ${{ secrets.ACRYL_DOCKER_USERNAME }} password: ${{ secrets.ACRYL_DOCKER_PASSWORD }} - name: Build and push Full Image - if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' }} + if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true' }} uses: ./.github/actions/docker-custom-build-and-push with: target: final @@ -758,7 +757,7 @@ jobs: BASE_IMAGE=${{ env.DATAHUB_INGESTION_BASE_IMAGE }} DOCKER_VERSION=${{ needs.setup.outputs.ingestion_base_change == 'true' && needs.setup.outputs.unique_tag || 'head' }} RELEASE_VERSION=${{ needs.setup.outputs.python_release_version }} - tags: ${{ needs.setup.outputs.tag }} + image_tag: ${{ needs.setup.outputs.tag }} username: ${{ secrets.ACRYL_DOCKER_USERNAME }} password: ${{ secrets.ACRYL_DOCKER_PASSWORD }} publish: ${{ needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true' }} @@ -776,7 +775,7 @@ jobs: name: "[Monitoring] Scan Datahub Ingestion images for vulnerabilities" runs-on: ubuntu-latest needs: [setup, datahub_ingestion_full_build] - if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' }} + if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true' }} steps: - name: Checkout # adding checkout step just to make trivy upload happy uses: acryldata/sane-checkout-action@v3 @@ -965,7 +964,7 @@ jobs: echo 
'datahub-ingestion head-slim images' docker pull '${{ env.DATAHUB_INGESTION_IMAGE }}:head-slim' if [ '${{ needs.datahub_ingestion_slim_build.outputs.tag || 'head-slim' }}' != 'head-slim' ]; then - docker tag '${{ env.DATAHUB_INGESTION_IMAGE }}:head-slim' '${{ env.DATAHUB_INGESTION_IMAGE }}:${{ needs.datahub_ingestion_slim_build.outputs.tag }}' + docker tag '${{ env.DATAHUB_INGESTION_IMAGE }}:head-slim' '${{ env.DATAHUB_INGESTION_IMAGE }}:${{ needs.setup.outputs.unique_tag }}' fi fi - name: Disk Check @@ -1049,7 +1048,7 @@ jobs: runs-on: ubuntu-latest needs: [setup, smoke_test] steps: - - uses: aws-actions/configure-aws-credentials@v1 + - uses: aws-actions/configure-aws-credentials@v4 if: ${{ needs.setup.outputs.publish != 'false' && github.repository_owner == 'datahub-project' && needs.setup.outputs.repository_name == 'datahub' }} with: aws-access-key-id: ${{ secrets.AWS_SQS_ACCESS_KEY_ID }} diff --git a/.github/workflows/lint-actions.yml b/.github/workflows/lint-actions.yml index 4d83adbeba08a..8a1777522f416 100644 --- a/.github/workflows/lint-actions.yml +++ b/.github/workflows/lint-actions.yml @@ -14,3 +14,8 @@ jobs: - uses: reviewdog/action-actionlint@v1 with: reporter: github-pr-review + permissions: + contents: read + checks: write + pull-requests: write + issues: write diff --git a/.github/workflows/metadata-ingestion.yml b/.github/workflows/metadata-ingestion.yml index 51b97552eb150..a27013c4bf488 100644 --- a/.github/workflows/metadata-ingestion.yml +++ b/.github/workflows/metadata-ingestion.yml @@ -46,6 +46,11 @@ jobs: - python-version: "3.10" fail-fast: false steps: + - name: Free up disk space + run: | + sudo apt-get remove 'dotnet-*' azure-cli || true + sudo rm -rf /usr/local/lib/android/ || true + sudo docker image prune -a -f || true - name: Set up JDK 17 uses: actions/setup-java@v3 with: diff --git a/.github/workflows/metadata-io.yml b/.github/workflows/metadata-io.yml index 6797c7ad67c0b..332330b4ed898 100644 --- a/.github/workflows/metadata-io.yml 
+++ b/.github/workflows/metadata-io.yml @@ -47,6 +47,11 @@ jobs: timeout-minutes: 60 needs: setup steps: + - name: Free up disk space + run: | + sudo apt-get remove 'dotnet-*' azure-cli || true + sudo rm -rf /usr/local/lib/android/ || true + sudo docker image prune -a -f || true - uses: acryldata/sane-checkout-action@v3 - name: Set up JDK 17 uses: actions/setup-java@v3 diff --git a/.github/workflows/metadata-model.yml b/.github/workflows/metadata-model.yml index 558b7c80f727c..d62c03057db3f 100644 --- a/.github/workflows/metadata-model.yml +++ b/.github/workflows/metadata-model.yml @@ -49,7 +49,7 @@ jobs: run: ./gradlew :metadata-ingestion:modelDocGen - name: Configure AWS Credentials if: ${{ needs.setup.outputs.publish == 'true' }} - uses: aws-actions/configure-aws-credentials@v3 + uses: aws-actions/configure-aws-credentials@v4 with: aws-access-key-id: ${{ secrets.ACRYL_CI_ARTIFACTS_ACCESS_KEY_ID }} aws-secret-access-key: ${{ secrets.ACRYL_CI_ARTIFACTS_ACCESS_KEY }} diff --git a/.github/workflows/publish-datahub-jars.yml b/.github/workflows/publish-datahub-jars.yml index 7137302c73564..aceee756339ad 100644 --- a/.github/workflows/publish-datahub-jars.yml +++ b/.github/workflows/publish-datahub-jars.yml @@ -45,6 +45,9 @@ jobs: echo "tag=$TAG" >> $GITHUB_OUTPUT publish: runs-on: ubuntu-latest + permissions: + id-token: write + contents: read needs: ["check-secret", "setup"] if: ${{ needs.check-secret.outputs.publish-enabled == 'true' }} steps: diff --git a/.github/workflows/spark-smoke-test.yml b/.github/workflows/spark-smoke-test.yml index 8ffc8420ba941..d1618c6528577 100644 --- a/.github/workflows/spark-smoke-test.yml +++ b/.github/workflows/spark-smoke-test.yml @@ -44,8 +44,11 @@ jobs: run: ./metadata-ingestion/scripts/install_deps.sh - name: Disk Check run: df -h . 
&& docker images - - name: Remove images - run: docker image prune -a -f || true + - name: Free up disk space + run: | + sudo apt-get remove 'dotnet-*' azure-cli || true + sudo rm -rf /usr/local/lib/android/ || true + sudo docker image prune -a -f || true - name: Disk Check run: df -h . && docker images - name: Smoke test diff --git a/.github/workflows/test-results.yml b/.github/workflows/test-results.yml index c94a5fc340f47..a122ef3835f4d 100644 --- a/.github/workflows/test-results.yml +++ b/.github/workflows/test-results.yml @@ -10,6 +10,11 @@ jobs: unit-test-results: name: Unit Test Results runs-on: ubuntu-latest + permissions: + contents: read + actions: read + checks: write + issues: read if: github.event.workflow_run.conclusion != 'skipped' steps: From 082ef5b3510b4dc4a7052e0cfb7f0d6541ec92b9 Mon Sep 17 00:00:00 2001 From: Aseem Bansal Date: Wed, 14 Aug 2024 14:23:11 +0530 Subject: [PATCH 37/72] build: fix docker warnings (#11163) --- docker/datahub-frontend/Dockerfile | 8 ++++---- docker/datahub-gms/Dockerfile | 8 ++++---- docker/datahub-ingestion-base/Dockerfile | 10 +++++----- docker/datahub-ingestion/Dockerfile | 12 ++++++------ docker/datahub-mae-consumer/Dockerfile | 8 ++++---- docker/datahub-mce-consumer/Dockerfile | 8 ++++---- docker/datahub-upgrade/Dockerfile | 8 ++++---- docker/elasticsearch-setup/Dockerfile | 2 +- docker/kafka-setup/Dockerfile | 10 +++++----- docker/mysql-setup/Dockerfile | 2 +- docker/postgres-setup/Dockerfile | 2 +- 11 files changed, 39 insertions(+), 39 deletions(-) diff --git a/docker/datahub-frontend/Dockerfile b/docker/datahub-frontend/Dockerfile index 2a9354cbf6a04..89974e56575b0 100644 --- a/docker/datahub-frontend/Dockerfile +++ b/docker/datahub-frontend/Dockerfile @@ -25,7 +25,7 @@ RUN apk --no-cache --update-cache --available upgrade \ ENV LD_LIBRARY_PATH="/lib:/lib64" -FROM base as unpack +FROM base AS unpack COPY ./datahub-frontend.zip / RUN unzip datahub-frontend.zip -d /tmp/out \ @@ -33,16 +33,16 @@ RUN unzip 
datahub-frontend.zip -d /tmp/out \ COPY ./docker/monitoring/client-prometheus-config.yaml /datahub-frontend/ RUN chown -R datahub:datahub /datahub-frontend && chmod 755 /datahub-frontend -FROM base as prod-install +FROM base AS prod-install COPY --from=unpack /datahub-frontend/ /datahub-frontend/ -FROM base as dev-install +FROM base AS dev-install # Dummy stage for development. Assumes code is built on your machine and mounted to this image. # See this excellent thread https://github.com/docker/cli/issues/1134 VOLUME [ "/datahub-frontend" ] -FROM ${APP_ENV}-install as final +FROM ${APP_ENV}-install AS final COPY --chown=datahub:datahub --chmod=755 ./docker/datahub-frontend/start.sh / USER datahub diff --git a/docker/datahub-gms/Dockerfile b/docker/datahub-gms/Dockerfile index d30dbd8493057..b15bf3c6f9f17 100644 --- a/docker/datahub-gms/Dockerfile +++ b/docker/datahub-gms/Dockerfile @@ -11,7 +11,7 @@ FROM golang:1-alpine3.20 AS binary # Re-declaring arg from above to make it available in this stage (will inherit default value) ARG ALPINE_REPO_URL -ENV DOCKERIZE_VERSION v0.6.1 +ENV DOCKERIZE_VERSION=v0.6.1 WORKDIR /go/src/github.com/jwilder # Optionally set corporate mirror for apk @@ -52,7 +52,7 @@ COPY --from=binary /go/bin/dockerize /usr/local/bin ENV LD_LIBRARY_PATH="/lib:/lib64" -FROM base as prod-install +FROM base AS prod-install COPY war.war /datahub/datahub-gms/bin/war.war COPY metadata-models/src/main/resources/entity-registry.yml /datahub/datahub-gms/resources/entity-registry.yml COPY docker/datahub-gms/start.sh /datahub/datahub-gms/scripts/start.sh @@ -61,11 +61,11 @@ COPY docker/datahub-gms/jetty-jmx.xml /datahub/datahub-gms/scripts/jetty-jmx.xml COPY docker/monitoring/client-prometheus-config.yaml /datahub/datahub-gms/scripts/prometheus-config.yaml RUN chmod +x /datahub/datahub-gms/scripts/start.sh -FROM base as dev-install +FROM base AS dev-install # Dummy stage for development. Assumes code is built on your machine and mounted to this image. 
# See this excellent thread https://github.com/docker/cli/issues/1134 -FROM ${APP_ENV}-install as final +FROM ${APP_ENV}-install AS final RUN mkdir -p /etc/datahub/plugins/auth/resources diff --git a/docker/datahub-ingestion-base/Dockerfile b/docker/datahub-ingestion-base/Dockerfile index 8a238c32704bb..75e67fdf14d7a 100644 --- a/docker/datahub-ingestion-base/Dockerfile +++ b/docker/datahub-ingestion-base/Dockerfile @@ -12,7 +12,7 @@ FROM golang:1-alpine3.20 AS dockerize-binary # Re-declaring arg from above to make it available in this stage (will inherit default value) ARG ALPINE_REPO_URL -ENV DOCKERIZE_VERSION v0.6.1 +ENV DOCKERIZE_VERSION=v0.6.1 WORKDIR /go/src/github.com/jwilder # Optionally set corporate mirror for apk @@ -24,11 +24,11 @@ WORKDIR /go/src/github.com/jwilder/dockerize RUN go install github.com/jwilder/dockerize@$DOCKERIZE_VERSION -FROM python:3.10 as base +FROM python:3.10 AS base ARG GITHUB_REPO_URL -ENV DEBIAN_FRONTEND noninteractive +ENV DEBIAN_FRONTEND=noninteractive # Optionally set corporate mirror for deb ARG DEBIAN_REPO_URL @@ -75,7 +75,7 @@ RUN python3 -m venv $VIRTUAL_ENV && \ ENTRYPOINT [ "/entrypoint.sh" ] -FROM ${BASE_IMAGE} as full-install +FROM ${BASE_IMAGE} AS full-install USER 0 RUN apt-get update && apt-get install -y -qq \ @@ -102,7 +102,7 @@ RUN if [ $(arch) = "x86_64" ]; then \ USER datahub -FROM ${BASE_IMAGE} as slim-install +FROM ${BASE_IMAGE} AS slim-install # Do nothing else on top of base FROM ${APP_ENV}-install diff --git a/docker/datahub-ingestion/Dockerfile b/docker/datahub-ingestion/Dockerfile index b8eda54849122..34ac6ae9eba58 100644 --- a/docker/datahub-ingestion/Dockerfile +++ b/docker/datahub-ingestion/Dockerfile @@ -5,7 +5,7 @@ ARG DOCKER_VERSION=head-full ARG DEBIAN_REPO_URL=https://deb.debian.org/debian ARG PIP_MIRROR_URL=https://pypi.python.org/simple -FROM $BASE_IMAGE:$DOCKER_VERSION as base +FROM $BASE_IMAGE:$DOCKER_VERSION AS base # Optionally set corporate mirror for deb USER 0 @@ -28,11 +28,11 @@ RUN 
sed -i.bak "s/__version__ = \"1\!0.0.0.dev0\"/__version__ = \"$(echo $RELEAS cat src/datahub/__init__.py | grep __version__ && \ cat airflow-plugin/src/datahub_airflow_plugin/__init__.py | grep __version__ -FROM base as slim-install +FROM base AS slim-install RUN uv pip install --no-cache -e ".[base,datahub-rest,datahub-kafka,snowflake,bigquery,redshift,mysql,postgres,hive,clickhouse,glue,dbt,looker,lookml,tableau,powerbi,superset,datahub-business-glossary]" -FROM base as full-install-build +FROM base AS full-install-build USER 0 RUN apt-get update && apt-get install -y -qq maven @@ -44,14 +44,14 @@ RUN uv pip install --no-cache -e ".[base,all]" "./airflow-plugin[plugin-v2]" && datahub --version RUN ./pyspark_jars.sh -FROM base as full-install +FROM base AS full-install COPY --from=full-install-build ${VIRTUAL_ENV} ${VIRTUAL_ENV} -FROM base as dev-install +FROM base AS dev-install # Dummy stage for development. Assumes code is built on your machine and mounted to this image. # See this excellent thread https://github.com/docker/cli/issues/1134 -FROM ${APP_ENV}-install as final +FROM ${APP_ENV}-install AS final USER datahub diff --git a/docker/datahub-mae-consumer/Dockerfile b/docker/datahub-mae-consumer/Dockerfile index 0ee55821f2579..6edaa29ee1a8b 100644 --- a/docker/datahub-mae-consumer/Dockerfile +++ b/docker/datahub-mae-consumer/Dockerfile @@ -11,7 +11,7 @@ FROM golang:1-alpine3.20 AS binary # Re-declaring arg from above to make it available in this stage (will inherit default value) ARG ALPINE_REPO_URL -ENV DOCKERIZE_VERSION v0.6.1 +ENV DOCKERIZE_VERSION=v0.6.1 WORKDIR /go/src/github.com/jwilder # Optionally set corporate mirror for apk @@ -47,18 +47,18 @@ COPY --from=binary /go/bin/dockerize /usr/local/bin ENV LD_LIBRARY_PATH="/lib:/lib64" -FROM base as prod-install +FROM base AS prod-install COPY mae-consumer-job.jar /datahub/datahub-mae-consumer/bin/ COPY metadata-models/src/main/resources/entity-registry.yml 
/datahub/datahub-mae-consumer/resources/entity-registry.yml COPY docker/datahub-mae-consumer/start.sh /datahub/datahub-mae-consumer/scripts/ COPY docker/monitoring/client-prometheus-config.yaml /datahub/datahub-mae-consumer/scripts/prometheus-config.yaml RUN chmod +x /datahub/datahub-mae-consumer/scripts/start.sh -FROM base as dev-install +FROM base AS dev-install # Dummy stage for development. Assumes code is built on your machine and mounted to this image. # See this excellent thread https://github.com/docker/cli/issues/1134 -FROM ${APP_ENV}-install as final +FROM ${APP_ENV}-install AS final RUN addgroup -S datahub && adduser -S datahub -G datahub USER datahub diff --git a/docker/datahub-mce-consumer/Dockerfile b/docker/datahub-mce-consumer/Dockerfile index 8f85b432a1071..1eb56633c561e 100644 --- a/docker/datahub-mce-consumer/Dockerfile +++ b/docker/datahub-mce-consumer/Dockerfile @@ -11,7 +11,7 @@ FROM golang:1-alpine3.20 AS binary # Re-declaring arg from above to make it available in this stage (will inherit default value) ARG ALPINE_REPO_URL -ENV DOCKERIZE_VERSION v0.6.1 +ENV DOCKERIZE_VERSION=v0.6.1 WORKDIR /go/src/github.com/jwilder # Optionally set corporate mirror for apk @@ -45,7 +45,7 @@ RUN apk --no-cache --update-cache --available upgrade \ && cp /usr/lib/jvm/java-17-openjdk/jre/lib/security/cacerts /tmp/kafka.client.truststore.jks COPY --from=binary /go/bin/dockerize /usr/local/bin -FROM base as prod-install +FROM base AS prod-install COPY mce-consumer-job.jar /datahub/datahub-mce-consumer/bin/ COPY metadata-models/src/main/resources/entity-registry.yml /datahub/datahub-mce-consumer/resources/entity-registry.yml COPY docker/datahub-mce-consumer/start.sh /datahub/datahub-mce-consumer/scripts/ @@ -54,12 +54,12 @@ RUN chmod +x /datahub/datahub-mce-consumer/scripts/start.sh ENV LD_LIBRARY_PATH="/lib:/lib64" -FROM base as dev-install +FROM base AS dev-install # Dummy stage for development. Assumes code is built on your machine and mounted to this image. 
# See this excellent thread https://github.com/docker/cli/issues/1134 COPY metadata-models/src/main/resources/entity-registry.yml /datahub/datahub-mce-consumer/resources/entity-registry.yml -FROM ${APP_ENV}-install as final +FROM ${APP_ENV}-install AS final RUN addgroup -S datahub && adduser -S datahub -G datahub USER datahub diff --git a/docker/datahub-upgrade/Dockerfile b/docker/datahub-upgrade/Dockerfile index 675e24ab87109..3d59a903414b1 100644 --- a/docker/datahub-upgrade/Dockerfile +++ b/docker/datahub-upgrade/Dockerfile @@ -11,7 +11,7 @@ FROM golang:1-alpine3.20 AS binary # Re-declaring arg from above to make it available in this stage (will inherit default value) ARG ALPINE_REPO_URL -ENV DOCKERIZE_VERSION v0.6.1 +ENV DOCKERIZE_VERSION=v0.6.1 WORKDIR /go/src/github.com/jwilder # Optionally set corporate mirror for apk @@ -51,15 +51,15 @@ COPY --from=binary /go/bin/dockerize /usr/local/bin ENV LD_LIBRARY_PATH="/lib:/lib64" -FROM base as prod-install +FROM base AS prod-install COPY datahub-upgrade.jar /datahub/datahub-upgrade/bin/ COPY metadata-models/src/main/resources/entity-registry.yml /datahub/datahub-gms/resources/entity-registry.yml -FROM base as dev-install +FROM base AS dev-install # Dummy stage for development. Assumes code is built on your machine and mounted to this image. 
# See this excellent thread https://github.com/docker/cli/issues/1134 -FROM ${APP_ENV}-install as final +FROM ${APP_ENV}-install AS final RUN addgroup -S datahub && adduser -S datahub -G datahub USER datahub diff --git a/docker/elasticsearch-setup/Dockerfile b/docker/elasticsearch-setup/Dockerfile index 7390e3579dcf8..4e64dcbc1e452 100644 --- a/docker/elasticsearch-setup/Dockerfile +++ b/docker/elasticsearch-setup/Dockerfile @@ -10,7 +10,7 @@ FROM golang:1-alpine3.20 AS binary ARG ALPINE_REPO_URL -ENV DOCKERIZE_VERSION v0.6.1 +ENV DOCKERIZE_VERSION=v0.6.1 WORKDIR /go/src/github.com/jwilder # Optionally set corporate mirror for apk diff --git a/docker/kafka-setup/Dockerfile b/docker/kafka-setup/Dockerfile index a68da4e41d4df..549373d5d457e 100644 --- a/docker/kafka-setup/Dockerfile +++ b/docker/kafka-setup/Dockerfile @@ -6,8 +6,8 @@ ARG GITHUB_REPO_URL=https://github.com ARG MAVEN_CENTRAL_REPO_URL=https://repo1.maven.org/maven2 ARG APACHE_DOWNLOAD_URL=null -# Using as a base image because to get the needed jars for confluent utils -FROM confluentinc/cp-base-new:$KAFKA_DOCKER_VERSION as confluent_base +# Using AS a base image because to get the needed jars for confluent utils +FROM confluentinc/cp-base-new:$KAFKA_DOCKER_VERSION AS confluent_base ARG MAVEN_CENTRAL_REPO_URL ARG SNAKEYAML_VERSION="2.0" @@ -22,8 +22,8 @@ ARG ALPINE_REPO_URL ARG APACHE_DOWNLOAD_URL ARG GITHUB_REPO_URL -ENV KAFKA_VERSION 3.7.0 -ENV SCALA_VERSION 2.13 +ENV KAFKA_VERSION=3.7.0 +ENV SCALA_VERSION=2.13 LABEL name="kafka" version=${KAFKA_VERSION} @@ -44,7 +44,7 @@ RUN mkdir -p /opt \ && rm -rf /tmp/* \ && apk del --purge .build-deps -ENV PATH /sbin:/opt/kafka/bin/:$PATH +ENV PATH=/sbin:/opt/kafka/bin/:$PATH WORKDIR /opt/kafka diff --git a/docker/mysql-setup/Dockerfile b/docker/mysql-setup/Dockerfile index 46969352d8174..b0ca45ad8f6f2 100644 --- a/docker/mysql-setup/Dockerfile +++ b/docker/mysql-setup/Dockerfile @@ -5,7 +5,7 @@ FROM golang:1-alpine3.20 AS binary ARG ALPINE_REPO_URL -ENV 
DOCKERIZE_VERSION v0.6.1 +ENV DOCKERIZE_VERSION=v0.6.1 WORKDIR /go/src/github.com/jwilder # Optionally set corporate mirror for apk diff --git a/docker/postgres-setup/Dockerfile b/docker/postgres-setup/Dockerfile index 8ab211218f240..e145456e807d4 100644 --- a/docker/postgres-setup/Dockerfile +++ b/docker/postgres-setup/Dockerfile @@ -5,7 +5,7 @@ FROM golang:1-alpine3.20 AS binary ARG ALPINE_REPO_URL -ENV DOCKERIZE_VERSION v0.6.1 +ENV DOCKERIZE_VERSION=v0.6.1 WORKDIR /go/src/github.com/jwilder # Optionally set corporate mirror for apk From c2dbfb838677b8b921c87e1f58841830ab9bd80d Mon Sep 17 00:00:00 2001 From: Pedro Silva Date: Wed, 14 Aug 2024 15:34:28 +0100 Subject: [PATCH 38/72] feat(hooks): Make hook enable flag non-default (#11159) --- .../linkedin/metadata/kafka/hook/MetadataChangeLogHook.java | 4 +--- .../main/java/com/datahub/event/hook/PlatformEventHook.java | 4 +--- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/MetadataChangeLogHook.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/MetadataChangeLogHook.java index 06a184c9f89f9..876df4279b7b8 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/MetadataChangeLogHook.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/MetadataChangeLogHook.java @@ -30,9 +30,7 @@ default MetadataChangeLogHook init(@Nonnull OperationContext systemOperationCont * Return whether the hook is enabled or not. 
If not enabled, the below invoke method is not * triggered */ - default boolean isEnabled() { - return true; - } + boolean isEnabled(); /** Invoke the hook when a MetadataChangeLog is received */ void invoke(@Nonnull MetadataChangeLog log) throws Exception; diff --git a/metadata-jobs/pe-consumer/src/main/java/com/datahub/event/hook/PlatformEventHook.java b/metadata-jobs/pe-consumer/src/main/java/com/datahub/event/hook/PlatformEventHook.java index 37241861f2e5e..7fcc2a07b950b 100644 --- a/metadata-jobs/pe-consumer/src/main/java/com/datahub/event/hook/PlatformEventHook.java +++ b/metadata-jobs/pe-consumer/src/main/java/com/datahub/event/hook/PlatformEventHook.java @@ -20,9 +20,7 @@ default void init() {} * Return whether the hook is enabled or not. If not enabled, the below invoke method is not * triggered */ - default boolean isEnabled() { - return true; - } + boolean isEnabled(); /** Invoke the hook when a PlatformEvent is received */ void invoke(@Nonnull OperationContext opContext, @Nonnull PlatformEvent event); From 66fdf29154f6c9a087cbf3f88a6c52c4fbc87cc7 Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Wed, 14 Aug 2024 11:48:17 -0500 Subject: [PATCH 39/72] fix(ci): smoke-test changes do not need to build images (#11174) --- .github/actions/ci-optimization/action.yml | 4 ---- .github/actions/docker-custom-build-and-push/action.yml | 1 + 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/.github/actions/ci-optimization/action.yml b/.github/actions/ci-optimization/action.yml index ae429c8d8b9fe..ff901b5de04b6 100644 --- a/.github/actions/ci-optimization/action.yml +++ b/.github/actions/ci-optimization/action.yml @@ -52,20 +52,17 @@ runs: frontend: - "datahub-frontend/**" - "datahub-web-react/**" - - "smoke-test/tests/cypress/**" - "docker/datahub-frontend/**" ingestion: - "metadata-ingestion-modules/**" - "metadata-ingestion/**" - "metadata-models/**" - - "smoke-test/**" - "docker/datahub-ingestion**" 
ingestion-base: - "docker/datahub-ingestion-base/**" docker: - "docker/**" backend: - - ".github/**" - "metadata-models/**" - "datahub-upgrade/**" - "entity-registry/**" @@ -79,7 +76,6 @@ runs: - "metadata-utils/**" - "metadata-operation-context/**" - "datahub-graphql-core/**" - - "smoke-test/**" - "docker/**" kafka-setup: - "docker/kafka-setup/**" diff --git a/.github/actions/docker-custom-build-and-push/action.yml b/.github/actions/docker-custom-build-and-push/action.yml index 763cd29343f5d..4ac9bb536ec93 100644 --- a/.github/actions/docker-custom-build-and-push/action.yml +++ b/.github/actions/docker-custom-build-and-push/action.yml @@ -88,6 +88,7 @@ runs: ${{ inputs.image_tag }} """ echo "SINGLE_TAG=$(echo $IMAGES | tr '\n' ' ' | awk -F' ' '{ print $1 }'):$(echo $TAGS | tr '\n' ' ' | awk -F' ' '{ print $1 }')" >> $GITHUB_OUTPUT + id: single_tag - name: Upload image locally for testing (if not publishing) uses: ishworkh/docker-image-artifact-upload@v1 if: ${{ inputs.publish != 'true' }} From 841cb5b7cf0f8e43b17688282bfa92b9ef26c4ff Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Wed, 14 Aug 2024 17:43:42 -0500 Subject: [PATCH 40/72] fix(ci): fix single tag comma split (#11179) --- .github/actions/docker-custom-build-and-push/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/docker-custom-build-and-push/action.yml b/.github/actions/docker-custom-build-and-push/action.yml index 4ac9bb536ec93..855b6922d1d65 100644 --- a/.github/actions/docker-custom-build-and-push/action.yml +++ b/.github/actions/docker-custom-build-and-push/action.yml @@ -87,7 +87,7 @@ runs: TAGS=""" ${{ inputs.image_tag }} """ - echo "SINGLE_TAG=$(echo $IMAGES | tr '\n' ' ' | awk -F' ' '{ print $1 }'):$(echo $TAGS | tr '\n' ' ' | awk -F' ' '{ print $1 }')" >> $GITHUB_OUTPUT + echo "SINGLE_TAG=$(echo $IMAGES | tr '\n' ' ' | awk -F' |,' '{ print $1 }'):$(echo $TAGS | tr '\n' ' ' | awk -F' ' |,'{ print 
$1 }')" >> $GITHUB_OUTPUT id: single_tag - name: Upload image locally for testing (if not publishing) uses: ishworkh/docker-image-artifact-upload@v1 From a06b3181a130011b54cc58d21312843b262f9553 Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Wed, 14 Aug 2024 17:46:09 -0500 Subject: [PATCH 41/72] lint(restore-indices): clean-up restore indices class (#11176) --- .../upgrade/config/RestoreIndicesConfig.java | 26 +++++-------------- 1 file changed, 6 insertions(+), 20 deletions(-) diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreIndicesConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreIndicesConfig.java index 949b75edaa6ba..26e40485787e9 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreIndicesConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreIndicesConfig.java @@ -8,37 +8,23 @@ import io.ebean.Database; import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; -import org.springframework.context.ApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; -import org.springframework.context.annotation.DependsOn; @Slf4j @Configuration public class RestoreIndicesConfig { - @Autowired ApplicationContext applicationContext; @Bean(name = "restoreIndices") - @DependsOn({ - "ebeanServer", - "entityService", - "systemMetadataService", - "searchService", - "graphService" - }) @ConditionalOnProperty(name = "entityService.impl", havingValue = "ebean", matchIfMissing = true) @Nonnull - public RestoreIndices createInstance() { - final Database ebeanServer = applicationContext.getBean(Database.class); - final EntityService entityService = 
applicationContext.getBean(EntityService.class); - final SystemMetadataService systemMetadataService = - applicationContext.getBean(SystemMetadataService.class); - final EntitySearchService entitySearchService = - applicationContext.getBean(EntitySearchService.class); - final GraphService graphService = applicationContext.getBean(GraphService.class); - + public RestoreIndices createInstance( + final Database ebeanServer, + final EntityService entityService, + final EntitySearchService entitySearchService, + final GraphService graphService, + final SystemMetadataService systemMetadataService) { return new RestoreIndices( ebeanServer, entityService, systemMetadataService, entitySearchService, graphService); } From c661a8786d8a8ab8195bcc8fd67a505e9118299c Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Wed, 14 Aug 2024 17:52:38 -0500 Subject: [PATCH 42/72] fix(ci): typo (#11180) --- .github/actions/docker-custom-build-and-push/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/docker-custom-build-and-push/action.yml b/.github/actions/docker-custom-build-and-push/action.yml index 855b6922d1d65..6c5f224872d4b 100644 --- a/.github/actions/docker-custom-build-and-push/action.yml +++ b/.github/actions/docker-custom-build-and-push/action.yml @@ -87,7 +87,7 @@ runs: TAGS=""" ${{ inputs.image_tag }} """ - echo "SINGLE_TAG=$(echo $IMAGES | tr '\n' ' ' | awk -F' |,' '{ print $1 }'):$(echo $TAGS | tr '\n' ' ' | awk -F' ' |,'{ print $1 }')" >> $GITHUB_OUTPUT + echo "SINGLE_TAG=$(echo $IMAGES | tr '\n' ' ' | awk -F' |,' '{ print $1 }'):$(echo $TAGS | tr '\n' ' ' | awk -F' |,' '{ print $1 }')" >> $GITHUB_OUTPUT id: single_tag - name: Upload image locally for testing (if not publishing) uses: ishworkh/docker-image-artifact-upload@v1 From 5fa48d7f25e1274caa43a646fb4becb173709d3e Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Thu, 15 
Aug 2024 10:14:13 -0500 Subject: [PATCH 43/72] fix(ci): additional ci and smoke-test updates (#11183) --- .../docker-custom-build-and-push/action.yml | 3 +- .github/scripts/docker_helpers.sh | 4 +-- .github/scripts/docker_logs.sh | 8 +++++ .github/workflows/docker-unified.yml | 29 +++++++++---------- 4 files changed, 24 insertions(+), 20 deletions(-) create mode 100644 .github/scripts/docker_logs.sh diff --git a/.github/actions/docker-custom-build-and-push/action.yml b/.github/actions/docker-custom-build-and-push/action.yml index 6c5f224872d4b..3805b3501ccec 100644 --- a/.github/actions/docker-custom-build-and-push/action.yml +++ b/.github/actions/docker-custom-build-and-push/action.yml @@ -56,8 +56,7 @@ runs: suffix=${{ inputs.flavor && format('-{0}', inputs.flavor) || '' }} tags: | type=raw,value=${{ inputs.image_tag }} - type=raw,value=head,enable=${{ github.ref == format('refs/heads/{0}', 'acryl-main') }} - type=ref,event=pr,prefix=pr + type=raw,value=head,enable={{is_default_branch}} type=sha,prefix=,format=short # Code for testing the build when not pushing to Docker Hub. 
diff --git a/.github/scripts/docker_helpers.sh b/.github/scripts/docker_helpers.sh index 421a77ce4df4c..138c8649820ec 100755 --- a/.github/scripts/docker_helpers.sh +++ b/.github/scripts/docker_helpers.sh @@ -16,11 +16,11 @@ function get_tag { } function get_tag_slim { - echo $(echo ${GITHUB_REF} | sed -e "s,refs/heads/${MAIN_BRANCH},${MAIN_BRANCH_TAG}-slim,g" -e 's,refs/tags/\(.*\),\1-slim,g' -e 's,refs/pull/\([0-9]*\).*,pr\1-slim,g'),${SHORT_SHA}-slim + echo $(echo ${GITHUB_REF} | sed -e "s,refs/heads/${MAIN_BRANCH},${MAIN_BRANCH_TAG}-slim,g" -e 's,refs/tags/\(.*\),\1-slim,g' -e 's,refs/pull/\([0-9]*\).*,pr\1-slim,g') } function get_tag_full { - echo $(echo ${GITHUB_REF} | sed -e "s,refs/heads/${MAIN_BRANCH},${MAIN_BRANCH_TAG}-full,g" -e 's,refs/tags/\(.*\),\1-full,g' -e 's,refs/pull/\([0-9]*\).*,pr\1-full,g'),${SHORT_SHA}-full + echo $(echo ${GITHUB_REF} | sed -e "s,refs/heads/${MAIN_BRANCH},${MAIN_BRANCH_TAG}-full,g" -e 's,refs/tags/\(.*\),\1-full,g' -e 's,refs/pull/\([0-9]*\).*,pr\1-full,g') } function get_python_docker_release_v { diff --git a/.github/scripts/docker_logs.sh b/.github/scripts/docker_logs.sh new file mode 100644 index 0000000000000..918b859fbe5b1 --- /dev/null +++ b/.github/scripts/docker_logs.sh @@ -0,0 +1,8 @@ +TARGET_DIR="${TARGET_DIR:=docker_logs}" +TEST_STRATEGY="${TEST_STRATEGY:=}" + +mkdir -p "$TARGET_DIR" +for name in `docker ps -a --format '{{.Names}}'`; +do + docker logs "$name" >& "${TARGET_DIR}/${name}${TEST_STRATEGY}.log" || true +done \ No newline at end of file diff --git a/.github/workflows/docker-unified.yml b/.github/workflows/docker-unified.yml index c708b562864c5..2bb3930e2f56e 100644 --- a/.github/workflows/docker-unified.yml +++ b/.github/workflows/docker-unified.yml @@ -501,7 +501,7 @@ jobs: name: Build and Push DataHub Elasticsearch Setup Docker Image runs-on: ubuntu-latest needs: setup - if: ${{ needs.setup.outputs.elasticsearch_setup_change == 'true' || (needs.setup.outputs.publish == 'true' || 
needs.setup.outputs.pr-publish == 'true') }} + if: ${{ needs.setup.outputs.elasticsearch_setup_change == 'true' || (needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true' ) }} steps: - name: Check out the repo uses: acryldata/sane-checkout-action@v3 @@ -861,11 +861,6 @@ jobs: with: python-version: "3.10" cache: "pip" - - name: Install dependencies - run: ./metadata-ingestion/scripts/install_deps.sh - - name: Build datahub cli - run: | - ./gradlew :metadata-ingestion:install - name: Login to DockerHub uses: docker/login-action@v3 if: ${{ needs.setup.outputs.docker-login == 'true' }} @@ -993,6 +988,15 @@ jobs: } } }' + - name: Disk Check + run: df -h . && docker images + - name: Install dependencies + run: ./metadata-ingestion/scripts/install_deps.sh + - name: Build datahub cli + run: | + ./gradlew :metadata-ingestion:install + - name: Disk Check + run: df -h . && docker images - name: Remove Source Code run: find ./*/* ! -path "./metadata-ingestion*" ! -path "./smoke-test*" ! 
-path "./gradle*" -delete - name: Disk Check @@ -1013,21 +1017,14 @@ jobs: if: failure() run: | docker ps -a - docker logs datahub-datahub-gms-1 >& gms-${{ matrix.test_strategy }}.log || true - docker logs datahub-datahub-actions-1 >& actions-${{ matrix.test_strategy }}.log || true - docker logs datahub-datahub-mae-consumer-1 >& mae-${{ matrix.test_strategy }}.log || true - docker logs datahub-datahub-mce-consumer-1 >& mce-${{ matrix.test_strategy }}.log || true - docker logs datahub-broker-1 >& broker-${{ matrix.test_strategy }}.log || true - docker logs datahub-mysql-1 >& mysql-${{ matrix.test_strategy }}.log || true - docker logs datahub-elasticsearch-1 >& elasticsearch-${{ matrix.test_strategy }}.log || true - docker logs datahub-datahub-frontend-react-1 >& frontend-${{ matrix.test_strategy }}.log || true - docker logs datahub-upgrade-1 >& upgrade-${{ matrix.test_strategy }}.log || true + TEST_STRATEGY="-${{ matrix.test_strategy }}" + source .github/scripts/docker_logs.sh - name: Upload logs uses: actions/upload-artifact@v3 if: failure() with: name: docker logs - path: "*.log" + path: "docker_logs/*.log" - name: Upload screenshots uses: actions/upload-artifact@v3 if: failure() From ff955523bbcda15def63a88f5dca3f9251d0ab47 Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Thu, 15 Aug 2024 10:14:29 -0500 Subject: [PATCH 44/72] test(smoke-test): minor update to openapi test (#11184) --- smoke-test/tests/openapi/test_openapi.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/smoke-test/tests/openapi/test_openapi.py b/smoke-test/tests/openapi/test_openapi.py index 6561ee6d5c5cc..20398e0e58168 100644 --- a/smoke-test/tests/openapi/test_openapi.py +++ b/smoke-test/tests/openapi/test_openapi.py @@ -64,6 +64,7 @@ def evaluate_test(test_name, test_data): actual_resp.json(), req_resp["response"]["json"], exclude_regex_paths=exclude_regex_paths, + ignore_order=True, ) assert not diff else: @@ 
-81,11 +82,12 @@ def evaluate_test(test_name, test_data): raise e -def run_tests(fixture_glob, num_workers=3): +def run_tests(fixture_globs, num_workers=3): with concurrent.futures.ThreadPoolExecutor(max_workers=num_workers) as executor: futures = [] - for test_fixture, test_data in load_tests(fixture_glob=fixture_glob): - futures.append(executor.submit(evaluate_test, test_fixture, test_data)) + for fixture_glob in fixture_globs: + for test_fixture, test_data in load_tests(fixture_glob=fixture_glob): + futures.append(executor.submit(evaluate_test, test_fixture, test_data)) for future in concurrent.futures.as_completed(futures): logger.info(future.result()) @@ -93,7 +95,7 @@ def run_tests(fixture_glob, num_workers=3): @pytest.mark.dependency(depends=["test_healthchecks"]) def test_openapi_all(): - run_tests(fixture_glob="tests/openapi/**/*.json", num_workers=10) + run_tests(fixture_globs=["tests/openapi/*/*.json"], num_workers=10) # @pytest.mark.dependency(depends=["test_healthchecks"]) From 3903b17b2d8d97596820314e6033247bcccff673 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Thu, 15 Aug 2024 10:08:45 -0700 Subject: [PATCH 45/72] feat(ingest): use pre-built dockerize binary (#11181) --- docker/datahub-ingestion-base/Dockerfile | 20 ++------------------ 1 file changed, 2 insertions(+), 18 deletions(-) diff --git a/docker/datahub-ingestion-base/Dockerfile b/docker/datahub-ingestion-base/Dockerfile index 75e67fdf14d7a..a2686ee8b6557 100644 --- a/docker/datahub-ingestion-base/Dockerfile +++ b/docker/datahub-ingestion-base/Dockerfile @@ -7,22 +7,7 @@ ARG GITHUB_REPO_URL=https://github.com ARG DEBIAN_REPO_URL=https://deb.debian.org/debian ARG PIP_MIRROR_URL=https://pypi.python.org/simple -FROM golang:1-alpine3.20 AS dockerize-binary - -# Re-declaring arg from above to make it available in this stage (will inherit default value) -ARG ALPINE_REPO_URL - -ENV DOCKERIZE_VERSION=v0.6.1 -WORKDIR /go/src/github.com/jwilder - -# Optionally set corporate mirror for apk -RUN 
if [ "${ALPINE_REPO_URL}" != "http://dl-cdn.alpinelinux.org/alpine" ] ; then sed -i "s#http.*://dl-cdn.alpinelinux.org/alpine#${ALPINE_REPO_URL}#g" /etc/apk/repositories ; fi - -RUN apk --no-cache --update add openssl git tar curl - -WORKDIR /go/src/github.com/jwilder/dockerize - -RUN go install github.com/jwilder/dockerize@$DOCKERIZE_VERSION +FROM powerman/dockerize:0.19 as dockerize-binary FROM python:3.10 AS base @@ -56,8 +41,7 @@ RUN apt-get update && apt-get install -y -qq \ && python -m pip install --no-cache --upgrade pip uv>=0.1.10 wheel setuptools \ && rm -rf /var/lib/apt/lists/* /var/cache/apk/* -# compiled against newer golang for security fixes -COPY --from=dockerize-binary /go/bin/dockerize /usr/local/bin +COPY --from=dockerize-binary /usr/local/bin/dockerize /usr/local/bin COPY ./docker/datahub-ingestion-base/base-requirements.txt requirements.txt COPY ./docker/datahub-ingestion-base/entrypoint.sh /entrypoint.sh From b4473063d2ba7f272170a01dd0317db1fddf7a5d Mon Sep 17 00:00:00 2001 From: Aseem Bansal Date: Thu, 15 Aug 2024 22:44:16 +0530 Subject: [PATCH 46/72] doc: mark deprecated feature (#11175) --- docs/authorization/policies.md | 3 ++- docs/authorization/roles.md | 4 +++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/docs/authorization/policies.md b/docs/authorization/policies.md index b393c8ffa3757..45d0b59e40833 100644 --- a/docs/authorization/policies.md +++ b/docs/authorization/policies.md @@ -173,12 +173,13 @@ These privileges are for DataHub operators to access & manage the administrative | View Tests | View Asset Tests. | | Manage Tests[^2] | Allow actor to create and remove Asset Tests. | | View Metadata Proposals[^2] | Allow actor to view the requests tab for viewing metadata proposals. | -| Create metadata constraints[^2] | Allow actor to create metadata constraints. | +| Create metadata constraints[^3] | Allow actor to create metadata constraints. 
| | Manage Platform Settings[^2] | Allow actor to view and change platform-level settings, like integrations & notifications. | | Manage Monitors[^2] | Allow actor to create, update, and delete any data asset monitors, including Custom SQL monitors. Grant with care. | [^1]: Only active if REST_API_AUTHORIZATION_ENABLED is true [^2]: DataHub Cloud only +[^3]: Deprecated feature #### Entity Management diff --git a/docs/authorization/roles.md b/docs/authorization/roles.md index 7c7b4581faffc..a1719438d2941 100644 --- a/docs/authorization/roles.md +++ b/docs/authorization/roles.md @@ -156,10 +156,12 @@ These privileges are only relevant to DataHub Cloud. |-----------------------------|--------------------|--------------------|--------|-----------------------------------------------------------------------------------------------------| | Manage Tests | :heavy_check_mark: | :heavy_check_mark: | :x: | Create and remove Asset Tests. | | View Metadata Proposals | :heavy_check_mark: | :heavy_check_mark: | :x: | View the requests tab for viewing metadata proposals. | -| Create metadata constraints | :heavy_check_mark: | :heavy_check_mark: | :x: | Create metadata constraints. | +| Create metadata constraints[^1] | :heavy_check_mark: | :heavy_check_mark: | :x: | Create metadata constraints. | | Manage Platform Settings | :heavy_check_mark: | :x: | :x: | View and change platform-level settings, like integrations & notifications. | | Manage Monitors | :heavy_check_mark: | :x: | :x: | Create, update, and delete any data asset monitors, including Custom SQL monitors. Grant with care. 
| +[^1]: Deprecated feature + ##### Metadata Privileges | Privilege | Admin | Editor | Reader | Description | From d81161e6e4e1546b4fe5e841772b9e9599e82ea6 Mon Sep 17 00:00:00 2001 From: Chris Collins Date: Thu, 15 Aug 2024 15:44:49 -0400 Subject: [PATCH 47/72] fix(delete) Fix removing completed/verified forms references (#11172) --- .../com/linkedin/metadata/entity/DeleteEntityService.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/DeleteEntityService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/DeleteEntityService.java index aed9b97411ff6..ed14dec4ed940 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/DeleteEntityService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/DeleteEntityService.java @@ -729,11 +729,11 @@ private MetadataChangeProposal updateFormsAspect( .collect(Collectors.toList()); List completedForms = formsAspect.getCompletedForms().stream() - .filter(completedForm -> completedForm.getUrn() != deletedUrn) + .filter(completedForm -> !completedForm.getUrn().equals(deletedUrn)) .collect(Collectors.toList()); final List verifications = formsAspect.getVerifications().stream() - .filter(verification -> verification.getForm() != deletedUrn) + .filter(verification -> !verification.getForm().equals(deletedUrn)) .collect(Collectors.toList()); updatedAspect.get().setIncompleteForms(new FormAssociationArray(incompleteForms)); From 3b895d9062c431c5e880d05339aba326d5929321 Mon Sep 17 00:00:00 2001 From: RyanHolstien Date: Thu, 15 Aug 2024 15:59:22 -0500 Subject: [PATCH 48/72] feat(docs): update docs for new release (#11164) --- docs-website/docusaurus.config.js | 8 ++++++++ docs-website/versions.json | 1 + docs/how/updating-datahub.md | 10 ++++++++++ .../examples/mce_files/bootstrap_mce.json | 4 ++-- 4 files changed, 21 insertions(+), 2 deletions(-) diff --git 
a/docs-website/docusaurus.config.js b/docs-website/docusaurus.config.js index 1a40c986b3167..3b2019f785c1e 100644 --- a/docs-website/docusaurus.config.js +++ b/docs-website/docusaurus.config.js @@ -170,6 +170,14 @@ module.exports = { value: '

    ', }, { + value: ` + 0.14.0 + + + `, + type: "html", + }, + { value: ` 0.13.0 diff --git a/docs-website/versions.json b/docs-website/versions.json index afd30a317c618..5288c42437c77 100644 --- a/docs-website/versions.json +++ b/docs-website/versions.json @@ -1,3 +1,4 @@ [ + "0.14.0", "0.13.1" ] diff --git a/docs/how/updating-datahub.md b/docs/how/updating-datahub.md index 08ababcb5cfce..2443375099b7b 100644 --- a/docs/how/updating-datahub.md +++ b/docs/how/updating-datahub.md @@ -20,6 +20,16 @@ This file documents any backwards-incompatible changes in DataHub and assists pe ### Breaking Changes +### Potential Downtime + +### Deprecations + +### Other Notable Changes + +## 0.14.0 + +### Breaking Changes + - Protobuf CLI will no longer create binary encoded protoc custom properties. Flag added `-protocProp` in case this behavior is required. - #10814 Data flow info and data job info aspect will produce an additional field that will require a corresponding upgrade of server. Otherwise server can reject the aspects. 
diff --git a/metadata-ingestion/examples/mce_files/bootstrap_mce.json b/metadata-ingestion/examples/mce_files/bootstrap_mce.json index fbe6b9953cb4f..bc218e5e8c2d5 100644 --- a/metadata-ingestion/examples/mce_files/bootstrap_mce.json +++ b/metadata-ingestion/examples/mce_files/bootstrap_mce.json @@ -3394,7 +3394,7 @@ "changeType":"UPSERT", "aspectName":"datasetProfile", "aspect":{ - "value":"{\"timestampMillis\": 1679515693000, \"rowCount\": 4500, \"columnCount\": 2, \"sizeInBytes\": 842000200000, \"fieldProfiles\": [{\"fieldPath\": \"field_foo\", \"uniqueCount\": 2, \"uniqueProportion\": 0.00044, \"nullCount\": 0, \"nullProportion\": 0.0, \"sampleValues\": [\"true\", \"false\"]}, {\"fieldPath\": \"field_bar\", \"uniqueCount\": 2, \"uniqueProportion\": 0.00044, \"nullCount\": 0, \"nullProportion\": 0.0, \"sampleValues\": [\"false\"]}]}", + "value":"{\"timestampMillis\": 1723488954865, \"rowCount\": 4500, \"columnCount\": 2, \"sizeInBytes\": 842000200000, \"fieldProfiles\": [{\"fieldPath\": \"field_foo\", \"uniqueCount\": 2, \"uniqueProportion\": 0.00044, \"nullCount\": 0, \"nullProportion\": 0.0, \"sampleValues\": [\"true\", \"false\"]}, {\"fieldPath\": \"field_bar\", \"uniqueCount\": 2, \"uniqueProportion\": 0.00044, \"nullCount\": 0, \"nullProportion\": 0.0, \"sampleValues\": [\"false\"]}]}", "contentType":"application/json" }, "systemMetadata":null @@ -3406,7 +3406,7 @@ "changeType":"UPSERT", "aspectName":"datasetProfile", "aspect":{ - "value":"{\"timestampMillis\": 1684786093000, \"rowCount\": 3500, \"columnCount\": 2, \"fieldProfiles\": [{\"fieldPath\": \"field_foo\", \"uniqueCount\": 2, \"uniqueProportion\": 0.00057, \"nullCount\": 0, \"nullProportion\": 0.0, \"sampleValues\": [\"true\", \"false\"]}, {\"fieldPath\": \"field_bar\", \"uniqueCount\": 2, \"uniqueProportion\": 0.00057, \"nullCount\": 0, \"nullProportion\": 0.0, \"sampleValues\": [\"true\"]}]}", + "value":"{\"timestampMillis\": 1723488954865, \"rowCount\": 3500, \"columnCount\": 2, 
\"fieldProfiles\": [{\"fieldPath\": \"field_foo\", \"uniqueCount\": 2, \"uniqueProportion\": 0.00057, \"nullCount\": 0, \"nullProportion\": 0.0, \"sampleValues\": [\"true\", \"false\"]}, {\"fieldPath\": \"field_bar\", \"uniqueCount\": 2, \"uniqueProportion\": 0.00057, \"nullCount\": 0, \"nullProportion\": 0.0, \"sampleValues\": [\"true\"]}]}", "contentType":"application/json" }, "systemMetadata":null From 43b5a5f1388e0b33879d1b61c0f98963b44df8ff Mon Sep 17 00:00:00 2001 From: RyanHolstien Date: Thu, 15 Aug 2024 16:07:01 -0500 Subject: [PATCH 49/72] fix(ingest): invalid urn should not fail full batch of changes (#11187) --- .../entity/ebean/batch/AspectsBatchImpl.java | 22 ++++++---- .../ebean/batch/AspectsBatchImplTest.java | 42 +++++++++++++++++-- 2 files changed, 53 insertions(+), 11 deletions(-) diff --git a/metadata-io/metadata-io-api/src/main/java/com/linkedin/metadata/entity/ebean/batch/AspectsBatchImpl.java b/metadata-io/metadata-io-api/src/main/java/com/linkedin/metadata/entity/ebean/batch/AspectsBatchImpl.java index 0808c29e8ea89..3ec090a3db3a4 100644 --- a/metadata-io/metadata-io-api/src/main/java/com/linkedin/metadata/entity/ebean/batch/AspectsBatchImpl.java +++ b/metadata-io/metadata-io-api/src/main/java/com/linkedin/metadata/entity/ebean/batch/AspectsBatchImpl.java @@ -170,16 +170,22 @@ public AspectsBatchImplBuilder mcps( mcps.stream() .map( mcp -> { - if (mcp.getChangeType().equals(ChangeType.PATCH)) { - return PatchItemImpl.PatchItemImplBuilder.build( - mcp, - auditStamp, - retrieverContext.getAspectRetriever().getEntityRegistry()); - } else { - return ChangeItemImpl.ChangeItemImplBuilder.build( - mcp, auditStamp, retrieverContext.getAspectRetriever()); + try { + if (mcp.getChangeType().equals(ChangeType.PATCH)) { + return PatchItemImpl.PatchItemImplBuilder.build( + mcp, + auditStamp, + retrieverContext.getAspectRetriever().getEntityRegistry()); + } else { + return ChangeItemImpl.ChangeItemImplBuilder.build( + mcp, auditStamp, 
retrieverContext.getAspectRetriever()); + } + } catch (IllegalArgumentException e) { + log.error("Invalid proposal, skipping and proceeding with batch: " + mcp, e); + return null; } }) + .filter(Objects::nonNull) .collect(Collectors.toList())); return this; } diff --git a/metadata-io/metadata-io-api/src/test/java/com/linkedin/metadata/entity/ebean/batch/AspectsBatchImplTest.java b/metadata-io/metadata-io-api/src/test/java/com/linkedin/metadata/entity/ebean/batch/AspectsBatchImplTest.java index d2e7243d04560..31dd868b4cb4a 100644 --- a/metadata-io/metadata-io-api/src/test/java/com/linkedin/metadata/entity/ebean/batch/AspectsBatchImplTest.java +++ b/metadata-io/metadata-io-api/src/test/java/com/linkedin/metadata/entity/ebean/batch/AspectsBatchImplTest.java @@ -1,22 +1,26 @@ package com.linkedin.metadata.entity.ebean.batch; -import static com.linkedin.metadata.Constants.DATASET_ENTITY_NAME; -import static com.linkedin.metadata.Constants.STATUS_ASPECT_NAME; -import static com.linkedin.metadata.Constants.STRUCTURED_PROPERTIES_ASPECT_NAME; +import static com.linkedin.metadata.Constants.*; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import static org.testng.Assert.assertEquals; +import com.google.common.collect.ImmutableList; +import com.linkedin.common.FabricType; import com.linkedin.common.Status; +import com.linkedin.common.urn.DataPlatformUrn; +import com.linkedin.common.urn.DatasetUrn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.ByteString; import com.linkedin.data.schema.annotation.PathSpecBasedSchemaAnnotationVisitor; +import com.linkedin.dataset.DatasetProperties; import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.aspect.AspectRetriever; import com.linkedin.metadata.aspect.GraphRetriever; import com.linkedin.metadata.aspect.batch.MCPItem; import com.linkedin.metadata.aspect.patch.GenericJsonPatch; import com.linkedin.metadata.aspect.patch.PatchOperationType; +import 
com.linkedin.metadata.aspect.patch.builder.DatasetPropertiesPatchBuilder; import com.linkedin.metadata.aspect.plugins.config.AspectPluginConfig; import com.linkedin.metadata.aspect.plugins.hooks.MutationHook; import com.linkedin.metadata.entity.SearchRetriever; @@ -297,6 +301,38 @@ public void toUpsertBatchItemsProposedItemTest() { "Mutation to status aspect"); } + @Test + public void singleInvalidDoesntBreakBatch() { + MetadataChangeProposal proposal1 = + new DatasetPropertiesPatchBuilder() + .urn(new DatasetUrn(new DataPlatformUrn("platform"), "name", FabricType.PROD)) + .setDescription("something") + .setName("name") + .addCustomProperty("prop1", "propVal1") + .addCustomProperty("prop2", "propVal2") + .build(); + MetadataChangeProposal proposal2 = + new MetadataChangeProposal() + .setEntityType(DATASET_ENTITY_NAME) + .setAspectName(DATASET_PROPERTIES_ASPECT_NAME) + .setAspect(GenericRecordUtils.serializeAspect(new DatasetProperties())) + .setChangeType(ChangeType.UPSERT); + + AspectsBatchImpl testBatch = + AspectsBatchImpl.builder() + .mcps( + ImmutableList.of(proposal1, proposal2), + AuditStampUtils.createDefaultAuditStamp(), + retrieverContext) + .retrieverContext(retrieverContext) + .build(); + + assertEquals( + testBatch.toUpsertBatchItems(Map.of()).getSecond().size(), + 1, + "Expected 1 valid mcp to be passed through."); + } + /** Converts unsupported to status aspect */ @Getter @Setter From 0e045432bb4c4ac9d6b14d8798515e824c0c3a9e Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Thu, 15 Aug 2024 17:20:45 -0500 Subject: [PATCH 50/72] fix(kafka-setup): add missing script to image (#11190) --- docker/kafka-setup/Dockerfile | 1 + 1 file changed, 1 insertion(+) diff --git a/docker/kafka-setup/Dockerfile b/docker/kafka-setup/Dockerfile index 549373d5d457e..ad1d01c1ce97c 100644 --- a/docker/kafka-setup/Dockerfile +++ b/docker/kafka-setup/Dockerfile @@ -71,6 +71,7 @@ COPY docker/kafka-setup/kafka-setup.sh 
./kafka-setup.sh COPY docker/kafka-setup/kafka-config.sh ./kafka-config.sh COPY docker/kafka-setup/kafka-topic-workers.sh ./kafka-topic-workers.sh COPY docker/kafka-setup/kafka-ready.sh ./kafka-ready.sh +COPY docker/kafka-setup/env_to_properties.py ./env_to_properties.py RUN chmod +x ./kafka-setup.sh ./kafka-topic-workers.sh ./kafka-ready.sh From 12b3da3d719320fbd4a323a2fbee129b3d3e8fa9 Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Thu, 15 Aug 2024 21:49:27 -0500 Subject: [PATCH 51/72] fix(config): fix hash algo config (#11191) --- .../systemmetadata/ElasticSearchSystemMetadataService.java | 4 ++-- .../systemmetadata/SystemMetadataServiceTestBase.java | 2 +- .../common/ElasticSearchSystemMetadataServiceFactory.java | 7 +++++-- 3 files changed, 8 insertions(+), 5 deletions(-) diff --git a/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/ElasticSearchSystemMetadataService.java b/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/ElasticSearchSystemMetadataService.java index cdfc4e985293f..fe79ba75cb1d1 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/ElasticSearchSystemMetadataService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/ElasticSearchSystemMetadataService.java @@ -52,6 +52,7 @@ public class ElasticSearchSystemMetadataService private final IndexConvention _indexConvention; private final ESSystemMetadataDAO _esDAO; private final ESIndexBuilder _indexBuilder; + @Nonnull private final String elasticIdHashAlgo; private static final String DOC_DELIMETER = "--"; public static final String INDEX_NAME = "system_metadata_service_v1"; @@ -86,10 +87,9 @@ private String toDocument(SystemMetadata systemMetadata, String urn, String aspe private String toDocId(@Nonnull final String urn, @Nonnull final String aspect) { String rawDocId = urn + DOC_DELIMETER + aspect; - String hashAlgo = System.getenv("ELASTIC_ID_HASH_ALGO"); try { byte[] 
bytesOfRawDocID = rawDocId.getBytes(StandardCharsets.UTF_8); - MessageDigest md = MessageDigest.getInstance(hashAlgo); + MessageDigest md = MessageDigest.getInstance(elasticIdHashAlgo); byte[] thedigest = md.digest(bytesOfRawDocID); return Base64.getEncoder().encodeToString(thedigest); } catch (NoSuchAlgorithmException e) { diff --git a/metadata-io/src/test/java/com/linkedin/metadata/systemmetadata/SystemMetadataServiceTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/systemmetadata/SystemMetadataServiceTestBase.java index d843191bed741..af7005c93c46d 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/systemmetadata/SystemMetadataServiceTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/systemmetadata/SystemMetadataServiceTestBase.java @@ -54,7 +54,7 @@ private ElasticSearchSystemMetadataService buildService() { ESSystemMetadataDAO dao = new ESSystemMetadataDAO(getSearchClient(), _indexConvention, getBulkProcessor(), 1); return new ElasticSearchSystemMetadataService( - getBulkProcessor(), _indexConvention, dao, getIndexBuilder()); + getBulkProcessor(), _indexConvention, dao, getIndexBuilder(), "MD5"); } @Test diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/ElasticSearchSystemMetadataServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/ElasticSearchSystemMetadataServiceFactory.java index d560fba399f34..fb48d64ce7ba9 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/ElasticSearchSystemMetadataServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/ElasticSearchSystemMetadataServiceFactory.java @@ -6,6 +6,7 @@ import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.beans.factory.annotation.Value; import 
org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; @@ -19,7 +20,8 @@ public class ElasticSearchSystemMetadataServiceFactory { @Bean(name = "elasticSearchSystemMetadataService") @Nonnull - protected ElasticSearchSystemMetadataService getInstance() { + protected ElasticSearchSystemMetadataService getInstance( + @Value("${elasticsearch.idHashAlgo}") final String elasticIdHashAlgo) { return new ElasticSearchSystemMetadataService( components.getBulkProcessor(), components.getIndexConvention(), @@ -28,6 +30,7 @@ protected ElasticSearchSystemMetadataService getInstance() { components.getIndexConvention(), components.getBulkProcessor(), components.getNumRetries()), - components.getIndexBuilder()); + components.getIndexBuilder(), + elasticIdHashAlgo); } } From 11890e544540053d2ce82d4c414e8a09d937d817 Mon Sep 17 00:00:00 2001 From: skrydal Date: Fri, 16 Aug 2024 10:46:42 +0200 Subject: [PATCH 52/72] feat(ingest): allow custom SF API version (#11145) --- .../datahub/ingestion/source/salesforce.py | 41 ++++--- .../integration/salesforce/test_salesforce.py | 102 ++++++++++++++++-- 2 files changed, 120 insertions(+), 23 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/salesforce.py b/metadata-ingestion/src/datahub/ingestion/source/salesforce.py index 42128123c6144..7a7f1f30950eb 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/salesforce.py +++ b/metadata-ingestion/src/datahub/ingestion/source/salesforce.py @@ -3,7 +3,7 @@ import time from datetime import datetime from enum import Enum -from typing import Dict, Iterable, List, Optional +from typing import Any, Dict, Iterable, List, Optional import requests from pydantic import Field, validator @@ -124,6 +124,9 @@ class SalesforceConfig(DatasetSourceConfigMixin): default=dict(), description='Regex patterns for tables/schemas to describe domain_key domain key (domain_key can be any 
string like "sales".) There can be multiple domain keys specified.', ) + api_version: Optional[str] = Field( + description="If specified, overrides default version used by the Salesforce package. Example value: '59.0'" + ) profiling: SalesforceProfilingConfig = SalesforceProfilingConfig() @@ -222,6 +225,12 @@ def __init__(self, config: SalesforceConfig, ctx: PipelineContext) -> None: self.session = requests.Session() self.platform: str = "salesforce" self.fieldCounts = {} + common_args: Dict[str, Any] = { + "domain": "test" if self.config.is_sandbox else None, + "session": self.session, + } + if self.config.api_version: + common_args["version"] = self.config.api_version try: if self.config.auth is SalesforceAuthType.DIRECT_ACCESS_TOKEN: @@ -236,8 +245,7 @@ def __init__(self, config: SalesforceConfig, ctx: PipelineContext) -> None: self.sf = Salesforce( instance_url=self.config.instance_url, session_id=self.config.access_token, - session=self.session, - domain="test" if self.config.is_sandbox else None, + **common_args, ) elif self.config.auth is SalesforceAuthType.USERNAME_PASSWORD: logger.debug("Username/Password Provided in Config") @@ -255,8 +263,7 @@ def __init__(self, config: SalesforceConfig, ctx: PipelineContext) -> None: username=self.config.username, password=self.config.password, security_token=self.config.security_token, - session=self.session, - domain="test" if self.config.is_sandbox else None, + **common_args, ) elif self.config.auth is SalesforceAuthType.JSON_WEB_TOKEN: @@ -275,14 +282,13 @@ def __init__(self, config: SalesforceConfig, ctx: PipelineContext) -> None: username=self.config.username, consumer_key=self.config.consumer_key, privatekey=self.config.private_key, - session=self.session, - domain="test" if self.config.is_sandbox else None, + **common_args, ) except Exception as e: logger.error(e) raise ConfigurationError("Salesforce login failed") from e - else: + if not self.config.api_version: # List all REST API versions and use latest one 
versions_url = "https://{instance}/services/data/".format( instance=self.sf.sf_instance, @@ -290,17 +296,22 @@ def __init__(self, config: SalesforceConfig, ctx: PipelineContext) -> None: versions_response = self.sf._call_salesforce("GET", versions_url).json() latest_version = versions_response[-1] version = latest_version["version"] + # we could avoid setting the version like below (after the Salesforce object has been already initiated + # above), since, according to the docs: + # https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/dome_versions.htm + # we don't need to be authenticated to list the versions (so we could perform this call before even + # authenticating) self.sf.sf_version = version - self.base_url = "https://{instance}/services/data/v{sf_version}/".format( - instance=self.sf.sf_instance, sf_version=version - ) + self.base_url = "https://{instance}/services/data/v{sf_version}/".format( + instance=self.sf.sf_instance, sf_version=self.sf.sf_version + ) - logger.debug( - "Using Salesforce REST API with {label} version: {version}".format( - label=latest_version["label"], version=latest_version["version"] - ) + logger.debug( + "Using Salesforce REST API version: {version}".format( + version=self.sf.sf_version ) + ) def get_workunits_internal(self) -> Iterable[MetadataWorkUnit]: sObjects = self.get_salesforce_objects() diff --git a/metadata-ingestion/tests/integration/salesforce/test_salesforce.py b/metadata-ingestion/tests/integration/salesforce/test_salesforce.py index 8b6b883b2148d..89a37a372df84 100644 --- a/metadata-ingestion/tests/integration/salesforce/test_salesforce.py +++ b/metadata-ingestion/tests/integration/salesforce/test_salesforce.py @@ -1,10 +1,12 @@ import json import pathlib from unittest import mock +from unittest.mock import Mock from freezegun import freeze_time from datahub.ingestion.run.pipeline import Pipeline +from datahub.ingestion.source.salesforce import SalesforceConfig, SalesforceSource from 
tests.test_helpers import mce_helpers FROZEN_TIME = "2022-05-12 11:00:00" @@ -19,15 +21,16 @@ def _read_response(file_name: str) -> dict: return data -def side_effect_call_salesforce(type, url): - class MockResponse: - def __init__(self, json_data, status_code): - self.json_data = json_data - self.status_code = status_code +class MockResponse: + def __init__(self, json_data, status_code): + self.json_data = json_data + self.status_code = status_code + + def json(self): + return self.json_data - def json(self): - return self.json_data +def side_effect_call_salesforce(type, url): if url.endswith("/services/data/"): return MockResponse(_read_response("versions_response.json"), 200) if url.endswith("FROM EntityDefinition WHERE IsCustomizable = true"): @@ -55,9 +58,92 @@ def json(self): return MockResponse({}, 404) +@mock.patch("datahub.ingestion.source.salesforce.Salesforce") +def test_latest_version(mock_sdk): + mock_sf = mock.Mock() + mocked_call = mock.Mock() + mocked_call.side_effect = side_effect_call_salesforce + mock_sf._call_salesforce = mocked_call + mock_sdk.return_value = mock_sf + + config = SalesforceConfig.parse_obj( + { + "auth": "DIRECT_ACCESS_TOKEN", + "instance_url": "https://mydomain.my.salesforce.com/", + "access_token": "access_token`", + "ingest_tags": True, + "object_pattern": { + "allow": [ + "^Account$", + "^Property__c$", + ], + }, + "domain": {"sales": {"allow": {"^Property__c$"}}}, + "profiling": {"enabled": True}, + "profile_pattern": { + "allow": [ + "^Property__c$", + ] + }, + } + ) + SalesforceSource(config=config, ctx=Mock()) + calls = mock_sf._call_salesforce.mock_calls + assert ( + len(calls) == 1 + ), "We didn't specify version but source didn't call SF API to get the latest one" + assert calls[0].ends_with( + "/services/data" + ), "Source didn't call proper SF API endpoint to get all versions" + assert ( + mock_sf.sf_version == "54.0" + ), "API version was not correctly set (see versions_responses.json)" + + 
+@mock.patch("datahub.ingestion.source.salesforce.Salesforce") +def test_custom_version(mock_sdk): + mock_sf = mock.Mock() + mocked_call = mock.Mock() + mocked_call.side_effect = side_effect_call_salesforce + mock_sf._call_salesforce = mocked_call + mock_sdk.return_value = mock_sf + + config = SalesforceConfig.parse_obj( + { + "auth": "DIRECT_ACCESS_TOKEN", + "api_version": "46.0", + "instance_url": "https://mydomain.my.salesforce.com/", + "access_token": "access_token`", + "ingest_tags": True, + "object_pattern": { + "allow": [ + "^Account$", + "^Property__c$", + ], + }, + "domain": {"sales": {"allow": {"^Property__c$"}}}, + "profiling": {"enabled": True}, + "profile_pattern": { + "allow": [ + "^Property__c$", + ] + }, + } + ) + SalesforceSource(config=config, ctx=Mock()) + + calls = mock_sf._call_salesforce.mock_calls + assert ( + len(calls) == 0 + ), "Source called API to get all versions even though we specified proper version" + assert ( + mock_sdk.call_args.kwargs["version"] == "46.0" + ), "API client object was not correctly initialized with the custom version" + + @freeze_time(FROZEN_TIME) def test_salesforce_ingest(pytestconfig, tmp_path): - with mock.patch("simple_salesforce.Salesforce") as mock_sdk: + with mock.patch("datahub.ingestion.source.salesforce.Salesforce") as mock_sdk: mock_sf = mock.Mock() mocked_call = mock.Mock() mocked_call.side_effect = side_effect_call_salesforce From 608c5cfc330fc1521534ddacde45ec6aac4fb07b Mon Sep 17 00:00:00 2001 From: sagar-salvi-apptware <159135491+sagar-salvi-apptware@users.noreply.github.com> Date: Fri, 16 Aug 2024 17:18:35 +0530 Subject: [PATCH 53/72] fix(ingestion/transformer): extend dataset_to_data_product_urns_pattern to support containers (#11124) --- .../docs/transformer/dataset_transformer.md | 33 ++++++++++++++++++- .../transformer/add_dataset_dataproduct.py | 32 ++++++++++++++++-- 2 files changed, 62 insertions(+), 3 deletions(-) diff --git a/metadata-ingestion/docs/transformer/dataset_transformer.md 
b/metadata-ingestion/docs/transformer/dataset_transformer.md index ac6fefc309574..03a224bcf7da4 100644 --- a/metadata-ingestion/docs/transformer/dataset_transformer.md +++ b/metadata-ingestion/docs/transformer/dataset_transformer.md @@ -1207,20 +1207,51 @@ The config, which we’d append to our ingestion recipe YAML, would look like th | Field | Required | Type | Default | Description | |---------------------------------------|----------|----------------------|-------------|---------------------------------------------------------------------------------------------| | `dataset_to_data_product_urns_pattern`| ✅ | map[regx, urn] | | Dataset Entity urn with regular expression and dataproduct urn apply to matching entity urn.| +| `is_container` | | bool | `false` | Whether to also consider a container or not. If true, the data product will be attached to both the dataset and its container. | -Let’s suppose we’d like to append a series of dataproducts with specific datasets as its assets. To do so, we can use the `pattern_add_dataset_dataproduct` module that’s included in the ingestion framework. This will match the regex pattern to `urn` of the dataset and create the data product entity with given urn and matched datasets as its assets. + +Let’s suppose we’d like to append a series of data products with specific datasets or their containers as assets. To do so, we can use the pattern_add_dataset_dataproduct module that’s included in the ingestion framework. This module matches a regex pattern to the urn of the dataset and creates a data product entity with the given urn, associating the matched datasets as its assets. + +If the is_container field is set to true, the module will not only attach the data product to the matching datasets but will also find and attach the containers associated with those datasets. This means that both the datasets and their containers will be associated with the specified data product. 
The config, which we’d append to our ingestion recipe YAML, would look like this: +- Add Product to dataset + ```yaml + transformers: + - type: "pattern_add_dataset_dataproduct" + config: + dataset_to_data_product_urns_pattern: + rules: + ".*example1.*": "urn:li:dataProduct:first" + ".*example2.*": "urn:li:dataProduct:second" + ``` +- Add Product to dataset container ```yaml transformers: - type: "pattern_add_dataset_dataproduct" config: + is_container: true dataset_to_data_product_urns_pattern: rules: ".*example1.*": "urn:li:dataProduct:first" ".*example2.*": "urn:li:dataProduct:second" ``` +⚠️ Warning: +When working with two datasets in the same container but with different data products, only one data product can be attached to the container. + +For example: +```yaml +transformers: + - type: "pattern_add_dataset_dataproduct" + config: + is_container: true + dataset_to_data_product_urns_pattern: + rules: + ".*example1.*": "urn:li:dataProduct:first" + ".*example2.*": "urn:li:dataProduct:second" +``` +If example1 and example2 are in the same container, only urn:li:dataProduct:first will be added. However, if they are in separate containers, the system works as expected and assigns the correct data product URNs. 
## Add Dataset dataProduct ### Config Details diff --git a/metadata-ingestion/src/datahub/ingestion/transformer/add_dataset_dataproduct.py b/metadata-ingestion/src/datahub/ingestion/transformer/add_dataset_dataproduct.py index 45e9262843025..c474e423030e0 100644 --- a/metadata-ingestion/src/datahub/ingestion/transformer/add_dataset_dataproduct.py +++ b/metadata-ingestion/src/datahub/ingestion/transformer/add_dataset_dataproduct.py @@ -11,7 +11,7 @@ from datahub.ingestion.transformer.dataset_transformer import ( DatasetDataproductTransformer, ) -from datahub.metadata.schema_classes import MetadataChangeProposalClass +from datahub.metadata.schema_classes import ContainerClass, MetadataChangeProposalClass from datahub.specific.dataproduct import DataProductPatchBuilder logger = logging.getLogger(__name__) @@ -23,6 +23,8 @@ class AddDatasetDataProductConfig(ConfigModel): _resolve_data_product_fn = pydantic_resolve_key("get_data_product_to_add") + is_container: bool = False + class AddDatasetDataProduct(DatasetDataproductTransformer): """Transformer that adds dataproduct entity for provided dataset as its asset according to a callback function.""" @@ -49,10 +51,11 @@ def handle_end_of_stream( self, ) -> List[Union[MetadataChangeProposalWrapper, MetadataChangeProposalClass]]: data_products: Dict[str, DataProductPatchBuilder] = {} - + data_products_container: Dict[str, DataProductPatchBuilder] = {} logger.debug("Generating dataproducts") for entity_urn in self.entity_map.keys(): data_product_urn = self.config.get_data_product_to_add(entity_urn) + is_container = self.config.is_container if data_product_urn: if data_product_urn not in data_products: data_products[data_product_urn] = DataProductPatchBuilder( @@ -63,11 +66,34 @@ def handle_end_of_stream( data_product_urn ].add_asset(entity_urn) + if is_container: + assert self.ctx.graph + container_aspect = self.ctx.graph.get_aspect( + entity_urn, aspect_type=ContainerClass + ) + if not container_aspect: + continue + 
container_urn = container_aspect.container + if data_product_urn not in data_products_container: + container_product = DataProductPatchBuilder( + data_product_urn + ).add_asset(container_urn) + data_products_container[data_product_urn] = container_product + else: + data_products_container[ + data_product_urn + ] = data_products_container[data_product_urn].add_asset( + container_urn + ) + mcps: List[ Union[MetadataChangeProposalWrapper, MetadataChangeProposalClass] ] = [] for data_product in data_products.values(): mcps.extend(list(data_product.build())) + if is_container: + for data_product in data_products_container.values(): + mcps.extend(list(data_product.build())) return mcps @@ -97,6 +123,7 @@ def create( class PatternDatasetDataProductConfig(ConfigModel): dataset_to_data_product_urns_pattern: KeyValuePattern = KeyValuePattern.all() + is_container: bool = False @pydantic.root_validator(pre=True) def validate_pattern_value(cls, values: Dict) -> Dict: @@ -122,6 +149,7 @@ def __init__(self, config: PatternDatasetDataProductConfig, ctx: PipelineContext )[0] if dataset_to_data_product.value(dataset_urn) else None, + is_container=config.is_container, ) super().__init__(generic_config, ctx) From ae30041ed768b31dfde6efeae62b3ad2194c183a Mon Sep 17 00:00:00 2001 From: Chris Collins Date: Fri, 16 Aug 2024 11:58:33 -0400 Subject: [PATCH 54/72] fix(ui) Fix bug with editing entity names (#11186) --- .../profile/__tests__/EntityHeader.test.tsx | 43 +++++++++++++++++-- .../profile/header/EntityHeader.tsx | 11 +++-- 2 files changed, 47 insertions(+), 7 deletions(-) diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/__tests__/EntityHeader.test.tsx b/datahub-web-react/src/app/entity/shared/containers/profile/__tests__/EntityHeader.test.tsx index db347d4f1cc54..ec6a91df9019a 100644 --- a/datahub-web-react/src/app/entity/shared/containers/profile/__tests__/EntityHeader.test.tsx +++ 
b/datahub-web-react/src/app/entity/shared/containers/profile/__tests__/EntityHeader.test.tsx @@ -3,13 +3,14 @@ import { EntityType } from '../../../../../../types.generated'; import { getCanEditName } from '../header/EntityHeader'; describe('getCanEditName', () => { - const entityDataWithManagePrivileges = { privileges: { canManageEntity: true } }; - const entityDataWithoutManagePrivileges = { privileges: { canManageEntity: false } }; + const entityDataWithManagePrivileges = { privileges: { canManageEntity: true, canEditProperties: true } }; + const entityDataWithoutManagePrivileges = { privileges: { canManageEntity: false, canEditProperties: false } }; it('should return true for Terms if manageGlossaries privilege is true', () => { const canEditName = getCanEditName( EntityType.GlossaryTerm, entityDataWithoutManagePrivileges, + true, platformPrivileges, ); @@ -21,6 +22,7 @@ describe('getCanEditName', () => { const canEditName = getCanEditName( EntityType.GlossaryTerm, entityDataWithoutManagePrivileges, + true, privilegesWithoutGlossaries, ); @@ -32,6 +34,7 @@ describe('getCanEditName', () => { const canEditName = getCanEditName( EntityType.GlossaryTerm, entityDataWithManagePrivileges, + true, privilegesWithoutGlossaries, ); @@ -42,6 +45,7 @@ describe('getCanEditName', () => { const canEditName = getCanEditName( EntityType.GlossaryNode, entityDataWithoutManagePrivileges, + true, platformPrivileges, ); @@ -53,6 +57,7 @@ describe('getCanEditName', () => { const canEditName = getCanEditName( EntityType.GlossaryNode, entityDataWithoutManagePrivileges, + true, privilegesWithoutGlossaries, ); @@ -64,6 +69,7 @@ describe('getCanEditName', () => { const canEditName = getCanEditName( EntityType.GlossaryNode, entityDataWithManagePrivileges, + true, privilegesWithoutGlossaries, ); @@ -71,7 +77,12 @@ describe('getCanEditName', () => { }); it('should return true for Domains if manageDomains privilege is true', () => { - const canEditName = getCanEditName(EntityType.Domain, 
entityDataWithoutManagePrivileges, platformPrivileges); + const canEditName = getCanEditName( + EntityType.Domain, + entityDataWithoutManagePrivileges, + true, + platformPrivileges, + ); expect(canEditName).toBe(true); }); @@ -81,6 +92,7 @@ describe('getCanEditName', () => { const canEditName = getCanEditName( EntityType.Domain, entityDataWithoutManagePrivileges, + true, privilegesWithoutDomains, ); @@ -88,7 +100,30 @@ describe('getCanEditName', () => { }); it('should return false for an unsupported entity', () => { - const canEditName = getCanEditName(EntityType.Chart, entityDataWithManagePrivileges, platformPrivileges); + const canEditName = getCanEditName(EntityType.Chart, entityDataWithManagePrivileges, true, platformPrivileges); + + expect(canEditName).toBe(false); + }); + + it('should return true for a dataset if canEditProperties is true', () => { + const canEditName = getCanEditName(EntityType.Chart, entityDataWithManagePrivileges, true, platformPrivileges); + + expect(canEditName).toBe(false); + }); + + it('should return false for a dataset if canEditProperties is false', () => { + const canEditName = getCanEditName( + EntityType.Chart, + entityDataWithoutManagePrivileges, + true, + platformPrivileges, + ); + + expect(canEditName).toBe(false); + }); + + it('should return false for a dataset if isEditableDatasetNameEnabled is false', () => { + const canEditName = getCanEditName(EntityType.Chart, entityDataWithManagePrivileges, false, platformPrivileges); expect(canEditName).toBe(false); }); diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityHeader.tsx b/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityHeader.tsx index 11335d0378760..12fa9131f33c7 100644 --- a/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityHeader.tsx +++ b/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityHeader.tsx @@ -60,6 +60,7 @@ const TopButtonsWrapper = styled.div` export function 
getCanEditName( entityType: EntityType, entityData: GenericEntityProperties | null, + isEditableDatasetNameEnabled: boolean, privileges?: PlatformPrivileges, ) { switch (entityType) { @@ -73,7 +74,7 @@ export function getCanEditName( case EntityType.BusinessAttribute: return privileges?.manageBusinessAttributes; case EntityType.Dataset: - return entityData?.privileges?.canEditProperties; + return isEditableDatasetNameEnabled && entityData?.privileges?.canEditProperties; default: return false; } @@ -99,9 +100,13 @@ export const EntityHeader = ({ headerDropdownItems, headerActionItems, isNameEdi const isEditableDatasetNameEnabled = useIsEditableDatasetNameEnabled(); const canEditName = - isEditableDatasetNameEnabled && isNameEditable && - getCanEditName(entityType, entityData, me?.platformPrivileges as PlatformPrivileges); + getCanEditName( + entityType, + entityData, + isEditableDatasetNameEnabled, + me?.platformPrivileges as PlatformPrivileges, + ); const entityRegistry = useEntityRegistry(); return ( From 437569ab0e4bc6a7a834dba4bbc1164c74e89474 Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Fri, 16 Aug 2024 14:12:04 -0500 Subject: [PATCH 55/72] ci(smoke-test): allow smoke-test only PRs (#11194) --- .github/workflows/docker-unified.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/docker-unified.yml b/.github/workflows/docker-unified.yml index 2bb3930e2f56e..32e68a76a88f5 100644 --- a/.github/workflows/docker-unified.yml +++ b/.github/workflows/docker-unified.yml @@ -60,6 +60,7 @@ jobs: mysql_setup_change: ${{ steps.ci-optimize.outputs.mysql-setup-change == 'true' }} postgres_setup_change: ${{ steps.ci-optimize.outputs.postgres-setup-change == 'true' }} elasticsearch_setup_change: ${{ steps.ci-optimize.outputs.elasticsearch-setup-change == 'true' }} + smoke_test_change: ${{ steps.ci-optimize.outputs.smoke-test-change == 'true' }} steps: - name: Check out the repo uses: 
acryldata/sane-checkout-action@v3 @@ -813,7 +814,7 @@ jobs: echo 'matrix=["cypress_suite1","cypress_rest"]' >> $GITHUB_OUTPUT elif [ '${{ needs.setup.outputs.ingestion_only }}' == 'true' ]; then echo 'matrix=["no_cypress_suite0","no_cypress_suite1"]' >> $GITHUB_OUTPUT - elif [ '${{ needs.setup.outputs.backend_change }}' == 'true' ]; then + elif [[ '${{ needs.setup.outputs.backend_change }}' == 'true' || '${{ needs.setup.outputs.smoke_test_change }}' == 'true' ]]; then echo 'matrix=["no_cypress_suite0","no_cypress_suite1","cypress_suite1","cypress_rest"]' >> $GITHUB_OUTPUT else echo 'matrix=[]' >> $GITHUB_OUTPUT From cb33c0fef7486787c815840ea356d92b7f056256 Mon Sep 17 00:00:00 2001 From: sid-acryl <155424659+sid-acryl@users.noreply.github.com> Date: Sat, 17 Aug 2024 00:57:59 +0530 Subject: [PATCH 56/72] feat(ingestion/lookml): support looker `-- if` comments (#11113) --- metadata-ingestion/setup.py | 1 + .../source/looker/looker_constant.py | 9 + .../source/looker/looker_file_loader.py | 15 +- .../source/looker/looker_template_language.py | 351 +++++++++++-- .../source/looker/lookml_concept_context.py | 2 +- .../ingestion/source/looker/lookml_config.py | 9 +- .../ingestion/source/looker/lookml_source.py | 2 +- .../data.model.lkml | 8 + ...mployee_income_source_as_per_env.view.lkml | 40 ++ .../environment_activity_logs.view.lkml | 12 + .../vv_lineage_liquid_template_golden.json | 466 ++++++++++++++++++ 11 files changed, 867 insertions(+), 48 deletions(-) create mode 100644 metadata-ingestion/tests/integration/lookml/vv-lineage-and-liquid-templates/employee_income_source_as_per_env.view.lkml create mode 100644 metadata-ingestion/tests/integration/lookml/vv-lineage-and-liquid-templates/environment_activity_logs.view.lkml diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index abb716d2434ac..03b44401dd244 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -173,6 +173,7 @@ *sqlglot_lib, "GitPython>2", "python-liquid", + 
"deepmerge>=1.1.1" } bigquery_common = { diff --git a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_constant.py b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_constant.py index 21160cc97d4a6..5f47d361abb37 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_constant.py +++ b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_constant.py @@ -1 +1,10 @@ IMPORTED_PROJECTS = "imported_projects" +SQL_TABLE_NAME = "sql_table_name" +DATAHUB_TRANSFORMED_SQL_TABLE_NAME = "datahub_transformed_sql_table_name" +DERIVED_TABLE = "derived_table" +SQL = "sql" +DATAHUB_TRANSFORMED_SQL = "datahub_transformed_sql" +prod = "prod" +dev = "dev" +NAME = "name" +DERIVED_DOT_SQL = "derived.sql" diff --git a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_file_loader.py b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_file_loader.py index fd670c23ad9cb..52ebcdde06a27 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_file_loader.py +++ b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_file_loader.py @@ -1,17 +1,18 @@ import logging import pathlib from dataclasses import replace -from typing import Any, Dict, Optional +from typing import Dict, Optional from datahub.ingestion.source.looker.lkml_patched import load_lkml from datahub.ingestion.source.looker.looker_config import LookerConnectionDefinition from datahub.ingestion.source.looker.looker_dataclasses import LookerViewFile from datahub.ingestion.source.looker.looker_template_language import ( - resolve_liquid_variable_in_view_dict, + process_lookml_template_language, ) from datahub.ingestion.source.looker.lookml_config import ( _EXPLORE_FILE_EXTENSION, _VIEW_FILE_EXTENSION, + LookMLSourceConfig, LookMLSourceReport, ) @@ -29,13 +30,13 @@ def __init__( root_project_name: Optional[str], base_projects_folder: Dict[str, pathlib.Path], reporter: LookMLSourceReport, - liquid_variable: Dict[Any, Any], + 
source_config: LookMLSourceConfig, ) -> None: self.viewfile_cache: Dict[str, Optional[LookerViewFile]] = {} self._root_project_name = root_project_name self._base_projects_folder = base_projects_folder self.reporter = reporter - self.liquid_variable = liquid_variable + self.source_config = source_config def _load_viewfile( self, project_name: str, path: str, reporter: LookMLSourceReport @@ -73,9 +74,9 @@ def _load_viewfile( parsed = load_lkml(path) - resolve_liquid_variable_in_view_dict( - raw_view=parsed, - liquid_variable=self.liquid_variable, + process_lookml_template_language( + view_lkml_file_dict=parsed, + source_config=self.source_config, ) looker_viewfile = LookerViewFile.from_looker_dict( diff --git a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_template_language.py b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_template_language.py index 99f83b5e922ba..04f9ec081ee68 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_template_language.py +++ b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_template_language.py @@ -1,15 +1,31 @@ import logging import re -from typing import Any, ClassVar, Dict, Set +from abc import ABC, abstractmethod +from typing import Any, ClassVar, Dict, List, Optional, Set +from deepmerge import always_merger from liquid import Undefined from liquid.exceptions import LiquidSyntaxError +from datahub.ingestion.source.looker.looker_constant import ( + DATAHUB_TRANSFORMED_SQL, + DATAHUB_TRANSFORMED_SQL_TABLE_NAME, + DERIVED_DOT_SQL, + DERIVED_TABLE, + NAME, + SQL, + SQL_TABLE_NAME, + dev, + prod, +) from datahub.ingestion.source.looker.looker_liquid_tag import ( CustomTagException, create_template, ) -from datahub.ingestion.source.looker.lookml_config import DERIVED_VIEW_PATTERN +from datahub.ingestion.source.looker.lookml_config import ( + DERIVED_VIEW_PATTERN, + LookMLSourceConfig, +) logger = logging.getLogger(__name__) @@ -92,52 +108,311 @@ def 
resolve_liquid_variable(text: str, liquid_variable: Dict[Any, Any]) -> str: return text -def _drop_derived_view_pattern(value: str) -> str: - # Drop ${ and } - return re.sub(DERIVED_VIEW_PATTERN, r"\1", value) +class LookMLViewTransformer(ABC): + """ + There are many transformations that we need to perform on the LookML view to make it suitable for metadata ingestion. + + These transformations include: + + 1. Evaluating Looker templates, such as `-- if prod --` comments. Example `LookMlIfCommentTransformer`. + + 2. Resolving Liquid templates. Example `LiquidVariableTransformer`. + + 3. Removing ${} from derived view patterns. Example `DropDerivedViewPatternTransformer`. + (e.g., changing ${view_name.SQL_TABLE_NAME} to 4. view_name.SQL_TABLE_NAME). + + 4. Completing incomplete SQL fragments. Example `IncompleteSqlTransformer`. + + Each transformer works on specific attributes of the LookML view. For example, the #4 transformation is only + applicable to the view.derived.sql attribute, while the other transformations apply to both the + view.sql_table_name and view.derived.sql attributes. + + This class contains the logic to ensure that the transformer is applied to specific attributes and returns a + dictionary containing the transformed data. 
+ + For example: + In case of #1 and #2, it returns: + + **transformed derived_table:** + ``` + { + "derived_table": { + "datahub_transformed_sql": "" + } + } + ``` + + **Whereas original was:** + ``` + { + "derived_table": { + "sql": "" + } + } + ``` + + In case #3, it returns: + **transformed sql_table_name:** + ``` + { + "datahub_transformed_sql_table_name": "employee_income_source.SQL_TABLE_NAME" + } + ``` + + **Whereas original was:** + ``` + { + "sql_table_name": "${employee_income_source.SQL_TABLE_NAME}" + } + ``` + + In case #4, it returns: + **transformed derived_table:** + ``` + { + "derived_table": { + "datahub_transformed_sql": "SELECT column_a, column_b FROM foo" + } + } + ``` + + **Whereas original was:** + ``` + { + "derived_table": { + "sql": "column_a, column_b" + } + } + ``` + + Each transformation generates a section of the transformed dictionary with a new attribute named + `datahub_transformed_`. + """ + + source_config: LookMLSourceConfig + + def __init__(self, source_config: LookMLSourceConfig): + self.source_config = source_config + + def transform(self, view: dict) -> dict: + value_to_transform: Optional[str] = None + + # is_attribute_supported check is required because not all transformer works on all attributes in current + # case mostly all transformer works on sql_table_name and derived.sql attributes, + # however IncompleteSqlTransformer only transform the derived.sql attribute + if SQL_TABLE_NAME in view and self.is_attribute_supported(SQL_TABLE_NAME): + # Give precedence to already processed transformed view.sql_table_name to apply more transformation + value_to_transform = view.get( + DATAHUB_TRANSFORMED_SQL_TABLE_NAME, view[SQL_TABLE_NAME] + ) + if ( + DERIVED_TABLE in view + and SQL in view[DERIVED_TABLE] + and self.is_attribute_supported(DERIVED_DOT_SQL) + ): + # Give precedence to already processed transformed view.derived.sql to apply more transformation + value_to_transform = view[DERIVED_TABLE].get( + DATAHUB_TRANSFORMED_SQL, 
view[DERIVED_TABLE][SQL] + ) -def _complete_incomplete_sql(raw_view: dict, sql: str) -> str: + if value_to_transform is None: + return {} - # Looker supports sql fragments that omit the SELECT and FROM parts of the query - # Add those in if we detect that it is missing - sql_query: str = sql + logger.debug(f"value to transform = {value_to_transform}") - if not re.search(r"SELECT\s", sql_query, flags=re.I): - # add a SELECT clause at the beginning - sql_query = f"SELECT {sql}" + transformed_value: str = self._apply_transformation( + value=value_to_transform, view=view + ) - if not re.search(r"FROM\s", sql_query, flags=re.I): - # add a FROM clause at the end - sql_query = f"{sql_query} FROM {raw_view['name']}" + logger.debug(f"transformed value = {transformed_value}") - return _drop_derived_view_pattern(sql_query) + if SQL_TABLE_NAME in view and value_to_transform: + return {DATAHUB_TRANSFORMED_SQL_TABLE_NAME: transformed_value} + if DERIVED_TABLE in view and SQL in view[DERIVED_TABLE] and value_to_transform: + return {DERIVED_TABLE: {DATAHUB_TRANSFORMED_SQL: transformed_value}} -def resolve_liquid_variable_in_view_dict( - raw_view: dict, liquid_variable: Dict[Any, Any] -) -> None: - if "views" not in raw_view: - return + return {} - for view in raw_view["views"]: - if "sql_table_name" in view: - view["datahub_transformed_sql_table_name"] = resolve_liquid_variable( - text=view["sql_table_name"], - liquid_variable=liquid_variable, - ) # keeping original sql_table_name as is to avoid any visualization issue later + @abstractmethod + def _apply_transformation(self, value: str, view: dict) -> str: + pass - view["datahub_transformed_sql_table_name"] = _drop_derived_view_pattern( - value=view["datahub_transformed_sql_table_name"] - ) + def is_attribute_supported(self, attribute: str) -> bool: + return attribute in [DERIVED_DOT_SQL, SQL_TABLE_NAME] + + +class LiquidVariableTransformer(LookMLViewTransformer): + """ + Replace the liquid variables with their values. 
+ """ + + def _apply_transformation(self, value: str, view: dict) -> str: + return resolve_liquid_variable( + text=value, + liquid_variable=self.source_config.liquid_variable, + ) + + +class IncompleteSqlTransformer(LookMLViewTransformer): + """ + lookml view may contain the fragment of sql, however for lineage generation we need a complete sql. + IncompleteSqlTransformer will complete the view's derived.sql. + """ + + def is_attribute_supported(self, attribute: str) -> bool: + return attribute in [DERIVED_DOT_SQL] - if "derived_table" in view and "sql" in view["derived_table"]: - # In sql we don't need to remove the extra spaces as sql parser takes care of extra spaces and \n - # while generating URN from sql - view["derived_table"]["datahub_transformed_sql"] = resolve_liquid_variable( - text=view["derived_table"]["sql"], liquid_variable=liquid_variable - ) # keeping original sql as is, so that on UI sql will be shown same is it is visible on looker portal + def _apply_transformation(self, value: str, view: dict) -> str: + if DERIVED_TABLE not in view or SQL not in view[DERIVED_TABLE]: + # This transformation is only applicable in-case of view contains view.derived.sql + return value - view["derived_table"]["datahub_transformed_sql"] = _complete_incomplete_sql( - raw_view=view, sql=view["derived_table"]["datahub_transformed_sql"] + # Looker supports sql fragments that omit the SELECT and FROM parts of the query + # Add those in if we detect that it is missing + sql_query: str = value + + if not re.search(r"SELECT\s", sql_query, flags=re.I): + # add a SELECT clause at the beginning + sql_query = f"SELECT {sql_query}" + + if not re.search(r"FROM\s", sql_query, flags=re.I): + # add a FROM clause at the end + sql_query = f"{sql_query} FROM {view[NAME]}" + + return sql_query + + +class DropDerivedViewPatternTransformer(LookMLViewTransformer): + """ + drop ${} from datahub_transformed_sql_table_name and view["derived_table"]["datahub_transformed_sql_table_name"] values. 
+ + Example: transform ${employee_income_source.SQL_TABLE_NAME} to employee_income_source.SQL_TABLE_NAME + """ + + def _apply_transformation(self, value: str, view: dict) -> str: + return re.sub( + DERIVED_VIEW_PATTERN, + r"\1", + value, + ) + + +class LookMlIfCommentTransformer(LookMLViewTransformer): + """ + Evaluate the looker -- if -- comments. + """ + + evaluate_to_true_regx: str + remove_if_comment_line_regx: str + + def __init__(self, source_config: LookMLSourceConfig): + super().__init__(source_config=source_config) + + # This regx will keep whatever after -- if looker_environment -- + self.evaluate_to_true_regx = r"-- if {} --".format( + self.source_config.looker_environment + ) + + # It will remove all other lines starts with -- if ... -- + self.remove_if_comment_line_regx = r"-- if {} --.*?(?=\n|-- if|$)".format( + dev if self.source_config.looker_environment.lower() == prod else prod + ) + + def _apply_regx(self, value: str) -> str: + result: str = re.sub( + self.remove_if_comment_line_regx, "", value, flags=re.IGNORECASE | re.DOTALL + ) + + # Remove '-- if prod --' but keep the rest of the line + result = re.sub(self.evaluate_to_true_regx, "", result, flags=re.IGNORECASE) + + return result + + def _apply_transformation(self, value: str, view: dict) -> str: + return self._apply_regx(value) + + +class TransformedLookMlView: + """ + TransformedLookMlView is collecting output of LookMLViewTransformer and creating a new transformed LookML view. + TransformedLookMlView creates a copy of the original view dictionary and updates the copy with the transformed output. + The deepmerge library is used because Python's dict.update function doesn't merge nested fields. + + The transformed LookML view will contain the following attributes: + + ``` + { + "derived_table": { + "sql": "" + }, + + dimensions ..... + } + ``` + see documentation of LookMLViewTransformer for output of each transformer. 
+ """ + + transformers: List[LookMLViewTransformer] + view_dict: dict + transformed_dict: dict + + def __init__( + self, + transformers: List[LookMLViewTransformer], + view_dict: dict, + ): + self.transformers = transformers + self.view_dict = view_dict + self.transformed_dict = {} + + def view(self) -> dict: + if self.transformed_dict: + return self.transformed_dict + + self.transformed_dict = {**self.view_dict} + + logger.debug(f"Processing view {self.view_dict[NAME]}") + + for transformer in self.transformers: + logger.debug(f"Applying transformer {transformer.__class__.__name__}") + + self.transformed_dict = always_merger.merge( + self.transformed_dict, transformer.transform(self.transformed_dict) ) + + return self.transformed_dict + + +def process_lookml_template_language( + source_config: LookMLSourceConfig, + view_lkml_file_dict: dict, +) -> None: + if "views" not in view_lkml_file_dict: + return + + transformers: List[LookMLViewTransformer] = [ + LookMlIfCommentTransformer( + source_config=source_config + ), # First evaluate the -- if -- comments. 
Looker does the same + LiquidVariableTransformer( + source_config=source_config + ), # Now resolve liquid variables + DropDerivedViewPatternTransformer( + source_config=source_config + ), # Remove any ${} symbol + IncompleteSqlTransformer( + source_config=source_config + ), # complete any incomplete sql + ] + + transformed_views: List[dict] = [] + + for view in view_lkml_file_dict["views"]: + transformed_views.append( + TransformedLookMlView(transformers=transformers, view_dict=view).view() + ) + + view_lkml_file_dict["views"] = transformed_views diff --git a/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_concept_context.py b/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_concept_context.py index 7805b8b7b7d9a..69b9f842ac14d 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_concept_context.py +++ b/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_concept_context.py @@ -313,9 +313,9 @@ def datahub_transformed_sql_table_name(self) -> str: # remove extra spaces and new lines from sql_table_name if it is not a sql if not self.is_direct_sql_query_case(): - table_name = remove_extra_spaces_and_newlines(table_name) # Some sql_table_name fields contain quotes like: optimizely."group", just remove the quotes table_name = table_name.replace('"', "").replace("`", "").lower() + table_name = remove_extra_spaces_and_newlines(table_name).strip() return table_name diff --git a/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_config.py b/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_config.py index f4fb1316b16a2..0bcee14ec77a1 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_config.py +++ b/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_config.py @@ -1,7 +1,7 @@ import logging from dataclasses import dataclass, field as dataclass_field from datetime import timedelta -from typing import Any, Dict, List, Optional, Union +from typing import Any, 
Dict, List, Literal, Optional, Union import pydantic from pydantic import root_validator, validator @@ -174,6 +174,13 @@ class LookMLSourceConfig( "view.sql_table_name. Defaults to an empty dictionary.", ) + looker_environment: Literal["prod", "dev"] = Field( + "prod", + description="A looker prod or dev environment. " + "It helps to evaluate looker if comments i.e. -- if prod --. " + "All if comments are evaluated to true for configured looker_environment value", + ) + @validator("connection_to_platform_map", pre=True) def convert_string_to_connection_def(cls, conn_map): # Previous version of config supported strings in connection map. This upconverts strings to ConnectionMap diff --git a/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py b/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py index d77e65ac73323..b00291caabbf6 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py +++ b/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py @@ -669,7 +669,7 @@ def get_internal_workunits(self) -> Iterable[MetadataWorkUnit]: # noqa: C901 self.source_config.project_name, self.base_projects_folder, self.reporter, - self.source_config.liquid_variable, + self.source_config, ) # Some views can be mentioned by multiple 'include' statements and can be included via different connections. 
diff --git a/metadata-ingestion/tests/integration/lookml/vv-lineage-and-liquid-templates/data.model.lkml b/metadata-ingestion/tests/integration/lookml/vv-lineage-and-liquid-templates/data.model.lkml index 2cc6ae994d245..a87381dd0bf75 100644 --- a/metadata-ingestion/tests/integration/lookml/vv-lineage-and-liquid-templates/data.model.lkml +++ b/metadata-ingestion/tests/integration/lookml/vv-lineage-and-liquid-templates/data.model.lkml @@ -6,6 +6,8 @@ include: "employee_total_income.view.lkml" include: "top_10_employee_income_source.view.lkml" include: "employee_tax_report.view.lkml" include: "employee_salary_rating.view.lkml" +include: "environment_activity_logs.view.lkml" +include: "employee_income_source_as_per_env.view.lkml" include: "rent_as_employee_income_source.view.lkml" explore: activity_logs { @@ -26,5 +28,11 @@ explore: employee_tax_report { explore: employee_salary_rating { } +explore: environment_activity_logs { +} + +explore: employee_income_source_as_per_env { +} + explore: rent_as_employee_income_source { } \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/lookml/vv-lineage-and-liquid-templates/employee_income_source_as_per_env.view.lkml b/metadata-ingestion/tests/integration/lookml/vv-lineage-and-liquid-templates/employee_income_source_as_per_env.view.lkml new file mode 100644 index 0000000000000..4b8e0dd46a8ce --- /dev/null +++ b/metadata-ingestion/tests/integration/lookml/vv-lineage-and-liquid-templates/employee_income_source_as_per_env.view.lkml @@ -0,0 +1,40 @@ +view: employee_income_source_as_per_env { + derived_table: { + sql: SELECT + employee_id, + employee_name, + {% if dw_eff_dt_date._is_selected or finance_dw_eff_dt_date._is_selected %} + prod_core.data.r_metric_summary_v2 + {% elsif dw_eff_dt_week._is_selected or finance_dw_eff_dt_week._is_selected %} + prod_core.data.r_metric_summary_v3 + {% else %} + 'default_table' as source + {% endif %}, + employee_income + FROM -- if dev -- dev_income_source -- if prod -- 
prod_income_source + WHERE + {% condition source_region %} source_table.region {% endcondition %} + ;; + } + + dimension: id { + type: number + sql: ${TABLE}.employee_id;; + } + + dimension: name { + type: string + sql: ${TABLE}.employee_name;; + } + + dimension: source { + type: string + sql: ${TABLE}.source ;; + } + + dimension: income { + type: number + sql: ${TABLE}.employee_income ;; + } + +} diff --git a/metadata-ingestion/tests/integration/lookml/vv-lineage-and-liquid-templates/environment_activity_logs.view.lkml b/metadata-ingestion/tests/integration/lookml/vv-lineage-and-liquid-templates/environment_activity_logs.view.lkml new file mode 100644 index 0000000000000..efc7ba82754b8 --- /dev/null +++ b/metadata-ingestion/tests/integration/lookml/vv-lineage-and-liquid-templates/environment_activity_logs.view.lkml @@ -0,0 +1,12 @@ +view: environment_activity_logs { + sql_table_name: -- if prod -- prod.staging_app.stg_app__activity_logs + -- if dev -- {{ _user_attributes['dev_database_prefix'] }}analytics.{{ _user_attributes['dev_schema_prefix'] }}staging_app.stg_app__activity_logs + ;; + + dimension: generated_message_id { + group_label: "IDs" + primary_key: yes + type: number + sql: ${TABLE}."GENERATED_MESSAGE_ID" ;; + } +} diff --git a/metadata-ingestion/tests/integration/lookml/vv_lineage_liquid_template_golden.json b/metadata-ingestion/tests/integration/lookml/vv_lineage_liquid_template_golden.json index 2e55971b65bd4..b723aff080bc4 100644 --- a/metadata-ingestion/tests/integration/lookml/vv_lineage_liquid_template_golden.json +++ b/metadata-ingestion/tests/integration/lookml/vv_lineage_liquid_template_golden.json @@ -1580,6 +1580,472 @@ "lastRunId": "no-run-id-provided" } }, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.environment_activity_logs,PROD)", + "changeType": "UPSERT", + "aspectName": "subTypes", + "aspect": { + "json": { + "typeNames": [ + "View" + ] + } + }, + "systemMetadata": { + 
"lastObserved": 1586847600000, + "runId": "lookml-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.environment_activity_logs,PROD)", + "changeType": "UPSERT", + "aspectName": "viewProperties", + "aspect": { + "json": { + "materialized": false, + "viewLogic": "view: environment_activity_logs {\n sql_table_name: -- if prod -- prod.staging_app.stg_app__activity_logs\n -- if dev -- {{ _user_attributes['dev_database_prefix'] }}analytics.{{ _user_attributes['dev_schema_prefix'] }}staging_app.stg_app__activity_logs\n ;;\n\n dimension: generated_message_id {\n group_label: \"IDs\"\n primary_key: yes\n type: number\n sql: ${TABLE}.\"GENERATED_MESSAGE_ID\" ;;\n }\n}\n", + "viewLanguage": "lookml" + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "lookml-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.environment_activity_logs,PROD)", + "changeType": "UPSERT", + "aspectName": "container", + "aspect": { + "json": { + "container": "urn:li:container:78f22c19304954b15e8adb1d9809975e" + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "lookml-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "proposedSnapshot": { + "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { + "urn": "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.environment_activity_logs,PROD)", + "aspects": [ + { + "com.linkedin.pegasus2avro.common.BrowsePaths": { + "paths": [ + "/Develop/lkml_samples/" + ] + } + }, + { + "com.linkedin.pegasus2avro.common.Status": { + "removed": false + } + }, + { + "com.linkedin.pegasus2avro.dataset.UpstreamLineage": { + "upstreams": [ + { + "auditStamp": { + "time": 1586847600000, + "actor": "urn:li:corpuser:datahub" + }, + "dataset": 
"urn:li:dataset:(urn:li:dataPlatform:postgres,prod.staging_app.stg_app__activity_logs,PROD)", + "type": "VIEW" + } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,prod.staging_app.stg_app__activity_logs,PROD),generated_message_id)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.environment_activity_logs,PROD),generated_message_id)" + ], + "confidenceScore": 1.0 + } + ] + } + }, + { + "com.linkedin.pegasus2avro.schema.SchemaMetadata": { + "schemaName": "environment_activity_logs", + "platform": "urn:li:dataPlatform:looker", + "version": 0, + "created": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "hash": "", + "platformSchema": { + "com.linkedin.pegasus2avro.schema.OtherSchema": { + "rawSchema": "" + } + }, + "fields": [ + { + "fieldPath": "generated_message_id", + "nullable": false, + "description": "", + "label": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "number", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:Dimension" + } + ] + }, + "isPartOfKey": true + } + ], + "primaryKeys": [ + "generated_message_id" + ] + } + }, + { + "com.linkedin.pegasus2avro.dataset.DatasetProperties": { + "customProperties": { + "looker.file.path": "environment_activity_logs.view.lkml", + "looker.model": "data" + }, + "name": "environment_activity_logs", + "tags": [] + } + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "lookml-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.environment_activity_logs,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + 
"aspect": { + "json": { + "path": [ + { + "id": "Develop" + }, + { + "id": "urn:li:container:78f22c19304954b15e8adb1d9809975e", + "urn": "urn:li:container:78f22c19304954b15e8adb1d9809975e" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "lookml-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.employee_income_source_as_per_env,PROD)", + "changeType": "UPSERT", + "aspectName": "subTypes", + "aspect": { + "json": { + "typeNames": [ + "View" + ] + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "lookml-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.employee_income_source_as_per_env,PROD)", + "changeType": "UPSERT", + "aspectName": "viewProperties", + "aspect": { + "json": { + "materialized": false, + "viewLogic": "SELECT\n employee_id,\n employee_name,\n {% if dw_eff_dt_date._is_selected or finance_dw_eff_dt_date._is_selected %}\n prod_core.data.r_metric_summary_v2\n {% elsif dw_eff_dt_week._is_selected or finance_dw_eff_dt_week._is_selected %}\n prod_core.data.r_metric_summary_v3\n {% else %}\n 'default_table' as source\n {% endif %},\n employee_income\n FROM -- if dev -- dev_income_source -- if prod -- prod_income_source\n WHERE\n {% condition source_region %} source_table.region {% endcondition %}", + "viewLanguage": "sql" + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "lookml-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.employee_income_source_as_per_env,PROD)", + "changeType": "UPSERT", + "aspectName": "container", + "aspect": { + "json": { + "container": "urn:li:container:78f22c19304954b15e8adb1d9809975e" + } + }, + "systemMetadata": { + 
"lastObserved": 1586847600000, + "runId": "lookml-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "proposedSnapshot": { + "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { + "urn": "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.employee_income_source_as_per_env,PROD)", + "aspects": [ + { + "com.linkedin.pegasus2avro.common.BrowsePaths": { + "paths": [ + "/Develop/lkml_samples/" + ] + } + }, + { + "com.linkedin.pegasus2avro.common.Status": { + "removed": false + } + }, + { + "com.linkedin.pegasus2avro.dataset.UpstreamLineage": { + "upstreams": [ + { + "auditStamp": { + "time": 1586847600000, + "actor": "urn:li:corpuser:datahub" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,prod_income_source,PROD)", + "type": "VIEW" + } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,prod_income_source,PROD),employee_id)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.employee_income_source_as_per_env,PROD),id)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,prod_income_source,PROD),employee_name)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.employee_income_source_as_per_env,PROD),name)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,prod_income_source,PROD),source)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.employee_income_source_as_per_env,PROD),source)" + ], + "confidenceScore": 1.0 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ 
+ "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,prod_income_source,PROD),employee_income)" + ], + "downstreamType": "FIELD", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.employee_income_source_as_per_env,PROD),income)" + ], + "confidenceScore": 1.0 + } + ] + } + }, + { + "com.linkedin.pegasus2avro.schema.SchemaMetadata": { + "schemaName": "employee_income_source_as_per_env", + "platform": "urn:li:dataPlatform:looker", + "version": 0, + "created": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "hash": "", + "platformSchema": { + "com.linkedin.pegasus2avro.schema.OtherSchema": { + "rawSchema": "" + } + }, + "fields": [ + { + "fieldPath": "id", + "nullable": false, + "description": "", + "label": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "number", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:Dimension" + } + ] + }, + "isPartOfKey": false + }, + { + "fieldPath": "name", + "nullable": false, + "description": "", + "label": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:Dimension" + } + ] + }, + "isPartOfKey": false + }, + { + "fieldPath": "source", + "nullable": false, + "description": "", + "label": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:Dimension" + } + ] + }, + "isPartOfKey": false + }, + { + "fieldPath": "income", + "nullable": false, + "description": "", + "label": "", + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "number", + "recursive": false, + 
"globalTags": { + "tags": [ + { + "tag": "urn:li:tag:Dimension" + } + ] + }, + "isPartOfKey": false + } + ], + "primaryKeys": [] + } + }, + { + "com.linkedin.pegasus2avro.dataset.DatasetProperties": { + "customProperties": { + "looker.file.path": "employee_income_source_as_per_env.view.lkml", + "looker.model": "data" + }, + "name": "employee_income_source_as_per_env", + "tags": [] + } + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "lookml-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.employee_income_source_as_per_env,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "Develop" + }, + { + "id": "urn:li:container:78f22c19304954b15e8adb1d9809975e", + "urn": "urn:li:container:78f22c19304954b15e8adb1d9809975e" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "lookml-test", + "lastRunId": "no-run-id-provided" + } +}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.rent_as_employee_income_source,PROD)", From edb9a87b84e2491a22571fecb7b87c0fe2fa012c Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Fri, 16 Aug 2024 14:41:44 -0500 Subject: [PATCH 57/72] fix(elasticsearch): refactor idHashAlgo setting (#11193) --- .../datahub-gms/env/docker-without-neo4j.env | 2 -- docker/datahub-gms/env/docker.env | 2 -- .../env/docker-without-neo4j.env | 2 -- docker/datahub-mae-consumer/env/docker.env | 2 -- .../env/docker-without-neo4j.env | 2 -- docker/datahub-mce-consumer/env/docker.env | 2 -- .../docker-compose-m1.quickstart.yml | 1 - ...er-compose-without-neo4j-m1.quickstart.yml | 1 - ...ocker-compose-without-neo4j.quickstart.yml | 1 - ...ose.consumers-without-neo4j.quickstart.yml | 2 -- .../docker-compose.consumers.quickstart.yml | 2 -- 
.../quickstart/docker-compose.quickstart.yml | 1 - .../metadata/aspect/models/graph/Edge.java | 6 ++-- metadata-io/build.gradle | 1 - .../elastic/ElasticSearchGraphService.java | 5 ++-- .../service/UpdateIndicesService.java | 9 ++++-- .../TimeseriesAspectTransformer.java | 20 +++++++------ .../search/SearchGraphServiceTestBase.java | 5 ++-- .../search/LineageServiceTestBase.java | 2 +- .../search/SearchServiceTestBase.java | 2 +- .../metadata/search/TestEntityTestBase.java | 3 +- .../metadata/search/query/BrowseDAOTest.java | 2 +- .../SystemMetadataServiceTestBase.java | 2 +- .../TimeseriesAspectServiceTestBase.java | 4 +-- .../SampleDataFixtureConfiguration.java | 4 +-- .../SearchLineageFixtureConfiguration.java | 5 ++-- .../kafka/hook/UpdateIndicesHookTest.java | 6 ++-- .../metadata/context/SearchContext.java | 2 +- .../context/TestOperationContexts.java | 2 +- .../metadata/context/SearchContextTest.java | 18 +++++------ .../search/ElasticSearchConfiguration.java | 1 + .../ElasticSearchGraphServiceFactory.java | 7 +++-- .../common/IndexConventionFactory.java | 5 ++-- .../indices/UpdateIndicesServiceFactory.java | 13 +++++--- .../metadata/resources/usage/UsageStats.java | 30 ++----------------- .../utils/elasticsearch/IndexConvention.java | 3 ++ .../elasticsearch/IndexConventionImpl.java | 9 ++++-- .../IndexConventionImplTest.java | 8 ++--- smoke-test/run-quickstart.sh | 2 -- smoke-test/set-test-env-vars.sh | 3 +- 40 files changed, 90 insertions(+), 109 deletions(-) diff --git a/docker/datahub-gms/env/docker-without-neo4j.env b/docker/datahub-gms/env/docker-without-neo4j.env index 37b7ba1797af5..cc0dd6b4278b5 100644 --- a/docker/datahub-gms/env/docker-without-neo4j.env +++ b/docker/datahub-gms/env/docker-without-neo4j.env @@ -23,8 +23,6 @@ PE_CONSUMER_ENABLED=true UI_INGESTION_ENABLED=true ENTITY_SERVICE_ENABLE_RETENTION=true -ELASTIC_ID_HASH_ALGO=MD5 - # Uncomment to disable persistence of client-side analytics events # DATAHUB_ANALYTICS_ENABLED=false diff --git 
a/docker/datahub-gms/env/docker.env b/docker/datahub-gms/env/docker.env index 0ecaa32c4cb12..59fc4bdde02ff 100644 --- a/docker/datahub-gms/env/docker.env +++ b/docker/datahub-gms/env/docker.env @@ -27,8 +27,6 @@ MCE_CONSUMER_ENABLED=true PE_CONSUMER_ENABLED=true UI_INGESTION_ENABLED=true -ELASTIC_ID_HASH_ALGO=MD5 - # Uncomment to enable Metadata Service Authentication METADATA_SERVICE_AUTH_ENABLED=false diff --git a/docker/datahub-mae-consumer/env/docker-without-neo4j.env b/docker/datahub-mae-consumer/env/docker-without-neo4j.env index 6a82f235b2971..b6899f7e6d63b 100644 --- a/docker/datahub-mae-consumer/env/docker-without-neo4j.env +++ b/docker/datahub-mae-consumer/env/docker-without-neo4j.env @@ -13,8 +13,6 @@ ES_BULK_REFRESH_POLICY=WAIT_UNTIL GRAPH_SERVICE_IMPL=elasticsearch ENTITY_REGISTRY_CONFIG_PATH=/datahub/datahub-mae-consumer/resources/entity-registry.yml -ELASTIC_ID_HASH_ALGO=MD5 - # Uncomment to disable persistence of client-side analytics events # DATAHUB_ANALYTICS_ENABLED=false diff --git a/docker/datahub-mae-consumer/env/docker.env b/docker/datahub-mae-consumer/env/docker.env index 1f0ee4b05b382..5a6daa6eaeaed 100644 --- a/docker/datahub-mae-consumer/env/docker.env +++ b/docker/datahub-mae-consumer/env/docker.env @@ -17,8 +17,6 @@ NEO4J_PASSWORD=datahub GRAPH_SERVICE_IMPL=neo4j ENTITY_REGISTRY_CONFIG_PATH=/datahub/datahub-mae-consumer/resources/entity-registry.yml -ELASTIC_ID_HASH_ALGO=MD5 - # Uncomment to disable persistence of client-side analytics events # DATAHUB_ANALYTICS_ENABLED=false diff --git a/docker/datahub-mce-consumer/env/docker-without-neo4j.env b/docker/datahub-mce-consumer/env/docker-without-neo4j.env index b0edfc0a75b66..e7be7d8ed4ddc 100644 --- a/docker/datahub-mce-consumer/env/docker-without-neo4j.env +++ b/docker/datahub-mce-consumer/env/docker-without-neo4j.env @@ -24,8 +24,6 @@ MAE_CONSUMER_ENABLED=false PE_CONSUMER_ENABLED=false UI_INGESTION_ENABLED=false -ELASTIC_ID_HASH_ALGO=MD5 - # Uncomment to configure kafka topic names # 
Make sure these names are consistent across the whole deployment # METADATA_CHANGE_PROPOSAL_TOPIC_NAME=MetadataChangeProposal_v1 diff --git a/docker/datahub-mce-consumer/env/docker.env b/docker/datahub-mce-consumer/env/docker.env index c0f85ef667546..8618f3f5f7af7 100644 --- a/docker/datahub-mce-consumer/env/docker.env +++ b/docker/datahub-mce-consumer/env/docker.env @@ -24,8 +24,6 @@ MAE_CONSUMER_ENABLED=false PE_CONSUMER_ENABLED=false UI_INGESTION_ENABLED=false -ELASTIC_ID_HASH_ALGO=MD5 - # Uncomment to configure kafka topic names # Make sure these names are consistent across the whole deployment # METADATA_CHANGE_PROPOSAL_TOPIC_NAME=MetadataChangeProposal_v1 diff --git a/docker/quickstart/docker-compose-m1.quickstart.yml b/docker/quickstart/docker-compose-m1.quickstart.yml index a0f60d23710a0..834d55096468f 100644 --- a/docker/quickstart/docker-compose-m1.quickstart.yml +++ b/docker/quickstart/docker-compose-m1.quickstart.yml @@ -86,7 +86,6 @@ services: - ELASTICSEARCH_INDEX_BUILDER_MAPPINGS_REINDEX=true - ELASTICSEARCH_INDEX_BUILDER_SETTINGS_REINDEX=true - ELASTICSEARCH_PORT=9200 - - ELASTIC_ID_HASH_ALGO=MD5 - ENTITY_REGISTRY_CONFIG_PATH=/datahub/datahub-gms/resources/entity-registry.yml - ENTITY_SERVICE_ENABLE_RETENTION=true - ES_BULK_REFRESH_POLICY=WAIT_UNTIL diff --git a/docker/quickstart/docker-compose-without-neo4j-m1.quickstart.yml b/docker/quickstart/docker-compose-without-neo4j-m1.quickstart.yml index 11e33a9950ba9..47fb50f78e4f0 100644 --- a/docker/quickstart/docker-compose-without-neo4j-m1.quickstart.yml +++ b/docker/quickstart/docker-compose-without-neo4j-m1.quickstart.yml @@ -86,7 +86,6 @@ services: - ELASTICSEARCH_INDEX_BUILDER_MAPPINGS_REINDEX=true - ELASTICSEARCH_INDEX_BUILDER_SETTINGS_REINDEX=true - ELASTICSEARCH_PORT=9200 - - ELASTIC_ID_HASH_ALGO=MD5 - ENTITY_REGISTRY_CONFIG_PATH=/datahub/datahub-gms/resources/entity-registry.yml - ENTITY_SERVICE_ENABLE_RETENTION=true - ES_BULK_REFRESH_POLICY=WAIT_UNTIL diff --git 
a/docker/quickstart/docker-compose-without-neo4j.quickstart.yml b/docker/quickstart/docker-compose-without-neo4j.quickstart.yml index 2efa895983418..3fa13a9e56b42 100644 --- a/docker/quickstart/docker-compose-without-neo4j.quickstart.yml +++ b/docker/quickstart/docker-compose-without-neo4j.quickstart.yml @@ -86,7 +86,6 @@ services: - ELASTICSEARCH_INDEX_BUILDER_MAPPINGS_REINDEX=true - ELASTICSEARCH_INDEX_BUILDER_SETTINGS_REINDEX=true - ELASTICSEARCH_PORT=9200 - - ELASTIC_ID_HASH_ALGO=MD5 - ENTITY_REGISTRY_CONFIG_PATH=/datahub/datahub-gms/resources/entity-registry.yml - ENTITY_SERVICE_ENABLE_RETENTION=true - ES_BULK_REFRESH_POLICY=WAIT_UNTIL diff --git a/docker/quickstart/docker-compose.consumers-without-neo4j.quickstart.yml b/docker/quickstart/docker-compose.consumers-without-neo4j.quickstart.yml index 4f47a3da24eb1..a4211acedcf10 100644 --- a/docker/quickstart/docker-compose.consumers-without-neo4j.quickstart.yml +++ b/docker/quickstart/docker-compose.consumers-without-neo4j.quickstart.yml @@ -19,7 +19,6 @@ services: - ES_BULK_REFRESH_POLICY=WAIT_UNTIL - GRAPH_SERVICE_IMPL=elasticsearch - ENTITY_REGISTRY_CONFIG_PATH=/datahub/datahub-mae-consumer/resources/entity-registry.yml - - ELASTIC_ID_HASH_ALGO=MD5 hostname: datahub-mae-consumer image: ${DATAHUB_MAE_CONSUMER_IMAGE:-acryldata/datahub-mae-consumer}:${DATAHUB_VERSION:-head} ports: @@ -38,7 +37,6 @@ services: - EBEAN_DATASOURCE_USERNAME=datahub - ELASTICSEARCH_HOST=elasticsearch - ELASTICSEARCH_PORT=9200 - - ELASTIC_ID_HASH_ALGO=MD5 - ENTITY_REGISTRY_CONFIG_PATH=/datahub/datahub-mce-consumer/resources/entity-registry.yml - ENTITY_SERVICE_ENABLE_RETENTION=true - ES_BULK_REFRESH_POLICY=WAIT_UNTIL diff --git a/docker/quickstart/docker-compose.consumers.quickstart.yml b/docker/quickstart/docker-compose.consumers.quickstart.yml index 7dd7388b93988..e7571e4baf8b4 100644 --- a/docker/quickstart/docker-compose.consumers.quickstart.yml +++ b/docker/quickstart/docker-compose.consumers.quickstart.yml @@ -26,7 +26,6 @@ 
services: - NEO4J_PASSWORD=datahub - GRAPH_SERVICE_IMPL=neo4j - ENTITY_REGISTRY_CONFIG_PATH=/datahub/datahub-mae-consumer/resources/entity-registry.yml - - ELASTIC_ID_HASH_ALGO=MD5 hostname: datahub-mae-consumer image: ${DATAHUB_MAE_CONSUMER_IMAGE:-acryldata/datahub-mae-consumer}:${DATAHUB_VERSION:-head} ports: @@ -48,7 +47,6 @@ services: - EBEAN_DATASOURCE_USERNAME=datahub - ELASTICSEARCH_HOST=elasticsearch - ELASTICSEARCH_PORT=9200 - - ELASTIC_ID_HASH_ALGO=MD5 - ENTITY_REGISTRY_CONFIG_PATH=/datahub/datahub-mce-consumer/resources/entity-registry.yml - ENTITY_SERVICE_ENABLE_RETENTION=true - ES_BULK_REFRESH_POLICY=WAIT_UNTIL diff --git a/docker/quickstart/docker-compose.quickstart.yml b/docker/quickstart/docker-compose.quickstart.yml index f42ed1f40c246..c63b6d1d61b03 100644 --- a/docker/quickstart/docker-compose.quickstart.yml +++ b/docker/quickstart/docker-compose.quickstart.yml @@ -86,7 +86,6 @@ services: - ELASTICSEARCH_INDEX_BUILDER_MAPPINGS_REINDEX=true - ELASTICSEARCH_INDEX_BUILDER_SETTINGS_REINDEX=true - ELASTICSEARCH_PORT=9200 - - ELASTIC_ID_HASH_ALGO=MD5 - ENTITY_REGISTRY_CONFIG_PATH=/datahub/datahub-gms/resources/entity-registry.yml - ENTITY_SERVICE_ENABLE_RETENTION=true - ES_BULK_REFRESH_POLICY=WAIT_UNTIL diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/models/graph/Edge.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/models/graph/Edge.java index 3de09e599d99e..8777be57e1bd8 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/aspect/models/graph/Edge.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/models/graph/Edge.java @@ -13,6 +13,7 @@ import java.util.Map; import java.util.Optional; import java.util.stream.Collectors; +import javax.annotation.Nonnull; import lombok.AllArgsConstructor; import lombok.Data; import lombok.EqualsAndHashCode; @@ -59,7 +60,7 @@ public Edge( null); } - public String toDocId() { + public String toDocId(@Nonnull String idHashAlgo) { StringBuilder 
rawDocId = new StringBuilder(); rawDocId .append(getSource().toString()) @@ -72,9 +73,8 @@ public String toDocId() { } try { - String hashAlgo = System.getenv("ELASTIC_ID_HASH_ALGO"); byte[] bytesOfRawDocID = rawDocId.toString().getBytes(StandardCharsets.UTF_8); - MessageDigest md = MessageDigest.getInstance(hashAlgo); + MessageDigest md = MessageDigest.getInstance(idHashAlgo); byte[] thedigest = md.digest(bytesOfRawDocID); return Base64.getEncoder().encodeToString(thedigest); } catch (NoSuchAlgorithmException e) { diff --git a/metadata-io/build.gradle b/metadata-io/build.gradle index 17d9cb8cd14fe..ff29cb5fff47d 100644 --- a/metadata-io/build.gradle +++ b/metadata-io/build.gradle @@ -130,7 +130,6 @@ test { // override, testng controlling parallelization // increasing >1 will merely run all tests extra times maxParallelForks = 1 - environment "ELASTIC_ID_HASH_ALGO", "MD5" } useTestNG() { suites 'src/test/resources/testng.xml' diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ElasticSearchGraphService.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ElasticSearchGraphService.java index 5b0fb554a4f48..e1532ea4e26c0 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ElasticSearchGraphService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ElasticSearchGraphService.java @@ -64,6 +64,7 @@ public class ElasticSearchGraphService implements GraphService, ElasticSearchInd private final ESGraphWriteDAO _graphWriteDAO; private final ESGraphQueryDAO _graphReadDAO; private final ESIndexBuilder _indexBuilder; + private final String idHashAlgo; public static final String INDEX_NAME = "graph_service_v1"; private static final Map EMPTY_HASH = new HashMap<>(); @@ -125,7 +126,7 @@ public LineageRegistry getLineageRegistry() { @Override public void addEdge(@Nonnull final Edge edge) { - String docId = edge.toDocId(); + String docId = edge.toDocId(idHashAlgo); String edgeDocument = 
toDocument(edge); _graphWriteDAO.upsertDocument(docId, edgeDocument); } @@ -137,7 +138,7 @@ public void upsertEdge(@Nonnull final Edge edge) { @Override public void removeEdge(@Nonnull final Edge edge) { - String docId = edge.toDocId(); + String docId = edge.toDocId(idHashAlgo); _graphWriteDAO.deleteDocument(docId); } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/service/UpdateIndicesService.java b/metadata-io/src/main/java/com/linkedin/metadata/service/UpdateIndicesService.java index dff0a99a142b7..2ab9e17f28163 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/service/UpdateIndicesService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/service/UpdateIndicesService.java @@ -80,6 +80,7 @@ public class UpdateIndicesService implements SearchIndicesService { private final SystemMetadataService _systemMetadataService; private final SearchDocumentTransformer _searchDocumentTransformer; private final EntityIndexBuilders _entityIndexBuilders; + @Nonnull private final String idHashAlgo; @Value("${featureFlags.graphServiceDiffModeEnabled:true}") private boolean _graphDiffMode; @@ -117,13 +118,15 @@ public UpdateIndicesService( TimeseriesAspectService timeseriesAspectService, SystemMetadataService systemMetadataService, SearchDocumentTransformer searchDocumentTransformer, - EntityIndexBuilders entityIndexBuilders) { + EntityIndexBuilders entityIndexBuilders, + @Nonnull String idHashAlgo) { _graphService = graphService; _entitySearchService = entitySearchService; _timeseriesAspectService = timeseriesAspectService; _systemMetadataService = systemMetadataService; _searchDocumentTransformer = searchDocumentTransformer; _entityIndexBuilders = entityIndexBuilders; + this.idHashAlgo = idHashAlgo; } @Override @@ -601,7 +604,9 @@ private void updateTimeseriesFields( SystemMetadata systemMetadata) { Map documents; try { - documents = TimeseriesAspectTransformer.transform(urn, aspect, aspectSpec, systemMetadata); + documents = + 
TimeseriesAspectTransformer.transform( + urn, aspect, aspectSpec, systemMetadata, idHashAlgo); } catch (JsonProcessingException e) { log.error("Failed to generate timeseries document from aspect: {}", e.toString()); return; diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/transformer/TimeseriesAspectTransformer.java b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/transformer/TimeseriesAspectTransformer.java index cf0a3f1466d25..c353e601a31b7 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/transformer/TimeseriesAspectTransformer.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/transformer/TimeseriesAspectTransformer.java @@ -54,7 +54,8 @@ public static Map transform( @Nonnull final Urn urn, @Nonnull final RecordTemplate timeseriesAspect, @Nonnull final AspectSpec aspectSpec, - @Nullable final SystemMetadata systemMetadata) + @Nullable final SystemMetadata systemMetadata, + @Nonnull final String idHashAlgo) throws JsonProcessingException { ObjectNode commonDocument = getCommonDocument(urn, timeseriesAspect, systemMetadata); Map finalDocuments = new HashMap<>(); @@ -74,7 +75,7 @@ public static Map transform( final Map> timeseriesFieldValueMap = FieldExtractor.extractFields(timeseriesAspect, aspectSpec.getTimeseriesFieldSpecs()); timeseriesFieldValueMap.forEach((k, v) -> setTimeseriesField(document, k, v)); - finalDocuments.put(getDocId(document, null), document); + finalDocuments.put(getDocId(document, null, idHashAlgo), document); // Create new rows for the member collection fields. 
final Map> timeseriesFieldCollectionValueMap = @@ -83,7 +84,7 @@ public static Map transform( timeseriesFieldCollectionValueMap.forEach( (key, values) -> finalDocuments.putAll( - getTimeseriesFieldCollectionDocuments(key, values, commonDocument))); + getTimeseriesFieldCollectionDocuments(key, values, commonDocument, idHashAlgo))); return finalDocuments; } @@ -216,12 +217,13 @@ private static void setTimeseriesField( private static Map getTimeseriesFieldCollectionDocuments( final TimeseriesFieldCollectionSpec fieldSpec, final List values, - final ObjectNode commonDocument) { + final ObjectNode commonDocument, + @Nonnull final String idHashAlgo) { return values.stream() .map(value -> getTimeseriesFieldCollectionDocument(fieldSpec, value, commonDocument)) .collect( Collectors.toMap( - keyDocPair -> getDocId(keyDocPair.getSecond(), keyDocPair.getFirst()), + keyDocPair -> getDocId(keyDocPair.getSecond(), keyDocPair.getFirst(), idHashAlgo), Pair::getSecond)); } @@ -257,9 +259,9 @@ private static Pair getTimeseriesFieldCollectionDocument( finalDocument); } - private static String getDocId(@Nonnull JsonNode document, String collectionId) + private static String getDocId( + @Nonnull JsonNode document, String collectionId, @Nonnull String idHashAlgo) throws IllegalArgumentException { - String hashAlgo = System.getenv("ELASTIC_ID_HASH_ALGO"); String docId = document.get(MappingsBuilder.TIMESTAMP_MILLIS_FIELD).toString(); JsonNode eventGranularity = document.get(MappingsBuilder.EVENT_GRANULARITY); if (eventGranularity != null) { @@ -278,9 +280,9 @@ private static String getDocId(@Nonnull JsonNode document, String collectionId) docId += partitionSpec.toString(); } - if (hashAlgo.equalsIgnoreCase("SHA-256")) { + if (idHashAlgo.equalsIgnoreCase("SHA-256")) { return DigestUtils.sha256Hex(docId); - } else if (hashAlgo.equalsIgnoreCase("MD5")) { + } else if (idHashAlgo.equalsIgnoreCase("MD5")) { return DigestUtils.md5Hex(docId); } throw new IllegalArgumentException("Hash function 
not handled !"); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/graph/search/SearchGraphServiceTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/graph/search/SearchGraphServiceTestBase.java index 06f1369ff0670..d1a51b1d69b2c 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/graph/search/SearchGraphServiceTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/graph/search/SearchGraphServiceTestBase.java @@ -62,7 +62,7 @@ public abstract class SearchGraphServiceTestBase extends GraphServiceTestBase { @Nonnull protected abstract ESIndexBuilder getIndexBuilder(); - private final IndexConvention _indexConvention = IndexConventionImpl.NO_PREFIX; + private final IndexConvention _indexConvention = IndexConventionImpl.noPrefix("MD5"); private final String _indexName = _indexConvention.getIndexName(INDEX_NAME); private ElasticSearchGraphService _client; @@ -108,7 +108,8 @@ private ElasticSearchGraphService buildService(boolean enableMultiPathSearch) { _indexConvention, writeDAO, readDAO, - getIndexBuilder()); + getIndexBuilder(), + "MD5"); } @Override diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/LineageServiceTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/search/LineageServiceTestBase.java index a9d84ae1f3aea..99e4923885a41 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/LineageServiceTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/LineageServiceTestBase.java @@ -122,7 +122,7 @@ public void setup() throws RemoteInvocationException, URISyntaxException { operationContext = TestOperationContexts.systemContextNoSearchAuthorization( new SnapshotEntityRegistry(new Snapshot()), - new IndexConventionImpl("lineage_search_service_test")) + new IndexConventionImpl("lineage_search_service_test", "MD5")) .asSession(RequestContext.TEST, Authorizer.EMPTY, TestOperationContexts.TEST_USER_AUTH); settingsBuilder = new SettingsBuilder(null); 
elasticSearchService = buildEntitySearchService(); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/SearchServiceTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/search/SearchServiceTestBase.java index 445b71b2eaff6..5e30e01a8ea69 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/SearchServiceTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/SearchServiceTestBase.java @@ -79,7 +79,7 @@ public void setup() throws RemoteInvocationException, URISyntaxException { operationContext = TestOperationContexts.systemContextNoSearchAuthorization( new SnapshotEntityRegistry(new Snapshot()), - new IndexConventionImpl("search_service_test")) + new IndexConventionImpl("search_service_test", "MD5")) .asSession(RequestContext.TEST, Authorizer.EMPTY, TestOperationContexts.TEST_USER_AUTH); settingsBuilder = new SettingsBuilder(null); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/TestEntityTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/search/TestEntityTestBase.java index ab5e90f77c21a..282a3d8e3ea6a 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/TestEntityTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/TestEntityTestBase.java @@ -62,7 +62,8 @@ public abstract class TestEntityTestBase extends AbstractTestNGSpringContextTest public void setup() { opContext = TestOperationContexts.systemContextNoSearchAuthorization( - new SnapshotEntityRegistry(new Snapshot()), new IndexConventionImpl("es_service_test")); + new SnapshotEntityRegistry(new Snapshot()), + new IndexConventionImpl("es_service_test", "MD5")); settingsBuilder = new SettingsBuilder(null); elasticSearchService = buildService(); elasticSearchService.reindexAll(Collections.emptySet()); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/query/BrowseDAOTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/query/BrowseDAOTest.java index 
a0288d019644b..8044515e3dc6a 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/query/BrowseDAOTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/query/BrowseDAOTest.java @@ -45,7 +45,7 @@ public void setup() throws RemoteInvocationException, URISyntaxException { mockClient = mock(RestHighLevelClient.class); opContext = TestOperationContexts.systemContextNoSearchAuthorization( - new IndexConventionImpl("es_browse_dao_test")); + new IndexConventionImpl("es_browse_dao_test", "MD5")); browseDAO = new ESBrowseDAO(mockClient, searchConfiguration, customSearchConfiguration); } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/systemmetadata/SystemMetadataServiceTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/systemmetadata/SystemMetadataServiceTestBase.java index af7005c93c46d..1b9d8c57b4cad 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/systemmetadata/SystemMetadataServiceTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/systemmetadata/SystemMetadataServiceTestBase.java @@ -32,7 +32,7 @@ public abstract class SystemMetadataServiceTestBase extends AbstractTestNGSpring protected abstract ESIndexBuilder getIndexBuilder(); private final IndexConvention _indexConvention = - new IndexConventionImpl("es_system_metadata_service_test"); + new IndexConventionImpl("es_system_metadata_service_test", "MD5"); private ElasticSearchSystemMetadataService _client; diff --git a/metadata-io/src/test/java/com/linkedin/metadata/timeseries/search/TimeseriesAspectServiceTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/timeseries/search/TimeseriesAspectServiceTestBase.java index 10c6f09cb8f8d..414183c8882f9 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/timeseries/search/TimeseriesAspectServiceTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/timeseries/search/TimeseriesAspectServiceTestBase.java @@ -126,7 +126,7 @@ public void setup() throws 
RemoteInvocationException, URISyntaxException { opContext = TestOperationContexts.systemContextNoSearchAuthorization( - entityRegistry, new IndexConventionImpl("es_timeseries_aspect_service_test")); + entityRegistry, new IndexConventionImpl("es_timeseries_aspect_service_test", "MD5")); elasticSearchTimeseriesAspectService = buildService(); elasticSearchTimeseriesAspectService.reindexAll(Collections.emptySet()); @@ -152,7 +152,7 @@ private ElasticSearchTimeseriesAspectService buildService() { private void upsertDocument(TestEntityProfile dp, Urn urn) throws JsonProcessingException { Map documents = - TimeseriesAspectTransformer.transform(urn, dp, aspectSpec, null); + TimeseriesAspectTransformer.transform(urn, dp, aspectSpec, null, "MD5"); assertEquals(documents.size(), 3); documents.forEach( (key, value) -> diff --git a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SampleDataFixtureConfiguration.java b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SampleDataFixtureConfiguration.java index 28a4a2b00cd6f..6a95d16c25437 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SampleDataFixtureConfiguration.java +++ b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SampleDataFixtureConfiguration.java @@ -86,12 +86,12 @@ protected String longTailIndexPrefix() { @Bean(name = "sampleDataIndexConvention") protected IndexConvention indexConvention(@Qualifier("sampleDataPrefix") String prefix) { - return new IndexConventionImpl(prefix); + return new IndexConventionImpl(prefix, "MD5"); } @Bean(name = "longTailIndexConvention") protected IndexConvention longTailIndexConvention(@Qualifier("longTailPrefix") String prefix) { - return new IndexConventionImpl(prefix); + return new IndexConventionImpl(prefix, "MD5"); } @Bean(name = "sampleDataFixtureName") diff --git a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SearchLineageFixtureConfiguration.java 
b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SearchLineageFixtureConfiguration.java index e783c011de6d0..33e04af83c0a3 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SearchLineageFixtureConfiguration.java +++ b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SearchLineageFixtureConfiguration.java @@ -71,7 +71,7 @@ protected String indexPrefix() { @Bean(name = "searchLineageIndexConvention") protected IndexConvention indexConvention(@Qualifier("searchLineagePrefix") String prefix) { - return new IndexConventionImpl(prefix); + return new IndexConventionImpl(prefix, "MD5"); } @Bean(name = "searchLineageFixtureName") @@ -173,7 +173,8 @@ protected ElasticSearchGraphService graphService( new ESGraphWriteDAO(indexConvention, bulkProcessor, 1), new ESGraphQueryDAO( searchClient, lineageRegistry, indexConvention, getGraphQueryConfiguration()), - indexBuilder); + indexBuilder, + indexConvention.getIdHashAlgo()); graphService.reindexAll(Collections.emptySet()); return graphService; } diff --git a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/UpdateIndicesHookTest.java b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/UpdateIndicesHookTest.java index 411fe02260bb1..4cd59992eb2f0 100644 --- a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/UpdateIndicesHookTest.java +++ b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/UpdateIndicesHookTest.java @@ -125,7 +125,8 @@ public void setupTest() { mockTimeseriesAspectService, mockSystemMetadataService, searchDocumentTransformer, - mockEntityIndexBuilders); + mockEntityIndexBuilders, + "MD5"); OperationContext systemOperationContext = TestOperationContexts.systemContextNoSearchAuthorization(); @@ -235,7 +236,8 @@ public void testInputFieldsEdgesAreAdded() throws Exception { mockTimeseriesAspectService, mockSystemMetadataService, searchDocumentTransformer, - 
mockEntityIndexBuilders); + mockEntityIndexBuilders, + "MD5"); updateIndicesHook = new UpdateIndicesHook(updateIndicesService, true, false); updateIndicesHook.init( diff --git a/metadata-operation-context/src/main/java/io/datahubproject/metadata/context/SearchContext.java b/metadata-operation-context/src/main/java/io/datahubproject/metadata/context/SearchContext.java index c067e91c3524c..5ad7bdc14820c 100644 --- a/metadata-operation-context/src/main/java/io/datahubproject/metadata/context/SearchContext.java +++ b/metadata-operation-context/src/main/java/io/datahubproject/metadata/context/SearchContext.java @@ -21,7 +21,7 @@ public class SearchContext implements ContextInterface { public static SearchContext EMPTY = - SearchContext.builder().indexConvention(IndexConventionImpl.NO_PREFIX).build(); + SearchContext.builder().indexConvention(IndexConventionImpl.noPrefix("")).build(); public static SearchContext withFlagDefaults( @Nonnull SearchContext searchContext, diff --git a/metadata-operation-context/src/main/java/io/datahubproject/test/metadata/context/TestOperationContexts.java b/metadata-operation-context/src/main/java/io/datahubproject/test/metadata/context/TestOperationContexts.java index e54c040fe13b5..76f58fb475108 100644 --- a/metadata-operation-context/src/main/java/io/datahubproject/test/metadata/context/TestOperationContexts.java +++ b/metadata-operation-context/src/main/java/io/datahubproject/test/metadata/context/TestOperationContexts.java @@ -191,7 +191,7 @@ public static OperationContext systemContext( IndexConvention indexConvention = Optional.ofNullable(indexConventionSupplier) .map(Supplier::get) - .orElse(IndexConventionImpl.NO_PREFIX); + .orElse(IndexConventionImpl.noPrefix("MD5")); ServicesRegistryContext servicesRegistryContext = Optional.ofNullable(servicesRegistrySupplier).orElse(() -> null).get(); diff --git a/metadata-operation-context/src/test/java/io/datahubproject/metadata/context/SearchContextTest.java 
b/metadata-operation-context/src/test/java/io/datahubproject/metadata/context/SearchContextTest.java index 4858bb342258a..2e0585cc82a4f 100644 --- a/metadata-operation-context/src/test/java/io/datahubproject/metadata/context/SearchContextTest.java +++ b/metadata-operation-context/src/test/java/io/datahubproject/metadata/context/SearchContextTest.java @@ -12,26 +12,26 @@ public class SearchContextTest { @Test public void searchContextId() { SearchContext testNoFlags = - SearchContext.builder().indexConvention(IndexConventionImpl.NO_PREFIX).build(); + SearchContext.builder().indexConvention(IndexConventionImpl.noPrefix("MD5")).build(); assertEquals( testNoFlags.getCacheKeyComponent(), SearchContext.builder() - .indexConvention(IndexConventionImpl.NO_PREFIX) + .indexConvention(IndexConventionImpl.noPrefix("MD5")) .build() .getCacheKeyComponent(), "Expected consistent context ids across instances"); SearchContext testWithFlags = SearchContext.builder() - .indexConvention(IndexConventionImpl.NO_PREFIX) + .indexConvention(IndexConventionImpl.noPrefix("MD5")) .searchFlags(new SearchFlags().setFulltext(true)) .build(); assertEquals( testWithFlags.getCacheKeyComponent(), SearchContext.builder() - .indexConvention(IndexConventionImpl.NO_PREFIX) + .indexConvention(IndexConventionImpl.noPrefix("MD5")) .searchFlags(new SearchFlags().setFulltext(true)) .build() .getCacheKeyComponent(), @@ -44,7 +44,7 @@ public void searchContextId() { assertNotEquals( testWithFlags.getCacheKeyComponent(), SearchContext.builder() - .indexConvention(IndexConventionImpl.NO_PREFIX) + .indexConvention(IndexConventionImpl.noPrefix("MD5")) .searchFlags(new SearchFlags().setFulltext(true).setIncludeRestricted(true)) .build() .getCacheKeyComponent(), @@ -53,7 +53,7 @@ public void searchContextId() { assertNotEquals( testNoFlags.getCacheKeyComponent(), SearchContext.builder() - .indexConvention(new IndexConventionImpl("Some Prefix")) + .indexConvention(new IndexConventionImpl("Some Prefix", "MD5")) 
.searchFlags(null) .build() .getCacheKeyComponent(), @@ -61,7 +61,7 @@ public void searchContextId() { assertNotEquals( SearchContext.builder() - .indexConvention(IndexConventionImpl.NO_PREFIX) + .indexConvention(IndexConventionImpl.noPrefix("MD5")) .searchFlags( new SearchFlags() .setFulltext(false) @@ -70,7 +70,7 @@ public void searchContextId() { .build() .getCacheKeyComponent(), SearchContext.builder() - .indexConvention(IndexConventionImpl.NO_PREFIX) + .indexConvention(IndexConventionImpl.noPrefix("MD5")) .searchFlags(new SearchFlags().setFulltext(true).setIncludeRestricted(true)) .build() .getCacheKeyComponent(), @@ -80,7 +80,7 @@ public void searchContextId() { @Test public void testImmutableSearchFlags() { SearchContext initial = - SearchContext.builder().indexConvention(IndexConventionImpl.NO_PREFIX).build(); + SearchContext.builder().indexConvention(IndexConventionImpl.noPrefix("MD5")).build(); assertEquals(initial.getSearchFlags(), new SearchFlags().setSkipCache(false)); SearchContext mutated = initial.withFlagDefaults(flags -> flags.setSkipCache(true)); diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/ElasticSearchConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/ElasticSearchConfiguration.java index 130620a9ab918..7d68e18940401 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/ElasticSearchConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/ElasticSearchConfiguration.java @@ -8,4 +8,5 @@ public class ElasticSearchConfiguration { private BuildIndicesConfiguration buildIndices; public String implementation; private SearchConfiguration search; + private String idHashAlgo; } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/ElasticSearchGraphServiceFactory.java 
b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/ElasticSearchGraphServiceFactory.java index eb56e8d42c158..55eb931625fec 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/ElasticSearchGraphServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/ElasticSearchGraphServiceFactory.java @@ -11,6 +11,7 @@ import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; @@ -30,7 +31,8 @@ public class ElasticSearchGraphServiceFactory { @Bean(name = "elasticSearchGraphService") @Nonnull - protected ElasticSearchGraphService getInstance() { + protected ElasticSearchGraphService getInstance( + @Value("${elasticsearch.idHashAlgo}") final String idHashAlgo) { LineageRegistry lineageRegistry = new LineageRegistry(entityRegistry); return new ElasticSearchGraphService( lineageRegistry, @@ -45,6 +47,7 @@ protected ElasticSearchGraphService getInstance() { lineageRegistry, components.getIndexConvention(), configurationProvider.getElasticSearch().getSearch().getGraph()), - components.getIndexBuilder()); + components.getIndexBuilder(), + idHashAlgo); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/IndexConventionFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/IndexConventionFactory.java index 5b76a3f2cb833..2288c8d4ecd50 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/IndexConventionFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/IndexConventionFactory.java @@ -19,7 +19,8 @@ public class 
IndexConventionFactory { private String indexPrefix; @Bean(name = INDEX_CONVENTION_BEAN) - protected IndexConvention createInstance() { - return new IndexConventionImpl(indexPrefix); + protected IndexConvention createInstance( + @Value("${elasticsearch.idHashAlgo}") final String isHashAlgo) { + return new IndexConventionImpl(indexPrefix, isHashAlgo); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/update/indices/UpdateIndicesServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/update/indices/UpdateIndicesServiceFactory.java index fad9d0eaf3b45..38a344f8be8e9 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/update/indices/UpdateIndicesServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/update/indices/UpdateIndicesServiceFactory.java @@ -9,6 +9,7 @@ import com.linkedin.metadata.service.UpdateIndicesService; import com.linkedin.metadata.systemmetadata.SystemMetadataService; import com.linkedin.metadata.timeseries.TimeseriesAspectService; +import org.springframework.beans.factory.annotation.Value; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; @@ -30,7 +31,8 @@ public UpdateIndicesService searchIndicesServiceNonGMS( TimeseriesAspectService timeseriesAspectService, SystemMetadataService systemMetadataService, SearchDocumentTransformer searchDocumentTransformer, - EntityIndexBuilders entityIndexBuilders) { + EntityIndexBuilders entityIndexBuilders, + @Value("${elasticsearch.idHashAlgo}") final String idHashAlgo) { return new UpdateIndicesService( graphService, @@ -38,7 +40,8 @@ public UpdateIndicesService searchIndicesServiceNonGMS( timeseriesAspectService, systemMetadataService, searchDocumentTransformer, - entityIndexBuilders); + entityIndexBuilders, + 
idHashAlgo); } @Bean @@ -50,7 +53,8 @@ public UpdateIndicesService searchIndicesServiceGMS( final SystemMetadataService systemMetadataService, final SearchDocumentTransformer searchDocumentTransformer, final EntityIndexBuilders entityIndexBuilders, - final EntityService entityService) { + final EntityService entityService, + @Value("${elasticsearch.idHashAlgo}") final String idHashAlgo) { UpdateIndicesService updateIndicesService = new UpdateIndicesService( @@ -59,7 +63,8 @@ public UpdateIndicesService searchIndicesServiceGMS( timeseriesAspectService, systemMetadataService, searchDocumentTransformer, - entityIndexBuilders); + entityIndexBuilders, + idHashAlgo); entityService.setUpdateIndicesService(updateIndicesService); diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/usage/UsageStats.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/usage/UsageStats.java index 518dfecd57680..1b003fec82e8b 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/usage/UsageStats.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/usage/UsageStats.java @@ -2,25 +2,20 @@ import static com.datahub.authorization.AuthUtil.isAPIAuthorized; import static com.datahub.authorization.AuthUtil.isAPIAuthorizedEntityUrns; -import static com.linkedin.metadata.Constants.*; import static com.linkedin.metadata.authorization.ApiOperation.UPDATE; import static com.linkedin.metadata.timeseries.elastic.UsageServiceUtil.USAGE_STATS_ASPECT_NAME; import static com.linkedin.metadata.timeseries.elastic.UsageServiceUtil.USAGE_STATS_ENTITY_NAME; import com.codahale.metrics.MetricRegistry; -import com.codahale.metrics.Timer; import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; import com.datahub.authorization.EntitySpec; import com.datahub.plugins.auth.authorization.Authorizer; import 
com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; import com.linkedin.common.WindowDuration; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; -import com.linkedin.data.template.StringArray; import com.linkedin.dataset.DatasetFieldUsageCounts; import com.linkedin.dataset.DatasetFieldUsageCountsArray; import com.linkedin.dataset.DatasetUsageStatistics; @@ -29,17 +24,10 @@ import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.registry.EntityRegistry; -import com.linkedin.metadata.query.filter.Condition; -import com.linkedin.metadata.query.filter.ConjunctiveCriterion; -import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; -import com.linkedin.metadata.query.filter.Criterion; -import com.linkedin.metadata.query.filter.CriterionArray; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.restli.RestliUtil; import com.linkedin.metadata.timeseries.TimeseriesAspectService; import com.linkedin.metadata.timeseries.elastic.UsageServiceUtil; import com.linkedin.metadata.timeseries.transformer.TimeseriesAspectTransformer; -import com.linkedin.metadata.utils.metrics.MetricUtils; import com.linkedin.parseq.Task; import com.linkedin.restli.common.HttpStatus; import com.linkedin.restli.server.RestLiServiceException; @@ -47,35 +35,20 @@ import com.linkedin.restli.server.annotations.ActionParam; import com.linkedin.restli.server.annotations.RestLiSimpleResource; import com.linkedin.restli.server.resources.SimpleResourceTemplate; -import com.linkedin.timeseries.AggregationSpec; -import com.linkedin.timeseries.AggregationType; -import com.linkedin.timeseries.CalendarInterval; -import com.linkedin.timeseries.GenericTable; -import com.linkedin.timeseries.GroupingBucket; 
-import com.linkedin.timeseries.GroupingBucketType; import com.linkedin.timeseries.TimeWindowSize; import com.linkedin.usage.FieldUsageCounts; -import com.linkedin.usage.FieldUsageCountsArray; import com.linkedin.usage.UsageAggregation; -import com.linkedin.usage.UsageAggregationArray; import com.linkedin.usage.UsageAggregationMetrics; import com.linkedin.usage.UsageQueryResult; -import com.linkedin.usage.UsageQueryResultAggregations; import com.linkedin.usage.UsageTimeRange; import com.linkedin.usage.UserUsageCounts; -import com.linkedin.usage.UserUsageCountsArray; import io.datahubproject.metadata.context.OperationContext; import io.datahubproject.metadata.context.RequestContext; import io.opentelemetry.extension.annotations.WithSpan; -import java.net.URISyntaxException; -import java.time.Instant; -import java.util.ArrayList; import java.util.Arrays; -import java.util.List; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; -import java.util.concurrent.TimeUnit; import javax.annotation.Nonnull; import javax.inject.Inject; import javax.inject.Named; @@ -255,7 +228,8 @@ private void ingest(@Nonnull OperationContext opContext, @Nonnull UsageAggregati try { documents = TimeseriesAspectTransformer.transform( - bucket.getResource(), datasetUsageStatistics, getUsageStatsAspectSpec(), null); + bucket.getResource(), datasetUsageStatistics, getUsageStatsAspectSpec(), null, + systemOperationContext.getSearchContext().getIndexConvention().getIdHashAlgo()); } catch (JsonProcessingException e) { log.error("Failed to generate timeseries document from aspect: {}", e.toString()); return; diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/elasticsearch/IndexConvention.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/elasticsearch/IndexConvention.java index 4a3f78fcef7bd..87aebabf64366 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/elasticsearch/IndexConvention.java +++ 
b/metadata-utils/src/main/java/com/linkedin/metadata/utils/elasticsearch/IndexConvention.java @@ -47,4 +47,7 @@ public interface IndexConvention { * if one cannot be extracted */ Optional> getEntityAndAspectName(String timeseriesAspectIndexName); + + @Nonnull + String getIdHashAlgo(); } diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/elasticsearch/IndexConventionImpl.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/elasticsearch/IndexConventionImpl.java index 47801cd2054fa..2c9c927cd8c34 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/elasticsearch/IndexConventionImpl.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/elasticsearch/IndexConventionImpl.java @@ -8,25 +8,30 @@ import java.util.concurrent.ConcurrentHashMap; import javax.annotation.Nonnull; import javax.annotation.Nullable; +import lombok.Getter; import org.apache.commons.lang3.StringUtils; // Default implementation of search index naming convention public class IndexConventionImpl implements IndexConvention { - public static final IndexConvention NO_PREFIX = new IndexConventionImpl(null); + public static IndexConvention noPrefix(@Nonnull String idHashAlgo) { + return new IndexConventionImpl(null, idHashAlgo); + } // Map from Entity name -> Index name private final Map indexNameMapping = new ConcurrentHashMap<>(); private final Optional _prefix; private final String _getAllEntityIndicesPattern; private final String _getAllTimeseriesIndicesPattern; + @Getter private final String idHashAlgo; private static final String ENTITY_INDEX_VERSION = "v2"; private static final String ENTITY_INDEX_SUFFIX = "index"; private static final String TIMESERIES_INDEX_VERSION = "v1"; private static final String TIMESERIES_ENTITY_INDEX_SUFFIX = "aspect"; - public IndexConventionImpl(@Nullable String prefix) { + public IndexConventionImpl(@Nullable String prefix, String idHashAlgo) { _prefix = StringUtils.isEmpty(prefix) ? 
Optional.empty() : Optional.of(prefix); + this.idHashAlgo = idHashAlgo; _getAllEntityIndicesPattern = _prefix.map(p -> p + "_").orElse("") + "*" diff --git a/metadata-utils/src/test/java/com/linkedin/metadata/utils/elasticsearch/IndexConventionImplTest.java b/metadata-utils/src/test/java/com/linkedin/metadata/utils/elasticsearch/IndexConventionImplTest.java index 8074f344cd244..2f6c7138d3c4f 100644 --- a/metadata-utils/src/test/java/com/linkedin/metadata/utils/elasticsearch/IndexConventionImplTest.java +++ b/metadata-utils/src/test/java/com/linkedin/metadata/utils/elasticsearch/IndexConventionImplTest.java @@ -10,7 +10,7 @@ public class IndexConventionImplTest { @Test public void testIndexConventionNoPrefix() { - IndexConvention indexConventionNoPrefix = IndexConventionImpl.NO_PREFIX; + IndexConvention indexConventionNoPrefix = IndexConventionImpl.noPrefix("MD5"); String entityName = "dataset"; String expectedIndexName = "datasetindex_v2"; assertEquals(indexConventionNoPrefix.getEntityIndexName(entityName), expectedIndexName); @@ -25,7 +25,7 @@ public void testIndexConventionNoPrefix() { @Test public void testIndexConventionPrefix() { - IndexConvention indexConventionPrefix = new IndexConventionImpl("prefix"); + IndexConvention indexConventionPrefix = new IndexConventionImpl("prefix", "MD5"); String entityName = "dataset"; String expectedIndexName = "prefix_datasetindex_v2"; assertEquals(indexConventionPrefix.getEntityIndexName(entityName), expectedIndexName); @@ -42,7 +42,7 @@ public void testIndexConventionPrefix() { @Test public void testTimeseriesIndexConventionNoPrefix() { - IndexConvention indexConventionNoPrefix = IndexConventionImpl.NO_PREFIX; + IndexConvention indexConventionNoPrefix = IndexConventionImpl.noPrefix("MD5"); String entityName = "dataset"; String aspectName = "datasetusagestatistics"; String expectedIndexName = "dataset_datasetusagestatisticsaspect_v1"; @@ -64,7 +64,7 @@ public void testTimeseriesIndexConventionNoPrefix() { @Test public void 
testTimeseriesIndexConventionPrefix() { - IndexConvention indexConventionPrefix = new IndexConventionImpl("prefix"); + IndexConvention indexConventionPrefix = new IndexConventionImpl("prefix", "MD5"); String entityName = "dataset"; String aspectName = "datasetusagestatistics"; String expectedIndexName = "prefix_dataset_datasetusagestatisticsaspect_v1"; diff --git a/smoke-test/run-quickstart.sh b/smoke-test/run-quickstart.sh index 2bf5cdf8ca9c4..eb0d46b317244 100755 --- a/smoke-test/run-quickstart.sh +++ b/smoke-test/run-quickstart.sh @@ -16,8 +16,6 @@ DATAHUB_SEARCH_TAG="${DATAHUB_SEARCH_TAG:=2.9.0}" XPACK_SECURITY_ENABLED="${XPACK_SECURITY_ENABLED:=plugins.security.disabled=true}" ELASTICSEARCH_USE_SSL="${ELASTICSEARCH_USE_SSL:=false}" USE_AWS_ELASTICSEARCH="${USE_AWS_ELASTICSEARCH:=true}" -ELASTIC_ID_HASH_ALGO="${ELASTIC_ID_HASH_ALGO:=MD5}" - DATAHUB_TELEMETRY_ENABLED=false \ DOCKER_COMPOSE_BASE="file://$( dirname "$DIR" )" \ diff --git a/smoke-test/set-test-env-vars.sh b/smoke-test/set-test-env-vars.sh index dee3af2b68747..4668721f80de0 100644 --- a/smoke-test/set-test-env-vars.sh +++ b/smoke-test/set-test-env-vars.sh @@ -1,3 +1,2 @@ export DATAHUB_KAFKA_SCHEMA_REGISTRY_URL=http://localhost:8080/schema-registry/api -export DATAHUB_GMS_URL=http://localhost:8080 -export ELASTIC_ID_HASH_ALGO="MD5" \ No newline at end of file +export DATAHUB_GMS_URL=http://localhost:8080 \ No newline at end of file From 0e698173fe9d34dce9e2fe480e1ddd6d867b1b8d Mon Sep 17 00:00:00 2001 From: dushayntAW <158567391+dushayntAW@users.noreply.github.com> Date: Mon, 19 Aug 2024 02:30:27 +0200 Subject: [PATCH 58/72] fix(ingestion/airflow-plugin): fixed missing inlet/outlets (#11101) Co-authored-by: Harshal Sheth --- .../integration/dags/custom_operator_dag.py | 74 ++++ .../goldens/v2_custom_operator_dag.json | 365 ++++++++++++++++ ...2_custom_operator_dag_no_dag_listener.json | 404 ++++++++++++++++++ .../tests/integration/test_plugin.py | 5 +- 4 files changed, 847 insertions(+), 1 
deletion(-) create mode 100644 metadata-ingestion-modules/airflow-plugin/tests/integration/dags/custom_operator_dag.py create mode 100644 metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_custom_operator_dag.json create mode 100644 metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_custom_operator_dag_no_dag_listener.json diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/custom_operator_dag.py b/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/custom_operator_dag.py new file mode 100644 index 0000000000000..b31226b7b4cee --- /dev/null +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/custom_operator_dag.py @@ -0,0 +1,74 @@ +import logging +from datetime import datetime, timedelta +from typing import Any, List, Tuple + +from airflow import DAG +from airflow.models.baseoperator import BaseOperator + +from datahub_airflow_plugin.entities import Dataset + +logger = logging.getLogger(__name__) + + +class CustomOperator(BaseOperator): + def __init__(self, name, **kwargs): + super().__init__(**kwargs) + self.name = name + + def execute(self, context): + """ + Other code.... + """ + logger.info("executing other code here") + + input_tables = ["mydb.schema.tableA", "mydb.schema.tableB"] + output_tables = ["mydb.schema.tableD"] + + inlets, outlets = self._get_sf_lineage(input_tables, output_tables) + + context["ti"].task.inlets = inlets + context["ti"].task.outlets = outlets + + @staticmethod + def _get_sf_lineage( + input_tables: List[str], output_tables: List[str] + ) -> Tuple[List[Any], List[Any]]: + """ + Get lineage tables from Snowflake. 
+ """ + inlets: List[Dataset] = [] + outlets: List[Dataset] = [] + + for table in input_tables: + inlets.append(Dataset(platform="snowflake", name=table)) + + for table in output_tables: + outlets.append(Dataset(platform="snowflake", name=table)) + + return inlets, outlets + + +default_args = { + "owner": "airflow", + "depends_on_past": False, + "start_date": datetime(2023, 1, 1), + "email": ["jdoe@example.com"], + "email_on_failure": False, + "execution_timeout": timedelta(minutes=5), +} + + +with DAG( + "custom_operator_dag", + default_args=default_args, + description="An example dag with custom operator", + schedule_interval=None, + tags=["example_tag"], + catchup=False, + default_view="tree", +) as dag: + custom_task = CustomOperator( + task_id="custom_task_id", + name="custom_name", + dag=dag, + ) diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_custom_operator_dag.json b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_custom_operator_dag.json new file mode 100644 index 0000000000000..b81466930ed41 --- /dev/null +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_custom_operator_dag.json @@ -0,0 +1,365 @@ +[ +{ + "entityType": "dataFlow", + "entityUrn": "urn:li:dataFlow:(airflow,custom_operator_dag,prod)", + "changeType": "UPSERT", + "aspectName": "dataFlowInfo", + "aspect": { + "json": { + "customProperties": { + "_access_control": "None", + "catchup": "False", + "description": "'An example dag with custom operator'", + "doc_md": "None", + "fileloc": "", + "is_paused_upon_creation": "None", + "start_date": "None", + "tags": "['example_tag']", + "timezone": "Timezone('UTC')" + }, + "externalUrl": "http://airflow.example.com/tree?dag_id=custom_operator_dag", + "name": "custom_operator_dag", + "description": "An example dag with custom operator" + } + } +}, +{ + "entityType": "dataFlow", + "entityUrn": "urn:li:dataFlow:(airflow,custom_operator_dag,prod)", + "changeType": "UPSERT", 
+ "aspectName": "ownership", + "aspect": { + "json": { + "owners": [ + { + "owner": "urn:li:corpuser:airflow", + "type": "DEVELOPER", + "source": { + "type": "SERVICE" + } + } + ], + "ownerTypes": {}, + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:airflow" + } + } + } +}, +{ + "entityType": "dataFlow", + "entityUrn": "urn:li:dataFlow:(airflow,custom_operator_dag,prod)", + "changeType": "UPSERT", + "aspectName": "globalTags", + "aspect": { + "json": { + "tags": [ + { + "tag": "urn:li:tag:example_tag" + } + ] + } + } +}, +{ + "entityType": "dataFlow", + "entityUrn": "urn:li:dataFlow:(airflow,custom_operator_dag,prod)", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + } +}, +{ + "entityType": "dataJob", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(airflow,custom_operator_dag,prod),custom_task_id)", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + } +}, +{ + "entityType": "tag", + "entityUrn": "urn:li:tag:example_tag", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + } +}, +{ + "entityType": "dataFlow", + "entityUrn": "urn:li:dataFlow:(airflow,custom_operator_dag,prod)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "custom_operator_dag" + } + ] + } + } +}, +{ + "entityType": "dataJob", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(airflow,custom_operator_dag,prod),custom_task_id)", + "changeType": "UPSERT", + "aspectName": "dataJobInfo", + "aspect": { + "json": { + "customProperties": { + "depends_on_past": "False", + "email": "['jdoe@example.com']", + "label": "'custom_task_id'", + "execution_timeout": "datetime.timedelta(seconds=300)", + "sla": "None", + "task_id": "'custom_task_id'", + "trigger_rule": "", + "wait_for_downstream": "False", + "downstream_task_ids": "[]", + "inlets": "[]", + "outlets": "[]", + 
"openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.18.0/integration/airflow\", \"_schemaURL\": \"https://openlineage.io/spec/2-0-2/OpenLineage.json#/$defs/BaseFacet\", \"unknownItems\": [{\"name\": \"CustomOperator\", \"properties\": {\"depends_on_past\": false, \"downstream_task_ids\": \"[]\", \"execution_timeout\": \"<>\", \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"is_setup\": false, \"is_teardown\": false, \"mapped\": false, \"operator_class\": \"custom_operator.CustomOperator\", \"owner\": \"airflow\", \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_exponential_backoff\": false, \"task_id\": \"custom_task_id\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": \"[]\", \"wait_for_downstream\": false, \"wait_for_past_depends_before_skipping\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" + }, + "externalUrl": "http://airflow.example.com/taskinstance/list/?flt1_dag_id_equals=custom_operator_dag&_flt_3_task_id=custom_task_id", + "name": "custom_task_id", + "type": { + "string": "COMMAND" + } + } + } +}, +{ + "entityType": "dataJob", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(airflow,custom_operator_dag,prod),custom_task_id)", + "changeType": "UPSERT", + "aspectName": "dataJobInputOutput", + "aspect": { + "json": { + "inputDatasets": [], + "outputDatasets": [], + "inputDatajobs": [], + "fineGrainedLineages": [] + } + } +}, +{ + "entityType": "dataProcessInstance", + "entityUrn": "urn:li:dataProcessInstance:07a4aaeffa3875a24cccd1fec6fc7c8c", + "changeType": "UPSERT", + "aspectName": "dataProcessInstanceProperties", + "aspect": { + "json": { + "customProperties": { + "run_id": "manual_run_test", + "duration": "", + "start_date": "", + "end_date": "", + "execution_date": "2023-09-27 21:34:38+00:00", + "try_number": "0", + "max_tries": "0", + "external_executor_id": "None", + "state": "running", + "operator": 
"CustomOperator", + "priority_weight": "1", + "log_url": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=custom_task_id&dag_id=custom_operator_dag&map_index=-1", + "orchestrator": "airflow", + "dag_id": "custom_operator_dag", + "task_id": "custom_task_id" + }, + "externalUrl": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=custom_task_id&dag_id=custom_operator_dag&map_index=-1", + "name": "custom_operator_dag_custom_task_id_manual_run_test", + "type": "BATCH_AD_HOC", + "created": { + "time": 1722943444074, + "actor": "urn:li:corpuser:datahub" + } + } + } +}, +{ + "entityType": "dataJob", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(airflow,custom_operator_dag,prod),custom_task_id)", + "changeType": "UPSERT", + "aspectName": "ownership", + "aspect": { + "json": { + "owners": [ + { + "owner": "urn:li:corpuser:airflow", + "type": "DEVELOPER", + "source": { + "type": "SERVICE" + } + } + ], + "ownerTypes": {}, + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:airflow" + } + } + } +}, +{ + "entityType": "dataProcessInstance", + "entityUrn": "urn:li:dataProcessInstance:07a4aaeffa3875a24cccd1fec6fc7c8c", + "changeType": "UPSERT", + "aspectName": "dataProcessInstanceRelationships", + "aspect": { + "json": { + "parentTemplate": "urn:li:dataJob:(urn:li:dataFlow:(airflow,custom_operator_dag,prod),custom_task_id)", + "upstreamInstances": [] + } + } +}, +{ + "entityType": "dataProcessInstance", + "entityUrn": "urn:li:dataProcessInstance:07a4aaeffa3875a24cccd1fec6fc7c8c", + "changeType": "UPSERT", + "aspectName": "dataProcessInstanceRunEvent", + "aspect": { + "json": { + "timestampMillis": 1722943444074, + "partitionSpec": { + "partition": "FULL_TABLE_SNAPSHOT", + "type": "FULL_TABLE" + }, + "status": "STARTED", + "attempt": 1 + } + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableA,PROD)", + "changeType": "UPSERT", + 
"aspectName": "datasetKey", + "aspect": { + "json": { + "platform": "urn:li:dataPlatform:snowflake", + "name": "mydb.schema.tableA", + "origin": "PROD" + } + } +}, +{ + "entityType": "dataJob", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(airflow,custom_operator_dag,prod),custom_task_id)", + "changeType": "UPSERT", + "aspectName": "dataJobInfo", + "aspect": { + "json": { + "customProperties": { + "depends_on_past": "False", + "email": "['jdoe@example.com']", + "label": "'custom_task_id'", + "execution_timeout": "datetime.timedelta(seconds=300)", + "sla": "None", + "task_id": "'custom_task_id'", + "trigger_rule": "", + "wait_for_downstream": "False", + "downstream_task_ids": "[]", + "inlets": "[Dataset(platform='snowflake', name='mydb.schema.tableA', env='PROD', platform_instance=None), Dataset(platform='snowflake', name='mydb.schema.tableB', env='PROD', platform_instance=None)]", + "outlets": "[Dataset(platform='snowflake', name='mydb.schema.tableD', env='PROD', platform_instance=None)]", + "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.18.0/integration/airflow\", \"_schemaURL\": \"https://openlineage.io/spec/2-0-2/OpenLineage.json#/$defs/BaseFacet\", \"unknownItems\": [{\"name\": \"CustomOperator\", \"properties\": {\"depends_on_past\": false, \"downstream_task_ids\": \"[]\", \"execution_timeout\": \"<>\", \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"is_setup\": false, \"is_teardown\": false, \"mapped\": false, \"operator_class\": \"custom_operator.CustomOperator\", \"owner\": \"airflow\", \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_exponential_backoff\": false, \"task_id\": \"custom_task_id\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": \"[]\", \"wait_for_downstream\": false, \"wait_for_past_depends_before_skipping\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" + }, + "externalUrl": 
"http://airflow.example.com/taskinstance/list/?flt1_dag_id_equals=custom_operator_dag&_flt_3_task_id=custom_task_id", + "name": "custom_task_id", + "type": { + "string": "COMMAND" + } + } + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableD,PROD)", + "changeType": "UPSERT", + "aspectName": "datasetKey", + "aspect": { + "json": { + "platform": "urn:li:dataPlatform:snowflake", + "name": "mydb.schema.tableD", + "origin": "PROD" + } + } +}, +{ + "entityType": "dataJob", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(airflow,custom_operator_dag,prod),custom_task_id)", + "changeType": "UPSERT", + "aspectName": "globalTags", + "aspect": { + "json": { + "tags": [ + { + "tag": "urn:li:tag:example_tag" + } + ] + } + } +}, +{ + "entityType": "dataJob", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(airflow,custom_operator_dag,prod),custom_task_id)", + "changeType": "UPSERT", + "aspectName": "dataJobInputOutput", + "aspect": { + "json": { + "inputDatasets": [ + "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableA,PROD)", + "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableB,PROD)" + ], + "outputDatasets": [ + "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableD,PROD)" + ], + "inputDatajobs": [], + "fineGrainedLineages": [] + } + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableB,PROD)", + "changeType": "UPSERT", + "aspectName": "datasetKey", + "aspect": { + "json": { + "platform": "urn:li:dataPlatform:snowflake", + "name": "mydb.schema.tableB", + "origin": "PROD" + } + } +}, +{ + "entityType": "dataProcessInstance", + "entityUrn": "urn:li:dataProcessInstance:07a4aaeffa3875a24cccd1fec6fc7c8c", + "changeType": "UPSERT", + "aspectName": "dataProcessInstanceRunEvent", + "aspect": { + "json": { + "timestampMillis": 1722943444263, + "partitionSpec": { + "partition": "FULL_TABLE_SNAPSHOT", + "type": 
"FULL_TABLE" + }, + "status": "COMPLETE", + "result": { + "type": "SUCCESS", + "nativeResultType": "airflow" + } + } + } +} +] \ No newline at end of file diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_custom_operator_dag_no_dag_listener.json b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_custom_operator_dag_no_dag_listener.json new file mode 100644 index 0000000000000..019122600aedb --- /dev/null +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_custom_operator_dag_no_dag_listener.json @@ -0,0 +1,404 @@ +[ +{ + "entityType": "dataFlow", + "entityUrn": "urn:li:dataFlow:(airflow,custom_operator_dag,prod)", + "changeType": "UPSERT", + "aspectName": "dataFlowInfo", + "aspect": { + "json": { + "customProperties": { + "_access_control": "None", + "catchup": "False", + "description": "'An example dag with custom operator'", + "doc_md": "None", + "fileloc": "", + "is_paused_upon_creation": "None", + "start_date": "None", + "tags": "['example_tag']", + "timezone": "Timezone('UTC')" + }, + "externalUrl": "http://airflow.example.com/tree?dag_id=custom_operator_dag", + "name": "custom_operator_dag", + "description": "An example dag with custom operator" + } + } +}, +{ + "entityType": "dataFlow", + "entityUrn": "urn:li:dataFlow:(airflow,custom_operator_dag,prod)", + "changeType": "UPSERT", + "aspectName": "ownership", + "aspect": { + "json": { + "owners": [ + { + "owner": "urn:li:corpuser:airflow", + "type": "DEVELOPER", + "source": { + "type": "SERVICE" + } + } + ], + "ownerTypes": {}, + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:airflow" + } + } + } +}, +{ + "entityType": "dataFlow", + "entityUrn": "urn:li:dataFlow:(airflow,custom_operator_dag,prod)", + "changeType": "UPSERT", + "aspectName": "globalTags", + "aspect": { + "json": { + "tags": [ + { + "tag": "urn:li:tag:example_tag" + } + ] + } + } +}, +{ + "entityType": "dataFlow", + "entityUrn": 
"urn:li:dataFlow:(airflow,custom_operator_dag,prod)", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + } +}, +{ + "entityType": "dataJob", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(airflow,custom_operator_dag,prod),custom_task_id)", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + } +}, +{ + "entityType": "tag", + "entityUrn": "urn:li:tag:example_tag", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + } +}, +{ + "entityType": "dataFlow", + "entityUrn": "urn:li:dataFlow:(airflow,custom_operator_dag,prod)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "custom_operator_dag" + } + ] + } + } +}, +{ + "entityType": "dataJob", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(airflow,custom_operator_dag,prod),custom_task_id)", + "changeType": "UPSERT", + "aspectName": "dataJobInfo", + "aspect": { + "json": { + "customProperties": { + "depends_on_past": "False", + "email": "['jdoe@example.com']", + "label": "'custom_task_id'", + "execution_timeout": "datetime.timedelta(seconds=300)", + "sla": "None", + "task_id": "'custom_task_id'", + "trigger_rule": "", + "wait_for_downstream": "False", + "downstream_task_ids": "[]", + "inlets": "[]", + "outlets": "[]", + "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.18.0/integration/airflow\", \"_schemaURL\": \"https://openlineage.io/spec/2-0-2/OpenLineage.json#/$defs/BaseFacet\", \"unknownItems\": [{\"name\": \"CustomOperator\", \"properties\": {\"depends_on_past\": false, \"downstream_task_ids\": \"[]\", \"execution_timeout\": \"<>\", \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"mapped\": false, \"operator_class\": \"custom_operator.CustomOperator\", \"owner\": \"airflow\", \"priority_weight\": 1, \"queue\": 
\"default\", \"retries\": 0, \"retry_exponential_backoff\": false, \"task_id\": \"custom_task_id\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": \"[]\", \"wait_for_downstream\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" + }, + "externalUrl": "http://airflow.example.com/taskinstance/list/?flt1_dag_id_equals=custom_operator_dag&_flt_3_task_id=custom_task_id", + "name": "custom_task_id", + "type": { + "string": "COMMAND" + } + } + } +}, +{ + "entityType": "dataJob", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(airflow,custom_operator_dag,prod),custom_task_id)", + "changeType": "UPSERT", + "aspectName": "dataJobInputOutput", + "aspect": { + "json": { + "inputDatasets": [], + "outputDatasets": [], + "inputDatajobs": [], + "fineGrainedLineages": [] + } + } +}, +{ + "entityType": "dataJob", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(airflow,custom_operator_dag,prod),custom_task_id)", + "changeType": "UPSERT", + "aspectName": "ownership", + "aspect": { + "json": { + "owners": [ + { + "owner": "urn:li:corpuser:airflow", + "type": "DEVELOPER", + "source": { + "type": "SERVICE" + } + } + ], + "ownerTypes": {}, + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:airflow" + } + } + } +}, +{ + "entityType": "dataJob", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(airflow,custom_operator_dag,prod),custom_task_id)", + "changeType": "UPSERT", + "aspectName": "globalTags", + "aspect": { + "json": { + "tags": [ + { + "tag": "urn:li:tag:example_tag" + } + ] + } + } +}, +{ + "entityType": "dataProcessInstance", + "entityUrn": "urn:li:dataProcessInstance:07a4aaeffa3875a24cccd1fec6fc7c8c", + "changeType": "UPSERT", + "aspectName": "dataProcessInstanceProperties", + "aspect": { + "json": { + "customProperties": { + "run_id": "manual_run_test", + "duration": "", + "start_date": "", + "end_date": "", + "execution_date": "2023-09-27 21:34:38+00:00", + "try_number": "0", + "max_tries": "0", + "external_executor_id": "None", + 
"state": "running", + "operator": "CustomOperator", + "priority_weight": "1", + "log_url": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=custom_task_id&dag_id=custom_operator_dag&map_index=-1", + "orchestrator": "airflow", + "dag_id": "custom_operator_dag", + "task_id": "custom_task_id" + }, + "externalUrl": "http://airflow.example.com/log?execution_date=2023-09-27T21%3A34%3A38%2B00%3A00&task_id=custom_task_id&dag_id=custom_operator_dag&map_index=-1", + "name": "custom_operator_dag_custom_task_id_manual_run_test", + "type": "BATCH_AD_HOC", + "created": { + "time": 1723716446564, + "actor": "urn:li:corpuser:datahub" + } + } + } +}, +{ + "entityType": "dataProcessInstance", + "entityUrn": "urn:li:dataProcessInstance:07a4aaeffa3875a24cccd1fec6fc7c8c", + "changeType": "UPSERT", + "aspectName": "dataProcessInstanceRelationships", + "aspect": { + "json": { + "parentTemplate": "urn:li:dataJob:(urn:li:dataFlow:(airflow,custom_operator_dag,prod),custom_task_id)", + "upstreamInstances": [] + } + } +}, +{ + "entityType": "dataProcessInstance", + "entityUrn": "urn:li:dataProcessInstance:07a4aaeffa3875a24cccd1fec6fc7c8c", + "changeType": "UPSERT", + "aspectName": "dataProcessInstanceRunEvent", + "aspect": { + "json": { + "timestampMillis": 1723716446564, + "partitionSpec": { + "partition": "FULL_TABLE_SNAPSHOT", + "type": "FULL_TABLE" + }, + "status": "STARTED", + "attempt": 1 + } + } +}, +{ + "entityType": "dataJob", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(airflow,custom_operator_dag,prod),custom_task_id)", + "changeType": "UPSERT", + "aspectName": "dataJobInfo", + "aspect": { + "json": { + "customProperties": { + "depends_on_past": "False", + "email": "['jdoe@example.com']", + "label": "'custom_task_id'", + "execution_timeout": "datetime.timedelta(seconds=300)", + "sla": "None", + "task_id": "'custom_task_id'", + "trigger_rule": "", + "wait_for_downstream": "False", + "downstream_task_ids": "[]", + "inlets": 
"[Dataset(platform='snowflake', name='mydb.schema.tableA', env='PROD', platform_instance=None), Dataset(platform='snowflake', name='mydb.schema.tableB', env='PROD', platform_instance=None)]", + "outlets": "[Dataset(platform='snowflake', name='mydb.schema.tableD', env='PROD', platform_instance=None)]", + "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.18.0/integration/airflow\", \"_schemaURL\": \"https://openlineage.io/spec/2-0-2/OpenLineage.json#/$defs/BaseFacet\", \"unknownItems\": [{\"name\": \"CustomOperator\", \"properties\": {\"depends_on_past\": false, \"downstream_task_ids\": \"[]\", \"execution_timeout\": \"<>\", \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"mapped\": false, \"operator_class\": \"custom_operator.CustomOperator\", \"owner\": \"airflow\", \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_exponential_backoff\": false, \"task_id\": \"custom_task_id\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": \"[]\", \"wait_for_downstream\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" + }, + "externalUrl": "http://airflow.example.com/taskinstance/list/?flt1_dag_id_equals=custom_operator_dag&_flt_3_task_id=custom_task_id", + "name": "custom_task_id", + "type": { + "string": "COMMAND" + } + } + } +}, +{ + "entityType": "dataJob", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(airflow,custom_operator_dag,prod),custom_task_id)", + "changeType": "UPSERT", + "aspectName": "dataJobInputOutput", + "aspect": { + "json": { + "inputDatasets": [ + "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableA,PROD)", + "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableB,PROD)" + ], + "outputDatasets": [ + "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableD,PROD)" + ], + "inputDatajobs": [], + "fineGrainedLineages": [] + } + } +}, +{ + "entityType": "dataset", + "entityUrn": 
"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableA,PROD)", + "changeType": "UPSERT", + "aspectName": "datasetKey", + "aspect": { + "json": { + "platform": "urn:li:dataPlatform:snowflake", + "name": "mydb.schema.tableA", + "origin": "PROD" + } + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableB,PROD)", + "changeType": "UPSERT", + "aspectName": "datasetKey", + "aspect": { + "json": { + "platform": "urn:li:dataPlatform:snowflake", + "name": "mydb.schema.tableB", + "origin": "PROD" + } + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableD,PROD)", + "changeType": "UPSERT", + "aspectName": "datasetKey", + "aspect": { + "json": { + "platform": "urn:li:dataPlatform:snowflake", + "name": "mydb.schema.tableD", + "origin": "PROD" + } + } +}, +{ + "entityType": "dataJob", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(airflow,custom_operator_dag,prod),custom_task_id)", + "changeType": "UPSERT", + "aspectName": "ownership", + "aspect": { + "json": { + "owners": [ + { + "owner": "urn:li:corpuser:airflow", + "type": "DEVELOPER", + "source": { + "type": "SERVICE" + } + } + ], + "ownerTypes": {}, + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:airflow" + } + } + } +}, +{ + "entityType": "dataJob", + "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(airflow,custom_operator_dag,prod),custom_task_id)", + "changeType": "UPSERT", + "aspectName": "globalTags", + "aspect": { + "json": { + "tags": [ + { + "tag": "urn:li:tag:example_tag" + } + ] + } + } +}, +{ + "entityType": "dataProcessInstance", + "entityUrn": "urn:li:dataProcessInstance:07a4aaeffa3875a24cccd1fec6fc7c8c", + "changeType": "UPSERT", + "aspectName": "dataProcessInstanceRunEvent", + "aspect": { + "json": { + "timestampMillis": 1723716446701, + "partitionSpec": { + "partition": "FULL_TABLE_SNAPSHOT", + "type": "FULL_TABLE" + }, + "status": "COMPLETE", + "result": { + 
"type": "SUCCESS", + "nativeResultType": "airflow" + } + } + } +} +] \ No newline at end of file diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/test_plugin.py b/metadata-ingestion-modules/airflow-plugin/tests/integration/test_plugin.py index 9ea822edeef81..2b8d4c47f6224 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/integration/test_plugin.py +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/test_plugin.py @@ -110,7 +110,9 @@ def _wait_for_dag_finish( @contextlib.contextmanager def _run_airflow( - tmp_path: pathlib.Path, dags_folder: pathlib.Path, is_v1: bool + tmp_path: pathlib.Path, + dags_folder: pathlib.Path, + is_v1: bool, ) -> Iterator[AirflowInstance]: airflow_home = tmp_path / "airflow_home" print(f"Using airflow home: {airflow_home}") @@ -272,6 +274,7 @@ class DagTestCase: DagTestCase("basic_iolets"), DagTestCase("snowflake_operator", success=False, v2_only=True), DagTestCase("sqlite_operator", v2_only=True), + DagTestCase("custom_operator_dag", v2_only=True), ] From ff96ffd2e1c9110f3201f7d53d49d0cb1461eb7a Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Mon, 19 Aug 2024 07:16:51 -0500 Subject: [PATCH 59/72] docs(readme): add security notes (#11196) --- README.md | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/README.md b/README.md index 3ac0668918f70..2335c7c4728b0 100644 --- a/README.md +++ b/README.md @@ -173,6 +173,23 @@ Here are the companies that have officially adopted DataHub. Please feel free to See the full list [here](docs/links.md). +## Security Notes + +### Multi-Component + +The DataHub project uses a wide range of code which is responsible for build automation, documentation generation, and +include both service (i.e. GMS) and client (i.e. ingestion) components. When evaluating security vulnerabilities in +upstream dependencies, it is important to consider which component and how it is used in the project. 
For example, an +upstream javascript library may include a Denial of Service (DoS) vulnerability however when used for generating +documentation it does not affect the running of DataHub itself and cannot be used to impact DataHub's service. Similarly, +python dependencies for ingestion are part of the DataHub client and are not exposed as a service. + +### Known False Positives + +DataHub's ingestion client does not include credentials in the code repository, python package, or Docker images. +Upstream python dependencies may include files that look like credentials and are often misinterpreted as credentials +by automated scanners. + ## License [Apache License 2.0](./LICENSE). From 291fc41081797f3e4028ea9f1761f93a337d1bdd Mon Sep 17 00:00:00 2001 From: prashanthic23 <111007365+prashanthic23@users.noreply.github.com> Date: Mon, 19 Aug 2024 14:28:11 -0700 Subject: [PATCH 60/72] docs: Update README.md (#11144) --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index 2335c7c4728b0..8aa177c3d6675 100644 --- a/README.md +++ b/README.md @@ -138,6 +138,7 @@ Here are the companies that have officially adopted DataHub. Please feel free to - [Peloton](https://www.onepeloton.com) - [PITS Global Data Recovery Services](https://www.pitsdatarecovery.net/) - [Razer](https://www.razer.com) +- [Rippling](https://www.rippling.com/) - [Showroomprive](https://www.showroomprive.com/) - [SpotHero](https://spothero.com) - [Stash](https://www.stash.com) @@ -153,6 +154,7 @@ Here are the companies that have officially adopted DataHub. 
Please feel free to - [Zynga](https://www.zynga.com) + ## Select Articles & Talks - [DataHub Blog](https://blog.datahubproject.io/) From bb8cf97ea3091a8e00c16ad2436d4e833421e059 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Mon, 19 Aug 2024 18:38:27 -0400 Subject: [PATCH 61/72] feat(ingest/dbt): skip CLL on sources with `skip_sources_in_lineage` (#11195) --- .../ingestion/source/dbt/dbt_common.py | 37 +- ...test_prefer_sql_parser_lineage_golden.json | 772 ------------------ 2 files changed, 23 insertions(+), 786 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_common.py b/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_common.py index d2b41323e5115..8d67551b9e1f2 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_common.py +++ b/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_common.py @@ -770,23 +770,30 @@ def make_mapping_upstream_lineage( downstream_urn: str, node: DBTNode, convert_column_urns_to_lowercase: bool, + skip_sources_in_lineage: bool, ) -> UpstreamLineageClass: cll = [] - for column in node.columns or []: - field_name = column.name - if convert_column_urns_to_lowercase: - field_name = field_name.lower() - - cll.append( - FineGrainedLineage( - upstreamType=FineGrainedLineageUpstreamType.FIELD_SET, - upstreams=[mce_builder.make_schema_field_urn(upstream_urn, field_name)], - downstreamType=FineGrainedLineageDownstreamType.FIELD, - downstreams=[ - mce_builder.make_schema_field_urn(downstream_urn, field_name) - ], + if not (node.node_type == "source" and skip_sources_in_lineage): + # If `skip_sources_in_lineage` is enabled, we want to generate table lineage (for siblings) + # but not CLL. That's because CLL will make it look like the warehouse node has downstream + # column lineage, but it's really just empty. 
+ for column in node.columns or []: + field_name = column.name + if convert_column_urns_to_lowercase: + field_name = field_name.lower() + + cll.append( + FineGrainedLineage( + upstreamType=FineGrainedLineageUpstreamType.FIELD_SET, + upstreams=[ + mce_builder.make_schema_field_urn(upstream_urn, field_name) + ], + downstreamType=FineGrainedLineageDownstreamType.FIELD, + downstreams=[ + mce_builder.make_schema_field_urn(downstream_urn, field_name) + ], + ) ) - ) return UpstreamLineageClass( upstreams=[ @@ -1477,6 +1484,7 @@ def create_target_platform_mces( downstream_urn=node_datahub_urn, node=node, convert_column_urns_to_lowercase=self.config.convert_column_urns_to_lowercase, + skip_sources_in_lineage=self.config.skip_sources_in_lineage, ) if self.config.incremental_lineage: # We only generate incremental lineage for non-dbt nodes. @@ -1822,6 +1830,7 @@ def _create_lineage_aspect_for_dbt_node( downstream_urn=node_urn, node=node, convert_column_urns_to_lowercase=self.config.convert_column_urns_to_lowercase, + skip_sources_in_lineage=self.config.skip_sources_in_lineage, ) else: upstream_urns = get_upstreams( diff --git a/metadata-ingestion/tests/integration/dbt/dbt_test_prefer_sql_parser_lineage_golden.json b/metadata-ingestion/tests/integration/dbt/dbt_test_prefer_sql_parser_lineage_golden.json index d421fc4ba42f5..d2c7165970681 100644 --- a/metadata-ingestion/tests/integration/dbt/dbt_test_prefer_sql_parser_lineage_golden.json +++ b/metadata-ingestion/tests/integration/dbt/dbt_test_prefer_sql_parser_lineage_golden.json @@ -2150,52 +2150,6 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.actor,PROD)", "type": "COPY" } - ], - "fineGrainedLineages": [ - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.actor,PROD),actor_id)" - ], - "downstreamType": "FIELD", - "downstreams": [ - 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.actor,PROD),actor_id)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.actor,PROD),first_name)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.actor,PROD),first_name)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.actor,PROD),last_name)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.actor,PROD),last_name)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.actor,PROD),last_update)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.actor,PROD),last_update)" - ], - "confidenceScore": 1.0 - } ] } } @@ -2402,96 +2356,6 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.address,PROD)", "type": "COPY" } - ], - "fineGrainedLineages": [ - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.address,PROD),address)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.address,PROD),address)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.address,PROD),address2)" - ], - "downstreamType": "FIELD", - "downstreams": [ - 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.address,PROD),address2)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.address,PROD),address_id)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.address,PROD),address_id)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.address,PROD),city_id)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.address,PROD),city_id)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.address,PROD),district)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.address,PROD),district)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.address,PROD),last_update)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.address,PROD),last_update)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.address,PROD),phone)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.address,PROD),phone)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.address,PROD),postal_code)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.address,PROD),postal_code)" - ], - "confidenceScore": 1.0 - } ] } } @@ -2638,41 +2502,6 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.category,PROD)", "type": "COPY" } - ], - "fineGrainedLineages": [ - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.category,PROD),category_id)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.category,PROD),category_id)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.category,PROD),last_update)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.category,PROD),last_update)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.category,PROD),name)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.category,PROD),name)" - ], - "confidenceScore": 1.0 - } ] } } @@ -2831,52 +2660,6 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.city,PROD)", "type": "COPY" } - ], - "fineGrainedLineages": [ - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.city,PROD),city)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.city,PROD),city)" - ], - 
"confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.city,PROD),city_id)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.city,PROD),city_id)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.city,PROD),country_id)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.city,PROD),country_id)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.city,PROD),last_update)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.city,PROD),last_update)" - ], - "confidenceScore": 1.0 - } ] } } @@ -3041,41 +2824,6 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.country,PROD)", "type": "COPY" } - ], - "fineGrainedLineages": [ - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.country,PROD),country)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.country,PROD),country)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.country,PROD),country_id)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.country,PROD),country_id)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.country,PROD),last_update)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.country,PROD),last_update)" - ], - "confidenceScore": 1.0 - } ] } } @@ -3306,118 +3054,6 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.customer,PROD)", "type": "COPY" } - ], - "fineGrainedLineages": [ - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.customer,PROD),active)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),active)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.customer,PROD),activebool)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),activebool)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.customer,PROD),address_id)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),address_id)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.customer,PROD),create_date)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),create_date)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.customer,PROD),customer_id)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),customer_id)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.customer,PROD),email)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),email)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.customer,PROD),first_name)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),first_name)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.customer,PROD),last_name)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),last_name)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.customer,PROD),last_update)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),last_update)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.customer,PROD),store_id)" - ], - "downstreamType": "FIELD", - "downstreams": [ - 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),store_id)" - ], - "confidenceScore": 1.0 - } ] } } @@ -3600,74 +3236,6 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_01,PROD)", "type": "COPY" } - ], - "fineGrainedLineages": [ - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_01,PROD),amount)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),amount)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_01,PROD),customer_id)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),customer_id)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_01,PROD),payment_date)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),payment_date)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_01,PROD),payment_id)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),payment_id)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_01,PROD),rental_id)" - ], - "downstreamType": "FIELD", - 
"downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),rental_id)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_01,PROD),staff_id)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),staff_id)" - ], - "confidenceScore": 1.0 - } ] } } @@ -3869,74 +3437,6 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_02,PROD)", "type": "COPY" } - ], - "fineGrainedLineages": [ - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_02,PROD),amount)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),amount)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_02,PROD),customer_id)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),customer_id)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_02,PROD),payment_date)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),payment_date)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_02,PROD),payment_id)" - ], - 
"downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),payment_id)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_02,PROD),rental_id)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),rental_id)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_02,PROD),staff_id)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),staff_id)" - ], - "confidenceScore": 1.0 - } ] } } @@ -4119,74 +3619,6 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_03,PROD)", "type": "COPY" } - ], - "fineGrainedLineages": [ - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_03,PROD),amount)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),amount)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_03,PROD),customer_id)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),customer_id)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_03,PROD),payment_date)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),payment_date)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_03,PROD),payment_id)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),payment_id)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_03,PROD),rental_id)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),rental_id)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_03,PROD),staff_id)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),staff_id)" - ], - "confidenceScore": 1.0 - } ] } } @@ -4369,74 +3801,6 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_04,PROD)", "type": "COPY" } - ], - "fineGrainedLineages": [ - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_04,PROD),amount)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),amount)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", 
- "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_04,PROD),customer_id)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),customer_id)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_04,PROD),payment_date)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),payment_date)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_04,PROD),payment_id)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),payment_id)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_04,PROD),rental_id)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),rental_id)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_04,PROD),staff_id)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),staff_id)" - ], - "confidenceScore": 1.0 - } ] } } @@ -4619,74 +3983,6 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_05,PROD)", "type": "COPY" } - ], - "fineGrainedLineages": [ - { - 
"upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_05,PROD),amount)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),amount)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_05,PROD),customer_id)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),customer_id)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_05,PROD),payment_date)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),payment_date)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_05,PROD),payment_id)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),payment_id)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_05,PROD),rental_id)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),rental_id)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_05,PROD),staff_id)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),staff_id)" - ], - "confidenceScore": 1.0 - } ] } } @@ -4869,74 +4165,6 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_06,PROD)", "type": "COPY" } - ], - "fineGrainedLineages": [ - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_06,PROD),amount)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),amount)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_06,PROD),customer_id)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),customer_id)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_06,PROD),payment_date)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),payment_date)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_06,PROD),payment_id)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),payment_id)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": 
"FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_06,PROD),rental_id)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),rental_id)" - ], - "confidenceScore": 1.0 - }, - { - "upstreamType": "FIELD_SET", - "upstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_06,PROD),staff_id)" - ], - "downstreamType": "FIELD", - "downstreams": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),staff_id)" - ], - "confidenceScore": 1.0 - } ] } } From fa43b67dc612b4e4db82d31861bd2026730d152e Mon Sep 17 00:00:00 2001 From: Pedro Silva Date: Tue, 20 Aug 2024 02:28:43 +0100 Subject: [PATCH 62/72] fix(graphql): Correct ownership check when removing owners (#11154) Co-authored-by: david-leifker <114954101+david-leifker@users.noreply.github.com> Co-authored-by: Aseem Bansal --- .../datahub/graphql/resolvers/mutate/util/OwnerUtils.java | 2 +- .../linkedin/datahub/graphql/utils/OwnerUtilsTest.java | 8 +++++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/OwnerUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/OwnerUtils.java index 29056eb71a7a3..ddb795189c0e3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/OwnerUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/OwnerUtils.java @@ -171,7 +171,7 @@ public static boolean isOwnerEqual( if (!owner.getOwner().equals(ownerUrn)) { return false; } - if (owner.getTypeUrn() != null) { + if (owner.getTypeUrn() != null && ownershipTypeUrn != null) { return owner.getTypeUrn().equals(ownershipTypeUrn); } if (ownershipTypeUrn 
== null) { diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/OwnerUtilsTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/OwnerUtilsTest.java index b4097d9dd045d..d524d8bfb9a6b 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/OwnerUtilsTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/OwnerUtilsTest.java @@ -59,6 +59,7 @@ public void testIsOwnerEqualOnlyOwnershipTypeUrn() throws URISyntaxException { Urn technicalOwnershipTypeUrn = new Urn(TECHNICAL_OWNER_OWNERSHIP_TYPE_URN); Urn businessOwnershipTypeUrn = new Urn(BUSINESS_OWNER_OWNERSHIP_TYPE_URN); Urn ownerUrn1 = new Urn("urn:li:corpuser:foo"); + Urn ownerUrn2 = new Urn("urn:li:corpuser:bar"); Owner ownerWithTechnicalOwnership = new Owner(); ownerWithTechnicalOwnership.setOwner(ownerUrn1); @@ -72,12 +73,17 @@ public void testIsOwnerEqualOnlyOwnershipTypeUrn() throws URISyntaxException { ownerWithoutOwnershipType.setOwner(ownerUrn1); ownerWithoutOwnershipType.setType(OwnershipType.NONE); + Owner owner2WithoutOwnershipType = new Owner(); + owner2WithoutOwnershipType.setOwner(ownerUrn2); + owner2WithoutOwnershipType.setType(OwnershipType.NONE); + assertTrue( OwnerUtils.isOwnerEqual(ownerWithTechnicalOwnership, ownerUrn1, technicalOwnershipTypeUrn)); assertFalse( OwnerUtils.isOwnerEqual(ownerWithBusinessOwnership, ownerUrn1, technicalOwnershipTypeUrn)); - assertFalse(OwnerUtils.isOwnerEqual(ownerWithTechnicalOwnership, ownerUrn1, null)); + assertTrue(OwnerUtils.isOwnerEqual(ownerWithTechnicalOwnership, ownerUrn1, null)); assertTrue(OwnerUtils.isOwnerEqual(ownerWithoutOwnershipType, ownerUrn1, null)); + assertFalse(OwnerUtils.isOwnerEqual(owner2WithoutOwnershipType, ownerUrn1, null)); } public void testIsOwnerEqualWithBothLegacyAndNewType() throws URISyntaxException { From 7d08ee2ba110cac1c57861bcbe09853038743be4 Mon Sep 17 00:00:00 2001 From: John Joyce Date: Mon, 19 Aug 2024 20:47:07 
-0700 Subject: [PATCH 63/72] feat(propagation): UI for rendering propagated column documentation (#11047) Co-authored-by: Sam Black Co-authored-by: John Joyce Co-authored-by: John Joyce Co-authored-by: John Joyce Co-authored-by: John Joyce Co-authored-by: John Joyce --- .../DocPropagationSettingsResolver.java | 4 +- datahub-web-react/.eslintrc.js | 2 +- datahub-web-react/README.md | 130 +++++++++--------- .../components/SchemaDescriptionField.tsx | 55 +++++--- .../components/legacy/DescriptionModal.tsx | 24 +++- .../shared/propagation/PropagationDetails.tsx | 109 +++++++++++++++ .../propagation/PropagationEntityLink.tsx | 56 ++++++++ .../shared/propagation/PropagationIcon.tsx | 22 +++ .../app/entity/shared/propagation/utils.ts | 24 ++++ .../SchemaFieldDrawer/FieldDescription.tsx | 40 ++++-- .../utils/getFieldDescriptionDetails.ts | 25 ++++ .../Schema/utils/useDescriptionRenderer.tsx | 17 ++- .../src/app/entity/shared/useGetEntities.ts | 18 +++ .../src/app/lineage/LineageExplorer.tsx | 4 +- .../src/app/settings/SettingsPage.tsx | 2 +- .../src/app/settings/features/Feature.tsx | 47 ++++--- .../src/app/settings/features/Features.tsx | 9 +- docs-website/sidebars.js | 17 +++ docs/automation/docs-propagation.md | 128 +++++++++++++++++ docs/automation/snowflake-tag-propagation.md | 88 ++++++++++++ .../settings/global/GlobalSettingsInfo.pdl | 5 +- .../main/resources/boot/global_settings.json | 4 + .../cypress/e2e/actions/docPropagation.js | 27 ++++ smoke-test/tests/cypress/data.json | 112 ++++++++++++--- 24 files changed, 821 insertions(+), 148 deletions(-) create mode 100644 datahub-web-react/src/app/entity/shared/propagation/PropagationDetails.tsx create mode 100644 datahub-web-react/src/app/entity/shared/propagation/PropagationEntityLink.tsx create mode 100644 datahub-web-react/src/app/entity/shared/propagation/PropagationIcon.tsx create mode 100644 datahub-web-react/src/app/entity/shared/propagation/utils.ts create mode 100644 
datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/utils/getFieldDescriptionDetails.ts create mode 100644 datahub-web-react/src/app/entity/shared/useGetEntities.ts create mode 100644 docs/automation/docs-propagation.md create mode 100644 docs/automation/snowflake-tag-propagation.md create mode 100644 smoke-test/tests/cypress/cypress/e2e/actions/docPropagation.js diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/docPropagation/DocPropagationSettingsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/docPropagation/DocPropagationSettingsResolver.java index 84d3bcd7b376c..0641d6aca6370 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/docPropagation/DocPropagationSettingsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/docPropagation/DocPropagationSettingsResolver.java @@ -33,7 +33,9 @@ public CompletableFuture get(final DataFetchingEnvironme final GlobalSettingsInfo globalSettings = _settingsService.getGlobalSettings(context.getOperationContext()); final DocPropagationSettings defaultSettings = new DocPropagationSettings(); - defaultSettings.setDocColumnPropagation(true); + // TODO: Enable by default. Currently the automation trusts the settings aspect, which + // does not have this. + defaultSettings.setDocColumnPropagation(false); return globalSettings != null && globalSettings.hasDocPropagation() ? 
mapDocPropagationSettings(globalSettings.getDocPropagation()) : defaultSettings; diff --git a/datahub-web-react/.eslintrc.js b/datahub-web-react/.eslintrc.js index 5627283af1af1..3fdf7b6a3042c 100644 --- a/datahub-web-react/.eslintrc.js +++ b/datahub-web-react/.eslintrc.js @@ -48,7 +48,7 @@ module.exports = { ], 'vitest/prefer-to-be': 'off', '@typescript-eslint/no-use-before-define': ['error', { functions: false, classes: false }], - 'react-refresh/only-export-components': ['warn', { 'allowConstantExport': true }], + 'react-refresh/only-export-components': ['warn', { allowConstantExport: true }], }, settings: { react: { diff --git a/datahub-web-react/README.md b/datahub-web-react/README.md index 560f5315b2c71..86bbb349b027c 100644 --- a/datahub-web-react/README.md +++ b/datahub-web-react/README.md @@ -1,44 +1,47 @@ --- -title: "datahub-web-react" +title: 'datahub-web-react' --- # DataHub React App ## About -This module contains a React application that serves as the DataHub UI. -Feel free to take a look around, deploy, and contribute. +This module contains a React application that serves as the DataHub UI. +Feel free to take a look around, deploy, and contribute. ## Functional Goals + The initial milestone for the app was to achieve functional parity with the previous Ember app. This meant supporting -- Dataset Profiles, Search, Browse Experience -- User Profiles, Search -- LDAP Authentication Flow +- Dataset Profiles, Search, Browse Experience +- User Profiles, Search +- LDAP Authentication Flow -This has since been achieved. The new set of functional goals are reflected in the latest version of the [DataHub Roadmap](../docs/roadmap.md). +This has since been achieved. The new set of functional goals are reflected in the latest version of the [DataHub Roadmap](../docs/roadmap.md). 
## Design Goals + In building out the client experience, we intend to leverage learnings from the previous Ember-based app and incorporate feedback gathered from organizations operating DataHub. Two themes have emerged to serve as guideposts: -1. **Configurability**: The client experience should be configurable, such that deploying organizations can tailor certain - aspects to their needs. This includes theme / styling configurability, showing and hiding specific functionality, - customizing copy & logos, etc. - -2. **Extensibility**: Extending the *functionality* of DataHub should be as simple as possible. Making changes like - extending an existing entity & adding a new entity should require minimal effort and should be well covered in detailed - documentation. +1. **Configurability**: The client experience should be configurable, such that deploying organizations can tailor certain + aspects to their needs. This includes theme / styling configurability, showing and hiding specific functionality, + customizing copy & logos, etc. +2. **Extensibility**: Extending the _functionality_ of DataHub should be as simple as possible. Making changes like + extending an existing entity & adding a new entity should require minimal effort and should be well covered in detailed + documentation. ## Starting the Application ### Quick Start Navigate to the `docker` directory and run the following to spin up the react app: + ``` ./quickstart.sh ``` + at `http://localhost:9002`. If you want to make changes to the UI see them live without having to rebuild the `datahub-frontend-react` docker image, you @@ -54,8 +57,9 @@ Optionally you could also start the app with the mock server without running the ### Testing your customizations There is two options to test your customizations: -* **Option 1**: Initialize the docker containers with the `quickstart.sh` script (or if any custom docker-compose file) and then run `yarn start` in this directory. 
This will start a forwarding server at `localhost:3000` that will use the `datahub-frontend` server at `http://localhost:9002` to fetch real data. -* **Option 2**: Change the environment variable `REACT_APP_PROXY_TARGET` in the `.env` file to point to your `datahub-frontend` server (ex: https://my_datahub_host.com) and then run `yarn start` in this directory. This will start a forwarding server at `localhost:3000` that will use the `datahub-frontend` server at some domain to fetch real data. + +- **Option 1**: Initialize the docker containers with the `quickstart.sh` script (or if any custom docker-compose file) and then run `yarn start` in this directory. This will start a forwarding server at `localhost:3000` that will use the `datahub-frontend` server at `http://localhost:9002` to fetch real data. +- **Option 2**: Change the environment variable `REACT_APP_PROXY_TARGET` in the `.env` file to point to your `datahub-frontend` server (ex: https://my_datahub_host.com) and then run `yarn start` in this directory. This will start a forwarding server at `localhost:3000` that will use the `datahub-frontend` server at some domain to fetch real data. The option 2 is useful if you want to test your React customizations without having to run the hole DataHub stack locally. However, if you changed other components of the DataHub stack, you will need to run the hole stack locally (building the docker images) and use the option 1. @@ -68,10 +72,10 @@ In order to start a server and run frontend unit tests using react-testing-frame There are also more automated tests using Cypress in the `smoke-test` folder of the repository root. #### Troubleshooting + `Error: error:0308010C:digital envelope routines::unsupported`: This error message shows up when using Node 17, due to an OpenSSL update related to md5. The best workaround is to revert to the Active LTS version of Node, 16.13.0 with the command `nvm install 16.13.0` and if necessary reinstall yarn `npm install --global yarn`. 
- ### Theming #### Customizing your App without rebuilding assets @@ -108,74 +112,74 @@ you to terminate and re-run `yarn start` to see updated styles. The `src` dir of the app is broken down into the following modules -**conf** - Stores global configuration flags that can be referenced across the app. For example, the number of +**conf** - Stores global configuration flags that can be referenced across the app. For example, the number of search results shown per page, or the placeholder text in the search bar box. It serves as a location where levels -for functional configurability should reside. +for functional configurability should reside. **app** - Contains all important components of the app. It has a few sub-modules: -- `auth`: Components used to render the user authentication experience. -- `browse`: Shared components used to render the 'browse-by-path' experience. The experience is akin to navigating a filesystem hierarchy. -- `preview`: Shared components used to render Entity 'preview' views. These can appear in search results, browse results, - and within entity profile pages. -- `search`: Shared components used to render the full-text search experience. -- `shared`: Misc. shared components -- `entity`: Contains Entity definitions, where entity-specific functionality resides. - Configuration is provided by implementing the 'Entity' interface. (See DatasetEntity.tsx for example) - There are 2 visual components each entity should supply: - - `profiles`: display relevant details about an individual entity. This serves as the entity's 'profile'. - - `previews`: provide a 'preview', or a smaller details card, containing the most important information about an entity instance. - - When rendering a preview, the entity's data and the type of preview (SEARCH, BROWSE, PREVIEW) are provided. This +- `auth`: Components used to render the user authentication experience. +- `browse`: Shared components used to render the 'browse-by-path' experience. 
The experience is akin to navigating a filesystem hierarchy. +- `preview`: Shared components used to render Entity 'preview' views. These can appear in search results, browse results, + and within entity profile pages. +- `search`: Shared components used to render the full-text search experience. +- `shared`: Misc. shared components +- `entity`: Contains Entity definitions, where entity-specific functionality resides. + Configuration is provided by implementing the 'Entity' interface. (See DatasetEntity.tsx for example) + There are 2 visual components each entity should supply: + + - `profiles`: display relevant details about an individual entity. This serves as the entity's 'profile'. + - `previews`: provide a 'preview', or a smaller details card, containing the most important information about an entity instance. + + When rendering a preview, the entity's data and the type of preview (SEARCH, BROWSE, PREVIEW) are provided. This allows you to optionally customize the way an entities preview is rendered in different views. - - - `entity registry`: There's another very important piece of code living within this module: the **EntityRegistry**. This is a layer + + - `entity registry`: There's another very important piece of code living within this module: the **EntityRegistry**. This is a layer of abstraction over the intimate details of rendering a particular entity. It is used to render a view associated with a particular entity type (user, dataset, etc.). - - +

    -**graphql** - The React App talks to the `dathub-frontend` server using GraphQL. This module is where the *queries* issued -against the server are defined. Once defined, running `yarn run generate` will code-gen TypeScript objects to make invoking +**graphql** - The React App talks to the `dathub-frontend` server using GraphQL. This module is where the _queries_ issued +against the server are defined. Once defined, running `yarn run generate` will code-gen TypeScript objects to make invoking these queries extremely easy. An example can be found at the top of `SearchPage.tsx.` -**images** - Images to be displayed within the app. This is where one would place a custom logo image. +**images** - Images to be displayed within the app. This is where one would place a custom logo image. ## Adding an Entity The following outlines a series of steps required to introduce a new entity into the React app: -1. Declare the GraphQL Queries required to display the new entity - - If search functionality should be supported, extend the "search" query within `search.graphql` to fetch the new +1. Declare the GraphQL Queries required to display the new entity + + - If search functionality should be supported, extend the "search" query within `search.graphql` to fetch the new + entity data. + - If browse functionality should be supported, extend the "browse" query within `browse.graphql` to fetch the new entity data. - - If browse functionality should be supported, extend the "browse" query within `browse.graphql` to fetch the new - entity data. - - If display a 'profile' should be supported (most often), introduce a new `.graphql` file that contains a - `get` query to fetch the entity by primary key (urn). - - Note that your new entity *must* implement the `Entity` GraphQL type interface, and thus must have a corresponding - `EntityType`. - - -2. 
Implement the `Entity` interface + - If display a 'profile' should be supported (most often), introduce a new `.graphql` file that contains a + `get` query to fetch the entity by primary key (urn). + + Note that your new entity _must_ implement the `Entity` GraphQL type interface, and thus must have a corresponding + `EntityType`. + +2. Implement the `Entity` interface + - Create a new folder under `src/components/entity` corresponding to your entity - Create a class that implements the `Entity` interface (example: `DatasetEntity.tsx`) - - Provide an implementation each method defined on the interface. - - This class specifies whether your new entity should be searchable & browsable, defines the names used to - identify your entity when instances are rendered in collection / when entity appears - in the URL path, and provides the ability to render your entity given data returned by the GQL API. - + - Provide an implementation each method defined on the interface. + - This class specifies whether your new entity should be searchable & browsable, defines the names used to + identify your entity when instances are rendered in collection / when entity appears + in the URL path, and provides the ability to render your entity given data returned by the GQL API. 3. Register the new entity in the `EntityRegistry` - - Update `App.tsx` to register an instance of your new entity. Now your entity will be accessible via the registry + - Update `App.tsx` to register an instance of your new entity. Now your entity will be accessible via the registry and appear in the UI. To manually retrieve the info about your entity or others, simply use an instance - of the `EntityRegistry`, which is provided via `ReactContext` to *all* components in the hierarchy. + of the `EntityRegistry`, which is provided via `ReactContext` to _all_ components in the hierarchy. For example - ``` - entityRegistry.getCollectionName(EntityType.YOUR_NEW_ENTITY) - ``` - -That's it! 
For any questions, do not hesitate to reach out on the DataHub Slack community in #datahub-react. + ``` + entityRegistry.getCollectionName(EntityType.YOUR_NEW_ENTITY) + ``` + +That's it! For any questions, do not hesitate to reach out on the DataHub Slack community in #datahub-react. diff --git a/datahub-web-react/src/app/entity/dataset/profile/schema/components/SchemaDescriptionField.tsx b/datahub-web-react/src/app/entity/dataset/profile/schema/components/SchemaDescriptionField.tsx index ce8d03fbdc960..e7d986028d4a6 100644 --- a/datahub-web-react/src/app/entity/dataset/profile/schema/components/SchemaDescriptionField.tsx +++ b/datahub-web-react/src/app/entity/dataset/profile/schema/components/SchemaDescriptionField.tsx @@ -5,6 +5,8 @@ import styled from 'styled-components'; import { FetchResult } from '@apollo/client'; import { UpdateDatasetMutation } from '../../../../../../graphql/dataset.generated'; +import { StringMapEntry } from '../../../../../../types.generated'; +import PropagationDetails from '../../../../shared/propagation/PropagationDetails'; import UpdateDescriptionModal from '../../../../shared/components/legacy/DescriptionModal'; import StripMarkdownText, { removeMarkdown } from '../../../../shared/components/styled/StripMarkdownText'; import SchemaEditableContext from '../../../../../shared/SchemaEditableContext'; @@ -28,6 +30,11 @@ const ExpandedActions = styled.div` height: 10px; `; +const DescriptionWrapper = styled.span` + display: inline-flex; + align-items: center; +`; + const DescriptionContainer = styled.div` position: relative; display: flex; @@ -105,6 +112,8 @@ type Props = { isEdited?: boolean; isReadOnly?: boolean; businessAttributeDescription?: string; + isPropagated?: boolean; + sourceDetail?: StringMapEntry[] | null; }; const ABBREVIATED_LIMIT = 80; @@ -120,6 +129,8 @@ export default function DescriptionField({ original, isReadOnly, businessAttributeDescription, + isPropagated, + sourceDetail, }: Props) { const [showAddModal, 
setShowAddModal] = useState(false); const overLimit = removeMarkdown(description).length > 80; @@ -163,7 +174,7 @@ export default function DescriptionField({ return ( - {expanded || !overLimit ? ( + {expanded ? ( <> {!!description && } {!!description && (EditButton || overLimit) && ( @@ -184,25 +195,29 @@ export default function DescriptionField({ ) : ( <> - - { - e.stopPropagation(); - handleExpanded(true); - }} - > - Read More - - - } - suffix={EditButton} - shouldWrap - > - {description} - + + {isPropagated && } +   + + { + e.stopPropagation(); + handleExpanded(true); + }} + > + Read More + + + } + suffix={EditButton} + shouldWrap + > + {description} + + )} {isEdited && (edited)} diff --git a/datahub-web-react/src/app/entity/shared/components/legacy/DescriptionModal.tsx b/datahub-web-react/src/app/entity/shared/components/legacy/DescriptionModal.tsx index 0e899bc391e0a..2d65a305b4cc8 100644 --- a/datahub-web-react/src/app/entity/shared/components/legacy/DescriptionModal.tsx +++ b/datahub-web-react/src/app/entity/shared/components/legacy/DescriptionModal.tsx @@ -19,16 +19,29 @@ const StyledViewer = styled(Editor)` } `; +const OriginalDocumentation = styled(Form.Item)` + margin-bottom: 0; +`; + type Props = { title: string; description?: string | undefined; original?: string | undefined; + propagatedDescription?: string | undefined; onClose: () => void; onSubmit: (description: string) => void; isAddDesc?: boolean; }; -export default function UpdateDescriptionModal({ title, description, original, onClose, onSubmit, isAddDesc }: Props) { +export default function UpdateDescriptionModal({ + title, + description, + original, + propagatedDescription, + onClose, + onSubmit, + isAddDesc, +}: Props) { const [updatedDesc, setDesc] = useState(description || original || ''); const handleEditorKeyDown = (event: React.KeyboardEvent) => { @@ -72,9 +85,14 @@ export default function UpdateDescriptionModal({ title, description, original, o /> {!isAddDesc && description && original 
&& ( - Original:}> + Original:}> - + + )} + {!isAddDesc && description && propagatedDescription && ( + Propagated:}> + + )} diff --git a/datahub-web-react/src/app/entity/shared/propagation/PropagationDetails.tsx b/datahub-web-react/src/app/entity/shared/propagation/PropagationDetails.tsx new file mode 100644 index 0000000000000..646f47134938c --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/propagation/PropagationDetails.tsx @@ -0,0 +1,109 @@ +import React from 'react'; +import styled from 'styled-components'; +import { Popover } from 'antd'; +import { StringMapEntry } from '../../../../types.generated'; +import PropagationEntityLink from './PropagationEntityLink'; +import { usePropagationDetails } from './utils'; +import { PropagateThunderbolt, PropagateThunderboltFilled } from './PropagationIcon'; + +const PopoverWrapper = styled.div` + display: flex; + flex-direction: column; +`; + +const PopoverTitle = styled.div` + font-weight: bold; + font-size: 14px; + padding: 6px 0px; + color: #eeecfa; +`; + +const PopoverDescription = styled.div` + max-width: 340px; + font-size: 14px; + color: #eeecfa; + display: inline; + padding: 0px 0px 8px 0px; +`; + +const PopoverAttributes = styled.div` + display: flex; +`; + +const PopoverAttribute = styled.div` + margin-right: 12px; + margin-bottom: 4px; +`; + +const PopoverAttributeTitle = styled.div` + font-size: 14px; + color: #eeecfa; + font-weight: bold; + margin: 8px 0px; + overflow: hidden; + text-overflow: ellipsis; +`; + +const PopoverDocumentation = styled.a` + margin-top: 12px; +`; + +interface Props { + sourceDetail?: StringMapEntry[] | null; +} + +export default function PropagationDetails({ sourceDetail }: Props) { + const { + isPropagated, + origin: { entity: originEntity }, + via: { entity: viaEntity }, + } = usePropagationDetails(sourceDetail); + + if (!sourceDetail || !isPropagated) return null; + + const popoverContent = + originEntity || viaEntity ? 
( + + + This description was automatically propagated from an upstream column.{' '} + + Learn more + + + + {originEntity && originEntity.urn !== viaEntity?.urn && ( + + Origin + + + )} + {viaEntity && ( + + Via + + + )} + + + ) : undefined; + + return ( + + + Propagated Description + + } + content={popoverContent} + > + + + ); +} diff --git a/datahub-web-react/src/app/entity/shared/propagation/PropagationEntityLink.tsx b/datahub-web-react/src/app/entity/shared/propagation/PropagationEntityLink.tsx new file mode 100644 index 0000000000000..8c1285dd5808b --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/propagation/PropagationEntityLink.tsx @@ -0,0 +1,56 @@ +import React from 'react'; +import styled from 'styled-components'; +import { Link } from 'react-router-dom'; +import { useEntityRegistry } from '../../../useEntityRegistry'; +import { Entity, EntityType, SchemaFieldEntity } from '../../../../types.generated'; +import { GenericEntityProperties } from '../types'; + +const PreviewImage = styled.img<{ size: number }>` + height: ${(props) => props.size}px; + width: ${(props) => props.size}px; + min-width: ${(props) => props.size}px; + object-fit: contain; + background-color: transparent; + margin: 0px 4px 0px 0px; +`; + +const StyledLink = styled(Link)` + margin-right: 4px; + display: flex; + align-items: center; + overflow: hidden; + text-overflow: ellipsis; + white-space: nowrap; +`; + +interface Props { + entity: Entity; +} + +export default function PropagationEntityLink({ entity }: Props) { + const entityRegistry = useEntityRegistry(); + + const isSchemaField = entity.type === EntityType.SchemaField; + const baseEntity = isSchemaField ? 
(entity as SchemaFieldEntity).parent : entity; + + const logoUrl = (baseEntity as GenericEntityProperties)?.platform?.properties?.logoUrl || ''; + let entityUrl = entityRegistry.getEntityUrl(baseEntity.type, baseEntity.urn); + let entityDisplayName = entityRegistry.getDisplayName(baseEntity.type, baseEntity); + + if (isSchemaField) { + entityUrl = `${entityUrl}/${encodeURIComponent('Columns')}?schemaFilter=${encodeURIComponent( + (entity as SchemaFieldEntity).fieldPath, + )}`; + const schemaFieldName = entityRegistry.getDisplayName(entity.type, entity); + entityDisplayName = `${entityDisplayName}.${schemaFieldName}`; + } + + return ( + <> + + + {entityDisplayName} + + + ); +} diff --git a/datahub-web-react/src/app/entity/shared/propagation/PropagationIcon.tsx b/datahub-web-react/src/app/entity/shared/propagation/PropagationIcon.tsx new file mode 100644 index 0000000000000..01b4570c4ca0d --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/propagation/PropagationIcon.tsx @@ -0,0 +1,22 @@ +import styled from 'styled-components'; +import { ThunderboltFilled } from '@ant-design/icons'; +import { REDESIGN_COLORS } from '../constants'; + +export const PropagateThunderbolt = styled(ThunderboltFilled)` + && { + color: #a7c7fa; + } + font-size: 16px; + &:hover { + color: ${REDESIGN_COLORS.BLUE}; + } + margin-right: 4px; +`; + +export const PropagateThunderboltFilled = styled(ThunderboltFilled)` + && { + color: ${REDESIGN_COLORS.BLUE}; + } + font-size: 16px; + margin-right: 4px; +`; diff --git a/datahub-web-react/src/app/entity/shared/propagation/utils.ts b/datahub-web-react/src/app/entity/shared/propagation/utils.ts new file mode 100644 index 0000000000000..d8b4d4d931f4e --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/propagation/utils.ts @@ -0,0 +1,24 @@ +import { StringMapEntry } from '../../../../types.generated'; +import { useGetEntities } from '../useGetEntities'; + +export function usePropagationDetails(sourceDetail?: StringMapEntry[] | null) { + 
const isPropagated = !!sourceDetail?.find((mapEntry) => mapEntry.key === 'propagated' && mapEntry.value === 'true'); + const originEntityUrn = sourceDetail?.find((mapEntry) => mapEntry.key === 'origin')?.value || ''; + const viaEntityUrn = sourceDetail?.find((mapEntry) => mapEntry.key === 'via')?.value || ''; + + const entities = useGetEntities([originEntityUrn, viaEntityUrn]); + const originEntity = entities.find((e) => e.urn === originEntityUrn); + const viaEntity = entities.find((e) => e.urn === viaEntityUrn); + + return { + isPropagated, + origin: { + urn: originEntityUrn, + entity: originEntity, + }, + via: { + urn: viaEntityUrn, + entity: viaEntity, + }, + }; +} diff --git a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/SchemaFieldDrawer/FieldDescription.tsx b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/SchemaFieldDrawer/FieldDescription.tsx index be95cba3ab4f0..e64a1436b0b1c 100644 --- a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/SchemaFieldDrawer/FieldDescription.tsx +++ b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/SchemaFieldDrawer/FieldDescription.tsx @@ -6,6 +6,8 @@ import styled from 'styled-components'; import { SectionHeader, StyledDivider } from './components'; import UpdateDescriptionModal from '../../../../../components/legacy/DescriptionModal'; import { EditableSchemaFieldInfo, SchemaField, SubResourceType } from '../../../../../../../../types.generated'; +import { getFieldDescriptionDetails } from '../../utils/getFieldDescriptionDetails'; +import PropagationDetails from '../../../../../propagation/PropagationDetails'; import DescriptionSection from '../../../../../containers/profile/sidebar/AboutSection/DescriptionSection'; import { useEntityData, useMutationUrn, useRefetch } from '../../../../../EntityContext'; import { useSchemaRefetch } from '../../SchemaContext'; @@ -13,11 +15,6 @@ import { useUpdateDescriptionMutation } from 
'../../../../../../../../graphql/mu import analytics, { EntityActionType, EventType } from '../../../../../../../analytics'; import SchemaEditableContext from '../../../../../../../shared/SchemaEditableContext'; -const DescriptionWrapper = styled.div` - display: flex; - justify-content: space-between; -`; - const EditIcon = styled(Button)` border: none; box-shadow: none; @@ -25,6 +22,13 @@ const EditIcon = styled(Button)` width: 20px; `; +const DescriptionWrapper = styled.div` + display: flex; + gap: 4px; + align-items: center; + justify-content: space-between; +`; + interface Props { expandedField: SchemaField; editableFieldInfo?: EditableSchemaFieldInfo; @@ -76,7 +80,13 @@ export default function FieldDescription({ expandedField, editableFieldInfo }: P }, }); - const displayedDescription = editableFieldInfo?.description || expandedField.description; + const { schemaFieldEntity, description } = expandedField; + const { displayedDescription, isPropagated, sourceDetail, propagatedDescription } = getFieldDescriptionDetails({ + schemaFieldEntity, + editableFieldInfo, + defaultDescription: description, + }); + const baDescription = expandedField?.schemaFieldEntity?.businessAttributes?.businessAttribute?.businessAttribute?.properties ?.description; @@ -87,12 +97,17 @@ export default function FieldDescription({ expandedField, editableFieldInfo }: P
    Description - + + {isPropagated && } + {!!displayedDescription && ( + + )} +
    {isSchemaEditable && ( )} diff --git a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/utils/getFieldDescriptionDetails.ts b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/utils/getFieldDescriptionDetails.ts new file mode 100644 index 0000000000000..6434baddb77a6 --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/utils/getFieldDescriptionDetails.ts @@ -0,0 +1,25 @@ +import { EditableSchemaFieldInfo, SchemaFieldEntity } from '../../../../../../../types.generated'; + +interface Props { + schemaFieldEntity?: SchemaFieldEntity | null; + editableFieldInfo?: EditableSchemaFieldInfo; + defaultDescription?: string | null; +} + +export function getFieldDescriptionDetails({ schemaFieldEntity, editableFieldInfo, defaultDescription }: Props) { + const documentation = schemaFieldEntity?.documentation?.documentations?.[0]; + const isUsingDocumentationAspect = !editableFieldInfo?.description && !!documentation; + const isPropagated = + isUsingDocumentationAspect && + !!documentation?.attribution?.sourceDetail?.find( + (mapEntry) => mapEntry.key === 'propagated' && mapEntry.value === 'true', + ); + + const displayedDescription = + editableFieldInfo?.description || documentation?.documentation || defaultDescription || ''; + + const sourceDetail = documentation?.attribution?.sourceDetail; + const propagatedDescription = documentation?.documentation; + + return { displayedDescription, isPropagated, sourceDetail, propagatedDescription }; +} diff --git a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/utils/useDescriptionRenderer.tsx b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/utils/useDescriptionRenderer.tsx index 73e6d2ca6e9b3..bb70c2cb49303 100644 --- a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/utils/useDescriptionRenderer.tsx +++ b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/utils/useDescriptionRenderer.tsx @@ -6,6 +6,7 @@ import { useUpdateDescriptionMutation } 
from '../../../../../../../graphql/mutat import { useMutationUrn, useRefetch } from '../../../../EntityContext'; import { useSchemaRefetch } from '../SchemaContext'; import { pathMatchesNewPath } from '../../../../../dataset/profile/schema/utils/utils'; +import { getFieldDescriptionDetails } from './getFieldDescriptionDetails'; export default function useDescriptionRenderer(editableSchemaMetadata: EditableSchemaMetadata | null | undefined) { const urn = useMutationUrn(); @@ -21,10 +22,16 @@ export default function useDescriptionRenderer(editableSchemaMetadata: EditableS }; return (description: string, record: SchemaField, index: number): JSX.Element => { - const relevantEditableFieldInfo = editableSchemaMetadata?.editableSchemaFieldInfo.find( - (candidateEditableFieldInfo) => pathMatchesNewPath(candidateEditableFieldInfo.fieldPath, record.fieldPath), + const editableFieldInfo = editableSchemaMetadata?.editableSchemaFieldInfo.find((candidateEditableFieldInfo) => + pathMatchesNewPath(candidateEditableFieldInfo.fieldPath, record.fieldPath), ); - const displayedDescription = relevantEditableFieldInfo?.description || description; + const { schemaFieldEntity } = record; + const { displayedDescription, isPropagated, sourceDetail } = getFieldDescriptionDetails({ + schemaFieldEntity, + editableFieldInfo, + defaultDescription: description, + }); + const sanitizedDescription = DOMPurify.sanitize(displayedDescription); const original = record.description ? 
DOMPurify.sanitize(record.description) : undefined; const businessAttributeDescription = @@ -43,7 +50,7 @@ export default function useDescriptionRenderer(editableSchemaMetadata: EditableS baExpanded={!!expandedBARows[index]} description={sanitizedDescription} original={original} - isEdited={!!relevantEditableFieldInfo?.description} + isEdited={!!editableFieldInfo?.description} onUpdate={(updatedDescription) => updateDescription({ variables: { @@ -56,6 +63,8 @@ export default function useDescriptionRenderer(editableSchemaMetadata: EditableS }, }).then(refresh) } + isPropagated={isPropagated} + sourceDetail={sourceDetail} isReadOnly /> ); diff --git a/datahub-web-react/src/app/entity/shared/useGetEntities.ts b/datahub-web-react/src/app/entity/shared/useGetEntities.ts new file mode 100644 index 0000000000000..9391bc17d7a8a --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/useGetEntities.ts @@ -0,0 +1,18 @@ +import { useEffect, useState } from 'react'; +import { useGetEntitiesQuery } from '../../../graphql/entity.generated'; +import { Entity } from '../../../types.generated'; + +export function useGetEntities(urns: string[]): Entity[] { + const [verifiedUrns, setVerifiedUrns] = useState([]); + + useEffect(() => { + urns.forEach((urn) => { + if (urn.startsWith('urn:li:') && !verifiedUrns.includes(urn)) { + setVerifiedUrns((prevUrns) => [...prevUrns, urn]); + } + }); + }, [urns, verifiedUrns]); + + const { data } = useGetEntitiesQuery({ variables: { urns: verifiedUrns }, skip: !verifiedUrns.length }); + return (data?.entities || []) as Entity[]; +} diff --git a/datahub-web-react/src/app/lineage/LineageExplorer.tsx b/datahub-web-react/src/app/lineage/LineageExplorer.tsx index 26ffaa26a6ca2..ce0c4bb8f122d 100644 --- a/datahub-web-react/src/app/lineage/LineageExplorer.tsx +++ b/datahub-web-react/src/app/lineage/LineageExplorer.tsx @@ -221,7 +221,9 @@ export default function LineageExplorer({ urn, type }: Props) { Close {selectedEntity.type !== 
EntityType.Restricted && ( - )} diff --git a/datahub-web-react/src/app/settings/SettingsPage.tsx b/datahub-web-react/src/app/settings/SettingsPage.tsx index 24bcd17ca7f9c..e3948349546ef 100644 --- a/datahub-web-react/src/app/settings/SettingsPage.tsx +++ b/datahub-web-react/src/app/settings/SettingsPage.tsx @@ -121,7 +121,7 @@ export const SettingsPage = () => { const showViews = isViewsEnabled || false; const showOwnershipTypes = me && me?.platformPrivileges?.manageOwnershipTypes; const showHomePagePosts = me && me?.platformPrivileges?.manageGlobalAnnouncements && !readOnlyModeEnabled; - const showFeatures = true; // TODO: Add feature flag for this + const showFeatures = me?.platformPrivileges?.manageIngestion; // TODO: Add feature flag for this return ( diff --git a/datahub-web-react/src/app/settings/features/Feature.tsx b/datahub-web-react/src/app/settings/features/Feature.tsx index 2c090aae696f8..13453cf8f7325 100644 --- a/datahub-web-react/src/app/settings/features/Feature.tsx +++ b/datahub-web-react/src/app/settings/features/Feature.tsx @@ -104,6 +104,8 @@ export interface FeatureType { title: string; description: string; isAvailable: boolean; + isDisabled: boolean; + disabledMessage?: string; checked: boolean; onChange?: (checked: boolean) => void; }>; @@ -134,22 +136,6 @@ export const Feature = ({ key, title, description, settings, options, isNew, lea - {settings.map((option) => ( - <> - - - - {option.title} - - - - - - - - ))} {options.map((option, index) => ( <> @@ -165,15 +151,34 @@ export const Feature = ({ key, title, description, settings, options, isNew, lea {option.description} - (option.onChange ? option.onChange(checked) : null)} - disabled={!option.isAvailable} - /> + + (option.onChange ? 
option.onChange(checked) : null)} + disabled={!option.isAvailable || option.isDisabled} + /> + {index !== options.length - 1 && } ))} + {settings.map((option) => ( + <> + + + + {option.title} + Only available on DataHub Cloud + + + + + + + + ))} ); diff --git a/datahub-web-react/src/app/settings/features/Features.tsx b/datahub-web-react/src/app/settings/features/Features.tsx index ee8d7c628c1ef..1d0a0bb469cf8 100644 --- a/datahub-web-react/src/app/settings/features/Features.tsx +++ b/datahub-web-react/src/app/settings/features/Features.tsx @@ -73,18 +73,23 @@ export const Features = () => { setIsColPropagateChecked(checked); updateDocPropagation(checked); }, + isDisabled: false, + disabledMessage: undefined, }, { key: uuidv4(), title: 'Asset Level Propagation', description: 'Propagate new documentation from upstream to downstream assets based on data lineage relationships.', - isAvailable: false, checked: false, + onChange: (_: boolean) => null, + isAvailable: true, + isDisabled: true, + disabledMessage: 'Coming soon!', }, ], isNew: true, - learnMoreLink: 'https://datahubproject.io/docs/automations/doc-propagation', + learnMoreLink: 'https://datahubproject.io/docs/automations/docs-propagation', }, ]; diff --git a/docs-website/sidebars.js b/docs-website/sidebars.js index a3aa54657d067..e6c7992b7006d 100644 --- a/docs-website/sidebars.js +++ b/docs-website/sidebars.js @@ -98,6 +98,23 @@ module.exports = { }, ], }, + { + label: "Automations", + type: "category", + items: [ + { + label: "Documentation Propagation", + type: "doc", + id: "docs/automation/docs-propagation", + }, + { + label: "Snowflake Tag Sync", + type: "doc", + id: "docs/automation/snowflake-tag-propagation", + className: "saasOnly", + }, + ], + }, { label: "Business Attributes", type: "doc", diff --git a/docs/automation/docs-propagation.md b/docs/automation/docs-propagation.md new file mode 100644 index 0000000000000..a637afcde4dca --- /dev/null +++ b/docs/automation/docs-propagation.md @@ -0,0 +1,128 
@@ +# Documentation Propagation Automation + +## Introduction + +Documentation Propagation is an automation automatically propagates column and asset (coming soon) descriptions based on downstream column-level lineage and sibling relationships. +It simplifies metadata management by ensuring consistency and reducing the manual effort required for documenting data assets to aid +in Data Governance & Compliance along with Data Discovery. + +This feature is enabled by default in Open Source DataHub. + +## Capabilities + +### Open Source +- **Column-Level Docs Propagation**: Automatically propagate documentation to downstream columns and sibling columns that are derived or dependent on the source column. +- **(Coming Soon) Asset-Level Docs Propagation**: Propagate descriptions to sibling assets. + +### DataHub Cloud (Acryl) +- Includes all the features of Open Source. +- **Propagation Rollback (Undo)**: Offers the ability to undo any propagation changes, providing a safety net against accidental updates. +- **Historical Backfilling**: Automatically backfills historical data for newly documented columns to maintain consistency across time. + +### Comparison of Features + +| Feature | Open Source | DataHub Cloud | +|---------------------------------|-------------|---------------| +| Column-Level Docs Propagation | ✔️ | ✔️ | +| Asset-Level Docs Propagation | ✔️ | ✔️ | +| Downstream Lineage + Siblings | ✔️ | ✔️ | +| Propagation Rollback (Undo) | ❌ | ✔️ | +| Historical Backfilling | ❌ | ✔️ | + +## Enabling Documentation Propagation + +### In Open Source + +Notice that the user must have the `Manage Ingestion` permission to view and enable the feature. + +1. **Navigate to Settings**: Click on the 'Settings' gear in top navigation bar. + +

    + +

    + +2. **Navigate to Features**: Click on the 'Features' tab in the left-hand navigation bar. + +

    + +

    + +3**Enable Documentation Propagation**: Locate the 'Documentation Propagation' section and toggle the feature to enable it for column-level and asset-level propagation. +Currently, Column Level propagation is supported, with asset level propagation coming soon. + +

    + +

    + + +### In DataHub Cloud + +1. **Navigate to Automations**: Click on 'Govern' > 'Automations' in the navigation bar. + +

    + +

    + +2. **Create An Automation**: Click on 'Create' and select 'Column Documentation Propagation'. + +

    + +

    + +3. **Configure Automation**: Fill in the required fields, such as the name, description, and category. Finally, click 'Save and Run' to start the automation + +

    + +

    + +## Propagating for Existing Assets (DataHub Cloud Only) + +In DataHub Cloud, you can back-fill historical data for existing assets to ensure that all existing column descriptions are propagated to downstreams +when you start the automation. Note that it may take some time to complete the initial back-filling process, depending on the number of assets and the complexity of your lineage. + +To do this, navigate to the Automation you created in Step 3 above, click the 3-dot "more" menu: + +

    + +

    + +and then click "Initialize". + +

    + +

    + +This one-time step will kick off the back-filling process for existing descriptions. If you only want to begin propagating +descriptions going forward, you can skip this step. + +## Rolling Back Propagated Descriptions (DataHub Cloud Only) + +In DataHub Cloud, you can rollback all descriptions that have been propagated historically. + +This feature allows you to "clean up" or "undo" any accidental propagation that may have occurred automatically, in the case +that you no longer want propagated descriptions to be visible. + +To do this, navigate to the Automation you created in Step 3 above, click the 3-dot "More" menu + +

    + +

    + +and then click "Rollback". + +

    + +

    + +This one-time step will remove all propagated tags and glossary terms from Snowflake. To simply stop propagating new tags, you can disable the automation. + +## Viewing Propagated Descriptions + +Once the automation is enabled, you'll be able to recognize propagated descriptions as those with the thunderbolt icon next to them: + +The tooltip will provide additional information, including where the description originated and any intermediate hops that were +used to propagate the description. + +

    + +

    \ No newline at end of file diff --git a/docs/automation/snowflake-tag-propagation.md b/docs/automation/snowflake-tag-propagation.md new file mode 100644 index 0000000000000..bdc80376dfb48 --- /dev/null +++ b/docs/automation/snowflake-tag-propagation.md @@ -0,0 +1,88 @@ + +import FeatureAvailability from '@site/src/components/FeatureAvailability'; + +# Snowflake Tag Propagation Automation + + + +## Introduction + +Snowflake Tag Propagation is an automation that allows you to sync DataHub Glossary Terms and Tags on +both columns and tables back to Snowflake. This automation is available in DataHub Cloud (Acryl) only. + +## Capabilities + +- Automatically Add DataHub Glossary Terms to Snowflake Tables and Columns +- Automatically Add DataHub Tags to Snowflake Tables and Columns +- Automatically Remove DataHub Glossary Terms and Tags from Snowflake Tables and Columns when they are removed in DataHub + +## Enabling Snowflake Tag Sync + +1. **Navigate to Automations**: Click on 'Govern' > 'Automations' in the navigation bar. + +

    + +

    + +2. **Create An Automation**: Click on 'Create' and select 'Snowflake Tag Propagation'. + +

    + +

    + +3. **Configure Automation**: Fill in the required fields to connect to Snowflake, along with the name, description, and category. +Note that you can limit propagation based on specific Tags and Glossary Terms. If none are selected, then ALL Tags or Glossary Terms will be automatically +propagated to Snowflake tables and columns. Finally, click 'Save and Run' to start the automation + +

    + +

    + +## Propagating for Existing Assets + +You can back-fill historical data for existing assets to ensure that all existing column and table Tags and Glossary Terms are propagated to Snowflake. +Note that it may take some time to complete the initial back-filling process, depending on the number of Snowflake assets you have. + +To do so, navigate to the Automation you created in Step 3 above, click the 3-dot "More" menu + +

    + +

    + +and then click "Initialize". + +

    + +

    + +This one-time step will kick off the back-filling process for existing tags and glossary terms. If you only want to begin propagating +tags going forward, you can skip this step. + +## Rolling Back Propagated Tags + +You can roll back all tags and glossary terms that have been propagated historically. + +This feature allows you to "clean up" or "undo" any accidental propagation that may have occurred automatically, in the case +that you no longer want propagated tags to be visible. + +To do this, navigate to the Automation you created in Step 3 above, click the 3-dot "More" menu + +

    + +

    + +and then click "Rollback". + +

    + +

    + +This one-time step will remove all propagated tags and glossary terms from Snowflake. To simply stop propagating new tags, you can disable the automation. + +## Viewing Propagated Tags + +You can view propagated Tags (and corresponding DataHub URNs) inside the Snowflake UI to confirm the automation is working as expected. + +

    + +

    diff --git a/metadata-models/src/main/pegasus/com/linkedin/settings/global/GlobalSettingsInfo.pdl b/metadata-models/src/main/pegasus/com/linkedin/settings/global/GlobalSettingsInfo.pdl index 8d4121b767dc3..6c6f4d0036ce0 100644 --- a/metadata-models/src/main/pegasus/com/linkedin/settings/global/GlobalSettingsInfo.pdl +++ b/metadata-models/src/main/pegasus/com/linkedin/settings/global/GlobalSettingsInfo.pdl @@ -12,16 +12,17 @@ record GlobalSettingsInfo { * SSO integrations between DataHub and identity providers */ sso: optional SsoSettings + /** * Settings related to the Views Feature */ views: optional GlobalViewsSettings + /** * Settings related to the documentation propagation feature */ - docPropagation: DocPropagationFeatureSettings = { + docPropagation: optional DocPropagationFeatureSettings = { "enabled": true "columnPropagationEnabled": true } - } \ No newline at end of file diff --git a/metadata-service/war/src/main/resources/boot/global_settings.json b/metadata-service/war/src/main/resources/boot/global_settings.json index 129783afd6df4..35145b85202a7 100644 --- a/metadata-service/war/src/main/resources/boot/global_settings.json +++ b/metadata-service/war/src/main/resources/boot/global_settings.json @@ -1,4 +1,8 @@ { "views": { + }, + "docPropagation": { + "enabled": true, + "columnPropagationEnabled": true } } \ No newline at end of file diff --git a/smoke-test/tests/cypress/cypress/e2e/actions/docPropagation.js b/smoke-test/tests/cypress/cypress/e2e/actions/docPropagation.js new file mode 100644 index 0000000000000..3d7e14195ab64 --- /dev/null +++ b/smoke-test/tests/cypress/cypress/e2e/actions/docPropagation.js @@ -0,0 +1,27 @@ +const testId = '[data-testid="docPropagationIndicator"]'; + +describe("docPropagation", () => { + it("logs in and navigates to the schema page and checks for docPropagationIndicator", () => { + cy.login(); + cy.visit( + 
"/dataset/urn:li:dataset:(urn:li:dataPlatform:hive,fct_cypress_users_deleted,PROD)/Schema?is_lineage_mode=false&schemaFilter=", + "/dataset/urn:li:dataset:(urn:li:dataPlatform:hive,fct_cypress_users_deleted,PROD)/Schema?is_lineage_mode=false&schemaFilter=", + ); + + // verify that the indicator exists in the table + cy.get(testId).should("exist"); + + // click on the table row + cy.get('[data-row-key="user_id"]').click(); + + // verify that the indicator exists in id="entity-profile-sidebar" + cy.get('[id="entity-profile-sidebar"]') + .then(($sidebar) => { + if ($sidebar.find(testId).length) return testId; + return null; + }) + .then((selector) => { + cy.get(selector).should("exist"); + }); + }); +}); diff --git a/smoke-test/tests/cypress/data.json b/smoke-test/tests/cypress/data.json index 5253b7a33b085..ce61f7c83a038 100644 --- a/smoke-test/tests/cypress/data.json +++ b/smoke-test/tests/cypress/data.json @@ -96,7 +96,11 @@ }, "nativeDataType": "varchar(100)", "globalTags": { - "tags": [{ "tag": "urn:li:tag:NeedsDocumentation" }] + "tags": [ + { + "tag": "urn:li:tag:NeedsDocumentation" + } + ] }, "recursive": false }, @@ -137,7 +141,11 @@ }, { "com.linkedin.pegasus2avro.common.GlobalTags": { - "tags": [{ "tag": "urn:li:tag:Cypress" }] + "tags": [ + { + "tag": "urn:li:tag:Cypress" + } + ] } } ] @@ -246,7 +254,13 @@ "editableSchemaFieldInfo": [ { "fieldPath": "shipment_info", - "globalTags": { "tags": [{ "tag": "urn:li:tag:Legacy" }] }, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:Legacy" + } + ] + }, "glossaryTerms": { "terms": [ { @@ -401,8 +415,12 @@ { "com.linkedin.pegasus2avro.common.GlobalTags": { "tags": [ - { "tag": "urn:li:tag:Cypress" }, - { "tag": "urn:li:tag:Cypress2" } + { + "tag": "urn:li:tag:Cypress" + }, + { + "tag": "urn:li:tag:Cypress2" + } ] } } @@ -542,7 +560,11 @@ }, { "com.linkedin.pegasus2avro.common.GlobalTags": { - "tags": [{ "tag": "urn:li:tag:Cypress" }] + "tags": [ + { + "tag": "urn:li:tag:Cypress" + } + ] } } ] @@ -718,7 
+740,11 @@ }, { "com.linkedin.pegasus2avro.common.GlobalTags": { - "tags": [{ "tag": "urn:li:tag:Cypress" }] + "tags": [ + { + "tag": "urn:li:tag:Cypress" + } + ] } } ] @@ -1011,7 +1037,11 @@ }, { "com.linkedin.pegasus2avro.common.GlobalTags": { - "tags": [{ "tag": "urn:li:tag:Cypress" }] + "tags": [ + { + "tag": "urn:li:tag:Cypress" + } + ] } } ] @@ -1229,7 +1259,11 @@ }, { "com.linkedin.pegasus2avro.common.GlobalTags": { - "tags": [{ "tag": "urn:li:tag:Cypress" }] + "tags": [ + { + "tag": "urn:li:tag:Cypress" + } + ] } } ] @@ -1279,7 +1313,11 @@ }, { "com.linkedin.pegasus2avro.common.GlobalTags": { - "tags": [{ "tag": "urn:li:tag:Cypress" }] + "tags": [ + { + "tag": "urn:li:tag:Cypress" + } + ] } } ] @@ -1332,7 +1370,11 @@ }, { "com.linkedin.pegasus2avro.common.GlobalTags": { - "tags": [{ "tag": "urn:li:tag:Cypress" }] + "tags": [ + { + "tag": "urn:li:tag:Cypress" + } + ] } } ] @@ -1371,7 +1413,11 @@ }, { "com.linkedin.pegasus2avro.common.GlobalTags": { - "tags": [{ "tag": "urn:li:tag:Cypress" }] + "tags": [ + { + "tag": "urn:li:tag:Cypress" + } + ] } } ] @@ -1413,7 +1459,11 @@ }, { "com.linkedin.pegasus2avro.common.GlobalTags": { - "tags": [{ "tag": "urn:li:tag:Cypress" }] + "tags": [ + { + "tag": "urn:li:tag:Cypress" + } + ] } } ] @@ -1459,7 +1509,11 @@ }, { "com.linkedin.pegasus2avro.common.GlobalTags": { - "tags": [{ "tag": "urn:li:tag:Cypress" }] + "tags": [ + { + "tag": "urn:li:tag:Cypress" + } + ] } } ] @@ -1521,7 +1575,11 @@ }, { "com.linkedin.pegasus2avro.common.GlobalTags": { - "tags": [{ "tag": "urn:li:tag:Cypress" }] + "tags": [ + { + "tag": "urn:li:tag:Cypress" + } + ] } } ] @@ -1758,7 +1816,11 @@ }, { "com.linkedin.pegasus2avro.common.GlobalTags": { - "tags": [{ "tag": "urn:li:tag:CypressFeatureTag" }] + "tags": [ + { + "tag": "urn:li:tag:CypressFeatureTag" + } + ] } } ] @@ -1785,7 +1847,11 @@ }, { "com.linkedin.pegasus2avro.common.GlobalTags": { - "tags": [{ "tag": "urn:li:tag:CypressPrimaryKeyTag" }] + "tags": [ + { + "tag": 
"urn:li:tag:CypressPrimaryKeyTag" + } + ] } } ] @@ -2137,5 +2203,17 @@ "contentType": "application/json" }, "systemMetadata": null + }, + { + "auditHeader": null, + "entityType": "schemaField", + "entityUrn": "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,fct_cypress_users_deleted,PROD),user_id)", + "changeType": "UPSERT", + "aspectName": "documentation", + "aspect": { + "value": "{\"documentations\":[{\"attribution\":{\"actor\":\"urn:li:corpuser:__datahub_system\",\"source\":\"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,fct_cypress_users_created,PROD),user_id)\",\"sourceDetail\":{\"actor\":\"urn:li:corpuser:shirshanka@acryl.io\",\"origin\":\"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,fct_cypress_users_created,PROD),user_id)\",\"propagated\":\"true\"},\"time\":1721422917808},\"documentation\":\"Unique identifier of user profile.\"}]}", + "contentType": "application/json" + }, + "systemMetadata": null } ] From a2ed732c15e3710306b04559bee0d02a5e208d2c Mon Sep 17 00:00:00 2001 From: Pinaki Bhattacharjee Date: Tue, 20 Aug 2024 14:43:26 +0530 Subject: [PATCH 64/72] fix(ui): checks truthy value for last ingested (#10840) Co-authored-by: Harshal Sheth --- .../entity/shared/containers/profile/sidebar/EntitySidebar.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/EntitySidebar.tsx b/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/EntitySidebar.tsx index a8d1dceb71ec9..b5e3b221c736d 100644 --- a/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/EntitySidebar.tsx +++ b/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/EntitySidebar.tsx @@ -46,7 +46,7 @@ export const EntitySidebar = ({ sidebarSections, topSection }: Props) => { return ( <> {topSection && } - {entityData?.lastIngested && ( + {!!entityData?.lastIngested && ( From a9ef48ea85e0277955e4e2eb36dc3b1013051e05 Mon Sep 17 00:00:00 
2001 From: ksrinath Date: Tue, 20 Aug 2024 15:06:49 +0530 Subject: [PATCH 65/72] docs(scim): document okta integration with datahub for scim provisioning (#11120) --- docs-website/sidebars.js | 7 +- ...iguring-identity-provisioning-with-okta.md | 119 ++++++++++++++++++ 2 files changed, 125 insertions(+), 1 deletion(-) create mode 100644 docs/managed-datahub/configuring-identity-provisioning-with-okta.md diff --git a/docs-website/sidebars.js b/docs-website/sidebars.js index e6c7992b7006d..1f9c0a4d79a9d 100644 --- a/docs-website/sidebars.js +++ b/docs-website/sidebars.js @@ -544,7 +544,12 @@ module.exports = { "Advanced Guides": [ "docs/how/delete-metadata", "docs/how/configuring-authorization-with-apache-ranger", - "docs/managed-datahub/configuring-identity-provisioning-with-ms-entra", + { + "SCIM Provisioning": [ + "docs/managed-datahub/configuring-identity-provisioning-with-ms-entra", + "docs/managed-datahub/configuring-identity-provisioning-with-okta", + ], + }, "docs/how/backup-datahub", "docs/how/restore-indices", "docs/advanced/db-retention", diff --git a/docs/managed-datahub/configuring-identity-provisioning-with-okta.md b/docs/managed-datahub/configuring-identity-provisioning-with-okta.md new file mode 100644 index 0000000000000..a7939b514166d --- /dev/null +++ b/docs/managed-datahub/configuring-identity-provisioning-with-okta.md @@ -0,0 +1,119 @@ +--- +title: "SCIM Integration: Okta and DataHub" +hide_title: true +--- +import FeatureAvailability from '@site/src/components/FeatureAvailability'; + +## SCIM Integration: Okta and DataHub + + +## Overview +This document covers the steps required to enable SCIM provisioning from Okta to DataHub. + +This document assumes you are using OIDC for SSO with DataHub. +Since Okta doesn't currently support SCIM with OIDC, you would need to create an additional SWA-app-integration to enable SCIM provisioning. 
+ +On completing the steps in this guide, Okta will start automatically pushing changes to users/groups of this SWA-app-integration to DataHub, thereby simplifying provisioning of users/groups in DataHub. + +### Why SCIM provisioning? +Let us look at an example of the flows enabled through SCIM provisioning. + +Consider the following configuration in Okta +- A group `governance-team` +- And it has two members `john` and `sid` +- And the group has role `Reader` + +Through SCIM provisioning, the following are enabled: +* If the `governance-team` group is assigned to the DataHub app in Okta with the role `Reader`, Okta will create the users `john` and `sid` in DataHub with the `Reader` role. +* If you remove `john` from group `governance-team` then `john` would automatically get deactivated in DataHub. +* If you remove `sid` from the DataHub app in Okta, then `sid` would automatically get deactivated in DataHub. + +Generally, any user assignment/unassignment to the app in Okta - directly or through groups - are automatically reflected in the DataHub application. + +This guide also covers other variations such as how to assign a role to a user directly, and how group-information can be pushed to DataHub. + +> Only Admin, Editor and Reader roles are supported in DataHub. These roles are preconfigured/created on DataHub. + +## Configuring SCIM provisioning + +### 1. Create an SWA app integration +a). Create a new [SWA app integration](https://help.okta.com/en-us/content/topics/apps/apps_app_integration_wizard_swa.htm), called say, `DataHub-SCIM-SWA`. + +Note: this app-integration will only be used for SCIM provisioning. You would continue to use the existing OIDC-app-integration for SSO. + +b). In the `General` tab of the `DataHub-SCIM-SWA` application, check the `Enable SCIM provisioning` option + +

    + +

    + +You may also want to configure the other selections as shown in the above image, so that this application isn't visible to your users. + +### 2. Configure SCIM + +a). Generate a personal access token from [DataHub](../../docs/authentication/personal-access-tokens.md#creating-personal-access-tokens). + +b). In the `Provisioning` tab, configure the DataHub-SCIM endpoint as shown in the below image: + +

    + +

    + +**Note**: Set the value of the `Bearer` field to the personal access token obtained in step (a) above. + +c). Configure the `To App` section as shown below: + +

    + +

    + +**Note**: We are not pushing passwords to DataHub over SCIM, since we are assuming SSO with OIDC as mentioned earlier. + +### 3. Add a custom attribute to represent roles +a). Navigate to `Directory` -> `Profile Editor`, and select the user-profile of this new application. + +

    + +

    + +b). Click `Add Attribute` and define a new attribute that will be used to specify the role of a DataHub user. + +

    + +

    + +* Set value of `External name` to `roles.^[primary==true].value` +* Set value of `External namespace` to `urn:ietf:params:scim:schemas:core:2.0:User` +* Define an enumerated list of values as shown in the above image +* Mark this attribute as required +* Select `Attribute type` as `Personal` + +c). Add a similar attribute for groups i.e. repeat step (b) above, but select `Attribute Type` as `Group`. (Specify the variable name as, say, `dataHubGroupRoles`.) + +### 4. Assign users & groups to the app +Assign users and groups to the app from the `Assignments` tab: + +

    + +

    + +While assigning a user/group, choose an appropriate value for the dataHubRoles/dataHubGroupRoles attribute. +Note that when a role is selected for a group, the corresponding role is pushed for all users of that group in DataHub. + +### The provisioning setup is now complete +Once the above steps are completed, user assignments/unassignments to the DataHub-SCIM-SWA app in Okta will get reflected in DataHub automatically. + +> #### A note on user deletion +>Note that when users are unassigned or deactivated in Okta, the corresponding users in DataHub are also deactivated (marked "suspended"). +But when a user is *deleted* in Okta, the corresponding user in DataHub does *not* get deleted. +Refer the Okta documentation on [Delete (Deprovision)](https://developer.okta.com/docs/concepts/scim/#delete-deprovision) for more details. + +### 5. (Optional): Configure push groups +When groups are assigned to the app, Okta pushes the group-members as users to DataHub, but the group itself isn't pushed. +To push group information to DataHub, configure the `Push Groups` tab accordingly as shown below: + +

    + +

    + +Refer to the Okta [Group Push](https://help.okta.com/en-us/content/topics/users-groups-profiles/app-assignments-group-push.htm) documentation for more details. \ No newline at end of file From 8f7642b91078acbe69a3745f35e9b546516cd337 Mon Sep 17 00:00:00 2001 From: skrydal Date: Tue, 20 Aug 2024 13:24:16 +0200 Subject: [PATCH 66/72] fix(ingestion/tableau): Tableau field type parsing (#11202) --- .../ingestion/source/tableau_common.py | 7 ++++- .../tests/unit/test_tableau_source.py | 31 ++++++++++++++++++- 2 files changed, 36 insertions(+), 2 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/tableau_common.py b/metadata-ingestion/src/datahub/ingestion/source/tableau_common.py index 93f2a0ef2f6a8..f3a9c4a5aa201 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/tableau_common.py +++ b/metadata-ingestion/src/datahub/ingestion/source/tableau_common.py @@ -502,7 +502,12 @@ def get_tags_from_params(params: List[str] = []) -> GlobalTagsClass: def tableau_field_to_schema_field(field, ingest_tags): - nativeDataType = field.get("dataType", "UNKNOWN") + # The check here makes sure that even if 'dataType' key exists in the 'field' dictionary but has value None, + # it will be set as "UNKNOWN" (nativeDataType field can not be None in the SchemaField). 
+ # Hence, field.get("dataType", "UNKNOWN") is not enough + nativeDataType = field.get("dataType") + if nativeDataType is None: + nativeDataType = "UNKNOWN" TypeClass = FIELD_TYPE_MAPPING.get(nativeDataType, NullTypeClass) schema_field = SchemaField( diff --git a/metadata-ingestion/tests/unit/test_tableau_source.py b/metadata-ingestion/tests/unit/test_tableau_source.py index f5410b161ed70..1cd0557d085f1 100644 --- a/metadata-ingestion/tests/unit/test_tableau_source.py +++ b/metadata-ingestion/tests/unit/test_tableau_source.py @@ -1,8 +1,37 @@ +from typing import Any, Dict + import pytest import datahub.ingestion.source.tableau_constant as c from datahub.ingestion.source.tableau import TableauSiteSource -from datahub.ingestion.source.tableau_common import get_filter_pages, make_filter +from datahub.ingestion.source.tableau_common import ( + get_filter_pages, + make_filter, + tableau_field_to_schema_field, +) +from datahub.metadata.com.linkedin.pegasus2avro.schema import SchemaField + + +def test_tablea_source_handles_none_nativedatatype(): + field: Dict[str, Any] = { + "__typename": "CalculatedField", + "id": "abcd", + "name": "Test Field", + "description": None, + "isHidden": False, + "folderName": None, + "upstreamFields": [], + "upstreamColumns": [], + "role": None, + "dataType": None, + "defaultFormat": "s", + "aggregation": None, + "formula": "a/b + d", + } + schema_field: SchemaField = tableau_field_to_schema_field( + field=field, ingest_tags=False + ) + assert schema_field.nativeDataType == "UNKNOWN" def test_tableau_source_unescapes_lt(): From f15a7feda3eda88451c39d902d6b7fa81e48f3d2 Mon Sep 17 00:00:00 2001 From: Filipe Caetano - OVO <150357006+filipe-caetano-ovo@users.noreply.github.com> Date: Tue, 20 Aug 2024 14:11:38 +0100 Subject: [PATCH 67/72] feat(analytics): Add page number to SearchResultClickEvent analytics event (#11151) --- datahub-web-react/src/app/analytics/event.ts | 1 + datahub-web-react/src/app/search/SearchResultList.tsx | 3 +++ 
datahub-web-react/src/app/search/SearchResults.tsx | 1 + docs/developers.md | 2 +- 4 files changed, 6 insertions(+), 1 deletion(-) diff --git a/datahub-web-react/src/app/analytics/event.ts b/datahub-web-react/src/app/analytics/event.ts index d63b731c72042..c3a57830b8c50 100644 --- a/datahub-web-react/src/app/analytics/event.ts +++ b/datahub-web-react/src/app/analytics/event.ts @@ -191,6 +191,7 @@ export interface SearchResultClickEvent extends BaseEvent { entityTypeFilter?: EntityType; index: number; total: number; + pageNumber: number; } export interface SearchFiltersClearAllEvent extends BaseEvent { diff --git a/datahub-web-react/src/app/search/SearchResultList.tsx b/datahub-web-react/src/app/search/SearchResultList.tsx index d85c3674cbd43..bc0efcfa3f47e 100644 --- a/datahub-web-react/src/app/search/SearchResultList.tsx +++ b/datahub-web-react/src/app/search/SearchResultList.tsx @@ -62,6 +62,7 @@ type Props = { selectedEntities: EntityAndType[]; setSelectedEntities: (entities: EntityAndType[]) => any; suggestions: SearchSuggestion[]; + pageNumber: number; }; export const SearchResultList = ({ @@ -73,6 +74,7 @@ export const SearchResultList = ({ selectedEntities, setSelectedEntities, suggestions, + pageNumber, }: Props) => { const entityRegistry = useEntityRegistry(); const selectedEntityUrns = selectedEntities.map((entity) => entity.urn); @@ -86,6 +88,7 @@ export const SearchResultList = ({ entityType: result.entity.type, index, total: totalResultCount, + pageNumber, }); }; diff --git a/datahub-web-react/src/app/search/SearchResults.tsx b/datahub-web-react/src/app/search/SearchResults.tsx index dafe9a20b6ab7..e96e8fd528b9e 100644 --- a/datahub-web-react/src/app/search/SearchResults.tsx +++ b/datahub-web-react/src/app/search/SearchResults.tsx @@ -264,6 +264,7 @@ export const SearchResults = ({ selectedEntities={selectedEntities} setSelectedEntities={setSelectedEntities} suggestions={suggestions} + pageNumber={page} /> {totalResults > 0 && ( diff --git 
a/docs/developers.md b/docs/developers.md index 0c9d7bee3d79f..401169490dd4b 100644 --- a/docs/developers.md +++ b/docs/developers.md @@ -46,7 +46,7 @@ Use [gradle wrapper](https://docs.gradle.org/current/userguide/gradle_wrapper.ht ./gradlew build ``` -Note that the above will also run run tests and a number of validations which makes the process considerably slower. +Note that the above will also run tests and a number of validations which makes the process considerably slower. We suggest partially compiling DataHub according to your needs: From 8e35eb953e190d15f63d60e0692df8463cb581aa Mon Sep 17 00:00:00 2001 From: Chris Collins Date: Tue, 20 Aug 2024 09:30:05 -0400 Subject: [PATCH 68/72] fix(graphql) Fix NPE on form actor assignemnt (#11203) --- .../datahub/graphql/GmsGraphQLEngine.java | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java index db9bf304a1085..b470da3c7c74a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java @@ -2728,9 +2728,11 @@ private void configureFormResolvers(final RuntimeWiring.Builder builder) { corpUserType, (env) -> { final FormActorAssignment actors = env.getSource(); - return actors.getUsers().stream() - .map(CorpUser::getUrn) - .collect(Collectors.toList()); + return actors.getUsers() != null + ? 
actors.getUsers().stream() + .map(CorpUser::getUrn) + .collect(Collectors.toList()) + : null; })) .dataFetcher( "groups", @@ -2738,9 +2740,11 @@ private void configureFormResolvers(final RuntimeWiring.Builder builder) { corpGroupType, (env) -> { final FormActorAssignment actors = env.getSource(); - return actors.getGroups().stream() - .map(CorpGroup::getUrn) - .collect(Collectors.toList()); + return actors.getGroups() != null + ? actors.getGroups().stream() + .map(CorpGroup::getUrn) + .collect(Collectors.toList()) + : null; })) .dataFetcher("isAssignedToMe", new IsFormAssignedToMeResolver(groupService))); } From b46a9632cbe893d482038c9cfe65c14095660409 Mon Sep 17 00:00:00 2001 From: skrydal Date: Tue, 20 Aug 2024 16:07:30 +0200 Subject: [PATCH 69/72] fix(tests): Bump databricks-sdk dependency to `>=0.30.0` (#11209) --- metadata-ingestion/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index 03b44401dd244..7fb83fb6a8325 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -298,7 +298,7 @@ databricks = { # 0.1.11 appears to have authentication issues with azure databricks - "databricks-sdk>=0.9.0", + "databricks-sdk>=0.30.0", "pyspark~=3.3.0", "requests", # Version 2.4.0 includes sqlalchemy dialect, 2.8.0 includes some bug fixes From bc79aece3c0d1b744dbcae711930ab8acb1980b0 Mon Sep 17 00:00:00 2001 From: Pinaki Bhattacharjee Date: Tue, 20 Aug 2024 20:09:43 +0530 Subject: [PATCH 70/72] chore(vulnerability): Log Injection (High) (#11131) --- .../metadata/systemmetadata/ESSystemMetadataDAO.java | 5 +---- .../datahub/auth/authentication/AuthServiceController.java | 6 +++--- .../openapi/schema/registry/SchemaRegistryController.java | 6 +++++- 3 files changed, 9 insertions(+), 8 deletions(-) diff --git a/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/ESSystemMetadataDAO.java 
b/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/ESSystemMetadataDAO.java index cf1674ac00480..a5c2fb04b5ce3 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/ESSystemMetadataDAO.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/ESSystemMetadataDAO.java @@ -54,10 +54,7 @@ public Optional getTaskStatus(@Nonnull String nodeId, long task try { return client.tasks().get(taskRequest, RequestOptions.DEFAULT); } catch (IOException e) { - log.error( - String.format( - "ERROR: Failed to get task status for %s:%d. See stacktrace for a more detailed error:", - nodeId, taskId)); + log.error("ERROR: Failed to get task status: ", e); e.printStackTrace(); } return Optional.empty(); diff --git a/metadata-service/auth-servlet-impl/src/main/java/com/datahub/auth/authentication/AuthServiceController.java b/metadata-service/auth-servlet-impl/src/main/java/com/datahub/auth/authentication/AuthServiceController.java index 71eaca71a3641..de2582af00a93 100644 --- a/metadata-service/auth-servlet-impl/src/main/java/com/datahub/auth/authentication/AuthServiceController.java +++ b/metadata-service/auth-servlet-impl/src/main/java/com/datahub/auth/authentication/AuthServiceController.java @@ -123,7 +123,7 @@ CompletableFuture> generateSessionTokenForUser( try { bodyJson = mapper.readTree(jsonStr); } catch (JsonProcessingException e) { - log.error("Failed to parse json while attempting to generate session token {}", jsonStr, e); + log.error("Failed to parse json while attempting to generate session token ", e); return CompletableFuture.completedFuture(new ResponseEntity<>(HttpStatus.BAD_REQUEST)); } if (bodyJson == null) { @@ -238,7 +238,7 @@ CompletableFuture> signUp(final HttpEntity httpEn try { Urn inviteTokenUrn = _inviteTokenService.getInviteTokenUrn(inviteTokenString); if (!_inviteTokenService.isInviteTokenValid(systemOperationContext, inviteTokenUrn)) { - log.error("Invalid invite token {}", inviteTokenString); + 
log.error("Invalid invite token"); return new ResponseEntity<>(HttpStatus.BAD_REQUEST); } @@ -386,7 +386,7 @@ CompletableFuture> track(final HttpEntity httpEnt try { bodyJson = mapper.readTree(jsonStr); } catch (JsonProcessingException e) { - log.error("Failed to parse json while attempting to track analytics event {}", jsonStr); + log.error("Failed to parse json while attempting to track analytics event", e); return CompletableFuture.completedFuture(new ResponseEntity<>(HttpStatus.BAD_REQUEST)); } if (bodyJson == null) { diff --git a/metadata-service/schema-registry-servlet/src/main/java/io/datahubproject/openapi/schema/registry/SchemaRegistryController.java b/metadata-service/schema-registry-servlet/src/main/java/io/datahubproject/openapi/schema/registry/SchemaRegistryController.java index d73b353f38ae7..09043c6dd5e87 100644 --- a/metadata-service/schema-registry-servlet/src/main/java/io/datahubproject/openapi/schema/registry/SchemaRegistryController.java +++ b/metadata-service/schema-registry-servlet/src/main/java/io/datahubproject/openapi/schema/registry/SchemaRegistryController.java @@ -307,7 +307,11 @@ public ResponseEntity register( }) .orElseGet( () -> { - log.error("Couldn't find topic with name {}.", topicName); + if (topicName.matches("^[a-zA-Z0-9._-]+$")) { + log.error("Couldn't find topic with name {}.", topicName); + } else { + log.error("Couldn't find topic (Malformed topic name)"); + } return new ResponseEntity<>(HttpStatus.NOT_FOUND); }); } From 627c5abfd6e0ffd0fea196870cbbcf1b61f74625 Mon Sep 17 00:00:00 2001 From: sid-acryl <155424659+sid-acryl@users.noreply.github.com> Date: Wed, 21 Aug 2024 00:12:00 +0530 Subject: [PATCH 71/72] feat(ingestion/bigquery): Add ability to filter GCP project ingestion based on project labels (#11169) Co-authored-by: Alice Naghshineh Co-authored-by: Alice Naghshineh <45885699+anaghshineh@users.noreply.github.com> Co-authored-by: Tamas Nemeth Co-authored-by: david-leifker 
<114954101+david-leifker@users.noreply.github.com> --- docs/quick-ingestion-guides/bigquery/setup.md | 4 +- metadata-ingestion/setup.py | 1 + .../ingestion/source/bigquery_v2/bigquery.py | 32 +- .../source/bigquery_v2/bigquery_config.py | 47 +- .../source/bigquery_v2/bigquery_report.py | 1 + .../source/bigquery_v2/bigquery_schema.py | 26 +- .../bigquery_v2/bigquery_test_connection.py | 4 +- .../ingestion/source/bigquery_v2/lineage.py | 4 +- .../bigquery_project_label_mcp_golden.json | 452 ++++++++++++++++++ .../integration/bigquery_v2/test_bigquery.py | 150 +++++- .../tests/unit/test_bigquery_source.py | 128 ++++- 11 files changed, 786 insertions(+), 63 deletions(-) create mode 100644 metadata-ingestion/tests/integration/bigquery_v2/bigquery_project_label_mcp_golden.json diff --git a/docs/quick-ingestion-guides/bigquery/setup.md b/docs/quick-ingestion-guides/bigquery/setup.md index 10351d6572c53..96850f2deb68e 100644 --- a/docs/quick-ingestion-guides/bigquery/setup.md +++ b/docs/quick-ingestion-guides/bigquery/setup.md @@ -38,7 +38,9 @@ Please refer to the BigQuery [Permissions](https://cloud.google.com/iam/docs/per You can always add/remove roles to Service Accounts later on. Please refer to the BigQuery [Manage access to projects, folders, and organizations](https://cloud.google.com/iam/docs/granting-changing-revoking-access) guide for more details. ::: -3. Create and download a [Service Account Key](https://cloud.google.com/iam/docs/creating-managing-service-account-keys). We will use this to set up authentication within DataHub. +3. To filter projects based on the `project_labels` configuration, first visit [cloudresourcemanager.googleapis.com](https://console.developers.google.com/apis/api/cloudresourcemanager.googleapis.com/overview) and enable the `Cloud Resource Manager API` + +4. Create and download a [Service Account Key](https://cloud.google.com/iam/docs/creating-managing-service-account-keys). We will use this to set up authentication within DataHub. 
The key file looks like this: diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index 7fb83fb6a8325..d59545694c324 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -181,6 +181,7 @@ "google-cloud-logging<=3.5.0", "google-cloud-bigquery", "google-cloud-datacatalog>=1.5.0", + "google-cloud-resource-manager", "more-itertools>=8.12.0", "sqlalchemy-bigquery>=1.4.1", } diff --git a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery.py b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery.py index 7a96b2f0643ab..0d73c9ad02897 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery.py +++ b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery.py @@ -113,8 +113,9 @@ def __init__(self, ctx: PipelineContext, config: BigQueryV2Config): BigqueryTableIdentifier._BQ_SHARDED_TABLE_SUFFIX = "" self.bigquery_data_dictionary = BigQuerySchemaApi( - self.report.schema_api_perf, - self.config.get_bigquery_client(), + report=BigQueryV2Report().schema_api_perf, + projects_client=config.get_projects_client(), + client=config.get_bigquery_client(), ) if self.config.extract_policy_tags_from_catalog: self.bigquery_data_dictionary.datacatalog_client = ( @@ -257,14 +258,37 @@ def get_workunits_internal(self) -> Iterable[MetadataWorkUnit]: def _get_projects(self) -> List[BigqueryProject]: logger.info("Getting projects") + if self.config.project_ids or self.config.project_id: project_ids = self.config.project_ids or [self.config.project_id] # type: ignore return [ BigqueryProject(id=project_id, name=project_id) for project_id in project_ids ] - else: - return list(self._query_project_list()) + + if self.config.project_labels: + return list(self._query_project_list_from_labels()) + + return list(self._query_project_list()) + + def _query_project_list_from_labels(self) -> Iterable[BigqueryProject]: + projects = self.bigquery_data_dictionary.get_projects_with_labels( + 
self.config.project_labels + ) + + if not projects: # Report failure on exception and if empty list is returned + self.report.report_failure( + "metadata-extraction", + "Get projects didn't return any project with any of the specified label(s). " + "Maybe resourcemanager.projects.list permission is missing for the service account. " + "You can assign predefined roles/bigquery.metadataViewer role to your service account.", + ) + + for project in projects: + if self.config.project_id_pattern.allowed(project.id): + yield project + else: + self.report.report_dropped(project.id) def _query_project_list(self) -> Iterable[BigqueryProject]: try: diff --git a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_config.py b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_config.py index fe961dbd780f6..af9256d8877f5 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_config.py +++ b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_config.py @@ -3,7 +3,7 @@ from datetime import timedelta from typing import Any, Dict, List, Optional, Union -from google.cloud import bigquery, datacatalog_v1 +from google.cloud import bigquery, datacatalog_v1, resourcemanager_v3 from google.cloud.logging_v2.client import Client as GCPLoggingClient from pydantic import Field, PositiveInt, PrivateAttr, root_validator, validator @@ -34,12 +34,16 @@ class BigQueryUsageConfig(BaseUsageConfig): max_query_duration: timedelta = Field( default=timedelta(minutes=15), - description="Correction to pad start_time and end_time with. For handling the case where the read happens within our time range but the query completion event is delayed and happens after the configured end time.", + description="Correction to pad start_time and end_time with. 
For handling the case where the read happens " + "within our time range but the query completion event is delayed and happens after the configured" + " end time.", ) apply_view_usage_to_tables: bool = Field( default=False, - description="Whether to apply view's usage to its base tables. If set to False, uses sql parser and applies usage to views / tables mentioned in the query. If set to True, usage is applied to base tables only.", + description="Whether to apply view's usage to its base tables. If set to False, uses sql parser and applies " + "usage to views / tables mentioned in the query. If set to True, usage is applied to base tables " + "only.", ) @@ -74,6 +78,9 @@ def get_bigquery_client(self) -> bigquery.Client: client_options = self.extra_client_options return bigquery.Client(self.project_on_behalf, **client_options) + def get_projects_client(self) -> resourcemanager_v3.ProjectsClient: + return resourcemanager_v3.ProjectsClient() + def get_policy_tag_manager_client(self) -> datacatalog_v1.PolicyTagManagerClient: return datacatalog_v1.PolicyTagManagerClient() @@ -143,12 +150,14 @@ class BigQueryV2Config( dataset_pattern: AllowDenyPattern = Field( default=AllowDenyPattern.allow_all(), - description="Regex patterns for dataset to filter in ingestion. Specify regex to only match the schema name. e.g. to match all tables in schema analytics, use the regex 'analytics'", + description="Regex patterns for dataset to filter in ingestion. Specify regex to only match the schema name. " + "e.g. 
to match all tables in schema analytics, use the regex 'analytics'", ) match_fully_qualified_names: bool = Field( default=True, - description="[deprecated] Whether `dataset_pattern` is matched against fully qualified dataset name `.`.", + description="[deprecated] Whether `dataset_pattern` is matched against fully qualified dataset name " + "`.`.", ) include_external_url: bool = Field( @@ -169,7 +178,9 @@ class BigQueryV2Config( table_snapshot_pattern: AllowDenyPattern = Field( default=AllowDenyPattern.allow_all(), - description="Regex patterns for table snapshots to filter in ingestion. Specify regex to match the entire snapshot name in database.schema.snapshot format. e.g. to match all snapshots starting with customer in Customer database and public schema, use the regex 'Customer.public.customer.*'", + description="Regex patterns for table snapshots to filter in ingestion. Specify regex to match the entire " + "snapshot name in database.schema.snapshot format. e.g. to match all snapshots starting with " + "customer in Customer database and public schema, use the regex 'Customer.public.customer.*'", ) debug_include_full_payloads: bool = Field( @@ -180,17 +191,22 @@ class BigQueryV2Config( number_of_datasets_process_in_batch: int = Field( hidden_from_docs=True, default=10000, - description="Number of table queried in batch when getting metadata. This is a low level config property which should be touched with care.", + description="Number of table queried in batch when getting metadata. This is a low level config property " + "which should be touched with care.", ) number_of_datasets_process_in_batch_if_profiling_enabled: int = Field( default=1000, - description="Number of partitioned table queried in batch when getting metadata. This is a low level config property which should be touched with care. 
This restriction is needed because we query partitions system view which throws error if we try to touch too many tables.", + description="Number of partitioned table queried in batch when getting metadata. This is a low level config " + "property which should be touched with care. This restriction is needed because we query " + "partitions system view which throws error if we try to touch too many tables.", ) use_tables_list_query_v2: bool = Field( default=False, - description="List tables using an improved query that extracts partitions and last modified timestamps more accurately. Requires the ability to read table data. Automatically enabled when profiling is enabled.", + description="List tables using an improved query that extracts partitions and last modified timestamps more " + "accurately. Requires the ability to read table data. Automatically enabled when profiling is " + "enabled.", ) @property @@ -199,7 +215,9 @@ def have_table_data_read_permission(self) -> bool: column_limit: int = Field( default=300, - description="Maximum number of columns to process in a table. This is a low level config property which should be touched with care. This restriction is needed because excessively wide tables can result in failure to ingest the schema.", + description="Maximum number of columns to process in a table. This is a low level config property which " + "should be touched with care. This restriction is needed because excessively wide tables can " + "result in failure to ingest the schema.", ) # The inheritance hierarchy is wonky here, but these options need modifications. project_id: Optional[str] = Field( @@ -214,6 +232,15 @@ def have_table_data_read_permission(self) -> bool: "Overrides `project_id_pattern`." ), ) + project_labels: List[str] = Field( + default_factory=list, + description=( + "Ingests projects with the specified labels. Set value in the format of `key:value`. 
Use this property to " + "define which projects to ingest based" + "on project-level labels. If project_ids or project_id is set, this configuration has no effect. The " + "ingestion process filters projects by label first, and then applies the project_id_pattern." + ), + ) storage_project_id: None = Field(default=None, hidden_from_docs=True) diff --git a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_report.py b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_report.py index 4cfcc3922ddc3..807e99604f013 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_report.py +++ b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_report.py @@ -31,6 +31,7 @@ class BigQuerySchemaApiPerfReport(Report): num_get_snapshots_for_dataset_api_requests: int = 0 list_projects: PerfTimer = field(default_factory=PerfTimer) + list_projects_with_labels: PerfTimer = field(default_factory=PerfTimer) list_datasets: PerfTimer = field(default_factory=PerfTimer) get_columns_for_dataset_sec: float = 0 diff --git a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_schema.py b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_schema.py index d73ac46c862ea..4326ff7a35527 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_schema.py +++ b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_schema.py @@ -5,7 +5,7 @@ from typing import Any, Dict, Iterable, Iterator, List, Optional from google.api_core import retry -from google.cloud import bigquery, datacatalog_v1 +from google.cloud import bigquery, datacatalog_v1, resourcemanager_v3 from google.cloud.bigquery.table import ( RowIterator, TableListItem, @@ -144,9 +144,11 @@ def __init__( self, report: BigQuerySchemaApiPerfReport, client: bigquery.Client, + projects_client: resourcemanager_v3.ProjectsClient, datacatalog_client: Optional[datacatalog_v1.PolicyTagManagerClient] = 
None, ) -> None: self.bq_client = client + self.projects_client = projects_client self.report = report self.datacatalog_client = datacatalog_client @@ -175,7 +177,7 @@ def _should_retry(exc: BaseException) -> bool: # 'Quota exceeded: Your user exceeded quota for concurrent project.lists requests.' # Hence, added the api request retry of 15 min. # We already tried adding rate_limit externally, proving max_result and page_size - # to restrict the request calls inside list_project but issue still occured. + # to restrict the request calls inside list_project but issue still occurred. projects_iterator = self.bq_client.list_projects( max_results=max_results_per_page, page_token=page_token, @@ -202,6 +204,26 @@ def _should_retry(exc: BaseException) -> bool: return [] return projects + def get_projects_with_labels(self, labels: List[str]) -> List[BigqueryProject]: + with self.report.list_projects_with_labels: + try: + projects = [] + labels_query = " OR ".join([f"labels.{label}" for label in labels]) + for project in self.projects_client.search_projects(query=labels_query): + projects.append( + BigqueryProject( + id=project.project_id, name=project.display_name + ) + ) + + return projects + + except Exception as e: + logger.error( + f"Error getting projects with labels: {labels}. 
{e}", exc_info=True + ) + return [] + def get_datasets_for_project_id( self, project_id: str, maxResults: Optional[int] = None ) -> List[BigqueryDataset]: diff --git a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_test_connection.py b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_test_connection.py index 3aac78c154b2e..e21aadd91d7d5 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_test_connection.py +++ b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_test_connection.py @@ -96,7 +96,9 @@ def metadata_read_capability_test( client: bigquery.Client = config.get_bigquery_client() assert client bigquery_data_dictionary = BigQuerySchemaApi( - BigQueryV2Report().schema_api_perf, client + report=BigQueryV2Report().schema_api_perf, + projects_client=config.get_projects_client(), + client=client, ) result = bigquery_data_dictionary.get_datasets_for_project_id( project_id, 10 diff --git a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/lineage.py b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/lineage.py index 496bd64d3b4fe..9d15691491740 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/lineage.py +++ b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/lineage.py @@ -479,7 +479,9 @@ def lineage_via_catalog_lineage_api( lineage_client: lineage_v1.LineageClient = lineage_v1.LineageClient() data_dictionary = BigQuerySchemaApi( - self.report.schema_api_perf, self.config.get_bigquery_client() + self.report.schema_api_perf, + self.config.get_bigquery_client(), + self.config.get_projects_client(), ) # Filtering datasets diff --git a/metadata-ingestion/tests/integration/bigquery_v2/bigquery_project_label_mcp_golden.json b/metadata-ingestion/tests/integration/bigquery_v2/bigquery_project_label_mcp_golden.json new file mode 100644 index 0000000000000..a529ddc6221a7 --- /dev/null +++ 
b/metadata-ingestion/tests/integration/bigquery_v2/bigquery_project_label_mcp_golden.json @@ -0,0 +1,452 @@ +[ +{ + "entityType": "container", + "entityUrn": "urn:li:container:f284164f9a7db03ca6bbdb7bb17d5a7e", + "changeType": "UPSERT", + "aspectName": "containerProperties", + "aspect": { + "json": { + "customProperties": { + "platform": "bigquery", + "env": "PROD", + "project_id": "dev" + }, + "name": "dev" + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "bigquery-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:f284164f9a7db03ca6bbdb7bb17d5a7e", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "bigquery-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:f284164f9a7db03ca6bbdb7bb17d5a7e", + "changeType": "UPSERT", + "aspectName": "dataPlatformInstance", + "aspect": { + "json": { + "platform": "urn:li:dataPlatform:bigquery" + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "bigquery-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:f284164f9a7db03ca6bbdb7bb17d5a7e", + "changeType": "UPSERT", + "aspectName": "subTypes", + "aspect": { + "json": { + "typeNames": [ + "Project" + ] + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "bigquery-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:f284164f9a7db03ca6bbdb7bb17d5a7e", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [] + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "bigquery-2022_02_03-07_00_00", + "lastRunId": 
"no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:ce17940c2d64e7e315e68f8d7d071b1e", + "changeType": "UPSERT", + "aspectName": "containerProperties", + "aspect": { + "json": { + "customProperties": { + "platform": "bigquery", + "env": "PROD", + "project_id": "dev", + "dataset_id": "bigquery-dataset-1" + }, + "externalUrl": "https://console.cloud.google.com/bigquery?project=dev&ws=!1m4!1m3!3m2!1sdev!2sbigquery-dataset-1", + "name": "bigquery-dataset-1" + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "bigquery-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:ce17940c2d64e7e315e68f8d7d071b1e", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "bigquery-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:ce17940c2d64e7e315e68f8d7d071b1e", + "changeType": "UPSERT", + "aspectName": "dataPlatformInstance", + "aspect": { + "json": { + "platform": "urn:li:dataPlatform:bigquery" + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "bigquery-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:ce17940c2d64e7e315e68f8d7d071b1e", + "changeType": "UPSERT", + "aspectName": "subTypes", + "aspect": { + "json": { + "typeNames": [ + "Dataset" + ] + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "bigquery-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:ce17940c2d64e7e315e68f8d7d071b1e", + "changeType": "UPSERT", + "aspectName": "container", + "aspect": { + "json": { + "container": "urn:li:container:f284164f9a7db03ca6bbdb7bb17d5a7e" 
+ } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "bigquery-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:ce17940c2d64e7e315e68f8d7d071b1e", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:f284164f9a7db03ca6bbdb7bb17d5a7e", + "urn": "urn:li:container:f284164f9a7db03ca6bbdb7bb17d5a7e" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "bigquery-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:bigquery,dev.bigquery-dataset-1.table-1,PROD)", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "bigquery-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:bigquery,dev.bigquery-dataset-1.table-1,PROD)", + "changeType": "UPSERT", + "aspectName": "schemaMetadata", + "aspect": { + "json": { + "schemaName": "dev.bigquery-dataset-1.table-1", + "platform": "urn:li:dataPlatform:bigquery", + "version": 0, + "created": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "hash": "", + "platformSchema": { + "com.linkedin.schema.MySqlDDL": { + "tableSchema": "" + } + }, + "fields": [ + { + "fieldPath": "age", + "nullable": false, + "description": "comment", + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "INT", + "recursive": false, + "globalTags": { + "tags": [ + { + "tag": "urn:li:tag:Test Policy Tag" + } + ] + }, + "glossaryTerms": { + "terms": [ + { + "urn": "urn:li:glossaryTerm:Age" + } + ], + "auditStamp": { + "time": 
1643871600000, + "actor": "urn:li:corpuser:datahub" + } + }, + "isPartOfKey": false + }, + { + "fieldPath": "email", + "nullable": false, + "description": "comment", + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "STRING", + "recursive": false, + "globalTags": { + "tags": [] + }, + "glossaryTerms": { + "terms": [ + { + "urn": "urn:li:glossaryTerm:Email_Address" + } + ], + "auditStamp": { + "time": 1643871600000, + "actor": "urn:li:corpuser:datahub" + } + }, + "isPartOfKey": false + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "bigquery-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:bigquery,dev.bigquery-dataset-1.table-1,PROD)", + "changeType": "UPSERT", + "aspectName": "datasetProperties", + "aspect": { + "json": { + "customProperties": {}, + "externalUrl": "https://console.cloud.google.com/bigquery?project=dev&ws=!1m5!1m4!4m3!1sdev!2sbigquery-dataset-1!3stable-1", + "name": "table-1", + "qualifiedName": "dev.bigquery-dataset-1.table-1", + "description": "", + "tags": [] + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "bigquery-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:bigquery,dev.bigquery-dataset-1.table-1,PROD)", + "changeType": "UPSERT", + "aspectName": "container", + "aspect": { + "json": { + "container": "urn:li:container:ce17940c2d64e7e315e68f8d7d071b1e" + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "bigquery-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:bigquery,dev.bigquery-dataset-1.table-1,PROD)", + "changeType": "UPSERT", + "aspectName": "dataPlatformInstance", + "aspect": { + "json": { + "platform": 
"urn:li:dataPlatform:bigquery", + "instance": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:bigquery,dev)" + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "bigquery-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:bigquery,dev.bigquery-dataset-1.table-1,PROD)", + "changeType": "UPSERT", + "aspectName": "subTypes", + "aspect": { + "json": { + "typeNames": [ + "Table" + ] + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "bigquery-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:bigquery,dev.bigquery-dataset-1.table-1,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:f284164f9a7db03ca6bbdb7bb17d5a7e", + "urn": "urn:li:container:f284164f9a7db03ca6bbdb7bb17d5a7e" + }, + { + "id": "urn:li:container:ce17940c2d64e7e315e68f8d7d071b1e", + "urn": "urn:li:container:ce17940c2d64e7e315e68f8d7d071b1e" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "bigquery-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "glossaryTerm", + "entityUrn": "urn:li:glossaryTerm:Age", + "changeType": "UPSERT", + "aspectName": "glossaryTermKey", + "aspect": { + "json": { + "name": "Age" + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "bigquery-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "glossaryTerm", + "entityUrn": "urn:li:glossaryTerm:Email_Address", + "changeType": "UPSERT", + "aspectName": "glossaryTermKey", + "aspect": { + "json": { + "name": "Email_Address" + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "bigquery-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": 
"tag", + "entityUrn": "urn:li:tag:Test Policy Tag", + "changeType": "UPSERT", + "aspectName": "tagKey", + "aspect": { + "json": { + "name": "Test Policy Tag" + } + }, + "systemMetadata": { + "lastObserved": 1643871600000, + "runId": "bigquery-2022_02_03-07_00_00", + "lastRunId": "no-run-id-provided" + } +} +] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/bigquery_v2/test_bigquery.py b/metadata-ingestion/tests/integration/bigquery_v2/test_bigquery.py index 762c73d2a55c6..dff7f18db6135 100644 --- a/metadata-ingestion/tests/integration/bigquery_v2/test_bigquery.py +++ b/metadata-ingestion/tests/integration/bigquery_v2/test_bigquery.py @@ -15,6 +15,7 @@ from datahub.ingestion.source.bigquery_v2.bigquery_schema import ( BigqueryColumn, BigqueryDataset, + BigqueryProject, BigQuerySchemaApi, BigqueryTable, ) @@ -39,6 +40,33 @@ def random_email(): ) +def recipe(mcp_output_path: str, override: dict = {}) -> dict: + return { + "source": { + "type": "bigquery", + "config": { + "project_ids": ["project-id-1"], + "include_usage_statistics": False, + "include_table_lineage": False, + "include_data_platform_instance": True, + "classification": ClassificationConfig( + enabled=True, + classifiers=[ + DynamicTypedClassifierConfig( + type="datahub", + config=DataHubClassifierConfig( + minimum_values_threshold=1, + ), + ) + ], + max_workers=1, + ).dict(), + }, + }, + "sink": {"type": "file", "config": {"filename": mcp_output_path}}, + } + + @freeze_time(FROZEN_TIME) @patch.object(BigQuerySchemaApi, "get_tables_for_dataset") @patch.object(BigQuerySchemaGenerator, "get_core_table_details") @@ -47,9 +75,11 @@ def random_email(): @patch.object(BigQueryDataReader, "get_sample_data_for_table") @patch("google.cloud.bigquery.Client") @patch("google.cloud.datacatalog_v1.PolicyTagManagerClient") +@patch("google.cloud.resourcemanager_v3.ProjectsClient") def test_bigquery_v2_ingest( client, policy_tag_manager_client, + projects_client, get_sample_data_for_table, 
get_columns_for_dataset, get_datasets_for_project_id, @@ -111,33 +141,105 @@ def test_bigquery_v2_ingest( ) get_tables_for_dataset.return_value = iter([bigquery_table]) - source_config_dict: Dict[str, Any] = { - "project_ids": ["project-id-1"], - "include_usage_statistics": False, - "include_table_lineage": False, - "include_data_platform_instance": True, - "classification": ClassificationConfig( - enabled=True, - classifiers=[ - DynamicTypedClassifierConfig( - type="datahub", - config=DataHubClassifierConfig( - minimum_values_threshold=1, - ), - ) - ], - max_workers=1, - ).dict(), - } + pipeline_config_dict: Dict[str, Any] = recipe(mcp_output_path=mcp_output_path) - pipeline_config_dict: Dict[str, Any] = { - "source": { - "type": "bigquery", - "config": source_config_dict, - }, - "sink": {"type": "file", "config": {"filename": mcp_output_path}}, + run_and_get_pipeline(pipeline_config_dict) + + mce_helpers.check_golden_file( + pytestconfig, + output_path=mcp_output_path, + golden_path=mcp_golden_path, + ) + + +@freeze_time(FROZEN_TIME) +@patch.object(BigQuerySchemaApi, attribute="get_projects_with_labels") +@patch.object(BigQuerySchemaApi, "get_tables_for_dataset") +@patch.object(BigQuerySchemaGenerator, "get_core_table_details") +@patch.object(BigQuerySchemaApi, "get_datasets_for_project_id") +@patch.object(BigQuerySchemaApi, "get_columns_for_dataset") +@patch.object(BigQueryDataReader, "get_sample_data_for_table") +@patch("google.cloud.bigquery.Client") +@patch("google.cloud.datacatalog_v1.PolicyTagManagerClient") +@patch("google.cloud.resourcemanager_v3.ProjectsClient") +def test_bigquery_v2_project_labels_ingest( + client, + policy_tag_manager_client, + projects_client, + get_sample_data_for_table, + get_columns_for_dataset, + get_datasets_for_project_id, + get_core_table_details, + get_tables_for_dataset, + get_projects_with_labels, + pytestconfig, + tmp_path, +): + test_resources_dir = pytestconfig.rootpath / "tests/integration/bigquery_v2" + mcp_golden_path 
= f"{test_resources_dir}/bigquery_project_label_mcp_golden.json" + mcp_output_path = "{}/{}".format(tmp_path, "bigquery_project_label_mcp_output.json") + + get_datasets_for_project_id.return_value = [ + BigqueryDataset(name="bigquery-dataset-1") + ] + + get_projects_with_labels.return_value = [ + BigqueryProject(id="dev", name="development") + ] + + table_list_item = TableListItem( + {"tableReference": {"projectId": "", "datasetId": "", "tableId": ""}} + ) + table_name = "table-1" + get_core_table_details.return_value = {table_name: table_list_item} + get_columns_for_dataset.return_value = { + table_name: [ + BigqueryColumn( + name="age", + ordinal_position=1, + is_nullable=False, + field_path="col_1", + data_type="INT", + comment="comment", + is_partition_column=False, + cluster_column_position=None, + policy_tags=["Test Policy Tag"], + ), + BigqueryColumn( + name="email", + ordinal_position=1, + is_nullable=False, + field_path="col_2", + data_type="STRING", + comment="comment", + is_partition_column=False, + cluster_column_position=None, + ), + ] + } + get_sample_data_for_table.return_value = { + "age": [random.randint(1, 80) for i in range(20)], + "email": [random_email() for i in range(20)], } + bigquery_table = BigqueryTable( + name=table_name, + comment=None, + created=None, + last_altered=None, + size_in_bytes=None, + rows_count=None, + ) + get_tables_for_dataset.return_value = iter([bigquery_table]) + + pipeline_config_dict: Dict[str, Any] = recipe(mcp_output_path=mcp_output_path) + + del pipeline_config_dict["source"]["config"]["project_ids"] + + pipeline_config_dict["source"]["config"]["project_labels"] = [ + "environment:development" + ] + run_and_get_pipeline(pipeline_config_dict) mce_helpers.check_golden_file( diff --git a/metadata-ingestion/tests/unit/test_bigquery_source.py b/metadata-ingestion/tests/unit/test_bigquery_source.py index 746cf9b0acfc3..d12ffbcbbcf10 100644 --- a/metadata-ingestion/tests/unit/test_bigquery_source.py +++ 
b/metadata-ingestion/tests/unit/test_bigquery_source.py @@ -170,7 +170,11 @@ def test_bigquery_uri_with_credential(): @patch.object(BigQueryV2Config, "get_bigquery_client") -def test_get_projects_with_project_ids(get_bq_client_mock): +@patch.object(BigQueryV2Config, "get_projects_client") +def test_get_projects_with_project_ids( + get_projects_client, + get_bq_client_mock, +): client_mock = MagicMock() get_bq_client_mock.return_value = client_mock config = BigQueryV2Config.parse_obj( @@ -197,8 +201,10 @@ def test_get_projects_with_project_ids(get_bq_client_mock): @patch.object(BigQueryV2Config, "get_bigquery_client") +@patch.object(BigQueryV2Config, "get_projects_client") def test_get_projects_with_project_ids_overrides_project_id_pattern( - get_bq_client_mock, + get_projects_client, + get_bigquery_client, ): config = BigQueryV2Config.parse_obj( { @@ -226,7 +232,11 @@ def test_platform_instance_config_always_none(): @patch.object(BigQueryV2Config, "get_bigquery_client") -def test_get_dataplatform_instance_aspect_returns_project_id(get_bq_client_mock): +@patch.object(BigQueryV2Config, "get_projects_client") +def test_get_dataplatform_instance_aspect_returns_project_id( + get_projects_client, + get_bq_client_mock, +): project_id = "project_id" expected_instance = ( f"urn:li:dataPlatformInstance:(urn:li:dataPlatform:bigquery,{project_id})" @@ -247,7 +257,11 @@ def test_get_dataplatform_instance_aspect_returns_project_id(get_bq_client_mock) @patch.object(BigQueryV2Config, "get_bigquery_client") -def test_get_dataplatform_instance_default_no_instance(get_bq_client_mock): +@patch.object(BigQueryV2Config, "get_projects_client") +def test_get_dataplatform_instance_default_no_instance( + get_projects_client, + get_bq_client_mock, +): config = BigQueryV2Config.parse_obj({}) source = BigqueryV2Source(config=config, ctx=PipelineContext(run_id="test")) schema_gen = source.bq_schema_extractor @@ -263,7 +277,11 @@ def 
test_get_dataplatform_instance_default_no_instance(get_bq_client_mock): @patch.object(BigQueryV2Config, "get_bigquery_client") -def test_get_projects_with_single_project_id(get_bq_client_mock): +@patch.object(BigQueryV2Config, "get_projects_client") +def test_get_projects_with_single_project_id( + get_projects_client, + get_bq_client_mock, +): client_mock = MagicMock() get_bq_client_mock.return_value = client_mock config = BigQueryV2Config.parse_obj({"project_id": "test-3"}) @@ -275,9 +293,10 @@ def test_get_projects_with_single_project_id(get_bq_client_mock): @patch.object(BigQueryV2Config, "get_bigquery_client") -def test_get_projects_by_list(get_bq_client_mock): +@patch.object(BigQueryV2Config, "get_projects_client") +def test_get_projects_by_list(get_projects_client, get_bigquery_client): client_mock = MagicMock() - get_bq_client_mock.return_value = client_mock + get_bigquery_client.return_value = client_mock first_page = MagicMock() first_page.__iter__.return_value = iter( @@ -296,6 +315,7 @@ def test_get_projects_by_list(get_bq_client_mock): ] ) second_page.next_page_token = None + client_mock.list_projects.side_effect = [first_page, second_page] config = BigQueryV2Config.parse_obj({}) @@ -311,7 +331,10 @@ def test_get_projects_by_list(get_bq_client_mock): @patch.object(BigQuerySchemaApi, "get_projects") @patch.object(BigQueryV2Config, "get_bigquery_client") -def test_get_projects_filter_by_pattern(get_bq_client_mock, get_projects_mock): +@patch.object(BigQueryV2Config, "get_projects_client") +def test_get_projects_filter_by_pattern( + get_projects_client, get_bq_client_mock, get_projects_mock +): get_projects_mock.return_value = [ BigqueryProject("test-project", "Test Project"), BigqueryProject("test-project-2", "Test Project 2"), @@ -329,7 +352,10 @@ def test_get_projects_filter_by_pattern(get_bq_client_mock, get_projects_mock): @patch.object(BigQuerySchemaApi, "get_projects") @patch.object(BigQueryV2Config, "get_bigquery_client") -def 
test_get_projects_list_empty(get_bq_client_mock, get_projects_mock): +@patch.object(BigQueryV2Config, "get_projects_client") +def test_get_projects_list_empty( + get_projects_client, get_bq_client_mock, get_projects_mock +): get_projects_mock.return_value = [] config = BigQueryV2Config.parse_obj( @@ -342,7 +368,9 @@ def test_get_projects_list_empty(get_bq_client_mock, get_projects_mock): @patch.object(BigQueryV2Config, "get_bigquery_client") +@patch.object(BigQueryV2Config, "get_projects_client") def test_get_projects_list_failure( + get_projects_client: MagicMock, get_bq_client_mock: MagicMock, caplog: pytest.LogCaptureFixture, ) -> None: @@ -366,7 +394,10 @@ def test_get_projects_list_failure( @patch.object(BigQuerySchemaApi, "get_projects") @patch.object(BigQueryV2Config, "get_bigquery_client") -def test_get_projects_list_fully_filtered(get_projects_mock, get_bq_client_mock): +@patch.object(BigQueryV2Config, "get_projects_client") +def test_get_projects_list_fully_filtered( + get_projects_mock, get_bq_client_mock, get_projects_client +): get_projects_mock.return_value = [BigqueryProject("test-project", "Test Project")] config = BigQueryV2Config.parse_obj( @@ -399,7 +430,10 @@ def bigquery_table() -> BigqueryTable: @patch.object(BigQueryV2Config, "get_bigquery_client") -def test_gen_table_dataset_workunits(get_bq_client_mock, bigquery_table): +@patch.object(BigQueryV2Config, "get_projects_client") +def test_gen_table_dataset_workunits( + get_projects_client, get_bq_client_mock, bigquery_table +): project_id = "test-project" dataset_name = "test-dataset" config = BigQueryV2Config.parse_obj( @@ -471,7 +505,8 @@ def test_gen_table_dataset_workunits(get_bq_client_mock, bigquery_table): @patch.object(BigQueryV2Config, "get_bigquery_client") -def test_simple_upstream_table_generation(get_bq_client_mock): +@patch.object(BigQueryV2Config, "get_projects_client") +def test_simple_upstream_table_generation(get_bq_client_mock, get_projects_client): a: BigQueryTableRef = 
BigQueryTableRef( BigqueryTableIdentifier( project_id="test-project", dataset="test-dataset", table="a" @@ -503,8 +538,10 @@ def test_simple_upstream_table_generation(get_bq_client_mock): @patch.object(BigQueryV2Config, "get_bigquery_client") +@patch.object(BigQueryV2Config, "get_projects_client") def test_upstream_table_generation_with_temporary_table_without_temp_upstream( get_bq_client_mock, + get_projects_client, ): a: BigQueryTableRef = BigQueryTableRef( BigqueryTableIdentifier( @@ -536,7 +573,10 @@ def test_upstream_table_generation_with_temporary_table_without_temp_upstream( @patch.object(BigQueryV2Config, "get_bigquery_client") -def test_upstream_table_column_lineage_with_temp_table(get_bq_client_mock): +@patch.object(BigQueryV2Config, "get_projects_client") +def test_upstream_table_column_lineage_with_temp_table( + get_bq_client_mock, get_projects_client +): from datahub.ingestion.api.common import PipelineContext a: BigQueryTableRef = BigQueryTableRef( @@ -611,8 +651,9 @@ def test_upstream_table_column_lineage_with_temp_table(get_bq_client_mock): @patch.object(BigQueryV2Config, "get_bigquery_client") +@patch.object(BigQueryV2Config, "get_projects_client") def test_upstream_table_generation_with_temporary_table_with_multiple_temp_upstream( - get_bq_client_mock, + get_bq_client_mock, get_projects_client ): a: BigQueryTableRef = BigQueryTableRef( BigqueryTableIdentifier( @@ -675,7 +716,10 @@ def test_upstream_table_generation_with_temporary_table_with_multiple_temp_upstr @patch.object(BigQuerySchemaApi, "get_tables_for_dataset") @patch.object(BigQueryV2Config, "get_bigquery_client") -def test_table_processing_logic(get_bq_client_mock, data_dictionary_mock): +@patch.object(BigQueryV2Config, "get_projects_client") +def test_table_processing_logic( + get_projects_client, get_bq_client_mock, data_dictionary_mock +): client_mock = MagicMock() get_bq_client_mock.return_value = client_mock config = BigQueryV2Config.parse_obj( @@ -747,8 +791,9 @@ def 
test_table_processing_logic(get_bq_client_mock, data_dictionary_mock): @patch.object(BigQuerySchemaApi, "get_tables_for_dataset") @patch.object(BigQueryV2Config, "get_bigquery_client") +@patch.object(BigQueryV2Config, "get_projects_client") def test_table_processing_logic_date_named_tables( - get_bq_client_mock, data_dictionary_mock + get_projects_client, get_bq_client_mock, data_dictionary_mock ): client_mock = MagicMock() get_bq_client_mock.return_value = client_mock @@ -859,8 +904,10 @@ def bigquery_view_2() -> BigqueryView: @patch.object(BigQuerySchemaApi, "get_query_result") @patch.object(BigQueryV2Config, "get_bigquery_client") +@patch.object(BigQueryV2Config, "get_projects_client") def test_get_views_for_dataset( get_bq_client_mock: Mock, + get_projects_client: MagicMock, query_mock: Mock, bigquery_view_1: BigqueryView, bigquery_view_2: BigqueryView, @@ -889,7 +936,9 @@ def test_get_views_for_dataset( ) query_mock.return_value = [row1, row2] bigquery_data_dictionary = BigQuerySchemaApi( - BigQueryV2Report().schema_api_perf, client_mock + report=BigQueryV2Report().schema_api_perf, + client=client_mock, + projects_client=MagicMock(), ) views = bigquery_data_dictionary.get_views_for_dataset( @@ -905,8 +954,9 @@ def test_get_views_for_dataset( BigQuerySchemaGenerator, "gen_dataset_workunits", lambda *args, **kwargs: [] ) @patch.object(BigQueryV2Config, "get_bigquery_client") +@patch.object(BigQueryV2Config, "get_projects_client") def test_gen_view_dataset_workunits( - get_bq_client_mock, bigquery_view_1, bigquery_view_2 + get_projects_client, get_bq_client_mock, bigquery_view_1, bigquery_view_2 ): project_id = "test-project" dataset_name = "test-dataset" @@ -963,7 +1013,9 @@ def bigquery_snapshot() -> BigqueryTableSnapshot: @patch.object(BigQuerySchemaApi, "get_query_result") @patch.object(BigQueryV2Config, "get_bigquery_client") +@patch.object(BigQueryV2Config, "get_projects_client") def test_get_snapshots_for_dataset( + get_projects_client: MagicMock, 
get_bq_client_mock: Mock, query_mock: Mock, bigquery_snapshot: BigqueryTableSnapshot, @@ -988,7 +1040,9 @@ def test_get_snapshots_for_dataset( ) query_mock.return_value = [row1] bigquery_data_dictionary = BigQuerySchemaApi( - BigQueryV2Report().schema_api_perf, client_mock + report=BigQueryV2Report().schema_api_perf, + client=client_mock, + projects_client=MagicMock(), ) snapshots = bigquery_data_dictionary.get_snapshots_for_dataset( @@ -1001,7 +1055,10 @@ def test_get_snapshots_for_dataset( @patch.object(BigQueryV2Config, "get_bigquery_client") -def test_gen_snapshot_dataset_workunits(get_bq_client_mock, bigquery_snapshot): +@patch.object(BigQueryV2Config, "get_projects_client") +def test_gen_snapshot_dataset_workunits( + get_bq_client_mock, get_projects_client, bigquery_snapshot +): project_id = "test-project" dataset_name = "test-dataset" config = BigQueryV2Config.parse_obj( @@ -1140,7 +1197,9 @@ def test_default_config_for_excluding_projects_and_datasets(): @patch.object(BigQueryConnectionConfig, "get_bigquery_client", new=lambda self: None) @patch.object(BigQuerySchemaApi, "get_datasets_for_project_id") +@patch.object(BigQueryV2Config, "get_projects_client") def test_excluding_empty_projects_from_ingestion( + get_projects_client, get_datasets_for_project_id_mock, ): project_id_with_datasets = "project-id-with-datasets" @@ -1173,3 +1232,32 @@ def get_datasets_for_project_id_side_effect( config = BigQueryV2Config.parse_obj({**base_config, "exclude_empty_projects": True}) source = BigqueryV2Source(config=config, ctx=PipelineContext(run_id="test-2")) assert len({wu.metadata.entityUrn for wu in source.get_workunits()}) == 1 # type: ignore + + +@patch.object(BigQueryV2Config, "get_bigquery_client") +@patch.object(BigQueryV2Config, "get_projects_client") +def test_get_projects_with_project_labels( + get_projects_client, + get_bq_client_mock, +): + client_mock = MagicMock() + + get_projects_client.return_value = client_mock + + client_mock.search_projects.return_value 
= [ + SimpleNamespace(project_id="dev", display_name="dev_project"), + SimpleNamespace(project_id="qa", display_name="qa_project"), + ] + + config = BigQueryV2Config.parse_obj( + { + "project_labels": ["environment:dev", "environment:qa"], + } + ) + + source = BigqueryV2Source(config=config, ctx=PipelineContext(run_id="test1")) + + assert source._get_projects() == [ + BigqueryProject("dev", "dev_project"), + BigqueryProject("qa", "qa_project"), + ] From 09acd5b989e3b00a5dce263c70cb27bcd744abb9 Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Tue, 20 Aug 2024 13:42:45 -0500 Subject: [PATCH 72/72] chore(kafka): kafka version bump (#11211) --- datahub-upgrade/build.gradle | 2 +- docker/kafka-setup/Dockerfile | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/datahub-upgrade/build.gradle b/datahub-upgrade/build.gradle index 304bf3a67a5b2..f64886953fe22 100644 --- a/datahub-upgrade/build.gradle +++ b/datahub-upgrade/build.gradle @@ -55,7 +55,7 @@ dependencies { // mock internal schema registry implementation externalDependency.kafkaAvroSerde implementation externalDependency.kafkaAvroSerializer - implementation "org.apache.kafka:kafka_2.12:3.7.0" + implementation "org.apache.kafka:kafka_2.12:3.7.1" implementation externalDependency.slf4jApi compileOnly externalDependency.lombok diff --git a/docker/kafka-setup/Dockerfile b/docker/kafka-setup/Dockerfile index ad1d01c1ce97c..af32dd5dd4d36 100644 --- a/docker/kafka-setup/Dockerfile +++ b/docker/kafka-setup/Dockerfile @@ -22,7 +22,7 @@ ARG ALPINE_REPO_URL ARG APACHE_DOWNLOAD_URL ARG GITHUB_REPO_URL -ENV KAFKA_VERSION=3.7.0 +ENV KAFKA_VERSION=3.7.1 ENV SCALA_VERSION=2.13 LABEL name="kafka" version=${KAFKA_VERSION}