diff --git a/service_configuration_lib/spark_config.py b/service_configuration_lib/spark_config.py
index f76e5b2..a79689e 100644
--- a/service_configuration_lib/spark_config.py
+++ b/service_configuration_lib/spark_config.py
@@ -888,9 +888,6 @@ def get_spark_conf(
     # configure spark conf log
     spark_conf = _append_spark_config(spark_conf, 'spark.logConf', 'true')
 
-    # configure spark Console Progress
-    spark_conf = _append_spark_config(spark_conf, 'spark.ui.showConsoleProgress', 'true')
-
     spark_conf = _append_aws_credentials_conf(spark_conf, *aws_creds, aws_region)
     return spark_conf
 
diff --git a/setup.py b/setup.py
index 21d93c8..a7b9ec7 100644
--- a/setup.py
+++ b/setup.py
@@ -17,7 +17,7 @@
 
 setup(
     name='service-configuration-lib',
-    version='2.12.3',
+    version='2.12.4',
     provides=['service_configuration_lib'],
     description='Start, stop, and inspect Yelp SOA services',
     url='https://github.com/Yelp/service_configuration_lib',
diff --git a/tests/spark_config_test.py b/tests/spark_config_test.py
index 90d5b3b..464d752 100644
--- a/tests/spark_config_test.py
+++ b/tests/spark_config_test.py
@@ -667,22 +667,6 @@ def test_append_spark_conf_log(
 
         assert output[key] == expected_output
 
-    @pytest.mark.parametrize(
-        'user_spark_opts,expected_output', [
-            # not configured by user
-            ({}, 'true'),
-            # configured by user
-            ({'spark.ui.showConsoleProgress': 'false'}, 'false'),
-        ],
-    )
-    def test_append_console_progress_conf(
-        self, user_spark_opts, expected_output,
-    ):
-        key = 'spark.ui.showConsoleProgress'
-        output = spark_config._append_spark_config(user_spark_opts, key, 'true')
-
-        assert output[key] == expected_output
-
     def test_append_aws_credentials_conf(self):
         output = spark_config._append_aws_credentials_conf(
             {},
@@ -702,14 +686,6 @@ def mock_append_spark_conf_log(self):
         ) as m:
             yield m
 
-    @pytest.fixture
-    def mock_append_console_progress_conf(self):
-        return_value = {'spark.ui.showConsoleProgress': 'true'}
-        with MockConfigFunction(
-            '_append_spark_config', return_value,
-        ) as m:
-            yield m
-
     @pytest.fixture
     def mock_get_mesos_docker_volumes_conf(self):
         return_value = {'spark.mesos.executor.docker.volumes': '/tmp:/tmp:ro'}
@@ -960,7 +936,6 @@ def test_get_spark_conf_mesos(
         assert_app_name,
         mock_log,
         mock_append_spark_conf_log,
-        mock_append_console_progress_conf,
     ):
         other_spark_opts = {'spark.driver.memory': '2g', 'spark.executor.memoryOverhead': '1024'}
         not_allowed_opts = {'spark.executorEnv.PAASTA_SERVICE': 'random-service'}
@@ -1007,8 +982,7 @@ def test_get_spark_conf_mesos(
             list(mock_append_event_log_conf.return_value.keys()) +
             list(mock_append_aws_credentials_conf.return_value.keys()) +
             list(mock_append_sql_shuffle_partitions_conf.return_value.keys()) +
-            list(mock_append_spark_conf_log.return_value.keys()) +
-            list(mock_append_console_progress_conf.return_value.keys()),
+            list(mock_append_spark_conf_log.return_value.keys()),
         )
         assert len(set(output.keys()) - verified_keys) == 0
         mock_get_mesos_docker_volumes_conf.mocker.assert_called_once_with(
@@ -1063,7 +1037,6 @@ def assert_kubernetes_conf(self, base_volumes):
             'spark.kubernetes.executor.label.paasta.yelp.com/pool': self.pool,
             'spark.kubernetes.executor.label.yelp.com/owner': 'core_ml',
             'spark.logConf': 'true',
-            'spark.ui.showConsoleProgress': 'true',
         }
         for i, volume in enumerate(base_volumes + self._get_k8s_base_volumes()):
             expected_output[f'spark.kubernetes.executor.volumes.hostPath.{i}.mount.path'] = volume['containerPath']
@@ -1150,7 +1123,6 @@ def assert_local_conf(self, base_volumes):
             'spark.executorEnv.PAASTA_INSTANCE_TYPE': 'spark',
             'spark.executorEnv.SPARK_EXECUTOR_DIRS': '/tmp',
             'spark.logConf': 'true',
-            'spark.ui.showConsoleProgress': 'true',
         }
         for i, volume in enumerate(base_volumes + self._get_k8s_base_volumes()):
             expected_output[f'spark.kubernetes.executor.volumes.hostPath.{i}.mount.path'] = volume['containerPath']