
Commit

fix(ingest/ruff): added flake8-comprehensions and flake8-annotations to ruff
sagar-salvi-apptware committed Jan 20, 2025
1 parent aff5e16 commit 55b05ee
Showing 10 changed files with 49 additions and 33 deletions.
24 changes: 21 additions & 3 deletions metadata-ingestion/pyproject.toml
@@ -29,7 +29,10 @@ extend-select = [
"G010", # logging.warn -> logging.warning
"I", # isort
"TID", # flake8-tidy-imports
"ANN20",# flake8-annotations
"C4", # flake8-comprehensions
]

extend-ignore = [
"E501", # Handled by formatter
"E111", # Handled by formatter
@@ -38,10 +41,25 @@ extend-ignore = [
"E203", # Ignore whitespace before ':' (matches Black)
"B019", # Allow usages of functools.lru_cache
"B008", # Allow function call in argument defaults
"COM812", # Avoid conflicts with formatter
"ANN204", # Missing return type annotation for special method `__init__`
"ANN206", # Missing return type annotation for classmethod
"ANN205", # Missing return type annotation for staticmethod
"C400", # Ignore Unnecessary generator
"C410", # Ignore Unnecessary list literal passed to `list()`
"C414", # Ignore Unnecessary `reversed()` call within `sorted()`
"C417", # Ignore Unnecessary `map()` usage (rewrite using a list comprehension)
"C419", # Ignore Unnecessary list comprehension
# TODO: Enable these later
"B006", # Mutable args
"B017", # Do not assert blind exception
"B904", # Checks for raise statements in exception handlers that lack a from clause
"B006", # Mutable args
"B017", # Do not assert blind exception
"B904", # Checks for raise statements in exception handlers that lack a from clause
"ANN201", # Missing return type annotation for public function
"ANN202", # Missing return type annotation for private function
"C401", # Unnecessary generator
"C403", # Unnecessary list comprehension
"C408", # Unnecessary `list()` call
"C416", # Unnecessary Comprehension
]

[tool.ruff.lint.mccabe]
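For context — this sketch is mine, not part of the commit — the ANN20 selector asks for return type annotations on functions, while the C4 family targets redundant list()/set()/dict() wrappers (illustrated next to the individual file changes below). Since ANN201 and ANN202 sit in the TODO block of extend-ignore, as far as I can tell only the C4 rules change enforcement for now.

# Sketch of the flake8-annotations (ANN2xx) expectation: return type annotations.
# Illustrative names only; nothing below is from the commit.

def resolve_aspect(name: str) -> str:  # ANN201 covers public functions
    return name.lower()


def _coerce_flag(value: int) -> bool:  # ANN202 covers private functions the same way
    return value > 0

Because the selectors are declared in pyproject.toml, a plain ruff check run over metadata-ingestion should pick them up without extra command-line flags.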
2 changes: 1 addition & 1 deletion metadata-ingestion/src/datahub/cli/delete_cli.py
@@ -615,7 +615,7 @@ def _validate_user_aspect_flags(
) -> None:
# Check the aspect name.
if aspect and aspect not in ASPECT_MAP:
logger.info(f"Supported aspects: {list(sorted(ASPECT_MAP.keys()))}")
logger.info(f"Supported aspects: {sorted(ASPECT_MAP.keys())}")

raise click.UsageError(
f"Unknown aspect {aspect}. Ensure the aspect is in the above list."
)
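The rewrite above drops a list() wrapped around sorted(), which already returns a new list; the same change recurs in the dbt, ingestion-stage, and partition-executor files further down. Presumably this is the C413/C414 family, though ruff's actual output isn't shown in the commit. A quick sketch of the equivalence, with illustrative data that is not from the source:

# sorted() always materializes a new list, so the outer list() is just an extra copy.
aspect_map = {"status": 1, "ownership": 2}
assert sorted(aspect_map.keys()) == list(sorted(aspect_map.keys())) == ["ownership", "status"]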
@@ -29,9 +29,13 @@


# we always skip over ingesting metadata about these keyspaces
SYSTEM_KEYSPACE_LIST = set(
["system", "system_auth", "system_schema", "system_distributed", "system_traces"]
)
SYSTEM_KEYSPACE_LIST = {
"system",
"system_auth",
"system_schema",
"system_distributed",
"system_traces",
}


@dataclass
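Swapping set([...]) for a set literal avoids building a throwaway list just to convert it — the C405 pattern, if I'm matching rule codes correctly — and membership checks behave identically:

# Equivalent contents; the literal skips the intermediate list allocation.
system_keyspaces = {"system", "system_auth", "system_schema"}  # illustrative subset
assert system_keyspaces == set(["system", "system_auth", "system_schema"])
assert "system_auth" in system_keyspaces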
metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_cloud.py
@@ -429,11 +429,9 @@ def _parse_into_dbt_node(self, node: Dict) -> DBTNode:
columns = []
if "columns" in node and node["columns"] is not None:
# columns will be empty for ephemeral models
columns = list(
sorted(
[self._parse_into_dbt_column(column) for column in node["columns"]],
key=lambda c: c.index,
)
columns = sorted(
[self._parse_into_dbt_column(column) for column in node["columns"]],
key=lambda c: c.index,
)

test_info = None
2 changes: 1 addition & 1 deletion metadata-ingestion/src/datahub/secret/secret_common.py
@@ -10,7 +10,7 @@

def resolve_secrets(secret_names: List[str], secret_stores: List[SecretStore]) -> dict:
# Attempt to resolve secret using by checking each configured secret store.
final_secret_values = dict({})
final_secret_values = {}

for secret_store in secret_stores:
try:
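dict({}) builds a literal and then copies it through the dict() constructor; the bare literal is equivalent, and the same rewrite shows up in the CSV enricher and dbt test fixtures below. A small sketch with made-up keys:

# dict({...}) and the bare literal produce equal dicts; the literal avoids the extra call.
base_config = {"delimiter": ",", "array_delimiter": "|"}
assert base_config == dict({"delimiter": ",", "array_delimiter": "|"})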
@@ -36,7 +36,7 @@ def test_ingestion_stage_context_report_handles_multiple_stages():
for duration in report.ingestion_stage_durations.values()
)

sorted_stages = list(sorted(report.ingestion_stage_durations.keys()))
sorted_stages = sorted(report.ingestion_stage_durations.keys())
assert "Test Stage 1" in sorted_stages[0]
assert "Test Stage 2" in sorted_stages[1]
assert "Test Stage 3" in sorted_stages[2]
14 changes: 6 additions & 8 deletions metadata-ingestion/tests/unit/test_csv_enricher_source.py
@@ -57,14 +57,12 @@ def create_mocked_csv_enricher_source() -> CSVEnricherSource:


def create_base_csv_enricher_config() -> Dict:
return dict(
{
"filename": "../integration/csv_enricher/csv_enricher_test_data.csv",
"write_semantics": "PATCH",
"delimiter": ",",
"array_delimiter": "|",
},
)
return {
"filename": "../integration/csv_enricher/csv_enricher_test_data.csv",
"write_semantics": "PATCH",
"delimiter": ",",
"array_delimiter": "|",
}


def test_get_resource_glossary_terms_work_unit_no_terms():
16 changes: 7 additions & 9 deletions metadata-ingestion/tests/unit/test_dbt_source.py
@@ -65,15 +65,13 @@ def create_mocked_dbt_source() -> DBTCoreSource:


def create_base_dbt_config() -> Dict:
return dict(
{
"manifest_path": "temp/",
"catalog_path": "temp/",
"sources_path": "temp/",
"target_platform": "postgres",
"enable_meta_mapping": False,
},
)
return {
"manifest_path": "temp/",
"catalog_path": "temp/",
"sources_path": "temp/",
"target_platform": "postgres",
"enable_meta_mapping": False,
}


def test_dbt_source_patching_no_new():
@@ -37,7 +37,7 @@ def task(key: str, id: str) -> None:
saw_keys_in_parallel = False
while executing_tasks or not done_tasks:
keys_executing = [key for key, _ in executing_tasks]
assert list(sorted(keys_executing)) == list(sorted(set(keys_executing))), (
assert sorted(keys_executing) == sorted(set(keys_executing)), (
"partitioning not working"
)

2 changes: 1 addition & 1 deletion metadata-ingestion/tests/unit/utilities/test_perf_timer.py
@@ -44,7 +44,7 @@ def generator_function():

with PerfTimer() as outer_timer:
seq = generator_function()
list([i for i in seq])
[i for i in seq]
assert approx(outer_timer.elapsed_seconds()) == 1 + 0.2 * n + 0.2 * n


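This last change drops the outer list() around a comprehension whose only job is to drain the generator so the timer measures a full pass — likely the C411 pattern (an outer list() call around a list comprehension), though that is my guess from the rule list; C416, which would push further toward list(seq), stays in the ignore block for now. All three spellings consume every element:

# Each form drains the whole generator; only the spelling differs.
def gen():
    yield from range(3)

assert list([i for i in gen()]) == [i for i in gen()] == list(gen()) == [0, 1, 2]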
