diff --git a/astronomer/providers/core/sensors/filesystem.py b/astronomer/providers/core/sensors/filesystem.py
index 1ed91f13f..53258d5a5 100644
--- a/astronomer/providers/core/sensors/filesystem.py
+++ b/astronomer/providers/core/sensors/filesystem.py
@@ -1,7 +1,9 @@
+from __future__ import annotations
+
 import os
 import warnings
 from datetime import timedelta
-from typing import Any, Dict, Optional
+from typing import Any
 
 from airflow.hooks.filesystem import FSHook
 from airflow.sensors.filesystem import FileSensor
@@ -54,7 +56,7 @@ def execute(self, context: Context) -> None:
             method_name="execute_complete",
         )
 
-    def execute_complete(self, context: Context, event: Optional[Dict[str, Any]]) -> None:
+    def execute_complete(self, context: Context, event: bool | None = None) -> None:
         """
         Callback for when the trigger fires - returns immediately.
         Relies on trigger to throw an exception, otherwise it assumes execution was
diff --git a/astronomer/providers/google/cloud/extractors/bigquery.py b/astronomer/providers/google/cloud/extractors/bigquery.py
index 1e72623f4..5aeda6401 100644
--- a/astronomer/providers/google/cloud/extractors/bigquery.py
+++ b/astronomer/providers/google/cloud/extractors/bigquery.py
@@ -107,7 +107,7 @@ def extract_on_complete(self, task_instance: TaskInstance) -> Optional[TaskMetad
         stats = BigQueryDatasetsProvider(client=self._big_query_client).get_facets(bigquery_job_id)
 
         inputs = stats.inputs
-        output = stats.output
+        output = stats.outputs
         run_facets = stats.run_facets
         job_facets = {}
         if isinstance(
diff --git a/tests/google/cloud/extractors/test_bigquery.py b/tests/google/cloud/extractors/test_bigquery.py
index 9b035a3fa..e558b2b2c 100644
--- a/tests/google/cloud/extractors/test_bigquery.py
+++ b/tests/google/cloud/extractors/test_bigquery.py
@@ -88,7 +88,7 @@ def test_extract_on_complete(
         job_id=job_id, error_result=False
     )
     mock_bg_dataset_provider.return_value = BigQueryFacets(
-        run_facets=RUN_FACETS, inputs=INPUT_STATS, output=OUTPUT_STATS
+        run_facets=RUN_FACETS, inputs=INPUT_STATS, outputs=OUTPUT_STATS
     )
 
     task_id = "insert_query_job"