Skip to content

Commit

Permalink
PRMP-0000: Pre-prod fixes (#438)
Browse files Browse the repository at this point in the history
  • Loading branch information
abbas-khan10 authored Sep 27, 2024
1 parent 919545b commit 187ecb4
Show file tree
Hide file tree
Showing 4 changed files with 79 additions and 22 deletions.
4 changes: 4 additions & 0 deletions lambdas/enums/metadata_report.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,3 +11,7 @@ class MetadataReport(StrEnum):
Date = "Date"
Timestamp = "Timestamp"
ID = "ID"

@staticmethod
def list():
    """Return every MetadataReport member converted to a plain str."""
    return list(map(str, MetadataReport))
38 changes: 34 additions & 4 deletions lambdas/services/bulk_upload_report_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,12 @@ def report_handler(self):
)

if report_data:
logger.info(
f"Bulk upload reports for {str(start_time)} to {str(end_time)}.csv"
)

generated_at = end_time.strftime("%Y%m%d")

ods_reports: list[OdsReport] = self.generate_ods_reports(
report_data, generated_at
)
Expand All @@ -37,6 +42,9 @@ def report_handler(self):
self.generate_summary_report(ods_reports, generated_at)
logger.info("Successfully processed daily summary report")

self.generate_daily_report(report_data, start_time, end_time)
logger.info("Successfully processed daily report")

else:
logger.info("No data found, no new report file to upload")

Expand Down Expand Up @@ -133,7 +141,7 @@ def generate_individual_ods_report(

file_key = f"daily_statistical_report_bulk_upload_summary_{generated_at}_uploaded_by_{uploader_ods_code}.csv"

self.write_items_to_csv(
self.write_summary_data_to_csv(
file_name=file_key,
total_successful=ods_report.total_successful,
total_registered_elsewhere=ods_report.total_registered_elsewhere,
Expand Down Expand Up @@ -181,7 +189,7 @@ def generate_summary_report(self, ods_reports: list[OdsReport], generated_at: st
file_name = f"daily_statistical_report_bulk_upload_summary_{generated_at}.csv"
file_key = f"daily-reports/{file_name}"

self.write_items_to_csv(
self.write_summary_data_to_csv(
file_name=file_name,
total_successful=total_successful,
total_registered_elsewhere=total_registered_elsewhere,
Expand All @@ -196,8 +204,30 @@ def generate_summary_report(self, ods_reports: list[OdsReport], generated_at: st
file_name=f"/tmp/{file_name}",
)

def write_items_to_csv(
self,
def generate_daily_report(self, report_data: list, start_time: str, end_time: str):
    """Write the raw report rows to a CSV under /tmp, then upload that file
    to the daily-reports/ prefix of the reports bucket.

    Args:
        report_data: rows to write via write_items_to_csv.
        start_time: report window start, embedded in the file name.
        end_time: report window end, embedded in the file name.
    """
    report_file = f"Bulk upload report for {str(start_time)} to {str(end_time)}.csv"
    local_path = f"/tmp/{report_file}"

    self.write_items_to_csv(report_data, local_path)

    logger.info("Uploading daily report file to S3")
    self.s3_service.upload_file(
        s3_bucket_name=self.reports_bucket,
        file_key=f"daily-reports/{report_file}",
        file_name=local_path,
    )

@staticmethod
def write_items_to_csv(items: list, csv_file_path: str):
    """Write report items to a CSV file with MetadataReport fields as columns.

    Args:
        items: rows to write; each must be a mapping whose keys match
            MetadataReport.list() (DictWriter raises ValueError on extra keys).
        csv_file_path: destination path for the CSV file.
    """
    logger.info("Writing scan results to csv file")
    # newline="" lets the csv writer control line endings itself; without it,
    # output on Windows gets a blank row between every record (csv module docs).
    with open(csv_file_path, "w", newline="") as output_file:
        field_names = MetadataReport.list()
        dict_writer_object = csv.DictWriter(output_file, fieldnames=field_names)
        dict_writer_object.writeheader()
        dict_writer_object.writerows(items)

@staticmethod
def write_summary_data_to_csv(
file_name: str,
total_successful: int,
total_registered_elsewhere: int,
Expand Down
6 changes: 3 additions & 3 deletions lambdas/services/pds_api_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,9 +8,9 @@
from botocore.exceptions import ClientError
from enums.pds_ssm_parameters import SSMParameter
from requests.adapters import HTTPAdapter
from requests.models import HTTPError
from requests.exceptions import ConnectionError, HTTPError, Timeout
from services.patient_search_service import PatientSearch
from urllib3 import HTTPConnectionPool, Retry
from urllib3 import Retry
from utils.audit_logging_setup import LoggingService
from utils.exceptions import PdsErrorException, PdsTooManyRequestsException

Expand Down Expand Up @@ -67,7 +67,7 @@ def pds_request(self, nhs_number: str, retry_on_expired: bool):
logger.error(str(e), {"Result": "Error when getting ssm parameters"})
raise PdsErrorException("Failed to perform patient search")

except HTTPConnectionPool as e:
except (ConnectionError, Timeout, HTTPError) as e:
logger.error(str(e), {"Result": "Error when calling PDS"})
raise PdsTooManyRequestsException("Failed to perform patient search")

Expand Down
53 changes: 38 additions & 15 deletions lambdas/tests/unit/services/test_bulk_upload_report_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -87,6 +87,11 @@ def mock_get_db_report_items(bulk_upload_report_service, mocker):
yield mocker.patch.object(bulk_upload_report_service, "get_dynamodb_report_items")


@pytest.fixture
def mock_write_summary_data_to_csv(bulk_upload_report_service, mocker):
    """Patch the service's write_summary_data_to_csv so tests can assert on calls."""
    patched = mocker.patch.object(
        bulk_upload_report_service, "write_summary_data_to_csv"
    )
    yield patched


@pytest.fixture
def mock_write_items_to_csv(bulk_upload_report_service, mocker):
    """Patch the service's write_items_to_csv so tests can assert on calls."""
    patched = mocker.patch.object(bulk_upload_report_service, "write_items_to_csv")
    yield patched
Expand Down Expand Up @@ -244,15 +249,17 @@ def test_report_handler_no_items_returns_expected_log(
def test_report_handler_with_items_uploads_summary_report_to_bucket(
bulk_upload_report_service,
mock_get_db_with_data,
mock_write_items_to_csv,
mock_write_summary_data_to_csv,
mock_get_times_for_scan,
caplog,
):
expected_message = "Successfully processed daily summary report"
expected_messages = [
"Successfully processed daily ODS reports",
"Successfully processed daily summary report",
"Successfully processed daily report",
]

mock_date_string = MOCK_END_REPORT_TIME.strftime("%Y%m%d")
mock_file_name = (
f"daily_statistical_report_bulk_upload_summary_{mock_date_string}.csv"
)
bulk_upload_report_service.report_handler()

mock_get_times_for_scan.assert_called_once()
Expand All @@ -261,19 +268,35 @@ def test_report_handler_with_items_uploads_summary_report_to_bucket(
int(MOCK_END_REPORT_TIME.timestamp()),
)

mock_write_items_to_csv.assert_called()
mock_write_summary_data_to_csv.assert_called()

bulk_upload_report_service.s3_service.upload_file.assert_called()
bulk_upload_report_service.s3_service.upload_file.assert_called_with(
s3_bucket_name=MOCK_STATISTICS_REPORT_BUCKET_NAME,
file_key=f"daily-reports/{mock_file_name}",
file_name=f"/tmp/{mock_file_name}",
)
assert caplog.records[-1].msg == expected_message
calls = [
call(
s3_bucket_name=MOCK_STATISTICS_REPORT_BUCKET_NAME,
file_key=f"daily_statistical_report_bulk_upload_summary_{mock_date_string}_uploaded_by_Y12345.csv",
file_name=f"/tmp/daily_statistical_report_bulk_upload_summary_{mock_date_string}_uploaded_by_Y12345.csv",
),
call(
s3_bucket_name=MOCK_STATISTICS_REPORT_BUCKET_NAME,
file_key=f"daily-reports/daily_statistical_report_bulk_upload_summary_{mock_date_string}.csv",
file_name=f"/tmp/daily_statistical_report_bulk_upload_summary_{mock_date_string}.csv",
),
call(
s3_bucket_name=MOCK_STATISTICS_REPORT_BUCKET_NAME,
file_key=f"daily-reports/Bulk upload report for {str(MOCK_END_REPORT_TIME)}.csv",
file_name=f"/tmp/Bulk upload report for {str(MOCK_END_REPORT_TIME)}.csv",
),
]

bulk_upload_report_service.s3_service.upload_file.has_calls(calls)

log_message_match = set(expected_messages).issubset(caplog.messages)

assert log_message_match


def test_generate_individual_ods_report_creates_ods_report(
bulk_upload_report_service, mock_write_items_to_csv
bulk_upload_report_service, mock_write_summary_data_to_csv
):
mock_ods_report_data = [MOCK_DATA_COMPLETE_UPLOAD, MOCK_DATA_FAILED_UPLOAD]
expected = OdsReport(
Expand All @@ -289,7 +312,7 @@ def test_generate_individual_ods_report_creates_ods_report(

assert actual.__dict__ == expected.__dict__
bulk_upload_report_service.s3_service.upload_file.assert_called()
mock_write_items_to_csv.assert_called()
mock_write_summary_data_to_csv.assert_called()


def test_generate_individual_ods_report_writes_csv_report(bulk_upload_report_service):
Expand Down

0 comments on commit 187ecb4

Please sign in to comment.