Skip to content

Commit

Permalink
[PRMP-1063] blank bulk upload reports don't create a file
Browse files Browse the repository at this point in the history
* [PRMP-1063] blank bulk upload reports don't create a file

* [PRMP-1063] moved CSV creation and upload logic into separate function

* [PRMP-1063] correcting log typo, adding param types to function

* [PRMP-1063] created unit test for csv and s3 writer, mocked existing tests
  • Loading branch information
AndyFlintNHS authored Oct 30, 2024
1 parent 273955c commit 8d1dc7d
Show file tree
Hide file tree
Showing 2 changed files with 164 additions and 110 deletions.
91 changes: 41 additions & 50 deletions lambdas/services/bulk_upload_report_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -158,16 +158,11 @@ def generate_success_report(self, ods_reports: list[OdsReport]):
[str(patient[0]), str(report.uploader_ods_code), str(patient[1])]
)

self.write_additional_report_items_to_csv(
file_name=file_name, headers=headers, rows_to_write=data_rows
)

logger.info("Uploading daily success report file to S3")
self.s3_service.upload_file(
s3_bucket_name=self.reports_bucket,
file_key=f"{self.s3_key_prefix}/{file_name}",
file_name=f"/tmp/{file_name}",
)
if data_rows:
logger.info("Uploading daily success report file to S3")
self.write_and_upload_additional_reports(file_name, headers, data_rows)
else:
logger.info("No data to report for daily success report file")

def generate_suspended_report(self, ods_reports: list[OdsReport]):
file_name = (
Expand All @@ -186,16 +181,11 @@ def generate_suspended_report(self, ods_reports: list[OdsReport]):
[str(patient[0]), str(report.uploader_ods_code), str(patient[1])]
)

self.write_additional_report_items_to_csv(
file_name=file_name, headers=headers, rows_to_write=data_rows
)

logger.info("Uploading daily suspended report file to S3")
self.s3_service.upload_file(
s3_bucket_name=self.reports_bucket,
file_key=f"{self.s3_key_prefix}/{file_name}",
file_name=f"/tmp/{file_name}",
)
if data_rows:
logger.info("Uploading daily suspended report file to S3")
self.write_and_upload_additional_reports(file_name, headers, data_rows)
else:
logger.info("No data to report for daily suspended report file")

def generate_deceased_report(self, ods_reports: list[OdsReport]):
file_name = (
Expand All @@ -220,16 +210,11 @@ def generate_deceased_report(self, ods_reports: list[OdsReport]):
]
)

self.write_additional_report_items_to_csv(
file_name=file_name, headers=headers, rows_to_write=data_rows
)

logger.info("Uploading daily deceased report file to S3")
self.s3_service.upload_file(
s3_bucket_name=self.reports_bucket,
file_key=f"{self.s3_key_prefix}/{file_name}",
file_name=f"/tmp/{file_name}",
)
if data_rows:
logger.info("Uploading daily deceased report file to S3")
self.write_and_upload_additional_reports(file_name, headers, data_rows)
else:
logger.info("No data to report for daily deceased report file")

def generate_restricted_report(self, ods_reports: list[OdsReport]):
file_name = (
Expand All @@ -248,16 +233,11 @@ def generate_restricted_report(self, ods_reports: list[OdsReport]):
[str(patient[0]), str(report.uploader_ods_code), str(patient[1])]
)

self.write_additional_report_items_to_csv(
file_name=file_name, headers=headers, rows_to_write=data_rows
)

logger.info("Uploading daily restricted report file to S3")
self.s3_service.upload_file(
s3_bucket_name=self.reports_bucket,
file_key=f"{self.s3_key_prefix}/{file_name}",
file_name=f"/tmp/{file_name}",
)
if data_rows:
logger.info("Uploading daily restricted report file to S3")
self.write_and_upload_additional_reports(file_name, headers, data_rows)
else:
logger.info("No data to report for daily restricted report file")

def generate_rejected_report(self, ods_reports: list[OdsReport]):
file_name = (
Expand All @@ -283,16 +263,11 @@ def generate_rejected_report(self, ods_reports: list[OdsReport]):
]
)

self.write_additional_report_items_to_csv(
file_name=file_name, headers=headers, rows_to_write=data_rows
)

logger.info("Uploading daily rejected report file to S3")
self.s3_service.upload_file(
s3_bucket_name=self.reports_bucket,
file_key=f"{self.s3_key_prefix}/{file_name}",
file_name=f"/tmp/{file_name}",
)
if data_rows:
logger.info("Uploading daily rejected report file to S3")
self.write_and_upload_additional_reports(file_name, headers, data_rows)
else:
logger.info("No data to report for daily rejected report file")

@staticmethod
def write_items_to_csv(items: list[BulkUploadReport], csv_file_path: str):
Expand Down Expand Up @@ -400,3 +375,19 @@ def get_times_for_scan(self) -> tuple[datetime, datetime]:
self.s3_key_prefix = f"bulk-upload-reports/{date_folder_name}"

return start_timestamp, end_timestamp

def write_and_upload_additional_reports(
    self,
    file_name: str,
    headers: list[str],
    data_rows: list[list[str]],
):
    """Write the given report rows to a local CSV and upload it to the reports bucket.

    The CSV is staged under /tmp/<file_name> and uploaded to the reports
    bucket under the current date-based key prefix.
    """
    local_path = f"/tmp/{file_name}"
    report_key = f"{self.s3_key_prefix}/{file_name}"

    # Stage the CSV locally first; the upload reads it back from /tmp.
    self.write_additional_report_items_to_csv(
        file_name=file_name, headers=headers, rows_to_write=data_rows
    )

    self.s3_service.upload_file(
        s3_bucket_name=self.reports_bucket,
        file_key=report_key,
        file_name=local_path,
    )
183 changes: 123 additions & 60 deletions lambdas/tests/unit/services/test_bulk_upload_report_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@

import pytest
from boto3.dynamodb.conditions import Attr
from enums.metadata_report import MetadataReport
from freezegun import freeze_time
from services.bulk_upload_report_service import BulkUploadReportService, OdsReport
from tests.unit.conftest import (
Expand Down Expand Up @@ -434,122 +435,184 @@ def test_generate_summary_report_with_two_ods_reports(


def test_generate_success_report_writes_csv(
bulk_upload_report_service, mock_get_times_for_scan
bulk_upload_report_service, mock_get_times_for_scan, mocker
):
mock_file_name = (
f"daily_statistical_report_bulk_upload_success_{MOCK_TIMESTAMP}.csv"
)
bulk_upload_report_service.write_and_upload_additional_reports = mocker.MagicMock()

test_ods_reports = bulk_upload_report_service.generate_ods_reports(
MOCK_REPORT_ITEMS_ALL,
)

bulk_upload_report_service.generate_success_report(test_ods_reports)

expected = readfile("expected_success_report.csv")
with open(f"/tmp/{mock_file_name}") as test_file:
actual = test_file.read()
assert expected == actual
os.remove(f"/tmp/{mock_file_name}")
bulk_upload_report_service.write_and_upload_additional_reports.assert_called()

bulk_upload_report_service.s3_service.upload_file.assert_called_with(
s3_bucket_name=MOCK_STATISTICS_REPORT_BUCKET_NAME,
file_key=f"bulk-upload-reports/2012-01-13/{mock_file_name}",
file_name=f"/tmp/{mock_file_name}",
)

def test_generate_success_report_does_not_write_when_no_data(
    bulk_upload_report_service, mock_get_times_for_scan, mocker
):
    """Success report generation skips CSV creation/upload for an empty report set."""
    service = bulk_upload_report_service
    # Replace the write/upload helper so we can assert it is never invoked.
    service.write_and_upload_additional_reports = mocker.MagicMock()

    empty_ods_reports = service.generate_ods_reports([])
    service.generate_success_report(empty_ods_reports)

    service.write_and_upload_additional_reports.assert_not_called()


def test_generate_suspended_report_writes_csv(
bulk_upload_report_service, mock_get_times_for_scan
bulk_upload_report_service, mock_get_times_for_scan, mocker
):
mock_file_name = (
f"daily_statistical_report_bulk_upload_suspended_{MOCK_TIMESTAMP}.csv"
)
bulk_upload_report_service.write_and_upload_additional_reports = mocker.MagicMock()

test_ods_reports = bulk_upload_report_service.generate_ods_reports(
MOCK_REPORT_ITEMS_ALL,
)

bulk_upload_report_service.generate_suspended_report(test_ods_reports)

expected = readfile("expected_suspended_report.csv")
with open(f"/tmp/{mock_file_name}") as test_file:
actual = test_file.read()
assert expected == actual
os.remove(f"/tmp/{mock_file_name}")
bulk_upload_report_service.write_and_upload_additional_reports.assert_called()

bulk_upload_report_service.s3_service.upload_file.assert_called_with(
s3_bucket_name=MOCK_STATISTICS_REPORT_BUCKET_NAME,
file_key=f"bulk-upload-reports/2012-01-13/{mock_file_name}",
file_name=f"/tmp/{mock_file_name}",
)

def test_generate_suspended_report_does_not_write_when_no_data(
    bulk_upload_report_service, mock_get_times_for_scan, mocker
):
    """Suspended report generation must not write or upload a file when there is no data."""
    # Replace the write/upload helper so we can assert it is never invoked.
    bulk_upload_report_service.write_and_upload_additional_reports = mocker.MagicMock()

    blank_ods_reports = bulk_upload_report_service.generate_ods_reports([])

    bulk_upload_report_service.generate_suspended_report(blank_ods_reports)

    # Assert on the mocked helper itself. Asserting on
    # s3_service.upload_file (as before) was vacuous: with the helper mocked
    # out, upload_file could never be called regardless of the empty-data
    # guard. This also matches the success-report test's assertion style.
    bulk_upload_report_service.write_and_upload_additional_reports.assert_not_called()


def test_generate_deceased_report_writes_csv(
bulk_upload_report_service, mock_get_times_for_scan
bulk_upload_report_service, mock_get_times_for_scan, mocker
):
mock_file_name = (
f"daily_statistical_report_bulk_upload_deceased_{MOCK_TIMESTAMP}.csv"
)
bulk_upload_report_service.write_and_upload_additional_reports = mocker.MagicMock()

test_ods_reports = bulk_upload_report_service.generate_ods_reports(
MOCK_REPORT_ITEMS_ALL,
)

bulk_upload_report_service.generate_deceased_report(test_ods_reports)

expected = readfile("expected_deceased_report.csv")
with open(f"/tmp/{mock_file_name}") as test_file:
actual = test_file.read()
assert expected == actual
os.remove(f"/tmp/{mock_file_name}")
bulk_upload_report_service.write_and_upload_additional_reports.assert_called()

bulk_upload_report_service.s3_service.upload_file.assert_called_with(
s3_bucket_name=MOCK_STATISTICS_REPORT_BUCKET_NAME,
file_key=f"bulk-upload-reports/2012-01-13/{mock_file_name}",
file_name=f"/tmp/{mock_file_name}",
)

def test_generate_deceased_report_does_not_write_when_no_data(
    bulk_upload_report_service, mock_get_times_for_scan, mocker
):
    """Deceased report generation must not write or upload a file when there is no data."""
    # Replace the write/upload helper so we can assert it is never invoked.
    bulk_upload_report_service.write_and_upload_additional_reports = mocker.MagicMock()

    blank_ods_reports = bulk_upload_report_service.generate_ods_reports([])

    bulk_upload_report_service.generate_deceased_report(blank_ods_reports)

    # Assert on the mocked helper itself. Asserting on
    # s3_service.upload_file (as before) was vacuous: with the helper mocked
    # out, upload_file could never be called regardless of the empty-data
    # guard. This also matches the success-report test's assertion style.
    bulk_upload_report_service.write_and_upload_additional_reports.assert_not_called()


def test_generate_restricted_report_writes_csv(
bulk_upload_report_service, mock_get_times_for_scan
bulk_upload_report_service, mock_get_times_for_scan, mocker
):
mock_file_name = (
f"daily_statistical_report_bulk_upload_restricted_{MOCK_TIMESTAMP}.csv"
)
bulk_upload_report_service.write_and_upload_additional_reports = mocker.MagicMock()

test_ods_reports = bulk_upload_report_service.generate_ods_reports(
MOCK_REPORT_ITEMS_ALL,
)

bulk_upload_report_service.generate_restricted_report(test_ods_reports)

expected = readfile("expected_restricted_report.csv")
with open(f"/tmp/{mock_file_name}") as test_file:
actual = test_file.read()
assert expected == actual
os.remove(f"/tmp/{mock_file_name}")
bulk_upload_report_service.write_and_upload_additional_reports.assert_called()

bulk_upload_report_service.s3_service.upload_file.assert_called_with(
s3_bucket_name=MOCK_STATISTICS_REPORT_BUCKET_NAME,
file_key=f"bulk-upload-reports/2012-01-13/{mock_file_name}",
file_name=f"/tmp/{mock_file_name}",
)

def test_generate_restricted_report_does_not_write_when_no_data(
    bulk_upload_report_service, mock_get_times_for_scan, mocker
):
    """Restricted report generation must not write or upload a file when there is no data."""
    # Replace the write/upload helper so we can assert it is never invoked.
    bulk_upload_report_service.write_and_upload_additional_reports = mocker.MagicMock()

    blank_ods_reports = bulk_upload_report_service.generate_ods_reports([])

    bulk_upload_report_service.generate_restricted_report(blank_ods_reports)

    # Assert on the mocked helper itself. Asserting on
    # s3_service.upload_file (as before) was vacuous: with the helper mocked
    # out, upload_file could never be called regardless of the empty-data
    # guard. This also matches the success-report test's assertion style.
    bulk_upload_report_service.write_and_upload_additional_reports.assert_not_called()


def test_generate_rejected_report_writes_csv(
bulk_upload_report_service, mock_get_times_for_scan
bulk_upload_report_service, mock_get_times_for_scan, mocker
):
mock_file_name = (
f"daily_statistical_report_bulk_upload_rejected_{MOCK_TIMESTAMP}.csv"
)
bulk_upload_report_service.write_and_upload_additional_reports = mocker.MagicMock()

test_ods_reports = bulk_upload_report_service.generate_ods_reports(
MOCK_REPORT_ITEMS_ALL,
)

bulk_upload_report_service.generate_rejected_report(test_ods_reports)

bulk_upload_report_service.write_and_upload_additional_reports.assert_called()


def test_generate_rejected_report_does_not_write_when_no_data(
    bulk_upload_report_service, mock_get_times_for_scan, mocker
):
    """Rejected report generation must not write or upload a file when there is no data."""
    # Replace the write/upload helper so we can assert it is never invoked.
    bulk_upload_report_service.write_and_upload_additional_reports = mocker.MagicMock()

    blank_ods_reports = bulk_upload_report_service.generate_ods_reports([])

    bulk_upload_report_service.generate_rejected_report(blank_ods_reports)

    # Assert on the mocked helper itself. Asserting on
    # s3_service.upload_file (as before) was vacuous: with the helper mocked
    # out, upload_file could never be called regardless of the empty-data
    # guard. This also matches the success-report test's assertion style.
    bulk_upload_report_service.write_and_upload_additional_reports.assert_not_called()


def test_write_and_upload_additional_reports_creates_csv_and_writes_to_s3(
bulk_upload_report_service, mock_get_times_for_scan
):
mock_file_name = (
f"daily_statistical_report_bulk_upload_rejected_{MOCK_TIMESTAMP}.csv"
)

mock_headers = [
MetadataReport.NhsNumber,
MetadataReport.UploaderOdsCode,
MetadataReport.Date,
MetadataReport.Reason,
]

mock_data_rows = [
[
"9000000005",
"Y12345",
"2012-01-13",
"Could not find the given patient on PDS",
],
[
"9000000006",
"Y12345",
"2012-01-13",
"Could not find the given patient on PDS",
],
["9000000007", "Y12345", "2012-01-13", "Lloyd George file already exists"],
[
"9000000014",
"Z12345",
"2012-01-13",
"Could not find the given patient on PDS",
],
[
"9000000015",
"Z12345",
"2012-01-13",
"Could not find the given patient on PDS",
],
["9000000016", "Z12345", "2012-01-13", "Lloyd George file already exists"],
]

bulk_upload_report_service.write_and_upload_additional_reports(
mock_file_name, mock_headers, mock_data_rows
)

expected = readfile("expected_rejected_report.csv")
with open(f"/tmp/{mock_file_name}") as test_file:
actual = test_file.read()
Expand Down

0 comments on commit 8d1dc7d

Please sign in to comment.