Skip to content
Original file line number Diff line number Diff line change
Expand Up @@ -415,11 +415,14 @@ def copy_metadata_to_dated_folder(self):
"""Copy processed metadata CSV into a dated archive folder in S3, then remove the original."""
logger.info("Copying metadata CSV to dated folder")
# Timestamp is baked into the archive key so repeated runs never overwrite each other.
current_datetime = datetime.now().strftime("%Y-%m-%d_%H-%M")
original_path_directory = str(Path(self.file_key).parent)
logger.info(f"Original file key is {self.file_key}")
# Destination preserves the source directory name alongside the timestamp,
# e.g. "some/dir/metadata.csv" -> "metadata/some/dir_2026-03-05_12-34.csv".
destination_key = f"metadata/{original_path_directory}_{current_datetime}.csv"
# Fix: the call previously passed BOTH the old literal key and destination_key
# (a leftover diff line), giving copy_across_bucket five positional arguments.
self.s3_service.copy_across_bucket(
    self.staging_bucket_name,
    self.file_key,
    self.staging_bucket_name,
    destination_key,
)
# Archive is copy-then-delete: the staging object is removed only after the copy call.
self.s3_service.delete_object(self.staging_bucket_name, self.file_key)

Expand Down
77 changes: 61 additions & 16 deletions lambdas/tests/unit/handlers/test_bulk_upload_metadata_handler.py
Original file line number Diff line number Diff line change
@@ -1,21 +1,66 @@
import pytest
from handlers.bulk_upload_metadata_handler import lambda_handler
from models.staging_metadata import METADATA_FILENAME
from services.bulk_upload_metadata_service import BulkUploadMetadataService
from unittest.mock import Mock

from services.bulk_upload_metadata_processor_service import (
BulkUploadMetadataProcessorService,
)

def test_lambda_call_process_metadata_of_service_class(
    set_env, event, context, mock_metadata_service
):
    """Handler delegates to the metadata service with the canonical metadata filename."""
    lambda_handler(event, context)

    # The handler's only observable contract here: one process_metadata call
    # with METADATA_FILENAME (imported from models.staging_metadata above).
    mock_metadata_service.process_metadata.assert_called_once_with(METADATA_FILENAME)
def test_copy_metadata_to_dated_folder_copies_and_deletes(mocker, monkeypatch):
    """copy_metadata_to_dated_folder archives the CSV under a dated key and deletes the original.

    Fix: the pre-existing mock_metadata_service fixture had been spliced (by a bad
    merge/diff) into the middle of this function, splitting its body in two. The
    test is reconstructed contiguously and the fixture moved after it, unchanged.
    """
    # Environment read by the service at construction time.
    monkeypatch.setenv("STAGING_STORE_BUCKET_NAME", "staging-bucket")
    monkeypatch.setenv("METADATA_SQS_QUEUE_URL", "https://example.com/metadata-queue")
    monkeypatch.setenv("EXPEDITE_SQS_QUEUE_URL", "https://example.com/expedite-queue")

    # Stub every AWS-facing collaborator so the constructor makes no real calls.
    mocker.patch(
        "services.bulk_upload_metadata_processor_service.S3Service",
        autospec=True,
    )
    mocker.patch(
        "services.bulk_upload_metadata_processor_service.SQSService",
        autospec=True,
    )
    mocker.patch(
        "services.bulk_upload_metadata_processor_service.BulkUploadDynamoRepository",
        autospec=True,
    )
    mocker.patch(
        "services.bulk_upload_metadata_processor_service.BulkUploadSqsRepository",
        autospec=True,
    )
    mocker.patch(
        "services.bulk_upload_metadata_processor_service.BulkUploadS3Repository",
        autospec=True,
    )
    mocker.patch(
        "services.bulk_upload_metadata_processor_service.get_virus_scan_service",
        autospec=True,
    )

    # Freeze the timestamp used to build the dated destination key.
    mocked_datetime = mocker.patch(
        "services.bulk_upload_metadata_processor_service.datetime",
    )
    mocked_datetime.now.return_value.strftime.return_value = "2026-03-05_12-34"

    formatter_service = Mock()

    service = BulkUploadMetadataProcessorService(
        metadata_formatter_service=formatter_service,
        metadata_heading_remap={},
        input_file_location="some/dir/metadata.csv",
    )

    # Replace the service's S3 client so the copy/delete calls can be asserted on.
    service.s3_service = Mock()

    service.copy_metadata_to_dated_folder()

    # Destination embeds the source directory and the frozen timestamp.
    expected_destination_key = "metadata/some/dir_2026-03-05_12-34.csv"

    service.s3_service.copy_across_bucket.assert_called_once_with(
        "staging-bucket",
        "some/dir/metadata.csv",
        "staging-bucket",
        expected_destination_key,
    )
    service.s3_service.delete_object.assert_called_once_with(
        "staging-bucket",
        "some/dir/metadata.csv",
    )


@pytest.fixture
def mock_metadata_service(mocker):
    """Yield an autospecced mock instance replacing the handler's BulkUploadMetadataService."""
    mocked_instance = mocker.patch(
        "handlers.bulk_upload_metadata_handler.BulkUploadMetadataService",
        spec=BulkUploadMetadataService,
    ).return_value
    yield mocked_instance
Loading