diff --git a/.github/workflows/run-flyway-command.yml b/.github/workflows/run-flyway-command.yml
index d57f38fd7..f808a3be8 100644
--- a/.github/workflows/run-flyway-command.yml
+++ b/.github/workflows/run-flyway-command.yml
@@ -15,9 +15,12 @@ on:
options: [info, migrate, validate]
database:
type: string
+ required: true
+ description: Database on which to perform migrations
+ skip-s3-upload:
+ type: boolean
required: false
- description: Database on which to perform migrations (defaults to dap_txma_reporting_db)
- default: unset
+ description: Skip uploading flyway files to S3
jobs:
validate-environment:
@@ -28,9 +31,24 @@ jobs:
- name: Validate input environment
run: scripts/validate-environment.sh ${{ inputs.environment }}
- run-flyway:
+ upload-flyway-files:
+ if: inputs.skip-s3-upload == false
needs: [validate-environment]
# These permissions are needed to interact with GitHub's OIDC Token endpoint (enabling the aws-actions/configure-aws-credentials action)
+ permissions:
+ id-token: write
+ contents: read
+ secrets: inherit
+ uses: ./.github/workflows/upload-flyway-files.yml
+ with:
+ environment: ${{ inputs.environment }}
+
+ run-flyway:
+  # needs is combined with always() because this job must run whether or not upload-flyway-files runs
+  # (and if upload-flyway-files does run, this job runs after it)
+ needs: [upload-flyway-files]
+ if: always()
+ # These permissions are needed to interact with GitHub's OIDC Token endpoint (enabling the aws-actions/configure-aws-credentials action)
permissions:
id-token: write
contents: read
@@ -45,8 +63,7 @@ jobs:
role-to-assume: ${{ secrets[format('DB_MIGRATION_ROLE_ARN_{0}', inputs.environment)] }}
- name: Invoke lambda
run: |
- DATABASE=$(if [ -z ${{ inputs.database }} ] || [ ${{ inputs.database }} == "unset" ]; then echo dap_txma_reporting_db; else echo ${{ inputs.database }}; fi)
- PAYLOAD=$(echo "{\"command\": \"${{ inputs.command }}\", \"database\": \"$DATABASE\"}")
+ PAYLOAD=$(echo "{\"command\": \"${{ inputs.command }}\", \"database\": \"${{ inputs.database }}\"}")
echo "$PAYLOAD" | jq
ENCODED=$(echo "$PAYLOAD" | openssl base64)
aws --region eu-west-2 lambda invoke --function-name run-flyway-command --payload "$ENCODED" out.json
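
The invoke step base64-encodes the payload because AWS CLI v2 treats `--payload` as binary by default. Below is a minimal TypeScript sketch of the equivalent invocation through the AWS SDK, where no encoding step is needed; the `invokeFlyway` helper is illustrative and not part of the repo.

```typescript
import { InvokeCommand, LambdaClient } from '@aws-sdk/client-lambda';

// Hypothetical helper mirroring the workflow's invoke step. The SDK accepts a
// raw byte payload, so the openssl base64 step from the workflow is not needed.
const invokeFlyway = async (command: string, database: string): Promise<void> => {
  const client = new LambdaClient({ region: 'eu-west-2' });
  const payload = JSON.stringify({ command, database });
  const result = await client.send(
    new InvokeCommand({ FunctionName: 'run-flyway-command', Payload: Buffer.from(payload) }),
  );
  // surface lambda-side failures, which the CLI reports via out.json
  if (result.FunctionError !== undefined) {
    throw new Error(`Flyway lambda failed - ${result.FunctionError}`);
  }
};

// e.g. await invokeFlyway('migrate', 'dap_txma_reporting_db');
```
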
diff --git a/.github/workflows/upload-flyway-files.yml b/.github/workflows/upload-flyway-files.yml
new file mode 100644
index 000000000..8001d4962
--- /dev/null
+++ b/.github/workflows/upload-flyway-files.yml
@@ -0,0 +1,48 @@
+name: ✳️ Upload Flyway files to S3
+
+on:
+ workflow_call:
+ inputs:
+ environment:
+ type: string
+ required: true
+ workflow_dispatch:
+ inputs:
+ environment:
+ type: choice
+ required: true
+ description: AWS environment
+ options: [DEV, TEST, FEATURE, BUILD, STAGING, INTEGRATION, PRODUCTION, PRODUCTION-PREVIEW]
+
+jobs:
+ validate-environment:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Check out repository code
+ uses: actions/checkout@v4
+ - name: Validate input environment
+ run: scripts/validate-environment.sh ${{ inputs.environment }}
+
+ upload-to-s3:
+ needs: [validate-environment]
+ # These permissions are needed to interact with GitHub's OIDC Token endpoint (enabling the aws-actions/configure-aws-credentials action)
+ permissions:
+ id-token: write
+ contents: read
+ runs-on: ubuntu-latest
+ steps:
+ - name: Check out repository code
+ uses: actions/checkout@v4
+ - name: Assume AWS GitHub actions role
+ uses: aws-actions/configure-aws-credentials@v4
+ with:
+ aws-region: eu-west-2
+ role-to-assume: ${{ secrets.FLYWAY_FILES_UPLOAD_ROLE_ARN }}
+ - name: Upload flyway files to S3
+ run: |
+ REGION="eu-west-2"
+ FILES_ROOT="redshift-scripts/flyway"
+ S3_BUCKET="s3://$(echo "${{ inputs.environment }}" | tr '[:upper:]' '[:lower:]')-dap-flyway-files"
+
+ echo "Uploading contents of $FILES_ROOT to bucket $S3_BUCKET"
+ aws --region="$REGION" s3 cp "$FILES_ROOT" "$S3_BUCKET" --recursive
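
The bucket name in the upload step is derived purely from the environment input. A sketch of that convention in TypeScript (the function name is illustrative, not from the repo):

```typescript
// Mirrors the shell's tr '[:upper:]' '[:lower:]' step: the uppercase
// environment input is lowercased and suffixed to form the bucket name.
const flywayFilesBucket = (environment: string): string =>
  `${environment.toLowerCase()}-dap-flyway-files`;

// flywayFilesBucket('PRODUCTION-PREVIEW') === 'production-preview-dap-flyway-files'
```
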
diff --git a/.prettierignore b/.prettierignore
index ec87a5b74..f3fb12609 100644
--- a/.prettierignore
+++ b/.prettierignore
@@ -11,3 +11,4 @@ Dockerfile
*.sql
*.jar
*.tar.gz
+flyway.conf
diff --git a/README.md b/README.md
index 4b0a89469..dbe5f0115 100644
--- a/README.md
+++ b/README.md
@@ -86,7 +86,7 @@ Below is a list of workflows. The ✳️ symbol at the start of a workflow name
| Name | File | Triggers | Purpose |
|-------------------------------------------------|----------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------|
| Deploy to an AWS environment | deploy-to-aws.yml | - [other workflows](https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#workflow_call) | Deploys to a deployable AWS environment (dev, build, test) |
-| ✳️ Deploy to the test environment | deploy-to-test.yml | - [manual](https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#workflow_dispatch) | Deploys IaC and lambda code to the test AWS |
+| ✳️ Deploy to the test environment | deploy-to-test.yml | - [manual](https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#workflow_dispatch)<br>- [other workflows](https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#workflow_call) | Deploys IaC and lambda code to the test AWS |
| ✳️ Deploy to the dev environment | deploy-to-dev.yml | - [merge to main](https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#push)<br>- [manual](https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#workflow_dispatch) | Deploys IaC and lambda code to the dev AWS |
| Deploy to the build environment | deploy-to-build.yml | - [merge to main](https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#push) | Deploys IaC and lambda code to the build AWS |
| ✳️ Test and validate iac and lambdas | test-and-validate.yml | - [other workflows](https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#workflow_call)<br>- [pull requests](https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request)<br>- [manual](https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#workflow_dispatch) | Runs linting, formatting and testing of lambda code, and linting and scanning of IaC code |
@@ -101,6 +101,7 @@ Below is a list of workflows. The ✳️ symbol at the start of a workflow name
| ✳️ Add Quicksight users from spreadsheet | add-quicksight-users.yml | - [manual](https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#workflow_dispatch) | Reads the DAP account management spreadsheet and attempts to add users to Cognito and Quicksight |
| ✳️ Deploy to the production preview environment | deploy-to-production-preview.yml | - [manual](https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#workflow_dispatch) | Deploys to the production-preview environment |
| SAM deploy | sam-deploy.yml | - [other workflows](https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#workflow_call) | Performs a SAM deploy to an environment without secure pipelines (feature, production-preview) |
+| ✳️ Upload Flyway files to S3 | upload-flyway-files.yml | - [manual](https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#workflow_dispatch)<br>- [other workflows](https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#workflow_call) | Uploads flyway files for a particular environment (under [redshift-scripts/flyway](redshift-scripts/flyway)) to S3 |
## Testing
diff --git a/iac/main/resources/redshift.yml b/iac/main/resources/redshift.yml
index dcde8e843..d6f7b826a 100644
--- a/iac/main/resources/redshift.yml
+++ b/iac/main/resources/redshift.yml
@@ -282,11 +282,19 @@ RunFlywayCommandLambda:
Condition:
StringEquals:
kms:EncryptionContext:SecretARN: !Ref RedshiftSecret
+ - Effect: Allow
+ Action:
+ - s3:GetObject
+ - s3:ListBucket
+ Resource:
+ - !Sub ${FlywayFilesBucket.Arn}
+ - !Sub ${FlywayFilesBucket.Arn}/*
ReservedConcurrentExecutions: 10
Environment:
# checkov:skip=CKV_AWS_173: These environment variables do not require encryption
Variables:
REDSHIFT_SECRET_ID: !Ref RedshiftSecret
+ FLYWAY_FILES_BUCKET_NAME: !Ref FlywayFilesBucket
ENVIRONMENT: !Ref Environment
Tags:
Environment: !Ref Environment
@@ -309,3 +317,89 @@ FlywayLayer:
ContentUri: layer-dist/flyway
LayerName: !Sub ${Environment}-dap-lambda-layer-flyway
RetentionPolicy: Delete
+
+FlywayFilesBucket:
+ Type: AWS::S3::Bucket
+ Properties:
+ AccessControl: Private
+ BucketName: !Sub ${Environment}-dap-flyway-files
+ LoggingConfiguration:
+ DestinationBucketName: !Ref GlobalLogBucket
+ LogFilePrefix: dap-flyway-files/log
+ PublicAccessBlockConfiguration:
+ BlockPublicAcls: true
+ BlockPublicPolicy: true
+ IgnorePublicAcls: true
+ RestrictPublicBuckets: true
+ VersioningConfiguration:
+ Status: Enabled
+ LifecycleConfiguration:
+ Rules:
+ - ExpirationInDays: 365
+ Status: Enabled
+ NotificationConfiguration:
+ LambdaConfigurations:
+ - Event: s3:ObjectCreated:*
+ Function: !GetAtt S3NotificationsLoggerLambda.Arn
+ - Event: s3:ObjectRemoved:*
+ Function: !GetAtt S3NotificationsLoggerLambda.Arn
+
+FlywayFilesBucketPolicy:
+ Type: AWS::S3::BucketPolicy
+ Properties:
+ Bucket: !Ref FlywayFilesBucket
+ PolicyDocument:
+ Version: 2012-10-17
+ Statement:
+ - Effect: Deny
+ Action: 's3:*'
+ Resource: !Sub ${FlywayFilesBucket.Arn}/*
+ Principal: '*'
+ Condition:
+ Bool:
+ aws:SecureTransport: false
+ - Effect: Allow
+ Action:
+ - s3:GetObject
+ - s3:GetObjectVersion
+ - s3:ListBucket
+ - s3:ListBucketVersions
+ - s3:PutObject
+ Resource:
+ - !Sub ${FlywayFilesBucket.Arn}
+ - !Sub ${FlywayFilesBucket.Arn}/*
+ Principal:
+ AWS: !Sub arn:aws:iam::${BuildAccountId}:role/dap-flyway-files-upload-role
+
+FlywayFilesBucketUploadRole:
+ Condition: IsBuild
+ Type: AWS::IAM::Role
+ Properties:
+ RoleName: dap-flyway-files-upload-role
+ AssumeRolePolicyDocument:
+ Version: 2012-10-17
+ Statement:
+ - Effect: Allow
+ Principal:
+ Federated: !Sub arn:aws:iam::${AWS::AccountId}:oidc-provider/token.actions.githubusercontent.com
+ Action: sts:AssumeRoleWithWebIdentity
+ Condition:
+ StringLike:
+ 'token.actions.githubusercontent.com:sub':
+ - repo:govuk-one-login/data-analytics-platform:ref:refs/heads/*
+ - repo:govuk-one-login/data-analytics-platform:environment:*
+ Policies:
+ - PolicyName: dap-flyway-files-upload-policy
+ PolicyDocument:
+ Version: 2012-10-17
+ Statement:
+ - Effect: Allow
+ Action:
+ - s3:GetObject
+ - s3:GetObjectVersion
+ - s3:ListBucket
+ - s3:ListBucketVersions
+ - s3:PutObject
+ Resource:
+ - arn:aws:s3:::*-dap-flyway-files
+ - arn:aws:s3:::*-dap-flyway-files/*
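
The upload role's trust policy only admits GitHub OIDC tokens whose `sub` claim matches the repo's branches or environments. A rough TypeScript illustration of how those `StringLike` wildcards match; the matcher is a simplified stand-in for AWS's actual evaluation, and the helper names are invented:

```typescript
// The trust policy's StringLike patterns: '*' is a wildcard, everything else
// is matched literally.
const allowedSubPatterns = [
  'repo:govuk-one-login/data-analytics-platform:ref:refs/heads/*',
  'repo:govuk-one-login/data-analytics-platform:environment:*',
];

const subMatches = (sub: string): boolean =>
  allowedSubPatterns.some(pattern => {
    // escape regex metacharacters, then turn '*' into '.*'
    const escaped = pattern.replace(/[.+?^${}()|[\]\\]/g, '\\$&');
    return new RegExp(`^${escaped.replace(/\*/g, '.*')}$`).test(sub);
  });

// subMatches('repo:govuk-one-login/data-analytics-platform:environment:DEV') === true
// subMatches('repo:another-org/another-repo:environment:DEV') === false
```
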
diff --git a/redshift-scripts/flyway/flyway.conf b/redshift-scripts/flyway/flyway.conf
new file mode 100644
index 000000000..e69de29bb
diff --git a/redshift-scripts/migrations/V1__add_hello_table.sql b/redshift-scripts/flyway/migrations/V1__add_hello_table.sql
similarity index 100%
rename from redshift-scripts/migrations/V1__add_hello_table.sql
rename to redshift-scripts/flyway/migrations/V1__add_hello_table.sql
diff --git a/scripts/build-flyway-layer.sh b/scripts/build-flyway-layer.sh
index 4dca66603..6476c0b06 100755
--- a/scripts/build-flyway-layer.sh
+++ b/scripts/build-flyway-layer.sh
@@ -30,7 +30,3 @@ rm -rf "$FLYWAY_DIR"/lib/rgcompare
# remove jre/legal/ as it is full of broken symlinks that cause sam deploy to exit with an error
rm -rf "$FLYWAY_DIR"/jre/legal
-
-# add migrations
-mkdir -p "$FLYWAY_DIR"/sql
-cp redshift-scripts/migrations/*.sql "$FLYWAY_DIR"/sql
diff --git a/src/handlers/redshift-rotate-secret/handler.spec.ts b/src/handlers/redshift-rotate-secret/handler.spec.ts
index b8da05f93..fe7db8a34 100644
--- a/src/handlers/redshift-rotate-secret/handler.spec.ts
+++ b/src/handlers/redshift-rotate-secret/handler.spec.ts
@@ -11,6 +11,9 @@ import { databaseAccess, handler } from './handler';
import type { RotateSecretStep } from './handler';
import type { RedshiftSecret, SecretRotationStage } from '../../shared/types/secrets-manager';
import type { Database } from './database-access';
+import { DatabaseAccess } from './database-access';
+import { getLogger } from '../../shared/powertools';
+import type { Knex } from 'knex';
const mockSecretsManagerClient = mockClient(SecretsManagerClient);
@@ -205,6 +208,29 @@ test('finish secret no current version', async () => {
expect(mockSecretsManagerClient.calls()).toHaveLength(2);
});
+test('secret to database connection', async () => {
+ // @ts-expect-error this incorrectly extends DatabaseAccess by overriding a private method but it's fine as it's a test
+ const databaseAccess = new (class extends DatabaseAccess {
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ private async validateConnection(connection: Knex): Promise<any> {
+ return connection;
+ }
+ })(getLogger(''));
+
+ const secret = JSON.parse(getSecretString({ SecretId: 'hello', VersionStage: 'AWSCURRENT' }));
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ const connection: any = await databaseAccess.getDatabaseConnection(secret);
+ const config = connection.context.client.config;
+
+ expect(config.client).toEqual('pg');
+ expect(config.connection).toEqual({
+ host: secret.host,
+ user: secret.username,
+ database: secret.dbname,
+ port: parseInt(secret.port, 10),
+ });
+});
+
const mockSecretsManager = (config: SecretsManagerMockingConfig = {}): void => {
const pendingSecretError = config.pendingSecretError ?? false;
const versions = config.versions ?? { [CLIENT_REQUEST_TOKEN]: ['AWSPENDING'] };
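
The new test asserts that `getDatabaseConnection` turns the Redshift secret into a knex `pg` config. A minimal sketch of that mapping, assuming the secret field names used in the test (`host`/`username`/`dbname`/`port`); this is not the repo's actual `DatabaseAccess` implementation:

```typescript
import knex from 'knex';
import type { Knex } from 'knex';

// Illustrative shape covering only the fields the test checks.
interface RedshiftSecretFields {
  host: string;
  username: string;
  dbname: string;
  port: string;
}

const connectionFromSecret = (secret: RedshiftSecretFields): Knex =>
  knex({
    client: 'pg', // Redshift speaks the Postgres wire protocol
    connection: {
      host: secret.host,
      user: secret.username,
      database: secret.dbname,
      port: parseInt(secret.port, 10), // the secret stores the port as a string
    },
  });
```
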
diff --git a/src/handlers/run-flyway-command/handler.spec.ts b/src/handlers/run-flyway-command/handler.spec.ts
index 6210ca43d..c4431bfa5 100644
--- a/src/handlers/run-flyway-command/handler.spec.ts
+++ b/src/handlers/run-flyway-command/handler.spec.ts
@@ -3,13 +3,19 @@ import { GetSecretValueCommand, SecretsManagerClient } from '@aws-sdk/client-sec
import { handler } from './handler';
import * as child_process from 'node:child_process';
import { getTestResource } from '../../shared/utils/test-utils';
+import * as fs from 'node:fs';
+import { GetObjectCommand, ListObjectsV2Command, S3Client } from '@aws-sdk/client-s3';
+import { Readable } from 'node:stream';
+const mockS3Client = mockClient(S3Client);
const mockSecretsManagerClient = mockClient(SecretsManagerClient);
const DATABASE = 'dap_txma_reporting_db';
const SECRET_ID = 'MySecretId';
+const FLYWAY_FILES_BUCKET_NAME = 'flyway-files-bucket';
+
// this is of type RedshiftSecret but we can't use type annotations in this file (see further explanation above jest.mock call)
const SECRET_VALUE = {
engine: 'redshift',
@@ -40,6 +46,16 @@ jest.mock('node:child_process', () => {
const spawnSyncSpy = jest.spyOn(child_process, 'spawnSync');
+jest.mock('node:fs', () => {
+ return {
+ __esModule: true,
+ ...jest.requireActual('node:fs'),
+ };
+});
+
+const mkdirSyncSpy = jest.spyOn(fs, 'mkdirSync');
+const createWriteStreamSpy = jest.spyOn(fs, 'createWriteStream');
+
let FLYWAY_CONNECTION_ERROR: Record<string, unknown>;
let FLYWAY_INFO: Record<string, unknown>;
@@ -47,15 +63,23 @@ let FLYWAY_INFO: Record<string, unknown>;
beforeAll(async () => {
FLYWAY_CONNECTION_ERROR = JSON.parse(await getTestResource('flyway-connection-error.json'));
FLYWAY_INFO = JSON.parse(await getTestResource('flyway-info.json'));
+ mkdirSyncSpy.mockImplementation();
+ createWriteStreamSpy.mockImplementation();
});
beforeEach(() => {
+ mockS3Client.reset();
+ mockS3Client.callsFake(input => {
+ throw new Error(`Unexpected S3 request - ${JSON.stringify(input)}`);
+ });
+
mockSecretsManagerClient.reset();
mockSecretsManagerClient.callsFake(input => {
throw new Error(`Unexpected Secrets Manager request - ${JSON.stringify(input)}`);
});
process.env.REDSHIFT_SECRET_ID = SECRET_ID;
+ process.env.FLYWAY_FILES_BUCKET_NAME = FLYWAY_FILES_BUCKET_NAME;
});
test('unknown command', async () => {
@@ -64,22 +88,37 @@ test('unknown command', async () => {
);
});
+test('error getting files', async () => {
+ const errorMessage = 's3 error';
+
+ mockS3Responses(errorMessage);
+ mockSecretsManagerResponses();
+
+ await expect(handler({ command: 'info', database: DATABASE })).rejects.toThrow(
+ `Error getting flyway files - ${errorMessage}`,
+ );
+
+ expect(mockS3Client.calls()).toHaveLength(1);
+ expect(mockSecretsManagerClient.calls()).toHaveLength(0);
+});
+
test('error getting secret', async () => {
const errorMessage = 'secretsmanager error';
- mockSecretsManagerClient.on(GetSecretValueCommand, { SecretId: SECRET_ID }).rejectsOnce(errorMessage);
+ mockS3Responses();
+ mockSecretsManagerResponses(errorMessage);
await expect(handler({ command: 'info', database: DATABASE })).rejects.toThrow(
`Error getting redshift secret - Error getting secret - ${errorMessage}`,
);
+ expect(mockS3Client.calls()).toHaveLength(4);
expect(mockSecretsManagerClient.calls()).toHaveLength(1);
});
test('flyway success', async () => {
- mockSecretsManagerClient
- .on(GetSecretValueCommand, { SecretId: SECRET_ID })
- .resolvesOnce({ SecretString: JSON.stringify(SECRET_VALUE) });
+ mockS3Responses();
+ mockSecretsManagerResponses();
spawnSyncSpy.mockImplementation((command, args, options) => {
expect(options?.env).toEqual(EXPECTED_ENVIRONMENT);
@@ -91,12 +130,14 @@ test('flyway success', async () => {
expect(response.stderr).toEqual({});
expect(response.stdout).toEqual(FLYWAY_INFO);
expect(response.error).toBeUndefined();
+
+ expect(mockS3Client.calls()).toHaveLength(4);
+ expect(mockSecretsManagerClient.calls()).toHaveLength(1);
});
test('flyway error', async () => {
- mockSecretsManagerClient
- .on(GetSecretValueCommand, { SecretId: SECRET_ID })
- .resolvesOnce({ SecretString: JSON.stringify(SECRET_VALUE) });
+ mockS3Responses();
+ mockSecretsManagerResponses();
// flyway sends errors using stdout
spawnSyncSpy.mockImplementation((command, args, options) => {
@@ -109,15 +150,17 @@ test('flyway error', async () => {
expect(response.stderr).toEqual({});
expect(response.stdout).toEqual(FLYWAY_CONNECTION_ERROR);
expect(response.error).toBeUndefined();
+
+ expect(mockS3Client.calls()).toHaveLength(4);
+ expect(mockSecretsManagerClient.calls()).toHaveLength(1);
});
test('spawn sync error', async () => {
const error = new Error('Command line error');
const stderr = { error: 'spawn sync error' };
- mockSecretsManagerClient
- .on(GetSecretValueCommand, { SecretId: SECRET_ID })
- .resolvesOnce({ SecretString: JSON.stringify(SECRET_VALUE) });
+ mockS3Responses();
+ mockSecretsManagerResponses();
spawnSyncSpy.mockImplementation((command, args, options) => {
expect(options?.env).toEqual(EXPECTED_ENVIRONMENT);
@@ -129,14 +172,16 @@ test('spawn sync error', async () => {
expect(response.stderr).toEqual(stderr);
expect(response.stdout).toEqual({});
expect(response.error).toEqual(error);
+
+ expect(mockS3Client.calls()).toHaveLength(4);
+ expect(mockSecretsManagerClient.calls()).toHaveLength(1);
});
test('spawn sync uncaught error', async () => {
const error = new Error('Command line error');
- mockSecretsManagerClient
- .on(GetSecretValueCommand, { SecretId: SECRET_ID })
- .resolvesOnce({ SecretString: JSON.stringify(SECRET_VALUE) });
+ mockS3Responses();
+ mockSecretsManagerResponses();
spawnSyncSpy.mockImplementation((command, args, options) => {
expect(options?.env).toEqual(EXPECTED_ENVIRONMENT);
@@ -144,6 +189,9 @@ test('spawn sync uncaught error', async () => {
});
await expect(handler({ command: 'info', database: DATABASE })).rejects.toThrow(error.message);
+
+ expect(mockS3Client.calls()).toHaveLength(4);
+ expect(mockSecretsManagerClient.calls()).toHaveLength(1);
});
// this should return type child_process.SpawnSyncReturns but we can't use type annotations in this file (see further explanation above jest.mock call)
@@ -161,3 +209,51 @@ const spawnSyncResult = (
error,
};
};
+
+class MockReadable extends Readable {
+ pipe<T extends NodeJS.WritableStream>(destination: T, options?: { end?: boolean | undefined }): T {
+ // @ts-expect-error this is fine as it's just for creating a mock
+ return this;
+ }
+
+ // @ts-expect-error this is fine as it's just for creating a mock
+ on(event: string, listener: () => void): this {
+ if (event === 'close') {
+ listener();
+ }
+ return this;
+ }
+}
+
+// the Body properties below need an 'as SdkStream<Readable>' but we can't use type annotations in this file (see further explanation above jest.mock call)
+const mockS3Responses = (errorMessage?: string): void => {
+ const contents = [
+ { Key: 'flyway.conf' },
+ { Key: 'migrations/V1_0__first_migration.sql' },
+ { Key: 'migrations/V1_1__second_migration.sql' },
+ ];
+
+ if (errorMessage !== undefined) {
+ mockS3Client.onAnyCommand().rejects(errorMessage);
+ return;
+ }
+ mockS3Client
+ .on(ListObjectsV2Command, { Bucket: FLYWAY_FILES_BUCKET_NAME })
+ .resolvesOnce({ Contents: contents })
+ .on(GetObjectCommand, { Bucket: FLYWAY_FILES_BUCKET_NAME, Key: contents[0].Key })
+ .resolvesOnce({ Body: new MockReadable() as never })
+ .on(GetObjectCommand, { Bucket: FLYWAY_FILES_BUCKET_NAME, Key: contents[1].Key })
+ .resolvesOnce({ Body: new MockReadable() as never })
+ .on(GetObjectCommand, { Bucket: FLYWAY_FILES_BUCKET_NAME, Key: contents[2].Key })
+ .resolvesOnce({ Body: new MockReadable() as never });
+};
+
+const mockSecretsManagerResponses = (errorMessage?: string): void => {
+ if (errorMessage !== undefined) {
+ mockSecretsManagerClient.onAnyCommand().rejects(errorMessage);
+ return;
+ }
+ mockSecretsManagerClient
+ .on(GetSecretValueCommand, { SecretId: SECRET_ID })
+ .resolvesOnce({ SecretString: JSON.stringify(SECRET_VALUE) });
+};
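
The pattern used throughout these tests: a catch-all `callsFake` rejects any unstubbed request, while the specific `.on(...)` stubs registered per test take precedence, so an unexpected AWS call fails loudly rather than silently succeeding. A condensed sketch of that shape, assuming `aws-sdk-client-mock` behaviour as the tests above rely on it:

```typescript
import { mockClient } from 'aws-sdk-client-mock';
import { ListObjectsV2Command, S3Client } from '@aws-sdk/client-s3';

const s3Mock = mockClient(S3Client);

// Default behaviour: any command without a matching .on(...) stub throws,
// making unexpected requests visible as test failures.
s3Mock.callsFake(input => {
  throw new Error(`Unexpected S3 request - ${JSON.stringify(input)}`);
});

// Specific stub: only this exact command and input shape resolves.
s3Mock.on(ListObjectsV2Command, { Bucket: 'flyway-files-bucket' }).resolvesOnce({ Contents: [] });
```
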
diff --git a/src/handlers/run-flyway-command/handler.ts b/src/handlers/run-flyway-command/handler.ts
index 13108c1df..d04a7b253 100644
--- a/src/handlers/run-flyway-command/handler.ts
+++ b/src/handlers/run-flyway-command/handler.ts
@@ -3,11 +3,22 @@ import { getEnvironmentVariable, getErrorMessage } from '../../shared/utils/util
import { getSecret } from '../../shared/secrets-manager/get-secret';
import type { RedshiftSecret } from '../../shared/types/secrets-manager';
import * as child_process from 'node:child_process';
+import { s3Client } from '../../shared/clients';
+import { GetObjectCommand, ListObjectsV2Command } from '@aws-sdk/client-s3';
+import type { GetObjectCommandOutput } from '@aws-sdk/client-s3';
+import * as fs from 'node:fs';
+import { Readable } from 'node:stream';
const logger = getLogger('lambda/run-flyway-command');
const FLYWAY_COMMANDS = ['clean', 'info', 'migrate', 'validate'];
+const LAMBDA_FILES_ROOT = '/tmp/flyway';
+
+const CONFIG_FILE_NAME = 'flyway.conf';
+
+const MIGRATIONS_DIRECTORY_NAME = 'migrations';
+
type FlywayCommand = (typeof FLYWAY_COMMANDS)[number];
interface RunFlywayEvent {
@@ -26,8 +37,9 @@ export const handler = async (event: RunFlywayEvent): Promise<RunFlywayResult> =
try {
const validated = validateEvent(event);
logger.info('Starting run flyway command lambda', { event: validated });
+ await getFlywayFiles();
const redshiftSecret = await getRedshiftSecret();
- const flywayEnvironment = await getFlywayEnvironment(event, redshiftSecret);
+ const flywayEnvironment = await getFlywayEnvironment(validated, redshiftSecret);
return runFlywayCommand(validated, flywayEnvironment);
} catch (error) {
logger.error('Error running flyway command', { error });
@@ -42,6 +54,44 @@ const validateEvent = (event: RunFlywayEvent): RunFlywayEvent => {
return event;
};
+const getFlywayFiles = async (): Promise<void> => {
+ try {
+ fs.mkdirSync(`${LAMBDA_FILES_ROOT}/${MIGRATIONS_DIRECTORY_NAME}`, { recursive: true });
+ const bucket = getEnvironmentVariable('FLYWAY_FILES_BUCKET_NAME');
+
+ const files = await s3Client.send(new ListObjectsV2Command({ Bucket: bucket }));
+ for (const file of files.Contents ?? []) {
+ const getObject = await getFile(bucket, file.Key);
+ await writeToFile(getObject, file.Key);
+ }
+ } catch (error) {
+ throw new Error(`Error getting flyway files - ${getErrorMessage(error)}`);
+ }
+};
+
+const getFile = async (bucket: string, key: string | undefined): Promise<GetObjectCommandOutput> => {
+ return await s3Client.send(
+ new GetObjectCommand({
+ Bucket: bucket,
+ Key: key,
+ }),
+ );
+};
+
+const writeToFile = async (response: GetObjectCommandOutput, filename: string | undefined): Promise<void> => {
+ await new Promise<void>((resolve, reject) => {
+ if (response.Body instanceof Readable) {
+ response.Body.pipe(fs.createWriteStream(`${LAMBDA_FILES_ROOT}/${filename}`))
+ .on('error', err => {
+ reject(err);
+ })
+ .on('close', () => {
+ resolve();
+ });
+ }
+ });
+};
+
const getRedshiftSecret = async (): Promise => {
try {
const redshiftSecretId = getEnvironmentVariable('REDSHIFT_SECRET_ID');
@@ -58,6 +108,8 @@ const getFlywayEnvironment = async (
FLYWAY_URL: `jdbc:redshift://${redshiftSecret.host}:${redshiftSecret.port}/${event.database}`,
FLYWAY_USER: redshiftSecret.username,
FLYWAY_PASSWORD: redshiftSecret.password,
+ FLYWAY_LOCATIONS: `filesystem:${LAMBDA_FILES_ROOT}/${MIGRATIONS_DIRECTORY_NAME}`,
+ FLYWAY_CONFIG_FILES: `${LAMBDA_FILES_ROOT}/${CONFIG_FILE_NAME}`,
});
const runFlywayCommand = (event: RunFlywayEvent, environment: Record<string, string>): RunFlywayResult => {
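
The manual promise wrapping in `writeToFile` resolves on `close` and rejects on `error` from the write stream only. An alternative sketch using `node:stream/promises` would also propagate read-side errors; this is a suggestion under those assumptions, not what the PR ships:

```typescript
import { pipeline } from 'node:stream/promises';
import * as fs from 'node:fs';
import { Readable } from 'node:stream';

// Equivalent to writeToFile above, but pipeline() rejects if either the S3
// Body stream or the file write stream errors, and resolves when both finish.
const writeBodyToFile = async (body: unknown, path: string): Promise<void> => {
  if (body instanceof Readable) {
    await pipeline(body, fs.createWriteStream(path));
  }
};
```
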
diff --git a/src/layers/flyway/README.md b/src/layers/flyway/README.md
index a52d02084..6d21ed98c 100644
--- a/src/layers/flyway/README.md
+++ b/src/layers/flyway/README.md
@@ -7,11 +7,13 @@ At build time (the `build` script in `package.json`), the [build-flyway-layer.sh
* Moves the run-flyway script to a `bin/` subdirectory (`layer-dist/flyway/bin`)
* Extracts the tar file into a `flyway/` subdirectory (`layer-dist/flyway/flyway`)
* Deletes various files within the extracted folder to make the deployment package within AWS limits (see [here](https://docs.aws.amazon.com/lambda/latest/dg/gettingstarted-limits.html))
-* Copies in the redshift driver and the SQL migrations the correct places in the extracted folder
+* Copies the redshift driver to the `drivers/` subdirectory (`layer-dist/flyway/drivers`)
# Deployment info
The `layer-dist/flyway/` directory will be zipped up by AWS SAM into a layer. AWS puts this into `/opt` in the lambda but adds anything under `/bin` to `$PATH`.
Because of this we can execute the run script simply by running `run-flyway` like any other command line program.
+The migrations and the `flyway.conf` file under the `redshift-scripts/flyway` directory are stored in S3 so they can be updated without having to update the layer.
+
For more information see [here](https://docs.aws.amazon.com/lambda/latest/dg/packaging-layers.html#packaging-layers-paths).
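
At runtime the `run-flyway-command` lambda downloads these files into `/tmp/flyway` and points Flyway at them via environment variables. A sketch of the resulting wiring, with values taken from the handler diff above:

```typescript
// How the handler wires the downloaded files into Flyway (see handler.ts):
const LAMBDA_FILES_ROOT = '/tmp/flyway';

const flywayFileEnvironment = {
  FLYWAY_LOCATIONS: `filesystem:${LAMBDA_FILES_ROOT}/migrations`, // downloaded migrations
  FLYWAY_CONFIG_FILES: `${LAMBDA_FILES_ROOT}/flyway.conf`, // downloaded config file
};
```
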