Remove flyway files from layer (#576)
Remove flyway files from layer so they can be updated independently
Create upload flyway files workflow similar to the athena files one
Call upload flyway files workflow from run flyway command workflow
Create S3 bucket, S3 policy and upload role in IaC similar to the athena files ones
Add logic to the run flyway command lambda to download flyway files from S3, and update the tests (a sketch of this download step follows this change list)
Make database a required parameter in the run flyway command workflow
Add flyway.conf file and move migrations directory to new flyway files subdirectory of redshift-scripts
Remove logic to copy migrations into layer in build flyway layer script
Add new test to redshift rotate secret tests to increase coverage of database access logic
Update layer and main READMEs to cover changes
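The handler change that downloads the flyway files is not among the diffs loaded below, so here is a minimal sketch of how such a step could look with the AWS SDK for JavaScript v3 (the SDK used elsewhere in this repository). Only the FLYWAY_FILES_BUCKET_NAME environment variable is confirmed by the IaC diff; the function name, the /tmp target directory and the single-page listing are assumptions.

```typescript
// Hypothetical sketch only - downloads every object from the flyway files bucket
// into /tmp so the layer-bundled flyway binary can use them. The function name,
// /tmp layout and error handling are assumptions; FLYWAY_FILES_BUCKET_NAME comes
// from the lambda environment variables added in iac/main/resources/redshift.yml.
import { GetObjectCommand, ListObjectsV2Command, S3Client } from '@aws-sdk/client-s3';
import { mkdir, writeFile } from 'node:fs/promises';
import path from 'node:path';

const s3 = new S3Client({ region: 'eu-west-2' });

export const downloadFlywayFiles = async (targetDir = '/tmp/flyway'): Promise<void> => {
  const Bucket = process.env.FLYWAY_FILES_BUCKET_NAME;
  if (Bucket === undefined) {
    throw new Error('FLYWAY_FILES_BUCKET_NAME is not set');
  }

  // List the bucket contents (flyway.conf plus the migrations directory).
  // A single page of keys is assumed here; a real implementation would paginate.
  const listed = await s3.send(new ListObjectsV2Command({ Bucket }));
  for (const object of listed.Contents ?? []) {
    if (object.Key === undefined || object.Key.endsWith('/')) {
      continue;
    }
    const destination = path.join(targetDir, object.Key);
    await mkdir(path.dirname(destination), { recursive: true });

    // Fetch the object body and write it under /tmp, preserving the key's directory structure.
    const response = await s3.send(new GetObjectCommand({ Bucket, Key: object.Key }));
    const body = await response.Body?.transformToString();
    await writeFile(destination, body ?? '');
  }
};
```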
hdavey-gds authored Feb 26, 2024
1 parent a24a2b6 commit 8c3fbcc
Showing 12 changed files with 358 additions and 25 deletions.
27 changes: 22 additions & 5 deletions .github/workflows/run-flyway-command.yml
@@ -15,9 +15,12 @@ on:
options: [info, migrate, validate]
database:
type: string
required: true
description: Database on which to perform migrations
skip-s3-upload:
type: boolean
required: false
description: Database on which to perform migrations (defaults to dap_txma_reporting_db)
default: unset
description: Skip uploading flyway files to S3

jobs:
validate-environment:
@@ -28,9 +31,24 @@ jobs:
- name: Validate input environment
run: scripts/validate-environment.sh ${{ inputs.environment }}

run-flyway:
upload-flyway-files:
if: inputs.skip-s3-upload == false
needs: [validate-environment]
# These permissions are needed to interact with GitHub's OIDC Token endpoint (enabling the aws-actions/configure-aws-credentials action)
permissions:
id-token: write
contents: read
secrets: inherit
uses: ./.github/workflows/upload-flyway-files.yml
with:
environment: ${{ inputs.environment }}

run-flyway:
# needs combined with always as this job needs to run whether or not upload-flyway-files runs
# (and if upload-flyway-files does run then this job should run after it)
needs: [upload-flyway-files]
if: always()
# These permissions are needed to interact with GitHub's OIDC Token endpoint (enabling the aws-actions/configure-aws-credentials action)
permissions:
id-token: write
contents: read
@@ -45,8 +63,7 @@ jobs:
role-to-assume: ${{ secrets[format('DB_MIGRATION_ROLE_ARN_{0}', inputs.environment)] }}
- name: Invoke lambda
run: |
DATABASE=$(if [ -z ${{ inputs.database }} ] || [ ${{ inputs.database }} == "unset" ]; then echo dap_txma_reporting_db; else echo ${{ inputs.database }}; fi)
PAYLOAD=$(echo "{\"command\": \"${{ inputs.command }}\", \"database\": \"$DATABASE\"}")
PAYLOAD=$(echo "{\"command\": \"${{ inputs.command }}\", \"database\": \"${{ inputs.database }}\"}")
echo "$PAYLOAD" | jq
ENCODED=$(echo "$PAYLOAD" | openssl base64)
aws --region eu-west-2 lambda invoke --function-name run-flyway-command --payload "$ENCODED" out.json
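The `aws lambda invoke` call above passes a base64-encoded payload (the encoding matches the AWS CLI v2 default binary format), so the handler ultimately receives the plain JSON object built in that step. A sketch of the event shape the handler presumably sees follows; the interface and handler names are illustrative, not taken from the handler source.

```typescript
// Assumed event shape for the run-flyway-command lambda, inferred from the
// workflow payload above ({"command": ..., "database": ...}). The names here
// are illustrative only.
interface RunFlywayCommandEvent {
  command: 'info' | 'migrate' | 'validate';
  database: string;
}

export const handler = async (event: RunFlywayCommandEvent): Promise<void> => {
  // e.g. { command: 'migrate', database: 'dap_txma_reporting_db' }
  console.log(`Running flyway ${event.command} against ${event.database}`);
};
```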
48 changes: 48 additions & 0 deletions .github/workflows/upload-flyway-files.yml
@@ -0,0 +1,48 @@
name: ✳️ Upload Flyway files to S3

on:
workflow_call:
inputs:
environment:
type: string
required: true
workflow_dispatch:
inputs:
environment:
type: choice
required: true
description: AWS environment
options: [DEV, TEST, FEATURE, BUILD, STAGING, INTEGRATION, PRODUCTION, PRODUCTION-PREVIEW]

jobs:
validate-environment:
runs-on: ubuntu-latest
steps:
- name: Check out repository code
uses: actions/checkout@v4
- name: Validate input environment
run: scripts/validate-environment.sh ${{ inputs.environment }}

upload-to-s3:
needs: [validate-environment]
# These permissions are needed to interact with GitHub's OIDC Token endpoint (enabling the aws-actions/configure-aws-credentials action)
permissions:
id-token: write
contents: read
runs-on: ubuntu-latest
steps:
- name: Check out repository code
uses: actions/checkout@v4
- name: Assume AWS GitHub actions role
uses: aws-actions/configure-aws-credentials@v4
with:
aws-region: eu-west-2
role-to-assume: ${{ secrets.FLYWAY_FILES_UPLOAD_ROLE_ARN }}
- name: Upload flyway files to S3
run: |
REGION="eu-west-2"
FILES_ROOT="redshift-scripts/flyway"
S3_BUCKET="s3://$(echo "${{ inputs.environment }}" | tr '[:upper:]' '[:lower:]')-dap-flyway-files"
echo "Uploading contents of $FILES_ROOT to bucket $S3_BUCKET"
aws --region="$REGION" s3 cp "$FILES_ROOT" "$S3_BUCKET" --recursive
1 change: 1 addition & 0 deletions .prettierignore
@@ -11,3 +11,4 @@ Dockerfile
*.sql
*.jar
*.tar.gz
flyway.conf
3 changes: 2 additions & 1 deletion README.md
@@ -86,7 +86,7 @@ Below is a list of workflows. The ✳️ symbol at the start of a workflow name
| Name | File | Triggers | Purpose |
|-------------------------------------------------|----------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------|
| Deploy to an AWS environment | deploy-to-aws.yml | <ul><li>[other workflows](https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#workflow_call)</li></ul> | Deploys to a deployable AWS environment (dev, build, test) |
| ✳️ Deploy to the test environment | deploy-to-test.yml | <ul><li>[manual](https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#workflow_dispatch)</li></ul> | Deploys IaC and lambda code to the test AWS |
| ✳️ Deploy to the test environment | deploy-to-test.yml | <ul><li>[manual](https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#workflow_dispatch)</li><li>[other workflows](https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#workflow_call)</li></ul> | Deploys IaC and lambda code to the test AWS |
| ✳️ Deploy to the dev environment | deploy-to-dev.yml | <ul><li>[merge to main](https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#push)</li><li>[manual](https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#workflow_dispatch)</li></ul> | Deploys IaC and lambda code to the dev AWS |
| Deploy to the build environment | deploy-to-build.yml | <ul><li>[merge to main](https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#push)</li></ul> | Deploys IaC and lambda code to the build AWS |
| ✳️ Test and validate iac and lambdas | test-and-validate.yml | <ul><li>[other workflows](https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#workflow_call)</li><li>[pull requests](https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request)</li><li>[manual](https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#workflow_dispatch)</li></ul> | Runs linting, formatting and testing of lambda code, and linting and scanning of IaC code |
@@ -101,6 +101,7 @@ Below is a list of workflows. The ✳️ symbol at the start of a workflow name
| ✳️ Add Quicksight users from spreadsheet | add-quicksight-users.yml | <ul><li>[manual](https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#workflow_dispatch)</li></ul> | Reads the DAP account management spreadsheet and attempts to add users to Cognito and Quicksight |
| ✳️ Deploy to the production preview environment | deploy-to-production-preview.yml | <ul><li>[manual](https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#workflow_dispatch)</li></ul> | Deploys to the production-preview environment |
| SAM deploy | sam-deploy.yml | <ul><li>[other workflows](https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#workflow_call)</li></ul> | Performs a SAM deploy to an environment without secure pipelines (feature, production-preview) |
| ✳️ Upload Flyway files to S3 | upload-flyway-files.yml | <ul><li>[manual](https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#workflow_dispatch)</li><li>[other workflows](https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#workflow_call)</li></ul> | Uploads flyway files for a particular environment (under [redshift-scripts/flyway](redshift-scripts/flyway)) to S3 |

## Testing

94 changes: 94 additions & 0 deletions iac/main/resources/redshift.yml
@@ -282,11 +282,19 @@ RunFlywayCommandLambda:
Condition:
StringEquals:
kms:EncryptionContext:SecretARN: !Ref RedshiftSecret
- Effect: Allow
Action:
- s3:GetObject
- s3:ListBucket
Resource:
- !Sub ${FlywayFilesBucket.Arn}
- !Sub ${FlywayFilesBucket.Arn}/*
ReservedConcurrentExecutions: 10
Environment:
# checkov:skip=CKV_AWS_173: These environment variables do not require encryption
Variables:
REDSHIFT_SECRET_ID: !Ref RedshiftSecret
FLYWAY_FILES_BUCKET_NAME: !Ref FlywayFilesBucket
ENVIRONMENT: !Ref Environment
Tags:
Environment: !Ref Environment
@@ -309,3 +317,89 @@ FlywayLayer:
ContentUri: layer-dist/flyway
LayerName: !Sub ${Environment}-dap-lambda-layer-flyway
RetentionPolicy: Delete

FlywayFilesBucket:
Type: AWS::S3::Bucket
Properties:
AccessControl: Private
BucketName: !Sub ${Environment}-dap-flyway-files
LoggingConfiguration:
DestinationBucketName: !Ref GlobalLogBucket
LogFilePrefix: dap-flyway-files/log
PublicAccessBlockConfiguration:
BlockPublicAcls: true
BlockPublicPolicy: true
IgnorePublicAcls: true
RestrictPublicBuckets: true
VersioningConfiguration:
Status: Enabled
LifecycleConfiguration:
Rules:
- ExpirationInDays: 365
Status: Enabled
NotificationConfiguration:
LambdaConfigurations:
- Event: s3:ObjectCreated:*
Function: !GetAtt S3NotificationsLoggerLambda.Arn
- Event: s3:ObjectRemoved:*
Function: !GetAtt S3NotificationsLoggerLambda.Arn

FlywayFilesBucketPolicy:
Type: AWS::S3::BucketPolicy
Properties:
Bucket: !Ref FlywayFilesBucket
PolicyDocument:
Version: 2012-10-17
Statement:
- Effect: Deny
Action: 's3:*'
Resource: !Sub ${FlywayFilesBucket.Arn}/*
Principal: '*'
Condition:
Bool:
aws:SecureTransport: false
- Effect: Allow
Action:
- s3:GetObject
- s3:GetObjectVersion
- s3:ListBucket
- s3:ListBucketVersions
- s3:PutObject
Resource:
- !Sub ${FlywayFilesBucket.Arn}
- !Sub ${FlywayFilesBucket.Arn}/*
Principal:
AWS: !Sub arn:aws:iam::${BuildAccountId}:role/dap-flyway-files-upload-role

FlywayFilesBucketUploadRole:
Condition: IsBuild
Type: AWS::IAM::Role
Properties:
RoleName: dap-flyway-files-upload-role
AssumeRolePolicyDocument:
Version: 2012-10-17
Statement:
- Effect: Allow
Principal:
Federated: !Sub arn:aws:iam::${AWS::AccountId}:oidc-provider/token.actions.githubusercontent.com
Action: sts:AssumeRoleWithWebIdentity
Condition:
StringLike:
'token.actions.githubusercontent.com:sub':
- repo:govuk-one-login/data-analytics-platform:ref:refs/heads/*
- repo:govuk-one-login/data-analytics-platform:environment:*
Policies:
- PolicyName: dap-flyway-files-upload-policy
PolicyDocument:
Version: 2012-10-17
Statement:
- Effect: Allow
Action:
- s3:GetObject
- s3:GetObjectVersion
- s3:ListBucket
- s3:ListBucketVersions
- s3:PutObject
Resource:
- arn:aws:s3:::*-dap-flyway-files
- arn:aws:s3:::*-dap-flyway-files/*
Empty file.
4 changes: 0 additions & 4 deletions scripts/build-flyway-layer.sh
@@ -30,7 +30,3 @@ rm -rf "$FLYWAY_DIR"/lib/rgcompare

# remove jre/legal/ as it is full of broken symlinks that cause sam deploy to exit with an error
rm -rf "$FLYWAY_DIR"/jre/legal

# add migrations
mkdir -p "$FLYWAY_DIR"/sql
cp redshift-scripts/migrations/*.sql "$FLYWAY_DIR"/sql
26 changes: 26 additions & 0 deletions src/handlers/redshift-rotate-secret/handler.spec.ts
@@ -11,6 +11,9 @@ import { databaseAccess, handler } from './handler';
import type { RotateSecretStep } from './handler';
import type { RedshiftSecret, SecretRotationStage } from '../../shared/types/secrets-manager';
import type { Database } from './database-access';
import { DatabaseAccess } from './database-access';
import { getLogger } from '../../shared/powertools';
import type { Knex } from 'knex';

const mockSecretsManagerClient = mockClient(SecretsManagerClient);

@@ -205,6 +208,29 @@ test('finish secret no current version', async () => {
expect(mockSecretsManagerClient.calls()).toHaveLength(2);
});

test('secret to database connection', async () => {
// @ts-expect-error this incorrectly extends DatabaseAccess by overriding a private method but it's fine as it's a test
const databaseAccess = new (class extends DatabaseAccess {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
private async validateConnection(connection: Knex<any, unknown[]>): Promise<Knex<any, unknown[]>> {
return connection;
}
})(getLogger(''));

const secret = JSON.parse(getSecretString({ SecretId: 'hello', VersionStage: 'AWSCURRENT' }));
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const connection: any = await databaseAccess.getDatabaseConnection(secret);
const config = connection.context.client.config;

expect(config.client).toEqual('pg');
expect(config.connection).toEqual({
host: secret.host,
user: secret.username,
database: secret.dbname,
port: parseInt(secret.port, 10),
});
});

const mockSecretsManager = (config: SecretsManagerMockingConfig = {}): void => {
const pendingSecretError = config.pendingSecretError ?? false;
const versions = config.versions ?? { [CLIENT_REQUEST_TOKEN]: ['AWSPENDING'] };
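The new test above checks the knex configuration produced by `getDatabaseConnection`, stubbing out the private `validateConnection` method to avoid a real network call. Below is a minimal sketch of an implementation consistent with those assertions; only the `pg` client and the connection fields come from the test, and everything else, including returning the connection without validating it, is an assumption rather than the actual DatabaseAccess code.

```typescript
// Sketch of a getDatabaseConnection consistent with the assertions in the new test:
// a knex instance with the 'pg' client and a connection built from the rotated
// Redshift secret (host, username, dbname, and a string port parsed to a number).
// The real DatabaseAccess class in database-access.ts may differ.
import { knex, type Knex } from 'knex';
import type { RedshiftSecret } from '../../shared/types/secrets-manager';

export const getDatabaseConnection = async (secret: RedshiftSecret): Promise<Knex> => {
  const connection = knex({
    client: 'pg',
    connection: {
      host: secret.host,
      user: secret.username,
      database: secret.dbname,
      port: parseInt(secret.port, 10),
    },
  });
  // The production code presumably validates this connection before returning it.
  return connection;
};
```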
