diff --git a/.env.authdev1 b/.env.authdev1 deleted file mode 100644 index 07161a166..000000000 --- a/.env.authdev1 +++ /dev/null @@ -1,84 +0,0 @@ -# -# Environment variable configuration file for running di-authentication-frontend locally pointing to remote build -# - -ENVIRONMENT=development - -# AWS Profile for the build account -AWS_PROFILE=di-auth-development-admin - -# Orchestration OIDC API -API_BASE_URL=https://oidc.authdev1.sandpit.account.gov.uk - -# Authentication Frontend API -FRONTEND_API_BASE_URL=https://auth.authdev1.sandpit.account.gov.uk - -# Redirect host for the local stub client -STUB_HOSTNAME=rp-dev.build.stubs.account.gov.uk - -# Test Client for local testing, must be configured in the client registry - Ask for value -TEST_CLIENT_ID= - -# API Key for the Authentication Frontend API - Ask for value -API_KEY= - -# Domain where app is running -SERVICE_DOMAIN=localhost - -# Local Express session configuration -SESSION_EXPIRY=60000 -SESSION_SECRET=123456 - -# -# SmartAgent configuration for Support form submission - Ask for values -# -SMARTAGENT_API_KEY= -SMARTAGENT_API_URL= -SMARTAGENT_WEBFORM_ID= - -# -# Triage page URL -# -URL_FOR_SUPPORT_LINKS="/contact-us" - -# -# Orch to Auth configuration -# -ORCH_TO_AUTH_CLIENT_ID="orchestrationAuth" -ENCRYPTION_KEY_ID="alias/authdev1-authentication-encryption-key-alias" -ORCH_TO_AUTH_AUDIENCE="https://signin.authdev1.sandpit.account.gov.uk/" - -# NOTE: THIS IS A MULTILINE STRING VARIABLE. 
ENSURE THAT THESE LINES ARE NOT SEPARATED FROM EACH OTHER -ORCH_TO_AUTH_SIGNING_KEY="-----BEGIN PUBLIC KEY----- -MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAENB3csRUIdoaTHNn079Jl7JpiXzxF -0p2ZIddCErxtIhGMTTqtbQZJCPesSKUVE/DQbpIko3mLoisuFgmQfFouCw== ------END PUBLIC KEY-----" - -# -# Local stub client options -# - -# Set VTR for the stub client authorization request -VTR=["Cl","Cl.Cm"] - -# Locales passed by the client - uncomment to use -# UI_LOCALES=cy - -# -# Feature switches -# -SUPPORT_MFA_OPTIONS=1 -SUPPORT_ACCOUNT_RECOVERY=1 -SUPPORT_AUTHORIZE_CONTROLLER=1 -SUPPORT_ACCOUNT_INTERVENTIONS=1 -SUPPORT_2FA_B4_PASSWORD_RESET=1 -SUPPORT_REAUTHENTICATION=1 - -# -# Docker Ports -# -DOCKER_STUB_NO_MFA_PORT=5000 -DOCKER_STUB_DEFAULT_PORT=2000 -DOCKER_REDIS_PORT=6379 -DOCKER_FRONTEND_PORT=3000 -DOCKER_FRONTEND_NODEMON_PORT=9230 diff --git a/.env.build b/.env.build deleted file mode 100644 index 3b3cf6c70..000000000 --- a/.env.build +++ /dev/null @@ -1,85 +0,0 @@ -# -# Environment variable configuration file for running di-authentication-frontend locally pointing to remote build -# - -ENVIRONMENT=development - -# AWS Profile for the build account -AWS_PROFILE=gds-di-development-admin - -# Orchestration OIDC API -API_BASE_URL=https://oidc.build.account.gov.uk - -# Authentication Frontend API -FRONTEND_API_BASE_URL=https://auth.build.account.gov.uk - -# Redirect host for the local stub client -STUB_HOSTNAME=di-auth-stub-relying-party-build.london.cloudapps.digital - -# Test Client for local testing, must be configured in the client registry - Ask for value -TEST_CLIENT_ID= - -# API Key for the Authentication Frontend API - Ask for value -API_KEY= - -# Domain where app is running -SERVICE_DOMAIN=localhost - -# Local Express session configuration -SESSION_EXPIRY=60000 -SESSION_SECRET=123456 - -# -# SmartAgent configuration for Support form submission - Ask for values -# -SMARTAGENT_API_KEY= -SMARTAGENT_API_URL= -SMARTAGENT_WEBFORM_ID= - -# -# Triage page URL -# 
-URL_FOR_SUPPORT_LINKS="/contact-us" - - -# -# Orch to Auth configuration -# -ORCH_TO_AUTH_CLIENT_ID=orchestrationAuth -ENCRYPTION_KEY_ID="alias/build-authentication-encryption-key-alias" -ORCH_TO_AUTH_AUDIENCE="https://signin.build.account.gov.uk/" - -# NOTE: THIS IS A MULTILINE STRING VARIABLE. ENSURE THAT THESE LINES ARE NOT SEPARATED FROM EACH OTHER -ORCH_TO_AUTH_SIGNING_KEY="-----BEGIN PUBLIC KEY----- -MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAENRdvNXHwk1TvrgFUsWXAE5oDTcPr -CBp6HxbvYDLsqwNHiDFEzCwvbXKY2QQR/Rtel0o156CtU9k1lCZJGAsSIA== ------END PUBLIC KEY-----" - -# -# Local stub client options -# - -# Set VTR for the stub client authorization request -VTR=["Cl","Cl.Cm"] - -# Locales passed by the client - uncomment to use -# UI_LOCALES=cy - -# -# Feature switches -# -SUPPORT_MFA_OPTIONS=1 -SUPPORT_ACCOUNT_RECOVERY=1 -SUPPORT_AUTHORIZE_CONTROLLER=1 -SUPPORT_ACCOUNT_INTERVENTIONS=1 -SUPPORT_2FA_B4_PASSWORD_RESET=1 -SUPPORT_REAUTHENTICATION=1 - -# -# Docker Ports -# -DOCKER_STUB_NO_MFA_PORT=5000 -DOCKER_STUB_DEFAULT_PORT=2000 -DOCKER_REDIS_PORT=6379 -DOCKER_FRONTEND_PORT=3000 -DOCKER_FRONTEND_NODEMON_PORT=9230 diff --git a/.github/workflows/pre-merge-checks-python.yaml b/.github/workflows/pre-merge-checks-python.yaml new file mode 100644 index 000000000..7a19766f1 --- /dev/null +++ b/.github/workflows/pre-merge-checks-python.yaml @@ -0,0 +1,50 @@ +name: Pre-merge checks for Python +on: + pull_request: + paths: ["**/*.py"] + types: + - opened + - reopened + - ready_for_review + - synchronize + +jobs: + lint: + runs-on: ubuntu-latest + steps: + - name: Check out repository code + uses: actions/checkout@v4 + + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: "3.12" + + - uses: actions/cache@v4 + with: + path: ~/.cache/pip + key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements*.txt') }} + restore-keys: | + ${{ runner.os }}-pip- + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -r 
requirements.dev.txt + pip install -r scripts/requirements.txt + + - name: Pylint + run: | + pylint -f actions $(git ls-files '*.py') + + - name: Flake8 + run: | + flake8p --format github $(git ls-files '*.py') + + - name: Black + run: | + black --check $(git ls-files '*.py') + + - name: isort + run: | + isort --check-only $(git ls-files '*.py') diff --git a/README.md b/README.md index a292f5a77..beb4585f0 100644 --- a/README.md +++ b/README.md @@ -21,8 +21,14 @@ Before you can run the frontend app against the backend you will need to configu ### Set the Environment variables -Create a copy of one `.env.XXX`, where `XXX` is the environment you wish to use for OIDC, -rename it .env and fill in the missing values. +You will need a `.env` file. this can be generated by: + +1. Log into the VPN +1. Run `scripts/create-env-file.sh ${target_environment}`, where `${target_environment}` is the environment you wish to use for + OIDC etc. `build|sandpit|authdev#` are supported. + +If things stop working in future, this script can be rerun to update the variables sourced from AWS. All non-generated variables +may be updated, and changes will persist through reruns. `UI_LOCALES` can be used be the stub to request specific locales when authorising. Only 'en' and 'cy' are supported. @@ -62,7 +68,7 @@ Additionaly delete the Docker images for all the frontend services in docker-com There are two stub apps you can use to start a journey. -> NB: ports 2000 and 5000 can be set in `.env` with `STUB_DEFAULT_PORT=2000` and `STUB_NO_MFA_PORT=5000`. If you have changed these +> NB: ports 2000 and 5000 can be set in `.env` with `DOCKER_STUB_DEFAULT_PORT=2000` and `DOCKER_STUB_NO_MFA_PORT=5000`. If you have changed these > values in your `.env` file, use your value rather than the one stated below. To start an auth only journey with MFA required ("Cm"), navigate to the stub app on port 2000 [http://localhost:2000](http://localhost:2000). 
This acts like a local client to create a backend session and redirect to the start page. @@ -73,7 +79,7 @@ Changes made locally will automatically be deployed after a few seconds. You sho ### Switching between different Vectors of Trust -You can further tweak the vector of trust (VTR) requested by the stub client on port 5000 (or `$STUB_DEFAULT_PORT` if modified in `.env`) by editing `docker-compose.yml` +You can further tweak the vector of trust (VTR) requested by the stub client on port 5000 (or `$DOCKER_STUB_DEFAULT_PORT` if modified in `.env`) by editing `docker-compose.yml` and changing the `VTR` environment variable for the `di-auth-stub-no-mfa` service: ``` @@ -145,11 +151,11 @@ brew install pre-commit If you're having problems running locally, try these steps first: -* Connect to the VPN -* Run `./shutdown.sh` -* Delete your Docker **Images** (you can do this via Docker Desktop or with `docker system prune --all`) -* Run `./startup.sh -lc` to do a cleanup before a local run -* Because things sometimes don't work first time round, a `touch src/server.ts` _while the server is running_ might help +- Connect to the VPN +- Run `./shutdown.sh` +- Delete your Docker **Images** (you can do this via Docker Desktop or with `docker system prune --all`) +- Run `./startup.sh -lc` to do a cleanup before a local run +- Because things sometimes don't work first time round, a `touch src/server.ts` _while the server is running_ might help ### Getting past specific errors @@ -168,7 +174,7 @@ Remember to run these commands in the docker container itself. ### Documentation -> Generate and view documentation of the user journey state machine +> Generate and view documentation of the user journey state machine ```shell script yarn dev:render-user-journey-documentation @@ -220,7 +226,8 @@ using mocha. The application stack must be started before the integration tests can be run, either with frontend running in a docker container or on the local machine (./startup -l). 
In either case the command to run the integration tests is the same, but the tests usually run faster when frontend is started outside of docker. -In both cases frontend must have started successfully with the message 'Server listening on port 3000' before the tests can be run. If running frontend in a container check the actual container logs in docker desktop to make sure that frontend has started correctly, otherwise the test run will be invalid. +In both cases frontend must have started successfully with the message 'Server listening on port 3000' before the tests can be run. If running frontend in a container check the actual container logs in docker desktop to make sure that frontend has started correctly, otherwise the test run will be invalid. + > To run the integration tests ```shell script diff --git a/dev-app.js b/dev-app.js index 177509738..0412ea45b 100644 --- a/dev-app.js +++ b/dev-app.js @@ -19,7 +19,7 @@ function createAuthorizeRequest() { const ui_locales = process.env.UI_LOCALES && process.env.UI_LOCALES.length > 0 ? `&ui_locales=${process.env.UI_LOCALES}` : ""; const redirect_uri = `https://${process.env.STUB_HOSTNAME}/oidc/authorization-code/callback` - return process.env.API_BASE_URL + + return new URL( "/authorize?" 
+ vtr + "scope=openid+phone+email" + @@ -30,7 +30,8 @@ function createAuthorizeRequest() { `&client_id=${process.env.TEST_CLIENT_ID}` + "&cookie_consent=accept" + "&_ga=test" + - ui_locales; + ui_locales, process.env.API_BASE_URL + ).toString(); } app.get("/", (req, res) => { diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 000000000..c11b1d401 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,20 @@ +[tool.pylint] +max-line-length = "88" +disable = '''missing-module-docstring, + missing-class-docstring, + missing-function-docstring, + invalid-name, + too-few-public-methods, + line-too-long + ''' +ignore = ".venv" + +[tool.pylint.main] +load-plugins = "pylint_actions" + +[tool.flake8] +max-line-length = "88" +extend-ignore = ["E203", "E701", "E501"] + +[tool.isort] +profile = "black" diff --git a/requirements.dev.txt b/requirements.dev.txt new file mode 100644 index 000000000..623f5e3af --- /dev/null +++ b/requirements.dev.txt @@ -0,0 +1,7 @@ +pylint +pylint-actions +black +isort +flake8 +Flake8-pyproject +flake8-github-actions diff --git a/scripts/.gitignore b/scripts/.gitignore new file mode 100644 index 000000000..1d17dae13 --- /dev/null +++ b/scripts/.gitignore @@ -0,0 +1 @@ +.venv diff --git a/scripts/_create_env_file.py b/scripts/_create_env_file.py new file mode 100644 index 000000000..356a63265 --- /dev/null +++ b/scripts/_create_env_file.py @@ -0,0 +1,431 @@ +#!/usr/bin/env python +import json +import logging +import os +import re +import shutil +import sys +from datetime import datetime +from functools import cache, cached_property +from io import BytesIO +from pathlib import Path +from typing import Iterable, TypedDict + +import boto3 +from botocore.exceptions import BotoCoreError +from botocore.exceptions import TokenRetrievalError as BotoTokenRetrievalError +from dotenv import dotenv_values + +logging.basicConfig(level=logging.INFO) + +logger = logging.getLogger("build-env-file") + + +STATE_GETTER: "StateGetter" = None + + +class 
EnvFileVariable(TypedDict): + value: str + comment: str | None + + +class EnvFileSection(TypedDict): + header: str | None + variables: dict[str, EnvFileVariable | str | int] + + +DEFAULT_USER_VARIABLES: list[EnvFileSection] = [ + { + "header": "Miscellaneous variables", + "variables": { + "ENVIRONMENT": {"value": "development", "comment": "Environment"}, + "SERVICE_DOMAIN": { + "value": "localhost", + "comment": "Domain where app is running", + }, + }, + }, + { + "header": "Local Express session configuration", + "variables": { + "SESSION_EXPIRY": { + "value": 60000, + "comment": "Express session expiry time in milliseconds", + }, + "SESSION_SECRET": {"value": 123456, "comment": "Express session secret"}, + }, + }, + { + # pylint: disable=line-too-long + "header": "SmartAgent configuration for Support form submission - Ask for values", + "variables": { + "SMARTAGENT_API_KEY": { + "value": "asdf", + "comment": "API key for Smartagent", + }, + "SMARTAGENT_API_URL": { + "value": "asdf", + "comment": "API URL for Smartagent", + }, + "SMARTAGENT_WEBFORM_ID": { + "value": "asdf", + "comment": "Webform ID for Smartagent", + }, + }, + }, + { + "header": "Local stub client options", + "variables": { + "VTR": { + "value": '["Cl","Cl.Cm"]', + "comment": "VTR for the stub client authorization request", + }, + }, + }, + { + "header": "Feature switches", + "variables": { + "SUPPORT_MFA_OPTIONS": 1, + "SUPPORT_ACCOUNT_RECOVERY": 1, + "SUPPORT_AUTHORIZE_CONTROLLER": 1, + "SUPPORT_ACCOUNT_INTERVENTIONS": 1, + "SUPPORT_2FA_B4_PASSWORD_RESET": 1, + "SUPPORT_REAUTHENTICATION": 1, + "SUPPORT_2HR_LOCKOUT": 1, + }, + }, + { + "header": "Redis configuration", + "variables": { + "REDIS_HOST": {"value": "localhost", "comment": "Redis host"}, + "REDIS_PORT": {"value": 6379, "comment": "Redis port"}, + }, + }, + { + "header": "Docker ports", + "variables": { + "DOCKER_STUB_NO_MFA_PORT": { + "value": 5000, + "comment": "Listen port for no-mfa stub", + }, + "DOCKER_STUB_DEFAULT_PORT": { +
"value": 2000, + "comment": "Listen port for default stub", + }, + "DOCKER_FRONTEND_PORT": { + "value": 3000, + "comment": "Listen port for frontend", + }, + "DOCKER_FRONTEND_NODEMON_PORT": { + "value": 9230, + "comment": "Listen port for frontend nodemon", + }, + }, + }, +] + +DEFAULT_USER_VARIABLE_LOOKUP = { + name: i + for i, section in enumerate(DEFAULT_USER_VARIABLES) + for name in section["variables"] +} + + +@cache +def cached_get_json_from_s3(s3_client, bucket, path) -> dict: + f = BytesIO() + s3_client.download_fileobj(bucket, path, f) + return json.loads(f.getvalue()) + + +class StateGetter: + boto_client: boto3.Session + s3_client: boto3.client + dynamodb_client: boto3.client + + def __init__(self, deployment_name: str, state_bucket: str, aws_profile_name: str): + self.deployment_name = deployment_name + self.state_bucket = state_bucket + try: + self.boto_client = boto3.Session(profile_name=aws_profile_name) + self._validate_aws_credentials() + self.s3_client = self.boto_client.client("s3") + self.dynamodb_client = self.boto_client.client("dynamodb") + except BotoTokenRetrievalError: + logger.fatal( + "AWS auth error: Your SSO session has expired. Please run `aws sso " + "login --profile di-auth-development-admin` to refresh your session." + ) + sys.exit(1) + except BotoCoreError as e: + logger.fatal("AWS error: %s. 
Are you connected to the VPN?", e) + sys.exit(1) + + def _validate_aws_credentials(self): + self.boto_client.client("sts").get_caller_identity() + + def get_stub_hostname_clientid_from_dynamodb(self): + hostname_regex = re.compile(r"^https://(rp-\w+\.\w+\.stubs\.account\.gov\.uk)") + paginator = self.dynamodb_client.get_paginator("scan") + iterator = paginator.paginate( + TableName=f"{self.deployment_name}-client-registry", + Select="SPECIFIC_ATTRIBUTES", + ProjectionExpression="ClientID, SectorIdentifierUri", + ) + for page in iterator: + for item in page["Items"]: + if "SectorIdentifierUri" in item: + search = hostname_regex.search(item["SectorIdentifierUri"]["S"]) + if search: + return search.group(1), item["ClientID"]["S"] + raise ValueError("Stub hostname not found in DynamoDB") + + @cached_property + def _api_remote_state(self): + state_json = cached_get_json_from_s3( + self.s3_client, + self.state_bucket, + f"frontend-{self.deployment_name}-terraform.tfstate", + ) + resources = state_json["resources"] + return next( + resource + for resource in resources + if resource["mode"] == "data" + and resource["type"] == "terraform_remote_state" + and resource["name"] == "api" + ) + + @cached_property + def _ecs_task_environment(self): + state_json = cached_get_json_from_s3( + self.s3_client, + self.state_bucket, + f"frontend-{self.deployment_name}-terraform.tfstate", + ) + resources = state_json["resources"] + definitions = next( + resource + for resource in resources + if resource["mode"] == "managed" + and resource["type"] == "aws_ecs_task_definition" + and resource["name"] == "frontend_task_definition" + )["instances"][0]["attributes"]["container_definitions"] + definitions = json.loads(definitions) + return next( + definition["environment"] + for definition in definitions + if definition["name"] == "frontend-application" + ) + + def get_api_remote_state_value(self, key: str): + api_remote_state = self._api_remote_state + try: + return 
api_remote_state["instances"][0]["attributes"]["outputs"]["value"][ + key + ] + except KeyError as e: + raise KeyError(f"Key {key} not found in api remote state") from e + + def get_ecs_task_environment_value(self, key: str): + ecs_task_environment = self._ecs_task_environment + try: + return next( + env["value"] for env in ecs_task_environment if env["name"] == key + ) + except KeyError as e: + raise KeyError(f"Key {key} not found in ecs task environment") from e + + +def get_static_variables_from_remote( + deployment_name: str, + aws_profile_name: str, +) -> list[EnvFileSection]: + try: + stub_hostname, client_id = ( + STATE_GETTER.get_stub_hostname_clientid_from_dynamodb() + ) + except ValueError as e: + logger.error("Error getting stub hostname from DynamoDB: %s", e) + sys.exit(1) + return [ + { + "variables": { + "DEPLOYMENT_NAME": deployment_name, + "AWS_PROFILE": aws_profile_name, + "API_BASE_URL": STATE_GETTER.get_api_remote_state_value("base_url"), + "FRONTEND_API_BASE_URL": STATE_GETTER.get_api_remote_state_value( + "frontend_api_base_url" + ), + }, + }, + { + "variables": { + "STUB_HOSTNAME": stub_hostname, + "API_KEY": STATE_GETTER.get_ecs_task_environment_value("API_KEY"), + "TEST_CLIENT_ID": client_id, + "URL_FOR_SUPPORT_LINKS": STATE_GETTER.get_ecs_task_environment_value( + "URL_FOR_SUPPORT_LINKS" + ), + "ORCH_TO_AUTH_CLIENT_ID": STATE_GETTER.get_ecs_task_environment_value( + "ORCH_TO_AUTH_CLIENT_ID" + ), + "ENCRYPTION_KEY_ID": STATE_GETTER.get_ecs_task_environment_value( + "ENCRYPTION_KEY_ID" + ), + "ORCH_TO_AUTH_AUDIENCE": STATE_GETTER.get_ecs_task_environment_value( + "ORCH_TO_AUTH_AUDIENCE" + ), + "ORCH_TO_AUTH_SIGNING_KEY": STATE_GETTER.get_ecs_task_environment_value( + "ORCH_TO_AUTH_SIGNING_KEY" + ), + }, + }, + ] + + +def get_user_variables( + dotenv_file: Path, static_variables: list[EnvFileSection] +) -> list[EnvFileSection]: + if not dotenv_file.exists() or not dotenv_file.is_file(): + return DEFAULT_USER_VARIABLES + + 
known_static_variable_names = [ + var for section in static_variables for var in section["variables"] + ] + + vars_from_file = dotenv_values(dotenv_file) + user_variables = DEFAULT_USER_VARIABLES.copy() + unrecognised_vars = {} + for k, v in vars_from_file.items(): + if k in known_static_variable_names: + continue + if k in DEFAULT_USER_VARIABLE_LOOKUP: + section_index = DEFAULT_USER_VARIABLE_LOOKUP[k] + user_variables[section_index]["variables"][k] = v + continue + if k in unrecognised_vars: + logger.warning( + "Duplicate variable found: `%s=%s`. Not adding to env file.", k, v + ) + continue + + unrecognised_vars[k] = v + + if len(unrecognised_vars) > 0: + return user_variables + [ + { + "header": "Unrecognised variables from import", + "variables": unrecognised_vars, + } + ] + + return user_variables + + +def format_value(value: str | int) -> str: + if isinstance(value, int): + return format_value(str(value)) + if "\n" in value: + value = value.replace('"', '\\"') + value = f'"{value}"' + return value + + +def build_lines_from_section(sections: list[EnvFileSection]) -> Iterable[str]: + for section in sections: + if section.get("header"): + yield f"# {section['header']}" + for var_name, var in section["variables"].items(): + if isinstance(var, dict): + yield f"# {var['comment']}" + var = var["value"] + yield f"{var_name}={format_value(var)}" + yield "" + + +def build_env_file_lines( + deployment_name: str, + static_sections: list[EnvFileSection], + user_sections: list[EnvFileSection], +) -> Iterable[str]: + # pylint: disable=line-too-long + yield from [ + f"# This file was generated with `create-env-file.sh {deployment_name}` at {datetime.now().isoformat()}.\n", + "# You may update variables between this line and the 'DO NOT EDIT BELOW THIS LINE' marker.\n", + ] + + yield from build_lines_from_section(user_sections) + yield "# DO NOT EDIT BELOW THIS LINE" # Mark the end of user-editable variables + yield f"# The following variables should be updated by rerunning
`create-env-file.sh {deployment_name}`" + yield "# Any manual changes made below this line will be lost.\n" + yield from build_lines_from_section(static_sections) + + +def main(deployment_name: str, aws_profile_name: str, dotenv_file: Path): + start_time = datetime.now() + static_variables = get_static_variables_from_remote( + deployment_name, aws_profile_name + ) + user_variables = get_user_variables(dotenv_file, static_variables) + + env_file_lines = build_env_file_lines( + deployment_name, static_variables, user_variables + ) + + # Create a backup of the existing .env file + try: + dotenv_file_backup = dotenv_file.with_suffix(dotenv_file.suffix + ".bak") + logger.info("Backing up %s to %s", dotenv_file, dotenv_file_backup) + shutil.copy2(dotenv_file, dotenv_file_backup) + except FileNotFoundError: + logger.warning("No existing .env file found to back up.") + # pylint: disable=broad-except + except Exception as e: + logger.error("Error backing up %s: %s", dotenv_file, e) + sys.exit(1) + + try: + dotenv_file.write_text("\n".join(env_file_lines)) + except OSError as e: + logger.error("Error writing to %s: %s", dotenv_file, e) + sys.exit(1) + + logger.info( + "Successfully updated %s with values from %s environment in %f seconds.", + dotenv_file, + deployment_name, + (datetime.now() - start_time).total_seconds(), + ) + + +if __name__ == "__main__": + try: + assert os.getenv("FROM_WRAPPER", "false") == "true" + except AssertionError: + logger.fatal( + "This script is intended to be run from the wrapper `scripts/create-env-file.sh`. " + "Please use that instead."
+ ) + sys.exit(1) + + if len(sys.argv) != 2: + logger.error("Usage: create-env-file.sh <environment>") + sys.exit(1) + try: + deploy_env = sys.argv[1] + assert isinstance(deploy_env, str) + assert len(deploy_env) > 0 + except (KeyError, AssertionError): + logger.error("Deploy environment must be specified") + sys.exit(1) + + _aws_profile_name = "gds-di-development-admin" + _state_bucket_name = "digital-identity-dev-tfstate" + if re.match(r"^authdev[0-9]+$", deploy_env): + _aws_profile_name = "di-auth-development-admin" + _state_bucket_name = "di-auth-development-tfstate" + + STATE_GETTER = StateGetter(deploy_env, _state_bucket_name, _aws_profile_name) + + main(deploy_env, _aws_profile_name, Path(".env")) diff --git a/scripts/create-env-file.sh b/scripts/create-env-file.sh new file mode 100755 index 000000000..6187a66b5 --- /dev/null +++ b/scripts/create-env-file.sh @@ -0,0 +1,34 @@ +#!/usr/bin/env bash +set -euo pipefail + +DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)" + +# Ensure python is at least 3.10 +min_python_version=3.10 +python_version="$(python3 --version | cut -d' ' -f2)" +if [ "$(printf '%s\n' "${min_python_version}" "${python_version}" | sort -V | head -n1)" != "${min_python_version}" ]; then + # shellcheck disable=SC2016 + printf 'Please install python %s or later (found: %s). You could probably use `brew install python@3.12`' "${min_python_version}" "${python_version}" + exit 1 +fi + +# test if a python virtualenv already exists +if [ -d "${DIR}/.venv" ]; then + echo "! Using existing virtualenv" +else + echo "! Creating virtualenv" + python3 -m venv "${DIR}/.venv" +fi + +# activate the virtualenv +# shellcheck source=/dev/null +source "${DIR}/.venv/bin/activate" + +# install dependencies +echo "!
installing python dependencies" +pip3 install -r "${DIR}/requirements.txt" + +echo +echo + +FROM_WRAPPER=true python3 "${DIR}/_create_env_file.py" "$@" diff --git a/scripts/requirements.txt b/scripts/requirements.txt new file mode 100644 index 000000000..902d8a764 --- /dev/null +++ b/scripts/requirements.txt @@ -0,0 +1,8 @@ +boto3 +botocore +jmespath +python-dateutil +python-dotenv +s3transfer +six +urllib3 diff --git a/startup.sh b/startup.sh index 7c77ca5f9..2a288924a 100755 --- a/startup.sh +++ b/startup.sh @@ -44,8 +44,7 @@ fi test -f .env || usage "Missing .env file" -# set shellcheck source to .env.build, as this is a 'complete' example -# shellcheck source=.env.build +# shellcheck source=/dev/null set -o allexport && source .env && set +o allexport # shellcheck source=./scripts/export_aws_creds.sh