diff --git a/data_safe_haven/external/__init__.py b/data_safe_haven/external/__init__.py
index 18473b5f6d..6192050f13 100644
--- a/data_safe_haven/external/__init__.py
+++ b/data_safe_haven/external/__init__.py
@@ -2,7 +2,6 @@
 from .api.azure_cli import AzureCliSingleton
 from .api.graph_api import GraphApi
 from .interface.azure_container_instance import AzureContainerInstance
-from .interface.azure_fileshare import AzureFileShare
 from .interface.azure_ipv4_range import AzureIPv4Range
 from .interface.azure_postgresql_database import AzurePostgreSQLDatabase
 
@@ -10,7 +9,6 @@
     "AzureApi",
     "AzureCliSingleton",
     "AzureContainerInstance",
-    "AzureFileShare",
     "AzureIPv4Range",
     "AzurePostgreSQLDatabase",
     "GraphApi",
diff --git a/data_safe_haven/external/api/azure_api.py b/data_safe_haven/external/api/azure_api.py
index 9ac8a187dc..87b692fed0 100644
--- a/data_safe_haven/external/api/azure_api.py
+++ b/data_safe_haven/external/api/azure_api.py
@@ -1,7 +1,6 @@
 """Interface to the Azure Python SDK"""
 
 import time
-from collections.abc import Sequence
 from contextlib import suppress
 from typing import Any, cast
 
@@ -11,20 +10,12 @@
     ResourceNotFoundError,
     ServiceRequestError,
 )
-from azure.core.polling import LROPoller
 from azure.keyvault.certificates import (
     CertificateClient,
-    CertificatePolicy,
     KeyVaultCertificate,
 )
 from azure.keyvault.keys import KeyClient, KeyVaultKey
-from azure.keyvault.secrets import KeyVaultSecret, SecretClient
-from azure.mgmt.automation import AutomationClient
-from azure.mgmt.automation.models import (
-    DscCompilationJobCreateParameters,
-    DscConfigurationAssociationProperty,
-    Module,
-)
+from azure.keyvault.secrets import SecretClient
 from azure.mgmt.compute.v2021_07_01 import ComputeManagementClient
 from azure.mgmt.compute.v2021_07_01.models import (
     ResourceSkuCapabilities,
@@ -106,90 +97,23 @@ def blob_client(
 
         return blob_client
 
-    def compile_desired_state(
+    def blob_exists(
         self,
-        automation_account_name: str,
-        configuration_name: str,
-        location: str,
-        parameters: dict[str, str],
+        blob_name: str,
         resource_group_name: str,
-        required_modules: Sequence[str],
-    ) -> None:
-        """Ensure that a Powershell Desired State Configuration is compiled
-
-        Raises:
-            DataSafeHavenAzureError if the configuration could not be compiled
-        """
-        # Connect to Azure clients
-        automation_client = AutomationClient(self.credential, self.subscription_id)
-        # Wait until all modules are available
-        while True:
-            # Cast to correct spurious type hint in Azure libraries
-            available_modules = cast(
-                list[Module],
-                automation_client.module.list_by_automation_account(
-                    resource_group_name, automation_account_name
-                ),
-            )
-            available_module_names = [
-                module.name
-                for module in available_modules
-                if module.provisioning_state == "Succeeded"
-            ]
-            if all(
-                module_name in available_module_names
-                for module_name in required_modules
-            ):
-                break
-            time.sleep(10)
-        # Wait until configuration is available
-        while True:
-            try:
-                automation_client.dsc_configuration.get(
-                    resource_group_name=resource_group_name,
-                    automation_account_name=automation_account_name,
-                    configuration_name=configuration_name,
-                )
-                break
-            except ResourceNotFoundError:
-                self.logger.debug(
-                    f"Could not load configuration {configuration_name}, retrying."
-                )
-                time.sleep(10)
-        # Begin creation
-        compilation_job_name = f"{configuration_name}-{time.time_ns()}"
-        with suppress(ResourceExistsError):
-            automation_client.dsc_compilation_job.begin_create(
-                resource_group_name=resource_group_name,
-                automation_account_name=automation_account_name,
-                compilation_job_name=compilation_job_name,
-                parameters=DscCompilationJobCreateParameters(
-                    name=compilation_job_name,
-                    location=location,
-                    configuration=DscConfigurationAssociationProperty(
-                        name=configuration_name
-                    ),
-                    parameters=parameters,
-                ),
-            )
-        # Poll until creation succeeds or fails
-        while True:
-            result = automation_client.dsc_compilation_job.get(
-                resource_group_name=resource_group_name,
-                automation_account_name=automation_account_name,
-                compilation_job_name=compilation_job_name,
-            )
-            time.sleep(10)
-            with suppress(AttributeError):
-                if (result.provisioning_state == "Succeeded") and (
-                    result.status == "Completed"
-                ):
-                    break
-                if (result.provisioning_state == "Suspended") and (
-                    result.status == "Suspended"
-                ):
-                    msg = f"Could not compile DSC '{configuration_name}'\n{result.exception}."
-                    raise DataSafeHavenAzureError(msg)
+        storage_account_name: str,
+        storage_container_name: str,
+    ) -> bool:
+        blob_client = self.blob_client(
+            resource_group_name, storage_account_name, storage_container_name, blob_name
+        )
+        # Check whether the blob exists
+        exists: bool = blob_client.exists()
+        response = "exists" if exists else "does not exist"
+        self.logger.info(
+            f"File [green]{blob_name}[/] {response} in blob storage.",
+        )
+        return exists
 
     def download_blob(
         self,
@@ -375,90 +299,6 @@ def ensure_keyvault_key(
             msg = f"Failed to create key {key_name}.\n{exc}"
             raise DataSafeHavenAzureError(msg) from exc
 
-    def ensure_keyvault_secret(
-        self, key_vault_name: str, secret_name: str, secret_value: str
-    ) -> KeyVaultSecret:
-        """Ensure that a secret exists in the KeyVault
-
-        Returns:
-            str: The secret value
-
-        Raises:
-            DataSafeHavenAzureError if the existence of the secret could not be verified
-        """
-        # Ensure that key exists
-        self.logger.debug(
-            f"Ensuring that secret [green]{secret_name}[/] exists...",
-        )
-        try:
-            # Connect to Azure clients
-            secret_client = SecretClient(
-                f"https://{key_vault_name}.vault.azure.net", self.credential
-            )
-            try:
-                secret = secret_client.get_secret(secret_name)
-            except DataSafeHavenAzureError:
-                secret = None
-            if not secret:
-                self.set_keyvault_secret(key_vault_name, secret_name, secret_value)
-                secret = secret_client.get_secret(secret_name)
-            self.logger.info(
-                f"Ensured that secret [green]{secret_name}[/] exists.",
-            )
-            return secret
-        except Exception as exc:
-            msg = f"Failed to create secret {secret_name}.\n{exc}"
-            raise DataSafeHavenAzureError(msg) from exc
-
-    def ensure_keyvault_self_signed_certificate(
-        self,
-        certificate_name: str,
-        certificate_url: str,
-        key_vault_name: str,
-    ) -> KeyVaultCertificate:
-        """Ensure that a self-signed certificate exists in the KeyVault
-
-        Returns:
-            KeyVaultCertificate: The self-signed certificate
-
-        Raises:
-            DataSafeHavenAzureError if the existence of the certificate could not be verified
-        """
-        try:
-            # Connect to Azure clients
-            certificate_client = CertificateClient(
-                vault_url=f"https://{key_vault_name}.vault.azure.net",
-                credential=self.credential,
-            )
-
-            # Ensure that certificate exists
-            self.logger.debug(
-                f"Ensuring that certificate [green]{certificate_url}[/] exists...",
-            )
-            policy = CertificatePolicy(
-                issuer_name="Self",
-                subject=f"CN={certificate_url}",
-                exportable=True,
key_type="RSA", - key_size=2048, - reuse_key=False, - enhanced_key_usage=["1.3.6.1.5.5.7.3.1", "1.3.6.1.5.5.7.3.2"], - validity_in_months=12, - ) - poller: LROPoller[KeyVaultCertificate] = ( - certificate_client.begin_create_certificate( - certificate_name=certificate_name, policy=policy - ) - ) - certificate = poller.result() - self.logger.info( - f"Ensured that certificate [green]{certificate_url}[/] exists.", - ) - return certificate - except Exception as exc: - msg = f"Failed to create certificate '{certificate_url}'.\n{exc}" - raise DataSafeHavenAzureError(msg) from exc - def ensure_managed_identity( self, identity_name: str, @@ -741,28 +581,6 @@ def get_storage_account_keys( msg = f"Keys could not be loaded for {msg_sa} in {msg_rg}.\n{exc}" raise DataSafeHavenAzureError(msg) from exc - def get_vm_sku_details(self, sku: str) -> tuple[str, str, str]: - # Connect to Azure client - cpus, gpus, ram = None, None, None - compute_client = ComputeManagementClient(self.credential, self.subscription_id) - for resource_sku in compute_client.resource_skus.list(): - if resource_sku.name == sku: - if resource_sku.capabilities: - # Cast to correct spurious type hint in Azure libraries - for capability in cast( - list[ResourceSkuCapabilities], resource_sku.capabilities - ): - if capability.name == "vCPUs": - cpus = capability.value - if capability.name == "GPUs": - gpus = capability.value - if capability.name == "MemoryGB": - ram = capability.value - if cpus and gpus and ram: - return (cpus, gpus, ram) - msg = f"Could not find information for VM SKU {sku}." - raise DataSafeHavenAzureError(msg) - def import_keyvault_certificate( self, certificate_name: str, @@ -1050,29 +868,6 @@ def remove_resource_group(self, resource_group_name: str) -> None: msg = f"Failed to remove resource group {resource_group_name}.\n{exc}" raise DataSafeHavenAzureError(msg) from exc - def restart_virtual_machine(self, resource_group_name: str, vm_name: str) -> None: - try: - self.logger.debug( - f"Attempting to restart virtual machine '[green]{vm_name}[/]'" - f" in resource group '[green]{resource_group_name}[/]'...", - ) - # Connect to Azure clients - compute_client = ComputeManagementClient( - self.credential, self.subscription_id - ) - poller = compute_client.virtual_machines.begin_restart( - resource_group_name, vm_name - ) - _ = ( - poller.result() - ) # returns 'None' on success or raises an exception on failure - self.logger.info( - f"Restarted virtual machine '[green]{vm_name}[/]' in resource group '[green]{resource_group_name}[/]'.", - ) - except Exception as exc: - msg = f"Failed to restart virtual machine '{vm_name}' in resource group '{resource_group_name}'.\n{exc}" - raise DataSafeHavenAzureError(msg) from exc - def run_remote_script( self, resource_group_name: str, @@ -1206,34 +1001,6 @@ def set_blob_container_acl( msg = f"Failed to set ACL '{desired_acl}' on container '{container_name}'.\n{exc}" raise DataSafeHavenAzureError(msg) from exc - def set_keyvault_secret( - self, key_vault_name: str, secret_name: str, secret_value: str - ) -> KeyVaultSecret: - """Ensure that a KeyVault secret has the desired value - - Returns: - str: The secret value - - Raises: - DataSafeHavenAzureError if the secret could not be set - """ - try: - # Connect to Azure clients - secret_client = SecretClient( - f"https://{key_vault_name}.vault.azure.net", self.credential - ) - # Set the secret to the desired value - try: - existing_value = secret_client.get_secret(secret_name).value - except ResourceNotFoundError: - existing_value = None 
-            if (not existing_value) or (existing_value != secret_value):
-                secret_client.set_secret(secret_name, secret_value)
-            return secret_client.get_secret(secret_name)
-        except Exception as exc:
-            msg = f"Failed to set secret '{secret_name}'.\n{exc}"
-            raise DataSafeHavenAzureError(msg) from exc
-
     def upload_blob(
         self,
         blob_data: bytes | str,
@@ -1265,21 +1032,3 @@ def upload_blob(
         except Exception as exc:
             msg = f"Blob file '{blob_name}' could not be uploaded to '{storage_account_name}'\n{exc}."
             raise DataSafeHavenAzureError(msg) from exc
-
-    def blob_exists(
-        self,
-        blob_name: str,
-        resource_group_name: str,
-        storage_account_name: str,
-        storage_container_name: str,
-    ) -> bool:
-        blob_client = self.blob_client(
-            resource_group_name, storage_account_name, storage_container_name, blob_name
-        )
-        # Upload the created file
-        exists: bool = blob_client.exists()
-        response = "exists" if exists else "does not exist"
-        self.logger.info(
-            f"File [green]{blob_name}[/] {response} in blob storage.",
-        )
-        return exists
diff --git a/data_safe_haven/external/api/graph_api.py b/data_safe_haven/external/api/graph_api.py
index bb9441cd6d..f5356eb602 100644
--- a/data_safe_haven/external/api/graph_api.py
+++ b/data_safe_haven/external/api/graph_api.py
@@ -576,15 +576,6 @@ def get_service_principal_by_name(
         except (DataSafeHavenMicrosoftGraphError, StopIteration):
             return None
 
-    def get_id_from_application_name(self, application_name: str) -> str | None:
-        try:
-            application = self.get_application_by_name(application_name)
-            if not application:
-                return None
-            return str(application["appId"])
-        except DataSafeHavenMicrosoftGraphError:
-            return None
-
     def get_id_from_groupname(self, group_name: str) -> str | None:
         try:
             return str(
diff --git a/data_safe_haven/external/interface/azure_fileshare.py b/data_safe_haven/external/interface/azure_fileshare.py
deleted file mode 100644
index d846dbbac4..0000000000
--- a/data_safe_haven/external/interface/azure_fileshare.py
+++ /dev/null
@@ -1,114 +0,0 @@
-"""Helper class for Azure fileshares"""
-
-from contextlib import suppress
-
-from azure.core.exceptions import ResourceNotFoundError
-from azure.mgmt.storage import StorageManagementClient
-from azure.storage.fileshare import ShareDirectoryClient, ShareFileClient
-
-from data_safe_haven.exceptions import DataSafeHavenAzureError
-from data_safe_haven.external import AzureApi
-
-
-class AzureFileShare:
-    """Interface for Azure fileshares"""
-
-    def __init__(
-        self,
-        storage_account_name: str,
-        storage_account_resource_group_name: str,
-        subscription_name: str,
-        share_name: str,
-    ):
-        self.azure_api = AzureApi(subscription_name)
-        self.storage_client_: StorageManagementClient | None = None
-        self.storage_account_key_: str | None = None
-        self.storage_account_name: str = storage_account_name
-        self.resource_group_name: str = storage_account_resource_group_name
-        self.share_name: str = share_name
-
-    @property
-    def storage_client(self) -> StorageManagementClient:
-        if not self.storage_client_:
-            self.storage_client_ = StorageManagementClient(
-                self.azure_api.credential, self.azure_api.subscription_id
-            )
-        return self.storage_client_
-
-    @property
-    def storage_account_key(self) -> str:
-        if not self.storage_account_key_:
-            storage_account_keys = [
-                k.value
-                for k in self.azure_api.get_storage_account_keys(
-                    self.resource_group_name, self.storage_account_name
-                )
-                if isinstance(k.value, str)
-            ]
-            if not storage_account_keys:
-                msg = f"Could not load key values for storage account {self.storage_account_name}."
-                raise DataSafeHavenAzureError(msg)
-            self.storage_account_key_ = storage_account_keys[0]
-        return self.storage_account_key_
-
-    def upload(self, destination_path: str, file_contents: str) -> None:
-        """Upload file contents to the target storage account location."""
-        target = "UNKNOWN"
-        try:
-            tokens = destination_path.split("/")
-            directory = "/".join(tokens[:-1])
-            target = tokens[-1]
-            file_client = self.file_client(
-                target,
-                directory=directory,
-            )
-            file_client.upload_file(file_contents.encode("utf-8"))
-        except Exception as exc:
-            msg = f"Failed to upload data to [green]{target}[/] in [green]{self.share_name}[/]."
-            raise DataSafeHavenAzureError(msg) from exc
-
-    def delete(self, destination_path: str) -> None:
-        """Delete a file from the target storage account"""
-        target = "UNKNOWN"
-        try:
-            tokens = destination_path.split("/")
-            directory = "/".join(tokens[:-1])
-            target = tokens[-1]
-            file_client = self.file_client(
-                target,
-                directory=directory,
-            )
-            if self.file_exists(file_client):
-                file_client.delete_file()
-        except Exception as exc:
-            msg = f"Failed to delete file [green]{target}[/] in [green]{self.share_name}[/]."
-            raise DataSafeHavenAzureError(msg) from exc
-
-    @staticmethod
-    def file_exists(file_client: ShareFileClient) -> bool:
-        with suppress(ResourceNotFoundError):
-            file_client.get_file_properties()
-            return True
-        return False
-
-    def file_client(
-        self,
-        file_name: str,
-        directory: str | None = None,
-    ) -> ShareFileClient:
-        if directory:
-            directory_client = ShareDirectoryClient(
-                account_url=f"https://{self.storage_account_name}.file.core.windows.net",
-                share_name=self.share_name,
-                directory_path=directory,
-                credential=self.storage_account_key,
-            )
-            if not directory_client.exists():
-                directory_client.create_directory()
-            return directory_client.get_file_client(file_name)
-        return ShareFileClient(
-            account_url=f"https://{self.storage_account_name}.file.core.windows.net",
-            share_name=self.share_name,
-            file_path=file_name,
-            credential=self.storage_account_key,
-        )
diff --git a/data_safe_haven/functions/__init__.py b/data_safe_haven/functions/__init__.py
index 00dfcce569..1e8ab4edfc 100644
--- a/data_safe_haven/functions/__init__.py
+++ b/data_safe_haven/functions/__init__.py
@@ -5,11 +5,8 @@
 )
 from .strings import (
     alphanumeric,
-    b64decode,
     b64encode,
-    hex_string,
     password,
-    random_letters,
     replace_separators,
     sanitise_sre_name,
     seeded_uuid,
@@ -20,12 +17,9 @@
 __all__ = [
     "allowed_dns_lookups",
     "alphanumeric",
-    "b64decode",
     "b64encode",
-    "hex_string",
     "ordered_private_dns_zones",
     "password",
-    "random_letters",
     "replace_separators",
     "sanitise_sre_name",
     "seeded_uuid",
diff --git a/data_safe_haven/functions/strings.py b/data_safe_haven/functions/strings.py
index d96d5fd962..31f099e4e0 100644
--- a/data_safe_haven/functions/strings.py
+++ b/data_safe_haven/functions/strings.py
@@ -16,21 +16,11 @@ def sanitise_sre_name(name: str) -> str:
     return alphanumeric(name).lower()
 
 
-def b64decode(input_string: str) -> str:
-    """Decode a Base64 string into a normal string."""
-    return base64.b64decode(input_string.encode("utf-8")).decode()
-
-
 def b64encode(input_string: str) -> str:
     """Encode a normal string into a Base64 string."""
     return base64.b64encode(input_string.encode("utf-8")).decode()
 
 
-def hex_string(length: int) -> str:
-    """Generate a string of 'length' random hexadecimal characters."""
-    return secrets.token_hex(length)
-
-
 def password(length: int) -> str:
     """
     Generate a string of 'length' random alphanumeric characters.
@@ -48,11 +38,6 @@ def password(length: int) -> str:
     return password_
 
 
-def random_letters(length: int) -> str:
-    """Generate a string of 'length' random letters."""
-    return "".join(secrets.choice(string.ascii_letters) for _ in range(length))
-
-
 def replace_separators(input_string: str, separator: str = "") -> str:
     """Return a string using underscores as a separator"""
     return (
diff --git a/data_safe_haven/infrastructure/components/__init__.py b/data_safe_haven/infrastructure/components/__init__.py
index 8342aa18bd..ecee8e32b6 100644
--- a/data_safe_haven/infrastructure/components/__init__.py
+++ b/data_safe_haven/infrastructure/components/__init__.py
@@ -1,6 +1,4 @@
 from .composite import (
-    AutomationDscNode,
-    AutomationDscNodeProps,
     LinuxVMComponentProps,
     LocalDnsRecordComponent,
     LocalDnsRecordProps,
@@ -14,16 +12,12 @@
 from .dynamic import (
     BlobContainerAcl,
     BlobContainerAclProps,
-    CompiledDsc,
-    CompiledDscProps,
     EntraApplication,
     EntraApplicationProps,
     FileShareFile,
     FileShareFileProps,
     FileUpload,
     FileUploadProps,
-    RemoteScript,
-    RemoteScriptProps,
     SSLCertificate,
     SSLCertificateProps,
 )
@@ -33,12 +27,8 @@
 )
 
 __all__ = [
-    "AutomationDscNode",
-    "AutomationDscNodeProps",
     "BlobContainerAcl",
     "BlobContainerAclProps",
-    "CompiledDsc",
-    "CompiledDscProps",
     "EntraApplication",
     "EntraApplicationProps",
     "FileShareFile",
@@ -52,8 +42,6 @@
     "MicrosoftSQLDatabaseProps",
     "PostgresqlDatabaseComponent",
     "PostgresqlDatabaseProps",
-    "RemoteScript",
-    "RemoteScriptProps",
     "SSLCertificate",
     "SSLCertificateProps",
     "VMComponent",
diff --git a/data_safe_haven/infrastructure/components/composite/__init__.py b/data_safe_haven/infrastructure/components/composite/__init__.py
index 51b14d56e1..756f9cfd11 100644
--- a/data_safe_haven/infrastructure/components/composite/__init__.py
+++ b/data_safe_haven/infrastructure/components/composite/__init__.py
@@ -1,4 +1,3 @@
-from .automation_dsc_node import AutomationDscNode, AutomationDscNodeProps
 from .local_dns_record import LocalDnsRecordComponent, LocalDnsRecordProps
 from .microsoft_sql_database import (
     MicrosoftSQLDatabaseComponent,
@@ -8,8 +7,6 @@
 from .virtual_machine import LinuxVMComponentProps, VMComponent, WindowsVMComponentProps
 
 __all__ = [
-    "AutomationDscNode",
-    "AutomationDscNodeProps",
     "LinuxVMComponentProps",
     "LocalDnsRecordComponent",
     "LocalDnsRecordProps",
diff --git a/data_safe_haven/infrastructure/components/composite/automation_dsc_node.py b/data_safe_haven/infrastructure/components/composite/automation_dsc_node.py
deleted file mode 100644
index 19c9e5026c..0000000000
--- a/data_safe_haven/infrastructure/components/composite/automation_dsc_node.py
+++ /dev/null
@@ -1,147 +0,0 @@
-"""Register a VM as an Azure Automation DSC node"""
-
-from collections.abc import Mapping, Sequence
-
-from pulumi import ComponentResource, Input, Output, ResourceOptions
-from pulumi_azure_native import automation, compute
-
-from data_safe_haven.infrastructure.components.dynamic import (
-    CompiledDsc,
-    CompiledDscProps,
-)
-from data_safe_haven.infrastructure.components.wrapped import (
-    WrappedAutomationAccount,
-)
-from data_safe_haven.utility import FileReader
-
-from .virtual_machine import VMComponent
-
-
-class AutomationDscNodeProps:
-    """Props for the AutomationDscNode class"""
-
-    def __init__(
-        self,
-        automation_account: WrappedAutomationAccount,
-        configuration_name: Input[str],
-        dsc_description: Input[str],
-        dsc_file: Input[FileReader],
-        dsc_parameters: Input[dict[str, str]],
-        dsc_required_modules: Input[Sequence[str]],
-        location: Input[str],
-        subscription_name: Input[str],
-        vm: Input[VMComponent],
-    ) -> None:
-        self.automation_account = automation_account
-        self.configuration_name = configuration_name
-        self.dsc_description = dsc_description
-        self.dsc_file = dsc_file
-        self.dsc_parameters = dsc_parameters
-        self.dsc_required_modules = dsc_required_modules
-        self.location = location
-        self.subscription_name = subscription_name
-        self.vm_log_analytics_extension = Output.from_input(vm).apply(
-            lambda vm: vm.log_analytics_extension
-        )
-        self.vm_name = Output.from_input(vm).apply(lambda vm: vm.vm_name)
-        self.vm_resource_group_name = Output.from_input(vm).apply(
-            lambda vm: vm.resource_group_name
-        )
-
-
-class AutomationDscNode(ComponentResource):
-    """Deploy an AutomationDscNode with Pulumi"""
-
-    def __init__(
-        self,
-        name: str,
-        props: AutomationDscNodeProps,
-        opts: ResourceOptions | None = None,
-        tags: Input[Mapping[str, Input[str]]] | None = None,
-    ) -> None:
-        super().__init__("dsh:common:AutomationDscNode", name, {}, opts)
-        child_opts = ResourceOptions.merge(opts, ResourceOptions(parent=self))
-        child_tags = tags if tags else {}
-
-        # Upload the primary domain controller DSC
-        dsc = automation.DscConfiguration(
-            f"{self._name}_dsc",
-            automation_account_name=props.automation_account.name,
-            configuration_name=props.configuration_name,
-            description=props.dsc_description,
-            location=props.location,
-            name=props.configuration_name,
-            resource_group_name=props.automation_account.resource_group_name,
-            source=automation.ContentSourceArgs(
-                hash=automation.ContentHashArgs(
-                    algorithm="sha256",
-                    value=Output.from_input(props.dsc_file).apply(lambda f: f.sha256()),
-                ),
-                type="embeddedContent",
-                value=Output.from_input(props.dsc_file).apply(
-                    lambda f: f.file_contents()
-                ),
-            ),
-            opts=ResourceOptions.merge(
-                child_opts,
-                ResourceOptions(
-                    delete_before_replace=True,
-                    depends_on=[props.automation_account],
-                    replace_on_changes=["source.hash"],
-                ),
-            ),
-            tags=child_tags,
-        )
-        dsc_compiled = CompiledDsc(
-            f"{self._name}_dsc_compiled",
-            CompiledDscProps(
-                automation_account_name=props.automation_account.name,
-                configuration_name=dsc.name,
-                location=props.location,
-                parameters=props.dsc_parameters,
-                resource_group_name=props.automation_account.resource_group_name,
-                required_modules=props.dsc_required_modules,
-                subscription_name=props.subscription_name,
-            ),
-            opts=child_opts,
-        )
-        compute.VirtualMachineExtension(
-            f"{self._name}_dsc_extension",
-            auto_upgrade_minor_version=True,
-            location=props.location,
-            publisher="Microsoft.Powershell",
-            resource_group_name=props.vm_resource_group_name,
-            settings={
-                "configurationArguments": {
-                    "ActionAfterReboot": "ContinueConfiguration",
-                    "AllowModuleOverwrite": True,
-                    "ConfigurationMode": "ApplyAndMonitor",
-                    "ConfigurationModeFrequencyMins": 15,
-                    "NodeConfigurationName": dsc_compiled.local_configuration_name,
-                    "RebootNodeIfNeeded": True,
-                    "RefreshFrequencyMins": 30,
-                    "RegistrationUrl": props.automation_account.agentsvc_url,
-                }
-            },
-            protected_settings={
-                "configurationArguments": {
-                    "registrationKey": {
-                        "userName": "notused",
-                        "Password": props.automation_account.primary_key,
-                    }
-                }
-            },
-            type="DSC",
-            type_handler_version="2.77",
-            vm_name=props.vm_name,
-            vm_extension_name="Microsoft.Powershell.DSC",
-            # To avoid a race condition when applying two VM extensions at the same
-            # time, we explicitly add a dependency on the Log Analytics extension.
-            opts=ResourceOptions.merge(
-                child_opts,
-                ResourceOptions(
-                    depends_on=[props.vm_log_analytics_extension],
-                ),
-            ),
-            tags=child_tags,
-        )
diff --git a/data_safe_haven/infrastructure/components/dynamic/__init__.py b/data_safe_haven/infrastructure/components/dynamic/__init__.py
index f28be2aa31..e54669845e 100644
--- a/data_safe_haven/infrastructure/components/dynamic/__init__.py
+++ b/data_safe_haven/infrastructure/components/dynamic/__init__.py
@@ -1,24 +1,18 @@
 from .blob_container_acl import BlobContainerAcl, BlobContainerAclProps
-from .compiled_dsc import CompiledDsc, CompiledDscProps
 from .entra_application import EntraApplication, EntraApplicationProps
 from .file_share_file import FileShareFile, FileShareFileProps
 from .file_upload import FileUpload, FileUploadProps
-from .remote_script import RemoteScript, RemoteScriptProps
 from .ssl_certificate import SSLCertificate, SSLCertificateProps
 
 __all__ = [
     "BlobContainerAcl",
     "BlobContainerAclProps",
-    "CompiledDsc",
-    "CompiledDscProps",
     "EntraApplication",
     "EntraApplicationProps",
     "FileShareFile",
     "FileShareFileProps",
     "FileUpload",
     "FileUploadProps",
-    "RemoteScript",
-    "RemoteScriptProps",
     "SSLCertificate",
     "SSLCertificateProps",
 ]
diff --git a/data_safe_haven/infrastructure/components/dynamic/compiled_dsc.py b/data_safe_haven/infrastructure/components/dynamic/compiled_dsc.py
deleted file mode 100644
index f1b1236ef6..0000000000
--- a/data_safe_haven/infrastructure/components/dynamic/compiled_dsc.py
+++ /dev/null
@@ -1,93 +0,0 @@
-"""Pulumi dynamic component for compiled desired state configuration."""
-
-from collections.abc import Sequence
-from typing import Any
-
-from pulumi import Input, Output, ResourceOptions
-from pulumi.dynamic import CreateResult, DiffResult, Resource
-
-from data_safe_haven.external import AzureApi
-
-from .dsh_resource_provider import DshResourceProvider
-
-
-class CompiledDscProps:
-    """Props for the CompiledDsc class"""
-
-    def __init__(
-        self,
-        automation_account_name: Input[str],
-        configuration_name: Input[str],
-        location: Input[str],
-        parameters: Input[dict[str, Any]],
-        resource_group_name: Input[str],
-        required_modules: Input[Sequence[str]],
-        subscription_name: Input[str],
-    ) -> None:
-        self.automation_account_name = automation_account_name
-        self.configuration_name = configuration_name
-        self.location = location
-        self.parameters = parameters
-        self.resource_group_name = resource_group_name
-        self.required_modules = required_modules
-        self.subscription_name = subscription_name
-
-
-class CompiledDscProvider(DshResourceProvider):
-    def create(self, props: dict[str, Any]) -> CreateResult:
-        """Create compiled desired state file."""
-        outputs = dict(**props)
-        azure_api = AzureApi(props["subscription_name"], disable_logging=True)
-        # Compile desired state
-        azure_api.compile_desired_state(
-            automation_account_name=props["automation_account_name"],
-            configuration_name=props["configuration_name"],
-            location=props["location"],
-            parameters=props["parameters"],
-            resource_group_name=props["resource_group_name"],
-            required_modules=props["required_modules"],
-        )
-        # Set localhost configuration name
-        outputs["local_configuration_name"] = f"{props['configuration_name']}.localhost"
-        return CreateResult(
-            f"CompiledDsc-{props['configuration_name']}",
-            outs=outputs,
-        )
-
-    def delete(self, id_: str, props: dict[str, Any]) -> None:
-        """The Python SDK does not support configuration deletion"""
-        # Use `id` as a no-op to avoid ARG002 while maintaining function signature
-        id((id_, props))
-
-    def diff(
-        self,
-        id_: str,
-        old_props: dict[str, Any],
-        new_props: dict[str, Any],
-    ) -> DiffResult:
-        """Calculate diff between old and new state"""
-        # Use `id` as a no-op to avoid ARG002 while maintaining function signature
-        id(id_)
-        return self.partial_diff(old_props, new_props, [])
-
-
-class CompiledDsc(Resource):
-    automation_account_name: Output[str]
-    configuration_name: Output[str]
-    local_configuration_name: Output[str]
-    location: Output[str]
-    resource_group_name: Output[str]
-    _resource_type_name = "dsh:common:CompiledDsc"  # set resource type
-
-    def __init__(
-        self,
-        name: str,
-        props: CompiledDscProps,
-        opts: ResourceOptions | None = None,
-    ):
-        super().__init__(
-            CompiledDscProvider(),
-            name,
-            {"local_configuration_name": None, **vars(props)},
-            opts,
-        )
diff --git a/data_safe_haven/infrastructure/components/dynamic/remote_script.py b/data_safe_haven/infrastructure/components/dynamic/remote_script.py
deleted file mode 100644
index dd3f03cb72..0000000000
--- a/data_safe_haven/infrastructure/components/dynamic/remote_script.py
+++ /dev/null
@@ -1,88 +0,0 @@
-"""Pulumi dynamic component for running remote scripts on an Azure VM."""
-
-from typing import Any
-
-from pulumi import Input, Output, ResourceOptions
-from pulumi.dynamic import CreateResult, DiffResult, Resource
-
-from data_safe_haven.external import AzureApi
-
-from .dsh_resource_provider import DshResourceProvider
-
-
-class RemoteScriptProps:
-    """Props for the RemoteScript class"""
-
-    def __init__(
-        self,
-        script_contents: Input[str],
-        script_hash: Input[str],
-        script_parameters: Input[dict[str, Any]],
-        subscription_name: Input[str],
-        vm_name: Input[str],
-        vm_resource_group_name: Input[str],
-        force_refresh: Input[bool] | None,
-    ) -> None:
-        self.force_refresh = force_refresh
-        self.script_contents = script_contents
-        self.script_hash = script_hash
-        self.script_parameters = script_parameters
-        self.subscription_name = subscription_name
-        self.vm_name = vm_name
-        self.vm_resource_group_name = vm_resource_group_name
-
-
-class RemoteScriptProvider(DshResourceProvider):
-    def create(self, props: dict[str, Any]) -> CreateResult:
-        """Run a remote script on a VM"""
-        outs = dict(**props)
-        azure_api = AzureApi(props["subscription_name"], disable_logging=True)
-        # Run remote script
-        outs["script_output"] = azure_api.run_remote_script(
-            props["vm_resource_group_name"],
-            props["script_contents"],
-            props["script_parameters"],
-            props["vm_name"],
-        )
-        return CreateResult(
-            f"RemoteScript-{props['script_hash']}",
-            outs=outs,
-        )
-
-    def delete(self, id_: str, props: dict[str, Any]) -> None:
-        """It is not possible to un-run a script"""
-        # Use `id` as a no-op to avoid ARG002 while maintaining function signature
-        id((id_, props))
-
-    def diff(
-        self,
-        id_: str,
-        old_props: dict[str, Any],
-        new_props: dict[str, Any],
-    ) -> DiffResult:
-        """Calculate diff between old and new state"""
-        # Use `id` as a no-op to avoid ARG002 while maintaining function signature
-        id(id_)
-        if new_props["force_refresh"]:
-            return DiffResult(
-                changes=True,
-                replaces=list(new_props.keys()),
-                stables=[],
-                delete_before_replace=True,
-            )
-        return self.partial_diff(old_props, new_props, [])
-
-
-class RemoteScript(Resource):
-    script_output: Output[str]
-    _resource_type_name = "dsh:common:RemoteScript"  # set resource type
-
-    def __init__(
-        self,
-        name: str,
-        props: RemoteScriptProps,
-        opts: ResourceOptions | None = None,
-    ):
-        super().__init__(
-            RemoteScriptProvider(), name, {"script_output": None, **vars(props)}, opts
-        )