diff --git a/.flake8 b/.flake8 deleted file mode 100644 index ce0f6aa24a..0000000000 --- a/.flake8 +++ /dev/null @@ -1,15 +0,0 @@ -[flake8] -# Default ignored error codes (rules not enforced by PEP8) -# E121 - continuation line under-indented for hanging indent -# E123 - closing bracket does not match indentation of opening bracket’s line -# E126 - continuation line over-indented for hanging indent -# E226 - missing whitespace around arithmetic operator -# E241 - multiple spaces after ‘,’ -# E242 - tab after ‘,’ -# E704 - multiple statements on one line (def) -# W503 - line break before binary operator -# W504 - line break after binary operator -# -# Extra ignored error codes -# E501 - line too long -ignore = E121, E123, E126, E226, E24, E704, E501, W503, W504 diff --git a/.github/workflows/lint_code.yaml b/.github/workflows/lint_code.yaml index 6b5815f172..9031c2c321 100644 --- a/.github/workflows/lint_code.yaml +++ b/.github/workflows/lint_code.yaml @@ -4,9 +4,9 @@ name: Lint code # Run workflow on pushes to matching branches on: # yamllint disable-line rule:truthy push: - branches: [develop] + branches: [develop, python-migration] pull_request: - branches: [develop] + branches: [develop, python-migration] jobs: lint_json: @@ -53,12 +53,14 @@ jobs: steps: - name: Checkout code uses: actions/checkout@v3 - - name: Install requirements - shell: bash - run: pip install flake8 + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: 3.11 + - name: Install hatch + run: pip install hatch - name: Lint Python - shell: bash - run: flake8 . --statistics --count + run: hatch run lint:style lint_shell: runs-on: ubuntu-latest diff --git a/.gitignore b/.gitignore index b6c96e43a3..4c20d0cb4d 100644 --- a/.gitignore +++ b/.gitignore @@ -42,5 +42,5 @@ expanded.yaml # mypy cache .mypy_cache -# Pulumi secrets files -pulumi/Pulumi.*.yaml +# ruff cache +.ruff_cache diff --git a/data_safe_haven/README.md b/data_safe_haven/README.md index 950627e324..ca72429e0a 100644 --- a/data_safe_haven/README.md +++ b/data_safe_haven/README.md @@ -5,10 +5,7 @@ Install the following requirements before starting - [Poetry](https://python-poetry.org/docs/#installation) - [Pulumi](https://www.pulumi.com/docs/get-started/install/) -# Deploying a Data Safe Haven - -Create a directory where you want to store local configuration files for this deployment. -This is the `project directory` +## Deploying a Data Safe Haven - Run the following to initialise the deployment [approx 5 minutes]: @@ -25,42 +22,41 @@ If you prefer to enter these at the command line, run `dsh init -h` to see the n > dsh deploy shm ``` -You will be prompted for various project settings. -If you prefer to enter these at the command line, run `dsh deploy shm -h` to see the necessary command line flags. +You will be prompted for various settings. +Run `dsh deploy shm -h` to see the necessary command line flags and provide them as arguments. - Add one or more users from a CSV file with columns named (`GivenName`, `Surname`, `Phone`, `Email`). Note that the phone number must be in full international format. ```bash -> dsh users add +> dsh admin add-users ``` - Next deploy the infrastructure for one or more Secure Research Environments (SREs) [approx 30 minutes]: ```bash -> dsh deploy sre -r -r +> dsh deploy sre ``` -where you must specify a VM SKU for each user-accessible secure research desktop that you want to deploy -On first run, you will be prompted for various project settings. 
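For concreteness, the CSV consumed by `dsh admin add-users` (described in the README hunk above) can be generated with the standard library. A minimal sketch assuming the four documented columns plus `CountryCode`, which the user handler later in this diff also reads; all values are illustrative:

```python
# Illustrative only: writes a users.csv in the shape the README documents.
import csv

fieldnames = ["GivenName", "Surname", "Phone", "Email", "CountryCode"]
with open("users.csv", "w", newline="") as f_csv:
    writer = csv.DictWriter(f_csv, fieldnames=fieldnames)
    writer.writeheader()
    writer.writerow(
        {
            "GivenName": "Ada",
            "Surname": "Lovelace",
            "Phone": "+441234567890",  # must be full international format
            "Email": "ada.lovelace@example.com",
            "CountryCode": "GB",
        }
    )
```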
-If you prefer to enter these at the command line, run `dsh deploy sre -h` to see the necessary command line flags. +You will be prompted for various settings. +Run `dsh deploy sre -h` to see the necessary command line flags and provide them as arguments. - Next add one or more existing users to your SRE ```bash -> dsh users register -s +> dsh admin register-users -s ``` where you must specify the usernames for each user you want to add to this SRE -# Administering a Data Safe Haven +## Administering a Data Safe Haven - Run the following to list the currently available users ```bash -> dsh users list +> dsh admin list-users ``` -# Removing a deployed Data Safe Haven +## Removing a deployed Data Safe Haven - Run the following if you want to teardown a deployed SRE: diff --git a/data_safe_haven/__init__.py b/data_safe_haven/__init__.py index bd007957f9..f0a613dc08 100644 --- a/data_safe_haven/__init__.py +++ b/data_safe_haven/__init__.py @@ -1,4 +1,5 @@ """Data Safe Haven""" -import pkg_resources -__version__ = pkg_resources.get_distribution("data-safe-haven").version +from .version import __version__, __version_info__ + +__all__ = ["__version__", "__version_info__"] diff --git a/data_safe_haven/administration/users/active_directory_users.py b/data_safe_haven/administration/users/active_directory_users.py index b41863a001..760b94c9ac 100644 --- a/data_safe_haven/administration/users/active_directory_users.py +++ b/data_safe_haven/administration/users/active_directory_users.py @@ -1,14 +1,14 @@ """Interact with users in an Azure Active Directory""" -# Standard library imports import pathlib -from typing import Any, Optional, Sequence +from collections.abc import Sequence +from typing import Any -# Local imports from data_safe_haven.config import Config from data_safe_haven.external import AzureApi from data_safe_haven.functions import b64encode from data_safe_haven.pulumi import PulumiSHMStack from data_safe_haven.utility import FileReader, Logger + from .research_user import ResearchUser @@ -70,7 +70,7 @@ def add(self, new_users: Sequence[ResearchUser]) -> None: for line in output.split("\n"): self.logger.parse(line) - def list(self, sre_name: Optional[str] = None) -> Sequence[ResearchUser]: + def list(self, sre_name: str | None = None) -> Sequence[ResearchUser]: # noqa: A003 """List users in a local Active Directory""" list_users_script = FileReader( self.resources_path / "active_directory" / "list_users.ps1" @@ -85,7 +85,7 @@ def list(self, sre_name: Optional[str] = None) -> Sequence[ResearchUser]: users = [] for line in output.split("\n"): tokens = line.split(";") - if len(tokens) >= 6: + if len(tokens) >= 6: # noqa: PLR2004 users.append( ResearchUser( email_address=tokens[4], @@ -127,7 +127,7 @@ def remove(self, users: Sequence[ResearchUser]) -> None: for line in output.split("\n"): self.logger.parse(line) - def set(self, users: Sequence[ResearchUser]) -> None: + def set(self, users: Sequence[ResearchUser]) -> None: # noqa: A003 """Set local Active Directory users to specified list""" users_to_remove = [user for user in self.list() if user not in users] self.remove(users_to_remove) diff --git a/data_safe_haven/administration/users/azure_ad_users.py b/data_safe_haven/administration/users/azure_ad_users.py index 8350d4b727..149db04c8c 100644 --- a/data_safe_haven/administration/users/azure_ad_users.py +++ b/data_safe_haven/administration/users/azure_ad_users.py @@ -1,11 +1,11 @@ """Interact with users in an Azure Active Directory""" -# Standard library imports -from typing import Any, 
Sequence +from collections.abc import Sequence +from typing import Any -# Local imports from data_safe_haven.external import GraphApi from data_safe_haven.functions import password from data_safe_haven.utility import Logger + from .research_user import ResearchUser @@ -25,11 +25,11 @@ def __init__( def add(self, new_users: Sequence[ResearchUser]) -> None: """Add list of users to AzureAD""" # Get the default domain - default_domain = [ + default_domain = next( domain["id"] for domain in self.graph_api.read_domains() if domain["isDefault"] - ][0] + ) for user in new_users: request_json = { "accountEnabled": user.account_enabled, @@ -56,7 +56,7 @@ def add(self, new_users: Sequence[ResearchUser]) -> None: # # Also add the user to the research users group # self.graph_api.add_user_to_group(user.username, self.researchers_group_name) - def list(self) -> Sequence[ResearchUser]: + def list(self) -> Sequence[ResearchUser]: # noqa: A003 user_list = self.graph_api.read_users() return [ ResearchUser( @@ -98,14 +98,14 @@ def remove(self, users: Sequence[ResearchUser]) -> None: # f"Removed '{user.preferred_username}' from group '{self.researchers_group_name}'" # ) # else: - # raise DataSafeHavenMicrosoftGraphException - # except DataSafeHavenMicrosoftGraphException: + # raise DataSafeHavenMicrosoftGraphError + # except DataSafeHavenMicrosoftGraphError: # self.logger.error( # f"Unable to remove '{user.preferred_username}' from group '{self.researchers_group_name}'" # ) pass - def set(self, users: Sequence[ResearchUser]) -> None: + def set(self, users: Sequence[ResearchUser]) -> None: # noqa: A003 """Set Guacamole users to specified list""" users_to_remove = [user for user in self.list() if user not in users] self.remove(users_to_remove) diff --git a/data_safe_haven/administration/users/guacamole_users.py b/data_safe_haven/administration/users/guacamole_users.py index b9f4b23a69..47d99f47d6 100644 --- a/data_safe_haven/administration/users/guacamole_users.py +++ b/data_safe_haven/administration/users/guacamole_users.py @@ -1,11 +1,11 @@ -# Standard library imports import pathlib -from typing import Any, Optional, Sequence +from collections.abc import Sequence +from typing import Any -# Local imports from data_safe_haven.config import Config from data_safe_haven.external import AzurePostgreSQLDatabase from data_safe_haven.pulumi import PulumiSREStack + from .research_user import ResearchUser @@ -20,7 +20,7 @@ def __init__(self, config: Config, sre_name: str, *args: Any, **kwargs: Any): sre_stack.output("remote_desktop")["resource_group_name"], config.subscription_name, ) - self.users_: Optional[Sequence[ResearchUser]] = None + self.users_: Sequence[ResearchUser] | None = None self.postgres_script_path: pathlib.Path = ( pathlib.Path(__file__).parent.parent.parent / "resources" @@ -30,14 +30,17 @@ def __init__(self, config: Config, sre_name: str, *args: Any, **kwargs: Any): self.sre_name = sre_name self.group_name = f"Data Safe Haven Users SRE {sre_name}" - def list(self) -> Sequence[ResearchUser]: + def list(self) -> Sequence[ResearchUser]: # noqa: A003 """List all Guacamole users""" if self.users_ is None: # Allow for the possibility of an empty list of users postgres_output = self.postgres_provisioner.execute_scripts( [self.postgres_script_path / "list_users.mustache.sql"], mustache_values={"group_name": self.group_name}, ) - # The output is of the form [["sam_account_name1", "email_address1"], ["sam_account_name2", "email_address2"]] + # The output is of the form [ + # ["sam_account_name1", 
"email_address1"], + # ["sam_account_name2", "email_address2"] + # ] self.users_ = [ ResearchUser( sam_account_name=user_details[0].split("@")[0], diff --git a/data_safe_haven/administration/users/research_user.py b/data_safe_haven/administration/users/research_user.py index 1f208dc405..49d2624a18 100644 --- a/data_safe_haven/administration/users/research_user.py +++ b/data_safe_haven/administration/users/research_user.py @@ -1,18 +1,17 @@ -# Standard library imports -from typing import Any, Optional +from typing import Any class ResearchUser: def __init__( self, - account_enabled: Optional[bool] = None, - country: Optional[str] = None, - email_address: Optional[str] = None, - given_name: Optional[str] = None, - phone_number: Optional[str] = None, - sam_account_name: Optional[str] = None, - surname: Optional[str] = None, - user_principal_name: Optional[str] = None, + account_enabled: bool | None = None, + country: str | None = None, + email_address: str | None = None, + given_name: str | None = None, + phone_number: str | None = None, + sam_account_name: str | None = None, + surname: str | None = None, + user_principal_name: str | None = None, ) -> None: self.account_enabled = account_enabled self.country = country diff --git a/data_safe_haven/administration/users/user_handler.py b/data_safe_haven/administration/users/user_handler.py index f8fec3f6c9..30823a5602 100644 --- a/data_safe_haven/administration/users/user_handler.py +++ b/data_safe_haven/administration/users/user_handler.py @@ -1,11 +1,9 @@ -# Standard library imports import csv import pathlib -from typing import Dict, List, Sequence +from collections.abc import Sequence -# Local imports from data_safe_haven.config import Config -from data_safe_haven.exceptions import DataSafeHavenUserHandlingException +from data_safe_haven.exceptions import DataSafeHavenUserHandlingError from data_safe_haven.external import GraphApi from data_safe_haven.utility import Logger @@ -33,7 +31,7 @@ def add(self, users_csv_path: pathlib.Path) -> None: """Add AzureAD and Guacamole users Raises: - DataSafeHavenUserHandlingException if the users could not be added + DataSafeHavenUserHandlingError if the users could not be added """ try: # Construct user list @@ -49,9 +47,8 @@ def add(self, users_csv_path: pathlib.Path) -> None: if (not reader.fieldnames) or ( required_field not in reader.fieldnames ): - raise ValueError( - f"Missing required CSV field '{required_field}'." - ) + msg = f"Missing required CSV field '{required_field}'." 
+ raise ValueError(msg) users = [ ResearchUser( country=user["CountryCode"], @@ -68,11 +65,10 @@ def add(self, users_csv_path: pathlib.Path) -> None: # Commit changes self.active_directory_users.add(users) except Exception as exc: - raise DataSafeHavenUserHandlingException( - f"Could not add users from '{users_csv_path}'.\n{str(exc)}" - ) from exc + msg = f"Could not add users from '{users_csv_path}'.\n{exc}" + raise DataSafeHavenUserHandlingError(msg) from exc - def get_usernames(self) -> Dict[str, List[str]]: + def get_usernames(self) -> dict[str, list[str]]: """Load usernames from all sources""" usernames = {} usernames["Azure AD"] = self.get_usernames_azure_ad() @@ -81,29 +77,29 @@ def get_usernames(self) -> Dict[str, List[str]]: usernames[f"SRE {sre_name}"] = self.get_usernames_guacamole(sre_name) return usernames - def get_usernames_azure_ad(self) -> List[str]: + def get_usernames_azure_ad(self) -> list[str]: """Load usernames from Azure AD""" return [user.username for user in self.azure_ad_users.list()] - def get_usernames_domain_controller(self) -> List[str]: + def get_usernames_domain_controller(self) -> list[str]: """Load usernames from all domain controller""" return [user.username for user in self.active_directory_users.list()] - def get_usernames_guacamole(self, sre_name: str) -> List[str]: + def get_usernames_guacamole(self, sre_name: str) -> list[str]: """Load usernames from Guacamole""" return [user.username for user in self.sre_guacamole_users[sre_name].list()] - def list(self) -> None: + def list(self) -> None: # noqa: A003 """List Active Directory, AzureAD and Guacamole users Raises: - DataSafeHavenUserHandlingException if the users could not be listed + DataSafeHavenUserHandlingError if the users could not be listed """ try: # Load usernames usernames = self.get_usernames() # Fill user information as a table - user_headers = ["username"] + list(usernames.keys()) + user_headers = ["username", *list(usernames.keys())] user_data = [] for username in sorted(set(sum(usernames.values(), []))): user_memberships = [username] @@ -117,29 +113,27 @@ def list(self) -> None: for line in self.logger.tabulate(user_headers, user_data): self.logger.info(line) except Exception as exc: - raise DataSafeHavenUserHandlingException( - f"Could not list users.\n{str(exc)}" - ) from exc + msg = f"Could not list users.\n{exc}" + raise DataSafeHavenUserHandlingError(msg) from exc def register(self, sre_name: str, user_names: Sequence[str]) -> None: """Register usernames with SRE Raises: - DataSafeHavenUserHandlingException if the users could not be registered in the SRE + DataSafeHavenUserHandlingError if the users could not be registered in the SRE """ try: # Add users to the SRE security group self.active_directory_users.register(sre_name, user_names) except Exception as exc: - raise DataSafeHavenUserHandlingException( - f"Could not register {len(user_names)} users with SRE '{sre_name}'.\n{str(exc)}" - ) from exc + msg = f"Could not register {len(user_names)} users with SRE '{sre_name}'.\n{exc}" + raise DataSafeHavenUserHandlingError(msg) from exc def remove(self, user_names: Sequence[str]) -> None: """Remove AzureAD and Guacamole users Raises: - DataSafeHavenUserHandlingException if the users could not be removed + DataSafeHavenUserHandlingError if the users could not be removed """ try: # Construct user lists @@ -152,15 +146,14 @@ def remove(self, user_names: Sequence[str]) -> None: # Commit changes self.active_directory_users.remove(active_directory_users_to_remove) except Exception as exc: - 
raise DataSafeHavenUserHandlingException( - f"Could not remove users: {user_names}.\n{str(exc)}" - ) from exc + msg = f"Could not remove users: {user_names}.\n{exc}" + raise DataSafeHavenUserHandlingError(msg) from exc - def set(self, users_csv_path: str) -> None: + def set(self, users_csv_path: str) -> None: # noqa: A003 """Set AzureAD and Guacamole users Raises: - DataSafeHavenUserHandlingException if the users could not be set to the desired list + DataSafeHavenUserHandlingError if the users could not be set to the desired list """ try: # Construct user list @@ -170,9 +163,8 @@ def set(self, users_csv_path: str) -> None: if (not reader.fieldnames) or ( required_field not in reader.fieldnames ): - raise ValueError( - f"Missing required CSV field '{required_field}'." - ) + msg = f"Missing required CSV field '{required_field}'." + raise ValueError(msg) desired_users = [ ResearchUser( country="GB", @@ -203,20 +195,18 @@ def set(self, users_csv_path: str) -> None: # Commit changes self.active_directory_users.set(active_directory_desired_users) except Exception as exc: - raise DataSafeHavenUserHandlingException( - f"Could not set users from '{users_csv_path}'.\n{str(exc)}" - ) from exc + msg = f"Could not set users from '{users_csv_path}'.\n{exc}" + raise DataSafeHavenUserHandlingError(msg) from exc def unregister(self, sre_name: str, user_names: Sequence[str]) -> None: """Unregister usernames with SRE Raises: - DataSafeHavenUserHandlingException if the users could not be registered in the SRE + DataSafeHavenUserHandlingError if the users could not be registered in the SRE """ try: # Remove users from the SRE security group self.active_directory_users.unregister(sre_name, user_names) except Exception as exc: - raise DataSafeHavenUserHandlingException( - f"Could not register {len(user_names)} users with SRE '{sre_name}'.\n{str(exc)}" - ) from exc + msg = f"Could not register {len(user_names)} users with SRE '{sre_name}'.\n{exc}" + raise DataSafeHavenUserHandlingError(msg) from exc diff --git a/data_safe_haven/backend/backend.py b/data_safe_haven/backend/backend.py index 8b027084d9..39de275fdb 100644 --- a/data_safe_haven/backend/backend.py +++ b/data_safe_haven/backend/backend.py @@ -1,10 +1,7 @@ """Azure backend for a Data Safe Haven deployment""" -# Standard library imports -from typing import Optional -# Local imports from data_safe_haven.config import Config -from data_safe_haven.exceptions import DataSafeHavenAzureException +from data_safe_haven.exceptions import DataSafeHavenAzureError from data_safe_haven.external import AzureApi @@ -12,7 +9,7 @@ class Backend: """Azure backend for a Data Safe Haven deployment""" def __init__(self) -> None: - self.azure_api_: Optional[AzureApi] = None + self.azure_api_: AzureApi | None = None self.config = Config() self.tags = {"component": "backend"} | self.config.tags.to_dict() @@ -33,7 +30,7 @@ def create(self) -> None: """Create all desired resources Raises: - DataSafeHavenAzureException if any resources cannot be created + DataSafeHavenAzureError if any resources cannot be created """ try: self.config.azure.subscription_id = self.azure_api.subscription_id @@ -44,9 +41,8 @@ def create(self) -> None: tags=self.tags, ) if not resource_group.name: - raise DataSafeHavenAzureException( - f"Resource group '{self.config.backend.resource_group_name}' was not created." - ) + msg = f"Resource group '{self.config.backend.resource_group_name}' was not created." 
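In `backend.py` above, the `azure_api_: AzureApi | None = None` field uses the trailing-underscore convention for a lazily-built accessor; the accessor itself sits outside this hunk, so the following is a plausible reconstruction rather than the project's actual code (`AzureApi` here is a stand-in taking a single illustrative argument):

```python
# Hypothetical sketch: `azure_api_` as the backing field for a property
# that constructs the client on first use and caches it afterwards.
class AzureApi:
    def __init__(self, subscription_name: str) -> None:
        self.subscription_name = subscription_name


class Backend:
    def __init__(self, subscription_name: str) -> None:
        self.azure_api_: AzureApi | None = None  # not built until needed
        self.subscription_name = subscription_name

    @property
    def azure_api(self) -> AzureApi:
        if self.azure_api_ is None:
            self.azure_api_ = AzureApi(self.subscription_name)
        return self.azure_api_


backend = Backend("My Azure Subscription")  # illustrative name
assert backend.azure_api is backend.azure_api  # same cached instance
```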
+ raise DataSafeHavenAzureError(msg) identity = self.azure_api.ensure_managed_identity( identity_name=self.config.backend.managed_identity_name, location=resource_group.location, @@ -59,9 +55,8 @@ def create(self) -> None: tags=self.tags, ) if not storage_account.name: - raise DataSafeHavenAzureException( - f"Storage account '{self.config.backend.storage_account_name}' was not created." - ) + msg = f"Storage account '{self.config.backend.storage_account_name}' was not created." + raise DataSafeHavenAzureError(msg) _ = self.azure_api.ensure_storage_blob_container( container_name=self.config.backend.storage_container_name, resource_group_name=resource_group.name, @@ -81,9 +76,10 @@ def create(self) -> None: tags=self.tags, ) if not keyvault.name: - raise DataSafeHavenAzureException( + msg = ( f"Keyvault '{self.config.backend.key_vault_name}' was not created." ) + raise DataSafeHavenAzureError(msg) pulumi_encryption_key = self.azure_api.ensure_keyvault_key( key_name=self.config.pulumi.encryption_key_name, key_vault_name=keyvault.name, @@ -92,21 +88,19 @@ def create(self) -> None: -1 ] except Exception as exc: - raise DataSafeHavenAzureException( - f"Failed to create backend resources.\n{str(exc)}" - ) from exc + msg = f"Failed to create backend resources.\n{exc}" + raise DataSafeHavenAzureError(msg) from exc def teardown(self) -> None: """Destroy all created resources Raises: - DataSafeHavenAzureException if any resources cannot be destroyed + DataSafeHavenAzureError if any resources cannot be destroyed """ try: self.azure_api.remove_resource_group( self.config.backend.resource_group_name ) except Exception as exc: - raise DataSafeHavenAzureException( - f"Failed to destroy backend resources.\n{str(exc)}" - ) from exc + msg = f"Failed to destroy backend resources.\n{exc}" + raise DataSafeHavenAzureError(msg) from exc diff --git a/data_safe_haven/cli.py b/data_safe_haven/cli.py index 70be02b4f2..6a01f76dce 100644 --- a/data_safe_haven/cli.py +++ b/data_safe_haven/cli.py @@ -1,12 +1,9 @@ """Command line entrypoint for Data Safe Haven application""" -# Standard library imports import pathlib from typing import Annotated, Optional -# Third party imports import typer -# Local imports from data_safe_haven import __version__ from data_safe_haven.commands import ( admin_command_group, @@ -14,19 +11,19 @@ initialise_command, teardown_command_group, ) -from data_safe_haven.exceptions import DataSafeHavenException +from data_safe_haven.exceptions import DataSafeHavenError from data_safe_haven.utility import Logger def callback( output: Annotated[ - Optional[pathlib.Path], + Optional[pathlib.Path], # noqa: UP007 typer.Option( "--output", "-o", resolve_path=True, help="Path to an output log file" ), ] = None, verbosity: Annotated[ - Optional[int], + Optional[int], # noqa: UP007 typer.Option( "--verbosity", "-v", @@ -36,7 +33,7 @@ def callback( ), ] = None, version: Annotated[ - Optional[bool], + Optional[bool], # noqa: UP007 typer.Option( "--version", "-V", help="Display the version of this application." 
), @@ -45,7 +42,7 @@ def callback( """Arguments to the main executable""" Logger(verbosity, output) # initialise logging singleton if version: - print(f"Data Safe Haven {__version__}") + print(f"Data Safe Haven {__version__}") # noqa: T201 raise typer.Exit() @@ -87,7 +84,7 @@ def main() -> None: # Start the application try: application() - except DataSafeHavenException as exc: + except DataSafeHavenError as exc: logger = Logger() for line in str(exc).split("\n"): logger.error(line) diff --git a/data_safe_haven/commands/deploy_shm_command.py b/data_safe_haven/commands/deploy_shm_command.py index a36b6d6f13..32c3f32ad0 100644 --- a/data_safe_haven/commands/deploy_shm_command.py +++ b/data_safe_haven/commands/deploy_shm_command.py @@ -1,15 +1,10 @@ """Command-line application for deploying a Data Safe Haven from project files""" -# Standard library imports -from typing import List, Optional - -# Third party imports import pytz -# Local imports from data_safe_haven.config import Config from data_safe_haven.exceptions import ( - DataSafeHavenConfigException, - DataSafeHavenException, + DataSafeHavenConfigError, + DataSafeHavenError, ) from data_safe_haven.external import GraphApi from data_safe_haven.functions import password @@ -27,11 +22,11 @@ def __init__(self): def __call__( self, - aad_tenant_id: Optional[str] = None, - admin_email_address: Optional[str] = None, - admin_ip_addresses: Optional[List[str]] = None, - fqdn: Optional[str] = None, - timezone: Optional[str] = None, + aad_tenant_id: str | None = None, + admin_email_address: str | None = None, + admin_ip_addresses: list[str] | None = None, + fqdn: str | None = None, + timezone: str | None = None, ) -> None: """Typer command line entrypoint""" try: @@ -60,18 +55,34 @@ def __call__( # Initialise Pulumi stack stack = PulumiSHMStack(config) # Set Azure options - stack.add_option("azure-native:location", config.azure.location) stack.add_option( - "azure-native:subscriptionId", config.azure.subscription_id + "azure-native:location", config.azure.location, replace=False + ) + stack.add_option( + "azure-native:subscriptionId", + config.azure.subscription_id, + replace=False, + ) + stack.add_option( + "azure-native:tenantId", config.azure.tenant_id, replace=False ) - stack.add_option("azure-native:tenantId", config.azure.tenant_id) # Add necessary secrets - stack.add_secret("password-domain-admin", password(20)) - stack.add_secret("password-domain-azure-ad-connect", password(20)) - stack.add_secret("password-domain-computer-manager", password(20)) - stack.add_secret("password-domain-ldap-searcher", password(20)) - stack.add_secret("password-update-server-linux-admin", password(20)) - stack.add_secret("verification-azuread-custom-domain", verification_record) + stack.add_secret("password-domain-admin", password(20), replace=False) + stack.add_secret( + "password-domain-azure-ad-connect", password(20), replace=False + ) + stack.add_secret( + "password-domain-computer-manager", password(20), replace=False + ) + stack.add_secret( + "password-domain-ldap-searcher", password(20), replace=False + ) + stack.add_secret( + "password-update-server-linux-admin", password(20), replace=False + ) + stack.add_secret( + "verification-azuread-custom-domain", verification_record, replace=False + ) # Deploy Azure infrastructure with Pulumi stack.deploy() @@ -93,19 +104,18 @@ def __call__( stack=stack, ) manager.run() - except DataSafeHavenException as exc: - raise DataSafeHavenException( - f"Could not deploy Data Safe Haven Management environment.\n{str(exc)}" 
- ) from exc + except DataSafeHavenError as exc: + msg = f"Could not deploy Data Safe Haven Management environment.\n{exc}" + raise DataSafeHavenError(msg) from exc def update_config( self, config: Config, - aad_tenant_id: Optional[str] = None, - admin_email_address: Optional[str] = None, - admin_ip_addresses: Optional[List[str]] = None, - fqdn: Optional[str] = None, - timezone: Optional[str] = None, + aad_tenant_id: str | None = None, + admin_email_address: str | None = None, + admin_ip_addresses: list[str] | None = None, + fqdn: str | None = None, + timezone: str | None = None, ) -> None: # Update AzureAD tenant ID if aad_tenant_id is not None: @@ -118,9 +128,8 @@ def update_config( ) config.shm.aad_tenant_id = aad_tenant_id if not config.shm.aad_tenant_id: - raise DataSafeHavenConfigException( - "No AzureAD tenant ID was found. Use [bright_cyan]'--aad-tenant-id / -a'[/] to set one." - ) + msg = "No AzureAD tenant ID was found. Use [bright_cyan]'--aad-tenant-id / -a'[/] to set one." + raise DataSafeHavenConfigError(msg) # Update admin email address if admin_email_address is not None: @@ -135,9 +144,8 @@ def update_config( ) config.shm.admin_email_address = admin_email_address if not config.shm.admin_email_address: - raise DataSafeHavenConfigException( - "No admin email address was found. Use [bright_cyan]'--email / -e'[/] to set one." - ) + msg = "No admin email address was found. Use [bright_cyan]'--email / -e'[/] to set one." + raise DataSafeHavenConfigError(msg) # Update admin IP addresses if admin_ip_addresses: @@ -152,9 +160,8 @@ def update_config( ) config.shm.admin_ip_addresses = admin_ip_addresses if len(config.shm.admin_ip_addresses) == 0: - raise DataSafeHavenConfigException( - "No admin IP addresses were found. Use [bright_cyan]'--ip-address / -i'[/] to set one." - ) + msg = "No admin IP addresses were found. Use [bright_cyan]'--ip-address / -i'[/] to set one." + raise DataSafeHavenConfigError(msg) # Update FQDN if fqdn is not None: @@ -167,9 +174,8 @@ def update_config( ) config.shm.fqdn = fqdn if not config.shm.fqdn: - raise DataSafeHavenConfigException( - "No fully-qualified domain name was found. Use [bright_cyan]'--fqdn / -f'[/] to set one." - ) + msg = "No fully-qualified domain name was found. Use [bright_cyan]'--fqdn / -f'[/] to set one." + raise DataSafeHavenConfigError(msg) # Update timezone if it passes validation if timezone is not None: @@ -185,6 +191,5 @@ def update_config( self.logger.info(f"Setting [bold]timezone[/] to [green]{timezone}[/].") config.shm.timezone = timezone if not config.shm.timezone: - raise DataSafeHavenConfigException( - "No timezone was found. Use [bright_cyan]'--timezone / -t'[/] to set one." - ) + msg = "No timezone was found. Use [bright_cyan]'--timezone / -t'[/] to set one." 
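`validate_timezone` is imported from `data_safe_haven.functions` later in this diff, but its body is not shown; given the `import pytz` at the top of this file, a reasonable sketch is a membership test against `pytz.all_timezones`, reusing the same msg-then-raise idiom:

```python
# Assumption: the real validate_timezone in data_safe_haven.functions
# may differ; this checks membership in pytz's canonical zone list.
import pytz


def validate_timezone(timezone: str) -> str:
    if timezone not in pytz.all_timezones:
        msg = f"Invalid timezone '{timezone}'."
        raise ValueError(msg)
    return timezone


validate_timezone("Europe/London")  # passes silently
```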
+ raise DataSafeHavenConfigError(msg) diff --git a/data_safe_haven/commands/deploy_sre_command.py b/data_safe_haven/commands/deploy_sre_command.py index 2804db6d22..f1f99459a6 100644 --- a/data_safe_haven/commands/deploy_sre_command.py +++ b/data_safe_haven/commands/deploy_sre_command.py @@ -1,12 +1,10 @@ """Command-line application for deploying a Secure Research Environment from project files""" -# Standard library imports -from typing import Any, Dict, List, Optional +from typing import Any -# Local imports from data_safe_haven.config import Config from data_safe_haven.exceptions import ( - DataSafeHavenConfigException, - DataSafeHavenException, + DataSafeHavenConfigError, + DataSafeHavenError, ) from data_safe_haven.external import AzureApi, GraphApi from data_safe_haven.functions import alphanumeric, password @@ -20,18 +18,18 @@ class DeploySRECommand: def __init__(self): """Constructor""" - self._available_vm_skus: Dict[str, Dict[str, Any]] = {} + self._available_vm_skus: dict[str, dict[str, Any]] = {} self.logger = Logger() def __call__( self, name: str, - allow_copy: Optional[bool] = None, - allow_paste: Optional[bool] = None, - data_provider_ip_addresses: Optional[List[str]] = None, - research_desktops: Optional[List[str]] = None, - software_packages: Optional[SoftwarePackageCategory] = None, - user_ip_addresses: Optional[List[str]] = None, + allow_copy: bool | None = None, + allow_paste: bool | None = None, + data_provider_ip_addresses: list[str] | None = None, + research_desktops: list[str] | None = None, + software_packages: SoftwarePackageCategory | None = None, + user_ip_addresses: list[str] | None = None, ) -> None: """Typer command line entrypoint""" sre_name = "UNKNOWN" @@ -52,7 +50,8 @@ def __call__( user_ip_addresses=user_ip_addresses, ) - # Load GraphAPI as this may require user-interaction that is not possible as part of a Pulumi declarative command + # Load GraphAPI as this may require user-interaction that is not + # possible as part of a Pulumi declarative command graph_api = GraphApi( tenant_id=config.shm.aad_tenant_id, default_scopes=["Application.ReadWrite.All", "Group.ReadWrite.All"], @@ -62,94 +61,108 @@ def __call__( shm_stack = PulumiSHMStack(config) stack = PulumiSREStack(config, sre_name) # Set Azure options - stack.add_option("azure-native:location", config.azure.location) stack.add_option( - "azure-native:subscriptionId", config.azure.subscription_id + "azure-native:location", config.azure.location, replace=False + ) + stack.add_option( + "azure-native:subscriptionId", + config.azure.subscription_id, + replace=False, + ) + stack.add_option( + "azure-native:tenantId", config.azure.tenant_id, replace=False ) - stack.add_option("azure-native:tenantId", config.azure.tenant_id) # Load SHM stack outputs stack.add_option( "shm-domain_controllers-domain_sid", shm_stack.output("domain_controllers")["domain_sid"], - True, + replace=True, ) stack.add_option( "shm-domain_controllers-ldap_root_dn", shm_stack.output("domain_controllers")["ldap_root_dn"], - True, + replace=True, ) stack.add_option( "shm-domain_controllers-ldap_server_ip", shm_stack.output("domain_controllers")["ldap_server_ip"], - True, + replace=True, ) stack.add_option( "shm-domain_controllers-netbios_name", shm_stack.output("domain_controllers")["netbios_name"], - True, + replace=True, ) stack.add_option( "shm-monitoring-automation_account_name", shm_stack.output("monitoring")["automation_account_name"], - True, + replace=True, ) stack.add_option( "shm-monitoring-log_analytics_workspace_id", 
shm_stack.output("monitoring")["log_analytics_workspace_id"], - True, + replace=True, ) stack.add_secret( "shm-monitoring-log_analytics_workspace_key", shm_stack.output("monitoring")["log_analytics_workspace_key"], - True, + replace=True, ) stack.add_option( "shm-monitoring-resource_group_name", shm_stack.output("monitoring")["resource_group_name"], - True, + replace=True, ) stack.add_option( "shm-networking-private_dns_zone_base_id", shm_stack.output("networking")["private_dns_zone_base_id"], - True, + replace=True, ) stack.add_option( "shm-networking-resource_group_name", shm_stack.output("networking")["resource_group_name"], - True, + replace=True, ) stack.add_option( "shm-networking-subnet_identity_servers_prefix", shm_stack.output("networking")["subnet_identity_servers_prefix"], - True, + replace=True, ) stack.add_option( "shm-networking-subnet_subnet_monitoring_prefix", shm_stack.output("networking")["subnet_monitoring_prefix"], - True, + replace=True, ) stack.add_option( "shm-networking-subnet_update_servers_prefix", shm_stack.output("networking")["subnet_update_servers_prefix"], - True, + replace=True, ) stack.add_option( "shm-networking-virtual_network_name", shm_stack.output("networking")["virtual_network_name"], - True, + replace=True, ) stack.add_option( "shm-update_servers-ip_address_linux", shm_stack.output("update_servers")["ip_address_linux"], - True, + replace=True, ) # Add necessary secrets stack.copy_secret("password-domain-ldap-searcher", shm_stack) - stack.add_secret("password-gitea-database-admin", password(20)) - stack.add_secret("password-hedgedoc-database-admin", password(20)) - stack.add_secret("password-nexus-admin", password(20)) - stack.add_secret("password-user-database-admin", password(20)) - stack.add_secret("password-secure-research-desktop-admin", password(20)) + stack.add_secret( + "password-gitea-database-admin", password(20), replace=False + ) + stack.add_secret( + "password-hedgedoc-database-admin", password(20), replace=False + ) + stack.add_secret("password-nexus-admin", password(20), replace=False) + stack.add_secret( + "password-user-database-admin", password(20), replace=False + ) + stack.add_secret( + "password-secure-research-desktop-admin", password(20), replace=False + ) stack.add_secret("token-azuread-graphapi", graph_api.token, replace=True) # Deploy Azure infrastructure with Pulumi @@ -171,22 +184,20 @@ def __call__( timezone=config.shm.timezone, ) manager.run() - - except DataSafeHavenException as exc: - raise DataSafeHavenException( - f"Could not deploy Secure Research Environment {sre_name}.\n{str(exc)}" - ) from exc + except DataSafeHavenError as exc: + msg = f"Could not deploy Secure Research Environment {sre_name}.\n{exc}" + raise DataSafeHavenError(msg) from exc def update_config( self, sre_name, config: Config, - allow_copy: Optional[bool] = None, - allow_paste: Optional[bool] = None, - data_provider_ip_addresses: Optional[List[str]] = None, - research_desktops: Optional[List[str]] = None, - software_packages: Optional[SoftwarePackageCategory] = None, - user_ip_addresses: Optional[List[str]] = None, + allow_copy: bool | None = None, + allow_paste: bool | None = None, + data_provider_ip_addresses: list[str] | None = None, + research_desktops: list[str] | None = None, + software_packages: SoftwarePackageCategory | None = None, + user_ip_addresses: list[str] | None = None, ) -> None: # Create a config entry for this SRE if it does not exist if sre_name not in config.sres.keys(): @@ -202,13 +213,13 @@ def update_config( f"Overwriting 
existing text copying rule {config.sres[sre_name].remote_desktop.allow_copy}" ) self.logger.info( - f"Setting [bold]text copying out of SRE {sre_name}[/] to [green]{'allowed' if allow_copy else 'forbidden'}[/]." + f"Setting [bold]text copying out of SRE {sre_name}[/]" + f" to [green]{'allowed' if allow_copy else 'forbidden'}[/]." ) config.sres[sre_name].remote_desktop.allow_copy = allow_copy if config.sres[sre_name].remote_desktop.allow_copy is None: - raise DataSafeHavenConfigException( - "No text copying rule was found. Use [bright_cyan]'--allow-copy / -c'[/] to set one." - ) + msg = "No text copying rule was found. Use [bright_cyan]'--allow-copy / -c'[/] to set one." + raise DataSafeHavenConfigError(msg) # Set whether pasting text into the SRE is allowed if allow_paste is not None: @@ -219,13 +230,13 @@ def update_config( f"Overwriting existing text pasting rule {config.sres[sre_name].remote_desktop.allow_paste}" ) self.logger.info( - f"Setting [bold]text pasting into SRE {sre_name}[/] to [green]{'allowed' if allow_paste else 'forbidden'}[/]." + f"Setting [bold]text pasting into SRE {sre_name}[/]" + f" to [green]{'allowed' if allow_paste else 'forbidden'}[/]." ) config.sres[sre_name].remote_desktop.allow_paste = allow_paste if config.sres[sre_name].remote_desktop.allow_paste is None: - raise DataSafeHavenConfigException( - "No text pasting rule was found. Use [bright_cyan]'--allow-paste / -p'[/] to set one." - ) + msg = "No text pasting rule was found. Use [bright_cyan]'--allow-paste / -p'[/] to set one." + raise DataSafeHavenConfigError(msg) # Set data provider IP addresses if data_provider_ip_addresses: @@ -234,7 +245,8 @@ def update_config( != data_provider_ip_addresses ): self.logger.debug( - f"Overwriting existing data provider IP addresses {config.sres[sre_name].data_provider_ip_addresses}" + "Overwriting existing data provider IP addresses" + f" {config.sres[sre_name].data_provider_ip_addresses}" ) self.logger.info( f"Setting [bold]data provider IP addresses[/] to [green]{data_provider_ip_addresses}[/]." @@ -243,9 +255,11 @@ def update_config( sre_name ].data_provider_ip_addresses = data_provider_ip_addresses if len(config.sres[sre_name].data_provider_ip_addresses) == 0: - raise DataSafeHavenConfigException( - "No data provider IP addresses were found. Use [bright_cyan]'--data-provider-ip-address / -d'[/] to set one." + msg = ( + "No data provider IP addresses were found." + " Use [bright_cyan]'--data-provider-ip-address / -d'[/] to set one." ) + raise DataSafeHavenConfigError(msg) # Set research desktops if research_desktops: @@ -271,9 +285,8 @@ def update_config( config.sres[sre_name].add_research_desktop(vm_name) config.sres[sre_name].research_desktops[vm_name].sku = vm_sku if len(config.sres[sre_name].research_desktops) == 0: - raise DataSafeHavenConfigException( - "No research desktops were found. Use [bright_cyan]'--research-desktop / -r'[/] to add one." - ) + msg = "No research desktops were found. Use [bright_cyan]'--research-desktop / -r'[/] to add one." + raise DataSafeHavenConfigError(msg) # Select which software packages can be installed by users if software_packages is not None: @@ -284,13 +297,13 @@ def update_config( f"Overwriting existing software package rule {config.sres[sre_name].software_packages}" ) self.logger.info( - f"Setting [bold]allowed software packages in SRE {sre_name}[/] to [green]{'allowed' if software_packages else 'forbidden'}[/]." 
+ f"Setting [bold]allowed software packages in SRE {sre_name}[/]" + f" to [green]{'allowed' if software_packages else 'forbidden'}[/]." ) config.sres[sre_name].software_packages = software_packages if not config.sres[sre_name].software_packages: - raise DataSafeHavenConfigException( - "No software package rule was found. Use [bright_cyan]'--software-packages / -s'[/] to set one." - ) + msg = "No software package rule was found. Use [bright_cyan]'--software-packages / -s'[/] to set one." + raise DataSafeHavenConfigError(msg) # Set user IP addresses if user_ip_addresses: @@ -305,11 +318,10 @@ def update_config( ) config.sres[sre_name].research_user_ip_addresses = user_ip_addresses if len(config.sres[sre_name].research_user_ip_addresses) == 0: - raise DataSafeHavenConfigException( - "No user IP addresses were found. Use [bright_cyan]'--user-ip-address / -u'[/] to set one." - ) + msg = "No user IP addresses were found. Use [bright_cyan]'--user-ip-address / -u'[/] to set one." + raise DataSafeHavenConfigError(msg) - def available_vm_skus(self, config: Config) -> Dict[str, Dict[str, Any]]: + def available_vm_skus(self, config: Config) -> dict[str, dict[str, Any]]: """Load available VM SKUs for this region""" if not self._available_vm_skus: azure_api = AzureApi(config.subscription_name) diff --git a/data_safe_haven/commands/initialise_command.py b/data_safe_haven/commands/initialise_command.py index cfdb1b9e08..6608160e31 100644 --- a/data_safe_haven/commands/initialise_command.py +++ b/data_safe_haven/commands/initialise_command.py @@ -1,11 +1,7 @@ """Command-line application for initialising a Data Safe Haven deployment""" -# Standard library imports -from typing import Optional - -# Local imports from data_safe_haven.backend import Backend from data_safe_haven.config import BackendSettings -from data_safe_haven.exceptions import DataSafeHavenException +from data_safe_haven.exceptions import DataSafeHavenError from data_safe_haven.utility import Logger @@ -18,10 +14,10 @@ def __init__(self): def __call__( self, - admin_group: Optional[str] = None, - location: Optional[str] = None, - name: Optional[str] = None, - subscription: Optional[str] = None, + admin_group: str | None = None, + location: str | None = None, + name: str | None = None, + subscription: str | None = None, ) -> None: """Typer command line entrypoint""" try: @@ -41,7 +37,6 @@ def __call__( # Load the generated configuration file and upload it to blob storage backend.config.upload() - except DataSafeHavenException as exc: - raise DataSafeHavenException( - f"Could not initialise Data Safe Haven.\n{str(exc)}" - ) from exc + except DataSafeHavenError as exc: + msg = f"Could not initialise Data Safe Haven.\n{exc}" + raise DataSafeHavenError(msg) from exc diff --git a/data_safe_haven/commands/teardown_backend_command.py b/data_safe_haven/commands/teardown_backend_command.py index eacc46a499..9fcc680ed6 100644 --- a/data_safe_haven/commands/teardown_backend_command.py +++ b/data_safe_haven/commands/teardown_backend_command.py @@ -1,9 +1,8 @@ """Command-line application for tearing down a Data Safe Haven""" -# Local imports from data_safe_haven.backend import Backend from data_safe_haven.exceptions import ( - DataSafeHavenException, - DataSafeHavenInputException, + DataSafeHavenError, + DataSafeHavenInputError, ) @@ -18,10 +17,8 @@ def __call__(self) -> None: backend = Backend() backend.teardown() except Exception as exc: - raise DataSafeHavenInputException( - f"Unable to teardown Pulumi backend.\n{str(exc)}" - ) from exc - except 
DataSafeHavenException as exc: - raise DataSafeHavenException( - f"Could not teardown Data Safe Haven backend.\n{str(exc)}" - ) from exc + msg = f"Unable to teardown Pulumi backend.\n{exc}" + raise DataSafeHavenInputError(msg) from exc + except DataSafeHavenError as exc: + msg = f"Could not teardown Data Safe Haven backend.\n{exc}" + raise DataSafeHavenError(msg) from exc diff --git a/data_safe_haven/commands/teardown_shm_command.py b/data_safe_haven/commands/teardown_shm_command.py index 45d33e402e..bea698fa4e 100644 --- a/data_safe_haven/commands/teardown_shm_command.py +++ b/data_safe_haven/commands/teardown_shm_command.py @@ -1,9 +1,8 @@ """Command-line application for tearing down a Data Safe Haven""" -# Local imports from data_safe_haven.config import Config from data_safe_haven.exceptions import ( - DataSafeHavenException, - DataSafeHavenInputException, + DataSafeHavenError, + DataSafeHavenInputError, ) from data_safe_haven.pulumi import PulumiSHMStack @@ -22,9 +21,8 @@ def __call__(self) -> None: stack = PulumiSHMStack(config) stack.teardown() except Exception as exc: - raise DataSafeHavenInputException( - f"Unable to teardown Pulumi infrastructure.\n{str(exc)}" - ) from exc + msg = f"Unable to teardown Pulumi infrastructure.\n{exc}" + raise DataSafeHavenInputError(msg) from exc # Remove information from config file if stack.stack_name in config.pulumi.stacks.keys(): @@ -32,7 +30,6 @@ def __call__(self) -> None: # Upload config to blob storage config.upload() - except DataSafeHavenException as exc: - raise DataSafeHavenException( - f"Could not teardown Safe Haven Management component.\n{str(exc)}" - ) from exc + except DataSafeHavenError as exc: + msg = f"Could not teardown Safe Haven Management component.\n{exc}" + raise DataSafeHavenError(msg) from exc diff --git a/data_safe_haven/commands/teardown_sre_command.py b/data_safe_haven/commands/teardown_sre_command.py index f20cffa9e6..17a2128b2a 100644 --- a/data_safe_haven/commands/teardown_sre_command.py +++ b/data_safe_haven/commands/teardown_sre_command.py @@ -1,9 +1,8 @@ """Command-line application for tearing down a Secure Research Environment""" -# Local imports from data_safe_haven.config import Config from data_safe_haven.exceptions import ( - DataSafeHavenException, - DataSafeHavenInputException, + DataSafeHavenError, + DataSafeHavenInputError, ) from data_safe_haven.functions import alphanumeric from data_safe_haven.pulumi import PulumiSREStack @@ -32,13 +31,13 @@ def __call__( if stack.work_dir.exists(): stack.teardown() else: - raise DataSafeHavenInputException( + msg = ( f"SRE {sre_name} not found - check the name is spelt correctly." 
) + raise DataSafeHavenInputError(msg) except Exception as exc: - raise DataSafeHavenInputException( - f"Unable to teardown Pulumi infrastructure.\n{str(exc)}" - ) from exc + msg = f"Unable to teardown Pulumi infrastructure.\n{exc}" + raise DataSafeHavenInputError(msg) from exc # Remove information from config file config.remove_stack(stack.stack_name) @@ -46,7 +45,6 @@ def __call__( # Upload config to blob storage config.upload() - except DataSafeHavenException as exc: - raise DataSafeHavenException( - f"Could not teardown Data Safe Haven '{environment_name}'.\n{str(exc)}" - ) from exc + except DataSafeHavenError as exc: + msg = f"Could not teardown Data Safe Haven '{environment_name}'.\n{exc}" + raise DataSafeHavenError(msg) from exc diff --git a/data_safe_haven/commands/typer_admin.py b/data_safe_haven/commands/typer_admin.py index a6512d53ca..bc6a86252b 100644 --- a/data_safe_haven/commands/typer_admin.py +++ b/data_safe_haven/commands/typer_admin.py @@ -1,12 +1,9 @@ -"""Command-line application for performing administrative tasks for a Data Safe Haven deployment, delegating the details to a subcommand""" -# Standard library imports +"""Command-line application for performing administrative tasks""" import pathlib -from typing import Annotated, List +from typing import Annotated -# Third party imports import typer -# Local imports from .users_add_command import UsersAddCommand from .users_list_command import UsersListCommand from .users_register_command import UsersRegisterCommand @@ -38,7 +35,7 @@ def list_users() -> None: ) def register_users( usernames: Annotated[ - List[str], + list[str], typer.Option( "--username", "-u", @@ -60,7 +57,7 @@ def register_users( ) def remove_users( usernames: Annotated[ - List[str], + list[str], typer.Option( "--username", "-u", @@ -74,7 +71,7 @@ def remove_users( @admin_command_group.command(help="Unregister existing users from a deployed SRE.") def unregister_users( usernames: Annotated[ - List[str], + list[str], typer.Option( "--username", "-u", diff --git a/data_safe_haven/commands/typer_deploy.py b/data_safe_haven/commands/typer_deploy.py index 00127e53d4..16ab2d6160 100644 --- a/data_safe_haven/commands/typer_deploy.py +++ b/data_safe_haven/commands/typer_deploy.py @@ -1,11 +1,8 @@ """Command-line application for deploying a Data Safe Haven component, delegating the details to a subcommand""" -# Standard library imports -from typing import Annotated, List, Optional +from typing import Annotated, Optional -# Third party imports import typer -# Local imports from data_safe_haven.functions import ( validate_aad_guid, validate_azure_vm_sku, @@ -14,6 +11,7 @@ validate_timezone, ) from data_safe_haven.utility import SoftwarePackageCategory + from .deploy_shm_command import DeploySHMCommand from .deploy_sre_command import DeploySRECommand @@ -23,16 +21,19 @@ @deploy_command_group.command() def shm( aad_tenant_id: Annotated[ - Optional[str], + Optional[str], # noqa: UP007 typer.Option( "--aad-tenant-id", "-a", - help="The tenant ID for the AzureAD where users will be created, for example '10de18e7-b238-6f1e-a4ad-772708929203'.", + help=( + "The tenant ID for the AzureAD where users will be created," + " for example '10de18e7-b238-6f1e-a4ad-772708929203'." 
+ ), callback=validate_aad_guid, ), ] = None, admin_email_address: Annotated[ - Optional[str], + Optional[str], # noqa: UP007 typer.Option( "--email", "-e", @@ -41,16 +42,19 @@ def shm( ), ] = None, admin_ip_addresses: Annotated[ - Optional[List[str]], + Optional[list[str]], # noqa: UP007 typer.Option( "--ip-address", "-i", - help="An IP address or range used by your system deployers and administrators. [*may be specified several times*]", + help=( + "An IP address or range used by your system deployers and administrators." + " [*may be specified several times*]" + ), callback=lambda ips: [validate_ip_address(ip) for ip in ips], ), ] = None, fqdn: Annotated[ - Optional[str], + Optional[str], # noqa: UP007 typer.Option( "--fqdn", "-f", @@ -58,7 +62,7 @@ def shm( ), ] = None, timezone: Annotated[ - Optional[str], + Optional[str], # noqa: UP007 typer.Option( "--timezone", "-t", @@ -81,7 +85,7 @@ def shm( def sre( name: Annotated[str, typer.Argument(help="Name of SRE to deploy")], allow_copy: Annotated[ - Optional[bool], + Optional[bool], # noqa: UP007 typer.Option( "--allow-copy", "-c", @@ -89,7 +93,7 @@ def sre( ), ] = None, allow_paste: Annotated[ - Optional[bool], + Optional[bool], # noqa: UP007 typer.Option( "--allow-paste", "-p", @@ -97,7 +101,7 @@ def sre( ), ] = None, data_provider_ip_addresses: Annotated[ - Optional[List[str]], + Optional[list[str]], # noqa: UP007 typer.Option( "--data-provider-ip-address", "-d", @@ -106,16 +110,19 @@ def sre( ), ] = None, research_desktops: Annotated[ - Optional[List[str]], + Optional[list[str]], # noqa: UP007 typer.Option( "--research-desktop", "-r", - help="A virtual machine SKU to make available to your users as a research desktop. [*may be specified several times*]", + help=( + "A virtual machine SKU to make available to your users as a research desktop." 
+ " [*may be specified several times*]" + ), callback=lambda ips: [validate_azure_vm_sku(ip) for ip in ips], ), ] = None, software_packages: Annotated[ - Optional[SoftwarePackageCategory], + Optional[SoftwarePackageCategory], # noqa: UP007 typer.Option( "--software-packages", "-s", @@ -123,7 +130,7 @@ def sre( ), ] = None, user_ip_addresses: Annotated[ - Optional[List[str]], + Optional[list[str]], # noqa: UP007 typer.Option( "--user-ip-address", "-u", diff --git a/data_safe_haven/commands/typer_init.py b/data_safe_haven/commands/typer_init.py index 90af97fee4..3980668e2b 100644 --- a/data_safe_haven/commands/typer_init.py +++ b/data_safe_haven/commands/typer_init.py @@ -1,18 +1,16 @@ """Command-line application for initialising a Data Safe Haven deployment""" -# Standard library imports from typing import Annotated, Optional -# Third party imports import typer -# Local imports from data_safe_haven.functions import validate_aad_guid + from .initialise_command import InitialiseCommand def initialise_command( admin_group: Annotated[ - Optional[str], + Optional[str], # noqa: UP007 typer.Option( "--admin-group", "-a", @@ -21,7 +19,7 @@ def initialise_command( ), ] = None, location: Annotated[ - Optional[str], + Optional[str], # noqa: UP007 typer.Option( "--location", "-l", @@ -29,7 +27,7 @@ def initialise_command( ), ] = None, name: Annotated[ - Optional[str], + Optional[str], # noqa: UP007 typer.Option( "--name", "-n", @@ -37,7 +35,7 @@ def initialise_command( ), ] = None, subscription: Annotated[ - Optional[str], + Optional[str], # noqa: UP007 typer.Option( "--subscription", "-s", diff --git a/data_safe_haven/commands/typer_teardown.py b/data_safe_haven/commands/typer_teardown.py index 3edc50a514..cd8dac648a 100644 --- a/data_safe_haven/commands/typer_teardown.py +++ b/data_safe_haven/commands/typer_teardown.py @@ -1,11 +1,8 @@ """Command-line application for tearing down a Data Safe Haven component, delegating the details to a subcommand""" -# Standard library imports from typing import Annotated -# Third party imports import typer -# Local imports from .teardown_backend_command import TeardownBackendCommand from .teardown_shm_command import TeardownSHMCommand from .teardown_sre_command import TeardownSRECommand diff --git a/data_safe_haven/commands/users_add_command.py b/data_safe_haven/commands/users_add_command.py index 2d0cf06b1a..e8c8fee4c1 100644 --- a/data_safe_haven/commands/users_add_command.py +++ b/data_safe_haven/commands/users_add_command.py @@ -1,12 +1,10 @@ """Command-line application for initialising a Data Safe Haven deployment""" -# Standard library imports import pathlib -# Local imports from data_safe_haven.administration.users import UserHandler from data_safe_haven.config import Config from data_safe_haven.exceptions import ( - DataSafeHavenException, + DataSafeHavenError, ) from data_safe_haven.external import GraphApi @@ -22,7 +20,8 @@ def __call__(self, csv_path: pathlib.Path) -> None: config = Config() shm_name = config.name - # Load GraphAPI as this may require user-interaction that is not possible as part of a Pulumi declarative command + # Load GraphAPI as this may require user-interaction that is not + # possible as part of a Pulumi declarative command graph_api = GraphApi( tenant_id=config.shm.aad_tenant_id, default_scopes=["Group.Read.All"], @@ -31,7 +30,6 @@ def __call__(self, csv_path: pathlib.Path) -> None: # Add users to SHM users = UserHandler(config, graph_api) users.add(csv_path) - except DataSafeHavenException as exc: - raise DataSafeHavenException( - 
f"Could not add users to Data Safe Haven '{shm_name}'.\n{str(exc)}" - ) from exc + except DataSafeHavenError as exc: + msg = f"Could not add users to Data Safe Haven '{shm_name}'.\n{exc}" + raise DataSafeHavenError(msg) from exc diff --git a/data_safe_haven/commands/users_list_command.py b/data_safe_haven/commands/users_list_command.py index 38e9263455..c429511459 100644 --- a/data_safe_haven/commands/users_list_command.py +++ b/data_safe_haven/commands/users_list_command.py @@ -1,10 +1,7 @@ """Command-line application for initialising a Data Safe Haven deployment""" -# Local imports from data_safe_haven.administration.users import UserHandler from data_safe_haven.config import Config -from data_safe_haven.exceptions import ( - DataSafeHavenException, -) +from data_safe_haven.exceptions import DataSafeHavenError from data_safe_haven.external import GraphApi @@ -18,7 +15,8 @@ def __call__(self) -> None: config = Config() shm_name = config.name - # Load GraphAPI as this may require user-interaction that is not possible as part of a Pulumi declarative command + # Load GraphAPI as this may require user-interaction that is not + # possible as part of a Pulumi declarative command graph_api = GraphApi( tenant_id=config.shm.aad_tenant_id, default_scopes=["Directory.Read.All", "Group.Read.All"], @@ -27,7 +25,6 @@ def __call__(self) -> None: # List users from all sources users = UserHandler(config, graph_api) users.list() - except DataSafeHavenException as exc: - raise DataSafeHavenException( - f"Could not list users for Data Safe Haven '{shm_name}'.\n{str(exc)}" - ) from exc + except DataSafeHavenError as exc: + msg = f"Could not list users for Data Safe Haven '{shm_name}'.\n{exc}" + raise DataSafeHavenError(msg) from exc diff --git a/data_safe_haven/commands/users_register_command.py b/data_safe_haven/commands/users_register_command.py index 3d13c03f91..df10703131 100644 --- a/data_safe_haven/commands/users_register_command.py +++ b/data_safe_haven/commands/users_register_command.py @@ -1,13 +1,7 @@ """Command-line application for initialising a Data Safe Haven deployment""" -# Standard library imports -from typing import List - -# Local imports from data_safe_haven.administration.users import UserHandler from data_safe_haven.config import Config -from data_safe_haven.exceptions import ( - DataSafeHavenException, -) +from data_safe_haven.exceptions import DataSafeHavenError from data_safe_haven.external import GraphApi from data_safe_haven.functions import alphanumeric from data_safe_haven.utility import Logger @@ -22,7 +16,7 @@ def __init__(self): def __call__( self, - usernames: List[str], + usernames: list[str], sre: str, ) -> None: shm_name = "UNKNOWN" @@ -37,12 +31,14 @@ def __call__( # Check that SRE option has been provided if not sre_name: - raise DataSafeHavenException("SRE name must be specified.") + msg = "SRE name must be specified." + raise DataSafeHavenError(msg) self.logger.info( f"Preparing to register {len(usernames)} users with SRE '{sre_name}'" ) - # Load GraphAPI as this may require user-interaction that is not possible as part of a Pulumi declarative command + # Load GraphAPI as this may require user-interaction that is not + # possible as part of a Pulumi declarative command graph_api = GraphApi( tenant_id=config.shm.aad_tenant_id, default_scopes=["Group.Read.All"], @@ -57,10 +53,10 @@ def __call__( usernames_to_register.append(username) else: self.logger.error( - f"Username '{username}' does not belong to this Data Safe Haven deployment. 
Please use 'dsh users add' to create it." + f"Username '{username}' does not belong to this Data Safe Haven deployment." + " Please use 'dsh users add' to create it." ) users.register(sre_name, usernames_to_register) - except DataSafeHavenException as exc: - raise DataSafeHavenException( - f"Could not register users from Data Safe Haven '{shm_name}' with SRE '{sre_name}'.\n{str(exc)}" - ) from exc + except DataSafeHavenError as exc: + msg = f"Could not register users from Data Safe Haven '{shm_name}' with SRE '{sre_name}'.\n{exc}" + raise DataSafeHavenError(msg) from exc diff --git a/data_safe_haven/commands/users_remove_command.py b/data_safe_haven/commands/users_remove_command.py index 595b652c5c..0d445bbee5 100644 --- a/data_safe_haven/commands/users_remove_command.py +++ b/data_safe_haven/commands/users_remove_command.py @@ -1,13 +1,7 @@ """Command-line application for initialising a Data Safe Haven deployment""" -# Standard library imports -from typing import List - -# Local imports from data_safe_haven.administration.users import UserHandler from data_safe_haven.config import Config -from data_safe_haven.exceptions import ( - DataSafeHavenException, -) +from data_safe_haven.exceptions import DataSafeHavenError from data_safe_haven.external import GraphApi from data_safe_haven.utility import Logger @@ -21,7 +15,7 @@ def __init__(self): def __call__( self, - usernames: List[str], + usernames: list[str], ) -> None: shm_name = "UNKNOWN" try: @@ -29,7 +23,8 @@ def __call__( config = Config() shm_name = config.name - # Load GraphAPI as this may require user-interaction that is not possible as part of a Pulumi declarative command + # Load GraphAPI as this may require user-interaction that is not + # possible as part of a Pulumi declarative command graph_api = GraphApi( tenant_id=config.shm.aad_tenant_id, default_scopes=["Group.Read.All"], @@ -39,10 +34,10 @@ def __call__( if usernames: users = UserHandler(config, graph_api) users.remove(usernames) - except DataSafeHavenException as exc: + except DataSafeHavenError as exc: for ( line - ) in f"Could not remove users from Data Safe Haven '{shm_name}'.\n{str(exc)}".split( + ) in f"Could not remove users from Data Safe Haven '{shm_name}'.\n{exc}".split( "\n" ): self.logger.error(line) diff --git a/data_safe_haven/commands/users_unregister_command.py b/data_safe_haven/commands/users_unregister_command.py index f6359c7c03..9b324e4927 100644 --- a/data_safe_haven/commands/users_unregister_command.py +++ b/data_safe_haven/commands/users_unregister_command.py @@ -1,13 +1,7 @@ """Command-line application for initialising a Data Safe Haven deployment""" -# Standard library imports -from typing import List - -# Local imports from data_safe_haven.administration.users import UserHandler from data_safe_haven.config import Config -from data_safe_haven.exceptions import ( - DataSafeHavenException, -) +from data_safe_haven.exceptions import DataSafeHavenError from data_safe_haven.external import GraphApi from data_safe_haven.functions import alphanumeric from data_safe_haven.utility import Logger @@ -22,7 +16,7 @@ def __init__(self): def __call__( self, - usernames: List[str], + usernames: list[str], sre: str, ) -> None: shm_name = "UNKNOWN" @@ -37,12 +31,14 @@ def __call__( # Check that SRE option has been provided if not sre_name: - raise DataSafeHavenException("SRE name must be specified.") + msg = "SRE name must be specified." 
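
# A minimal sketch of the message-then-raise convention adopted throughout
# this migration: ruff's EM101/EM102 rules disallow string literals and
# f-strings directly inside `raise`, so the message is bound to `msg` first,
# and any caught exception is chained with `from exc`. DataSafeHavenError
# mirrors data_safe_haven.exceptions; `require_sre_name` is a hypothetical
# helper, not project code.
class DataSafeHavenError(Exception):
    pass


def require_sre_name(sre_name: str | None) -> str:
    if not sre_name:
        msg = "SRE name must be specified."
        raise DataSafeHavenError(msg)
    return sre_name
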
+ raise DataSafeHavenError(msg) self.logger.info( f"Preparing to unregister {len(usernames)} users with SRE '{sre_name}'" ) - # Load GraphAPI as this may require user-interaction that is not possible as part of a Pulumi declarative command + # Load GraphAPI as this may require user-interaction that is not + # possible as part of a Pulumi declarative command graph_api = GraphApi( tenant_id=config.shm.aad_tenant_id, default_scopes=["Group.Read.All"], @@ -57,10 +53,10 @@ def __call__( usernames_to_unregister.append(username) else: self.logger.error( - f"Username '{username}' does not belong to this Data Safe Haven deployment. Please use 'dsh users add' to create it." + f"Username '{username}' does not belong to this Data Safe Haven deployment." + " Please use 'dsh users add' to create it." ) users.unregister(sre_name, usernames_to_unregister) - except DataSafeHavenException as exc: - raise DataSafeHavenException( - f"Could not unregister users from Data Safe Haven '{shm_name}' with SRE '{sre_name}'.\n{str(exc)}" - ) from exc + except DataSafeHavenError as exc: + msg = f"Could not unregister users from Data Safe Haven '{shm_name}' with SRE '{sre_name}'.\n{exc}" + raise DataSafeHavenError(msg) from exc diff --git a/data_safe_haven/config/__init__.py b/data_safe_haven/config/__init__.py index 046f7ca0e6..d7132460da 100644 --- a/data_safe_haven/config/__init__.py +++ b/data_safe_haven/config/__init__.py @@ -1,5 +1,5 @@ -from .config import Config from .backend_settings import BackendSettings +from .config import Config __all__ = [ "BackendSettings", diff --git a/data_safe_haven/config/backend_settings.py b/data_safe_haven/config/backend_settings.py index 6129ff2881..2c223c364a 100644 --- a/data_safe_haven/config/backend_settings.py +++ b/data_safe_haven/config/backend_settings.py @@ -1,17 +1,13 @@ """Load global and local settings from dotfiles""" -# Standard library imports import pathlib -from typing import Optional -# Third party imports import appdirs import yaml from yaml.parser import ParserError -# Local imports from data_safe_haven.exceptions import ( - DataSafeHavenParameterException, - DataSafeHavenConfigException, + DataSafeHavenConfigError, + DataSafeHavenParameterError, ) from data_safe_haven.utility import Logger @@ -31,10 +27,10 @@ def __init__( self, ) -> None: # Define instance variables - self._admin_group_id: Optional[str] = None - self._location: Optional[str] = None - self._name: Optional[str] = None - self._subscription_name: Optional[str] = None + self._admin_group_id: str | None = None + self._location: str | None = None + self._name: str | None = None + self._subscription_name: str | None = None self.logger = Logger() # Load previous backend settings (if any) @@ -47,10 +43,10 @@ def __init__( def update( self, *, - admin_group_id: Optional[str] = None, - location: Optional[str] = None, - name: Optional[str] = None, - subscription_name: Optional[str] = None, + admin_group_id: str | None = None, + location: str | None = None, + name: str | None = None, + subscription_name: str | None = None, ) -> None: """Overwrite defaults with provided parameters""" if admin_group_id: @@ -76,40 +72,39 @@ def update( @property def admin_group_id(self) -> str: if not self._admin_group_id: - raise DataSafeHavenParameterException( - "Azure administrator group not provided: use '[bright_cyan]--admin-group[/]' / '[green]-a[/]' to do so." - ) + msg = "Azure administrator group not provided: use '[bright_cyan]--admin-group[/]' / '[green]-a[/]' to do so." 
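
# Hedged sketch of the typing change applied across these files: ordinary
# annotations move to PEP 604 unions (str | None), while typer-annotated CLI
# options keep typing.Optional under `# noqa: UP007`, because typer inspects
# the annotation at runtime and, at the time of this migration, did not
# accept the `X | None` form there. The option below is illustrative, not
# one of the real commands.
from typing import Annotated, Optional

import typer


def example_command(
    name: Annotated[
        Optional[str],  # noqa: UP007
        typer.Option("--name", "-n", help="Name for this deployment."),
    ] = None,
) -> None:
    chosen: str | None = name  # plain annotations can use the new syntax
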
+ raise DataSafeHavenParameterError(msg) return self._admin_group_id @property def location(self) -> str: if not self._location: - raise DataSafeHavenParameterException( - "Azure location not provided: use '[bright_cyan]--location[/]' / '[green]-l[/]' to do so." - ) + msg = "Azure location not provided: use '[bright_cyan]--location[/]' / '[green]-l[/]' to do so." + raise DataSafeHavenParameterError(msg) return self._location @property def name(self) -> str: if not self._name: - raise DataSafeHavenParameterException( - "Data Safe Haven deployment name not provided: use '[bright_cyan]--deployment-name[/]' / '[green]-d[/]' to do so." + msg = ( + "Data Safe Haven deployment name not provided:" + " use '[bright_cyan]--deployment-name[/]' / '[green]-d[/]' to do so." ) + raise DataSafeHavenParameterError(msg) return self._name @property def subscription_name(self) -> str: if not self._subscription_name: - raise DataSafeHavenParameterException( - "Azure subscription not provided: use '[bright_cyan]--subscription[/]' / '[green]-s[/]' to do so." - ) + msg = "Azure subscription not provided: use '[bright_cyan]--subscription[/]' / '[green]-s[/]' to do so." + raise DataSafeHavenParameterError(msg) return self._subscription_name def read(self) -> None: """Read settings from YAML file""" try: if self.config_file_path.exists(): - with open(self.config_file_path, "r", encoding="utf-8") as f_yaml: + with open(self.config_file_path, encoding="utf-8") as f_yaml: settings = yaml.safe_load(f_yaml) if isinstance(settings, dict): self.logger.info( @@ -128,9 +123,8 @@ def read(self) -> None: ): self._subscription_name = subscription_name except ParserError as exc: - raise DataSafeHavenConfigException( - f"Could not load settings from {self.config_file_path}.\n{str(exc)}" - ) from exc + msg = f"Could not load settings from {self.config_file_path}.\n{exc}" + raise DataSafeHavenConfigError(msg) from exc def write(self) -> None: """Write settings to YAML file""" diff --git a/data_safe_haven/config/config.py b/data_safe_haven/config/config.py index 21beca9be5..11d9e32a43 100644 --- a/data_safe_haven/config/config.py +++ b/data_safe_haven/config/config.py @@ -1,19 +1,16 @@ """Configuration file backed by blob storage""" -# Standard library imports import pathlib from collections import defaultdict from contextlib import suppress from dataclasses import dataclass, field -from typing import Any, Dict, List, Optional +from typing import Any -# Third party imports import chili import yaml from yaml.parser import ParserError -# Local imports from data_safe_haven import __version__ -from data_safe_haven.exceptions import DataSafeHavenAzureException +from data_safe_haven.exceptions import DataSafeHavenAzureError from data_safe_haven.external import AzureApi from data_safe_haven.functions import ( alphanumeric, @@ -28,6 +25,7 @@ validate_timezone, ) from data_safe_haven.utility import SoftwarePackageCategory + from .backend_settings import BackendSettings @@ -43,29 +41,27 @@ def validate(self) -> None: try: validate_aad_guid(self.admin_group_id) except Exception as exc: - raise ValueError( - f"Invalid value for 'admin_group_id' ({self.admin_group_id}).\n{str(exc)}" - ) + msg = f"Invalid value for 'admin_group_id' ({self.admin_group_id}).\n{exc}" + raise ValueError(msg) from exc try: validate_azure_location(self.location) except Exception as exc: - raise ValueError( - f"Invalid value for 'location' ({self.location}).\n{str(exc)}" - ) + msg = f"Invalid value for 'location' ({self.location}).\n{exc}" + raise ValueError(msg) from 
exc try: validate_aad_guid(self.subscription_id) except Exception as exc: - raise ValueError( - f"Invalid value for 'subscription_id' ({self.subscription_id}).\n{str(exc)}" + msg = ( + f"Invalid value for 'subscription_id' ({self.subscription_id}).\n{exc}" ) + raise ValueError(msg) from exc try: validate_aad_guid(self.tenant_id) except Exception as exc: - raise ValueError( - f"Invalid value for 'tenant_id' ({self.tenant_id}).\n{str(exc)}" - ) + msg = f"Invalid value for 'tenant_id' ({self.tenant_id}).\n{exc}" + raise ValueError(msg) from exc - def to_dict(self) -> Dict[str, str]: + def to_dict(self) -> dict[str, str]: self.validate() return as_dict(chili.encode(self)) @@ -81,27 +77,24 @@ class ConfigSectionBackend: def validate(self) -> None: """Validate input parameters""" if not self.key_vault_name: - raise ValueError( - f"Invalid value for 'key_vault_name' ({self.key_vault_name})." - ) + msg = f"Invalid value for 'key_vault_name' ({self.key_vault_name})." + raise ValueError(msg) if not self.managed_identity_name: - raise ValueError( - f"Invalid value for 'managed_identity_name' ({self.managed_identity_name})." - ) + msg = f"Invalid value for 'managed_identity_name' ({self.managed_identity_name})." + raise ValueError(msg) if not self.resource_group_name: - raise ValueError( + msg = ( f"Invalid value for 'resource_group_name' ({self.resource_group_name})." ) + raise ValueError(msg) if not self.storage_account_name: - raise ValueError( - f"Invalid value for 'storage_account_name' ({self.storage_account_name})." - ) + msg = f"Invalid value for 'storage_account_name' ({self.storage_account_name})." + raise ValueError(msg) if not self.storage_container_name: - raise ValueError( - f"Invalid value for 'storage_container_name' ({self.storage_container_name})." - ) + msg = f"Invalid value for 'storage_container_name' ({self.storage_container_name})." + raise ValueError(msg) - def to_dict(self) -> Dict[str, str]: + def to_dict(self) -> dict[str, str]: self.validate() return as_dict(chili.encode(self)) @@ -110,17 +103,16 @@ def to_dict(self) -> Dict[str, str]: class ConfigSectionPulumi: encryption_key_id: str = "" encryption_key_name: str = "pulumi-encryption-key" - stacks: Dict[str, str] = field(default_factory=dict) + stacks: dict[str, str] = field(default_factory=dict) storage_container_name: str = "pulumi" def validate(self) -> None: """Validate input parameters""" if not isinstance(self.encryption_key_id, str) or not self.encryption_key_id: - raise ValueError( - f"Invalid value for 'encryption_key_id' ({self.encryption_key_id})." - ) + msg = f"Invalid value for 'encryption_key_id' ({self.encryption_key_id})." 
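
# Sketch of the validate-then-serialise convention shared by the config
# section dataclasses: to_dict() always calls validate() before encoding.
# The real code encodes with chili plus the project's as_dict helper;
# dataclasses.asdict stands in for that pair here, and the section below is
# a stand-in, not the real ConfigSectionPulumi.
from dataclasses import asdict, dataclass, field
from typing import Any


@dataclass
class PulumiSectionSketch:
    encryption_key_name: str = "pulumi-encryption-key"
    stacks: dict[str, str] = field(default_factory=dict)

    def validate(self) -> None:
        if not self.encryption_key_name:
            msg = f"Invalid value for 'encryption_key_name' ({self.encryption_key_name})."
            raise ValueError(msg)

    def to_dict(self) -> dict[str, Any]:
        self.validate()
        return asdict(self)
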
+ raise ValueError(msg) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: self.validate() return as_dict(chili.encode(self)) @@ -129,7 +121,7 @@ def to_dict(self) -> Dict[str, Any]: class ConfigSectionSHM: aad_tenant_id: str = "" admin_email_address: str = "" - admin_ip_addresses: List[str] = field(default_factory=list) + admin_ip_addresses: list[str] = field(default_factory=list) fqdn: str = "" name: str = "" timezone: str = "" @@ -139,34 +131,32 @@ def validate(self) -> None: try: validate_aad_guid(self.aad_tenant_id) except Exception as exc: - raise ValueError( - f"Invalid value for 'aad_tenant_id' ({self.aad_tenant_id}).\n{str(exc)}" - ) + msg = f"Invalid value for 'aad_tenant_id' ({self.aad_tenant_id}).\n{exc}" + raise ValueError(msg) from exc try: validate_email_address(self.admin_email_address) except Exception as exc: - raise ValueError( - f"Invalid value for 'admin_email_address' ({self.admin_email_address}).\n{str(exc)}" - ) + msg = f"Invalid value for 'admin_email_address' ({self.admin_email_address}).\n{exc}" + raise ValueError(msg) from exc try: for ip in self.admin_ip_addresses: validate_ip_address(ip) except Exception as exc: - raise ValueError( - f"Invalid value for 'admin_ip_addresses' ({self.admin_ip_addresses}).\n{str(exc)}" - ) + msg = f"Invalid value for 'admin_ip_addresses' ({self.admin_ip_addresses}).\n{exc}" + raise ValueError(msg) from exc if not isinstance(self.fqdn, str) or not self.fqdn: - raise ValueError(f"Invalid value for 'fqdn' ({self.fqdn}).") + msg = f"Invalid value for 'fqdn' ({self.fqdn})." + raise ValueError(msg) if not isinstance(self.name, str) or not self.name: - raise ValueError(f"Invalid value for 'name' ({self.name}).") + msg = f"Invalid value for 'name' ({self.name})." + raise ValueError(msg) try: validate_timezone(self.timezone) except Exception as exc: - raise ValueError( - f"Invalid value for 'timezone' ({self.timezone}).\n{str(exc)}" - ) + msg = f"Invalid value for 'timezone' ({self.timezone}).\n{exc}" + raise ValueError(msg) from exc - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: self.validate() return as_dict(chili.encode(self)) @@ -181,13 +171,13 @@ class ConfigSectionRemoteDesktopOpts: def validate(self) -> None: """Validate input parameters""" if not isinstance(self.allow_copy, bool): - raise ValueError(f"Invalid value for 'allow_copy' ({self.allow_copy}).") + msg = f"Invalid value for 'allow_copy' ({self.allow_copy})." + raise ValueError(msg) if not isinstance(self.allow_paste, bool): - raise ValueError( - f"Invalid value for 'allow_paste' ({self.allow_paste})." - ) + msg = f"Invalid value for 'allow_paste' ({self.allow_paste})." + raise ValueError(msg) - def to_dict(self) -> Dict[str, bool]: + def to_dict(self) -> dict[str, bool]: self.validate() return as_dict(chili.encode(self)) @@ -200,22 +190,25 @@ def validate(self) -> None: try: validate_azure_vm_sku(self.sku) except Exception as exc: - raise ValueError(f"Invalid value for 'sku' ({self.sku}).\n{str(exc)}") + msg = f"Invalid value for 'sku' ({self.sku}).\n{exc}" + raise ValueError(msg) from exc - def to_dict(self) -> Dict[str, str]: + def to_dict(self) -> dict[str, str]: self.validate() return as_dict(chili.encode(self)) - data_provider_ip_addresses: List[str] = field(default_factory=list) + data_provider_ip_addresses: list[str] = field(default_factory=list) index: int = 0 remote_desktop: ConfigSectionRemoteDesktopOpts = field( default_factory=ConfigSectionRemoteDesktopOpts ) - # NB. 
we cannot use defaultdict here until https://github.com/python/cpython/pull/32056 is included in the Python version we are using - research_desktops: Dict[str, ConfigSectionResearchDesktopOpts] = field( + # NB. we cannot use defaultdict here until + # https://github.com/python/cpython/pull/32056 is included in the Python + # version we are using + research_desktops: dict[str, ConfigSectionResearchDesktopOpts] = field( default_factory=dict ) - research_user_ip_addresses: List[str] = field(default_factory=list) + research_user_ip_addresses: list[str] = field(default_factory=list) software_packages: SoftwarePackageCategory = SoftwarePackageCategory.NONE def add_research_desktop(self, name: str): @@ -229,19 +222,17 @@ def validate(self) -> None: for ip in self.data_provider_ip_addresses: validate_ip_address(ip) except Exception as exc: - raise ValueError( - f"Invalid value for 'data_provider_ip_addresses' ({self.data_provider_ip_addresses}).\n{str(exc)}" - ) + msg = f"Invalid value for 'data_provider_ip_addresses' ({self.data_provider_ip_addresses}).\n{exc}" + raise ValueError(msg) from exc self.remote_desktop.validate() try: for ip in self.research_user_ip_addresses: validate_ip_address(ip) except Exception as exc: - raise ValueError( - f"Invalid value for 'research_user_ip_addresses' ({self.research_user_ip_addresses}).\n{str(exc)}" - ) + msg = f"Invalid value for 'research_user_ip_addresses' ({self.research_user_ip_addresses}).\n{exc}" + raise ValueError(msg) from exc - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: self.validate() return as_dict(chili.encode(self)) @@ -256,9 +247,10 @@ class ConfigSectionTags: def validate(self) -> None: """Validate input parameters""" if not self.deployment: - raise ValueError(f"Invalid value for 'deployment' ({self.deployment}).") + msg = f"Invalid value for 'deployment' ({self.deployment})." 
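
# Sketch of the on-demand section pattern: Config.sres (just below) is a
# defaultdict, so looking up an unknown SRE name creates a fresh,
# fully-defaulted section, while research_desktops above cannot use the same
# trick yet, per the NB comment. SRESectionSketch is a stand-in dataclass,
# not the real ConfigSectionSRE.
from collections import defaultdict
from dataclasses import dataclass, field


@dataclass
class SRESectionSketch:
    index: int = 0
    research_user_ip_addresses: list[str] = field(default_factory=list)


sres: dict[str, SRESectionSketch] = defaultdict(SRESectionSketch)
sres["sandbox"].research_user_ip_addresses.append("10.0.0.1")  # auto-created
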
+ raise ValueError(msg) - def to_dict(self) -> Dict[str, str]: + def to_dict(self) -> dict[str, str]: self.validate() return as_dict(chili.encode(self)) @@ -266,12 +258,12 @@ def to_dict(self) -> Dict[str, str]: class Config: def __init__(self): # Initialise config sections - self.azure_: Optional[ConfigSectionAzure] = None - self.backend_: Optional[ConfigSectionBackend] = None - self.pulumi_: Optional[ConfigSectionPulumi] = None - self.shm_: Optional[ConfigSectionSHM] = None - self.tags_: Optional[ConfigSectionTags] = None - self.sres: Dict[str, ConfigSectionSRE] = defaultdict(ConfigSectionSRE) + self.azure_: ConfigSectionAzure | None = None + self.backend_: ConfigSectionBackend | None = None + self.pulumi_: ConfigSectionPulumi | None = None + self.shm_: ConfigSectionSHM | None = None + self.tags_: ConfigSectionTags | None = None + self.sres: dict[str, ConfigSectionSRE] = defaultdict(ConfigSectionSRE) # Read backend settings settings = BackendSettings() self.name = settings.name @@ -290,7 +282,7 @@ def __init__(self): self.azure_api = AzureApi(subscription_name=self.subscription_name) # Attempt to load YAML dictionary from blob storage yaml_input = {} - with suppress(DataSafeHavenAzureException, ParserError): + with suppress(DataSafeHavenAzureError, ParserError): yaml_input = yaml.safe_load( self.azure_api.download_blob( self.filename, @@ -370,9 +362,9 @@ def __str__(self) -> str: def read_stack(self, name: str, path: pathlib.Path): """Add a Pulumi stack file to config""" - with open(path, "r", encoding="utf-8") as f_stack: - b64string = f_stack.read() - self.pulumi.stacks[name] = b64encode(b64string) + with open(path, encoding="utf-8") as f_stack: + pulumi_cfg = f_stack.read() + self.pulumi.stacks[name] = b64encode(pulumi_cfg) def remove_sre(self, name: str) -> None: """Remove SRE config section by name""" @@ -386,9 +378,9 @@ def remove_stack(self, name: str) -> None: def write_stack(self, name: str, path: pathlib.Path): """Write a Pulumi stack file from config""" - b64string = b64decode(self.pulumi.stacks[name]) + pulumi_cfg = b64decode(self.pulumi.stacks[name]) with open(path, "w", encoding="utf-8") as f_stack: - f_stack.write(b64string) + f_stack.write(pulumi_cfg) def upload(self): """Upload config to Azure storage""" diff --git a/data_safe_haven/exceptions/__init__.py b/data_safe_haven/exceptions/__init__.py index e786d621e2..2a519ec604 100644 --- a/data_safe_haven/exceptions/__init__.py +++ b/data_safe_haven/exceptions/__init__.py @@ -1,50 +1,50 @@ -class DataSafeHavenException(Exception): +class DataSafeHavenError(Exception): pass -class DataSafeHavenCloudException(DataSafeHavenException): +class DataSafeHavenCloudError(DataSafeHavenError): pass -class DataSafeHavenConfigException(DataSafeHavenException): +class DataSafeHavenConfigError(DataSafeHavenError): pass -class DataSafeHavenInputException(DataSafeHavenException): +class DataSafeHavenInputError(DataSafeHavenError): pass -class DataSafeHavenInternalException(DataSafeHavenException): +class DataSafeHavenInternalError(DataSafeHavenError): pass -class DataSafeHavenIPRangeException(DataSafeHavenException): +class DataSafeHavenIPRangeError(DataSafeHavenError): pass -class DataSafeHavenNotImplementedException(DataSafeHavenInternalException): +class DataSafeHavenNotImplementedError(DataSafeHavenInternalError): pass -class DataSafeHavenParameterException(DataSafeHavenException): +class DataSafeHavenParameterError(DataSafeHavenError): pass -class DataSafeHavenSSLException(DataSafeHavenException): +class 
DataSafeHavenSSLError(DataSafeHavenError): pass -class DataSafeHavenAzureException(DataSafeHavenCloudException): +class DataSafeHavenAzureError(DataSafeHavenCloudError): pass -class DataSafeHavenUserHandlingException(DataSafeHavenInternalException): +class DataSafeHavenUserHandlingError(DataSafeHavenInternalError): pass -class DataSafeHavenMicrosoftGraphException(DataSafeHavenAzureException): +class DataSafeHavenMicrosoftGraphError(DataSafeHavenAzureError): pass -class DataSafeHavenPulumiException(DataSafeHavenCloudException): +class DataSafeHavenPulumiError(DataSafeHavenCloudError): pass diff --git a/data_safe_haven/external/api/azure_api.py b/data_safe_haven/external/api/azure_api.py index 3905219ad1..0d3f5e76a3 100644 --- a/data_safe_haven/external/api/azure_api.py +++ b/data_safe_haven/external/api/azure_api.py @@ -1,10 +1,9 @@ """Interface to the Azure Python SDK""" -# Standard library imports import time +from collections.abc import Sequence from contextlib import suppress -from typing import Any, Dict, List, Optional, Sequence, Tuple +from typing import Any -# Third party imports from azure.core.exceptions import ( HttpResponseError, ResourceExistsError, @@ -28,9 +27,10 @@ from azure.mgmt.dns import DnsManagementClient from azure.mgmt.dns.models import RecordSet, TxtRecord from azure.mgmt.keyvault import KeyVaultManagementClient -from azure.mgmt.keyvault.models import AccessPolicyEntry, Permissions -from azure.mgmt.keyvault.models import Sku as KeyVaultSku from azure.mgmt.keyvault.models import ( + AccessPolicyEntry, + Permissions, + Sku as KeyVaultSku, Vault, VaultCreateOrUpdateParameters, VaultProperties, @@ -53,13 +53,12 @@ from azure.storage.blob import BlobServiceClient from azure.storage.filedatalake import DataLakeServiceClient -# Local imports from data_safe_haven.exceptions import ( - DataSafeHavenAzureException, - DataSafeHavenInternalException, + DataSafeHavenAzureError, + DataSafeHavenInternalError, ) +from data_safe_haven.external.interface.azure_authenticator import AzureAuthenticator from data_safe_haven.utility import Logger -from ..interface.azure_authenticator import AzureAuthenticator class AzureApi(AzureAuthenticator): @@ -74,14 +73,14 @@ def compile_desired_state( automation_account_name: str, configuration_name: str, location: str, - parameters: Dict[str, str], + parameters: dict[str, str], resource_group_name: str, required_modules: Sequence[str], ) -> None: """Ensure that a Powershell Desired State Configuration is compiled Raises: - DataSafeHavenAzureException if the configuration could not be compiled + DataSafeHavenAzureError if the configuration could not be compiled """ # Connect to Azure clients automation_client = AutomationClient(self.credential, self.subscription_id) @@ -133,9 +132,8 @@ def compile_desired_state( if (result.provisioning_state == "Suspended") and ( result.status == "Suspended" ): - raise DataSafeHavenAzureException( - f"Could not compile DSC '{configuration_name}'\n{result.exception}." - ) + msg = f"Could not compile DSC '{configuration_name}'\n{result.exception}." 
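
# Condensed view of the hierarchy renamed in
# data_safe_haven/exceptions/__init__.py above: the Exception suffix becomes
# Error (matching pep8-naming's N818 rule) while the inheritance chain stays
# the same. Only part of the tree is reproduced here.
class DataSafeHavenError(Exception):
    pass


class DataSafeHavenCloudError(DataSafeHavenError):
    pass


class DataSafeHavenAzureError(DataSafeHavenCloudError):
    pass


class DataSafeHavenMicrosoftGraphError(DataSafeHavenAzureError):
    pass
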
+ raise DataSafeHavenAzureError(msg) def download_blob( self, @@ -150,7 +148,7 @@ def download_blob( str: The contents of the blob Raises: - DataSafeHavenAzureException if the blob could not be downloaded + DataSafeHavenAzureError if the blob could not be downloaded """ try: # Connect to Azure client @@ -158,21 +156,19 @@ def download_blob( resource_group_name, storage_account_name ) blob_service_client = BlobServiceClient.from_connection_string( - f"DefaultEndpointsProtocol=https;AccountName={storage_account_name};AccountKey={str(storage_account_keys[0].value)};EndpointSuffix=core.windows.net" + f"DefaultEndpointsProtocol=https;AccountName={storage_account_name};AccountKey={storage_account_keys[0].value};EndpointSuffix=core.windows.net" ) if not isinstance(blob_service_client, BlobServiceClient): - raise DataSafeHavenAzureException( - f"Could not connect to storage account '{storage_account_name}'." - ) + msg = f"Could not connect to storage account '{storage_account_name}'." + raise DataSafeHavenAzureError(msg) # Download the requested file blob_client = blob_service_client.get_blob_client( container=storage_container_name, blob=blob_name ) return blob_client.download_blob(encoding="utf-8").readall() except Exception as exc: - raise DataSafeHavenAzureException( - f"Blob file '{blob_name}' could not be downloaded from '{storage_account_name}'\n{str(exc)}." - ) from exc + msg = f"Blob file '{blob_name}' could not be downloaded from '{storage_account_name}'\n{exc}." + raise DataSafeHavenAzureError(msg) from exc def ensure_dns_txt_record( self, @@ -187,7 +183,7 @@ def ensure_dns_txt_record( RecordSet: The DNS record set Raises: - DataSafeHavenAzureException if the record could not be created + DataSafeHavenAzureError if the record could not be created """ try: # Connect to Azure clients @@ -211,9 +207,10 @@ def ensure_dns_txt_record( ) return record_set except Exception as exc: - raise DataSafeHavenAzureException( - f"Failed to create DNS record {record_name} in zone {zone_name}.\n{str(exc)}" - ) from exc + msg = ( + f"Failed to create DNS record {record_name} in zone {zone_name}.\n{exc}" + ) + raise DataSafeHavenAzureError(msg) from exc def ensure_keyvault( self, @@ -223,13 +220,13 @@ def ensure_keyvault( managed_identity: Identity, resource_group_name: str, tags: Any = None, - tenant_id: Optional[str] = None, + tenant_id: str | None = None, ) -> Vault: """Ensure that a KeyVault exists Raises: - DataSafeHavenAzureException if the existence of the KeyVault could not be verified + DataSafeHavenAzureError if the existence of the KeyVault could not be verified """ try: self.logger.debug( @@ -287,9 +284,8 @@ def ensure_keyvault( ) return key_vaults[0] except Exception as exc: - raise DataSafeHavenAzureException( - f"Failed to create key vault {key_vault_name}.\n{str(exc)}" - ) from exc + msg = f"Failed to create key vault {key_vault_name}.\n{exc}" + raise DataSafeHavenAzureError(msg) from exc def ensure_keyvault_key( self, @@ -302,7 +298,7 @@ def ensure_keyvault_key( str: The key ID Raises: - DataSafeHavenAzureException if the existence of the key could not be verified + DataSafeHavenAzureError if the existence of the key could not be verified """ try: # Connect to Azure clients @@ -325,9 +321,8 @@ def ensure_keyvault_key( ) return key except Exception as exc: - raise DataSafeHavenAzureException( - f"Failed to create key {key_name}.\n{str(exc)}" - ) from exc + msg = f"Failed to create key {key_name}.\n{exc}" + raise DataSafeHavenAzureError(msg) from exc def ensure_keyvault_secret( self, 
key_vault_name: str, secret_name: str, secret_value: str @@ -338,7 +333,7 @@ def ensure_keyvault_secret( str: The secret value Raises: - DataSafeHavenAzureException if the existence of the secret could not be verified + DataSafeHavenAzureError if the existence of the secret could not be verified """ # Ensure that key exists self.logger.debug( @@ -351,7 +346,7 @@ def ensure_keyvault_secret( ) try: secret = secret_client.get_secret(secret_name) - except DataSafeHavenAzureException: + except DataSafeHavenAzureError: secret = None if not secret: self.set_keyvault_secret(key_vault_name, secret_name, secret_value) @@ -361,9 +356,8 @@ def ensure_keyvault_secret( ) return secret except Exception as exc: - raise DataSafeHavenAzureException( - f"Failed to create secret {secret_name}.\n{str(exc)}" - ) from exc + msg = f"Failed to create secret {secret_name}.\n{exc}" + raise DataSafeHavenAzureError(msg) from exc def ensure_keyvault_self_signed_certificate( self, @@ -377,7 +371,7 @@ def ensure_keyvault_self_signed_certificate( KeyVaultCertificate: The self-signed certificate Raises: - DataSafeHavenAzureException if the existence of the certificate could not be verified + DataSafeHavenAzureError if the existence of the certificate could not be verified """ try: # Connect to Azure clients @@ -411,9 +405,8 @@ def ensure_keyvault_self_signed_certificate( ) return certificate except Exception as exc: - raise DataSafeHavenAzureException( - f"Failed to create certificate '{certificate_url}'." - ) from exc + msg = f"Failed to create certificate '{certificate_url}'." + raise DataSafeHavenAzureError(msg) from exc def ensure_managed_identity( self, @@ -427,7 +420,7 @@ def ensure_managed_identity( Identity: The managed identity Raises: - DataSafeHavenAzureException if the existence of the managed identity could not be verified + DataSafeHavenAzureError if the existence of the managed identity could not be verified """ try: self.logger.debug( @@ -447,9 +440,8 @@ def ensure_managed_identity( ) return managed_identity # type: ignore except Exception as exc: - raise DataSafeHavenAzureException( - f"Failed to create managed identity {identity_name}.\n{str(exc)}" - ) from exc + msg = f"Failed to create managed identity {identity_name}.\n{exc}" + raise DataSafeHavenAzureError(msg) from exc def ensure_resource_group( self, @@ -460,7 +452,7 @@ def ensure_resource_group( """Ensure that a resource group exists Raises: - DataSafeHavenAzureException if the existence of the resource group could not be verified + DataSafeHavenAzureError if the existence of the resource group could not be verified """ try: # Connect to Azure clients @@ -482,13 +474,13 @@ def ensure_resource_group( if rg.name == resource_group_name ] self.logger.info( - f"Ensured that resource group [green]{resource_groups[0].name}[/] exists in [green]{resource_groups[0].location}[/].", + f"Ensured that resource group [green]{resource_groups[0].name}[/] exists" + f" in [green]{resource_groups[0].location}[/].", ) return resource_groups[0] except Exception as exc: - raise DataSafeHavenAzureException( - f"Failed to create resource group {resource_group_name}.\n{str(exc)}" - ) from exc + msg = f"Failed to create resource group {resource_group_name}.\n{exc}" + raise DataSafeHavenAzureError(msg) from exc def ensure_storage_account( self, @@ -503,7 +495,7 @@ def ensure_storage_account( str: The certificate secret ID Raises: - DataSafeHavenAzureException if the existence of the certificate could not be verified + DataSafeHavenAzureError if the existence of the 
certificate could not be verified """ try: # Connect to Azure clients @@ -529,9 +521,8 @@ def ensure_storage_account( ) return storage_account except Exception as exc: - raise DataSafeHavenAzureException( - f"Failed to create storage account {storage_account_name}.\n{str(exc)}" - ) from exc + msg = f"Failed to create storage account {storage_account_name}.\n{exc}" + raise DataSafeHavenAzureError(msg) from exc def ensure_storage_blob_container( self, @@ -545,7 +536,7 @@ def ensure_storage_blob_container( str: The certificate secret ID Raises: - DataSafeHavenAzureException if the existence of the certificate could not be verified + DataSafeHavenAzureError if the existence of the certificate could not be verified """ # Connect to Azure clients storage_client = StorageManagementClient(self.credential, self.subscription_id) @@ -565,9 +556,8 @@ def ensure_storage_blob_container( ) return container except HttpResponseError as exc: - raise DataSafeHavenAzureException( - f"Failed to create storage container [green]{container_name}." - ) from exc + msg = f"Failed to create storage container [green]{container_name}." + raise DataSafeHavenAzureError(msg) from exc def get_keyvault_certificate( self, certificate_name: str, key_vault_name: str @@ -578,7 +568,7 @@ def get_keyvault_certificate( KeyVaultCertificate: The certificate Raises: - DataSafeHavenAzureException if the secret could not be read + DataSafeHavenAzureError if the secret could not be read """ # Connect to Azure clients certificate_client = CertificateClient( @@ -589,9 +579,8 @@ def get_keyvault_certificate( try: return certificate_client.get_certificate(certificate_name) except Exception as exc: - raise DataSafeHavenAzureException( - f"Failed to retrieve certificate {certificate_name}." - ) from exc + msg = f"Failed to retrieve certificate {certificate_name}." + raise DataSafeHavenAzureError(msg) from exc def get_keyvault_secret(self, key_vault_name: str, secret_name: str) -> str: """Read a secret from the KeyVault @@ -600,7 +589,7 @@ def get_keyvault_secret(self, key_vault_name: str, secret_name: str) -> str: str: The secret value Raises: - DataSafeHavenAzureException if the secret could not be read + DataSafeHavenAzureError if the secret could not be read """ # Connect to Azure clients secret_client = SecretClient( @@ -611,13 +600,13 @@ def get_keyvault_secret(self, key_vault_name: str, secret_name: str) -> str: secret = secret_client.get_secret(secret_name) if secret.value: return secret.value - raise DataSafeHavenAzureException(f"Secret {secret_name} has no value.") + msg = f"Secret {secret_name} has no value." + raise DataSafeHavenAzureError(msg) except Exception as exc: - raise DataSafeHavenAzureException( - f"Failed to retrieve secret {secret_name}." - ) from exc + msg = f"Failed to retrieve secret {secret_name}." 
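
# Hedged sketch of the wrapper style used by get_keyvault_secret above: SDK
# failures are re-raised as the project's own error type with `from exc` so
# the underlying Azure traceback is preserved. Credential handling is
# simplified to DefaultAzureCredential for illustration.
from azure.identity import DefaultAzureCredential
from azure.keyvault.secrets import SecretClient

from data_safe_haven.exceptions import DataSafeHavenAzureError


def read_secret(key_vault_name: str, secret_name: str) -> str:
    client = SecretClient(
        vault_url=f"https://{key_vault_name}.vault.azure.net",
        credential=DefaultAzureCredential(),
    )
    try:
        secret = client.get_secret(secret_name)
        if secret.value:
            return secret.value
        msg = f"Secret {secret_name} has no value."
        raise DataSafeHavenAzureError(msg)
    except Exception as exc:
        msg = f"Failed to retrieve secret {secret_name}."
        raise DataSafeHavenAzureError(msg) from exc
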
+ raise DataSafeHavenAzureError(msg) from exc - def get_locations(self) -> List[str]: + def get_locations(self) -> list[str]: """Retrieve list of Azure locations Returns: @@ -632,20 +621,19 @@ def get_locations(self) -> List[str]: ) ] except Exception as exc: - raise DataSafeHavenAzureException( - f"Azure locations could not be loaded.\n{str(exc)}" - ) from exc + msg = f"Azure locations could not be loaded.\n{exc}" + raise DataSafeHavenAzureError(msg) from exc def get_storage_account_keys( self, resource_group_name: str, storage_account_name: str - ) -> List[StorageAccountKey]: + ) -> list[StorageAccountKey]: """Retrieve the storage account keys for an existing storage account Returns: List[StorageAccountKey]: The keys for this storage account Raises: - DataSafeHavenAzureException if the keys could not be loaded + DataSafeHavenAzureError if the keys could not be loaded """ # Connect to Azure client try: @@ -657,21 +645,27 @@ def get_storage_account_keys( storage_account_name, ) if not isinstance(storage_keys, StorageAccountListKeysResult): - raise DataSafeHavenAzureException( - f"Could not connect to storage account '{storage_account_name}' in resource group '{resource_group_name}'." + msg = ( + f"Could not connect to storage account '{storage_account_name}'" + f" in resource group '{resource_group_name}'." ) + raise DataSafeHavenAzureError(msg) keys = storage_keys.keys if not keys or len(keys) == 0: - raise DataSafeHavenAzureException( - f"No keys were retrieved for storage account '{storage_account_name}' in resource group '{resource_group_name}'." + msg = ( + f"No keys were retrieved for storage account '{storage_account_name}'" + f" in resource group '{resource_group_name}'." ) + raise DataSafeHavenAzureError(msg) return keys except Exception as exc: - raise DataSafeHavenAzureException( - f"Keys could not be loaded for storage account '{storage_account_name}' in resource group '{resource_group_name}'.\n{str(exc)}" - ) from exc + msg = ( + f"Keys could not be loaded for storage account '{storage_account_name}'" + f" in resource group '{resource_group_name}'.\n{exc}" + ) + raise DataSafeHavenAzureError(msg) from exc - def get_vm_sku_details(self, sku: str) -> Tuple[str, str, str]: + def get_vm_sku_details(self, sku: str) -> tuple[str, str, str]: # Connect to Azure client cpus, gpus, ram = None, None, None compute_client = ComputeManagementClient(self.credential, self.subscription_id) @@ -687,9 +681,8 @@ def get_vm_sku_details(self, sku: str) -> Tuple[str, str, str]: ram = capability.value if cpus and gpus and ram: return (cpus, gpus, ram) - raise DataSafeHavenAzureException( - f"Could not find information for VM SKU {sku}." - ) + msg = f"Could not find information for VM SKU {sku}." + raise DataSafeHavenAzureError(msg) def import_keyvault_certificate( self, @@ -703,7 +696,7 @@ def import_keyvault_certificate( KeyVaultCertificate: The imported certificate Raises: - DataSafeHavenAzureException if the existence of the certificate could not be verified + DataSafeHavenAzureError if the existence of the certificate could not be verified """ try: # Connect to Azure clients @@ -732,11 +725,10 @@ def import_keyvault_certificate( ) return certificate except Exception as exc: - raise DataSafeHavenAzureException( - f"Failed to import certificate '{certificate_name}'." - ) from exc + msg = f"Failed to import certificate '{certificate_name}'." 
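
# Hedged sketch of get_vm_sku_details above: each resource SKU's capability
# list is scanned for vCPUs/GPUs/MemoryGB. Those capability names are an
# assumption about what Azure reports for virtualMachines SKUs; the diff
# does not show them explicitly.
from azure.identity import DefaultAzureCredential
from azure.mgmt.compute import ComputeManagementClient

from data_safe_haven.exceptions import DataSafeHavenAzureError


def vm_sku_details(subscription_id: str, sku_name: str) -> tuple[str, str, str]:
    client = ComputeManagementClient(DefaultAzureCredential(), subscription_id)
    for resource_sku in client.resource_skus.list():
        if resource_sku.name != sku_name or not resource_sku.capabilities:
            continue
        caps = {c.name: c.value for c in resource_sku.capabilities}
        if all(key in caps for key in ("vCPUs", "GPUs", "MemoryGB")):
            return (caps["vCPUs"], caps["GPUs"], caps["MemoryGB"])
    msg = f"Could not find information for VM SKU {sku_name}."
    raise DataSafeHavenAzureError(msg)
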
+ raise DataSafeHavenAzureError(msg) from exc - def list_available_vm_skus(self, location: str) -> Dict[str, Dict[str, Any]]: + def list_available_vm_skus(self, location: str) -> dict[str, dict[str, Any]]: try: # Connect to Azure client compute_client = ComputeManagementClient( @@ -758,8 +750,9 @@ def list_available_vm_skus(self, location: str) -> Dict[str, Dict[str, Any]]: skus[resource_sku.name][capability.name] = capability.value return skus except Exception as exc: - raise DataSafeHavenAzureException( - f"Failed to load available VM sizes for Azure location {location}.\n{str(exc)}", + msg = f"Failed to load available VM sizes for Azure location {location}.\n{exc}" + raise DataSafeHavenAzureError( + msg, ) from exc def purge_keyvault_certificate( @@ -770,7 +763,7 @@ def purge_keyvault_certificate( """Purge a deleted certificate from the KeyVault Raises: - DataSafeHavenAzureException if the existence of the certificate could not be verified + DataSafeHavenAzureError if the existence of the certificate could not be verified """ try: # Connect to Azure clients @@ -796,8 +789,9 @@ def purge_keyvault_certificate( f"Purged certificate [green]{certificate_name}[/] from Key Vault [green]{key_vault_name}[/].", ) except Exception as exc: - raise DataSafeHavenAzureException( - f"Failed to remove certificate '{certificate_name}' from Key Vault '{key_vault_name}'.", + msg = f"Failed to remove certificate '{certificate_name}' from Key Vault '{key_vault_name}'." + raise DataSafeHavenAzureError( + msg, ) from exc def remove_dns_txt_record( @@ -809,7 +803,7 @@ def remove_dns_txt_record( """Remove a DNS record if it exists in a DNS zone Raises: - DataSafeHavenAzureException if the record could not be removed + DataSafeHavenAzureError if the record could not be removed """ try: # Connect to Azure clients @@ -828,9 +822,8 @@ def remove_dns_txt_record( f"Ensured that DNS record [green]{record_name}[/] is removed from zone [green]{zone_name}[/].", ) except Exception as exc: - raise DataSafeHavenAzureException( - f"Failed to remove DNS record [green]{record_name}[/] from zone [green]{zone_name}[/].\n{str(exc)}" - ) from exc + msg = f"Failed to remove DNS record [green]{record_name}[/] from zone [green]{zone_name}[/].\n{exc}" + raise DataSafeHavenAzureError(msg) from exc def remove_keyvault_certificate( self, @@ -840,7 +833,7 @@ def remove_keyvault_certificate( """Remove a certificate from the KeyVault Raises: - DataSafeHavenAzureException if the existence of the certificate could not be verified + DataSafeHavenAzureError if the existence of the certificate could not be verified """ try: # Connect to Azure clients @@ -870,15 +863,16 @@ def remove_keyvault_certificate( except ResourceNotFoundError: pass except Exception as exc: - raise DataSafeHavenAzureException( - f"Failed to remove certificate '{certificate_name}' from Key Vault '{key_vault_name}'.", + msg = f"Failed to remove certificate '{certificate_name}' from Key Vault '{key_vault_name}'." 
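
# Hedged sketch of remove_dns_txt_record above, via azure-mgmt-dns's
# record_sets.delete call (positional order: resource group, zone, record
# name, record type). Credential handling is simplified.
from azure.identity import DefaultAzureCredential
from azure.mgmt.dns import DnsManagementClient

from data_safe_haven.exceptions import DataSafeHavenAzureError


def remove_txt_record(
    subscription_id: str, resource_group: str, zone: str, record: str
) -> None:
    client = DnsManagementClient(DefaultAzureCredential(), subscription_id)
    try:
        client.record_sets.delete(resource_group, zone, record, "TXT")
    except Exception as exc:
        msg = f"Failed to remove DNS record {record} from zone {zone}."
        raise DataSafeHavenAzureError(msg) from exc
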
+ raise DataSafeHavenAzureError( + msg, ) from exc def remove_resource_group(self, resource_group_name: str) -> None: """Remove a resource group with its contents Raises: - DataSafeHavenAzureException if the resource group could not be removed + DataSafeHavenAzureError if the resource group could not be removed """ try: # Connect to Azure clients @@ -901,21 +895,20 @@ def remove_resource_group(self, resource_group_name: str) -> None: if rg.name == resource_group_name ] if resource_groups: - raise DataSafeHavenInternalException( - f"There are still {len(resource_groups)} resource group(s) remaining." - ) + msg = f"There are still {len(resource_groups)} resource group(s) remaining." + raise DataSafeHavenInternalError(msg) self.logger.info( f"Ensured that resource group [green]{resource_group_name}[/] does not exist.", ) except Exception as exc: - raise DataSafeHavenAzureException( - f"Failed to remove resource group {resource_group_name}.\n{str(exc)}" - ) from exc + msg = f"Failed to remove resource group {resource_group_name}.\n{exc}" + raise DataSafeHavenAzureError(msg) from exc def restart_virtual_machine(self, resource_group_name: str, vm_name: str) -> None: try: self.logger.debug( - f"Attempting to restart virtual machine '[green]{vm_name}[/]' in resource group '[green]{resource_group_name}[/]'...", + f"Attempting to restart virtual machine '[green]{vm_name}[/]'" + f" in resource group '[green]{resource_group_name}[/]'...", ) # Connect to Azure clients compute_client = ComputeManagementClient( @@ -931,15 +924,14 @@ def restart_virtual_machine(self, resource_group_name: str, vm_name: str) -> Non f"Restarted virtual machine '[green]{vm_name}[/]' in resource group '[green]{resource_group_name}[/]'.", ) except Exception as exc: - raise DataSafeHavenAzureException( - f"Failed to restart virtual machine '{vm_name}' in resource group '{resource_group_name}'.\n{str(exc)}" - ) from exc + msg = f"Failed to restart virtual machine '{vm_name}' in resource group '{resource_group_name}'.\n{exc}" + raise DataSafeHavenAzureError(msg) from exc def run_remote_script( self, resource_group_name: str, script: str, - script_parameters: Dict[str, str], + script_parameters: dict[str, str], vm_name: str, ) -> str: """Run a script on a remote virtual machine @@ -948,7 +940,7 @@ def run_remote_script( str: The script output Raises: - DataSafeHavenAzureException if running the script failed + DataSafeHavenAzureError if running the script failed """ try: # Connect to Azure clients @@ -957,7 +949,8 @@ def run_remote_script( ) vm = compute_client.virtual_machines.get(resource_group_name, vm_name) if not vm.os_profile: - raise ValueError(f"No OSProfile available for VM {vm_name}") + msg = f"No OSProfile available for VM {vm_name}" + raise ValueError(msg) command_id = ( "RunPowerShellScript" if ( @@ -982,9 +975,8 @@ def run_remote_script( # Return stdout/stderr from the command return str(result.value[0].message) except Exception as exc: - raise DataSafeHavenAzureException( - f"Failed to run command on '{vm_name}'.\n{str(exc)}" - ) from exc + msg = f"Failed to run command on '{vm_name}'.\n{exc}" + raise DataSafeHavenAzureError(msg) from exc def set_blob_container_acl( self, @@ -996,7 +988,7 @@ def set_blob_container_acl( """Set the ACL for a blob container Raises: - DataSafeHavenAzureException if the ACL could not be set + DataSafeHavenAzureError if the ACL could not be set """ try: # Ensure that storage container exists in the storage account @@ -1008,10 +1000,12 @@ def set_blob_container_acl( resource_group_name, 
storage_account_name, container_name ) if container.name != container_name: - raise HttpResponseError("Container could not be found.") + msg = "Container could not be found." + raise HttpResponseError(msg) except HttpResponseError: self.logger.warning( - f"Blob container '[green]{container_name}[/]' could not be found in storage account '[green]{storage_account_name}[/]'." + f"Blob container '[green]{container_name}[/]' could not be found" + f" in storage account '[green]{storage_account_name}[/]'." ) return @@ -1026,9 +1020,8 @@ def set_blob_container_acl( # Set the desired ACL directory_client.set_access_control_recursive(acl=desired_acl) except Exception as exc: - raise DataSafeHavenAzureException( - f"Failed to set ACL '{desired_acl}' on container '{container_name}'.\n{str(exc)}" - ) from exc + msg = f"Failed to set ACL '{desired_acl}' on container '{container_name}'.\n{exc}" + raise DataSafeHavenAzureError(msg) from exc def set_keyvault_secret( self, key_vault_name: str, secret_name: str, secret_value: str @@ -1039,7 +1032,7 @@ def set_keyvault_secret( str: The secret value Raises: - DataSafeHavenAzureException if the secret could not be set + DataSafeHavenAzureError if the secret could not be set """ try: # Connect to Azure clients @@ -1055,9 +1048,8 @@ def set_keyvault_secret( secret_client.set_secret(secret_name, secret_value) return secret_client.get_secret(secret_name) except Exception as exc: - raise DataSafeHavenAzureException( - f"Failed to set secret '{secret_name}'.\n{str(exc)}" - ) from exc + msg = f"Failed to set secret '{secret_name}'.\n{exc}" + raise DataSafeHavenAzureError(msg) from exc def upload_blob( self, @@ -1073,7 +1065,7 @@ def upload_blob( None Raises: - DataSafeHavenAzureException if the blob could not be uploaded + DataSafeHavenAzureError if the blob could not be uploaded """ try: # Connect to Azure client @@ -1081,12 +1073,11 @@ def upload_blob( resource_group_name, storage_account_name ) blob_service_client = BlobServiceClient.from_connection_string( - f"DefaultEndpointsProtocol=https;AccountName={storage_account_name};AccountKey={str(storage_account_keys[0].value)};EndpointSuffix=core.windows.net" + f"DefaultEndpointsProtocol=https;AccountName={storage_account_name};AccountKey={storage_account_keys[0].value};EndpointSuffix=core.windows.net" ) if not isinstance(blob_service_client, BlobServiceClient): - raise DataSafeHavenAzureException( - f"Could not connect to storage account '{storage_account_name}'." - ) + msg = f"Could not connect to storage account '{storage_account_name}'." + raise DataSafeHavenAzureError(msg) # Upload the created file blob_client = blob_service_client.get_blob_client( container=storage_container_name, blob=blob_name @@ -1096,6 +1087,5 @@ def upload_blob( f"Uploaded file [green]{blob_name}[/] to blob storage.", ) except Exception as exc: - raise DataSafeHavenAzureException( - f"Blob file '{blob_name}' could not be uploaded to '{storage_account_name}'\n{str(exc)}." - ) from exc + msg = f"Blob file '{blob_name}' could not be uploaded to '{storage_account_name}'\n{exc}." 
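
# Hedged sketch of the upload path in upload_blob above: a storage account
# key is folded into a connection string and the payload is written through
# a BlobClient. overwrite=True is an assumption; the diff does not show the
# final upload call.
from azure.storage.blob import BlobServiceClient


def upload(account: str, key: str, container: str, blob_name: str, data: str) -> None:
    connection_string = (
        "DefaultEndpointsProtocol=https;"
        f"AccountName={account};AccountKey={key};"
        "EndpointSuffix=core.windows.net"
    )
    service = BlobServiceClient.from_connection_string(connection_string)
    service.get_blob_client(container=container, blob=blob_name).upload_blob(
        data, overwrite=True
    )
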
+ raise DataSafeHavenAzureError(msg) from exc diff --git a/data_safe_haven/external/api/azure_cli.py b/data_safe_haven/external/api/azure_cli.py index b25c79aa55..a6935fffc0 100644 --- a/data_safe_haven/external/api/azure_cli.py +++ b/data_safe_haven/external/api/azure_cli.py @@ -1,10 +1,8 @@ """Interface to the Azure CLI""" -# Standard library imports import subprocess from typing import Any -# Local imports -from data_safe_haven.exceptions import DataSafeHavenAzureException +from data_safe_haven.exceptions import DataSafeHavenAzureError from data_safe_haven.utility import Logger @@ -30,7 +28,6 @@ def login(self) -> None: "If no web browser is available, please run `az login --use-device-code` in a command line window." ) subprocess.run(["az", "login"], capture_output=True) - except FileNotFoundError as exc: - raise DataSafeHavenAzureException( - f"Please ensure that the Azure CLI is installed.\n{str(exc)}" - ) + except (FileNotFoundError, subprocess.CalledProcessError) as exc: + msg = f"Please ensure that the Azure CLI is installed.\n{exc}" + raise DataSafeHavenAzureError(msg) from exc diff --git a/data_safe_haven/external/api/graph_api.py b/data_safe_haven/external/api/graph_api.py index a837c511e8..4a8e3e412c 100644 --- a/data_safe_haven/external/api/graph_api.py +++ b/data_safe_haven/external/api/graph_api.py @@ -1,13 +1,12 @@ """Interface to the Microsoft Graph API""" -# Standard library imports import datetime import pathlib import time +from collections.abc import Sequence from contextlib import suppress from io import UnsupportedOperation -from typing import Any, Dict, List, Optional, Sequence +from typing import Any, ClassVar -# Third party imports import requests from dns import resolver from msal import ( @@ -16,11 +15,10 @@ SerializableTokenCache, ) -# Local imports from data_safe_haven.exceptions import ( - DataSafeHavenInputException, - DataSafeHavenInternalException, - DataSafeHavenMicrosoftGraphException, + DataSafeHavenInputError, + DataSafeHavenInternalError, + DataSafeHavenMicrosoftGraphError, ) from data_safe_haven.utility import Logger @@ -31,7 +29,7 @@ def __init__(self, token_cache_filename: pathlib.Path) -> None: self.token_cache_filename = token_cache_filename try: if self.token_cache_filename.exists(): - with open(self.token_cache_filename, "r", encoding="utf-8") as f_token: + with open(self.token_cache_filename, encoding="utf-8") as f_token: self.deserialize(f_token.read()) except (FileNotFoundError, UnsupportedOperation): self.deserialize(None) @@ -45,8 +43,10 @@ class GraphApi: """Interface to the Microsoft Graph REST API""" linux_schema = "extj8xolrvw_linux" # this is the "Extension with Properties for Linux User and Groups" extension - role_template_ids = {"Global Administrator": "62e90394-69f5-4237-9190-012177145e10"} - uuid_application = { + role_template_ids: ClassVar[dict[str, str]] = { + "Global Administrator": "62e90394-69f5-4237-9190-012177145e10" + } + uuid_application: ClassVar[dict[str, str]] = { "Directory.Read.All": "7ab1d382-f21e-4acd-a863-ba3e13f7da61", "Domain.Read.All": "dbb9058a-0e50-45d7-ae91-66909b5d4664", "Group.Read.All": "5b567255-7703-4780-807c-7be8301ae99b", @@ -55,7 +55,7 @@ class GraphApi: "User.ReadWrite.All": "741f803b-c850-494e-b5df-cde7c675a1ca", "UserAuthenticationMethod.ReadWrite.All": "50483e42-d915-4231-9639-7fdb7fd190e5", } - uuid_delegated = { + uuid_delegated: ClassVar[dict[str, str]] = { "GroupMember.Read.All": "bc024368-1153-4739-b217-4326f2e966d0", "User.Read.All": "a154be20-db9c-4678-8ab7-66f6cc099a59", } @@ -63,10 
+63,10 @@ class GraphApi: def __init__( self, *args: Any, - tenant_id: Optional[str] = None, - auth_token: Optional[str] = None, - application_id: Optional[str] = None, - application_secret: Optional[str] = None, + tenant_id: str | None = None, + auth_token: str | None = None, + application_id: str | None = None, + application_secret: str | None = None, base_endpoint: str = "", default_scopes: Sequence[str] = [], **kwargs: Any, @@ -94,7 +94,7 @@ def add_custom_domain(self, domain_name: str) -> str: str: Registration TXT record Raises: - DataSafeHavenMicrosoftGraphException if domain could not be added + DataSafeHavenMicrosoftGraphError if domain could not be added """ try: # Create the AzureAD custom domain if it does not already exist @@ -109,20 +109,18 @@ def add_custom_domain(self, domain_name: str) -> str: response = self.http_get( f"{self.base_endpoint}/domains/{domain_name}/verificationDnsRecords" ) - txt_records: List[str] = [ + txt_records: list[str] = [ record["text"] for record in response.json()["value"] if record["recordType"] == "Txt" ] if not txt_records: - raise DataSafeHavenMicrosoftGraphException( - f"Could not retrieve verification DNS records for {domain_name}." - ) + msg = f"Could not retrieve verification DNS records for {domain_name}." + raise DataSafeHavenMicrosoftGraphError(msg) return txt_records[0] except Exception as exc: - raise DataSafeHavenMicrosoftGraphException( - f"Could not register domain '{domain_name}'.\n{str(exc)}" - ) from exc + msg = f"Could not register domain '{domain_name}'.\n{exc}" + raise DataSafeHavenMicrosoftGraphError(msg) from exc def add_user_to_group( self, @@ -132,7 +130,7 @@ def add_user_to_group( """Add a user to a group Raises: - DataSafeHavenMicrosoftGraphException if the token could not be created + DataSafeHavenMicrosoftGraphError if the token could not be created """ try: user_id = self.get_id_from_username(username) @@ -157,26 +155,25 @@ def add_user_to_group( self.logger.info( f"Added user [green]'{username}'[/] to group [green]'{group_name}'[/]." ) - except (DataSafeHavenMicrosoftGraphException, IndexError) as exc: - raise DataSafeHavenMicrosoftGraphException( - f"Could not add user '{username}' to group '{group_name}'.\n{str(exc)}" - ) from exc + except (DataSafeHavenMicrosoftGraphError, IndexError) as exc: + msg = f"Could not add user '{username}' to group '{group_name}'.\n{exc}" + raise DataSafeHavenMicrosoftGraphError(msg) from exc def create_application( self, application_name: str, application_scopes: Sequence[str] = [], delegated_scopes: Sequence[str] = [], - request_json: Optional[Dict[str, Any]] = None, - ) -> Dict[str, Any]: + request_json: dict[str, Any] | None = None, + ) -> dict[str, Any]: """Create an AzureAD application if it does not already exist Raises: - DataSafeHavenMicrosoftGraphException if the application could not be created + DataSafeHavenMicrosoftGraphError if the application could not be created """ try: # Check for an existing application - json_response: Dict[str, Any] + json_response: dict[str, Any] if existing_application := self.get_application_by_name(application_name): self.logger.info( f"Application '[green]{application_name}[/]' already exists." @@ -236,13 +233,18 @@ def create_application( and self.read_application_permissions(application_sp["id"]) ): self.logger.info( - f"Application [green]{application_name}[/] has requested permissions that need administrator approval." + f"Application [green]{application_name}[/] has requested permissions" + " that need administrator approval." 
) self.logger.info( - "Please sign-in with [bold]global administrator[/] credentials for the Azure Active Directory where your users are stored." + "Please sign-in with [bold]global administrator[/] credentials for the" + " Azure Active Directory where your users are stored." ) self.logger.info( - f"To sign in, use a web browser to open the page [green]https://login.microsoftonline.com/{self.tenant_id}/adminconsent?client_id={application_id}&redirect_uri=https://login.microsoftonline.com/common/oauth2/nativeclient[/] and follow the instructions." + "To sign in, use a web browser to open the page" + f" [green]https://login.microsoftonline.com/{self.tenant_id}/adminconsent?client_id=" + f"{application_id}&redirect_uri=https://login.microsoftonline.com/common/oauth2/nativeclient[/]" + " and follow the instructions." ) while True: if application_sp := self.get_service_principal_by_name( @@ -254,9 +256,8 @@ def create_application( # Return JSON representation of the AzureAD application return json_response except Exception as exc: - raise DataSafeHavenMicrosoftGraphException( - f"Could not create application '{application_name}'.\n{str(exc)}" - ) from exc + msg = f"Could not create application '{application_name}'.\n{exc}" + raise DataSafeHavenMicrosoftGraphError(msg) from exc def create_application_secret( self, application_secret_name: str, application_name: str @@ -267,22 +268,20 @@ def create_application_secret( str: Contents of newly-created secret Raises: - DataSafeHavenMicrosoftGraphException if the secret could not be created or already exists + DataSafeHavenMicrosoftGraphError if the secret could not be created or already exists """ try: application_json = self.get_application_by_name(application_name) if not application_json: - raise DataSafeHavenMicrosoftGraphException( - f"Could not retrieve application '{application_name}'" - ) + msg = f"Could not retrieve application '{application_name}'" + raise DataSafeHavenMicrosoftGraphError(msg) # If the secret already exists then raise an exception if "passwordCredentials" in application_json and any( cred["displayName"] == application_secret_name for cred in application_json["passwordCredentials"] ): - raise DataSafeHavenInputException( - f"Secret '{application_secret_name}' already exists in application '{application_name}'." - ) + msg = f"Secret '{application_secret_name}' already exists in application '{application_name}'." 
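
# Hedged sketch of the Graph call behind create_application_secret: the
# applications/{id}/addPassword action returns the new secret in its
# "secretText" field. Whether the project targets v1.0 or beta here is not
# visible in the diff; token acquisition is out of scope.
import requests


def add_application_password(token: str, application_id: str, name: str) -> str:
    response = requests.post(
        f"https://graph.microsoft.com/v1.0/applications/{application_id}/addPassword",
        headers={"Authorization": f"Bearer {token}"},
        json={"passwordCredential": {"displayName": name}},
        timeout=30,
    )
    response.raise_for_status()
    return str(response.json()["secretText"])
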
+ raise DataSafeHavenInputError(msg) # Create the application secret if it does not exist self.logger.debug( f"Creating application secret '[green]{application_secret_name}[/]'...", @@ -305,15 +304,14 @@ def create_application_secret( ) return str(json_response["secretText"]) except Exception as exc: - raise DataSafeHavenMicrosoftGraphException( - f"Could not create application secret '{application_secret_name}'.\n{str(exc)}" - ) from exc + msg = f"Could not create application secret '{application_secret_name}'.\n{exc}" + raise DataSafeHavenMicrosoftGraphError(msg) from exc def create_group(self, group_name: str, group_id: str) -> None: """Create an AzureAD group if it does not already exist Raises: - DataSafeHavenMicrosoftGraphException if the group could not be created + DataSafeHavenMicrosoftGraphError if the group could not be created """ try: if self.get_id_from_groupname(group_name): @@ -351,15 +349,14 @@ def create_group(self, group_name: str, group_id: str) -> None: f"Created AzureAD group '[green]{group_name}[/]'.", ) except Exception as exc: - raise DataSafeHavenMicrosoftGraphException( - f"Could not create AzureAD group '{group_name}'.\n{str(exc)}" - ) from exc + msg = f"Could not create AzureAD group '{group_name}'.\n{exc}" + raise DataSafeHavenMicrosoftGraphError(msg) from exc def create_token_administrator(self) -> str: """Create an access token for a global administrator Raises: - DataSafeHavenMicrosoftGraphException if the token could not be created + DataSafeHavenMicrosoftGraphError if the token could not be created """ result = None try: @@ -380,17 +377,18 @@ def create_token_administrator(self) -> str: if not result: flow = app.initiate_device_flow(scopes=self.default_scopes) if "user_code" not in flow: - raise DataSafeHavenMicrosoftGraphException( - f"Could not initiate device login for scopes {self.default_scopes}." - ) + msg = f"Could not initiate device login for scopes {self.default_scopes}." + raise DataSafeHavenMicrosoftGraphError(msg) self.logger.info( "Administrator approval is needed in order to interact with Azure Active Directory." ) self.logger.info( - f"Please sign-in with [bold]global administrator[/] credentials for Azure Active Directory [green]{self.tenant_id}[/]." + "Please sign-in with [bold]global administrator[/] credentials for" + f" Azure Active Directory [green]{self.tenant_id}[/]." ) self.logger.info( - "Note that the sign-in screen will prompt you to sign-in to [blue]Microsoft Graph Command Line Tools[/] - this is expected." + "Note that the sign-in screen will prompt you to sign-in to" + " [blue]Microsoft Graph Command Line Tools[/] - this is expected." 
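
`create_token_administrator` above relies on MSAL's device-code flow: initiate a flow, show the administrator the sign-in message, then block until sign-in completes. Roughly, under assumed placeholder tenant and client IDs:

```python
import msal

TENANT_ID = "00000000-0000-0000-0000-000000000000"  # placeholder
CLIENT_ID = "11111111-1111-1111-1111-111111111111"  # placeholder

app = msal.PublicClientApplication(
    CLIENT_ID, authority=f"https://login.microsoftonline.com/{TENANT_ID}"
)
flow = app.initiate_device_flow(scopes=["Directory.Read.All"])
if "user_code" not in flow:
    msg = f"Could not initiate device login: {flow.get('error_description')}."
    raise RuntimeError(msg)
print(flow["message"])  # tells the administrator where to enter the device code
result = app.acquire_token_by_device_flow(flow)  # blocks until sign-in completes
access_token = result["access_token"]
```
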
) self.logger.info(flow["message"]) # Block until a response is received @@ -400,9 +398,8 @@ def create_token_administrator(self) -> str: error_description = "Could not create access token" if isinstance(result, dict) and "error_description" in result: error_description += f": {result['error_description']}" - raise DataSafeHavenMicrosoftGraphException( - f"{error_description}.\n{str(exc)}" - ) from exc + msg = f"{error_description}.\n{exc}" + raise DataSafeHavenMicrosoftGraphError(msg) from exc def create_token_application( self, application_id: str, application_secret: str @@ -410,7 +407,7 @@ def create_token_application( """Return an access token for the given application ID and secret Raises: - DataSafeHavenMicrosoftGraphException if the token could not be created + DataSafeHavenMicrosoftGraphError if the token could not be created """ result = None try: @@ -425,29 +422,27 @@ def create_token_application( result = app.acquire_token_for_client( scopes=["https://graph.microsoft.com/.default"] ) - if not isinstance(result, Dict): - raise DataSafeHavenMicrosoftGraphException( - "Invalid application token returned from Microsoft Graph." - ) + if not isinstance(result, dict): + msg = "Invalid application token returned from Microsoft Graph." + raise DataSafeHavenMicrosoftGraphError(msg) return str(result["access_token"]) except Exception as exc: error_description = "Could not create access token" if result and "error_description" in result: error_description += f": {result['error_description']}" - raise DataSafeHavenMicrosoftGraphException( - f"{error_description}.\n{str(exc)}" - ) from exc + msg = f"{error_description}.\n{exc}" + raise DataSafeHavenMicrosoftGraphError(msg) from exc def create_user( self, - request_json: Dict[str, Any], + request_json: dict[str, Any], email_address: str, phone_number: str, ) -> None: """Create an AzureAD user if it does not already exist Raises: - DataSafeHavenMicrosoftGraphException if the user could not be created + DataSafeHavenMicrosoftGraphError if the user could not be created """ username = request_json["mailNickname"] try: @@ -477,7 +472,7 @@ def create_user( f"https://graph.microsoft.com/beta/users/{user_id}/authentication/emailMethods", json={"emailAddress": email_address}, ) - except DataSafeHavenMicrosoftGraphException as exc: + except DataSafeHavenMicrosoftGraphError as exc: if "already exists" not in str(exc): raise # Set the authentication phone number @@ -486,7 +481,7 @@ def create_user( f"https://graph.microsoft.com/beta/users/{user_id}/authentication/phoneMethods", json={"phoneNumber": phone_number, "phoneType": "mobile"}, ) - except DataSafeHavenMicrosoftGraphException as exc: + except DataSafeHavenMicrosoftGraphError as exc: if "already exists" not in str(exc): raise # Ensure user is enabled @@ -497,10 +492,9 @@ def create_user( self.logger.info( f"{final_verb} AzureAD user '[green]{username}[/]'.", ) - except (DataSafeHavenMicrosoftGraphException, IndexError) as exc: - raise DataSafeHavenMicrosoftGraphException( - f"Could not create/update user {username}.\n{str(exc)}" - ) from exc + except (DataSafeHavenMicrosoftGraphError, IndexError) as exc: + msg = f"Could not create/update user {username}.\n{exc}" + raise DataSafeHavenMicrosoftGraphError(msg) from exc def delete_application( self, @@ -509,7 +503,7 @@ def delete_application( """Remove an application from AzureAD Raises: - DataSafeHavenMicrosoftGraphException if the application could not be deleted + DataSafeHavenMicrosoftGraphError if the application could not be deleted """ try: # 
Delete the application if it exists @@ -524,30 +518,29 @@ def delete_application( f"Deleted application '[green]{application_name}[/]'.", ) except Exception as exc: - raise DataSafeHavenMicrosoftGraphException( - f"Could not delete application '{application_name}'.\n{str(exc)}" - ) from exc + msg = f"Could not delete application '{application_name}'.\n{exc}" + raise DataSafeHavenMicrosoftGraphError(msg) from exc - def get_application_by_name(self, application_name: str) -> Dict[str, Any] | None: + def get_application_by_name(self, application_name: str) -> dict[str, Any] | None: try: - return [ + return next( application for application in self.read_applications() if application["displayName"] == application_name - ][0] - except (DataSafeHavenMicrosoftGraphException, IndexError): + ) + except (DataSafeHavenMicrosoftGraphError, IndexError): return None def get_service_principal_by_name( self, service_principal_name: str - ) -> Dict[str, Any] | None: + ) -> dict[str, Any] | None: try: - return [ + return next( service_principal for service_principal in self.read_service_principals() if service_principal["displayName"] == service_principal_name - ][0] - except (DataSafeHavenMicrosoftGraphException, IndexError): + ) + except (DataSafeHavenMicrosoftGraphError, IndexError): return None def get_id_from_application_name(self, application_name: str) -> str | None: @@ -556,31 +549,31 @@ def get_id_from_application_name(self, application_name: str) -> str | None: if not application: return None return str(application["appId"]) - except DataSafeHavenMicrosoftGraphException: + except DataSafeHavenMicrosoftGraphError: return None def get_id_from_groupname(self, group_name: str) -> str | None: try: return str( - [ + next( group for group in self.read_groups() if group["displayName"] == group_name - ][0]["id"] + )["id"] ) - except (DataSafeHavenMicrosoftGraphException, IndexError): + except (DataSafeHavenMicrosoftGraphError, IndexError): return None def get_id_from_username(self, username: str) -> str | None: try: return str( - [ + next( user for user in self.read_users() if user["mailNickname"] == username - ][0]["id"] + )["id"] ) - except (DataSafeHavenMicrosoftGraphException, IndexError): + except (DataSafeHavenMicrosoftGraphError, IndexError): return None def http_delete(self, url: str, **kwargs: Any) -> requests.Response: @@ -590,7 +583,7 @@ def http_delete(self, url: str, **kwargs: Any) -> requests.Response: requests.Response: The response from the remote server Raises: - DataSafeHavenMicrosoftGraphException if the request failed + DataSafeHavenMicrosoftGraphError if the request failed """ try: response = requests.delete( @@ -600,12 +593,11 @@ def http_delete(self, url: str, **kwargs: Any) -> requests.Response: **kwargs, ) if not response.ok: - raise DataSafeHavenInternalException(response.content) + raise DataSafeHavenInternalError(response.content) return response except Exception as exc: - raise DataSafeHavenMicrosoftGraphException( - f"Could not execute DELETE request.\n{str(exc)}" - ) from exc + msg = f"Could not execute DELETE request.\n{exc}" + raise DataSafeHavenMicrosoftGraphError(msg) from exc def http_get(self, url: str, **kwargs: Any) -> requests.Response: """Make an HTTP GET request @@ -614,7 +606,7 @@ def http_get(self, url: str, **kwargs: Any) -> requests.Response: requests.Response: The response from the remote server Raises: - DataSafeHavenMicrosoftGraphException if the request failed + DataSafeHavenMicrosoftGraphError if the request failed """ try: response = requests.get( @@ 
-624,12 +616,11 @@ def http_get(self, url: str, **kwargs: Any) -> requests.Response: **kwargs, ) if not response.ok: - raise DataSafeHavenInternalException(response.content) + raise DataSafeHavenInternalError(response.content) return response except Exception as exc: - raise DataSafeHavenMicrosoftGraphException( - f"Could not execute GET request.\n{str(exc)}" - ) from exc + msg = f"Could not execute GET request.\n{exc}" + raise DataSafeHavenMicrosoftGraphError(msg) from exc def http_patch(self, url: str, **kwargs: Any) -> requests.Response: """Make an HTTP PATCH request @@ -638,7 +629,7 @@ def http_patch(self, url: str, **kwargs: Any) -> requests.Response: requests.Response: The response from the remote server Raises: - DataSafeHavenMicrosoftGraphException if the request failed + DataSafeHavenMicrosoftGraphError if the request failed """ try: response = requests.patch( @@ -648,12 +639,11 @@ def http_patch(self, url: str, **kwargs: Any) -> requests.Response: **kwargs, ) if not response.ok: - raise DataSafeHavenInternalException(response.content) + raise DataSafeHavenInternalError(response.content) return response except Exception as exc: - raise DataSafeHavenMicrosoftGraphException( - f"Could not execute PATCH request.\n{str(exc)}" - ) from exc + msg = f"Could not execute PATCH request.\n{exc}" + raise DataSafeHavenMicrosoftGraphError(msg) from exc def http_post(self, url: str, **kwargs: Any) -> requests.Response: """Make an HTTP POST request @@ -662,7 +652,7 @@ def http_post(self, url: str, **kwargs: Any) -> requests.Response: requests.Response: The response from the remote server Raises: - DataSafeHavenMicrosoftGraphException if the request failed + DataSafeHavenMicrosoftGraphError if the request failed """ try: response = requests.post( @@ -672,22 +662,21 @@ def http_post(self, url: str, **kwargs: Any) -> requests.Response: **kwargs, ) if not response.ok: - raise DataSafeHavenInternalException(response.content) + raise DataSafeHavenInternalError(response.content) time.sleep(30) # wait for operation to complete return response except Exception as exc: - raise DataSafeHavenMicrosoftGraphException( - f"Could not execute POST request.\n{str(exc)}" - ) from exc + msg = f"Could not execute POST request.\n{exc}" + raise DataSafeHavenMicrosoftGraphError(msg) from exc - def read_applications(self) -> Sequence[Dict[str, Any]]: + def read_applications(self) -> Sequence[dict[str, Any]]: """Get list of applications Returns: JSON: A JSON list of applications Raises: - DataSafeHavenMicrosoftGraphException if applications could not be loaded + DataSafeHavenMicrosoftGraphError if applications could not be loaded """ try: return [ @@ -697,20 +686,19 @@ def read_applications(self) -> Sequence[Dict[str, Any]]: ] ] except Exception as exc: - raise DataSafeHavenMicrosoftGraphException( - f"Could not load list of applications.\n{str(exc)}" - ) from exc + msg = f"Could not load list of applications.\n{exc}" + raise DataSafeHavenMicrosoftGraphError(msg) from exc def read_application_permissions( self, application_service_principal_id: str - ) -> Sequence[Dict[str, Any]]: + ) -> Sequence[dict[str, Any]]: """Get list of application permissions Returns: JSON: A JSON list of application permissions Raises: - DataSafeHavenMicrosoftGraphException if application permissions could not be loaded + DataSafeHavenMicrosoftGraphError if application permissions could not be loaded """ try: delegated = self.http_get( @@ -721,38 +709,36 @@ def read_application_permissions( ).json()["value"] return [dict(obj) for obj in 
(delegated + application)] except Exception as exc: - raise DataSafeHavenMicrosoftGraphException( - f"Could not load list of application permissions.\n{str(exc)}" - ) from exc + msg = f"Could not load list of application permissions.\n{exc}" + raise DataSafeHavenMicrosoftGraphError(msg) from exc - def read_domains(self) -> Sequence[Dict[str, Any]]: + def read_domains(self) -> Sequence[dict[str, Any]]: """Get details of AzureAD domains Returns: JSON: A JSON list of AzureAD domains Raises: - DataSafeHavenMicrosoftGraphException if domains could not be loaded + DataSafeHavenMicrosoftGraphError if domains could not be loaded """ try: json_response = self.http_get(f"{self.base_endpoint}/domains").json() return [dict(obj) for obj in json_response["value"]] except Exception as exc: - raise DataSafeHavenMicrosoftGraphException( - f"Could not load list of domains.\n{str(exc)}" - ) from exc + msg = f"Could not load list of domains.\n{exc}" + raise DataSafeHavenMicrosoftGraphError(msg) from exc def read_groups( self, attributes: Sequence[str] | None = None, - ) -> Sequence[Dict[str, Any]]: + ) -> Sequence[dict[str, Any]]: """Get details of AzureAD groups Returns: JSON: A JSON list of AzureAD groups Raises: - DataSafeHavenMicrosoftGraphException if groups could not be loaded + DataSafeHavenMicrosoftGraphError if groups could not be loaded """ try: endpoint = f"{self.base_endpoint}/groups" @@ -760,11 +746,10 @@ def read_groups( endpoint += f"?$select={','.join(attributes)}" return [dict(obj) for obj in self.http_get(endpoint).json()["value"]] except Exception as exc: - raise DataSafeHavenMicrosoftGraphException( - f"Could not load list of groups.\n{str(exc)}" - ) from exc + msg = f"Could not load list of groups.\n{exc}" + raise DataSafeHavenMicrosoftGraphError(msg) from exc - def read_service_principals(self) -> Sequence[Dict[str, Any]]: + def read_service_principals(self) -> Sequence[dict[str, Any]]: """Get list of service principals""" try: return [ @@ -774,20 +759,19 @@ def read_service_principals(self) -> Sequence[Dict[str, Any]]: ).json()["value"] ] except Exception as exc: - raise DataSafeHavenMicrosoftGraphException( - f"Could not load list of service principals.\n{str(exc)}" - ) from exc + msg = f"Could not load list of service principals.\n{exc}" + raise DataSafeHavenMicrosoftGraphError(msg) from exc def read_users( self, attributes: Sequence[str] | None = None - ) -> Sequence[Dict[str, Any]]: + ) -> Sequence[dict[str, Any]]: """Get details of AzureAD users Returns: JSON: A JSON list of AzureAD users Raises: - DataSafeHavenMicrosoftGraphException if users could not be loaded + DataSafeHavenMicrosoftGraphError if users could not be loaded """ attributes = ( attributes @@ -810,14 +794,15 @@ def read_users( self.linux_schema, ] ) - users: Sequence[Dict[str, Any]] + users: Sequence[dict[str, Any]] try: endpoint = f"{self.base_endpoint}/users" if attributes: endpoint += f"?$select={','.join(attributes)}" users = self.http_get(endpoint).json()["value"] administrators = self.http_get( - f"{self.base_endpoint}/directoryRoles/roleTemplateId={self.role_template_ids['Global Administrator']}/members" + f"{self.base_endpoint}/directoryRoles/roleTemplateId=" + f"{self.role_template_ids['Global Administrator']}/members" ).json()["value"] for user in users: user["isGlobalAdmin"] = any( @@ -828,9 +813,8 @@ def read_users( user[self.linux_schema] = {} return users except Exception as exc: - raise DataSafeHavenMicrosoftGraphException( - f"Could not load list of users.\n{str(exc)}" - ) from exc + msg = f"Could 
not load list of users.\n{exc}" + raise DataSafeHavenMicrosoftGraphError(msg) from exc def remove_user_from_group( self, @@ -840,7 +824,7 @@ def remove_user_from_group( """Remove a user from an AzureAD group Raises: - DataSafeHavenMicrosoftGraphException if the user could not be removed + DataSafeHavenMicrosoftGraphError if the user could not be removed """ try: user_id = self.get_id_from_username(username) @@ -850,9 +834,10 @@ def remove_user_from_group( f"{self.base_endpoint}/groups/{group_id}/members/{user_id}/$ref", ) except Exception as exc: - raise DataSafeHavenMicrosoftGraphException( - f"Could not remove user '{username}' from group '{group_name}'.\n{str(exc)}" - ) from exc + msg = ( + f"Could not remove user '{username}' from group '{group_name}'.\n{exc}" + ) + raise DataSafeHavenMicrosoftGraphError(msg) from exc def verify_custom_domain( self, domain_name: str, expected_nameservers: Sequence[str] @@ -860,15 +845,14 @@ def verify_custom_domain( """Verify AzureAD custom domain Raises: - DataSafeHavenMicrosoftGraphException if domain could not be verified + DataSafeHavenMicrosoftGraphError if domain could not be verified """ try: # Create the AzureAD custom domain if it does not already exist domains = self.read_domains() if not any(d["id"] == domain_name for d in domains): - raise DataSafeHavenMicrosoftGraphException( - f"Domain {domain_name} has not been added to AzureAD." - ) + msg = f"Domain {domain_name} has not been added to AzureAD." + raise DataSafeHavenMicrosoftGraphError(msg) # Wait until domain delegation is complete while True: # Check whether all expected nameservers are active @@ -884,14 +868,16 @@ def verify_custom_domain( break # Prompt user to set domain delegation manually self.logger.info( - f"To proceed you will need to delegate [green]{domain_name}[/] to Azure (https://learn.microsoft.com/en-us/azure/dns/dns-delegate-domain-azure-dns#delegate-the-domain)" + f"To proceed you will need to delegate [green]{domain_name}[/] to Azure" + " (https://learn.microsoft.com/en-us/azure/dns/dns-delegate-domain-azure-dns#delegate-the-domain)" ) self.logger.info( - f"You will need to delegate to the following nameservers: {', '.join([f'[green]{n}[/]' for n in expected_nameservers])}" + "You will need to delegate to the following nameservers:" + f" {', '.join([f'[green]{n}[/]' for n in expected_nameservers])}" ) self.logger.confirm( f"Have you delegated {domain_name} to the Azure nameservers above?", - True, + default_to_yes=True, ) # Send verification request if needed if not any((d["id"] == domain_name and d["isVerified"]) for d in domains): @@ -899,8 +885,7 @@ def verify_custom_domain( f"{self.base_endpoint}/domains/{domain_name}/verify" ) if not response.json()["isVerified"]: - raise DataSafeHavenMicrosoftGraphException(response.content) + raise DataSafeHavenMicrosoftGraphError(response.content) except Exception as exc: - raise DataSafeHavenMicrosoftGraphException( - f"Could not verify domain '{domain_name}'.\n{str(exc)}" - ) from exc + msg = f"Could not verify domain '{domain_name}'.\n{exc}" + raise DataSafeHavenMicrosoftGraphError(msg) from exc diff --git a/data_safe_haven/external/interface/azure_authenticator.py b/data_safe_haven/external/interface/azure_authenticator.py index 807ef73750..850d8459ed 100644 --- a/data_safe_haven/external/interface/azure_authenticator.py +++ b/data_safe_haven/external/interface/azure_authenticator.py @@ -1,16 +1,11 @@ """Standalone utility class for anything that needs to authenticate against Azure""" -# Standard library imports -from 
typing import Optional - -# Third party imports from azure.core.exceptions import ClientAuthenticationError from azure.identity import DefaultAzureCredential from azure.mgmt.resource import SubscriptionClient -# Local imports from data_safe_haven.exceptions import ( - DataSafeHavenAzureException, - DataSafeHavenInputException, + DataSafeHavenAzureError, + DataSafeHavenInputError, ) @@ -19,9 +14,9 @@ class AzureAuthenticator: def __init__(self, subscription_name: str) -> None: self.subscription_name: str = subscription_name - self.credential_: Optional[DefaultAzureCredential] = None - self.subscription_id_: Optional[str] = None - self.tenant_id_: Optional[str] = None + self.credential_: DefaultAzureCredential | None = None + self.subscription_id_: str | None = None + self.tenant_id_: str | None = None @property def credential(self) -> DefaultAzureCredential: @@ -38,7 +33,8 @@ def subscription_id(self) -> str: if not self.subscription_id_: self.login() if not self.subscription_id_: - raise DataSafeHavenAzureException("Failed to load subscription ID.") + msg = "Failed to load subscription ID." + raise DataSafeHavenAzureError(msg) return self.subscription_id_ @property @@ -46,7 +42,8 @@ def tenant_id(self) -> str: if not self.tenant_id_: self.login() if not self.tenant_id_: - raise DataSafeHavenAzureException("Failed to load tenant ID.") + msg = "Failed to load tenant ID." + raise DataSafeHavenAzureError(msg) return self.tenant_id_ def login(self) -> None: @@ -56,16 +53,14 @@ def login(self) -> None: # Check that the Azure credentials are valid try: - for subscription in [s for s in subscription_client.subscriptions.list()]: + for subscription in list(subscription_client.subscriptions.list()): if subscription.display_name == self.subscription_name: self.subscription_id_ = subscription.subscription_id self.tenant_id_ = subscription.tenant_id break except ClientAuthenticationError as exc: - raise DataSafeHavenAzureException( - f"Failed to authenticate with Azure.\n{str(exc)}" - ) from exc + msg = f"Failed to authenticate with Azure.\n{exc}" + raise DataSafeHavenAzureError(msg) from exc if not (self.subscription_id and self.tenant_id): - raise DataSafeHavenInputException( - f"Could not find subscription '{self.subscription_name}'" - ) + msg = f"Could not find subscription '{self.subscription_name}'" + raise DataSafeHavenInputError(msg) diff --git a/data_safe_haven/external/interface/azure_container_instance.py b/data_safe_haven/external/interface/azure_container_instance.py index c3fc8ab3ad..6d0dd838ff 100644 --- a/data_safe_haven/external/interface/azure_container_instance.py +++ b/data_safe_haven/external/interface/azure_container_instance.py @@ -1,10 +1,7 @@ """Backend for a Data Safe Haven environment""" -# Standard library imports import contextlib import time -from typing import List, Optional -# Third party imports import websocket from azure.core.polling import LROPoller from azure.mgmt.containerinstance import ContainerInstanceManagementClient @@ -13,8 +10,7 @@ ContainerExecRequestTerminalSize, ) -# Local imports -from data_safe_haven.exceptions import DataSafeHavenAzureException +from data_safe_haven.exceptions import DataSafeHavenAzureError from data_safe_haven.external import AzureApi from data_safe_haven.utility import Logger @@ -48,11 +44,10 @@ def current_ip_address(self) -> str: ).ip_address if ip_address and isinstance(ip_address.ip, str): return ip_address.ip - raise DataSafeHavenAzureException( - f"Could not determine IP address for container group {self.container_group_name}." 
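
The `AzureAuthenticator.login()` hunk above resolves a subscription name to subscription and tenant IDs by listing subscriptions under a `DefaultAzureCredential`. A standalone sketch of that lookup, with a placeholder subscription name:

```python
from azure.identity import DefaultAzureCredential
from azure.mgmt.resource import SubscriptionClient

subscription_name = "My Subscription"  # placeholder
client = SubscriptionClient(DefaultAzureCredential())

# Find the subscription whose display name matches
match = next(
    (s for s in client.subscriptions.list() if s.display_name == subscription_name),
    None,
)
if match is None:
    msg = f"Could not find subscription '{subscription_name}'"
    raise ValueError(msg)
print(match.subscription_id, match.tenant_id)
```
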
- ) + msg = f"Could not determine IP address for container group {self.container_group_name}." + raise DataSafeHavenAzureError(msg) - def restart(self, target_ip_address: Optional[str] = None) -> None: + def restart(self, target_ip_address: str | None = None) -> None: """Restart the container group""" # Connect to Azure clients try: @@ -64,7 +59,8 @@ def restart(self, target_ip_address: Optional[str] = None) -> None: # Restart container group self.logger.debug( - f"Restarting container group [green]{self.container_group_name}[/] with IP address [green]{target_ip_address}[/]...", + f"Restarting container group [green]{self.container_group_name}[/]" + f" with IP address [green]{target_ip_address}[/]...", ) while True: if ( @@ -87,14 +83,16 @@ def restart(self, target_ip_address: Optional[str] = None) -> None: if self.current_ip_address == target_ip_address: break self.logger.info( - f"Restarted container group [green]{self.container_group_name}[/] with IP address [green]{self.current_ip_address}[/].", + f"Restarted container group [green]{self.container_group_name}[/]" + f" with IP address [green]{self.current_ip_address}[/].", ) except Exception as exc: - raise DataSafeHavenAzureException( - f"Could not restart container group {self.container_group_name}.\n{str(exc)}" - ) from exc + msg = ( + f"Could not restart container group {self.container_group_name}.\n{exc}" + ) + raise DataSafeHavenAzureError(msg) from exc - def run_executable(self, container_name: str, executable_path: str) -> List[str]: + def run_executable(self, container_name: str, executable_path: str) -> list[str]: """ Run a script or command on one of the containers. diff --git a/data_safe_haven/external/interface/azure_fileshare.py b/data_safe_haven/external/interface/azure_fileshare.py index b99a3bf9d0..91da5c6bd2 100644 --- a/data_safe_haven/external/interface/azure_fileshare.py +++ b/data_safe_haven/external/interface/azure_fileshare.py @@ -1,15 +1,11 @@ """Helper class for Azure fileshares""" -# Standard library imports from contextlib import suppress -from typing import Optional -# Third party imports from azure.core.exceptions import ResourceNotFoundError from azure.mgmt.storage import StorageManagementClient from azure.storage.fileshare import ShareDirectoryClient, ShareFileClient -# Local imports -from data_safe_haven.exceptions import DataSafeHavenAzureException +from data_safe_haven.exceptions import DataSafeHavenAzureError from data_safe_haven.external import AzureApi @@ -24,8 +20,8 @@ def __init__( share_name: str, ): self.azure_api = AzureApi(subscription_name) - self.storage_client_: Optional[StorageManagementClient] = None - self.storage_account_key_: Optional[str] = None + self.storage_client_: StorageManagementClient | None = None + self.storage_account_key_: str | None = None self.storage_account_name: str = storage_account_name self.resource_group_name: str = storage_account_resource_group_name self.share_name: str = share_name @@ -49,9 +45,8 @@ def storage_account_key(self) -> str: if isinstance(k.value, str) ] if not storage_account_keys: - raise DataSafeHavenAzureException( - f"Could not load key values for storage account {self.storage_account_name}." - ) + msg = f"Could not load key values for storage account {self.storage_account_name}." 
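
For context on the class being edited here: `AzureFileShare` wraps `azure-storage-file-share` clients authenticated with the storage account key it fetches above. A minimal sketch of the underlying SDK call, with placeholder account details:

```python
from azure.storage.fileshare import ShareFileClient

file_client = ShareFileClient(
    account_url="https://examplestorage.file.core.windows.net",  # placeholder
    share_name="example-share",  # placeholder
    file_path="folder/config.yaml",
    credential="<storage-account-key>",  # placeholder key
)
file_client.upload_file(b"example: contents\n")  # creates or overwrites the file
```
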
+ raise DataSafeHavenAzureError(msg) self.storage_account_key_ = storage_account_keys[0] return self.storage_account_key_ @@ -68,9 +63,8 @@ def upload(self, destination_path: str, file_contents: str) -> None: ) file_client.upload_file(file_contents.encode("utf-8")) except Exception as exc: - raise DataSafeHavenAzureException( - f"Failed to upload data to [green]{target}[/] in [green]{self.share_name}[/]." - ) from exc + msg = f"Failed to upload data to [green]{target}[/] in [green]{self.share_name}[/]." + raise DataSafeHavenAzureError(msg) from exc def delete(self, destination_path: str) -> None: """Delete a file from the target storage account""" @@ -86,9 +80,8 @@ def delete(self, destination_path: str) -> None: if self.file_exists(file_client): file_client.delete_file() except Exception as exc: - raise DataSafeHavenAzureException( - f"Failed to delete file [green]{target}[/] in [green]{self.share_name}[/]." - ) from exc + msg = f"Failed to delete file [green]{target}[/] in [green]{self.share_name}[/]." + raise DataSafeHavenAzureError(msg) from exc @staticmethod def file_exists(file_client: ShareFileClient) -> bool: @@ -100,7 +93,7 @@ def file_exists(file_client: ShareFileClient) -> bool: def file_client( self, file_name: str, - directory: Optional[str] = None, + directory: str | None = None, ) -> ShareFileClient: if directory: directory_client = ShareDirectoryClient( diff --git a/data_safe_haven/external/interface/azure_ipv4_range.py b/data_safe_haven/external/interface/azure_ipv4_range.py index 94ec127d57..7160919a53 100644 --- a/data_safe_haven/external/interface/azure_ipv4_range.py +++ b/data_safe_haven/external/interface/azure_ipv4_range.py @@ -1,11 +1,8 @@ -# Standard library imports import ipaddress import math from contextlib import suppress -from typing import List -# Local imports -from data_safe_haven.exceptions import DataSafeHavenIPRangeException +from data_safe_haven.exceptions import DataSafeHavenIPRangeError class AzureIPv4Range(ipaddress.IPv4Network): @@ -23,35 +20,33 @@ def __init__( ) ) if len(networks) != 1: - raise DataSafeHavenIPRangeException( - f"{ip_address_first}-{ip_address_last} cannot be expressed as a single network range." - ) + msg = f"{ip_address_first}-{ip_address_last} cannot be expressed as a single network range." 
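
The single-network check above comes straight from the standard library: `ipaddress.summarize_address_range` yields the minimal list of networks covering a first/last address pair, so a valid `AzureIPv4Range` is one where that list has exactly one element. For example:

```python
import ipaddress

first = ipaddress.IPv4Address("10.0.0.0")
last = ipaddress.IPv4Address("10.0.0.255")

# A first/last pair is a valid range only if it collapses to exactly one network
networks = list(ipaddress.summarize_address_range(first, last))
assert networks == [ipaddress.IPv4Network("10.0.0.0/24")]

# An uneven range needs several networks, so the constructor above raises
uneven = ipaddress.summarize_address_range(first, ipaddress.IPv4Address("10.0.0.42"))
assert len(list(uneven)) > 1

# Azure reserves the first host addresses (gateway plus two DNS mappings),
# which is why available() slices three entries off the front of hosts()
hosts = list(networks[0].hosts())
print(hosts[3:][:2])  # [IPv4Address('10.0.0.4'), IPv4Address('10.0.0.5')]
```
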
+ raise DataSafeHavenIPRangeError(msg) super().__init__(networks[0]) - self._subnets: List["AzureIPv4Range"] = [] + self._subnets: list[AzureIPv4Range] = [] @classmethod def from_cidr(cls, ip_cidr: str) -> "AzureIPv4Range": network = ipaddress.IPv4Network(ip_cidr) return cls(network[0], network[-1]) - def all(self) -> List[ipaddress.IPv4Address]: + def all_ips(self) -> list[ipaddress.IPv4Address]: """All IP addresses in the range""" return list(self.hosts()) - def available(self) -> List[ipaddress.IPv4Address]: + def available(self) -> list[ipaddress.IPv4Address]: """Azure reserves x.x.x.1 for the default gateway and (x.x.x.2, x.x.x.3) to map Azure DNS IPs.""" - return list(self.all())[3:] + return list(self.all_ips())[3:] def next_subnet(self, number_of_addresses: int) -> "AzureIPv4Range": """Find the next unused subnet of a given size""" if not math.log2(number_of_addresses).is_integer(): - raise DataSafeHavenIPRangeException( - f"Number of address '{number_of_addresses}' must be a power of 2" - ) + msg = f"Number of addresses '{number_of_addresses}' must be a power of 2" + raise DataSafeHavenIPRangeError(msg) ip_address_first = self[0] while True: ip_address_last = ip_address_first + int(number_of_addresses - 1) - with suppress(DataSafeHavenIPRangeException): + with suppress(DataSafeHavenIPRangeError): candidate = AzureIPv4Range(ip_address_first, ip_address_last) if not any(subnet.overlaps(candidate) for subnet in self._subnets): self._subnets.append(candidate) diff --git a/data_safe_haven/external/interface/azure_postgresql_database.py b/data_safe_haven/external/interface/azure_postgresql_database.py index e56a65d942..280fdd8c9e 100644 --- a/data_safe_haven/external/interface/azure_postgresql_database.py +++ b/data_safe_haven/external/interface/azure_postgresql_database.py @@ -1,11 +1,10 @@ """Backend for a Data Safe Haven environment""" -# Standard library imports +import datetime import pathlib import time -from datetime import datetime -from typing import Any, Dict, List, Optional, Sequence +from collections.abc import Sequence +from typing import Any -# Third party imports import psycopg2 import requests from azure.core.polling import LROPoller @@ -16,10 +15,9 @@ ServerUpdateParameters, ) -# Local imports from data_safe_haven.exceptions import ( - DataSafeHavenAzureException, - DataSafeHavenInputException, + DataSafeHavenAzureError, + DataSafeHavenInputError, ) from data_safe_haven.external import AzureApi from data_safe_haven.utility import FileReader, Logger, PathType @@ -29,9 +27,9 @@ class AzurePostgreSQLDatabase: """Interface for Azure PostgreSQL databases.""" current_ip: str - db_client_: Optional[PostgreSQLManagementClient] + db_client_: PostgreSQLManagementClient | None db_name: str - db_server_: Optional[Server] + db_server_: Server | None db_server_admin_password: str resource_group_name: str server_name: str @@ -56,7 +54,9 @@ def __init__( self.logger = Logger() self.resource_group_name = resource_group_name self.server_name = database_server_name - self.rule_suffix = datetime.now().strftime(r"%Y%m%d-%H%M%S") + self.rule_suffix = datetime.datetime.now(tz=datetime.timezone.utc).strftime( + r"%Y%m%d-%H%M%S" + ) @staticmethod def wait(poller: LROPoller[Any]) -> None: @@ -100,13 +100,12 @@ def db_connection(self, n_retries: int = 0) -> psycopg2.extensions.connection: n_retries -= 1 time.sleep(10) else: - raise DataSafeHavenAzureException( - f"Could not connect to database.\n{str(exc)}" - ) from exc + msg = f"Could not connect to database.\n{exc}" + raise 
DataSafeHavenAzureError(msg) from exc return connection def load_sql( - self, filepath: PathType, mustache_values: Optional[Dict[str, str]] = None + self, filepath: PathType, mustache_values: dict[str, str] | None = None ) -> str: """Load filepath into a single SQL string.""" reader = FileReader(filepath) @@ -121,11 +120,11 @@ def load_sql( def execute_scripts( self, filepaths: Sequence[PathType], - mustache_values: Optional[Dict[str, Any]] = None, - ) -> List[List[str]]: + mustache_values: dict[str, Any] | None = None, + ) -> list[list[str]]: """Execute scripts on the PostgreSQL server.""" - outputs: List[List[str]] = [] - connection: Optional[psycopg2.extensions.connection] = None + outputs: list[list[str]] = [] + connection: psycopg2.extensions.connection | None = None cursor = None try: @@ -138,9 +137,9 @@ def execute_scripts( # Apply the Guacamole initialisation script for filepath in filepaths: - filepath = pathlib.Path(filepath) - self.logger.info(f"Running SQL script: [green]{filepath.name}[/].") - commands = self.load_sql(filepath, mustache_values) + _filepath = pathlib.Path(filepath) + self.logger.info(f"Running SQL script: [green]{_filepath.name}[/].") + commands = self.load_sql(_filepath, mustache_values) cursor.execute(commands) if "SELECT" in cursor.statusmessage: outputs += [[str(msg) for msg in msg_tuple] for msg_tuple in cursor] @@ -149,9 +148,8 @@ def execute_scripts( connection.commit() self.logger.info(f"Finished running {len(filepaths)} SQL scripts.") except (Exception, psycopg2.Error) as exc: - raise DataSafeHavenAzureException( - f"Error while connecting to PostgreSQL.\n{str(exc)}" - ) from exc + msg = f"Error while connecting to PostgreSQL.\n{exc}" + raise DataSafeHavenAzureError(msg) from exc finally: # Close the connection if it is open if connection: @@ -211,10 +209,10 @@ def set_database_access(self, action: str) -> None: f"Removed temporary firewall rule for [green]{self.current_ip}[/].", ) else: - raise DataSafeHavenInputException( - f"Database access action {action} was not recognised." - ) + msg = f"Database access action {action} was not recognised." + raise DataSafeHavenInputError(msg) self.db_server_ = None # Force refresh of self.db_server self.logger.info( - f"Public network access to [green]{self.server_name}[/] is [green]{self.db_server.public_network_access}[/]." + f"Public network access to [green]{self.server_name}[/]" + f" is [green]{self.db_server.public_network_access}[/]." 
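
The `rule_suffix` change above, and `time_as_string` in the next file, replace naive `datetime.now()` calls with timezone-aware ones, which is what ruff's DTZ rules enforce. The difference, in brief:

```python
import datetime

naive = datetime.datetime.now()  # flagged by ruff (DTZ005): implicit local time
aware = datetime.datetime.now(tz=datetime.timezone.utc)  # pinned to UTC

print(aware.strftime(r"%Y%m%d-%H%M%S"))  # e.g. a sortable firewall-rule suffix
print(aware.tzinfo)  # timezone.utc; naive.tzinfo would be None
```
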
) diff --git a/data_safe_haven/functions/miscellaneous.py b/data_safe_haven/functions/miscellaneous.py index ab435a6207..89070728ab 100644 --- a/data_safe_haven/functions/miscellaneous.py +++ b/data_safe_haven/functions/miscellaneous.py @@ -1,22 +1,21 @@ -# Standard library imports import datetime -from typing import Any, Dict, List, Optional +from typing import Any -# Third-party imports import pytz -def as_dict(object: Any) -> Dict[str, Any]: +def as_dict(container: Any) -> dict[str, Any]: if ( - not isinstance(object, dict) - and hasattr(object, "keys") - and all(isinstance(x, str) for x in object.keys()) + not isinstance(container, dict) + and hasattr(container, "keys") + and all(isinstance(x, str) for x in container.keys()) ): - raise TypeError(f"{object} {type(object)} is not a valid Dict[str, Any]") - return object + msg = f"{container} {type(container)} is not a valid dict[str, Any]" + raise TypeError(msg) + return container -def ordered_private_dns_zones(resource_type: Optional[str] = None) -> List[str]: +def ordered_private_dns_zones(resource_type: str | None = None) -> list[str]: """ Return required DNS zones for a given resource type. See https://learn.microsoft.com/en-us/azure/private-link/private-endpoint-dns for details. @@ -35,12 +34,12 @@ def ordered_private_dns_zones(resource_type: Optional[str] = None) -> List[str]: } if resource_type and (resource_type in dns_zones): return dns_zones[resource_type] - return sorted(set(zone for zones in dns_zones.values() for zone in zones)) + return sorted({zone for zones in dns_zones.values() for zone in zones}) def time_as_string(hour: int, minute: int, timezone: str) -> str: """Get the next occurrence of a repeating daily time as a string""" - dt = datetime.datetime.now().replace( + dt = datetime.datetime.now(datetime.timezone.utc).replace( hour=hour, minute=minute, second=0, diff --git a/data_safe_haven/functions/strings.py b/data_safe_haven/functions/strings.py index 26654d05c8..fdfb92e5b3 100644 --- a/data_safe_haven/functions/strings.py +++ b/data_safe_haven/functions/strings.py @@ -1,9 +1,8 @@ -# Standard library imports import base64 import hashlib import secrets import string -from typing import List, Sequence +from collections.abc import Sequence def alphanumeric(input_string: str) -> str: @@ -27,7 +26,10 @@ def hex_string(length: int) -> str: def password(length: int) -> str: - """Generate a string of 'length' random alphanumeric characters. Require at least one lower-case, one upper-case and one digit.""" + """ + Generate a string of 'length' random alphanumeric characters. + Require at least one lower-case, one upper-case and one digit. + """ alphabet = string.ascii_letters + string.digits while True: password_ = "".join(secrets.choice(alphabet) for _ in range(length)) @@ -60,7 +62,13 @@ def sha256hash(input_string: str) -> str: return hashlib.sha256(str.encode(input_string, encoding="utf-8")).hexdigest() -def truncate_tokens(tokens: Sequence[str], max_length: int) -> List[str]: +def truncate_tokens(tokens: Sequence[str], max_length: int) -> list[str]: + """ + Repeatedly remove the final character from the longest strings in the input. + Terminate when the total length of all strings is no greater than max_length. 
+ For example: + truncate_tokens(["the", "quick", "fox"], 6) -> ["th", "qu", "fo"] + """ output_tokens = list(tokens) token_lengths = [len(t) for t in output_tokens] while sum(token_lengths) > max_length: diff --git a/data_safe_haven/functions/validators.py b/data_safe_haven/functions/validators.py index 1cee23d530..df9c6fcd0e 100644 --- a/data_safe_haven/functions/validators.py +++ b/data_safe_haven/functions/validators.py @@ -1,67 +1,60 @@ -# Standard library imports import ipaddress import re -from typing import Optional -# Third-party imports import pytz import typer -def validate_aad_guid(aad_guid: Optional[str]) -> Optional[str]: +def validate_aad_guid(aad_guid: str | None) -> str | None: if aad_guid is not None: if not re.match( r"^[a-z0-9]{8}-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{12}$", aad_guid, ): - raise typer.BadParameter( - "Expected GUID, for example '10de18e7-b238-6f1e-a4ad-772708929203'" - ) + msg = "Expected GUID, for example '10de18e7-b238-6f1e-a4ad-772708929203'" + raise typer.BadParameter(msg) return aad_guid -def validate_azure_location(azure_location: Optional[str]) -> Optional[str]: +def validate_azure_location(azure_location: str | None) -> str | None: if azure_location is not None: if not re.match(r"^[a-z]+[0-9]?[a-z]*$", azure_location): - raise typer.BadParameter( - "Expected valid Azure location, for example 'uksouth'" - ) + msg = "Expected valid Azure location, for example 'uksouth'" + raise typer.BadParameter(msg) return azure_location -def validate_azure_vm_sku(azure_vm_sku: Optional[str]) -> Optional[str]: +def validate_azure_vm_sku(azure_vm_sku: str | None) -> str | None: if azure_vm_sku is not None: if not re.match(r"^(Standard|Basic)_\w+$", azure_vm_sku): - raise typer.BadParameter( - "Expected valid Azure VM SKU, for example 'Standard_D2s_v4'" - ) + msg = "Expected valid Azure VM SKU, for example 'Standard_D2s_v4'" + raise typer.BadParameter(msg) return azure_vm_sku -def validate_email_address(email_address: Optional[str]) -> Optional[str]: +def validate_email_address(email_address: str | None) -> str | None: if email_address is not None: if not re.match(r"^\S+@\S+$", email_address): - raise typer.BadParameter( - "Expected valid email address, for example 'sherlock@holmes.com'" - ) + msg = "Expected valid email address, for example 'sherlock@holmes.com'" + raise typer.BadParameter(msg) return email_address def validate_ip_address( - ip_address: Optional[str], -) -> Optional[str]: + ip_address: str | None, +) -> str | None: try: if ip_address: return str(ipaddress.ip_network(ip_address)) return None - except Exception: - raise typer.BadParameter("Expected valid IPv4 address, for example '1.1.1.1'") + except Exception as exc: + msg = "Expected valid IPv4 address, for example '1.1.1.1'" + raise typer.BadParameter(msg) from exc -def validate_timezone(timezone: Optional[str]) -> Optional[str]: +def validate_timezone(timezone: str | None) -> str | None: if timezone is not None: if timezone not in pytz.all_timezones: - raise typer.BadParameter( - "Expected valid timezone, for example 'Europe/London'" - ) + msg = "Expected valid timezone, for example 'Europe/London'" + raise typer.BadParameter(msg) return timezone diff --git a/data_safe_haven/provisioning/shm_provisioning_manager.py b/data_safe_haven/provisioning/shm_provisioning_manager.py index 269b3e8ac9..fe8e8aad4f 100644 --- a/data_safe_haven/provisioning/shm_provisioning_manager.py +++ b/data_safe_haven/provisioning/shm_provisioning_manager.py @@ -1,5 +1,4 @@ """Provisioning manager for a deployed 
SHM.""" -# Local imports from data_safe_haven.external import AzureApi from data_safe_haven.pulumi import PulumiSHMStack diff --git a/data_safe_haven/provisioning/sre_provisioning_manager.py b/data_safe_haven/provisioning/sre_provisioning_manager.py index 4237541659..e6f1265702 100644 --- a/data_safe_haven/provisioning/sre_provisioning_manager.py +++ b/data_safe_haven/provisioning/sre_provisioning_manager.py @@ -1,9 +1,7 @@ """Provisioning manager for a deployed SRE.""" -# Standard library imports import pathlib -from typing import Any, Dict +from typing import Any -# Local imports from data_safe_haven.external import ( AzureApi, AzureContainerInstance, @@ -18,7 +16,7 @@ class SREProvisioningManager: def __init__( self, - available_vm_skus: Dict[str, Dict[str, Any]], + available_vm_skus: dict[str, dict[str, Any]], shm_stack: PulumiSHMStack, sre_name: str, sre_stack: PulumiSREStack, @@ -110,7 +108,10 @@ def update_remote_desktop_connections(self) -> None: connection_data = { "connections": [ { - "connection_name": f"{vm_identifier} [{vm_details['cpus']} CPU(s), {vm_details['gpus']} GPU(s), {vm_details['ram']} GB RAM]", + "connection_name": ( + f"{vm_identifier} [{vm_details['cpus']} CPU(s)," + f" {vm_details['gpus']} GPU(s), {vm_details['ram']} GB RAM]" + ), "disable_copy": self.remote_desktop_params["disable_copy"], "disable_paste": self.remote_desktop_params["disable_paste"], "ip_address": vm_details["ip_address"], diff --git a/data_safe_haven/pulumi/common/transformations.py b/data_safe_haven/pulumi/common/transformations.py index eb6692902a..537d850ac2 100644 --- a/data_safe_haven/pulumi/common/transformations.py +++ b/data_safe_haven/pulumi/common/transformations.py @@ -1,17 +1,12 @@ """Common transformations needed when manipulating Pulumi resources""" -# Standard library imports -from typing import List - -# Third party imports from pulumi import Output from pulumi_azure_native import containerinstance, network, resources -# Local imports -from data_safe_haven.exceptions import DataSafeHavenPulumiException +from data_safe_haven.exceptions import DataSafeHavenPulumiError from data_safe_haven.external import AzureIPv4Range -def get_available_ips_from_subnet(subnet: network.GetSubnetResult) -> List[str]: +def get_available_ips_from_subnet(subnet: network.GetSubnetResult) -> list[str]: """Get list of available IP addresses from a subnet""" if address_prefix := subnet.address_prefix: return [str(ip) for ip in AzureIPv4Range.from_cidr(address_prefix).available()] @@ -22,14 +17,16 @@ def get_id_from_rg(rg: resources.ResourceGroup) -> Output[str]: """Get the ID of a resource group""" if isinstance(rg.id, Output): return rg.id - raise DataSafeHavenPulumiException(f"Resource group '{rg.name}' has no ID.") + msg = f"Resource group '{rg.name}' has no ID." + raise DataSafeHavenPulumiError(msg) def get_id_from_subnet(subnet: network.GetSubnetResult) -> str: """Get the ID of a subnet""" if id_ := subnet.id: return str(id_) - raise DataSafeHavenPulumiException(f"Subnet '{subnet.name}' has no ID.") + msg = f"Subnet '{subnet.name}' has no ID." 
+ raise DataSafeHavenPulumiError(msg) def get_ip_address_from_container_group( @@ -44,7 +41,7 @@ def get_ip_addresses_from_private_endpoint( endpoint: network.PrivateEndpoint, -) -> Output[List[str]]: +) -> Output[list[str]]: """Get a list of IP addresses from a private endpoint""" if isinstance(endpoint.custom_dns_configs, Output): return endpoint.custom_dns_configs.apply( @@ -54,27 +51,29 @@ if cfgs else [] ) - raise DataSafeHavenPulumiException( - f"Private endpoint '{endpoint.name}' has no IP addresses." - ) + msg = f"Private endpoint '{endpoint.name}' has no IP addresses." + raise DataSafeHavenPulumiError(msg) def get_name_from_rg(rg: resources.ResourceGroup) -> Output[str]: """Get the name of a resource group""" if isinstance(rg.name, Output): return rg.name.apply(lambda s: str(s)) - raise DataSafeHavenPulumiException(f"Resource group '{rg.id}' has no name.") + msg = f"Resource group '{rg.id}' has no name." + raise DataSafeHavenPulumiError(msg) def get_name_from_subnet(subnet: network.GetSubnetResult) -> str: """Get the name of a subnet""" if name := subnet.name: return str(name) - raise DataSafeHavenPulumiException(f"Subnet '{subnet.id}' has no name.") + msg = f"Subnet '{subnet.id}' has no name." + raise DataSafeHavenPulumiError(msg) def get_name_from_vnet(vnet: network.VirtualNetwork) -> Output[str]: """Get the name of a virtual network""" if isinstance(vnet.name, Output): return vnet.name.apply(lambda s: str(s)) - raise DataSafeHavenPulumiException(f"Virtual network '{vnet.id}' has no name.") + msg = f"Virtual network '{vnet.id}' has no name." + raise DataSafeHavenPulumiError(msg) diff --git a/data_safe_haven/pulumi/components/automation_dsc_node.py b/data_safe_haven/pulumi/components/automation_dsc_node.py index cac809c75e..c6a13a84a0 100644 --- a/data_safe_haven/pulumi/components/automation_dsc_node.py +++ b/data_safe_haven/pulumi/components/automation_dsc_node.py @@ -1,16 +1,13 @@ """Register a VM as an Azure Automation DSC node""" -# Standard library imports import pathlib import time -from typing import Dict, Optional, Sequence +from collections.abc import Sequence -# Third party imports from pulumi import ComponentResource, Input, Output, ResourceOptions from pulumi_azure_native import automation, compute -# Local imports +from data_safe_haven.pulumi.dynamic.compiled_dsc import CompiledDsc, CompiledDscProps from data_safe_haven.utility import FileReader -from ..dynamic.compiled_dsc import CompiledDsc, CompiledDscProps class AutomationDscNodeProps: @@ -25,7 +22,7 @@ def __init__( configuration_name: Input[str], dsc_description: Input[str], dsc_file: Input[FileReader], - dsc_parameters: Input[Dict[str, str]], + dsc_parameters: Input[dict[str, str]], dsc_required_modules: Input[Sequence[str]], location: Input[str], subscription_name: Input[str], @@ -56,11 +53,11 @@ def __init__( self, name: str, props: AutomationDscNodeProps, - opts: Optional[ResourceOptions] = None, + opts: ResourceOptions | None = None, ): super().__init__("dsh:common:AutomationDscNode", name, {}, opts) child_opts = ResourceOptions.merge(ResourceOptions(parent=self), opts) - resources_path = pathlib.Path(__file__).parent.parent.parent / "resources" + pathlib.Path(__file__).parent.parent.parent / "resources" # Upload the primary domain controller DSC dsc = automation.DscConfiguration( @@ -104,7 +101,7 @@ def __init__( ), opts=ResourceOptions.merge(ResourceOptions(depends_on=[dsc]), child_opts), ) - dsc_extension = 
compute.VirtualMachineExtension( + compute.VirtualMachineExtension( f"{self._name}_dsc_extension", auto_upgrade_minor_version=True, location=props.location, diff --git a/data_safe_haven/pulumi/components/shm_bastion.py b/data_safe_haven/pulumi/components/shm_bastion.py index da6fddb4ed..6b09193131 100644 --- a/data_safe_haven/pulumi/components/shm_bastion.py +++ b/data_safe_haven/pulumi/components/shm_bastion.py @@ -1,8 +1,4 @@ """Pulumi component for SHM monitoring""" -# Standard library import -from typing import Optional - -# Third party imports from pulumi import ComponentResource, Input, Output, ResourceOptions from pulumi_azure_native import network @@ -29,9 +25,8 @@ def __init__( self, name: str, stack_name: str, - shm_name: str, props: SHMBastionProps, - opts: Optional[ResourceOptions] = None, + opts: ResourceOptions | None = None, ): super().__init__("dsh:shm:BastionComponent", name, {}, opts) child_opts = ResourceOptions.merge(ResourceOptions(parent=self), opts) diff --git a/data_safe_haven/pulumi/components/shm_data.py b/data_safe_haven/pulumi/components/shm_data.py index ff6965244d..5f82e204f6 100644 --- a/data_safe_haven/pulumi/components/shm_data.py +++ b/data_safe_haven/pulumi/components/shm_data.py @@ -1,12 +1,9 @@ """Pulumi component for SHM state""" -# Standard library imports -from typing import Optional, Sequence +from collections.abc import Sequence -# Third party imports from pulumi import ComponentResource, Config, Input, Output, ResourceOptions from pulumi_azure_native import keyvault, resources, storage -# Local imports from data_safe_haven.external import AzureIPv4Range from data_safe_haven.functions import alphanumeric, replace_separators, truncate_tokens @@ -53,9 +50,8 @@ def __init__( self, name: str, stack_name: str, - shm_name: str, props: SHMDataProps, - opts: Optional[ResourceOptions] = None, + opts: ResourceOptions | None = None, ): super().__init__("dsh:shm:DataComponent", name, {}, opts) child_opts = ResourceOptions.merge(ResourceOptions(parent=self), opts) @@ -138,7 +134,7 @@ def __init__( ) # Deploy key vault secrets - password_domain_admin = keyvault.Secret( + keyvault.Secret( f"{self._name}_kvs_password_domain_admin", properties=keyvault.SecretPropertiesArgs(value=props.password_domain_admin), resource_group_name=resource_group.name, @@ -146,7 +142,7 @@ def __init__( vault_name=key_vault.name, opts=child_opts, ) - password_domain_azure_ad_connect = keyvault.Secret( + keyvault.Secret( f"{self._name}_kvs_password_domain_azure_ad_connect", properties=keyvault.SecretPropertiesArgs( value=props.password_domain_azure_ad_connect @@ -156,7 +152,7 @@ def __init__( vault_name=key_vault.name, opts=child_opts, ) - password_domain_computer_manager = keyvault.Secret( + keyvault.Secret( f"{self._name}_kvs_password_domain_computer_manager", properties=keyvault.SecretPropertiesArgs( value=props.password_domain_computer_manager @@ -166,7 +162,7 @@ def __init__( vault_name=key_vault.name, opts=child_opts, ) - password_domain_searcher = keyvault.Secret( + keyvault.Secret( f"{self._name}_kvs_password_domain_searcher", properties=keyvault.SecretPropertiesArgs( value=props.password_domain_searcher @@ -176,7 +172,7 @@ def __init__( vault_name=key_vault.name, opts=child_opts, ) - password_update_server_linux_admin = keyvault.Secret( + keyvault.Secret( f"{self._name}_kvs_password_update_server_linux_admin", properties=keyvault.SecretPropertiesArgs( value=props.password_update_server_linux_admin @@ -219,7 +215,7 @@ def __init__( i_p_address_or_range=str(ip_address), ) for 
ip_range in sorted(ip_ranges) - for ip_address in AzureIPv4Range.from_cidr(ip_range).all() + for ip_address in AzureIPv4Range.from_cidr(ip_range).all_ips() ] ), ), @@ -228,7 +224,7 @@ def __init__( opts=child_opts, ) # Deploy staging container for holding any data that does not have an SRE - storage_container_staging = storage.BlobContainer( + storage.BlobContainer( f"{self._name}_st_data_staging", account_name=storage_account_persistent_data.name, container_name=replace_separators(f"{stack_name}-staging", "-")[:63], diff --git a/data_safe_haven/pulumi/components/shm_domain_controllers.py b/data_safe_haven/pulumi/components/shm_domain_controllers.py index 2db4f28265..4680b974a3 100644 --- a/data_safe_haven/pulumi/components/shm_domain_controllers.py +++ b/data_safe_haven/pulumi/components/shm_domain_controllers.py @@ -1,16 +1,17 @@ """Pulumi component for SHM domain controllers""" -# Standard library import import pathlib -from typing import Optional, Sequence +from collections.abc import Sequence -# Third party imports from pulumi import ComponentResource, Input, Output, ResourceOptions from pulumi_azure_native import network, resources -# Local from data_safe_haven.pulumi.common.transformations import get_name_from_subnet +from data_safe_haven.pulumi.dynamic.remote_powershell import ( + RemoteScript, + RemoteScriptProps, +) from data_safe_haven.utility import FileReader -from ..dynamic.remote_powershell import RemoteScript, RemoteScriptProps + from .automation_dsc_node import AutomationDscNode, AutomationDscNodeProps from .virtual_machine import VMComponent, WindowsVMProps @@ -82,9 +83,8 @@ def __init__( self, name: str, stack_name: str, - shm_name: str, props: SHMDomainControllersProps, - opts: Optional[ResourceOptions] = None, + opts: ResourceOptions | None = None, ): super().__init__("dsh:shm:DomainControllersComponent", name, {}, opts) child_opts = ResourceOptions.merge(ResourceOptions(parent=self), opts) @@ -99,7 +99,8 @@ def __init__( ) # Create the DC - # We use the domain admin credentials here as the VM admin is promoted to domain admin when setting up the domain + # We use the domain admin credentials here as the VM admin is promoted + # to domain admin when setting up the domain primary_domain_controller = VMComponent( f"{self._name}_primary_domain_controller", WindowsVMProps( diff --git a/data_safe_haven/pulumi/components/shm_firewall.py b/data_safe_haven/pulumi/components/shm_firewall.py index 285c79f10f..a62cd61c61 100644 --- a/data_safe_haven/pulumi/components/shm_firewall.py +++ b/data_safe_haven/pulumi/components/shm_firewall.py @@ -1,12 +1,7 @@ """Pulumi component for SHM traffic routing""" -# Standard library import -from typing import Optional - -# Third party imports from pulumi import ComponentResource, Input, Output, ResourceOptions from pulumi_azure_native import network -# Local imports from data_safe_haven.pulumi.common.transformations import get_id_from_subnet @@ -25,9 +20,7 @@ def __init__( subnet_update_servers: Input[network.GetSubnetResult], ): self.domain_controller_private_ip = domain_controller_private_ip - self.dns_zone_name = Output.from_input(dns_zone).apply( - lambda zone: zone.name # type: ignore - ) + self.dns_zone_name = Output.from_input(dns_zone).apply(lambda zone: zone.name) # type: ignore self.location = location self.resource_group_name = resource_group_name self.route_table_name = route_table_name @@ -49,15 +42,15 @@ def __init__( self, name: str, stack_name: str, - shm_name: str, props: SHMFirewallProps, - opts: Optional[ResourceOptions] = 
None, + opts: ResourceOptions | None = None, ): super().__init__("dsh:shm:FirewallComponent", name, {}, opts) child_opts = ResourceOptions.merge(ResourceOptions(parent=self), opts) # Important IP addresses - external_dns_resolver = "168.63.129.16" # https://docs.microsoft.com/en-us/azure/virtual-network/what-is-ip-address-168-63-129-16 + # https://docs.microsoft.com/en-us/azure/virtual-network/what-is-ip-address-168-63-129-16 + external_dns_resolver = "168.63.129.16" ntp_ip_addresses = [ "216.239.35.0", "216.239.35.4", @@ -1131,7 +1124,7 @@ def __init__( if not cfgs else next(filter(lambda _: _, [cfg.private_ip_address for cfg in cfgs])) ) - route = network.Route( + network.Route( f"{self._name}_via_firewall", address_prefix="0.0.0.0/0", next_hop_ip_address=private_ip_address, @@ -1143,7 +1136,7 @@ def __init__( ) # Add an A record for the domain controller - a_record = network.RecordSet( + network.RecordSet( f"{self._name}_a_record", a_records=public_ip.ip_address.apply( lambda ip: [network.ARecordArgs(ipv4_address=ip)] if ip else [] diff --git a/data_safe_haven/pulumi/components/shm_monitoring.py b/data_safe_haven/pulumi/components/shm_monitoring.py index a4887b0d9c..fac929437b 100644 --- a/data_safe_haven/pulumi/components/shm_monitoring.py +++ b/data_safe_haven/pulumi/components/shm_monitoring.py @@ -1,8 +1,4 @@ """Pulumi component for SHM monitoring""" -# Standard library import -from typing import Dict, Optional, Tuple - -# Third party imports from pulumi import ComponentResource, Input, Output, ResourceOptions from pulumi_azure_native import ( automation, @@ -13,7 +9,6 @@ resources, ) -# Local imports from data_safe_haven.functions import ( ordered_private_dns_zones, replace_separators, @@ -49,9 +44,8 @@ def __init__( self, name: str, stack_name: str, - shm_name: str, props: SHMMonitoringProps, - opts: Optional[ResourceOptions] = None, + opts: ResourceOptions | None = None, ): super().__init__("dsh:shm:MonitoringComponent", name, {}, opts) child_opts = ResourceOptions.merge(ResourceOptions(parent=self), opts) @@ -80,7 +74,7 @@ def __init__( # List of modules as 'name: (version, SHA256 hash)' # Note that we exclude ComputerManagementDsc which is already present (https://docs.microsoft.com/en-us/azure/automation/shared-resources/modules#default-modules) - modules: Dict[str, Tuple[str, str]] = { + modules: dict[str, tuple[str, str]] = { "ActiveDirectoryDsc": ( "6.2.0", "60b7cc2c578248f23c5b871b093db268a1c1bd89f5ccafc45d9a65c3f0621dca", @@ -133,7 +127,7 @@ def __init__( ) # Add a private DNS record for each automation custom DNS config - automation_account_private_dns_zone_group = network.PrivateDnsZoneGroup( + network.PrivateDnsZoneGroup( f"{self._name}_automation_account_private_dns_zone_group", private_dns_zone_configs=[ network.PrivateDnsZoneConfigArgs( @@ -190,7 +184,7 @@ def __init__( subnet=network.SubnetArgs(id=props.subnet_monitoring_id), opts=child_opts, ) - log_analytics_ampls_connection = insights.PrivateLinkScopedResource( + insights.PrivateLinkScopedResource( f"{self._name}_log_analytics_ampls_connection", linked_resource_id=log_analytics.id, name=f"{stack_name}-cnxn-ampls-log-to-log", @@ -200,7 +194,7 @@ def __init__( ) # Add a private DNS record for each log analytics workspace custom DNS config - log_analytics_private_dns_zone_group = network.PrivateDnsZoneGroup( + network.PrivateDnsZoneGroup( f"{self._name}_log_analytics_private_dns_zone_group", private_dns_zone_configs=[ network.PrivateDnsZoneConfigArgs( @@ -219,7 +213,7 @@ def __init__( ) # Link automation account to 
log analytics workspace - automation_log_analytics_link = operationalinsights.LinkedService( + operationalinsights.LinkedService( f"{self._name}_automation_log_analytics_link", linked_service_name="Automation", resource_group_name=resource_group.name, @@ -260,7 +254,7 @@ def __init__( lambda id_: id_.split("/resourceGroups/")[0] ) # Create Windows VM virus definitions update schedule: daily at 01:01 - schedule_windows_definitions = automation.SoftwareUpdateConfigurationByName( + automation.SoftwareUpdateConfigurationByName( f"{self._name}_schedule_windows_definitions", automation_account_name=automation_account.name, resource_group_name=resource_group.name, @@ -296,7 +290,7 @@ def __init__( ), ) # Create Windows VM system update schedule: daily at 02:02 - schedule_windows_updates = automation.SoftwareUpdateConfigurationByName( + automation.SoftwareUpdateConfigurationByName( f"{self._name}_schedule_windows_updates", automation_account_name=automation_account.name, resource_group_name=resource_group.name, @@ -349,7 +343,7 @@ def __init__( ), ) # Create Linux VM system update schedule: daily at 02:02 - schedule_linux_updates = automation.SoftwareUpdateConfigurationByName( + automation.SoftwareUpdateConfigurationByName( f"{self._name}_schedule_linux_updates", automation_account_name=automation_account.name, resource_group_name=resource_group.name, diff --git a/data_safe_haven/pulumi/components/shm_networking.py b/data_safe_haven/pulumi/components/shm_networking.py index c9ea319b4a..9be3aac1e9 100644 --- a/data_safe_haven/pulumi/components/shm_networking.py +++ b/data_safe_haven/pulumi/components/shm_networking.py @@ -1,15 +1,12 @@ """Pulumi component for SHM networking""" -# Standard library imports -from typing import List, Optional, Sequence +from collections.abc import Sequence -# Third party imports from pulumi import ComponentResource, Input, Output, ResourceOptions from pulumi_azure_native import network, resources -# Local imports from data_safe_haven.external import AzureIPv4Range from data_safe_haven.functions import ordered_private_dns_zones -from ..common.enums import NetworkingPriorities +from data_safe_haven.pulumi.common.enums import NetworkingPriorities class SHMNetworkingProps: @@ -46,9 +43,8 @@ def __init__( self, name: str, stack_name: str, - shm_name: str, props: SHMNetworkingProps, - opts: Optional[ResourceOptions] = None, + opts: ResourceOptions | None = None, ): super().__init__("dsh:shm:NetworkingComponent", name, {}, opts) child_opts = ResourceOptions.merge(ResourceOptions(parent=self), opts) @@ -412,7 +408,7 @@ def __init__( zone_type=network.ZoneType.PUBLIC, opts=child_opts, ) - caa_record = network.RecordSet( + network.RecordSet( f"{self._name}_caa_record", caa_records=[ network.CaaRecordArgs( @@ -428,7 +424,7 @@ def __init__( zone_name=dns_zone.name, opts=child_opts, ) - domain_verification_record = network.RecordSet( + network.RecordSet( f"{self._name}_domain_verification_record", record_type="TXT", relative_record_set_name="@", @@ -442,7 +438,7 @@ def __init__( ) # Set up private link domains - private_zone_ids: List[Output[str]] = [] + private_zone_ids: list[Output[str]] = [] for private_link_domain in ordered_private_dns_zones(): private_zone = network.PrivateZone( f"{self._name}_private_zone_{private_link_domain}", @@ -451,7 +447,7 @@ def __init__( resource_group_name=resource_group.name, opts=child_opts, ) - virtual_network_link = network.VirtualNetworkLink( + network.VirtualNetworkLink( f"{self._name}_private_zone_{private_link_domain}_vnet_link", 
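The import hunks in these components apply PEP 585 and PEP 604 uniformly: `typing.Dict`/`List`/`Tuple`/`Optional` give way to builtin generics and `X | None` unions, and abstract containers such as `Sequence` now come from `collections.abc`. These spellings require Python 3.10+, consistent with the 3.11 pin in the lint workflow. A before/after sketch with an illustrative signature:

```python
# Before (typing-module generics):
#   from typing import Dict, Optional, Sequence, Tuple
#   def modules(names: Sequence[str]) -> Optional[Dict[str, Tuple[str, str]]]: ...

# After (PEP 585 builtin generics, PEP 604 unions):
from collections.abc import Sequence


def modules(names: Sequence[str]) -> dict[str, tuple[str, str]] | None:
    """Map each module name to a hypothetical (version, sha256) pair."""
    return {name: ("1.0.0", "0" * 64) for name in names} or None
```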
location="Global", private_zone_name=private_zone.name, diff --git a/data_safe_haven/pulumi/components/shm_update_servers.py b/data_safe_haven/pulumi/components/shm_update_servers.py index 9c0c495bf7..8318167fa0 100644 --- a/data_safe_haven/pulumi/components/shm_update_servers.py +++ b/data_safe_haven/pulumi/components/shm_update_servers.py @@ -1,18 +1,15 @@ """Pulumi component for SHM monitoring""" -# Standard library import import pathlib -from typing import Optional -# Third party imports from pulumi import ComponentResource, Input, Output, ResourceOptions from pulumi_azure_native import network -# Local imports from data_safe_haven.functions import b64encode from data_safe_haven.pulumi.common.transformations import ( get_available_ips_from_subnet, get_name_from_subnet, ) + from .virtual_machine import LinuxVMProps, VMComponent @@ -52,16 +49,15 @@ def __init__( self, name: str, stack_name: str, - shm_name: str, props: SHMUpdateServersProps, - opts: Optional[ResourceOptions] = None, + opts: ResourceOptions | None = None, ): super().__init__("dsh:shm:UpdateServersComponent", name, {}, opts) child_opts = ResourceOptions.merge(ResourceOptions(parent=self), opts) # Load cloud-init file b64cloudinit = self.read_cloudinit() - vm_name = f"shm-{shm_name}-vm-linux-updates" + vm_name = f"{stack_name}-vm-linux-updates" VMComponent( vm_name, LinuxVMProps( @@ -93,7 +89,6 @@ def read_cloudinit( ) with open( resources_path / "update_server_linux.cloud_init.yaml", - "r", encoding="utf-8", ) as f_cloudinit: cloudinit = f_cloudinit.read() diff --git a/data_safe_haven/pulumi/components/sre_application_gateway.py b/data_safe_haven/pulumi/components/sre_application_gateway.py index caae666411..76849e54b4 100644 --- a/data_safe_haven/pulumi/components/sre_application_gateway.py +++ b/data_safe_haven/pulumi/components/sre_application_gateway.py @@ -1,12 +1,9 @@ """Pulumi component for SRE application gateway""" -# Standard library imports -from typing import Any, Optional +from typing import Any -# Third party imports from pulumi import ComponentResource, Input, Output, ResourceOptions from pulumi_azure_native import managedidentity, network, resources -# Local imports from data_safe_haven.pulumi.common.transformations import ( get_available_ips_from_subnet, get_id_from_rg, @@ -43,7 +40,7 @@ def __init__( ).apply(get_available_ips_from_subnet) # Unwrap key vault identity so that it has the required type self.user_assigned_identities = Output.from_input(key_vault_identity).apply( - lambda identity: identity.id.apply(lambda id: {str(id): {}}) + lambda identity: identity.id.apply(lambda id_: {str(id_): {}}) ) @@ -54,9 +51,8 @@ def __init__( self, name: str, stack_name: str, - sre_name: str, props: SREApplicationGatewayProps, - opts: Optional[ResourceOptions] = None, + opts: ResourceOptions | None = None, ): super().__init__("dsh:sre:ApplicationGatewayComponent", name, {}, opts) child_opts = ResourceOptions.merge(ResourceOptions(parent=self), opts) @@ -89,7 +85,7 @@ def __init__( # Define application gateway application_gateway_name = f"{stack_name}-ag-entrypoint" - application_gateway = network.ApplicationGateway( + network.ApplicationGateway( f"{self._name}_application_gateway", application_gateway_name=application_gateway_name, backend_address_pools=[ @@ -275,7 +271,9 @@ def __init__( ), name="sslProfile", ssl_policy=network.ApplicationGatewaySslPolicyArgs( - # We take the ones recommended by SSL Labs (https://github.com/ssllabs/research/wiki/SSL-and-TLS-Deployment-Best-Practices) excluding any that are 
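In `read_cloudinit` above, the redundant `"r"` mode argument is dropped from `open()` (reading is the default) while the explicit encoding is kept. For context, a sketch of the usual round trip from cloud-init file to Azure `custom_data`, assuming, as the imports suggest, that the project's `b64encode` is a thin wrapper over the standard library; the file name below is hypothetical:

```python
import base64
import pathlib


def b64encode(input_string: str) -> str:
    """Assumed equivalent of data_safe_haven.functions.b64encode."""
    return base64.b64encode(input_string.encode("utf-8")).decode()


def read_cloudinit(resources_path: pathlib.Path) -> str:
    # "r" is the default mode, so only the encoding needs to be explicit
    with open(
        resources_path / "example_server.cloud_init.yaml", encoding="utf-8"
    ) as f_cloudinit:
        return b64encode(f_cloudinit.read())
```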
unsupported + # We take the ones recommended by SSL Labs + # (https://github.com/ssllabs/research/wiki/SSL-and-TLS-Deployment-Best-Practices) + # excluding any that are unsupported cipher_suites=[ "TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256", "TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384", diff --git a/data_safe_haven/pulumi/components/sre_data.py b/data_safe_haven/pulumi/components/sre_data.py index 5040dcd772..3cd38480a8 100644 --- a/data_safe_haven/pulumi/components/sre_data.py +++ b/data_safe_haven/pulumi/components/sre_data.py @@ -1,8 +1,6 @@ """Pulumi component for SRE state""" -# Standard library imports -from typing import Optional, Sequence +from collections.abc import Sequence -# Third party imports from pulumi import ComponentResource, Config, Input, Output, ResourceOptions from pulumi_azure_native import ( authorization, @@ -13,7 +11,6 @@ storage, ) -# Local imports from data_safe_haven.external import AzureIPv4Range from data_safe_haven.functions import ( alphanumeric, @@ -26,8 +23,14 @@ get_id_from_subnet, get_name_from_rg, ) -from ..dynamic.blob_container_acl import BlobContainerAcl, BlobContainerAclProps -from ..dynamic.ssl_certificate import SSLCertificate, SSLCertificateProps +from data_safe_haven.pulumi.dynamic.blob_container_acl import ( + BlobContainerAcl, + BlobContainerAclProps, +) +from data_safe_haven.pulumi.dynamic.ssl_certificate import ( + SSLCertificate, + SSLCertificateProps, +) class SREDataProps: @@ -98,9 +101,8 @@ def __init__( self, name: str, stack_name: str, - sre_name: str, props: SREDataProps, - opts: Optional[ResourceOptions] = None, + opts: ResourceOptions | None = None, ): super().__init__("dsh:sre:DataComponent", name, {}, opts) child_opts = ResourceOptions.merge(ResourceOptions(parent=self), opts) @@ -229,7 +231,7 @@ def __init__( ) # Deploy key vault secrets - password_secure_research_desktop_admin = keyvault.Secret( + keyvault.Secret( f"{self._name}_kvs_password_secure_research_desktop_admin", properties=keyvault.SecretPropertiesArgs( value=props.password_secure_research_desktop_admin @@ -239,7 +241,7 @@ def __init__( vault_name=key_vault.name, opts=ResourceOptions(parent=key_vault), ) - password_gitea_database_admin = keyvault.Secret( + keyvault.Secret( f"{self._name}_kvs_password_gitea_database_admin", properties=keyvault.SecretPropertiesArgs( value=props.password_gitea_database_admin @@ -249,7 +251,7 @@ def __init__( vault_name=key_vault.name, opts=ResourceOptions(parent=key_vault), ) - password_hedgedoc_database_admin = keyvault.Secret( + keyvault.Secret( f"{self._name}_kvs_password_hedgedoc_database_admin", properties=keyvault.SecretPropertiesArgs( value=props.password_hedgedoc_database_admin @@ -259,7 +261,7 @@ def __init__( vault_name=key_vault.name, opts=ResourceOptions(parent=key_vault), ) - password_nexus_admin = keyvault.Secret( + keyvault.Secret( f"{self._name}_kvs_password_nexus_admin", properties=keyvault.SecretPropertiesArgs(value=props.password_nexus_admin), resource_group_name=resource_group.name, @@ -267,7 +269,7 @@ def __init__( vault_name=key_vault.name, opts=ResourceOptions(parent=key_vault), ) - password_user_database_admin = keyvault.Secret( + keyvault.Secret( f"{self._name}_kvs_password_user_database_admin", properties=keyvault.SecretPropertiesArgs( value=props.password_user_database_admin @@ -331,7 +333,7 @@ def __init__( i_p_address_or_range=str(ip_address), ) for ip_range in sorted(ip_ranges) - for ip_address in AzureIPv4Range.from_cidr(ip_range).all() + for ip_address in AzureIPv4Range.from_cidr(ip_range).all_ips() ] ), 
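Another systematic rename: identifiers that shadow Python builtins gain a trailing underscore, as in `lambda id: ...` becoming `lambda id_: ...` above (and `property` becoming `property_` later in the diff). This satisfies the flake8-builtins rules (A001/A002) now enforced through ruff; shadowing is legal Python but makes the builtin unreachable in that scope. A toy illustration:

```python
ids = ["identity-a", "identity-b"]

# Shadowing (flagged as A001): the builtin id() is unusable inside this scope
shadowed = {str(id): {} for id in ids}

# Renamed with a trailing underscore, the convention used throughout this diff
renamed = {str(id_): {} for id_ in ids}

print(renamed)  # {'identity-a': {}, 'identity-b': {}}
```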
virtual_network_rules=[ @@ -345,7 +347,7 @@ def __init__( opts=child_opts, ) # Give the "Storage Blob Data Owner" role to the Azure admin group - storage_account_securedata_data_owner_role_assignment = authorization.RoleAssignment( + authorization.RoleAssignment( f"{self._name}_storage_account_securedata_data_owner_role_assignment", principal_id=props.admin_group_id, principal_type=authorization.PrincipalType.GROUP, @@ -380,13 +382,16 @@ def __init__( opts=ResourceOptions(parent=storage_account_securedata), ) # Set storage container ACLs - storage_container_egress_acl = BlobContainerAcl( + BlobContainerAcl( f"{self._name}_storage_container_egress_acl", BlobContainerAclProps( acl_user="rwx", acl_group="rwx", acl_other="rwx", - apply_default_permissions=False, # due to an Azure bug this also gives ownership of the fileshare to user 65533 (preventing use inside the SRE) + # due to an Azure bug `apply_default_permissions=True` also + # gives ownership of the fileshare to user 65533 (preventing + # use inside the SRE) + apply_default_permissions=False, container_name=storage_container_egress.name, resource_group_name=resource_group.name, storage_account_name=storage_account_securedata.name, @@ -394,13 +399,15 @@ def __init__( ), opts=ResourceOptions(parent=storage_container_egress), ) - storage_container_ingress_acl = BlobContainerAcl( + BlobContainerAcl( f"{self._name}_storage_container_ingress_acl", BlobContainerAclProps( acl_user="rwx", acl_group="r-x", acl_other="r-x", - apply_default_permissions=True, # ensure that the above permissions are also set on any newly created files (eg. with Azure Storage Explorer) + # ensure that the above permissions are also set on any newly + # created files (eg. with Azure Storage Explorer) + apply_default_permissions=True, container_name=storage_container_ingress.name, resource_group_name=resource_group.name, storage_account_name=storage_account_securedata.name, @@ -425,7 +432,7 @@ def __init__( opts=ResourceOptions(parent=storage_account_securedata), ) # Add a private DNS record for each securedata data custom DNS config - storage_account_securedata_private_dns_zone_group = network.PrivateDnsZoneGroup( + network.PrivateDnsZoneGroup( f"{self._name}_storage_account_securedata_private_dns_zone_group", private_dns_zone_configs=[ network.PrivateDnsZoneConfigArgs( @@ -480,18 +487,19 @@ def __init__( sku=storage.SkuArgs(name=storage.SkuName.PREMIUM_ZRS), opts=child_opts, ) - file_container_home = storage.FileShare( + storage.FileShare( f"{self._name}_storage_container_home", access_tier=storage.ShareAccessTier.PREMIUM, account_name=storage_account_userdata.name, enabled_protocols=storage.EnabledProtocols.NFS, resource_group_name=resource_group.name, - root_squash=storage.RootSquashType.NO_ROOT_SQUASH, # Squashing prevents root from creating user home directories + # Squashing prevents root from creating user home directories + root_squash=storage.RootSquashType.NO_ROOT_SQUASH, share_name="home", share_quota=1024, opts=ResourceOptions(parent=storage_account_userdata), ) - file_container_shared = storage.FileShare( + storage.FileShare( f"{self._name}_storage_container_shared", access_tier=storage.ShareAccessTier.PREMIUM, account_name=storage_account_userdata.name, @@ -519,7 +527,7 @@ def __init__( opts=ResourceOptions(parent=storage_account_userdata), ) # Add a private DNS record for each userdata custom DNS config - storage_account_userdata_private_dns_zone_group = network.PrivateDnsZoneGroup( + network.PrivateDnsZoneGroup( 
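The `.all()` → `.all_ips()` rename at the end of the previous hunk makes the CIDR expansion read clearly at the call site (each admin IP becomes its own storage network rule) and stops the method name from colliding with the builtin `all` (flake8-builtins A003). What follows is only a rough sketch of such a helper, assuming an `ipaddress`-based implementation; it is not the project's actual `AzureIPv4Range`:

```python
import ipaddress
from collections.abc import Iterator


class IPv4RangeSketch:
    """Hypothetical stand-in for AzureIPv4Range."""

    def __init__(self, network: ipaddress.IPv4Network) -> None:
        self.network = network

    @classmethod
    def from_cidr(cls, cidr: str) -> "IPv4RangeSketch":
        return cls(ipaddress.IPv4Network(cidr, strict=False))

    def all_ips(self) -> Iterator[ipaddress.IPv4Address]:
        # Yield every host address in the range
        yield from self.network.hosts()


print([str(ip) for ip in IPv4RangeSketch.from_cidr("10.0.0.0/30").all_ips()])
# ['10.0.0.1', '10.0.0.2']
```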
f"{self._name}_storage_account_userdata_private_dns_zone_group", private_dns_zone_configs=[ network.PrivateDnsZoneConfigArgs( diff --git a/data_safe_haven/pulumi/components/sre_gitea_server.py b/data_safe_haven/pulumi/components/sre_gitea_server.py index 7f6a2400dc..d84c61f293 100644 --- a/data_safe_haven/pulumi/components/sre_gitea_server.py +++ b/data_safe_haven/pulumi/components/sre_gitea_server.py @@ -1,18 +1,17 @@ -# Standard library imports import pathlib -from typing import Optional -# Third party imports from pulumi import ComponentResource, Input, Output, ResourceOptions from pulumi_azure_native import containerinstance, dbforpostgresql, network, storage -# Local imports -from data_safe_haven.utility import FileReader -from ..common.transformations import ( +from data_safe_haven.pulumi.common.transformations import ( get_ip_address_from_container_group, get_ip_addresses_from_private_endpoint, ) -from ..dynamic.file_share_file import FileShareFile, FileShareFileProps +from data_safe_haven.pulumi.dynamic.file_share_file import ( + FileShareFile, + FileShareFileProps, +) +from data_safe_haven.utility import FileReader class SREGiteaServerProps: def __init__( self, @@ -39,7 +38,7 @@ def __init__( user_services_resource_group_name: Input[str], virtual_network: Input[network.VirtualNetwork], virtual_network_resource_group_name: Input[str], - database_username: Optional[Input[str]] = None, + database_username: Input[str] | None = None, ): self.database_password = database_password self.database_subnet_id = database_subnet_id @@ -72,9 +71,8 @@ def __init__( self, name: str, stack_name: str, - sre_name: str, props: SREGiteaServerProps, - opts: Optional[ResourceOptions] = None, + opts: ResourceOptions | None = None, ): super().__init__("dsh:sre:GiteaServerComponent", name, {}, opts) child_opts = ResourceOptions.merge(ResourceOptions(parent=self), opts) @@ -194,7 +192,7 @@ def __init__( opts=child_opts, ) gitea_db_database_name = "gitea" - gitea_db = dbforpostgresql.Database( + dbforpostgresql.Database( f"{self._name}_gitea_db", charset="UTF8", database_name=gitea_db_database_name, @@ -290,7 +288,8 @@ def __init__( ), containerinstance.EnvironmentVariableArgs( name="GITEA__log__LEVEL", - value="Debug", # Options are: "Trace", "Debug", "Info" [default], "Warn", "Error", "Critical" or "None". + # Options are: "Trace", "Debug", "Info" [default], "Warn", "Error", "Critical" or "None".
+ value="Debug", ), containerinstance.EnvironmentVariableArgs( name="GITEA__security__INSTALL_LOCK", value="true" @@ -365,7 +364,7 @@ def __init__( ), ) # Register the container group in the SRE private DNS zone - gitea_private_record_set = network.PrivateRecordSet( + network.PrivateRecordSet( f"{self._name}_gitea_private_record_set", a_records=[ network.ARecordArgs( @@ -380,7 +379,7 @@ def __init__( opts=child_opts, ) # Redirect the public DNS to private DNS - gitea_public_record_set = network.RecordSet( + network.RecordSet( f"{self._name}_gitea_public_record_set", cname_record=network.CnameRecordArgs( cname=Output.concat("gitea.privatelink.", props.sre_fqdn) diff --git a/data_safe_haven/pulumi/components/sre_hedgedoc_server.py b/data_safe_haven/pulumi/components/sre_hedgedoc_server.py index c12b98ab80..43232bdda9 100644 --- a/data_safe_haven/pulumi/components/sre_hedgedoc_server.py +++ b/data_safe_haven/pulumi/components/sre_hedgedoc_server.py @@ -1,19 +1,18 @@ -# Standard library imports import pathlib -from typing import Optional -# Third party imports from pulumi import ComponentResource, Input, Output, ResourceOptions from pulumi_azure_native import containerinstance, dbforpostgresql, network, storage -# Local imports from data_safe_haven.functions import b64encode -from data_safe_haven.utility import FileReader -from ..common.transformations import ( +from data_safe_haven.pulumi.common.transformations import ( get_ip_address_from_container_group, get_ip_addresses_from_private_endpoint, ) -from ..dynamic.file_share_file import FileShareFile, FileShareFileProps +from data_safe_haven.pulumi.dynamic.file_share_file import ( + FileShareFile, + FileShareFileProps, +) +from data_safe_haven.utility import FileReader class SREHedgeDocServerProps: @@ -41,7 +40,7 @@ def __init__( user_services_resource_group_name: Input[str], virtual_network: Input[network.VirtualNetwork], virtual_network_resource_group_name: Input[str], - database_username: Optional[Input[str]] = None, + database_username: Input[str] | None = None, ): self.database_subnet_id = database_subnet_id self.database_password = database_password @@ -54,7 +53,17 @@ def __init__( self.ldap_search_password = ldap_search_password self.ldap_server_ip = ldap_server_ip self.ldap_user_search_base = ldap_user_search_base - self.ldap_user_security_group_name = ldap_user_security_group_name + self.ldap_user_security_group_cn = Output.all( + group_name=ldap_user_security_group_name, root_dn=ldap_root_dn + ).apply( + lambda kwargs: ",".join( + ( + kwargs["group_name"], + "OU=Data Safe Haven Security Groups", + kwargs["root_dn"], + ) + ) + ) self.location = location self.networking_resource_group_name = networking_resource_group_name self.network_profile_id = network_profile_id @@ -75,9 +84,8 @@ def __init__( self, name: str, stack_name: str, - sre_name: str, props: SREHedgeDocServerProps, - opts: Optional[ResourceOptions] = None, + opts: ResourceOptions | None = None, ): super().__init__("dsh:sre:HedgeDocServerComponent", name, {}, opts) child_opts = ResourceOptions.merge(ResourceOptions(parent=self), opts) @@ -148,7 +156,7 @@ def __init__( opts=child_opts, ) hedgedoc_db_database_name = "hedgedoc" - hedgedoc_db = dbforpostgresql.Database( + dbforpostgresql.Database( f"{self._name}_hedgedoc_db", charset="UTF8", database_name=hedgedoc_db_database_name, @@ -268,7 +276,13 @@ def __init__( ), containerinstance.EnvironmentVariableArgs( name="CMD_LDAP_SEARCHFILTER", - value=f"(&(objectClass=user)(memberOf=CN={props.ldap_user_security_group_name},OU=Data 
Safe Haven Security Groups,{props.ldap_root_dn})(sAMAccountName={{{{username}}}}))", + value=( + "(&" + "(objectClass=user)" + f"(memberOf=CN={props.ldap_user_security_group_cn})" + f"(sAMAccountName={{{{username}}}})" + ")" + ), ), containerinstance.EnvironmentVariableArgs( name="CMD_LDAP_URL", @@ -345,7 +359,7 @@ def __init__( ), ) # Register the container group in the SRE private DNS zone - hedgedoc_private_record_set = network.PrivateRecordSet( + network.PrivateRecordSet( f"{self._name}_hedgedoc_private_record_set", a_records=[ network.ARecordArgs( @@ -360,7 +374,7 @@ def __init__( opts=child_opts, ) # Redirect the public DNS to private DNS - hedgedoc_public_record_set = network.RecordSet( + network.RecordSet( f"{self._name}_hedgedoc_public_record_set", cname_record=network.CnameRecordArgs( cname=Output.concat("hedgedoc.privatelink.", props.sre_fqdn) diff --git a/data_safe_haven/pulumi/components/sre_monitoring.py b/data_safe_haven/pulumi/components/sre_monitoring.py index 76e4a6c906..c9aac358dd 100644 --- a/data_safe_haven/pulumi/components/sre_monitoring.py +++ b/data_safe_haven/pulumi/components/sre_monitoring.py @@ -1,12 +1,7 @@ """Pulumi component for SHM monitoring""" -# Standard library import -from typing import Optional - -# Third party imports from pulumi import ComponentResource, Input, Output, ResourceOptions from pulumi_azure_native import automation -# Local imports from data_safe_haven.functions import time_as_string @@ -37,15 +32,14 @@ def __init__( self, name: str, stack_name: str, - shm_name: str, props: SREMonitoringProps, - opts: Optional[ResourceOptions] = None, + opts: ResourceOptions | None = None, ): super().__init__("dsh:sre:MonitoringComponent", name, {}, opts) child_opts = ResourceOptions.merge(ResourceOptions(parent=self), opts) # Create Linux VM system update schedule: daily at 03: - schedule_linux_updates = automation.SoftwareUpdateConfigurationByName( + automation.SoftwareUpdateConfigurationByName( f"{self._name}_schedule_linux_updates", automation_account_name=props.automation_account_name, resource_group_name=props.resource_group_name, diff --git a/data_safe_haven/pulumi/components/sre_networking.py b/data_safe_haven/pulumi/components/sre_networking.py index e4b5d17b70..108d5cc3e2 100644 --- a/data_safe_haven/pulumi/components/sre_networking.py +++ b/data_safe_haven/pulumi/components/sre_networking.py @@ -1,15 +1,10 @@ """Pulumi component for SRE networking""" -# Standard library import -from typing import Optional - -# Third party imports from pulumi import ComponentResource, Input, Output, ResourceOptions from pulumi_azure_native import network, resources -# Local imports from data_safe_haven.external import AzureIPv4Range from data_safe_haven.functions import alphanumeric, ordered_private_dns_zones -from ..common.enums import NetworkingPriorities +from data_safe_haven.pulumi.common.enums import NetworkingPriorities class SRENetworkingProps: @@ -26,6 +21,7 @@ def __init__( shm_virtual_network_name: Input[str], shm_zone_name: Input[str], sre_index: Input[int], + sre_name: Input[str], ): # Virtual network and subnet IP ranges self.vnet_iprange = Output.from_input(sre_index).apply( @@ -65,6 +61,7 @@ def __init__( self.shm_subnet_update_servers_prefix = shm_subnet_update_servers_prefix self.shm_virtual_network_name = shm_virtual_network_name self.shm_zone_name = shm_zone_name + self.sre_name = sre_name class SRENetworkingComponent(ComponentResource): @@ -74,9 +71,8 @@ def __init__( self, name: str, stack_name: str, - sre_name: str, props: 
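The `ldap_user_security_group_cn` change above is the key fix in this file: the group CN is assembled once with `Output.all(...).apply(...)` instead of interpolating a Pulumi `Input` into an f-string (which would render the `Output` wrapper object, not its resolved value). With plain strings the construction reduces to the following, showing the filter that lands in `CMD_LDAP_SEARCHFILTER` (sample names are hypothetical):

```python
group_name = "SG Example SRE Users"  # sample ldap_user_security_group_name
root_dn = "DC=example,DC=org"        # sample ldap_root_dn

# Plain-string equivalent of Output.all(group_name=..., root_dn=...).apply(...)
ldap_user_security_group_cn = ",".join(
    (group_name, "OU=Data Safe Haven Security Groups", root_dn)
)

search_filter = (
    "(&"
    "(objectClass=user)"
    f"(memberOf=CN={ldap_user_security_group_cn})"
    "(sAMAccountName={{username}})"
    ")"
)
print(search_filter)
# (&(objectClass=user)(memberOf=CN=SG Example SRE Users,OU=Data Safe Haven
#  Security Groups,DC=example,DC=org)(sAMAccountName={{username}}))
```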
SRENetworkingProps, - opts: Optional[ResourceOptions] = None, + opts: ResourceOptions | None = None, ): super().__init__("dsh:sre:NetworkingComponent", name, {}, opts) child_opts = ResourceOptions.merge(ResourceOptions(parent=self), opts) @@ -259,7 +255,10 @@ def __init__( ), network.SecurityRuleArgs( access=network.SecurityRuleAccess.ALLOW, - description="Allow LDAP client requests over TCP (see https://devopstales.github.io/linux/pfsense-ad-join/ for details).", + description=( + "Allow LDAP client requests over TCP. " + "See https://devopstales.github.io/linux/pfsense-ad-join/ for details." + ), destination_address_prefix=props.shm_subnet_identity_servers_prefix, destination_port_ranges=["389", "636"], direction=network.SecurityRuleDirection.OUTBOUND, @@ -432,27 +431,31 @@ def __init__( virtual_network_name=kwargs["virtual_network_name"], ) ) - peering_sre_to_shm = network.VirtualNetworkPeering( + network.VirtualNetworkPeering( f"{self._name}_sre_to_shm_peering", remote_virtual_network=network.SubResourceArgs(id=shm_virtual_network.id), resource_group_name=resource_group.name, virtual_network_name=sre_virtual_network.name, - virtual_network_peering_name=f"peer_sre_{sre_name}_to_shm", + virtual_network_peering_name=Output.concat( + "peer_sre_", props.sre_name, "_to_shm" + ), opts=child_opts, ) - peering_shm_to_sre = network.VirtualNetworkPeering( + network.VirtualNetworkPeering( f"{self._name}_shm_to_sre_peering", allow_gateway_transit=True, remote_virtual_network=network.SubResourceArgs(id=sre_virtual_network.id), resource_group_name=props.shm_networking_resource_group_name, virtual_network_name=shm_virtual_network.name, - virtual_network_peering_name=f"peer_shm_to_sre_{sre_name}", + virtual_network_peering_name=Output.concat( + "peer_shm_to_sre_", props.sre_name + ), opts=child_opts, ) # Link to SHM private DNS zones for private_link_domain in ordered_private_dns_zones(): - virtual_network_link = network.VirtualNetworkLink( + network.VirtualNetworkLink( f"{self._name}_private_zone_{private_link_domain}_vnet_link", location="Global", private_zone_name=f"privatelink.{private_link_domain}", @@ -475,10 +478,10 @@ def __init__( zone_name=kwargs["zone_name"], ) ) - sre_subdomain = alphanumeric(sre_name).lower() - sre_fqdn = Output.from_input(props.shm_fqdn).apply( - lambda parent: f"{sre_subdomain}.{parent}" + sre_subdomain = Output.from_input(props.sre_name).apply( + lambda name: alphanumeric(name).lower() ) + sre_fqdn = Output.concat(sre_subdomain, ".", props.shm_fqdn) sre_dns_zone = network.Zone( f"{self._name}_dns_zone", location="Global", @@ -499,7 +502,7 @@ def __init__( zone_name=shm_dns_zone.name, opts=child_opts, ) - sre_caa_record = network.RecordSet( + network.RecordSet( f"{self._name}_caa_record", caa_records=[ network.CaaRecordArgs( @@ -524,7 +527,7 @@ def __init__( resource_group_name=resource_group.name, opts=child_opts, ) - virtual_network_link = network.VirtualNetworkLink( + network.VirtualNetworkLink( f"{self._name}_private_zone_vnet_link", location="Global", private_zone_name=sre_private_dns_zone.name, diff --git a/data_safe_haven/pulumi/components/sre_remote_desktop.py b/data_safe_haven/pulumi/components/sre_remote_desktop.py index 6b3a99afff..4ea1d226f1 100644 --- a/data_safe_haven/pulumi/components/sre_remote_desktop.py +++ b/data_safe_haven/pulumi/components/sre_remote_desktop.py @@ -1,9 +1,6 @@ """Pulumi component for SRE remote desktop""" -# Standard library imports import pathlib -from typing import Optional -# Third party imports from pulumi import 
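The same principle drives the peering and FQDN hunks here: now that `sre_name` arrives as an `Input[str]` via the props, names derived from it must be built with `Output.concat`/`.apply` rather than f-strings. A minimal sketch, assuming the `pulumi` SDK (`alphanumeric` below stands in for the project helper of that name):

```python
import re

from pulumi import Output


def alphanumeric(value: str) -> str:
    """Assumed behaviour of the project helper: keep letters and digits only."""
    return re.sub(r"[^A-Za-z0-9]", "", value)


# Inside a Pulumi program these would be real resource outputs
sre_name = Output.from_input("sandbox")
shm_fqdn = Output.from_input("shm.example.org")

# f"{sre_name}.{shm_fqdn}" would stringify the Output wrappers themselves;
# apply/concat defer formatting until the underlying values resolve
sre_subdomain = sre_name.apply(lambda name: alphanumeric(name).lower())
sre_fqdn = Output.concat(sre_subdomain, ".", shm_fqdn)
peering_name = Output.concat("peer_shm_to_sre_", sre_name)
```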
ComponentResource, Input, Output, ResourceOptions from pulumi_azure_native import ( containerinstance, @@ -13,15 +10,20 @@ storage, ) -# Local imports from data_safe_haven.external import AzureIPv4Range from data_safe_haven.pulumi.common.transformations import ( get_id_from_subnet, get_ip_address_from_container_group, ) +from data_safe_haven.pulumi.dynamic.azuread_application import ( + AzureADApplication, + AzureADApplicationProps, +) +from data_safe_haven.pulumi.dynamic.file_share_file import ( + FileShareFile, + FileShareFileProps, +) from data_safe_haven.utility import FileReader -from ..dynamic.azuread_application import AzureADApplication, AzureADApplicationProps -from ..dynamic.file_share_file import FileShareFile, FileShareFileProps class SRERemoteDesktopProps: @@ -50,7 +52,7 @@ def __init__( subnet_guacamole_database: Input[network.GetSubnetResult], virtual_network: Input[network.VirtualNetwork], virtual_network_resource_group_name: Input[str], - database_username: Optional[Input[str]] = "postgresadmin", + database_username: Input[str] | None = "postgresadmin", ): self.aad_application_name = aad_application_name self.aad_application_url = Output.concat("https://", aad_application_fqdn) @@ -107,9 +109,8 @@ def __init__( self, name: str, stack_name: str, - sre_name: str, props: SRERemoteDesktopProps, - opts: Optional[ResourceOptions] = None, + opts: ResourceOptions | None = None, ): super().__init__("dsh:sre:RemoteDesktopComponent", name, {}, opts) child_opts = ResourceOptions.merge(ResourceOptions(parent=self), opts) @@ -147,7 +148,7 @@ def __init__( # Upload Caddyfile resources_path = pathlib.Path(__file__).parent.parent.parent / "resources" reader = FileReader(resources_path / "remote_desktop" / "caddy" / "Caddyfile") - caddyfile = FileShareFile( + FileShareFile( f"{self._name}_file_share_caddyfile", FileShareFileProps( destination_path=reader.name, @@ -189,7 +190,7 @@ def __init__( ), opts=child_opts, ) - connection_db_private_endpoint = network.PrivateEndpoint( + network.PrivateEndpoint( f"{self._name}_connection_db_private_endpoint", custom_dns_configs=[ network.CustomDnsConfigPropertiesFormatArgs( diff --git a/data_safe_haven/pulumi/components/sre_research_desktop.py b/data_safe_haven/pulumi/components/sre_research_desktop.py index 7add911d4d..7211ee3bef 100644 --- a/data_safe_haven/pulumi/components/sre_research_desktop.py +++ b/data_safe_haven/pulumi/components/sre_research_desktop.py @@ -1,14 +1,11 @@ -# Standard library imports import pathlib -from typing import Any, List, Optional, Tuple +from typing import Any -# Third party imports import chevron from pulumi import ComponentResource, Input, Output, ResourceOptions from pulumi_azure_native import network, resources -# Local imports -from data_safe_haven.exceptions import DataSafeHavenPulumiException +from data_safe_haven.exceptions import DataSafeHavenPulumiError from data_safe_haven.functions import b64encode, replace_separators from data_safe_haven.pulumi.common.transformations import ( get_available_ips_from_subnet, @@ -16,6 +13,7 @@ get_name_from_subnet, get_name_from_vnet, ) + from .virtual_machine import LinuxVMProps, VMComponent @@ -38,13 +36,14 @@ def __init__( log_analytics_workspace_id: Input[str], log_analytics_workspace_key: Input[str], sre_fqdn: Input[str], + sre_name: Input[str], storage_account_userdata_name: Input[str], storage_account_securedata_name: Input[str], subnet_research_desktops: Input[network.GetSubnetResult], virtual_network_resource_group: Input[resources.ResourceGroup], virtual_network: 
Input[network.VirtualNetwork], - vm_details: List[ - Tuple[int, str, str] + vm_details: list[ + tuple[int, str, str] ], # this must *not* be passed as an Input[T] ): self.admin_password = Output.secret(admin_password) @@ -62,6 +61,7 @@ def __init__( self.log_analytics_workspace_id = log_analytics_workspace_id self.log_analytics_workspace_key = log_analytics_workspace_key self.sre_fqdn = sre_fqdn + self.sre_name = sre_name self.storage_account_userdata_name = storage_account_userdata_name self.storage_account_securedata_name = storage_account_securedata_name self.virtual_network_name = Output.from_input(virtual_network).apply( @@ -78,11 +78,11 @@ def __init__( ) self.vm_details = vm_details - def get_ip_addresses(self, subnet: Any, vm_details: Any) -> List[str]: + def get_ip_addresses(self, subnet: Any, vm_details: Any) -> list[str]: if not isinstance(subnet, network.GetSubnetResult): - DataSafeHavenPulumiException(f"'subnet' has invalid type {type(subnet)}") + DataSafeHavenPulumiError(f"'subnet' has invalid type {type(subnet)}") if not isinstance(vm_details, list): - DataSafeHavenPulumiException( + DataSafeHavenPulumiError( f"'vm_details' has invalid type {type(vm_details)}" ) return get_available_ips_from_subnet(subnet)[: len(vm_details)] @@ -95,9 +95,8 @@ def __init__( self, name: str, stack_name: str, - sre_name: str, props: SREResearchDesktopProps, - opts: Optional[ResourceOptions] = None, + opts: ResourceOptions | None = None, ): super().__init__("dsh:sre:ResearchDesktopComponent", name, {}, opts) child_opts = ResourceOptions.merge(ResourceOptions(parent=self), opts) @@ -142,7 +141,9 @@ def __init__( subnet_name=props.subnet_research_desktops_name, virtual_network_name=props.virtual_network_name, virtual_network_resource_group_name=props.virtual_network_resource_group_name, - vm_name=replace_separators(f"sre-{sre_name}-vm-{vm_name}", "-"), + vm_name=Output.concat( + "sre-", props.sre_name, "-vm-", vm_name + ).apply(lambda s: replace_separators(s, "-")), vm_size=vm_size, ), opts=child_opts, @@ -189,7 +190,7 @@ def read_cloudinit( / "secure_research_desktop" ) with open( - resources_path / "srd.cloud_init.mustache.yaml", "r", encoding="utf-8" + resources_path / "srd.cloud_init.mustache.yaml", encoding="utf-8" ) as f_cloudinit: mustache_values = { "domain_sid": domain_sid, diff --git a/data_safe_haven/pulumi/components/sre_software_repositories.py b/data_safe_haven/pulumi/components/sre_software_repositories.py index f85e98c7cb..bc32fe37fd 100644 --- a/data_safe_haven/pulumi/components/sre_software_repositories.py +++ b/data_safe_haven/pulumi/components/sre_software_repositories.py @@ -1,20 +1,19 @@ """Pulumi component for SRE monitoring""" -# Standard library import import pathlib -from typing import Optional -# Third party imports from pulumi import ComponentResource, Input, Output, ResourceOptions from pulumi_azure_native import containerinstance, network, resources, storage -# Local imports from data_safe_haven.pulumi.common.transformations import ( get_available_ips_from_subnet, get_id_from_subnet, get_ip_address_from_container_group, ) +from data_safe_haven.pulumi.dynamic.file_share_file import ( + FileShareFile, + FileShareFileProps, +) from data_safe_haven.utility import FileReader -from ..dynamic.file_share_file import FileShareFile, FileShareFileProps class SRESoftwareRepositoriesProps: @@ -62,9 +61,8 @@ def __init__( self, name: str, stack_name: str, - sre_name: str, props: SRESoftwareRepositoriesProps, - opts: Optional[ResourceOptions] = None, + opts: ResourceOptions | None = 
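One pre-existing problem survives the `Exception` → `Error` rename in `get_ip_addresses` above: both `DataSafeHavenPulumiError(...)` calls construct an exception without `raise`, so an invalid `subnet` or `vm_details` slips through to the `return`. If failing fast is the intent (which the guards suggest), the body would need to be as follows; this sketch reuses the module's existing imports:

```python
def get_ip_addresses(self, subnet: Any, vm_details: Any) -> list[str]:
    # Presumably intended behaviour: raise, rather than discard, the errors
    if not isinstance(subnet, network.GetSubnetResult):
        msg = f"'subnet' has invalid type {type(subnet)}"
        raise DataSafeHavenPulumiError(msg)
    if not isinstance(vm_details, list):
        msg = f"'vm_details' has invalid type {type(vm_details)}"
        raise DataSafeHavenPulumiError(msg)
    return get_available_ips_from_subnet(subnet)[: len(vm_details)]
```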
None, ): super().__init__("dsh:sre:SRESoftwareRepositoriesComponent", name, {}, opts) child_opts = ResourceOptions.merge(ResourceOptions(parent=self), opts) @@ -111,7 +109,7 @@ def __init__( caddyfile_reader = FileReader( resources_path / "software_repositories" / "caddy" / "Caddyfile" ) - caddyfile = FileShareFile( + FileShareFile( f"{self._name}_file_share_caddyfile", FileShareFileProps( destination_path=caddyfile_reader.name, @@ -127,7 +125,7 @@ def __init__( cran_reader = FileReader( resources_path / "software_repositories" / "allowlists" / "cran.allowlist" ) - cran_allowlist = FileShareFile( + FileShareFile( f"{self._name}_file_share_cran_allowlist", FileShareFileProps( destination_path=cran_reader.name, @@ -141,7 +139,7 @@ def __init__( pypi_reader = FileReader( resources_path / "software_repositories" / "allowlists" / "pypi.allowlist" ) - pypi_allowlist = FileShareFile( + FileShareFile( f"{self._name}_file_share_pypi_allowlist", FileShareFileProps( destination_path=pypi_reader.name, @@ -322,7 +320,7 @@ def __init__( ), ) # Register the container group in the SRE private DNS zone - nexus_private_record_set = network.PrivateRecordSet( + network.PrivateRecordSet( f"{self._name}_nexus_private_record_set", a_records=[ network.ARecordArgs( @@ -339,7 +337,7 @@ def __init__( opts=child_opts, ) # Redirect the public DNS to private DNS - nexus_public_record_set = network.RecordSet( + network.RecordSet( f"{self._name}_nexus_public_record_set", cname_record=network.CnameRecordArgs( cname=Output.concat("nexus.privatelink.", props.sre_fqdn) diff --git a/data_safe_haven/pulumi/components/sre_user_services.py b/data_safe_haven/pulumi/components/sre_user_services.py index 3c26964b63..558152157c 100644 --- a/data_safe_haven/pulumi/components/sre_user_services.py +++ b/data_safe_haven/pulumi/components/sre_user_services.py @@ -1,12 +1,8 @@ -# Standard library imports -from typing import Optional - -# Third party imports from pulumi import ComponentResource, Input, Output, ResourceOptions from pulumi_azure_native import network, resources -# Local imports from data_safe_haven.pulumi.common.transformations import get_id_from_subnet + from .sre_gitea_server import SREGiteaServerComponent, SREGiteaServerProps from .sre_hedgedoc_server import SREHedgeDocServerComponent, SREHedgeDocServerProps @@ -70,9 +66,8 @@ def __init__( self, name: str, stack_name: str, - sre_name: str, props: SREUserServicesProps, - opts: Optional[ResourceOptions] = None, + opts: ResourceOptions | None = None, ): super().__init__("dsh:sre:UserServicesComponent", name, {}, opts) child_opts = ResourceOptions.merge(ResourceOptions(parent=self), opts) @@ -115,10 +110,9 @@ def __init__( ) # Deploy the Gitea server - gitea_server = SREGiteaServerComponent( + SREGiteaServerComponent( "sre_gitea_server", stack_name, - sre_name, SREGiteaServerProps( database_subnet_id=props.subnet_databases_id, database_password=props.gitea_database_password, @@ -144,10 +138,9 @@ def __init__( ) # Deploy the HedgeDoc server - hedgedoc_server = SREHedgeDocServerComponent( + SREHedgeDocServerComponent( "sre_hedgedoc_server", stack_name, - sre_name, SREHedgeDocServerProps( database_subnet_id=props.subnet_databases_id, database_password=props.hedgedoc_database_password, diff --git a/data_safe_haven/pulumi/components/virtual_machine.py b/data_safe_haven/pulumi/components/virtual_machine.py index 57d23101ea..8c999f37f6 100644 --- a/data_safe_haven/pulumi/components/virtual_machine.py +++ b/data_safe_haven/pulumi/components/virtual_machine.py @@ -1,22 +1,19 @@ 
"""Pulumi component for virtual machines""" -# Standard library imports -from typing import Any, Dict, Optional +from typing import Any -# Third party imports from pulumi import ComponentResource, Input, Output, ResourceOptions from pulumi_azure_native import compute, network -# Local imports from data_safe_haven.functions import replace_separators class VMProps: """Properties for WindowsVMComponent""" - image_reference_args: Optional[compute.ImageReferenceArgs] + image_reference_args: compute.ImageReferenceArgs | None log_analytics_extension_name: str log_analytics_extension_version: str - os_profile_args: Optional[compute.OSProfileArgs] + os_profile_args: compute.OSProfileArgs | None def __init__( self, @@ -29,10 +26,10 @@ def __init__( virtual_network_resource_group_name: Input[str], vm_name: Input[str], vm_size: Input[str], - admin_username: Optional[Input[str]] = None, - ip_address_public: Optional[Input[bool]] = None, - log_analytics_workspace_id: Optional[Input[str]] = None, - log_analytics_workspace_key: Optional[Input[str]] = None, + admin_username: Input[str] | None = None, + ip_address_public: Input[bool] | None = None, + log_analytics_workspace_id: Input[str] | None = None, + log_analytics_workspace_key: Input[str] | None = None, ): self.admin_password = admin_password self.admin_username = admin_username if admin_username else "dshvmadmin" @@ -49,7 +46,7 @@ def __init__( self.virtual_network_resource_group_name = virtual_network_resource_group_name self.vm_name = vm_name self.vm_name_underscored = Output.from_input(vm_name).apply( - lambda n: n.replace("-", "_") + lambda n: replace_separators(n, "_") ) self.vm_size = vm_size @@ -84,7 +81,7 @@ def __init__( windows_configuration=compute.WindowsConfigurationArgs( enable_automatic_updates=True, patch_settings=compute.PatchSettingsArgs( - patch_mode=compute.LinuxVMGuestPatchMode.AUTOMATIC_BY_PLATFORM, + patch_mode=compute.WindowsVMGuestPatchMode.AUTOMATIC_BY_PLATFORM, ), provision_vm_agent=True, ), @@ -128,9 +125,7 @@ def __init__( class VMComponent(ComponentResource): """Deploy SHM secrets with Pulumi""" - def __init__( - self, name: str, props: VMProps, opts: Optional[ResourceOptions] = None - ): + def __init__(self, name: str, props: VMProps, opts: ResourceOptions | None = None): super().__init__("dsh:common:VMComponent", name, {}, opts) child_opts = ResourceOptions.merge(ResourceOptions(parent=self), opts) @@ -142,11 +137,11 @@ def __init__( ) # Define public IP address if relevant - network_interface_ip_params: Dict[str, Any] = {} + network_interface_ip_params: dict[str, Any] = {} if props.ip_address_public: public_ip = network.PublicIPAddress( f"{self._name}_public_ip", - public_ip_address_name=f"{props.vm_name}-public-ip", + public_ip_address_name=Output.concat(props.vm_name, "-public-ip"), public_ip_allocation_method="Static", resource_group_name=props.resource_group_name, sku=network.PublicIPAddressSkuArgs( @@ -165,7 +160,7 @@ def __init__( ip_configurations=[ network.NetworkInterfaceIPConfigurationArgs( name=props.vm_name_underscored.apply( - lambda n: f"ipconfig{n}".replace("_", "") + lambda n: replace_separators(f"ipconfig{n}", "") ), private_ip_address=props.ip_address_private, private_ip_allocation_method=network.IPAllocationMethod.STATIC, @@ -173,7 +168,7 @@ def __init__( **network_interface_ip_params, ) ], - network_interface_name=f"{props.vm_name}-nic", + network_interface_name=Output.concat(props.vm_name, "-nic"), resource_group_name=props.resource_group_name, opts=child_opts, ) @@ -207,7 +202,7 @@ def __init__( 
managed_disk=compute.ManagedDiskParametersArgs( storage_account_type=compute.StorageAccountTypes.PREMIUM_LRS, ), - name=f"{props.vm_name}-osdisk", + name=Output.concat(props.vm_name, "-osdisk"), ), ), vm_name=props.vm_name, @@ -221,7 +216,7 @@ def __init__( # Register with Log Analytics workspace if props.log_analytics_workspace_key and props.log_analytics_workspace_id: - log_analytics_extension = compute.VirtualMachineExtension( + compute.VirtualMachineExtension( replace_separators(f"{self._name}_log_analytics_extension", "_"), auto_upgrade_minor_version=True, enable_automatic_upgrade=False, @@ -232,7 +227,7 @@ def __init__( ).apply(lambda key: {"workspaceKey": key}), resource_group_name=props.resource_group_name, settings=Output.from_input(props.log_analytics_workspace_id).apply( - lambda id: {"workspaceId": id} + lambda wid: {"workspaceId": wid} ), type=props.log_analytics_extension_name, type_handler_version=props.log_analytics_extension_version, diff --git a/data_safe_haven/pulumi/declarative_shm.py b/data_safe_haven/pulumi/declarative_shm.py index 609cc9aff5..0aec8fb27a 100644 --- a/data_safe_haven/pulumi/declarative_shm.py +++ b/data_safe_haven/pulumi/declarative_shm.py @@ -1,9 +1,8 @@ """Deploy Data Safe Haven Management environment with Pulumi""" -# Third party imports import pulumi -# Local imports from data_safe_haven.config import Config + from .components.shm_bastion import SHMBastionComponent, SHMBastionProps from .components.shm_data import SHMDataComponent, SHMDataProps from .components.shm_domain_controllers import ( @@ -25,7 +24,8 @@ class DeclarativeSHM: def __init__(self, config: Config, shm_name: str) -> None: self.cfg = config self.shm_name = shm_name - self.stack_name = f"shm-{shm_name}" + self.short_name = f"shm-{shm_name}" + self.stack_name = self.short_name def run(self) -> None: # Load pulumi configuration options @@ -35,7 +35,6 @@ def run(self) -> None: networking = SHMNetworkingComponent( "shm_networking", self.stack_name, - self.shm_name, SHMNetworkingProps( admin_ip_addresses=self.cfg.shm.admin_ip_addresses, fqdn=self.cfg.shm.fqdn, @@ -47,10 +46,9 @@ def run(self) -> None: ) # Deploy firewall and routing - firewall = SHMFirewallComponent( + SHMFirewallComponent( "shm_firewall", self.stack_name, - self.shm_name, SHMFirewallProps( domain_controller_private_ip=networking.domain_controller_private_ip, dns_zone=networking.dns_zone, @@ -64,10 +62,9 @@ def run(self) -> None: ) # Deploy firewall and routing - bastion = SHMBastionComponent( + SHMBastionComponent( "shm_bastion", self.stack_name, - self.shm_name, SHMBastionProps( location=self.cfg.azure.location, resource_group_name=networking.resource_group_name, @@ -79,7 +76,6 @@ def run(self) -> None: data = SHMDataComponent( "shm_data", self.stack_name, - self.shm_name, SHMDataProps( admin_group_id=self.cfg.azure.admin_group_id, admin_ip_addresses=self.cfg.shm.admin_ip_addresses, @@ -93,7 +89,6 @@ def run(self) -> None: monitoring = SHMMonitoringComponent( "shm_monitoring", self.stack_name, - self.shm_name, SHMMonitoringProps( dns_resource_group_name=networking.resource_group_name, location=self.cfg.azure.location, @@ -107,7 +102,6 @@ def run(self) -> None: update_servers = SHMUpdateServersComponent( "shm_update_servers", self.stack_name, - self.shm_name, SHMUpdateServersProps( admin_password=data.password_update_server_linux_admin, location=self.cfg.azure.location, @@ -124,7 +118,6 @@ def run(self) -> None: domain_controllers = SHMDomainControllersComponent( "shm_domain_controllers", self.stack_name, - 
self.shm_name, SHMDomainControllersProps( automation_account_modules=monitoring.automation_account_modules, automation_account_name=monitoring.automation_account.name, diff --git a/data_safe_haven/pulumi/declarative_sre.py b/data_safe_haven/pulumi/declarative_sre.py index d9195bc458..a91f851583 100644 --- a/data_safe_haven/pulumi/declarative_sre.py +++ b/data_safe_haven/pulumi/declarative_sre.py @@ -1,9 +1,8 @@ """Pulumi declarative program""" -# Third party imports import pulumi -# Local imports from data_safe_haven.config import Config + from .components.sre_application_gateway import ( SREApplicationGatewayComponent, SREApplicationGatewayProps, @@ -33,7 +32,8 @@ def __init__(self, config: Config, shm_name: str, sre_name: str) -> None: self.cfg = config self.shm_name = shm_name self.sre_name = sre_name - self.stack_name = f"shm-{shm_name}-sre-{sre_name}" + self.short_name = f"sre-{sre_name}" + self.stack_name = f"shm-{shm_name}-{self.short_name}" def run(self) -> None: # Load pulumi configuration options @@ -62,7 +62,6 @@ def run(self) -> None: networking = SRENetworkingComponent( "sre_networking", self.stack_name, - self.sre_name, SRENetworkingProps( location=self.cfg.azure.location, shm_fqdn=self.cfg.shm.fqdn, @@ -78,19 +77,19 @@ def run(self) -> None: shm_subnet_update_servers_prefix=self.pulumi_opts.require( "shm-networking-subnet_update_servers_prefix", ), - shm_zone_name=self.cfg.shm.fqdn, - sre_index=self.cfg.sres[self.sre_name].index, shm_virtual_network_name=self.pulumi_opts.require( "shm-networking-virtual_network_name" ), + shm_zone_name=self.cfg.shm.fqdn, + sre_index=self.cfg.sres[self.sre_name].index, + sre_name=self.sre_name, ), ) # Deploy automated monitoring - monitoring = SREMonitoringComponent( + SREMonitoringComponent( "sre_monitoring", self.stack_name, - self.shm_name, SREMonitoringProps( automation_account_name=self.pulumi_opts.require( "shm-monitoring-automation_account_name" @@ -111,7 +110,6 @@ def run(self) -> None: data = SREDataComponent( "sre_data", self.stack_name, - self.sre_name, SREDataProps( admin_email_address=self.cfg.shm.admin_email_address, admin_group_id=self.cfg.azure.admin_group_id, @@ -132,10 +130,9 @@ def run(self) -> None: ) # Deploy frontend application gateway - application_gateway = SREApplicationGatewayComponent( + SREApplicationGatewayComponent( "sre_application_gateway", self.stack_name, - self.sre_name, SREApplicationGatewayProps( key_vault_certificate_id=data.certificate_secret_id, key_vault_identity=data.managed_identity, @@ -150,7 +147,6 @@ def run(self) -> None: remote_desktop = SRERemoteDesktopComponent( "sre_remote_desktop", self.stack_name, - self.sre_name, SRERemoteDesktopProps( aad_application_name=f"sre-{self.sre_name}-azuread-guacamole", aad_application_fqdn=networking.sre_fqdn, @@ -180,7 +176,6 @@ def run(self) -> None: research_desktops = SREResearchDesktopComponent( "sre_secure_research_desktop", self.stack_name, - self.sre_name, SREResearchDesktopProps( admin_password=data.password_secure_research_desktop_admin, domain_sid=self.pulumi_opts.require( @@ -204,6 +199,7 @@ def run(self) -> None: "shm-monitoring-log_analytics_workspace_key" ), sre_fqdn=networking.sre_fqdn, + sre_name=self.sre_name, storage_account_userdata_name=data.storage_account_userdata_name, storage_account_securedata_name=data.storage_account_securedata_name, subnet_research_desktops=networking.subnet_research_desktops, @@ -219,10 +215,9 @@ def run(self) -> None: ) # Deploy software repository servers - software_repositories = 
SRESoftwareRepositoriesComponent( + SRESoftwareRepositoriesComponent( "shm_update_servers", self.stack_name, - self.shm_name, SRESoftwareRepositoriesProps( location=self.cfg.azure.location, networking_resource_group_name=networking.resource_group.name, @@ -239,10 +234,9 @@ def run(self) -> None: ) # Deploy containerised user services - user_services = SREUserServicesComponent( + SREUserServicesComponent( "sre_user_services", self.stack_name, - self.sre_name, SREUserServicesProps( domain_netbios_name=self.pulumi_opts.require( "shm-domain_controllers-netbios_name" diff --git a/data_safe_haven/pulumi/dynamic/azuread_application.py b/data_safe_haven/pulumi/dynamic/azuread_application.py index 3a7a69b678..6175f2acb5 100644 --- a/data_safe_haven/pulumi/dynamic/azuread_application.py +++ b/data_safe_haven/pulumi/dynamic/azuread_application.py @@ -1,15 +1,13 @@ """Pulumi dynamic component for AzureAD applications.""" -# Standard library imports from contextlib import suppress -from typing import Any, Dict, Optional +from typing import Any -# Third party imports from pulumi import Input, Output, ResourceOptions from pulumi.dynamic import CreateResult, DiffResult, Resource, UpdateResult -# Local imports -from data_safe_haven.exceptions import DataSafeHavenMicrosoftGraphException +from data_safe_haven.exceptions import DataSafeHavenMicrosoftGraphError from data_safe_haven.external import GraphApi + from .dsh_resource_provider import DshResourceProvider @@ -29,7 +27,7 @@ def __init__( class AzureADApplicationProvider(DshResourceProvider): @staticmethod - def refresh(props: Dict[str, Any]) -> Dict[str, Any]: + def refresh(props: dict[str, Any]) -> dict[str, Any]: outs = dict(**props) with suppress(Exception): graph_api = GraphApi(auth_token=outs["auth_token"]) @@ -40,7 +38,7 @@ def refresh(props: Dict[str, Any]) -> Dict[str, Any]: outs["application_id"] = json_response["appId"] return outs - def create(self, props: Dict[str, Any]) -> CreateResult: + def create(self, props: dict[str, Any]) -> CreateResult: """Create new AzureAD application.""" outs = dict(**props) try: @@ -61,41 +59,43 @@ def create(self, props: Dict[str, Any]) -> CreateResult: outs["object_id"] = json_response["id"] outs["application_id"] = json_response["appId"] except Exception as exc: - raise DataSafeHavenMicrosoftGraphException( - f"Failed to create application [green]{props['application_name']}[/] in AzureAD.\n{str(exc)}" - ) from exc + msg = f"Failed to create application [green]{props['application_name']}[/] in AzureAD.\n{exc}" + raise DataSafeHavenMicrosoftGraphError(msg) from exc return CreateResult( f"AzureADApplication-{props['application_name']}", outs=outs, ) - def delete(self, id_: str, props: Dict[str, Any]) -> None: + def delete(self, id_: str, props: dict[str, Any]) -> None: """Delete an AzureAD application.""" + # Use `id` as a no-op to avoid ARG002 while maintaining function signature + id(id_) try: graph_api = GraphApi( auth_token=props["auth_token"], ) graph_api.delete_application(props["application_name"]) except Exception as exc: - raise DataSafeHavenMicrosoftGraphException( - f"Failed to delete application [green]{props['application_name']}[/] from AzureAD.\n{str(exc)}" - ) from exc + msg = f"Failed to delete application [green]{props['application_name']}[/] from AzureAD.\n{exc}" + raise DataSafeHavenMicrosoftGraphError(msg) from exc def diff( self, id_: str, - old_props: Dict[str, Any], - new_props: Dict[str, Any], + old_props: dict[str, Any], + new_props: dict[str, Any], ) -> DiffResult: """Calculate diff 
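The exception hunks in this provider show two conventions applied across the diff: messages are bound to `msg` before raising (ruff's EM102 forbids f-string literals directly inside `raise`), and `{str(exc)}` is simplified to `{exc}` since f-string formatting already calls `str()`. Chaining with `from exc` keeps the original traceback. A self-contained sketch:

```python
class ExampleGraphError(Exception):
    """Stand-in for the DataSafeHaven*Error hierarchy."""


def create_application(application_name: str) -> None:
    try:
        raise ConnectionError("simulated Graph API failure")
    except Exception as exc:
        # Bind the message first (EM102), then raise with the cause chained
        msg = f"Failed to create application {application_name}.\n{exc}"
        raise ExampleGraphError(msg) from exc


try:
    create_application("example-app")
except ExampleGraphError as error:
    print(error)
```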
between old and new state""" + # Use `id` as a no-op to avoid ARG002 while maintaining function signature + id(id_) # Exclude "auth_token" which should not trigger a diff return self.partial_diff(old_props, new_props, ["auth_token"]) def update( self, id_: str, - old_props: Dict[str, Any], - new_props: Dict[str, Any], + old_props: dict[str, Any], + new_props: dict[str, Any], ) -> UpdateResult: """Updating is deleting followed by creating.""" # Note that we need to use the auth token from new_props @@ -115,7 +115,7 @@ def __init__( self, name: str, props: AzureADApplicationProps, - opts: Optional[ResourceOptions] = None, + opts: ResourceOptions | None = None, ): super().__init__( AzureADApplicationProvider(), diff --git a/data_safe_haven/pulumi/dynamic/blob_container_acl.py b/data_safe_haven/pulumi/dynamic/blob_container_acl.py index 16942c3f2c..b921eec0ec 100644 --- a/data_safe_haven/pulumi/dynamic/blob_container_acl.py +++ b/data_safe_haven/pulumi/dynamic/blob_container_acl.py @@ -1,14 +1,12 @@ """Pulumi dynamic component for setting ACLs on an Azure blob container.""" -# Standard library imports -from typing import Any, Dict, Optional +from typing import Any -# Third party imports from pulumi import Input, Output, ResourceOptions from pulumi.dynamic import CreateResult, DiffResult, Resource -# Local imports -from data_safe_haven.exceptions import DataSafeHavenPulumiException +from data_safe_haven.exceptions import DataSafeHavenPulumiError from data_safe_haven.external import AzureApi + from .dsh_resource_provider import DshResourceProvider @@ -17,6 +15,7 @@ class BlobContainerAclProps: def __init__( self, + *, acl_user: Input[str], acl_group: Input[str], acl_other: Input[str], @@ -51,7 +50,7 @@ def __init__( class BlobContainerAclProvider(DshResourceProvider): - def create(self, props: Dict[str, Any]) -> CreateResult: + def create(self, props: dict[str, Any]) -> CreateResult: """Set ACLs for a given blob container.""" outs = dict(**props) try: @@ -63,16 +62,17 @@ def create(self, props: Dict[str, Any]) -> CreateResult: storage_account_name=props["storage_account_name"], ) except Exception as exc: - raise DataSafeHavenPulumiException( - f"Failed to set ACLs on storage account [green]{props['storage_account_name']}[/].\n{str(exc)}" - ) from exc + msg = f"Failed to set ACLs on storage account [green]{props['storage_account_name']}[/].\n{exc}" + raise DataSafeHavenPulumiError(msg) from exc return CreateResult( f"BlobContainerAcl-{props['container_name']}", outs=outs, ) - def delete(self, id_: str, props: Dict[str, Any]) -> None: + def delete(self, id_: str, props: dict[str, Any]) -> None: """Restore default ACLs""" + # Use `id` as a no-op to avoid ARG002 while maintaining function signature + id(id_) try: azure_api = AzureApi(props["subscription_name"]) azure_api.set_blob_container_acl( @@ -82,18 +82,18 @@ def delete(self, id_: str, props: Dict[str, Any]) -> None: storage_account_name=props["storage_account_name"], ) except Exception as exc: - raise DataSafeHavenPulumiException( - f"Failed to delete custom ACLs on storage account [green]{props['storage_account_name']}[/].\n{str(exc)}" - ) from exc - return + msg = f"Failed to delete custom ACLs on storage account [green]{props['storage_account_name']}[/].\n{exc}" + raise DataSafeHavenPulumiError(msg) from exc def diff( self, id_: str, - old_props: Dict[str, Any], - new_props: Dict[str, Any], + old_props: dict[str, Any], + new_props: dict[str, Any], ) -> DiffResult: """Calculate diff between old and new state""" + # Use `id` as a no-op to 
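The repeated `id(id_)` lines added in these providers are a deliberate no-op: they "use" arguments that the `ResourceProvider` interface requires but a given implementation ignores, silencing ruff's unused-argument rule (ARG002) without changing the signature. Equivalent alternatives, sketched below, are an explicit `del` or a `# noqa` marker; which to prefer is a style call:

```python
from typing import Any


def delete_with_id_noop(id_: str, props: dict[str, Any]) -> None:
    id(id_)  # style used in this diff: "use" the argument via a cheap builtin call
    print(f"deleting {props['container_name']}")


def delete_with_del(id_: str, props: dict[str, Any]) -> None:
    del id_  # explicit discard, arguably clearer about intent
    print(f"deleting {props['container_name']}")


def delete_with_noqa(id_: str, props: dict[str, Any]) -> None:  # noqa: ARG002
    print(f"deleting {props['container_name']}")


for delete in (delete_with_id_noop, delete_with_del, delete_with_noqa):
    delete("example-id", {"container_name": "egress"})
```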
avoid ARG002 while maintaining function signature + id(id_) return self.partial_diff(old_props, new_props) @@ -104,6 +104,6 @@ def __init__( self, name: str, props: BlobContainerAclProps, - opts: Optional[ResourceOptions] = None, + opts: ResourceOptions | None = None, ): super().__init__(BlobContainerAclProvider(), name, vars(props), opts) diff --git a/data_safe_haven/pulumi/dynamic/compiled_dsc.py b/data_safe_haven/pulumi/dynamic/compiled_dsc.py index d4f6e5fb35..d2c59df274 100644 --- a/data_safe_haven/pulumi/dynamic/compiled_dsc.py +++ b/data_safe_haven/pulumi/dynamic/compiled_dsc.py @@ -1,13 +1,12 @@ """Pulumi dynamic component for compiled desired state configuration.""" -# Standard library imports -from typing import Any, Dict, Optional, Sequence +from collections.abc import Sequence +from typing import Any -# Third party imports from pulumi import Input, ResourceOptions from pulumi.dynamic import CreateResult, DiffResult, Resource -# Local imports from data_safe_haven.external import AzureApi + from .dsh_resource_provider import DshResourceProvider @@ -20,7 +19,7 @@ def __init__( configuration_name: Input[str], content_hash: Input[str], location: Input[str], - parameters: Input[Dict[str, Any]], + parameters: Input[dict[str, Any]], resource_group_name: Input[str], required_modules: Input[Sequence[str]], subscription_name: Input[str], @@ -36,7 +35,7 @@ def __init__( class CompiledDscProvider(DshResourceProvider): - def create(self, props: Dict[str, Any]) -> CreateResult: + def create(self, props: dict[str, Any]) -> CreateResult: """Create compiled desired state file.""" azure_api = AzureApi(props["subscription_name"]) # Compile desired state @@ -53,17 +52,20 @@ def create(self, props: Dict[str, Any]) -> CreateResult: outs=dict(**props), ) - def delete(self, id_: str, props: Dict[str, Any]) -> None: + def delete(self, id_: str, props: dict[str, Any]) -> None: """The Python SDK does not support configuration deletion""" - return + # Use `id` as a no-op to avoid ARG002 while maintaining function signature + id((id_, props)) def diff( self, id_: str, - old_props: Dict[str, Any], - new_props: Dict[str, Any], + old_props: dict[str, Any], + new_props: dict[str, Any], ) -> DiffResult: """Calculate diff between old and new state""" + # Use `id` as a no-op to avoid ARG002 while maintaining function signature + id(id_) return self.partial_diff(old_props, new_props, []) @@ -74,6 +76,6 @@ def __init__( self, name: str, props: CompiledDscProps, - opts: Optional[ResourceOptions] = None, + opts: ResourceOptions | None = None, ): super().__init__(CompiledDscProvider(), name, {**vars(props)}, opts) diff --git a/data_safe_haven/pulumi/dynamic/dsh_resource_provider.py b/data_safe_haven/pulumi/dynamic/dsh_resource_provider.py index 743aead128..4d9a5dbbba 100644 --- a/data_safe_haven/pulumi/dynamic/dsh_resource_provider.py +++ b/data_safe_haven/pulumi/dynamic/dsh_resource_provider.py @@ -1,8 +1,7 @@ """Pulumi base dynamic component.""" -# Standard library imports -from typing import Any, Dict, Sequence +from collections.abc import Sequence +from typing import Any -# Third party imports from pulumi.dynamic import ( CheckResult, CreateResult, @@ -12,30 +11,31 @@ UpdateResult, ) -# Local imports -from data_safe_haven.exceptions import DataSafeHavenNotImplementedException +from data_safe_haven.exceptions import DataSafeHavenNotImplementedError class DshResourceProvider(ResourceProvider): @staticmethod def partial_diff( - old_props: Dict[str, Any], - new_props: Dict[str, Any], + old_props: dict[str, Any], + 
new_props: dict[str, Any], excluded_props: Sequence[str] = [], ) -> DiffResult: """Calculate diff between old and new state""" # List any values that were not present in old_props or have been changed # Exclude any from excluded_props which should not trigger a diff altered_props = [ - property - for property in [ + property_ + for property_ in [ key for key in new_props.keys() if key not in excluded_props ] - if (property not in old_props) - or (old_props[property] != new_props[property]) + if (property_ not in old_props) + or (old_props[property_] != new_props[property_]) ] stable_props = [ - property for property in old_props.keys() if property not in altered_props + property_ + for property_ in old_props.keys() + if property_ not in altered_props ] return DiffResult( changes=(altered_props != []), # changes are needed @@ -45,38 +45,43 @@ def partial_diff( ) @staticmethod - def refresh(props: Dict[str, Any]) -> Dict[str, Any]: + def refresh(props: dict[str, Any]) -> dict[str, Any]: return dict(**props) def check( - self, old_props: Dict[str, Any], new_props: Dict[str, Any] + self, old_props: dict[str, Any], new_props: dict[str, Any] ) -> CheckResult: """Validate that the new properties are valid""" + # Use `id` as a no-op to avoid ARG002 while maintaining function signature + id(old_props) return CheckResult(self.refresh(new_props), []) - def create(self, props: Dict[str, Any]) -> CreateResult: + def create(self, props: dict[str, Any]) -> CreateResult: """Create compiled desired state file.""" - raise DataSafeHavenNotImplementedException( - "DshResourceProvider::create() must be implemented" - ) + # Use `id` as a no-op to avoid ARG002 while maintaining function signature + id(props) + msg = "DshResourceProvider::create() must be implemented" + raise DataSafeHavenNotImplementedError(msg) - def delete(self, id_: str, props: Dict[str, Any]) -> None: + def delete(self, id_: str, props: dict[str, Any]) -> None: """Delete the resource.""" - raise DataSafeHavenNotImplementedException( - "DshResourceProvider::delete() must be implemented" - ) + # Use `id` as a no-op to avoid ARG002 while maintaining function signature + id((id_, props)) + msg = "DshResourceProvider::delete() must be implemented" + raise DataSafeHavenNotImplementedError(msg) def diff( self, id_: str, - old_props: Dict[str, Any], - new_props: Dict[str, Any], + old_props: dict[str, Any], + new_props: dict[str, Any], ) -> DiffResult: - raise DataSafeHavenNotImplementedException( - "DshResourceProvider::diff() must be implemented" - ) + # Use `id` as a no-op to avoid ARG002 while maintaining function signature + id((id_, old_props, new_props)) + msg = "DshResourceProvider::diff() must be implemented" + raise DataSafeHavenNotImplementedError(msg) - def read(self, id_: str, props: Dict[str, Any]) -> ReadResult: + def read(self, id_: str, props: dict[str, Any]) -> ReadResult: """Read data for a resource not managed by Pulumi.""" props = self.refresh(props) return ReadResult(id_, props) @@ -84,8 +89,8 @@ def read(self, id_: str, props: Dict[str, Any]) -> ReadResult: def update( self, id_: str, - old_props: Dict[str, Any], - new_props: Dict[str, Any], + old_props: dict[str, Any], + new_props: dict[str, Any], ) -> UpdateResult: """Updating is deleting followed by creating.""" self.delete(id_, old_props) diff --git a/data_safe_haven/pulumi/dynamic/file_share_file.py b/data_safe_haven/pulumi/dynamic/file_share_file.py index 29f8759839..ab80a1e8ae 100644 --- a/data_safe_haven/pulumi/dynamic/file_share_file.py +++ 
b/data_safe_haven/pulumi/dynamic/file_share_file.py @@ -1,16 +1,13 @@ """Pulumi dynamic component for files uploaded to an Azure FileShare.""" -# Standard library imports from contextlib import suppress -from typing import Any, Dict, Optional +from typing import Any -# Third party imports from azure.core.exceptions import ResourceNotFoundError from azure.storage.fileshare import ShareDirectoryClient, ShareFileClient from pulumi import Input, Output, ResourceOptions from pulumi.dynamic import CreateResult, DiffResult, Resource -# Local imports -from data_safe_haven.exceptions import DataSafeHavenAzureException +from data_safe_haven.exceptions import DataSafeHavenAzureError from .dsh_resource_provider import DshResourceProvider @@ -69,7 +66,7 @@ def get_file_client( ) @staticmethod - def refresh(props: Dict[str, Any]) -> Dict[str, Any]: + def refresh(props: dict[str, Any]) -> dict[str, Any]: with suppress(Exception): file_client = FileShareFileProvider.get_file_client( props["storage_account_name"], @@ -81,10 +78,10 @@ def refresh(props: Dict[str, Any]) -> Dict[str, Any]: props["file_name"] = "" return dict(**props) - def create(self, props: Dict[str, Any]) -> CreateResult: + def create(self, props: dict[str, Any]) -> CreateResult: """Create file in target storage account with specified contents.""" outs = dict(**props) - file_client: Optional[ShareFileClient] = None + file_client: ShareFileClient | None = None try: file_client = self.get_file_client( props["storage_account_name"], @@ -96,17 +93,18 @@ def create(self, props: Dict[str, Any]) -> CreateResult: outs["file_name"] = file_client.file_name except Exception as exc: file_name = file_client.file_name if file_client else "" - raise DataSafeHavenAzureException( - f"Failed to upload data to [green]{file_name}[/] in [green]{props['share_name']}[/].\n{str(exc)}" - ) from exc + msg = f"Failed to upload data to [green]{file_name}[/] in [green]{props['share_name']}[/].\n{exc}" + raise DataSafeHavenAzureError(msg) from exc return CreateResult( f"filesharefile-{props['destination_path'].replace('/', '-')}", outs=outs, ) - def delete(self, id_: str, props: Dict[str, Any]) -> None: + def delete(self, id_: str, props: dict[str, Any]) -> None: """Delete a file from the target storage account""" - file_client: Optional[ShareFileClient] = None + # Use `id` as a no-op to avoid ARG002 while maintaining function signature + id(id_) + file_client: ShareFileClient | None = None try: file_client = self.get_file_client( props["storage_account_name"], @@ -118,17 +116,18 @@ def delete(self, id_: str, props: Dict[str, Any]) -> None: file_client.delete_file() except Exception as exc: file_name = file_client.file_name if file_client else "" - raise DataSafeHavenAzureException( - f"Failed to delete file [green]{file_name}[/] in [green]{props['share_name']}[/].\n{str(exc)}" - ) from exc + msg = f"Failed to delete file [green]{file_name}[/] in [green]{props['share_name']}[/].\n{exc}" + raise DataSafeHavenAzureError(msg) from exc def diff( self, id_: str, - old_props: Dict[str, Any], - new_props: Dict[str, Any], + old_props: dict[str, Any], + new_props: dict[str, Any], ) -> DiffResult: """Calculate diff between old and new state""" + # Use `id` as a no-op to avoid ARG002 while maintaining function signature + id(id_) # Exclude "storage_account_key" which should not trigger a diff return self.partial_diff(old_props, new_props, ["storage_account_key"]) @@ -141,7 +140,7 @@ def __init__( self, name: str, props: FileShareFileProps, - opts: Optional[ResourceOptions] = 
None, + opts: ResourceOptions | None = None, ): super().__init__( FileShareFileProvider(), name, {"file_name": None, **vars(props)}, opts diff --git a/data_safe_haven/pulumi/dynamic/remote_powershell.py b/data_safe_haven/pulumi/dynamic/remote_powershell.py index b1836d37d8..58b51b29dd 100644 --- a/data_safe_haven/pulumi/dynamic/remote_powershell.py +++ b/data_safe_haven/pulumi/dynamic/remote_powershell.py @@ -1,13 +1,11 @@ """Pulumi dynamic component for running remote scripts on an Azure VM.""" -# Standard library imports -from typing import Any, Dict, Optional +from typing import Any -# Third party imports from pulumi import Input, Output, ResourceOptions from pulumi.dynamic import CreateResult, DiffResult, Resource -# Local imports from data_safe_haven.external import AzureApi + from .dsh_resource_provider import DshResourceProvider @@ -18,11 +16,11 @@ def __init__( self, script_contents: Input[str], script_hash: Input[str], - script_parameters: Input[Dict[str, Any]], + script_parameters: Input[dict[str, Any]], subscription_name: Input[str], vm_name: Input[str], vm_resource_group_name: Input[str], - force_refresh: Optional[Input[bool]] = False, + force_refresh: Input[bool] | None, ): self.force_refresh = force_refresh self.script_contents = script_contents @@ -34,7 +32,7 @@ def __init__( class RemoteScriptProvider(DshResourceProvider): - def create(self, props: Dict[str, Any]) -> CreateResult: + def create(self, props: dict[str, Any]) -> CreateResult: """Create compiled desired state file.""" outs = dict(**props) azure_api = AzureApi(props["subscription_name"]) @@ -50,17 +48,20 @@ def create(self, props: Dict[str, Any]) -> CreateResult: outs=outs, ) - def delete(self, id_: str, props: Dict[str, Any]) -> None: + def delete(self, id_: str, props: dict[str, Any]) -> None: """The Python SDK does not support configuration deletion""" - return + # Use `id` as a no-op to avoid ARG002 while maintaining function signature + id((id_, props)) def diff( self, id_: str, - old_props: Dict[str, Any], - new_props: Dict[str, Any], + old_props: dict[str, Any], + new_props: dict[str, Any], ) -> DiffResult: """Calculate diff between old and new state""" + # Use `id` as a no-op to avoid ARG002 while maintaining function signature + id(id_) if new_props["force_refresh"]: return DiffResult( changes=True, @@ -79,7 +80,7 @@ def __init__( self, name: str, props: RemoteScriptProps, - opts: Optional[ResourceOptions] = None, + opts: ResourceOptions | None = None, ): super().__init__( RemoteScriptProvider(), name, {"script_output": None, **vars(props)}, opts diff --git a/data_safe_haven/pulumi/dynamic/ssl_certificate.py b/data_safe_haven/pulumi/dynamic/ssl_certificate.py index 1a861ed4f5..a3417e2206 100644 --- a/data_safe_haven/pulumi/dynamic/ssl_certificate.py +++ b/data_safe_haven/pulumi/dynamic/ssl_certificate.py @@ -1,9 +1,7 @@ """Pulumi dynamic component for SSL certificates uploaded to an Azure KeyVault.""" -# Standard library imports from contextlib import suppress -from typing import Any, Dict, Optional +from typing import Any -# Third party imports from acme.errors import ValidationError from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey from cryptography.hazmat.primitives.serialization import ( @@ -16,9 +14,9 @@ from pulumi.dynamic import CreateResult, DiffResult, Resource from simple_acme_dns import ACMEClient -# Local imports -from data_safe_haven.exceptions import DataSafeHavenSSLException +from data_safe_haven.exceptions import DataSafeHavenSSLError from data_safe_haven.external 
import AzureApi + from .dsh_resource_provider import DshResourceProvider @@ -44,7 +42,7 @@ def __init__( class SSLCertificateProvider(DshResourceProvider): @staticmethod - def refresh(props: Dict[str, Any]) -> Dict[str, Any]: + def refresh(props: dict[str, Any]) -> dict[str, Any]: outs = dict(**props) with suppress(Exception): azure_api = AzureApi(outs["subscription_name"]) @@ -55,7 +53,7 @@ def refresh(props: Dict[str, Any]) -> Dict[str, Any]: outs["secret_id"] = certificate.secret_id return outs - def create(self, props: Dict[str, Any]) -> CreateResult: + def create(self, props: dict[str, Any]) -> CreateResult: """Create new SSL certificate.""" outs = dict(**props) try: @@ -84,12 +82,13 @@ def create(self, props: Dict[str, Any]) -> CreateResult: if not client.check_dns_propagation( authoritative=False, round_robin=True, verbose=False ): - raise DataSafeHavenSSLException("DNS propagation failed") + msg = "DNS propagation failed" + raise DataSafeHavenSSLError(msg) # Request a signed certificate try: certificate_bytes = client.request_certificate() except ValidationError as exc: - raise DataSafeHavenSSLException( + raise DataSafeHavenSSLError( "ACME validation error:\n" + "\n".join([str(auth_error) for auth_error in exc.failed_authzrs]) ) from exc @@ -98,18 +97,16 @@ def create(self, props: Dict[str, Any]) -> CreateResult: # compatibility with ApplicationGateway private_key = load_pem_private_key(private_key_bytes, None) if not isinstance(private_key, RSAPrivateKey): - raise TypeError( - f"Private key is of type {type(private_key)} not RSAPrivateKey." - ) + msg = f"Private key is of type {type(private_key)} not RSAPrivateKey." + raise TypeError(msg) all_certs = [ load_pem_x509_certificate(data) for data in certificate_bytes.split(b"\n\n") ] - certificate = [ + certificate = next( cert for cert in all_certs if props["domain_name"] in str(cert.subject) - ][0] + ) ca_certs = [cert for cert in all_certs if cert != certificate] - pkcs12._ALLOWED_PKCS12_TYPES pfx_bytes = pkcs12.serialize_key_and_certificates( props["certificate_secret_name"].encode("utf-8"), private_key, @@ -125,16 +122,20 @@ def create(self, props: Dict[str, Any]) -> CreateResult: ) outs["secret_id"] = kvcert.secret_id except Exception as exc: - raise DataSafeHavenSSLException( - f"Failed to create SSL certificate [green]{props['certificate_secret_name']}[/] for [green]{props['domain_name']}[/].\n{str(exc)}" - ) from exc + msg = ( + f"Failed to create SSL certificate [green]{props['certificate_secret_name']}[/]" + f" for [green]{props['domain_name']}[/].\n{exc}" + ) + raise DataSafeHavenSSLError(msg) from exc return CreateResult( f"SSLCertificate-{props['certificate_secret_name']}", outs=outs, ) - def delete(self, id_: str, props: Dict[str, Any]) -> None: + def delete(self, id_: str, props: dict[str, Any]) -> None: """Delete an SSL certificate.""" + # Use `id` as a no-op to avoid ARG002 while maintaining function signature + id(id_) try: # Remove the DNS record azure_api = AzureApi(props["subscription_name"]) @@ -149,17 +150,21 @@ def delete(self, id_: str, props: Dict[str, Any]) -> None: key_vault_name=props["key_vault_name"], ) except Exception as exc: - raise DataSafeHavenSSLException( - f"Failed to delete SSL certificate [green]{props['certificate_secret_name']}[/] for [green]{props['domain_name']}[/].\n{str(exc)}" - ) from exc + msg = ( + f"Failed to delete SSL certificate [green]{props['certificate_secret_name']}[/]" + f" for [green]{props['domain_name']}[/].\n{exc}" + ) + raise DataSafeHavenSSLError(msg) from exc def 
diff( self, id_: str, - old_props: Dict[str, Any], - new_props: Dict[str, Any], + old_props: dict[str, Any], + new_props: dict[str, Any], ) -> DiffResult: """Calculate diff between old and new state""" + # Use `id` as a no-op to avoid ARG002 while maintaining function signature + id(id_) return self.partial_diff(old_props, new_props, []) @@ -171,7 +176,7 @@ def __init__( self, name: str, props: SSLCertificateProps, - opts: Optional[ResourceOptions] = None, + opts: ResourceOptions | None = None, ): super().__init__( SSLCertificateProvider(), diff --git a/data_safe_haven/pulumi/pulumi_stack.py b/data_safe_haven/pulumi/pulumi_stack.py index 3d43117dcb..e388e0ac3e 100644 --- a/data_safe_haven/pulumi/pulumi_stack.py +++ b/data_safe_haven/pulumi/pulumi_stack.py @@ -1,21 +1,20 @@ """Deploy with Pulumi""" -# Standard library imports -import importlib.metadata as metadata +import os import pathlib import shutil import subprocess import time from contextlib import suppress -from typing import Any, Dict, Optional, Tuple +from importlib import metadata +from typing import Any -# Third party imports from pulumi import automation -# Local imports from data_safe_haven.config import Config -from data_safe_haven.exceptions import DataSafeHavenPulumiException +from data_safe_haven.exceptions import DataSafeHavenPulumiError from data_safe_haven.external import AzureApi, AzureCli from data_safe_haven.utility import Logger + from .declarative_shm import DeclarativeSHM from .declarative_sre import DeclarativeSRE @@ -30,13 +29,13 @@ def __init__( # sre_name: Optional[str] = None, ) -> None: self.cfg: Config = config - self.env_: Optional[Dict[str, Any]] = None + self.env_: dict[str, Any] | None = None self.logger = Logger() - self.stack_: Optional[automation.Stack] = None - self.options: Dict[str, Tuple[str, bool, bool]] = {} + self.stack_: automation.Stack | None = None + self.options: dict[str, tuple[str, bool, bool]] = {} self.program = program self.stack_name = self.program.stack_name - self.work_dir = config.work_directory / "pulumi" + self.work_dir = config.work_directory / "pulumi" / self.program.short_name self.work_dir.mkdir(parents=True, exist_ok=True) self.login() # Log in to the Pulumi backend @@ -46,7 +45,7 @@ def local_stack_path(self) -> pathlib.Path: return self.work_dir / f"Pulumi.{self.stack_name}.yaml" @property - def env(self) -> Dict[str, Any]: + def env(self) -> dict[str, Any]: if not self.env_: azure_api = AzureApi(self.cfg.subscription_name) backend_storage_account_keys = azure_api.get_storage_account_keys( @@ -77,16 +76,15 @@ def stack(self) -> automation.Stack: ), ) except automation.errors.CommandError as exc: - raise DataSafeHavenPulumiException( - f"Could not load Pulumi stack {self.stack_name}.\n{str(exc)}" - ) from exc + msg = f"Could not load Pulumi stack {self.stack_name}.\n{exc}" + raise DataSafeHavenPulumiError(msg) from exc return self.stack_ - def add_option(self, name: str, value: str, replace: bool = False) -> None: + def add_option(self, name: str, value: str, *, replace: bool) -> None: """Add a public configuration option""" self.options[name] = (value, False, replace) - def add_secret(self, name: str, value: str, replace: bool = False) -> None: + def add_secret(self, name: str, value: str, *, replace: bool) -> None: """Add a secret configuration option if it does not exist""" self.options[name] = (value, True, replace) @@ -95,14 +93,13 @@ def apply_config_options(self) -> None: try: for name, (value, is_secret, replace) in self.options.items(): if replace: - 
self.set_config(name, value, is_secret) + self.set_config(name, value, secret=is_secret) else: - self.ensure_config(name, value, is_secret) + self.ensure_config(name, value, secret=is_secret) self.options = {} except Exception as exc: - raise DataSafeHavenPulumiException( - f"Applying Pulumi configuration options failed.\n{str(exc)}." - ) from exc + msg = f"Applying Pulumi configuration options failed.\n{exc}." + raise DataSafeHavenPulumiError(msg) from exc def copy_option(self, name: str, other_stack: "PulumiStack") -> None: """Copy a public configuration option from another Pulumi stack""" @@ -122,9 +119,8 @@ def deploy(self) -> None: self.preview() self.update() except Exception as exc: - raise DataSafeHavenPulumiException( - f"Pulumi deployment failed.\n{str(exc)}" - ) from exc + msg = f"Pulumi deployment failed.\n{exc}" + raise DataSafeHavenPulumiError(msg) from exc def destroy(self) -> None: """Destroy deployed infrastructure.""" @@ -152,14 +148,15 @@ def destroy(self) -> None: if self.stack_: self.stack_.workspace.remove_stack(self.stack_name) except automation.errors.CommandError as exc: - raise DataSafeHavenPulumiException("Pulumi destroy failed.") from exc + msg = "Pulumi destroy failed." + raise DataSafeHavenPulumiError(msg) from exc - def ensure_config(self, name: str, value: str, secret: bool = False) -> None: + def ensure_config(self, name: str, value: str, *, secret: bool) -> None: """Ensure that config values have been set, setting them if they do not exist""" try: self.stack.get_config(name) except automation.errors.CommandError: - self.set_config(name, value, secret) + self.set_config(name, value, secret=secret) def evaluate(self, result: str) -> None: """Evaluate a Pulumi operation.""" @@ -167,7 +164,8 @@ def evaluate(self, result: str) -> None: self.logger.info("Pulumi operation [green]succeeded[/].") else: self.logger.error("Pulumi operation [red]failed[/].") - raise DataSafeHavenPulumiException("Pulumi operation failed.") + msg = "Pulumi operation failed." + raise DataSafeHavenPulumiError(msg) def initialise_workdir(self) -> None: """Create project directory if it does not exist and update local stack.""" @@ -179,13 +177,12 @@ def initialise_workdir(self) -> None: # If stack information is saved in the config file then apply it here if self.stack_name in self.cfg.pulumi.stacks.keys(): self.logger.info( - f"Loading stack [green]{self.stack_name}[/] information from config" + f"Updating stack [green]{self.stack_name}[/] information from config" ) self.cfg.write_stack(self.stack_name, self.local_stack_path) except Exception as exc: - raise DataSafeHavenPulumiException( - f"Initialising Pulumi working directory failed.\n{str(exc)}." - ) from exc + msg = f"Initialising Pulumi working directory failed.\n{exc}." + raise DataSafeHavenPulumiError(msg) from exc def install_plugins(self) -> None: """For inline programs, we must manage plugins ourselves.""" @@ -194,36 +191,41 @@ def install_plugins(self) -> None: "azure-native", metadata.version("pulumi-azure-native") ) except Exception as exc: - raise DataSafeHavenPulumiException( - f"Installing Pulumi plugins failed.\n{str(exc)}." - ) from exc + msg = f"Installing Pulumi plugins failed.\n{exc}." 
+            raise DataSafeHavenPulumiError(msg) from exc

     def login(self) -> None:
         """Login to Pulumi."""
         try:
-            try:
+            # Check whether we're already logged in
+            with suppress(DataSafeHavenPulumiError):
                 username = self.whoami()
                 self.logger.info(f"Logged into Pulumi as [green]{username}[/]")
-            except DataSafeHavenPulumiException:
-                AzureCli().login()  # this is needed to read the encryption key from the keyvault
-                env_vars = " ".join([f"{k}='{v}'" for k, v in self.env.items()])
-                command = (
-                    f"pulumi login 'azblob://{self.cfg.pulumi.storage_container_name}'"
-                )
-                with subprocess.Popen(
-                    f"{env_vars} {command}",
-                    shell=True,
-                    cwd=self.work_dir,
-                    stdout=subprocess.PIPE,
-                    stderr=subprocess.STDOUT,
+                return
+            # Otherwise log in to Pulumi
+            try:
+                # Ensure we are authenticated with the Azure CLI
+                # Without this, we cannot read the encryption key from the keyvault
+                AzureCli().login()
+                process = subprocess.run(
+                    [
+                        "pulumi",
+                        "login",
+                        f"azblob://{self.cfg.pulumi.storage_container_name}",
+                    ],
+                    env={**os.environ, **self.env},
                     encoding="UTF-8",
-                ) as process:
-                    if process.stdout:
-                        self.logger.info(process.stdout.readline().strip())
+                    capture_output=True,
+                    check=True,
+                    cwd=self.work_dir,
+                )
+                self.logger.info(process.stdout)
+            except (subprocess.CalledProcessError, FileNotFoundError) as exc:
+                msg = f"Logging into Pulumi failed.\n{exc}."
+                raise DataSafeHavenPulumiError(msg) from exc
         except Exception as exc:
-            raise DataSafeHavenPulumiException(
-                f"Logging into Pulumi failed.\n{str(exc)}."
-            ) from exc
+            msg = f"Logging into Pulumi failed.\n{exc}."
+            raise DataSafeHavenPulumiError(msg) from exc

     def output(self, name: str) -> Any:
         return self.stack.outputs()[name].value
@@ -239,9 +241,8 @@ def preview(self) -> None:
                 color="always", diff=True, on_output=self.logger.info
             )
         except Exception as exc:
-            raise DataSafeHavenPulumiException(
-                f"Pulumi preview failed.\n{str(exc)}."
-            ) from exc
+            msg = f"Pulumi preview failed.\n{exc}."
+            raise DataSafeHavenPulumiError(msg) from exc

     def refresh(self) -> None:
         """Refresh the Pulumi stack."""
@@ -250,9 +251,8 @@
             # Note that we disable parallelisation which can cause deadlock
             self.stack.refresh(color="always", parallel=1)
         except automation.errors.CommandError as exc:
-            raise DataSafeHavenPulumiException(
-                f"Pulumi refresh failed.\n{str(exc)}"
-            ) from exc
+            msg = f"Pulumi refresh failed.\n{exc}"
+            raise DataSafeHavenPulumiError(msg) from exc

     def remove_workdir(self) -> None:
         """Remove project directory if it exists."""
@@ -262,20 +262,18 @@
             shutil.rmtree(self.work_dir)
             self.logger.info(f"Removed [green]{self.work_dir}[/].")
         except Exception as exc:
-            raise DataSafeHavenPulumiException(
-                f"Removing Pulumi working directory failed.\n{str(exc)}."
-            ) from exc
+            msg = f"Removing Pulumi working directory failed.\n{exc}."
+            raise DataSafeHavenPulumiError(msg) from exc

     def secret(self, name: str) -> str:
         """Read a secret from the Pulumi stack."""
         try:
             return self.stack.get_config(name).value
         except automation.errors.CommandError as exc:
-            raise DataSafeHavenPulumiException(
-                f"Secret '{name}' was not found."
-            ) from exc
+            msg = f"Secret '{name}' was not found."
+            raise DataSafeHavenPulumiError(msg) from exc
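
The rewritten `login()` above swaps a `shell=True` `Popen` invocation for `subprocess.run` with an argument list and an explicitly merged environment. Because no shell ever sees the command, the single quotes that previously protected the `azblob://` URL must go too; left in the list argument they would be passed to `pulumi` verbatim. A minimal sketch of the pattern (the helper name and parameters are illustrative, not part of the patch):

```python
import os
import subprocess


def run_with_pulumi_env(args: list[str], extra_env: dict[str, str], cwd: str) -> str:
    """Run a command with the parent environment plus extra variables.

    Sketch of the subprocess.run pattern used in login() and whoami():
    list arguments need no shell quoting, and check=True converts a
    non-zero exit code into CalledProcessError.
    """
    process = subprocess.run(
        args,  # e.g. ["pulumi", "whoami"]
        capture_output=True,
        check=True,
        cwd=cwd,
        encoding="UTF-8",
        env={**os.environ, **extra_env},  # merge with, rather than replace, os.environ
    )
    return process.stdout.strip()
```
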

-    def set_config(self, name: str, value: str, secret: bool = False) -> None:
+    def set_config(self, name: str, value: str, *, secret: bool) -> None:
         """Set config values, overwriting any existing value."""
         self.stack.set_config(name, automation.ConfigValue(value=value, secret=secret))

@@ -288,9 +286,8 @@ def teardown(self) -> None:
             self.destroy()
             self.remove_workdir()
         except Exception as exc:
-            raise DataSafeHavenPulumiException(
-                f"Tearing down Pulumi infrastructure failed.\n{str(exc)}."
-            ) from exc
+            msg = f"Tearing down Pulumi infrastructure failed.\n{exc}."
+            raise DataSafeHavenPulumiError(msg) from exc

     def update(self) -> None:
         """Update deployed infrastructure."""
@@ -298,34 +295,30 @@
             result = self.stack.up(color="always", on_output=self.logger.info)
             self.evaluate(result.summary.result)
         except automation.errors.CommandError as exc:
-            raise DataSafeHavenPulumiException(
-                f"Pulumi update failed.\n{str(exc)}"
-            ) from exc
+            msg = f"Pulumi update failed.\n{exc}"
+            raise DataSafeHavenPulumiError(msg) from exc

     def whoami(self) -> str:
         """Check current Pulumi user."""
         try:
             AzureCli().login()  # this is needed to read the encryption key from the keyvault
-            env_vars = " ".join([f"{k}='{v}'" for k, v in self.env.items()])
-            command = "pulumi whoami"
             self.work_dir.mkdir(parents=True, exist_ok=True)
-            with subprocess.Popen(
-                f"{env_vars} {command}",
-                shell=True,
-                cwd=self.work_dir,
-                stdout=subprocess.PIPE,
-                stderr=subprocess.PIPE,
-                encoding="UTF-8",
-            ) as process:
-                if not process.stdout:
-                    raise DataSafeHavenPulumiException(
-                        f"No Pulumi user found {process.stderr}."
-                    )
-                return process.stdout.readline().strip()
+            try:
+                process = subprocess.run(
+                    ["pulumi", "whoami"],
+                    capture_output=True,
+                    check=True,
+                    cwd=self.work_dir,
+                    encoding="UTF-8",
+                    env={**os.environ, **self.env},
+                )
+                return process.stdout.strip()
+            except (subprocess.CalledProcessError, FileNotFoundError) as exc:
+                msg = f"No Pulumi user found.\n{exc}."
+                raise DataSafeHavenPulumiError(msg) from exc
         except Exception as exc:
-            raise DataSafeHavenPulumiException(
-                f"Pulumi user check failed.\n{str(exc)}."
-            ) from exc
+            msg = f"Pulumi user check failed.\n{exc}."
+            raise DataSafeHavenPulumiError(msg) from exc


 class PulumiSHMStack(PulumiStack):
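
A pattern repeated throughout this patch: exception classes are renamed from `*Exception` to `*Error` (ruff's N818 convention) and f-string messages are hoisted into a local `msg` before raising (ruff's EM rules), while `raise ... from exc` keeps the original cause chained. A toy illustration, using a hypothetical error class rather than one from the codebase:

```python
class ExampleDeploymentError(Exception):
    """Hypothetical exception following the *Error naming convention (N818)."""


def fetch_stack_output() -> None:
    try:
        raise KeyError("missing output")  # stand-in for a failing operation
    except Exception as exc:
        # Build the message first (EM102), then raise with the cause chained,
        # so the traceback shows both this error and the underlying one.
        msg = f"Reading stack output failed.\n{exc}"
        raise ExampleDeploymentError(msg) from exc
```
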
diff --git a/data_safe_haven/resources/secure_research_desktop/srd.cloud_init.mustache.yaml b/data_safe_haven/resources/secure_research_desktop/srd.cloud_init.mustache.yaml
index 2dcbe20749..44c4015a86 100644
--- a/data_safe_haven/resources/secure_research_desktop/srd.cloud_init.mustache.yaml
+++ b/data_safe_haven/resources/secure_research_desktop/srd.cloud_init.mustache.yaml
@@ -66,20 +66,20 @@ mounts:
 # Install necessary apt packages
 packages:
   # System requirements
-  - libnss-ldapd # LDAP login
-  - libpam-ldapd # LDAP login
-  - ldap-utils # LDAP login
-  - nfs-common # Mounting shared volumes
-  - xfce4 # XFCE desktop
-  - xrdp # remote desktop client
+  - libnss-ldapd        # LDAP login
+  - libpam-ldapd        # LDAP login
+  - ldap-utils          # LDAP login
+  - nfs-common          # Mounting shared volumes
+  - xfce4               # XFCE desktop
+  - xrdp                # remote desktop client
   # Programming
-  - python-is-python3 # Python language
-  - python3 # Python language
-  - python3-pip # Python language
-  - python3-venv # Python language
-  - r-base # R language
+  - python-is-python3   # Python language
+  - python3             # Python language
+  - python3-pip         # Python language
+  - python3-venv        # Python language
+  - r-base              # R language
   # Tools
-  - firefox # web browser
+  - firefox             # web browser

 package_update: true
 package_upgrade: true
diff --git a/data_safe_haven/utility/__init__.py b/data_safe_haven/utility/__init__.py
index 2b786f36fb..8a6ec3bcf5 100644
--- a/data_safe_haven/utility/__init__.py
+++ b/data_safe_haven/utility/__init__.py
@@ -1,12 +1,11 @@
 from .enums import SoftwarePackageCategory
 from .file_reader import FileReader
 from .logger import Logger
-from .types import PathType, YamlType
+from .types import PathType

 __all__ = [
     "FileReader",
     "Logger",
     "PathType",
     "SoftwarePackageCategory",
-    "YamlType",
 ]
diff --git a/data_safe_haven/utility/file_reader.py b/data_safe_haven/utility/file_reader.py
index eb027d922d..eeae4ab133 100644
--- a/data_safe_haven/utility/file_reader.py
+++ b/data_safe_haven/utility/file_reader.py
@@ -1,13 +1,11 @@
 """Read local files, handling template expansion if needed"""
-# Standard library imports
 import pathlib
-from typing import Any, Dict, Optional
+from typing import Any

-# Third party imports
 import chevron

-# Local imports
 from data_safe_haven.functions import sha256hash
+
 from .types import PathType


@@ -22,9 +20,9 @@ def name(self) -> str:
         return self.file_path.name.replace(".mustache", "")

-    def file_contents(self, mustache_values: Optional[Dict[str, Any]] = None) -> str:
+    def file_contents(self, mustache_values: dict[str, Any] | None = None) -> str:
         """Read a local file into a string, expanding template values"""
-        with open(self.file_path, "r", encoding="utf-8") as source_file:
+        with open(self.file_path, encoding="utf-8") as source_file:
             if mustache_values:
                 contents = chevron.render(source_file, mustache_values)
             else:
diff --git a/data_safe_haven/utility/logger.py b/data_safe_haven/utility/logger.py
index eb970f3ee8..a8eaf4415b 100644
--- a/data_safe_haven/utility/logger.py
+++ b/data_safe_haven/utility/logger.py
@@ -1,10 +1,8 @@
 """Standalone logging class implemented as a singleton"""
-# Standard library imports
 import io
 import logging
-from typing import Any, List, Optional
+from typing import Any, ClassVar, Optional

-# Third party imports
 from rich.console import Console
 from rich.highlighter import RegexHighlighter
 from rich.logging import RichHandler
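
For reference, the `FileReader.file_contents()` change in the file_reader.py hunk above relies on chevron accepting an open file handle as its template argument. A self-contained sketch of that behaviour (the function name, file name, and values here are hypothetical):

```python
from typing import Any

import chevron


def render_template(path: str, mustache_values: dict[str, Any] | None = None) -> str:
    """Return file contents, expanding {{placeholders}} when values are given."""
    with open(path, encoding="utf-8") as source_file:
        if mustache_values:
            # chevron renders directly from the file handle
            return chevron.render(source_file, mustache_values)
        return source_file.read()


# Example (hypothetical file and values):
# render_template("srd.cloud_init.mustache.yaml", {"hostname": "sre-workspace"})
```
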
@@ -12,7 +10,6 @@
 from rich.table import Table
 from rich.text import Text

-# Local imports
 from .types import PathType


@@ -25,7 +22,7 @@ def __init__(
         self, fmt: str, datefmt: str, filename: str, *args: Any, **kwargs: Any
     ):
         """Constructor"""
-        super().__init__(filename=filename, *args, **kwargs)
+        super().__init__(*args, **kwargs, filename=filename)
         self.setFormatter(logging.Formatter(self.strip_formatting(fmt), datefmt))

     @staticmethod
@@ -67,7 +64,7 @@ class LogLevelHighlighter(RegexHighlighter):
     """

     base_style = "logging.level."
-    highlights = [
+    highlights: ClassVar[list[str]] = [
         r"(?P<critical>\[CRITICAL\])",
         r"(?P<debug>\[   DEBUG\])",
         r"(?P<error>\[   ERROR\])",
@@ -81,7 +78,7 @@ class RichStringAdaptor:
     """
     A wrapper to convert Rich objects into strings.
     """

-    def __init__(self, coloured=False):
+    def __init__(self, *, coloured: bool):
         self.string_io = io.StringIO()
         self.console = Console(file=self.string_io, force_terminal=coloured)

@@ -100,7 +97,7 @@ class Logger:
     _instance: Optional["Logger"] = None

     def __new__(
-        cls, verbosity: Optional[int] = None, log_file: Optional[PathType] = None
+        cls, verbosity: int | None = None, log_file: PathType | None = None
     ) -> "Logger":
         desired_log_level = max(
             logging.INFO - 10 * (verbosity if verbosity else 0), logging.DEBUG
@@ -116,10 +113,10 @@
                 )
             )
         else:
-            cls._instance = super(Logger, cls).__new__(cls)
+            cls._instance = super().__new__(cls)
             # Initialise console handler
             console_handler = LoggingHandlerRichConsole(cls.rich_format, cls.date_fmt)
-            handlers: List[logging.Handler] = [console_handler]
+            handlers: list[logging.Handler] = [console_handler]
             # Initialise file handler
             if log_file:
                 file_handler = LoggingHandlerPlainFile(
@@ -143,7 +140,9 @@ def format_msg(self, message: str, level: int = logging.INFO) -> str:
         """Format a message using rich handler"""
         for handler in self.logger.handlers:
             if isinstance(handler, RichHandler):
-                fn, lno, func, sinfo = self.logger.findCaller(False, 1)
+                fn, lno, func, sinfo = self.logger.findCaller(
+                    stack_info=False, stacklevel=1
+                )
                 return handler.format(
                     self.logger.makeRecord(
                         name=self.logger.name,
@@ -181,11 +180,11 @@ def debug(self, message: str) -> None:
         return self.logger.debug(message)

     # Loggable wrappers for confirm/ask/choice
-    def confirm(self, message: str, default_to_yes: bool = True) -> bool:
+    def confirm(self, message: str, *, default_to_yes: bool) -> bool:
         formatted = self.format_msg(message, logging.INFO)
         return Confirm.ask(formatted, default=default_to_yes)

-    def ask(self, message: str, default: Optional[str] = None) -> str:
+    def ask(self, message: str, default: str | None = None) -> str:
         formatted = self.format_msg(message, logging.INFO)
         if default:
             return Prompt.ask(formatted, default=default)
@@ -194,8 +193,8 @@
     def choose(
         self,
         message: str,
-        choices: Optional[List[str]] = None,
-        default: Optional[str] = None,
+        choices: list[str] | None = None,
+        default: str | None = None,
     ) -> str:
         formatted = self.format_msg(message, logging.INFO)
         if default:
@@ -221,8 +220,8 @@
     # Create a table
     def tabulate(
-        self, header: Optional[List[str]] = None, rows: Optional[List[List[str]]] = None
-    ) -> List[str]:
+        self, header: list[str] | None = None, rows: list[list[str]] | None = None
+    ) -> list[str]:
         table = Table()
         if header:
             for item in header:
@@ -230,5 +229,5 @@
         if rows:
             for row in rows:
                 table.add_row(*row)
-        adaptor = RichStringAdaptor()
+        adaptor = RichStringAdaptor(coloured=False)
         return [line.strip() for line in adaptor.to_string(table).split("\n")]
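
The `Logger` keeps its singleton-via-`__new__` construction, now using the zero-argument `super()` call. Reduced to its essentials, the pattern looks like this (a sketch with a hypothetical class, not the full `Logger`):

```python
from typing import ClassVar, Optional


class SingletonExample:
    # Class-level cache for the single shared instance
    _instance: ClassVar[Optional["SingletonExample"]] = None

    def __new__(cls) -> "SingletonExample":
        if cls._instance is None:
            # First call: create and cache the only instance
            cls._instance = super().__new__(cls)
        return cls._instance


assert SingletonExample() is SingletonExample()  # every call returns the same object
```
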
diff --git a/data_safe_haven/utility/types.py b/data_safe_haven/utility/types.py
index 3cdcc0c30c..527e4e2ea3 100644
--- a/data_safe_haven/utility/types.py
+++ b/data_safe_haven/utility/types.py
@@ -1,6 +1,3 @@
-# Standard library imports
 from pathlib import Path
-from typing import Any, Dict, Union

-PathType = Union[str, Path]
-YamlType = Dict[str, Dict[str, Any]]
+PathType = str | Path
diff --git a/data_safe_haven/version.py b/data_safe_haven/version.py
new file mode 100644
index 0000000000..bfb9e4b4b8
--- /dev/null
+++ b/data_safe_haven/version.py
@@ -0,0 +1,2 @@
+__version__ = "5.0.0"
+__version_info__ = tuple(__version__.split("."))
diff --git a/poetry.lock b/poetry.lock
deleted file mode 100644
index d346b888b4..0000000000
--- a/poetry.lock
+++ /dev/null
@@ -1,1688 +0,0 @@
-# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand.
-
-[[package]]
-name = "acme"
-version = "2.2.0"
-description = "ACME protocol implementation in Python"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "acme-2.2.0-py3-none-any.whl", hash = "sha256:699f4efc2a012c70eff4dbfaf7ef6798b44ef7677d8b3776bd1f0058288b2219"},
-    {file = "acme-2.2.0.tar.gz", hash = "sha256:0a0432c8fdf3b7ff2098bf97d8f2d86f746208a1dc2bd5871dfdc413d6b167b5"},
-]
-
-[package.dependencies]
-cryptography = ">=2.5.0"
-josepy = ">=1.13.0"
-PyOpenSSL = ">=17.5.0"
-pyrfc3339 = "*"
-pytz = ">=2019.3"
-requests = ">=2.20.0"
-setuptools = ">=41.6.0"
-
-[package.extras]
-docs = ["Sphinx (>=1.0)", "sphinx-rtd-theme"]
-test = ["pytest", "pytest-xdist", "typing-extensions"]
-
-[[package]]
-name = "appdirs"
-version = "1.4.4"
-description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
-optional = false -python-versions = "*" -files = [ - {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, - {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, -] - -[[package]] -name = "arpeggio" -version = "2.0.0" -description = "Packrat parser interpreter" -optional = false -python-versions = "*" -files = [ - {file = "Arpeggio-2.0.0-py2.py3-none-any.whl", hash = "sha256:448e332deb0e9ccd04046f1c6c14529d197f41bc2fdb3931e43fc209042fbdd3"}, - {file = "Arpeggio-2.0.0.tar.gz", hash = "sha256:d6b03839019bb8a68785f9292ee6a36b1954eb84b925b84a6b8a5e1e26d3ed3d"}, -] - -[package.extras] -dev = ["mike", "mkdocs", "twine", "wheel"] -test = ["coverage", "coveralls", "flake8", "pytest"] - -[[package]] -name = "attrs" -version = "23.1.0" -description = "Classes Without Boilerplate" -optional = false -python-versions = ">=3.7" -files = [ - {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, - {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, -] - -[package.extras] -cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[docs,tests]", "pre-commit"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] -tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] - -[[package]] -name = "azure-common" -version = "1.1.28" -description = "Microsoft Azure Client Library for Python (Common)" -optional = false -python-versions = "*" -files = [ - {file = "azure-common-1.1.28.zip", hash = "sha256:4ac0cd3214e36b6a1b6a442686722a5d8cc449603aa833f3f0f40bda836704a3"}, - {file = "azure_common-1.1.28-py2.py3-none-any.whl", hash = "sha256:5c12d3dcf4ec20599ca6b0d3e09e86e146353d443e7fcc050c9a19c1f9df20ad"}, -] - -[[package]] -name = "azure-core" -version = "1.27.1" -description = "Microsoft Azure Core Library for Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "azure-core-1.27.1.zip", hash = "sha256:5975c20808fa388243f01a8b79021bfbe114f503a27c543f002c5fc8bbdd73dd"}, - {file = "azure_core-1.27.1-py3-none-any.whl", hash = "sha256:1b4b19f455eb7b4332c6f92adc2c669353ded07c2722eb436165f0c253737792"}, -] - -[package.dependencies] -requests = ">=2.18.4" -six = ">=1.11.0" -typing-extensions = ">=4.3.0" - -[package.extras] -aio = ["aiohttp (>=3.0)"] - -[[package]] -name = "azure-identity" -version = "1.13.0" -description = "Microsoft Azure Identity Library for Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "azure-identity-1.13.0.zip", hash = "sha256:c931c27301ffa86b07b4dcf574e29da73e3deba9ab5d1fe4f445bb6a3117e260"}, - {file = "azure_identity-1.13.0-py3-none-any.whl", hash = "sha256:bd700cebb80cd9862098587c29d8677e819beca33c62568ced6d5a8e5e332b82"}, -] - -[package.dependencies] -azure-core = ">=1.11.0,<2.0.0" -cryptography = ">=2.5" -msal = ">=1.20.0,<2.0.0" -msal-extensions = ">=0.3.0,<2.0.0" -six = ">=1.12.0" - -[[package]] -name = "azure-keyvault-certificates" -version = "4.7.0" -description = "Microsoft Azure Key Vault Certificates Client Library for Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "azure-keyvault-certificates-4.7.0.zip", hash = 
"sha256:9e47d9a74825e502b13d5481c99c182040c4f54723f43371e00859436dfcf3ca"}, - {file = "azure_keyvault_certificates-4.7.0-py3-none-any.whl", hash = "sha256:4ddf29529309da9587d9afdf8be3c018a3455ed27bffae9428acb1802789a3d6"}, -] - -[package.dependencies] -azure-common = ">=1.1,<2.0" -azure-core = ">=1.24.0,<2.0.0" -isodate = ">=0.6.1" -typing-extensions = ">=4.0.1" - -[[package]] -name = "azure-keyvault-keys" -version = "4.8.0" -description = "Microsoft Azure Key Vault Keys Client Library for Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "azure-keyvault-keys-4.8.0.zip", hash = "sha256:6c0bb2f783202a34a3e5ec74866e6212e591ac7124f03b9669d1b09b68224bc4"}, - {file = "azure_keyvault_keys-4.8.0-py3-none-any.whl", hash = "sha256:d1080fa1ffcb3bc16fc3a6b7acce63c8f0e81ad0b498673b2871b162396674f0"}, -] - -[package.dependencies] -azure-common = ">=1.1,<2.0" -azure-core = ">=1.24.0,<2.0.0" -cryptography = ">=2.1.4" -isodate = ">=0.6.1" -typing-extensions = ">=4.0.1" - -[[package]] -name = "azure-keyvault-secrets" -version = "4.7.0" -description = "Microsoft Azure Key Vault Secrets Client Library for Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "azure-keyvault-secrets-4.7.0.zip", hash = "sha256:77ee2534ba651a1f306c85d7b505bc3ccee8fea77450ebafafc26aec16e5445d"}, - {file = "azure_keyvault_secrets-4.7.0-py3-none-any.whl", hash = "sha256:a16c7e6dfa9cba68892bb6fcb905bf2e2ec1f2a6dc05522b61df79621e050901"}, -] - -[package.dependencies] -azure-common = ">=1.1,<2.0" -azure-core = ">=1.24.0,<2.0.0" -isodate = ">=0.6.1" -typing-extensions = ">=4.0.1" - -[[package]] -name = "azure-mgmt-automation" -version = "1.0.0" -description = "Microsoft Azure Automation Client Library for Python" -optional = false -python-versions = "*" -files = [ - {file = "azure-mgmt-automation-1.0.0.zip", hash = "sha256:a49d2d413fef57010cb36161e5bc056601b8d7c6d05bd3cb05a13512ef291e1e"}, - {file = "azure_mgmt_automation-1.0.0-py2.py3-none-any.whl", hash = "sha256:a7e41afd475a4c5c751f1bede56ce040688700640797dbc3b95b34fa928e1dd7"}, -] - -[package.dependencies] -azure-common = ">=1.1,<2.0" -azure-mgmt-core = ">=1.2.0,<2.0.0" -msrest = ">=0.5.0" - -[[package]] -name = "azure-mgmt-compute" -version = "29.1.0" -description = "Microsoft Azure Compute Management Client Library for Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "azure-mgmt-compute-29.1.0.zip", hash = "sha256:2d5a1bae7f5d307ca1e850d7e83fed9c839d4f635b10a4b8d3f8bc6098ac2888"}, - {file = "azure_mgmt_compute-29.1.0-py3-none-any.whl", hash = "sha256:2dfc9a812e28fec65105a3d14ff22e1650e3ddd56a5afbe82ef5009974301f9b"}, -] - -[package.dependencies] -azure-common = ">=1.1,<2.0" -azure-mgmt-core = ">=1.3.2,<2.0.0" -msrest = ">=0.7.1" - -[[package]] -name = "azure-mgmt-containerinstance" -version = "9.2.0" -description = "Microsoft Azure Container Instance Client Library for Python" -optional = false -python-versions = ">=3.6" -files = [ - {file = "azure-mgmt-containerinstance-9.2.0.zip", hash = "sha256:deb125554bdb1aa17df69a596a7514af0146b420176a4db384c54e77a9fd6e46"}, - {file = "azure_mgmt_containerinstance-9.2.0-py3-none-any.whl", hash = "sha256:8ce26aa0def2e14865cb5e649fb8a2eb09e6d43fcd96ca2c97321abb7316d78e"}, -] - -[package.dependencies] -azure-common = ">=1.1,<2.0" -azure-mgmt-core = ">=1.3.0,<2.0.0" -msrest = ">=0.6.21" - -[[package]] -name = "azure-mgmt-core" -version = "1.4.0" -description = "Microsoft Azure Management Core Library for Python" -optional = false -python-versions = ">=3.7" -files = [ - {file 
= "azure-mgmt-core-1.4.0.zip", hash = "sha256:d195208340094f98e5a6661b781cde6f6a051e79ce317caabd8ff97030a9b3ae"}, - {file = "azure_mgmt_core-1.4.0-py3-none-any.whl", hash = "sha256:81071675f186a585555ef01816f2774d49c1c9024cb76e5720c3c0f6b337bb7d"}, -] - -[package.dependencies] -azure-core = ">=1.26.2,<2.0.0" - -[[package]] -name = "azure-mgmt-dns" -version = "8.1.0" -description = "Microsoft Azure DNS Management Client Library for Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "azure-mgmt-dns-8.1.0.zip", hash = "sha256:d8379d4bb9194b81b79e5284d875fa6df80707346f2cbb5c5491a20f35266fd0"}, - {file = "azure_mgmt_dns-8.1.0-py3-none-any.whl", hash = "sha256:640be3ae428a40e5c3576a760e8c345d64df421bd1be6385d7124244f6089897"}, -] - -[package.dependencies] -azure-common = ">=1.1,<2.0" -azure-mgmt-core = ">=1.3.2,<2.0.0" -isodate = ">=0.6.1,<1.0.0" - -[[package]] -name = "azure-mgmt-keyvault" -version = "9.3.0" -description = "Microsoft Azure Keyvault Management Client Library for Python" -optional = false -python-versions = "*" -files = [ - {file = "azure-mgmt-keyvault-9.3.0.zip", hash = "sha256:54156422e618b686d52232a7989594b240bd18afd0fa381e12e4772ed4ab5ea8"}, - {file = "azure_mgmt_keyvault-9.3.0-py2.py3-none-any.whl", hash = "sha256:4ef0285292de9d833e5b1a56b9667ef7f7fd435ac44ad179b917ed3f3470c974"}, -] - -[package.dependencies] -azure-common = ">=1.1,<2.0" -azure-mgmt-core = ">=1.2.0,<2.0.0" -msrest = ">=0.6.21" - -[[package]] -name = "azure-mgmt-msi" -version = "6.1.0" -description = "Microsoft Azure Msi Management Client Library for Python" -optional = false -python-versions = ">=3.6" -files = [ - {file = "azure-mgmt-msi-6.1.0.zip", hash = "sha256:952f1d6b7025d73d6930b0c17fa66d59cd5415456090dd6aa4a4f1b785577654"}, - {file = "azure_mgmt_msi-6.1.0-py3-none-any.whl", hash = "sha256:8c2ab013171b32c42c1a1296501739d6d30cfa99e51898a9a76d1454259580cc"}, -] - -[package.dependencies] -azure-common = ">=1.1,<2.0" -azure-mgmt-core = ">=1.3.1,<2.0.0" -msrest = ">=0.6.21" - -[[package]] -name = "azure-mgmt-network" -version = "20.0.0" -description = "Microsoft Azure Network Management Client Library for Python" -optional = false -python-versions = ">=3.6" -files = [ - {file = "azure-mgmt-network-20.0.0.zip", hash = "sha256:9a78cfc82009fab94d819e2e9926237d555583cdc4a1b65863fceea720e37686"}, - {file = "azure_mgmt_network-20.0.0-py3-none-any.whl", hash = "sha256:e56ca31ddedc5c4492e243b0cec42096207f6fd640829aeae1e221a1732a10f0"}, -] - -[package.dependencies] -azure-common = ">=1.1,<2.0" -azure-mgmt-core = ">=1.3.0,<2.0.0" -msrest = ">=0.6.21" - -[[package]] -name = "azure-mgmt-rdbms" -version = "10.1.0" -description = "Microsoft Azure RDBMS Management Client Library for Python" -optional = false -python-versions = ">=3.6" -files = [ - {file = "azure-mgmt-rdbms-10.1.0.zip", hash = "sha256:a87d401c876c84734cdd4888af551e4a1461b4b328d9816af60cb8ac5979f035"}, - {file = "azure_mgmt_rdbms-10.1.0-py3-none-any.whl", hash = "sha256:8eac17d1341a91d7ed914435941ba917b5ef1568acabc3e65653603966a7cc88"}, -] - -[package.dependencies] -azure-common = ">=1.1,<2.0" -azure-mgmt-core = ">=1.3.0,<2.0.0" -msrest = ">=0.6.21" - -[[package]] -name = "azure-mgmt-resource" -version = "20.1.0" -description = "Microsoft Azure Resource Management Client Library for Python" -optional = false -python-versions = ">=3.6" -files = [ - {file = "azure-mgmt-resource-20.1.0.zip", hash = "sha256:4fbb2158320e0bdd367882642f266a6dfb3b4b8610792b3afbbca39089f212d7"}, - {file = "azure_mgmt_resource-20.1.0-py3-none-any.whl", hash = 
"sha256:b009dcd66bee43691b71048b97c3da9c269ea24f338f1f9788bbd4c4726336c3"}, -] - -[package.dependencies] -azure-common = ">=1.1,<2.0" -azure-mgmt-core = ">=1.3.0,<2.0.0" -msrest = ">=0.6.21" - -[[package]] -name = "azure-mgmt-storage" -version = "19.1.0" -description = "Microsoft Azure Storage Management Client Library for Python" -optional = false -python-versions = ">=3.6" -files = [ - {file = "azure-mgmt-storage-19.1.0.zip", hash = "sha256:49ea22f00e0965a3550af34a41c1a1d3a481690f6500c78e85408802f56d7416"}, - {file = "azure_mgmt_storage-19.1.0-py3-none-any.whl", hash = "sha256:61c7a55395e7410a24bfc8def353429eb772a105dd8268dce91f5ee38e4fc04e"}, -] - -[package.dependencies] -azure-common = ">=1.1,<2.0" -azure-mgmt-core = ">=1.3.0,<2.0.0" -msrest = ">=0.6.21" - -[[package]] -name = "azure-storage-blob" -version = "12.16.0" -description = "Microsoft Azure Blob Storage Client Library for Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "azure-storage-blob-12.16.0.zip", hash = "sha256:43b45f19a518a5c6895632f263b3825ebc23574f25cc84b66e1630a6160e466f"}, - {file = "azure_storage_blob-12.16.0-py3-none-any.whl", hash = "sha256:91bb192b2a97939c4259c72373bac0f41e30810bbc853d5184f0f45904eacafd"}, -] - -[package.dependencies] -azure-core = ">=1.26.0,<2.0.0" -cryptography = ">=2.1.4" -isodate = ">=0.6.1" -typing-extensions = ">=4.0.1" - -[package.extras] -aio = ["azure-core[aio] (>=1.26.0,<2.0.0)"] - -[[package]] -name = "azure-storage-file-datalake" -version = "12.11.0" -description = "Microsoft Azure File DataLake Storage Client Library for Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "azure-storage-file-datalake-12.11.0.zip", hash = "sha256:89b7556403c29ca5b6531c112f0e2d2ef3f340a9add553e8c3484a25bce6216c"}, - {file = "azure_storage_file_datalake-12.11.0-py3-none-any.whl", hash = "sha256:0ebf00c7a54adc623e38673732dd07cf969f017024a597fc03631c0d1b9ed70f"}, -] - -[package.dependencies] -azure-core = ">=1.26.0,<2.0.0" -azure-storage-blob = ">=12.16.0b1,<13.0.0" -isodate = ">=0.6.1" -typing-extensions = ">=4.0.1" - -[package.extras] -aio = ["azure-core[aio] (>=1.26.0,<2.0.0)"] - -[[package]] -name = "azure-storage-file-share" -version = "12.12.0" -description = "Microsoft Azure Azure File Share Storage Client Library for Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "azure-storage-file-share-12.12.0.zip", hash = "sha256:cf7e47f749fc06ecb99b64612ab4bd94227b9931497a4c27c2865717c3daa089"}, - {file = "azure_storage_file_share-12.12.0-py3-none-any.whl", hash = "sha256:3abb168b94daac3ffe74ef6552fd86822a97e340ba9c6db0f8a5ae5053a08852"}, -] - -[package.dependencies] -azure-core = ">=1.26.0,<2.0.0" -cryptography = ">=2.1.4" -isodate = ">=0.6.1" -typing-extensions = ">=4.0.1" - -[package.extras] -aio = ["azure-core[aio] (>=1.26.0,<2.0.0)"] - -[[package]] -name = "black" -version = "22.12.0" -description = "The uncompromising code formatter." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "black-22.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eedd20838bd5d75b80c9f5487dbcb06836a43833a37846cf1d8c1cc01cef59d"}, - {file = "black-22.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:159a46a4947f73387b4d83e87ea006dbb2337eab6c879620a3ba52699b1f4351"}, - {file = "black-22.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d30b212bffeb1e252b31dd269dfae69dd17e06d92b87ad26e23890f3efea366f"}, - {file = "black-22.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:7412e75863aa5c5411886804678b7d083c7c28421210180d67dfd8cf1221e1f4"}, - {file = "black-22.12.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c116eed0efb9ff870ded8b62fe9f28dd61ef6e9ddd28d83d7d264a38417dcee2"}, - {file = "black-22.12.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1f58cbe16dfe8c12b7434e50ff889fa479072096d79f0a7f25e4ab8e94cd8350"}, - {file = "black-22.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77d86c9f3db9b1bf6761244bc0b3572a546f5fe37917a044e02f3166d5aafa7d"}, - {file = "black-22.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:82d9fe8fee3401e02e79767016b4907820a7dc28d70d137eb397b92ef3cc5bfc"}, - {file = "black-22.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101c69b23df9b44247bd88e1d7e90154336ac4992502d4197bdac35dd7ee3320"}, - {file = "black-22.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:559c7a1ba9a006226f09e4916060982fd27334ae1998e7a38b3f33a37f7a2148"}, - {file = "black-22.12.0-py3-none-any.whl", hash = "sha256:436cc9167dd28040ad90d3b404aec22cedf24a6e4d7de221bec2730ec0c97bcf"}, - {file = "black-22.12.0.tar.gz", hash = "sha256:229351e5a18ca30f447bf724d007f890f97e13af070bb6ad4c0a441cd7596a2f"}, -] - -[package.dependencies] -click = ">=8.0.0" -mypy-extensions = ">=0.4.3" -pathspec = ">=0.9.0" -platformdirs = ">=2" -tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""} - -[package.extras] -colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)"] -jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] -uvloop = ["uvloop (>=0.15.2)"] - -[[package]] -name = "certifi" -version = "2023.5.7" -description = "Python package for providing Mozilla's CA Bundle." -optional = false -python-versions = ">=3.6" -files = [ - {file = "certifi-2023.5.7-py3-none-any.whl", hash = "sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716"}, - {file = "certifi-2023.5.7.tar.gz", hash = "sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7"}, -] - -[[package]] -name = "cffi" -version = "1.15.1" -description = "Foreign Function Interface for Python calling C code." 
-optional = false -python-versions = "*" -files = [ - {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, - {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, - {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, - {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, - {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, - 
{file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, - {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, - {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, - {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, - {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, - {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, - {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, - {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, - {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, - {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, - 
{file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, - {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, - {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, - {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, - {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, - {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, -] - -[package.dependencies] -pycparser = "*" - -[[package]] -name = "charset-normalizer" -version = "3.1.0" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
-optional = false -python-versions = ">=3.7.0" -files = [ - {file = "charset-normalizer-3.1.0.tar.gz", hash = "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-win32.whl", hash = "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41"}, - {file = 
"charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-win32.whl", hash = "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c"}, - {file = 
"charset_normalizer-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-win32.whl", hash = "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-win32.whl", hash = "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b"}, - {file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"}, -] - -[[package]] -name = "chevron" -version = "0.14.0" -description = "Mustache templating language renderer" -optional = false -python-versions = "*" -files = [ - {file = "chevron-0.14.0-py3-none-any.whl", hash = "sha256:fbf996a709f8da2e745ef763f482ce2d311aa817d287593a5b990d6d6e4f0443"}, - {file = "chevron-0.14.0.tar.gz", hash = "sha256:87613aafdf6d77b6a90ff073165a61ae5086e21ad49057aa0e53681601800ebf"}, -] - -[[package]] -name = "chili" -version = "2.1.0" -description = "Chili is serialisation library. It can serialise/deserialise almost any object." -optional = false -python-versions = ">=3.8,<4.0" -files = [ - {file = "chili-2.1.0-py3-none-any.whl", hash = "sha256:5cecbb8735583c293d06b78673b52ddbd8da864f888e13bf3c494457192913a3"}, - {file = "chili-2.1.0.tar.gz", hash = "sha256:0f20a7668b3dbda3f6797c91dcc4aef3c7e844a5e14364a144650142279bc61d"}, -] - -[package.dependencies] -gaffe = "0.2.0" -typing-extensions = ">=4.2,<5.0" - -[[package]] -name = "click" -version = "8.1.4" -description = "Composable command line interface toolkit" -optional = false -python-versions = ">=3.7" -files = [ - {file = "click-8.1.4-py3-none-any.whl", hash = "sha256:2739815aaa5d2c986a88f1e9230c55e17f0caad3d958a5e13ad0797c166db9e3"}, - {file = "click-8.1.4.tar.gz", hash = "sha256:b97d0c74955da062a7d4ef92fadb583806a585b2ea81958a81bd72726cbb8e37"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." 
-optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] - -[[package]] -name = "cryptography" -version = "3.4.8" -description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." -optional = false -python-versions = ">=3.6" -files = [ - {file = "cryptography-3.4.8-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:a00cf305f07b26c351d8d4e1af84ad7501eca8a342dedf24a7acb0e7b7406e14"}, - {file = "cryptography-3.4.8-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:f44d141b8c4ea5eb4dbc9b3ad992d45580c1d22bf5e24363f2fbf50c2d7ae8a7"}, - {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0a7dcbcd3f1913f664aca35d47c1331fce738d44ec34b7be8b9d332151b0b01e"}, - {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34dae04a0dce5730d8eb7894eab617d8a70d0c97da76b905de9efb7128ad7085"}, - {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1eb7bb0df6f6f583dd8e054689def236255161ebbcf62b226454ab9ec663746b"}, - {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:9965c46c674ba8cc572bc09a03f4c649292ee73e1b683adb1ce81e82e9a6a0fb"}, - {file = "cryptography-3.4.8-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:3c4129fc3fdc0fa8e40861b5ac0c673315b3c902bbdc05fc176764815b43dd1d"}, - {file = "cryptography-3.4.8-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:695104a9223a7239d155d7627ad912953b540929ef97ae0c34c7b8bf30857e89"}, - {file = "cryptography-3.4.8-cp36-abi3-win32.whl", hash = "sha256:21ca464b3a4b8d8e86ba0ee5045e103a1fcfac3b39319727bc0fc58c09c6aff7"}, - {file = "cryptography-3.4.8-cp36-abi3-win_amd64.whl", hash = "sha256:3520667fda779eb788ea00080124875be18f2d8f0848ec00733c0ec3bb8219fc"}, - {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d2a6e5ef66503da51d2110edf6c403dc6b494cc0082f85db12f54e9c5d4c3ec5"}, - {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a305600e7a6b7b855cd798e00278161b681ad6e9b7eca94c721d5f588ab212af"}, - {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:3fa3a7ccf96e826affdf1a0a9432be74dc73423125c8f96a909e3835a5ef194a"}, - {file = "cryptography-3.4.8-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:d9ec0e67a14f9d1d48dd87a2531009a9b251c02ea42851c060b25c782516ff06"}, - {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5b0fbfae7ff7febdb74b574055c7466da334a5371f253732d7e2e7525d570498"}, - {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94fff993ee9bc1b2440d3b7243d488c6a3d9724cc2b09cdb297f6a886d040ef7"}, - {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:8695456444f277af73a4877db9fc979849cd3ee74c198d04fc0776ebc3db52b9"}, - {file = "cryptography-3.4.8-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:cd65b60cfe004790c795cc35f272e41a3df4631e2fb6b35aa7ac6ef2859d554e"}, - {file = "cryptography-3.4.8.tar.gz", hash = 
"sha256:94cc5ed4ceaefcbe5bf38c8fba6a21fc1d365bb8fb826ea1688e3370b2e24a1c"}, -] - -[package.dependencies] -cffi = ">=1.12" - -[package.extras] -docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx-rtd-theme"] -docstest = ["doc8", "pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] -pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"] -sdist = ["setuptools-rust (>=0.11.4)"] -ssh = ["bcrypt (>=3.1.5)"] -test = ["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.0)", "pytest-cov", "pytest-subtests", "pytest-xdist", "pytz"] - -[[package]] -name = "decorator" -version = "5.1.1" -description = "Decorators for Humans" -optional = false -python-versions = ">=3.5" -files = [ - {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, - {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, -] - -[[package]] -name = "dill" -version = "0.3.6" -description = "serialize all of python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "dill-0.3.6-py3-none-any.whl", hash = "sha256:a07ffd2351b8c678dfc4a856a3005f8067aea51d6ba6c700796a4d9e280f39f0"}, - {file = "dill-0.3.6.tar.gz", hash = "sha256:e5db55f3687856d8fbdab002ed78544e1c4559a130302693d839dfe8f93f2373"}, -] - -[package.extras] -graph = ["objgraph (>=1.7.2)"] - -[[package]] -name = "dnspython" -version = "2.3.0" -description = "DNS toolkit" -optional = false -python-versions = ">=3.7,<4.0" -files = [ - {file = "dnspython-2.3.0-py3-none-any.whl", hash = "sha256:89141536394f909066cabd112e3e1a37e4e654db00a25308b0f130bc3152eb46"}, - {file = "dnspython-2.3.0.tar.gz", hash = "sha256:224e32b03eb46be70e12ef6d64e0be123a64e621ab4c0822ff6d450d52a540b9"}, -] - -[package.extras] -curio = ["curio (>=1.2,<2.0)", "sniffio (>=1.1,<2.0)"] -dnssec = ["cryptography (>=2.6,<40.0)"] -doh = ["h2 (>=4.1.0)", "httpx (>=0.21.1)", "requests (>=2.23.0,<3.0.0)", "requests-toolbelt (>=0.9.1,<0.11.0)"] -doq = ["aioquic (>=0.9.20)"] -idna = ["idna (>=2.1,<4.0)"] -trio = ["trio (>=0.14,<0.23)"] -wmi = ["wmi (>=1.5.1,<2.0.0)"] - -[[package]] -name = "exceptiongroup" -version = "1.1.2" -description = "Backport of PEP 654 (exception groups)" -optional = false -python-versions = ">=3.7" -files = [ - {file = "exceptiongroup-1.1.2-py3-none-any.whl", hash = "sha256:e346e69d186172ca7cf029c8c1d16235aa0e04035e5750b4b95039e65204328f"}, - {file = "exceptiongroup-1.1.2.tar.gz", hash = "sha256:12c3e887d6485d16943a309616de20ae5582633e0a2eda17f4e10fd61c1e8af5"}, -] - -[package.extras] -test = ["pytest (>=6)"] - -[[package]] -name = "gaffe" -version = "0.2.0" -description = "Simple structured exceptions for python." 
-optional = false -python-versions = ">=3.8,<4.0" -files = [ - {file = "gaffe-0.2.0-py3-none-any.whl", hash = "sha256:579adebf44310ef2a55a6363152ce217f13afe918374f37bc5c749d98a49d70b"}, - {file = "gaffe-0.2.0.tar.gz", hash = "sha256:1948b01116d17e381c40415c7dcd6799c4894bdb80ff7f36beaad50fc698f3d0"}, -] - -[[package]] -name = "grpcio" -version = "1.51.3" -description = "HTTP/2-based RPC framework" -optional = false -python-versions = ">=3.7" -files = [ - {file = "grpcio-1.51.3-cp310-cp310-linux_armv7l.whl", hash = "sha256:f601aaeae18dab81930fb8d4f916b0da21e89bb4b5f7367ef793f46b4a76b7b0"}, - {file = "grpcio-1.51.3-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:eef0450a4b5ed11feab639bf3eb1b6e23d0efa9b911bf7b06fb60e14f5f8a585"}, - {file = "grpcio-1.51.3-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:82b0ad8ac825d4bb31bff9f638557c045f4a6d824d84b21e893968286f88246b"}, - {file = "grpcio-1.51.3-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3667c06e37d6cd461afdd51cefe6537702f3d1dc5ff4cac07e88d8b4795dc16f"}, - {file = "grpcio-1.51.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3709048fe0aa23dda09b3e69849a12055790171dab9e399a72ea8f9dfbf9ac80"}, - {file = "grpcio-1.51.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:200d69857f9910f7458b39b9bcf83ee4a180591b40146ba9e49314e3a7419313"}, - {file = "grpcio-1.51.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cd9a5e68e79c5f031500e67793048a90209711e0854a9ddee8a3ce51728de4e5"}, - {file = "grpcio-1.51.3-cp310-cp310-win32.whl", hash = "sha256:6604f614016127ae10969176bbf12eb0e03d2fb3d643f050b3b69e160d144fb4"}, - {file = "grpcio-1.51.3-cp310-cp310-win_amd64.whl", hash = "sha256:e95c7ccd4c5807adef1602005513bf7c7d14e5a41daebcf9d8d30d8bf51b8f81"}, - {file = "grpcio-1.51.3-cp311-cp311-linux_armv7l.whl", hash = "sha256:5e77ee138100f0bb55cbd147840f87ee6241dbd25f09ea7cd8afe7efff323449"}, - {file = "grpcio-1.51.3-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:68a7514b754e38e8de9075f7bb4dee919919515ec68628c43a894027e40ddec4"}, - {file = "grpcio-1.51.3-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c1b9f8afa62ff265d86a4747a2990ec5a96e4efce5d5888f245a682d66eca47"}, - {file = "grpcio-1.51.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8de30f0b417744288cec65ec8cf84b8a57995cf7f1e84ccad2704d93f05d0aae"}, - {file = "grpcio-1.51.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b69c7adc7ed60da1cb1b502853db61f453fc745f940cbcc25eb97c99965d8f41"}, - {file = "grpcio-1.51.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d81528ffe0e973dc840ec73a4132fd18b8203ad129d7410155d951a0a7e4f5d0"}, - {file = "grpcio-1.51.3-cp311-cp311-win32.whl", hash = "sha256:040eb421613b57c696063abde405916dd830203c184c9000fc8c3b3b3c950325"}, - {file = "grpcio-1.51.3-cp311-cp311-win_amd64.whl", hash = "sha256:2a8e17286c4240137d933b8ca506465472248b4ce0fe46f3404459e708b65b68"}, - {file = "grpcio-1.51.3-cp37-cp37m-linux_armv7l.whl", hash = "sha256:d5cd1389669a847555df54177b911d9ff6f17345b2a6f19388707b7a9f724c88"}, - {file = "grpcio-1.51.3-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:be1bf35ce82cdbcac14e39d5102d8de4079a1c1a6a06b68e41fcd9ef64f9dd28"}, - {file = "grpcio-1.51.3-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:5eed34994c095e2bf7194ffac7381c6068b057ef1e69f8f08db77771350a7566"}, - {file = "grpcio-1.51.3-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:3f9a7d88082b2a17ae7bd3c2354d13bab0453899e0851733f6afa6918373f476"}, - {file = "grpcio-1.51.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c8abbc5f837111e7bd619612eedc223c290b0903b952ce0c7b00840ea70f14"}, - {file = "grpcio-1.51.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:165b05af77e6aecb4210ae7663e25acf234ba78a7c1c157fa5f2efeb0d6ec53c"}, - {file = "grpcio-1.51.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:54e36c2ee304ff15f2bfbdc43d2b56c63331c52d818c364e5b5214e5bc2ad9f6"}, - {file = "grpcio-1.51.3-cp37-cp37m-win32.whl", hash = "sha256:cd0daac21d9ef5e033a5100c1d3aa055bbed28bfcf070b12d8058045c4e821b1"}, - {file = "grpcio-1.51.3-cp37-cp37m-win_amd64.whl", hash = "sha256:2fdd6333ce96435408565a9dbbd446212cd5d62e4d26f6a3c0feb1e3c35f1cc8"}, - {file = "grpcio-1.51.3-cp38-cp38-linux_armv7l.whl", hash = "sha256:54b0c29bdd9a3b1e1b61443ab152f060fc719f1c083127ab08d03fac5efd51be"}, - {file = "grpcio-1.51.3-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:ffaaf7e93fcb437356b5a4b23bf36e8a3d0221399ff77fd057e4bc77776a24be"}, - {file = "grpcio-1.51.3-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:eafbe7501a3268d05f2e450e1ddaffb950d842a8620c13ec328b501d25d2e2c3"}, - {file = "grpcio-1.51.3-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:881ecb34feabf31c6b3b9bbbddd1a5b57e69f805041e5a2c6c562a28574f71c4"}, - {file = "grpcio-1.51.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e860a3222139b41d430939bbec2ec9c3f6c740938bf7a04471a9a8caaa965a2e"}, - {file = "grpcio-1.51.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:49ede0528e9dac7e8a9fe30b16c73b630ddd9a576bf4b675eb6b0c53ee5ca00f"}, - {file = "grpcio-1.51.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6972b009638b40a448d10e1bc18e2223143b8a7aa20d7def0d78dd4af4126d12"}, - {file = "grpcio-1.51.3-cp38-cp38-win32.whl", hash = "sha256:5694448256e3cdfe5bd358f1574a3f2f51afa20cc834713c4b9788d60b7cc646"}, - {file = "grpcio-1.51.3-cp38-cp38-win_amd64.whl", hash = "sha256:3ea4341efe603b049e8c9a5f13c696ca37fcdf8a23ca35f650428ad3606381d9"}, - {file = "grpcio-1.51.3-cp39-cp39-linux_armv7l.whl", hash = "sha256:6c677581ce129f5fa228b8f418cee10bd28dd449f3a544ea73c8ba590ee49d0b"}, - {file = "grpcio-1.51.3-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:30e09b5e0531685e176f49679b6a3b190762cc225f4565e55a899f5e14b3aa62"}, - {file = "grpcio-1.51.3-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:c831f31336e81243f85b6daff3e5e8a123302ce0ea1f2726ad752fd7a59f3aee"}, - {file = "grpcio-1.51.3-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2cd2e4cefb724cab1ba2df4b7535a9980531b9ec51b4dbb5f137a1f3a3754ef0"}, - {file = "grpcio-1.51.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7a0d0bf44438869d307f85a54f25a896ad6b4b0ca12370f76892ad732928d87"}, - {file = "grpcio-1.51.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c02abd55409bfb293371554adf6a4401197ec2133dd97727c01180889014ba4d"}, - {file = "grpcio-1.51.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2f8ff75e61e1227ba7a3f16b2eadbcc11d0a54096d52ab75a6b88cfbe56f55d1"}, - {file = "grpcio-1.51.3-cp39-cp39-win32.whl", hash = "sha256:6c99a73a6260bdf844b2e5ddad02dcd530310f80e1fa72c300fa19c1c7496962"}, - {file = "grpcio-1.51.3-cp39-cp39-win_amd64.whl", hash = "sha256:22bdfac4f7f27acdd4da359b5e7e1973dc74bf1ed406729b07d0759fde2f064b"}, - {file = "grpcio-1.51.3.tar.gz", hash = "sha256:be7b2265b7527bb12109a7727581e274170766d5b3c9258d4e466f4872522d7a"}, -] - 
-[package.extras] -protobuf = ["grpcio-tools (>=1.51.3)"] - -[[package]] -name = "idna" -version = "3.4" -description = "Internationalized Domain Names in Applications (IDNA)" -optional = false -python-versions = ">=3.5" -files = [ - {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, - {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, -] - -[[package]] -name = "iniconfig" -version = "2.0.0" -description = "brain-dead simple config-ini parsing" -optional = false -python-versions = ">=3.7" -files = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, -] - -[[package]] -name = "isodate" -version = "0.6.1" -description = "An ISO 8601 date/time/duration parser and formatter" -optional = false -python-versions = "*" -files = [ - {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, - {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, -] - -[package.dependencies] -six = "*" - -[[package]] -name = "josepy" -version = "1.13.0" -description = "JOSE protocol implementation in Python" -optional = false -python-versions = ">=3.6" -files = [ - {file = "josepy-1.13.0-py2.py3-none-any.whl", hash = "sha256:6f64eb35186aaa1776b7a1768651b1c616cab7f9685f9660bffc6491074a5390"}, - {file = "josepy-1.13.0.tar.gz", hash = "sha256:8931daf38f8a4c85274a0e8b7cb25addfd8d1f28f9fb8fbed053dd51aec75dc9"}, -] - -[package.dependencies] -cryptography = ">=1.5" -PyOpenSSL = ">=0.13" -setuptools = ">=1.0" - -[package.extras] -dev = ["pytest", "tox"] -docs = ["Sphinx (>=1.0)", "sphinx-rtd-theme (>=1.0)"] -tests = ["coverage (>=4.0)", "flake8 (<4)", "isort", "mypy", "pytest (>=2.8.0)", "pytest-cov", "pytest-flake8 (>=0.5)", "types-pyOpenSSL", "types-pyRFC3339", "types-requests", "types-setuptools"] - -[[package]] -name = "markdown-it-py" -version = "3.0.0" -description = "Python port of markdown-it. Markdown parsing, done right!" 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, - {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, -] - -[package.dependencies] -mdurl = ">=0.1,<1.0" - -[package.extras] -benchmarking = ["psutil", "pytest", "pytest-benchmark"] -code-style = ["pre-commit (>=3.0,<4.0)"] -compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] -linkify = ["linkify-it-py (>=1,<3)"] -plugins = ["mdit-py-plugins"] -profiling = ["gprof2dot"] -rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] -testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] - -[[package]] -name = "mdurl" -version = "0.1.2" -description = "Markdown URL utilities" -optional = false -python-versions = ">=3.7" -files = [ - {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, - {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, -] - -[[package]] -name = "msal" -version = "1.22.0" -description = "The Microsoft Authentication Library (MSAL) for Python library enables your app to access the Microsoft Cloud by supporting authentication of users with Microsoft Azure Active Directory accounts (AAD) and Microsoft Accounts (MSA) using industry standard OAuth2 and OpenID Connect." -optional = false -python-versions = "*" -files = [ - {file = "msal-1.22.0-py2.py3-none-any.whl", hash = "sha256:9120b7eafdf061c92f7b3d744e5f325fca35873445fa8ffebb40b1086a13dd58"}, - {file = "msal-1.22.0.tar.gz", hash = "sha256:8a82f5375642c1625c89058018430294c109440dce42ea667d466c2cab520acd"}, -] - -[package.dependencies] -cryptography = ">=0.6,<43" -PyJWT = {version = ">=1.0.0,<3", extras = ["crypto"]} -requests = ">=2.0.0,<3" - -[package.extras] -broker = ["pymsalruntime (>=0.13.2,<0.14)"] - -[[package]] -name = "msal-extensions" -version = "1.0.0" -description = "Microsoft Authentication Library extensions (MSAL EX) provides a persistence API that can save your data on disk, encrypted on Windows, macOS and Linux. Concurrent data access will be coordinated by a file lock mechanism." -optional = false -python-versions = "*" -files = [ - {file = "msal-extensions-1.0.0.tar.gz", hash = "sha256:c676aba56b0cce3783de1b5c5ecfe828db998167875126ca4b47dc6436451354"}, - {file = "msal_extensions-1.0.0-py2.py3-none-any.whl", hash = "sha256:91e3db9620b822d0ed2b4d1850056a0f133cba04455e62f11612e40f5502f2ee"}, -] - -[package.dependencies] -msal = ">=0.4.1,<2.0.0" -portalocker = [ - {version = ">=1.0,<3", markers = "python_version >= \"3.5\" and platform_system != \"Windows\""}, - {version = ">=1.6,<3", markers = "python_version >= \"3.5\" and platform_system == \"Windows\""}, -] - -[[package]] -name = "msrest" -version = "0.7.1" -description = "AutoRest swagger generator Python client runtime." 
-optional = false -python-versions = ">=3.6" -files = [ - {file = "msrest-0.7.1-py3-none-any.whl", hash = "sha256:21120a810e1233e5e6cc7fe40b474eeb4ec6f757a15d7cf86702c369f9567c32"}, - {file = "msrest-0.7.1.zip", hash = "sha256:6e7661f46f3afd88b75667b7187a92829924446c7ea1d169be8c4bb7eeb788b9"}, -] - -[package.dependencies] -azure-core = ">=1.24.0" -certifi = ">=2017.4.17" -isodate = ">=0.6.0" -requests = ">=2.16,<3.0" -requests-oauthlib = ">=0.5.0" - -[package.extras] -async = ["aiodns", "aiohttp (>=3.0)"] - -[[package]] -name = "mypy" -version = "1.4.1" -description = "Optional static typing for Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "mypy-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:566e72b0cd6598503e48ea610e0052d1b8168e60a46e0bfd34b3acf2d57f96a8"}, - {file = "mypy-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ca637024ca67ab24a7fd6f65d280572c3794665eaf5edcc7e90a866544076878"}, - {file = "mypy-1.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dde1d180cd84f0624c5dcaaa89c89775550a675aff96b5848de78fb11adabcd"}, - {file = "mypy-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8c4d8e89aa7de683e2056a581ce63c46a0c41e31bd2b6d34144e2c80f5ea53dc"}, - {file = "mypy-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:bfdca17c36ae01a21274a3c387a63aa1aafe72bff976522886869ef131b937f1"}, - {file = "mypy-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7549fbf655e5825d787bbc9ecf6028731973f78088fbca3a1f4145c39ef09462"}, - {file = "mypy-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:98324ec3ecf12296e6422939e54763faedbfcc502ea4a4c38502082711867258"}, - {file = "mypy-1.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:141dedfdbfe8a04142881ff30ce6e6653c9685b354876b12e4fe6c78598b45e2"}, - {file = "mypy-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8207b7105829eca6f3d774f64a904190bb2231de91b8b186d21ffd98005f14a7"}, - {file = "mypy-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:16f0db5b641ba159eff72cff08edc3875f2b62b2fa2bc24f68c1e7a4e8232d01"}, - {file = "mypy-1.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:470c969bb3f9a9efcedbadcd19a74ffb34a25f8e6b0e02dae7c0e71f8372f97b"}, - {file = "mypy-1.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5952d2d18b79f7dc25e62e014fe5a23eb1a3d2bc66318df8988a01b1a037c5b"}, - {file = "mypy-1.4.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:190b6bab0302cec4e9e6767d3eb66085aef2a1cc98fe04936d8a42ed2ba77bb7"}, - {file = "mypy-1.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9d40652cc4fe33871ad3338581dca3297ff5f2213d0df345bcfbde5162abf0c9"}, - {file = "mypy-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:01fd2e9f85622d981fd9063bfaef1aed6e336eaacca00892cd2d82801ab7c042"}, - {file = "mypy-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2460a58faeea905aeb1b9b36f5065f2dc9a9c6e4c992a6499a2360c6c74ceca3"}, - {file = "mypy-1.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2746d69a8196698146a3dbe29104f9eb6a2a4d8a27878d92169a6c0b74435b6"}, - {file = "mypy-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ae704dcfaa180ff7c4cfbad23e74321a2b774f92ca77fd94ce1049175a21c97f"}, - {file = "mypy-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:43d24f6437925ce50139a310a64b2ab048cb2d3694c84c71c3f2a1626d8101dc"}, - {file = "mypy-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:c482e1246726616088532b5e964e39765b6d1520791348e6c9dc3af25b233828"}, - {file = "mypy-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:43b592511672017f5b1a483527fd2684347fdffc041c9ef53428c8dc530f79a3"}, - {file = "mypy-1.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:34a9239d5b3502c17f07fd7c0b2ae6b7dd7d7f6af35fbb5072c6208e76295816"}, - {file = "mypy-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5703097c4936bbb9e9bce41478c8d08edd2865e177dc4c52be759f81ee4dd26c"}, - {file = "mypy-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e02d700ec8d9b1859790c0475df4e4092c7bf3272a4fd2c9f33d87fac4427b8f"}, - {file = "mypy-1.4.1-py3-none-any.whl", hash = "sha256:45d32cec14e7b97af848bddd97d85ea4f0db4d5a149ed9676caa4eb2f7402bb4"}, - {file = "mypy-1.4.1.tar.gz", hash = "sha256:9bbcd9ab8ea1f2e1c8031c21445b511442cc45c89951e49bbf852cbb70755b1b"}, -] - -[package.dependencies] -mypy-extensions = ">=1.0.0" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=4.1.0" - -[package.extras] -dmypy = ["psutil (>=4.0)"] -install-types = ["pip"] -python2 = ["typed-ast (>=1.4.0,<2)"] -reports = ["lxml"] - -[[package]] -name = "mypy-extensions" -version = "1.0.0" -description = "Type system extensions for programs checked with the mypy type checker." -optional = false -python-versions = ">=3.5" -files = [ - {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, - {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, -] - -[[package]] -name = "oauthlib" -version = "3.2.2" -description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" -optional = false -python-versions = ">=3.6" -files = [ - {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"}, - {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"}, -] - -[package.extras] -rsa = ["cryptography (>=3.0.0)"] -signals = ["blinker (>=1.4.0)"] -signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] - -[[package]] -name = "packaging" -version = "23.1" -description = "Core utilities for Python packages" -optional = false -python-versions = ">=3.7" -files = [ - {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, - {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, -] - -[[package]] -name = "parver" -version = "0.4" -description = "Parse and manipulate version numbers." -optional = false -python-versions = ">=3.7" -files = [ - {file = "parver-0.4-py3-none-any.whl", hash = "sha256:c66d3347a4858643875ef959d8ba7a269d5964bfb690b0dd998b8f39da930be2"}, - {file = "parver-0.4.tar.gz", hash = "sha256:d4a3dbb93c53373ee9a0ba055e4858c44169b204b912e49d003ead95db9a9bca"}, -] - -[package.dependencies] -arpeggio = ">=1.7" -attrs = ">=19.2" - -[package.extras] -docs = ["furo", "sphinx"] -docstest = ["doc8"] -pep8test = ["flake8", "pep8-naming"] -test = ["hypothesis", "pretend", "pytest"] - -[[package]] -name = "pathspec" -version = "0.11.1" -description = "Utility library for gitignore style pattern matching of file paths." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "pathspec-0.11.1-py3-none-any.whl", hash = "sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293"}, - {file = "pathspec-0.11.1.tar.gz", hash = "sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687"}, -] - -[[package]] -name = "platformdirs" -version = "3.8.1" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -optional = false -python-versions = ">=3.7" -files = [ - {file = "platformdirs-3.8.1-py3-none-any.whl", hash = "sha256:cec7b889196b9144d088e4c57d9ceef7374f6c39694ad1577a0aab50d27ea28c"}, - {file = "platformdirs-3.8.1.tar.gz", hash = "sha256:f87ca4fcff7d2b0f81c6a748a77973d7af0f4d526f98f308477c3c436c74d528"}, -] - -[package.extras] -docs = ["furo (>=2023.5.20)", "proselint (>=0.13)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)"] - -[[package]] -name = "pluggy" -version = "1.2.0" -description = "plugin and hook calling mechanisms for python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"}, - {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"}, -] - -[package.extras] -dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] - -[[package]] -name = "portalocker" -version = "2.7.0" -description = "Wraps the portalocker recipe for easy usage" -optional = false -python-versions = ">=3.5" -files = [ - {file = "portalocker-2.7.0-py2.py3-none-any.whl", hash = "sha256:a07c5b4f3985c3cf4798369631fb7011adb498e2a46d8440efc75a8f29a0f983"}, - {file = "portalocker-2.7.0.tar.gz", hash = "sha256:032e81d534a88ec1736d03f780ba073f047a06c478b06e2937486f334e955c51"}, -] - -[package.dependencies] -pywin32 = {version = ">=226", markers = "platform_system == \"Windows\""} - -[package.extras] -docs = ["sphinx (>=1.7.1)"] -redis = ["redis"] -tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "pytest-timeout (>=2.1.0)", "redis", "sphinx (>=6.0.0)"] - -[[package]] -name = "protobuf" -version = "4.23.4" -description = "" -optional = false -python-versions = ">=3.7" -files = [ - {file = "protobuf-4.23.4-cp310-abi3-win32.whl", hash = "sha256:5fea3c64d41ea5ecf5697b83e41d09b9589e6f20b677ab3c48e5f242d9b7897b"}, - {file = "protobuf-4.23.4-cp310-abi3-win_amd64.whl", hash = "sha256:7b19b6266d92ca6a2a87effa88ecc4af73ebc5cfde194dc737cf8ef23a9a3b12"}, - {file = "protobuf-4.23.4-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:8547bf44fe8cec3c69e3042f5c4fb3e36eb2a7a013bb0a44c018fc1e427aafbd"}, - {file = "protobuf-4.23.4-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:fee88269a090ada09ca63551bf2f573eb2424035bcf2cb1b121895b01a46594a"}, - {file = "protobuf-4.23.4-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:effeac51ab79332d44fba74660d40ae79985901ac21bca408f8dc335a81aa597"}, - {file = "protobuf-4.23.4-cp37-cp37m-win32.whl", hash = "sha256:c3e0939433c40796ca4cfc0fac08af50b00eb66a40bbbc5dee711998fb0bbc1e"}, - {file = "protobuf-4.23.4-cp37-cp37m-win_amd64.whl", hash = "sha256:9053df6df8e5a76c84339ee4a9f5a2661ceee4a0dab019e8663c50ba324208b0"}, - {file = "protobuf-4.23.4-cp38-cp38-win32.whl", hash = "sha256:e1c915778d8ced71e26fcf43c0866d7499891bca14c4368448a82edc61fdbc70"}, - {file = 
"protobuf-4.23.4-cp38-cp38-win_amd64.whl", hash = "sha256:351cc90f7d10839c480aeb9b870a211e322bf05f6ab3f55fcb2f51331f80a7d2"}, - {file = "protobuf-4.23.4-cp39-cp39-win32.whl", hash = "sha256:6dd9b9940e3f17077e820b75851126615ee38643c2c5332aa7a359988820c720"}, - {file = "protobuf-4.23.4-cp39-cp39-win_amd64.whl", hash = "sha256:0a5759f5696895de8cc913f084e27fd4125e8fb0914bb729a17816a33819f474"}, - {file = "protobuf-4.23.4-py3-none-any.whl", hash = "sha256:e9d0be5bf34b275b9f87ba7407796556abeeba635455d036c7351f7c183ef8ff"}, - {file = "protobuf-4.23.4.tar.gz", hash = "sha256:ccd9430c0719dce806b93f89c91de7977304729e55377f872a92465d548329a9"}, -] - -[[package]] -name = "psycopg2" -version = "2.9.6" -description = "psycopg2 - Python-PostgreSQL Database Adapter" -optional = false -python-versions = ">=3.6" -files = [ - {file = "psycopg2-2.9.6-cp310-cp310-win32.whl", hash = "sha256:f7a7a5ee78ba7dc74265ba69e010ae89dae635eea0e97b055fb641a01a31d2b1"}, - {file = "psycopg2-2.9.6-cp310-cp310-win_amd64.whl", hash = "sha256:f75001a1cbbe523e00b0ef896a5a1ada2da93ccd752b7636db5a99bc57c44494"}, - {file = "psycopg2-2.9.6-cp311-cp311-win32.whl", hash = "sha256:53f4ad0a3988f983e9b49a5d9765d663bbe84f508ed655affdb810af9d0972ad"}, - {file = "psycopg2-2.9.6-cp311-cp311-win_amd64.whl", hash = "sha256:b81fcb9ecfc584f661b71c889edeae70bae30d3ef74fa0ca388ecda50b1222b7"}, - {file = "psycopg2-2.9.6-cp36-cp36m-win32.whl", hash = "sha256:11aca705ec888e4f4cea97289a0bf0f22a067a32614f6ef64fcf7b8bfbc53744"}, - {file = "psycopg2-2.9.6-cp36-cp36m-win_amd64.whl", hash = "sha256:36c941a767341d11549c0fbdbb2bf5be2eda4caf87f65dfcd7d146828bd27f39"}, - {file = "psycopg2-2.9.6-cp37-cp37m-win32.whl", hash = "sha256:869776630c04f335d4124f120b7fb377fe44b0a7645ab3c34b4ba42516951889"}, - {file = "psycopg2-2.9.6-cp37-cp37m-win_amd64.whl", hash = "sha256:a8ad4a47f42aa6aec8d061fdae21eaed8d864d4bb0f0cade5ad32ca16fcd6258"}, - {file = "psycopg2-2.9.6-cp38-cp38-win32.whl", hash = "sha256:2362ee4d07ac85ff0ad93e22c693d0f37ff63e28f0615a16b6635a645f4b9214"}, - {file = "psycopg2-2.9.6-cp38-cp38-win_amd64.whl", hash = "sha256:d24ead3716a7d093b90b27b3d73459fe8cd90fd7065cf43b3c40966221d8c394"}, - {file = "psycopg2-2.9.6-cp39-cp39-win32.whl", hash = "sha256:1861a53a6a0fd248e42ea37c957d36950da00266378746588eab4f4b5649e95f"}, - {file = "psycopg2-2.9.6-cp39-cp39-win_amd64.whl", hash = "sha256:ded2faa2e6dfb430af7713d87ab4abbfc764d8d7fb73eafe96a24155f906ebf5"}, - {file = "psycopg2-2.9.6.tar.gz", hash = "sha256:f15158418fd826831b28585e2ab48ed8df2d0d98f502a2b4fe619e7d5ca29011"}, -] - -[[package]] -name = "pulumi" -version = "3.74.0" -description = "Pulumi's Python SDK" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pulumi-3.74.0-py3-none-any.whl", hash = "sha256:d6b6c10ffc85f18694a6fbf3355d1f93a7004e3b6fb7d03da1ec4ccaae659a2e"}, -] - -[package.dependencies] -dill = ">=0.3,<1.0" -grpcio = "1.51.3" -protobuf = ">=4.21,<5.0" -pyyaml = ">=6.0,<7.0" -semver = ">=2.13,<3.0" -six = ">=1.12,<2.0" - -[[package]] -name = "pulumi-azure-native" -version = "1.103.0" -description = "A native Pulumi package for creating and managing Azure resources." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "pulumi_azure_native-1.103.0.tar.gz", hash = "sha256:27290b9e641603cc1e0335faf191d45b97cb374dcc7e1136548f52019b99e48b"}, -] - -[package.dependencies] -parver = ">=0.2.1" -pulumi = ">=3.35.0,<4.0.0" -semver = ">=2.8.1" - -[[package]] -name = "pycparser" -version = "2.21" -description = "C parser in Python" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, - {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, -] - -[[package]] -name = "pygments" -version = "2.15.1" -description = "Pygments is a syntax highlighting package written in Python." -optional = false -python-versions = ">=3.7" -files = [ - {file = "Pygments-2.15.1-py3-none-any.whl", hash = "sha256:db2db3deb4b4179f399a09054b023b6a586b76499d36965813c71aa8ed7b5fd1"}, - {file = "Pygments-2.15.1.tar.gz", hash = "sha256:8ace4d3c1dd481894b2005f560ead0f9f19ee64fe983366be1a21e171d12775c"}, -] - -[package.extras] -plugins = ["importlib-metadata"] - -[[package]] -name = "pyjwt" -version = "2.7.0" -description = "JSON Web Token implementation in Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "PyJWT-2.7.0-py3-none-any.whl", hash = "sha256:ba2b425b15ad5ef12f200dc67dd56af4e26de2331f965c5439994dad075876e1"}, - {file = "PyJWT-2.7.0.tar.gz", hash = "sha256:bd6ca4a3c4285c1a2d4349e5a035fdf8fb94e04ccd0fcbe6ba289dae9cc3e074"}, -] - -[package.dependencies] -cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"crypto\""} - -[package.extras] -crypto = ["cryptography (>=3.4.0)"] -dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] -docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] -tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] - -[[package]] -name = "pyopenssl" -version = "21.0.0" -description = "Python wrapper module around the OpenSSL library" -optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*" -files = [ - {file = "pyOpenSSL-21.0.0-py2.py3-none-any.whl", hash = "sha256:8935bd4920ab9abfebb07c41a4f58296407ed77f04bd1a92914044b848ba1ed6"}, - {file = "pyOpenSSL-21.0.0.tar.gz", hash = "sha256:5e2d8c5e46d0d865ae933bef5230090bdaf5506281e9eec60fa250ee80600cb3"}, -] - -[package.dependencies] -cryptography = ">=3.3" -six = ">=1.5.2" - -[package.extras] -docs = ["sphinx", "sphinx-rtd-theme"] -test = ["flaky", "pretend", "pytest (>=3.0.1)"] - -[[package]] -name = "pyrfc3339" -version = "1.1" -description = "Generate and parse RFC 3339 timestamps" -optional = false -python-versions = "*" -files = [ - {file = "pyRFC3339-1.1-py2.py3-none-any.whl", hash = "sha256:67196cb83b470709c580bb4738b83165e67c6cc60e1f2e4f286cfcb402a926f4"}, - {file = "pyRFC3339-1.1.tar.gz", hash = "sha256:81b8cbe1519cdb79bed04910dd6fa4e181faf8c88dff1e1b987b5f7ab23a5b1a"}, -] - -[package.dependencies] -pytz = "*" - -[[package]] -name = "pytest" -version = "7.4.0" -description = "pytest: simple powerful testing with Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pytest-7.4.0-py3-none-any.whl", hash = "sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32"}, - {file = "pytest-7.4.0.tar.gz", hash = 
"sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=0.12,<2.0" -tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} - -[package.extras] -testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] - -[[package]] -name = "pytz" -version = "2022.7.1" -description = "World timezone definitions, modern and historical" -optional = false -python-versions = "*" -files = [ - {file = "pytz-2022.7.1-py2.py3-none-any.whl", hash = "sha256:78f4f37d8198e0627c5f1143240bb0206b8691d8d7ac6d78fee88b78733f8c4a"}, - {file = "pytz-2022.7.1.tar.gz", hash = "sha256:01a0681c4b9684a28304615eba55d1ab31ae00bf68ec157ec3708a8182dbbcd0"}, -] - -[[package]] -name = "pywin32" -version = "306" -description = "Python for Window Extensions" -optional = false -python-versions = "*" -files = [ - {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, - {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, - {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, - {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, - {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, - {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"}, - {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"}, - {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"}, - {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"}, - {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"}, - {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"}, - {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"}, - {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, - {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, -] - -[[package]] -name = "pyyaml" -version = "6.0" -description = "YAML parser and emitter for Python" -optional = false -python-versions = ">=3.6" -files = [ - {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, - {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, - {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, - {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, - {file = "PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"}, - {file = "PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"}, - {file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"}, - {file = "PyYAML-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"}, - {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, - {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, - {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, - {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, - {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, - {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = 
"sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, - {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, - {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, - {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, - {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, - {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, - {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, - {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, - {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, -] - -[[package]] -name = "requests" -version = "2.31.0" -description = "Python HTTP for Humans." -optional = false -python-versions = ">=3.7" -files = [ - {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, - {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, -] - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<3" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] - -[[package]] -name = "requests-oauthlib" -version = "1.3.1" -description = "OAuthlib authentication support for Requests." 
-optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "requests-oauthlib-1.3.1.tar.gz", hash = "sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a"}, - {file = "requests_oauthlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5"}, -] - -[package.dependencies] -oauthlib = ">=3.0.0" -requests = ">=2.0.0" - -[package.extras] -rsa = ["oauthlib[signedtoken] (>=3.0.0)"] - -[[package]] -name = "rich" -version = "13.4.2" -description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "rich-13.4.2-py3-none-any.whl", hash = "sha256:8f87bc7ee54675732fa66a05ebfe489e27264caeeff3728c945d25971b6485ec"}, - {file = "rich-13.4.2.tar.gz", hash = "sha256:d653d6bccede5844304c605d5aac802c7cf9621efd700b46c7ec2b51ea914898"}, -] - -[package.dependencies] -markdown-it-py = ">=2.2.0" -pygments = ">=2.13.0,<3.0.0" - -[package.extras] -jupyter = ["ipywidgets (>=7.5.1,<9)"] - -[[package]] -name = "semver" -version = "2.13.0" -description = "Python helper for Semantic Versioning (http://semver.org/)" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "semver-2.13.0-py2.py3-none-any.whl", hash = "sha256:ced8b23dceb22134307c1b8abfa523da14198793d9787ac838e70e29e77458d4"}, - {file = "semver-2.13.0.tar.gz", hash = "sha256:fa0fe2722ee1c3f57eac478820c3a5ae2f624af8264cbdf9000c980ff7f75e3f"}, -] - -[[package]] -name = "setuptools" -version = "68.0.0" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -optional = false -python-versions = ">=3.7" -files = [ - {file = "setuptools-68.0.0-py3-none-any.whl", hash = "sha256:11e52c67415a381d10d6b462ced9cfb97066179f0e871399e006c4ab101fc85f"}, - {file = "setuptools-68.0.0.tar.gz", hash = "sha256:baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235"}, -] - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] - -[[package]] -name = "simple-acme-dns" -version = "1.2.2" -description = "A Python ACME client for the DNS-01 challenge" -optional = false -python-versions = ">=3.7" -files = [ - {file = "simple_acme_dns-1.2.2-py3-none-any.whl", hash = "sha256:946590cba54916da4753ebf5657d99a6d2ab42392077a8c49b51384297d92334"}, - {file = "simple_acme_dns-1.2.2.tar.gz", hash = "sha256:ed164f78be9e5b5422df4d6fcc6673990b0566b956ef4af157c46f6b71d445b3"}, -] - -[package.dependencies] -acme = ">=2.2.0,<2.3.0" -dnspython = ">=2.3.0,<2.4.0" -validators = ">=0.20.0,<0.21.0" - 
-[[package]] -name = "six" -version = "1.16.0" -description = "Python 2 and 3 compatibility utilities" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] - -[[package]] -name = "tomli" -version = "2.0.1" -description = "A lil' TOML parser" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, -] - -[[package]] -name = "typer" -version = "0.9.0" -description = "Typer, build great CLIs. Easy to code. Based on Python type hints." -optional = false -python-versions = ">=3.6" -files = [ - {file = "typer-0.9.0-py3-none-any.whl", hash = "sha256:5d96d986a21493606a358cae4461bd8cdf83cbf33a5aa950ae629ca3b51467ee"}, - {file = "typer-0.9.0.tar.gz", hash = "sha256:50922fd79aea2f4751a8e0408ff10d2662bd0c8bbfa84755a699f3bada2978b2"}, -] - -[package.dependencies] -click = ">=7.1.1,<9.0.0" -typing-extensions = ">=3.7.4.3" - -[package.extras] -all = ["colorama (>=0.4.3,<0.5.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"] -dev = ["autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "pre-commit (>=2.17.0,<3.0.0)"] -doc = ["cairosvg (>=2.5.2,<3.0.0)", "mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pillow (>=9.3.0,<10.0.0)"] -test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.910)", "pytest (>=4.4.0,<8.0.0)", "pytest-cov (>=2.10.0,<5.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<4.0.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"] - -[[package]] -name = "types-chevron" -version = "0.14.2.4" -description = "Typing stubs for chevron" -optional = false -python-versions = "*" -files = [ - {file = "types-chevron-0.14.2.4.tar.gz", hash = "sha256:17e8132b60f668f9b60059453eb3a90b5b5dcdafc0b843b14924cf739a2d7a2b"}, - {file = "types_chevron-0.14.2.4-py3-none-any.whl", hash = "sha256:b6adb0a8a2e5c9063ac355282ff8e1bb7525812cb5fabcf4c2c25b3df2cb1820"}, -] - -[[package]] -name = "types-psycopg2" -version = "2.9.21.10" -description = "Typing stubs for psycopg2" -optional = false -python-versions = "*" -files = [ - {file = "types-psycopg2-2.9.21.10.tar.gz", hash = "sha256:c2600892312ae1c34e12f145749795d93dc4eac3ef7dbf8a9c1bfd45385e80d7"}, - {file = "types_psycopg2-2.9.21.10-py3-none-any.whl", hash = "sha256:918224a0731a3650832e46633e720703b5beef7693a064e777d9748654fcf5e5"}, -] - -[[package]] -name = "types-pytz" -version = "2022.7.1.2" -description = "Typing stubs for pytz" -optional = false -python-versions = "*" -files = [ - {file = "types-pytz-2022.7.1.2.tar.gz", hash = "sha256:487d3e8e9f4071eec8081746d53fa982bbc05812e719dcbf2ebf3d55a1a4cd28"}, - {file = "types_pytz-2022.7.1.2-py3-none-any.whl", hash = "sha256:40ca448a928d566f7d44ddfde0066e384f7ffbd4da2778e42a4570eaca572446"}, -] - -[[package]] -name = "types-pyyaml" -version = "6.0.12.10" -description = "Typing stubs for PyYAML" -optional = false -python-versions = "*" -files = [ - {file = "types-PyYAML-6.0.12.10.tar.gz", hash = "sha256:ebab3d0700b946553724ae6ca636ea932c1b0868701d4af121630e78d695fc97"}, - {file = 
"types_PyYAML-6.0.12.10-py3-none-any.whl", hash = "sha256:662fa444963eff9b68120d70cda1af5a5f2aa57900003c2006d7626450eaae5f"}, -] - -[[package]] -name = "types-requests" -version = "2.31.0.1" -description = "Typing stubs for requests" -optional = false -python-versions = "*" -files = [ - {file = "types-requests-2.31.0.1.tar.gz", hash = "sha256:3de667cffa123ce698591de0ad7db034a5317457a596eb0b4944e5a9d9e8d1ac"}, - {file = "types_requests-2.31.0.1-py3-none-any.whl", hash = "sha256:afb06ef8f25ba83d59a1d424bd7a5a939082f94b94e90ab5e6116bd2559deaa3"}, -] - -[package.dependencies] -types-urllib3 = "*" - -[[package]] -name = "types-setuptools" -version = "67.8.0.0" -description = "Typing stubs for setuptools" -optional = false -python-versions = "*" -files = [ - {file = "types-setuptools-67.8.0.0.tar.gz", hash = "sha256:95c9ed61871d6c0e258433373a4e1753c0a7c3627a46f4d4058c7b5a08ab844f"}, - {file = "types_setuptools-67.8.0.0-py3-none-any.whl", hash = "sha256:6df73340d96b238a4188b7b7668814b37e8018168aef1eef94a3b1872e3f60ff"}, -] - -[[package]] -name = "types-urllib3" -version = "1.26.25.13" -description = "Typing stubs for urllib3" -optional = false -python-versions = "*" -files = [ - {file = "types-urllib3-1.26.25.13.tar.gz", hash = "sha256:3300538c9dc11dad32eae4827ac313f5d986b8b21494801f1bf97a1ac6c03ae5"}, - {file = "types_urllib3-1.26.25.13-py3-none-any.whl", hash = "sha256:5dbd1d2bef14efee43f5318b5d36d805a489f6600252bb53626d4bfafd95e27c"}, -] - -[[package]] -name = "typing-extensions" -version = "4.7.1" -description = "Backported and Experimental Type Hints for Python 3.7+" -optional = false -python-versions = ">=3.7" -files = [ - {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, - {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, -] - -[[package]] -name = "urllib3" -version = "2.0.3" -description = "HTTP library with thread-safe connection pooling, file post, and more." -optional = false -python-versions = ">=3.7" -files = [ - {file = "urllib3-2.0.3-py3-none-any.whl", hash = "sha256:48e7fafa40319d358848e1bc6809b208340fafe2096f1725d05d67443d0483d1"}, - {file = "urllib3-2.0.3.tar.gz", hash = "sha256:bee28b5e56addb8226c96f7f13ac28cb4c301dd5ea8a6ca179c0b9835e032825"}, -] - -[package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] -secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"] -socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] -zstd = ["zstandard (>=0.18.0)"] - -[[package]] -name = "validators" -version = "0.20.0" -description = "Python Data Validation for Humans™." 
-optional = false
-python-versions = ">=3.4"
-files = [
-    {file = "validators-0.20.0.tar.gz", hash = "sha256:24148ce4e64100a2d5e267233e23e7afeb55316b47d30faae7eb6e7292bc226a"},
-]
-
-[package.dependencies]
-decorator = ">=3.4.0"
-
-[package.extras]
-test = ["flake8 (>=2.4.0)", "isort (>=4.2.2)", "pytest (>=2.2.3)"]
-
-[[package]]
-name = "websocket-client"
-version = "1.6.1"
-description = "WebSocket client for Python with low level API options"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "websocket-client-1.6.1.tar.gz", hash = "sha256:c951af98631d24f8df89ab1019fc365f2227c0892f12fd150e935607c79dd0dd"},
-    {file = "websocket_client-1.6.1-py3-none-any.whl", hash = "sha256:f1f9f2ad5291f0225a49efad77abf9e700b6fef553900623060dad6e26503b9d"},
-]
-
-[package.extras]
-docs = ["Sphinx (>=3.4)", "sphinx-rtd-theme (>=0.5)"]
-optional = ["python-socks", "wsaccel"]
-test = ["websockets"]
-
-[metadata]
-lock-version = "2.0"
-python-versions = "^3.10"
-content-hash = "5074464851c4d0fa91e6d5cbc9e185dffce313d98991448756f02eff4e6a068b"
diff --git a/pyproject.toml b/pyproject.toml
index 31e16afe3b..eebd00ff08 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,58 +1,133 @@
-[tool.poetry]
+[build-system]
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[project]
 name = "data-safe-haven"
-version = "4.0.0"
+dynamic = ["version"]
 description = "An open-source framework for creating secure environments to analyse sensitive data."
-authors = ["Data Safe Haven development team <safehavendevs@turing.ac.uk>"]
+authors = [
+    { name = "Data Safe Haven development team", email = "safehavendevs@turing.ac.uk" },
+]
+requires-python = ">=3.10"
+license = "BSD-3-Clause"
+dependencies = [
+    "appdirs~=1.4.4",
+    "azure-core~=1.26.0",
+    "azure-identity~=1.12.0",
+    "azure-keyvault-certificates~=4.6.0",
+    "azure-keyvault-keys~=4.6.0",
+    "azure-keyvault-secrets~=4.6.0",
+    "azure-mgmt-automation~=1.0.0",
+    "azure-mgmt-compute~=29.1.0",
+    "azure-mgmt-containerinstance~=9.2.0",
+    "azure-mgmt-dns~=8.0.0",
+    "azure-mgmt-keyvault~=9.3.0",
+    "azure-mgmt-msi~=6.1.0",
+    "azure-mgmt-network~=20.0.0",
+    "azure-mgmt-rdbms~=10.1.0",
+    "azure-mgmt-resource~=20.1.0",
+    "azure-mgmt-storage~=19.1.0",
+    "azure-storage-blob~=12.15.0",
+    "azure-storage-file-datalake~=12.10.0",
+    "azure-storage-file-share~=12.10.0",
+    "chevron~=0.14.0",
+    "chili~=2.1.0",
+    "cryptography~=3.4.0",
+    "dnspython~=2.3.0",
+    "msal~=1.21.0",
+    "psycopg2~=2.9.0",
+    "pulumi~=3.67.0",
+    "pulumi-azure-native~=1.95.0",
+    "pytz~=2022.7.0",
+    "PyYAML~=6.0",
+    "rich~=13.4.2",
+    "simple-acme-dns~=1.2.0",
+    "typer~=0.9.0",
+    "websocket-client~=1.5.0",
+]
-[tool.poetry.dependencies]
-python = "^3.10"
-appdirs = "^1.4.4"
-azure-core = "^1.26.0"
-azure-identity = "^1.12.0"
-azure-keyvault-certificates = "^4.6.0"
-azure-keyvault-keys = "^4.6.0"
-azure-keyvault-secrets = "^4.6.0"
-azure-mgmt-automation = "^1.0.0"
-azure-mgmt-compute = "^29.1.0"
-azure-mgmt-containerinstance = "^9.2.0"
-azure-mgmt-dns = "^8.0.0"
-azure-mgmt-keyvault = "^9.3.0"
-azure-mgmt-msi = "^6.1.0"
-azure-mgmt-network = "^20.0.0"
-azure-mgmt-rdbms = "^10.1.0"
-azure-mgmt-resource = "^20.1.0"
-azure-mgmt-storage = "^19.1.0"
-azure-storage-blob = "^12.14.0"
-azure-storage-file-datalake = "^12.10.0"
-azure-storage-file-share = "^12.10.0"
-chevron = "^0.14.0"
-chili = "^2.1.0"
-cryptography = "^3.4.0"
-dnspython = "^2.3.0"
-msal = "^1.21.0"
-psycopg2 = "^2.9.0"
-pulumi = "^3.67.0"
-pulumi-azure-native = "^1.95.0"
-pytz = "^2022.7.0"
-PyYAML = "^6.0"
-rich = "^13.4.2"
-simple-acme-dns = "^1.2.0"
-typer = "^0.9.0"
-websocket-client = "^1.5.0"
+[project.scripts]
+dsh = "data_safe_haven.cli:main"
-[tool.poetry.dev-dependencies]
-black = "^22.12.0"
-mypy = "^1.0.0"
-pytest = "^7.2"
-types-chevron = "^0.14.0"
-types-psycopg2 = "^2.9.0"
-types-pytz = "^2022.7.0"
-types-PyYAML = "^6.0"
-types-requests = "^2.28.0"
-types-setuptools = "^67.3.0"
+[tool.hatch.version]
+path = "data_safe_haven/version.py"
-[tool.poetry.scripts]
-dsh = "data_safe_haven.cli:main"
+[tool.hatch.envs.lint]
+detached = true
+dependencies = [
+    "black>=23.1.0",
+    "mypy>=1.0.0",
+    "ruff>=0.0.243",
+]
+
+[tool.hatch.envs.lint.scripts]
+typing = "mypy --strict --install-types --non-interactive --namespace-packages --explicit-package-bases {args:data_safe_haven}"
+style = [
+    "ruff {args:data_safe_haven}",
+    "black --check --diff {args:data_safe_haven}",
+]
+fmt = [
+    "black {args:data_safe_haven}",
+    "ruff --fix {args:data_safe_haven}",
+    "style",
+]
+all = [
+    "style",
+    "typing",
+]
+
+[tool.black]
+target-version = ["py310", "py311"]
+
+[tool.ruff]
+select = [
+    # See https://beta.ruff.rs/docs/rules/
+    "A", # flake8-builtins
+    "ARG", # flake8-unused-arguments
+    "B", # flake8-bugbear
+    "C", # complexity, mccabe and flake8-comprehensions
+    "DTZ", # flake8-datetimez
+    "E", # pycodestyle errors
+    "EM", # flake8-errmsg
+    "F", # pyflakes
+    "FBT", # flake8-boolean-trap
+    "I", # isort
+    "ICN", # flake8-import-conventions
+    "ISC", # flake8-implicit-str-concat
+    "N", # pep8-naming
+    "PLC", # pylint convention
+    "PLE", # pylint error
+    "PLR", # pylint refactor
+    "PLW", # pylint warning
+    "Q", # flake8-quotes
+    "RUF", # ruff rules
+    "S", # flake8-bandit
+    "T", # flake8-debugger and flake8-print
+    "TID", # flake8-tidy-imports
+    "UP", # pyupgrade
+    "W", # pycodestyle warnings
+    "YTT", # flake8-2020
+]
+ignore = [
+    # Ignore line length
+    "E501",
+    # Ignore checks for possible passwords
+    "S105", "S106", "S107",
+    # Ignore subprocess calls that do not use an absolute path
+    "S607",
+    # Allow subprocess without shell (less vulnerable than with shell)
+    "S603",
+    # Ignore complexity
+    "C901", "PLR0911", "PLR0912", "PLR0913", "PLR0915",
+]
+
+[tool.ruff.isort]
+known-first-party = ["data_safe_haven"]
+combine-as-imports = true
+
+[tool.ruff.flake8-tidy-imports]
+ban-relative-imports = "parents"
 
 [tool.mypy]
 strict = true
@@ -68,15 +143,8 @@ module = [
     "azure.mgmt.rdbms.postgresql.*",
     "azure.mgmt.resource.*",
     "azure.mgmt.storage.*",
-    "cleo.*",
-    "clikit.*",
-    "dotmap.*",
     "msal.*",
     "simple_acme_dns.*",
     "websocket.*",
 ]
 ignore_missing_imports = true
-
-[build-system]
-requires = ["poetry-core>=1.0.0"]
-build-backend = "poetry.core.masonry.api"