From 0f79dfb0008d0a5afdc10ff7becbd6288914dd22 Mon Sep 17 00:00:00 2001 From: swastik959 Date: Sun, 22 Oct 2023 21:57:23 +0530 Subject: [PATCH 01/16] modified the projects and zones function Signed-off-by: swastik959 --- src/_nebari/provider/cloud/google_cloud.py | 24 +++++++++++----------- 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/src/_nebari/provider/cloud/google_cloud.py b/src/_nebari/provider/cloud/google_cloud.py index 746bcbc7c..cc774eddb 100644 --- a/src/_nebari/provider/cloud/google_cloud.py +++ b/src/_nebari/provider/cloud/google_cloud.py @@ -3,7 +3,7 @@ import os import subprocess from typing import Dict, List - +from google.cloud import resourcemanager , compute from _nebari import constants from _nebari.provider.cloud.commons import filter_by_highest_supported_k8s_version from nebari import schema @@ -22,22 +22,22 @@ def check_credentials(): def projects() -> Dict[str, str]: """Return a dict of available projects.""" check_credentials() - output = subprocess.check_output( - ["gcloud", "projects", "list", "--format=json(name,projectId)"] - ) - data = json.loads(output) - return {_["name"]: _["projectId"] for _ in data} + client = resourcemanager.Client() + projects = client.list_projects() + project_dict = {project.name: project.project_id for project in projects} + + return project_dict + @functools.lru_cache() def regions(project: str) -> Dict[str, str]: """Return a dict of available regions.""" - check_credentials() - output = subprocess.check_output( - ["gcloud", "compute", "regions", "list", "--project", project, "--format=json"] - ) - data = json.loads(output.decode("utf-8")) - return {_["description"]: _["name"] for _ in data} + client = compute.Client() + regions = client.list_regions(project=project) + region_dict = {region.description: region.name for region in regions} + + return region_dict @functools.lru_cache() From 305157429af103ea65174674aeaf1cef1871319f Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Sun, 22 Oct 2023 16:30:44 +0000 Subject: [PATCH 02/16] [pre-commit.ci] Apply automatic pre-commit fixes --- src/_nebari/provider/cloud/google_cloud.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/_nebari/provider/cloud/google_cloud.py b/src/_nebari/provider/cloud/google_cloud.py index cc774eddb..f51eceb00 100644 --- a/src/_nebari/provider/cloud/google_cloud.py +++ b/src/_nebari/provider/cloud/google_cloud.py @@ -3,7 +3,9 @@ import os import subprocess from typing import Dict, List -from google.cloud import resourcemanager , compute + +from google.cloud import compute, resourcemanager + from _nebari import constants from _nebari.provider.cloud.commons import filter_by_highest_supported_k8s_version from nebari import schema @@ -29,7 +31,6 @@ def projects() -> Dict[str, str]: return project_dict - @functools.lru_cache() def regions(project: str) -> Dict[str, str]: """Return a dict of available regions.""" From 5d593b107c3d57871dbc690d9db1e6dea6619e73 Mon Sep 17 00:00:00 2001 From: swastik959 Date: Fri, 3 Nov 2023 11:43:48 +0530 Subject: [PATCH 03/16] modified whole code Signed-off-by: swastik959 --- pyproject.toml | 6 + src/_nebari/provider/cloud/google_cloud.py | 182 ++++++--------------- 2 files changed, 57 insertions(+), 131 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 089b262a5..87e429080 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -73,6 +73,12 @@ dependencies = [ "rich==13.5.1", "ruamel.yaml==0.17.32", "typer==0.9.0", + 
"google-cloud-resourcemanager==1.10.1", + "google-cloud-compute==4.15.0", + "google-cloud-storage==2.13.0", + "google-cloud-container==2.32.0", + "google-cloud-iam-credentials== 1.4.1", + ] [project.optional-dependencies] diff --git a/src/_nebari/provider/cloud/google_cloud.py b/src/_nebari/provider/cloud/google_cloud.py index f51eceb00..3ef1258d7 100644 --- a/src/_nebari/provider/cloud/google_cloud.py +++ b/src/_nebari/provider/cloud/google_cloud.py @@ -1,11 +1,8 @@ import functools import json import os -import subprocess from typing import Dict, List - -from google.cloud import compute, resourcemanager - +from google.cloud import resourcemanager , compute_v1 , container_v1 , storage , iam_credentials_v1 from _nebari import constants from _nebari.provider.cloud.commons import filter_by_highest_supported_k8s_version from nebari import schema @@ -34,8 +31,9 @@ def projects() -> Dict[str, str]: @functools.lru_cache() def regions(project: str) -> Dict[str, str]: """Return a dict of available regions.""" - client = compute.Client() - regions = client.list_regions(project=project) + client = compute_v1.RegionClient() + request = compute_v1.ListRegionsRequest(project="project_value",) + regions = client.list(request=request) region_dict = {region.description: region.name for region in regions} return region_dict @@ -45,107 +43,57 @@ def regions(project: str) -> Dict[str, str]: def zones(project: str, region: str) -> Dict[str, str]: """Return a dict of available zones.""" check_credentials() - output = subprocess.check_output( - ["gcloud", "compute", "zones", "list", "--project", project, "--format=json"] - ) - data = json.loads(output.decode("utf-8")) - return {_["description"]: _["name"] for _ in data if _["name"].startswith(region)} + client = compute_v1.ZonesClient() + request = compute_v1.ListZonesRequest(project="project_value",) + zones = client.list(request=request) + zone_dict = {zone.description: zone.name for zone in zones if zone.name.startswith(region)} + return zone_dict @functools.lru_cache() def kubernetes_versions(region: str) -> List[str]: """Return list of available kubernetes supported by cloud provider. 
Sorted from oldest to latest.""" check_credentials() - output = subprocess.check_output( - [ - "gcloud", - "container", - "get-server-config", - "--region", - region, - "--format=json", - ] - ) - data = json.loads(output.decode("utf-8")) - supported_kubernetes_versions = sorted([_ for _ in data["validMasterVersions"]]) - return filter_by_highest_supported_k8s_version(supported_kubernetes_versions) + client = container_v1.ClusterManagerClient() + request = container_v1.GetServerConfigRequest() + response = client.get_server_config(request=request) + supported_kubernetes_versions = sorted(response.valid_master_versions) + filtered_versions = filter_by_highest_supported_k8s_version(supported_kubernetes_versions) + return filtered_versions + @functools.lru_cache() -def instances(project: str) -> Dict[str, str]: - """Return a dict of available instances.""" +def instances(project: str , zone: str) -> Dict[str, str]: + """Return a dict of available instances of a particular zone.""" check_credentials() - output = subprocess.check_output( - [ - "gcloud", - "compute", - "machine-types", - "list", - "--project", - project, - "--format=json", - ] - ) - data = json.loads(output.decode("utf-8")) - return {_["description"]: _["name"] for _ in data} + client = compute_v1.InstancesClient() + request = compute_v1.ListInstancesRequest(project="project",zone="zone",) + instances = client.list(request=request) + instance_dict = {instances.description: instances.name for instance in instances} + return instance_dict -def cluster_exists(cluster_name: str, project_id: str, region: str) -> bool: +def cluster_exists(cluster_name: str, project_id: str, zone: str) -> bool: """Check if a GKE cluster exists.""" - try: - subprocess.check_output( - [ - "gcloud", - "container", - "clusters", - "describe", - cluster_name, - "--project", - project_id, - "--region", - region, - ] - ) - return True - except subprocess.CalledProcessError: - return False + client = container_v1.ClusterManagerClient() + request = container_v1.GetClusterRequest() + response = client.get_cluster(request=request, project_id= project_id, zone=zone) + return response is not None def bucket_exists(bucket_name: str, project_id: str) -> bool: """Check if a storage bucket exists.""" - try: - print(f"Checking if bucket {bucket_name} exists in project {project_id}.") - subprocess.check_output( - [ - "gsutil", - "ls", - f"gs://{bucket_name}/", - "-p", - project_id, - ] - ) - return True - except subprocess.CalledProcessError: - return False + client = storage.Client(project=project_id) + bucket = client.get_bucket(bucket_name) + return bucket is not None def service_account_exists(service_account_name: str, project_id: str) -> bool: """Check if a service account exists.""" - try: - subprocess.check_output( - [ - "gcloud", - "iam", - "service-accounts", - "describe", - service_account_name, - "--project", - project_id, - ] - ) - return True - except subprocess.CalledProcessError: - return False + client = iam_credentials_v1.IAMCredentialsClient() + service_acc = client.service_account_path(project_id , service_account_name) + return service_acc is not None def delete_cluster(cluster_name: str, project_id: str, region: str): @@ -158,25 +106,15 @@ def delete_cluster(cluster_name: str, project_id: str, region: str): ) return + client = container_v1.ClusterManagerClient() + request = client.DeleteClusterRequest() try: - subprocess.check_call( - [ - "gcloud", - "container", - "clusters", - "delete", - cluster_name, - "--project", - project_id, - "--region", - 
region, - "--quiet", - ] - ) - print(f"Successfully deleted cluster {cluster_name}.") - except subprocess.CalledProcessError as e: - print(f"Failed to delete cluster {cluster_name}. Error: {e}") - + operation = client.delete_cluster(request=request) + except google.api_core.exceptions.GoogleAPICallError as e: + if e.status_code == 200: + print("Cluster deleted successfully!") + else: + print("error deleting cluster!") def delete_storage_bucket(bucket_name: str, project_id: str): """Delete a storage bucket if it exists.""" @@ -188,23 +126,14 @@ def delete_storage_bucket(bucket_name: str, project_id: str): ) return + client = storage.Client(project=project_id) + bucket = client.get_bucket(bucket_name) try: - subprocess.check_call( - [ - "gsutil", - "-m", - "rm", - "-r", - f"gs://{bucket_name}", - "-p", - project_id, - ] - ) + bucket.delete() print(f"Successfully deleted bucket {bucket_name}.") - except subprocess.CalledProcessError as e: + except storage.exceptions.BucketNotFoundError as e: print(f"Failed to delete bucket {bucket_name}. Error: {e}") - def delete_service_account(service_account_name: str, project_id: str): """Delete a service account if it exists.""" check_credentials() @@ -214,23 +143,14 @@ def delete_service_account(service_account_name: str, project_id: str): f"Service account {service_account_name} does not exist in project {project_id}. Exiting gracefully." ) return - + client = iam_credentials_v1.IAMCredentialsClient() + service_acc = client.service_account_path(project_id , service_account_name) try: - subprocess.check_call( - [ - "gcloud", - "iam", - "service-accounts", - "delete", - service_account_name, - "--quiet", - "--project", - project_id, - ] - ) + client.delete_service_account(service_account_name) print(f"Successfully deleted service account {service_account_name}.") - except subprocess.CalledProcessError as e: + except iam_credentials_v1.exceptions.IamServiceAccountNotFoundError as e: print(f"Failed to delete service account {service_account_name}. 
Error: {e}") + def gcp_cleanup(config: schema.Main): From 53ca190f1ea025bc79d64f938a95a09d4a10786a Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 3 Nov 2023 06:14:26 +0000 Subject: [PATCH 04/16] [pre-commit.ci] Apply automatic pre-commit fixes --- pyproject.toml | 2 +- src/_nebari/provider/cloud/google_cloud.py | 55 +++++++++++++++------- 2 files changed, 38 insertions(+), 19 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 87e429080..5c20b6e61 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -78,7 +78,7 @@ dependencies = [ "google-cloud-storage==2.13.0", "google-cloud-container==2.32.0", "google-cloud-iam-credentials== 1.4.1", - + ] [project.optional-dependencies] diff --git a/src/_nebari/provider/cloud/google_cloud.py b/src/_nebari/provider/cloud/google_cloud.py index 3ef1258d7..69ea96158 100644 --- a/src/_nebari/provider/cloud/google_cloud.py +++ b/src/_nebari/provider/cloud/google_cloud.py @@ -1,8 +1,15 @@ import functools -import json import os from typing import Dict, List -from google.cloud import resourcemanager , compute_v1 , container_v1 , storage , iam_credentials_v1 + +from google.cloud import ( + compute_v1, + container_v1, + iam_credentials_v1, + resourcemanager, + storage, +) + from _nebari import constants from _nebari.provider.cloud.commons import filter_by_highest_supported_k8s_version from nebari import schema @@ -32,7 +39,9 @@ def projects() -> Dict[str, str]: def regions(project: str) -> Dict[str, str]: """Return a dict of available regions.""" client = compute_v1.RegionClient() - request = compute_v1.ListRegionsRequest(project="project_value",) + request = compute_v1.ListRegionsRequest( + project="project_value", + ) regions = client.list(request=request) region_dict = {region.description: region.name for region in regions} @@ -44,9 +53,13 @@ def zones(project: str, region: str) -> Dict[str, str]: """Return a dict of available zones.""" check_credentials() client = compute_v1.ZonesClient() - request = compute_v1.ListZonesRequest(project="project_value",) + request = compute_v1.ListZonesRequest( + project="project_value", + ) zones = client.list(request=request) - zone_dict = {zone.description: zone.name for zone in zones if zone.name.startswith(region)} + zone_dict = { + zone.description: zone.name for zone in zones if zone.name.startswith(region) + } return zone_dict @@ -58,17 +71,21 @@ def kubernetes_versions(region: str) -> List[str]: request = container_v1.GetServerConfigRequest() response = client.get_server_config(request=request) supported_kubernetes_versions = sorted(response.valid_master_versions) - filtered_versions = filter_by_highest_supported_k8s_version(supported_kubernetes_versions) + filtered_versions = filter_by_highest_supported_k8s_version( + supported_kubernetes_versions + ) return filtered_versions - @functools.lru_cache() -def instances(project: str , zone: str) -> Dict[str, str]: +def instances(project: str, zone: str) -> Dict[str, str]: """Return a dict of available instances of a particular zone.""" check_credentials() client = compute_v1.InstancesClient() - request = compute_v1.ListInstancesRequest(project="project",zone="zone",) + request = compute_v1.ListInstancesRequest( + project="project", + zone="zone", + ) instances = client.list(request=request) instance_dict = {instances.description: instances.name for instance in instances} return instance_dict @@ -78,10 +95,11 @@ def cluster_exists(cluster_name: str, project_id: str, zone: str) -> bool: 
"""Check if a GKE cluster exists.""" client = container_v1.ClusterManagerClient() request = container_v1.GetClusterRequest() - response = client.get_cluster(request=request, project_id= project_id, zone=zone) + response = client.get_cluster(request=request, project_id=project_id, zone=zone) return response is not None + def bucket_exists(bucket_name: str, project_id: str) -> bool: """Check if a storage bucket exists.""" client = storage.Client(project=project_id) @@ -92,7 +110,7 @@ def bucket_exists(bucket_name: str, project_id: str) -> bool: def service_account_exists(service_account_name: str, project_id: str) -> bool: """Check if a service account exists.""" client = iam_credentials_v1.IAMCredentialsClient() - service_acc = client.service_account_path(project_id , service_account_name) + service_acc = client.service_account_path(project_id, service_account_name) return service_acc is not None @@ -109,12 +127,13 @@ def delete_cluster(cluster_name: str, project_id: str, region: str): client = container_v1.ClusterManagerClient() request = client.DeleteClusterRequest() try: - operation = client.delete_cluster(request=request) + client.delete_cluster(request=request) except google.api_core.exceptions.GoogleAPICallError as e: - if e.status_code == 200: - print("Cluster deleted successfully!") - else: - print("error deleting cluster!") + if e.status_code == 200: + print("Cluster deleted successfully!") + else: + print("error deleting cluster!") + def delete_storage_bucket(bucket_name: str, project_id: str): """Delete a storage bucket if it exists.""" @@ -134,6 +153,7 @@ def delete_storage_bucket(bucket_name: str, project_id: str): except storage.exceptions.BucketNotFoundError as e: print(f"Failed to delete bucket {bucket_name}. Error: {e}") + def delete_service_account(service_account_name: str, project_id: str): """Delete a service account if it exists.""" check_credentials() @@ -144,13 +164,12 @@ def delete_service_account(service_account_name: str, project_id: str): ) return client = iam_credentials_v1.IAMCredentialsClient() - service_acc = client.service_account_path(project_id , service_account_name) + client.service_account_path(project_id, service_account_name) try: client.delete_service_account(service_account_name) print(f"Successfully deleted service account {service_account_name}.") except iam_credentials_v1.exceptions.IamServiceAccountNotFoundError as e: print(f"Failed to delete service account {service_account_name}. 
Error: {e}") - def gcp_cleanup(config: schema.Main): From 7d319b54f67ce5bb320f0d8d144c33fd32b8572c Mon Sep 17 00:00:00 2001 From: Marcelo Villa Date: Fri, 26 Jul 2024 10:34:07 -0500 Subject: [PATCH 05/16] Fix google dependency versions --- pyproject.toml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 26ada3a3b..72681f25f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -60,6 +60,11 @@ dependencies = [ "bcrypt==4.0.1", "boto3==1.34.63", "cloudflare==2.11.7", + "google-cloud-resource-manager==1.12.4", + "google-cloud-compute==1.19.1", + "google-cloud-storage==2.18.0", + "google-cloud-container==2.49.0", + "google-cloud-iam==2.15.1", "kubernetes==27.2.0", "pluggy==1.3.0", "prompt-toolkit==3.0.36", @@ -71,11 +76,6 @@ dependencies = [ "rich==13.5.1", "ruamel.yaml==0.18.6", "typer==0.9.0", - "google-cloud-resourcemanager==1.10.1", - "google-cloud-compute==4.15.0", - "google-cloud-storage==2.13.0", - "google-cloud-container==2.32.0", - "google-cloud-iam-credentials== 1.4.1", "packaging==23.2", "typing-extensions==4.11.0", ] From 1088d9440b5c6c6775afbf21e688b5707c62c4e1 Mon Sep 17 00:00:00 2001 From: Marcelo Villa Date: Fri, 2 Aug 2024 11:54:47 -0500 Subject: [PATCH 06/16] Remove unused functions and fix calls to the Google Python APIs --- pyproject.toml | 3 +- src/_nebari/initialize.py | 3 +- src/_nebari/provider/cloud/google_cloud.py | 131 ++---------------- src/_nebari/stages/infrastructure/__init__.py | 7 +- src/_nebari/subcommands/init.py | 4 +- tests/tests_unit/test_provider.py | 54 -------- 6 files changed, 21 insertions(+), 181 deletions(-) delete mode 100644 tests/tests_unit/test_provider.py diff --git a/pyproject.toml b/pyproject.toml index 72681f25f..7661d2cc5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -60,11 +60,10 @@ dependencies = [ "bcrypt==4.0.1", "boto3==1.34.63", "cloudflare==2.11.7", - "google-cloud-resource-manager==1.12.4", "google-cloud-compute==1.19.1", - "google-cloud-storage==2.18.0", "google-cloud-container==2.49.0", "google-cloud-iam==2.15.1", + "google-cloud-storage==2.18.0", "kubernetes==27.2.0", "pluggy==1.3.0", "prompt-toolkit==3.0.36", diff --git a/src/_nebari/initialize.py b/src/_nebari/initialize.py index df693ca8f..753eb3da4 100644 --- a/src/_nebari/initialize.py +++ b/src/_nebari/initialize.py @@ -133,9 +133,10 @@ def render_config( ] = f"{WELCOME_HEADER_TEXT} on Digital Ocean" elif cloud_provider == ProviderEnum.gcp: + gcp_project = os.environ.get("PROJECT_ID") gcp_region = region or constants.GCP_DEFAULT_REGION gcp_kubernetes_version = kubernetes_version or get_latest_kubernetes_version( - google_cloud.kubernetes_versions(gcp_region) + google_cloud.kubernetes_versions(gcp_project, gcp_region) ) config["google_cloud_platform"] = { "kubernetes_version": gcp_kubernetes_version, diff --git a/src/_nebari/provider/cloud/google_cloud.py b/src/_nebari/provider/cloud/google_cloud.py index f40ce77db..e7ec9bf74 100644 --- a/src/_nebari/provider/cloud/google_cloud.py +++ b/src/_nebari/provider/cloud/google_cloud.py @@ -1,15 +1,7 @@ import functools -import json -import subprocess -from typing import Dict, List, Set - -from google.cloud import ( - compute_v1, - container_v1, - iam_credentials_v1, - resourcemanager, - storage, -) +from typing import List, Set + +from google.cloud import compute_v1, container_v1, iam_credentials_v1, storage from _nebari.constants import GCP_ENV_DOCS from _nebari.provider.cloud.commons import filter_by_highest_supported_k8s_version @@ -23,86 +15,24 @@ def 
check_credentials() -> None: @functools.lru_cache() -def projects() -> Dict[str, str]: - """Return a dict of available projects.""" - check_credentials() - client = resourcemanager.Client() - projects = client.list_projects() - project_dict = {project.name: project.project_id for project in projects} - - return project_dict - - -@functools.lru_cache() -def regions(project: str) -> Dict[str, str]: +def regions(project: str) -> Set[str]: """Return a dict of available regions.""" - client = compute_v1.RegionClient() - request = compute_v1.ListRegionsRequest( - project="project_value", - ) - regions = client.list(request=request) - region_dict = {region.description: region.name for region in regions} - - return region_dict - - -@functools.lru_cache() -def zones(project: str, region: str) -> Dict[str, str]: - """Return a dict of available zones.""" check_credentials() - client = compute_v1.ZonesClient() - request = compute_v1.ListZonesRequest( - project="project_value", - ) - zones = client.list(request=request) - zone_dict = { - zone.description: zone.name for zone in zones if zone.name.startswith(region) - } - return zone_dict + client = compute_v1.RegionsClient() + response = client.list(project=project) + + return {region.name for region in response} @functools.lru_cache() -def kubernetes_versions(region: str) -> List[str]: +def kubernetes_versions(project: str, region: str) -> List[str]: """Return list of available kubernetes supported by cloud provider. Sorted from oldest to latest.""" check_credentials() client = container_v1.ClusterManagerClient() - request = container_v1.GetServerConfigRequest() - response = client.get_server_config(request=request) - supported_kubernetes_versions = sorted(response.valid_master_versions) - filtered_versions = filter_by_highest_supported_k8s_version( - supported_kubernetes_versions - ) - return filtered_versions - - -@functools.lru_cache() -def instances(project: str, zone: str) -> Dict[str, str]: - """Return a dict of available instances of a particular zone.""" - check_credentials() - client = compute_v1.InstancesClient() - request = compute_v1.ListInstancesRequest( - project="project", - zone="zone", - ) - instances = client.list(request=request) - instance_dict = {instances.description: instances.name for instance in instances} - return instance_dict - + response = client.get_server_config(name=f"projects/{project}/locations/{region}") + supported_kubernetes_versions = response.valid_master_versions -def activated_services() -> Set[str]: - """Return a list of activated services.""" - check_credentials() - output = subprocess.check_output( - [ - "gcloud", - "services", - "list", - "--enabled", - "--format=json(config.title)", - ] - ) - data = json.loads(output) - return {service["config"]["title"] for service in data} + return filter_by_highest_supported_k8s_version(supported_kubernetes_versions) def cluster_exists(cluster_name: str, project_id: str, zone: str) -> bool: @@ -202,40 +132,3 @@ def gcp_cleanup(config: schema.Main): delete_cluster(cluster_name, project_id, region) delete_storage_bucket(bucket_name, project_id) delete_service_account(service_account_name, project_id) - - -def check_missing_service() -> None: - """Check if all required services are activated.""" - required = { - "Compute Engine API", - "Kubernetes Engine API", - "Cloud Monitoring API", - "Cloud Autoscaling API", - "Identity and Access Management (IAM) API", - "Cloud Resource Manager API", - } - activated = activated_services() - common = required.intersection(activated) - 
missing = required.difference(common) - if missing: - raise ValueError( - f"""Missing required services: {missing}\n - Please see the documentation for more information: {GCP_ENV_DOCS}""" - ) - - -# Getting pricing data could come from here -# https://cloudpricingcalculator.appspot.com/static/data/pricelist.json - - -### PYDANTIC VALIDATORS ### - - -def validate_region(region: str) -> str: - """Validate the GCP region is valid.""" - available_regions = regions() - if region not in available_regions: - raise ValueError( - f"Region {region} is not one of available regions {available_regions}" - ) - return region diff --git a/src/_nebari/stages/infrastructure/__init__.py b/src/_nebari/stages/infrastructure/__init__.py index 8b188a720..125dc65ed 100644 --- a/src/_nebari/stages/infrastructure/__init__.py +++ b/src/_nebari/stages/infrastructure/__init__.py @@ -339,14 +339,15 @@ class GoogleCloudPlatformProvider(schema.Base): @model_validator(mode="before") @classmethod def _check_input(cls, data: Any) -> Any: - google_cloud.check_credentials() - avaliable_regions = google_cloud.regions() + avaliable_regions = google_cloud.regions(data["project"]) if data["region"] not in avaliable_regions: raise ValueError( f"Google Cloud region={data['region']} is not one of {avaliable_regions}" ) - available_kubernetes_versions = google_cloud.kubernetes_versions(data["region"]) + available_kubernetes_versions = google_cloud.kubernetes_versions( + data["project"], data["region"] + ) print(available_kubernetes_versions) if data["kubernetes_version"] not in available_kubernetes_versions: raise ValueError( diff --git a/src/_nebari/subcommands/init.py b/src/_nebari/subcommands/init.py index 8c3de6d5b..81d9103f1 100644 --- a/src/_nebari/subcommands/init.py +++ b/src/_nebari/subcommands/init.py @@ -406,7 +406,7 @@ def check_cloud_provider_kubernetes_version( f"Invalid Kubernetes version `{kubernetes_version}`. Please refer to the Azure docs for a list of valid versions: {versions}" ) elif cloud_provider == ProviderEnum.gcp.value.lower(): - versions = google_cloud.kubernetes_versions(region) + versions = google_cloud.kubernetes_versions(os.getenv("PROJECT_ID"), region) if not kubernetes_version or kubernetes_version == LATEST: kubernetes_version = get_latest_kubernetes_version(versions) @@ -458,7 +458,7 @@ def check_cloud_provider_region(region: str, cloud_provider: str) -> str: if not region: region = GCP_DEFAULT_REGION rich.print(DEFAULT_REGION_MSG.format(region=region)) - if region not in google_cloud.regions(): + if region not in google_cloud.regions(os.getenv("PROJECT_ID")): raise ValueError( f"Invalid region `{region}`. 
Please refer to the GCP docs for a list of valid regions: {GCP_REGIONS}" ) diff --git a/tests/tests_unit/test_provider.py b/tests/tests_unit/test_provider.py deleted file mode 100644 index 3c4f35a1d..000000000 --- a/tests/tests_unit/test_provider.py +++ /dev/null @@ -1,54 +0,0 @@ -from contextlib import nullcontext - -import pytest - -from _nebari.provider.cloud.google_cloud import check_missing_service - - -@pytest.mark.parametrize( - "activated_services, exception", - [ - ( - { - "Compute Engine API", - "Kubernetes Engine API", - "Cloud Monitoring API", - "Cloud Autoscaling API", - "Identity and Access Management (IAM) API", - "Cloud Resource Manager API", - }, - nullcontext(), - ), - ( - { - "Compute Engine API", - "Kubernetes Engine API", - "Cloud Monitoring API", - "Cloud Autoscaling API", - "Identity and Access Management (IAM) API", - "Cloud Resource Manager API", - "Cloud SQL Admin API", - }, - nullcontext(), - ), - ( - { - "Compute Engine API", - "Kubernetes Engine API", - "Cloud Monitoring API", - "Cloud Autoscaling API", - "Cloud SQL Admin API", - }, - pytest.raises(ValueError, match=r"Missing required services:.*"), - ), - ], -) -def test_gcp_missing_service(monkeypatch, activated_services, exception): - def mock_return(): - return activated_services - - monkeypatch.setattr( - "_nebari.provider.cloud.google_cloud.activated_services", mock_return - ) - with exception: - check_missing_service() From 97677ec590f23302152370991384f4750206efb5 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 2 Aug 2024 17:22:10 +0000 Subject: [PATCH 07/16] [pre-commit.ci] Apply automatic pre-commit fixes --- src/_nebari/stages/infrastructure/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/_nebari/stages/infrastructure/__init__.py b/src/_nebari/stages/infrastructure/__init__.py index 6230d40cd..0e8d525ab 100644 --- a/src/_nebari/stages/infrastructure/__init__.py +++ b/src/_nebari/stages/infrastructure/__init__.py @@ -339,8 +339,8 @@ class GoogleCloudPlatformProvider(schema.Base): @model_validator(mode="before") @classmethod def _check_input(cls, data: Any) -> Any: - avaliable_regions = google_cloud.regions(data["project"]) - if data["region"] not in avaliable_regions: + available_regions = google_cloud.regions(data["project"]) + if data["region"] not in available_regions: raise ValueError( f"Google Cloud region={data['region']} is not one of {available_regions}" ) From 6f3d813cda967211f9c4524e34cc2869f8adf28d Mon Sep 17 00:00:00 2001 From: Marcelo Villa Date: Mon, 5 Aug 2024 08:45:25 -0500 Subject: [PATCH 08/16] Add grpc-google-iam-v1 --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index 7661d2cc5..e7c9b0547 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -64,6 +64,7 @@ dependencies = [ "google-cloud-container==2.49.0", "google-cloud-iam==2.15.1", "google-cloud-storage==2.18.0", + "grpc-google-iam-v1==0.13.1", "kubernetes==27.2.0", "pluggy==1.3.0", "prompt-toolkit==3.0.36", From 4daa37778986de80bd56524f4f7eb597dae64ebd Mon Sep 17 00:00:00 2001 From: Marcelo Villa Date: Mon, 5 Aug 2024 11:27:07 -0500 Subject: [PATCH 09/16] Load credentials explictly from file and get project ID from environement variable in provider functions --- src/_nebari/initialize.py | 3 +- src/_nebari/provider/cloud/google_cloud.py | 30 ++++++++++++++----- src/_nebari/stages/infrastructure/__init__.py | 6 ++-- src/_nebari/subcommands/init.py | 4 +-- 4 files changed, 27 
insertions(+), 16 deletions(-) diff --git a/src/_nebari/initialize.py b/src/_nebari/initialize.py index 753eb3da4..df693ca8f 100644 --- a/src/_nebari/initialize.py +++ b/src/_nebari/initialize.py @@ -133,10 +133,9 @@ def render_config( ] = f"{WELCOME_HEADER_TEXT} on Digital Ocean" elif cloud_provider == ProviderEnum.gcp: - gcp_project = os.environ.get("PROJECT_ID") gcp_region = region or constants.GCP_DEFAULT_REGION gcp_kubernetes_version = kubernetes_version or get_latest_kubernetes_version( - google_cloud.kubernetes_versions(gcp_project, gcp_region) + google_cloud.kubernetes_versions(gcp_region) ) config["google_cloud_platform"] = { "kubernetes_version": gcp_kubernetes_version, diff --git a/src/_nebari/provider/cloud/google_cloud.py b/src/_nebari/provider/cloud/google_cloud.py index e7ec9bf74..4226568f0 100644 --- a/src/_nebari/provider/cloud/google_cloud.py +++ b/src/_nebari/provider/cloud/google_cloud.py @@ -1,6 +1,8 @@ import functools +import os from typing import List, Set +from google.auth import load_credentials_from_file from google.cloud import compute_v1, container_v1, iam_credentials_v1, storage from _nebari.constants import GCP_ENV_DOCS @@ -15,20 +17,31 @@ def check_credentials() -> None: @functools.lru_cache() -def regions(project: str) -> Set[str]: - """Return a dict of available regions.""" +def load_credentials(): check_credentials() - client = compute_v1.RegionsClient() + credentials_file_path = os.environ["GOOGLE_CREDENTIALS"] + credentials = load_credentials_from_file(credentials_file_path) + + return credentials + + +@functools.lru_cache() +def regions() -> Set[str]: + """Return a dict of available regions.""" + credentials = load_credentials() + project = os.environ["PROJECT_ID"] + client = compute_v1.RegionsClient(credentials=credentials) response = client.list(project=project) return {region.name for region in response} @functools.lru_cache() -def kubernetes_versions(project: str, region: str) -> List[str]: +def kubernetes_versions(region: str) -> List[str]: """Return list of available kubernetes supported by cloud provider. 
Sorted from oldest to latest.""" - check_credentials() - client = container_v1.ClusterManagerClient() + credentials = load_credentials() + project = os.environ["PROJECT_ID"] + client = container_v1.ClusterManagerClient(credentials=credentials) response = client.get_server_config(name=f"projects/{project}/locations/{region}") supported_kubernetes_versions = response.valid_master_versions @@ -37,8 +50,9 @@ def kubernetes_versions(project: str, region: str) -> List[str]: def cluster_exists(cluster_name: str, project_id: str, zone: str) -> bool: """Check if a GKE cluster exists.""" - client = container_v1.ClusterManagerClient() - request = container_v1.GetClusterRequest() + credentials = load_credentials() + client = container_v1.ClusterManagerClient(credentials=credentials) + request = container_v1.GetClusterRequest(credentials=credentials) response = client.get_cluster(request=request, project_id=project_id, zone=zone) return response is not None diff --git a/src/_nebari/stages/infrastructure/__init__.py b/src/_nebari/stages/infrastructure/__init__.py index 0e8d525ab..08bea316e 100644 --- a/src/_nebari/stages/infrastructure/__init__.py +++ b/src/_nebari/stages/infrastructure/__init__.py @@ -339,15 +339,13 @@ class GoogleCloudPlatformProvider(schema.Base): @model_validator(mode="before") @classmethod def _check_input(cls, data: Any) -> Any: - available_regions = google_cloud.regions(data["project"]) + available_regions = google_cloud.regions() if data["region"] not in available_regions: raise ValueError( f"Google Cloud region={data['region']} is not one of {available_regions}" ) - available_kubernetes_versions = google_cloud.kubernetes_versions( - data["project"], data["region"] - ) + available_kubernetes_versions = google_cloud.kubernetes_versions(data["region"]) print(available_kubernetes_versions) if data["kubernetes_version"] not in available_kubernetes_versions: raise ValueError( diff --git a/src/_nebari/subcommands/init.py b/src/_nebari/subcommands/init.py index a9b7f56bb..4738dbcd3 100644 --- a/src/_nebari/subcommands/init.py +++ b/src/_nebari/subcommands/init.py @@ -406,7 +406,7 @@ def check_cloud_provider_kubernetes_version( f"Invalid Kubernetes version `{kubernetes_version}`. Please refer to the Azure docs for a list of valid versions: {versions}" ) elif cloud_provider == ProviderEnum.gcp.value.lower(): - versions = google_cloud.kubernetes_versions(os.getenv("PROJECT_ID"), region) + versions = google_cloud.kubernetes_versions(region) if not kubernetes_version or kubernetes_version == LATEST: kubernetes_version = get_latest_kubernetes_version(versions) @@ -458,7 +458,7 @@ def check_cloud_provider_region(region: str, cloud_provider: str) -> str: if not region: region = GCP_DEFAULT_REGION rich.print(DEFAULT_REGION_MSG.format(region=region)) - if region not in google_cloud.regions(os.getenv("PROJECT_ID")): + if region not in google_cloud.regions(): raise ValueError( f"Invalid region `{region}`. 
Please refer to the GCP docs for a list of valid regions: {GCP_REGIONS}" ) From ab7f216608e1f86ec277a40811c0f15d5fbfc736 Mon Sep 17 00:00:00 2001 From: Marcelo Villa Date: Mon, 5 Aug 2024 18:08:34 -0500 Subject: [PATCH 10/16] Check if credentials are a file or not before reading them --- src/_nebari/provider/cloud/google_cloud.py | 32 +++++++++++++--------- 1 file changed, 19 insertions(+), 13 deletions(-) diff --git a/src/_nebari/provider/cloud/google_cloud.py b/src/_nebari/provider/cloud/google_cloud.py index 4226568f0..106adef8c 100644 --- a/src/_nebari/provider/cloud/google_cloud.py +++ b/src/_nebari/provider/cloud/google_cloud.py @@ -1,8 +1,10 @@ import functools +import json import os +from pathlib import Path from typing import List, Set -from google.auth import load_credentials_from_file +from google.auth import load_credentials_from_dict, load_credentials_from_file from google.cloud import compute_v1, container_v1, iam_credentials_v1, storage from _nebari.constants import GCP_ENV_DOCS @@ -19,19 +21,22 @@ def check_credentials() -> None: @functools.lru_cache() def load_credentials(): check_credentials() - credentials_file_path = os.environ["GOOGLE_CREDENTIALS"] - credentials = load_credentials_from_file(credentials_file_path) - - return credentials + credentials = os.environ["GOOGLE_CREDENTIALS"] + # Google credentials are stored as strings in GHA secrets so we need + # to determine if the credentials are stored as a file or not before + # reading them + if Path(credentials).is_file(): + return load_credentials_from_file(credentials) + else: + return load_credentials_from_dict(json.loads(credentials)) @functools.lru_cache() def regions() -> Set[str]: """Return a dict of available regions.""" - credentials = load_credentials() - project = os.environ["PROJECT_ID"] + credentials, project_id = load_credentials() client = compute_v1.RegionsClient(credentials=credentials) - response = client.list(project=project) + response = client.list(project=project_id) return {region.name for region in response} @@ -39,18 +44,19 @@ def regions() -> Set[str]: @functools.lru_cache() def kubernetes_versions(region: str) -> List[str]: """Return list of available kubernetes supported by cloud provider. 
Sorted from oldest to latest.""" - credentials = load_credentials() - project = os.environ["PROJECT_ID"] + credentials, project_id = load_credentials() client = container_v1.ClusterManagerClient(credentials=credentials) - response = client.get_server_config(name=f"projects/{project}/locations/{region}") + response = client.get_server_config( + name=f"projects/{project_id}/locations/{region}" + ) supported_kubernetes_versions = response.valid_master_versions return filter_by_highest_supported_k8s_version(supported_kubernetes_versions) -def cluster_exists(cluster_name: str, project_id: str, zone: str) -> bool: +def cluster_exists(cluster_name: str, zone: str) -> bool: """Check if a GKE cluster exists.""" - credentials = load_credentials() + credentials, project_id = load_credentials() client = container_v1.ClusterManagerClient(credentials=credentials) request = container_v1.GetClusterRequest(credentials=credentials) response = client.get_cluster(request=request, project_id=project_id, zone=zone) From 4ac39114f74c6b926ee098c71b717e510c2714cf Mon Sep 17 00:00:00 2001 From: Marcelo Villa Date: Mon, 5 Aug 2024 18:08:48 -0500 Subject: [PATCH 11/16] Remove gcloud step --- src/_nebari/provider/cicd/github.py | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/src/_nebari/provider/cicd/github.py b/src/_nebari/provider/cicd/github.py index 2563af6ad..0c9003ecd 100644 --- a/src/_nebari/provider/cicd/github.py +++ b/src/_nebari/provider/cicd/github.py @@ -201,16 +201,6 @@ def setup_python_step(): ) -def setup_gcloud(): - return GHA_job_step( - name="Setup gcloud", - uses="google-github-actions/auth@v1", - with_={ - "credentials_json": "${{ secrets.GOOGLE_CREDENTIALS }}", - }, - ) - - def install_nebari_step(nebari_version): return GHA_job_step(name="Install Nebari", run=pip_install_nebari(nebari_version)) @@ -226,9 +216,6 @@ def gen_nebari_ops(config): step3 = install_nebari_step(config.nebari_version) gha_steps = [step1, step2, step3] - if config.provider == schema.ProviderEnum.gcp: - gha_steps.append(setup_gcloud()) - for step in config.ci_cd.before_script: gha_steps.append(GHA_job_step(**step)) From 8a47dcd3f809aa9ae63117bf0abbeeb4fed16e96 Mon Sep 17 00:00:00 2001 From: Marcelo Villa Date: Tue, 6 Aug 2024 16:35:41 -0500 Subject: [PATCH 12/16] Add google-auth as an explicit dependency --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index fd09e62eb..6d129fd49 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -60,6 +60,7 @@ dependencies = [ "bcrypt==4.0.1", "boto3==1.34.63", "cloudflare==2.11.7", + "google-auth=2.31.0", "google-cloud-compute==1.19.1", "google-cloud-container==2.49.0", "google-cloud-iam==2.15.1", From 7be8fff1e521e5998e359344ee704af6bf44c4a6 Mon Sep 17 00:00:00 2001 From: Marcelo Villa Date: Tue, 6 Aug 2024 16:36:23 -0500 Subject: [PATCH 13/16] Use string ending instead of Path.isfile to check whether env var is supposed to be a file --- src/_nebari/provider/cloud/google_cloud.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/_nebari/provider/cloud/google_cloud.py b/src/_nebari/provider/cloud/google_cloud.py index 106adef8c..8681ab945 100644 --- a/src/_nebari/provider/cloud/google_cloud.py +++ b/src/_nebari/provider/cloud/google_cloud.py @@ -1,7 +1,6 @@ import functools import json import os -from pathlib import Path from typing import List, Set from google.auth import load_credentials_from_dict, load_credentials_from_file @@ -25,7 +24,7 @@ def load_credentials(): # Google credentials are stored 
as strings in GHA secrets so we need # to determine if the credentials are stored as a file or not before # reading them - if Path(credentials).is_file(): + if credentials.endswith(".json"): return load_credentials_from_file(credentials) else: return load_credentials_from_dict(json.loads(credentials)) From bdddbe24a1f97396c72dd1028ced3b1bba7bd48f Mon Sep 17 00:00:00 2001 From: Marcelo Villa Date: Tue, 6 Aug 2024 16:39:19 -0500 Subject: [PATCH 14/16] Fix dependency version specifier --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 6d129fd49..177d046a1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -60,7 +60,7 @@ dependencies = [ "bcrypt==4.0.1", "boto3==1.34.63", "cloudflare==2.11.7", - "google-auth=2.31.0", + "google-auth==2.31.0", "google-cloud-compute==1.19.1", "google-cloud-container==2.49.0", "google-cloud-iam==2.15.1", From 633889960894a88f92846404abb24ccdc1b9d657 Mon Sep 17 00:00:00 2001 From: Marcelo Villa Date: Wed, 7 Aug 2024 18:06:43 -0500 Subject: [PATCH 15/16] Fix cleanup functions. --- src/_nebari/provider/cloud/google_cloud.py | 97 +++++++++++++--------- 1 file changed, 56 insertions(+), 41 deletions(-) diff --git a/src/_nebari/provider/cloud/google_cloud.py b/src/_nebari/provider/cloud/google_cloud.py index 8681ab945..bbd98c040 100644 --- a/src/_nebari/provider/cloud/google_cloud.py +++ b/src/_nebari/provider/cloud/google_cloud.py @@ -3,8 +3,9 @@ import os from typing import List, Set +import google.api_core.exceptions from google.auth import load_credentials_from_dict, load_credentials_from_file -from google.cloud import compute_v1, container_v1, iam_credentials_v1, storage +from google.cloud import compute_v1, container_v1, iam_admin_v1, storage from _nebari.constants import GCP_ENV_DOCS from _nebari.provider.cloud.commons import filter_by_highest_supported_k8s_version @@ -53,85 +54,99 @@ def kubernetes_versions(region: str) -> List[str]: return filter_by_highest_supported_k8s_version(supported_kubernetes_versions) -def cluster_exists(cluster_name: str, zone: str) -> bool: +def cluster_exists(cluster_name: str, region: str) -> bool: """Check if a GKE cluster exists.""" credentials, project_id = load_credentials() client = container_v1.ClusterManagerClient(credentials=credentials) - request = container_v1.GetClusterRequest(credentials=credentials) - response = client.get_cluster(request=request, project_id=project_id, zone=zone) - return response is not None + try: + client.get_cluster( + name=f"projects/{project_id}/locations/{region}/clusters/{cluster_name}" + ) + except google.api_core.exceptions.NotFound: + return False + return True -def bucket_exists(bucket_name: str, project_id: str) -> bool: +def bucket_exists(bucket_name: str) -> bool: """Check if a storage bucket exists.""" - client = storage.Client(project=project_id) - bucket = client.get_bucket(bucket_name) - return bucket is not None + credentials, _ = load_credentials() + client = storage.Client(credentials=credentials) + + try: + client.get_bucket(bucket_name) + except google.api_core.exceptions.NotFound: + return False + return True -def service_account_exists(service_account_name: str, project_id: str) -> bool: +def service_account_exists(service_account_name: str) -> bool: """Check if a service account exists.""" - client = iam_credentials_v1.IAMCredentialsClient() - service_acc = client.service_account_path(project_id, service_account_name) - return service_acc is not None + credentials, project_id = load_credentials() + 
client = iam_admin_v1.IAMClient(credentials=credentials) + service_account_path = client.service_account_path(project_id, service_account_name) + try: + client.get_service_account(name=service_account_path) + except google.api_core.exceptions.NotFound: + return False + return True -def delete_cluster(cluster_name: str, project_id: str, region: str): - """Delete a GKE cluster if it exists.""" - check_credentials() - if not cluster_exists(cluster_name, project_id, region): +def delete_cluster(cluster_name: str, region: str): + """Delete a GKE cluster if it exists.""" + credentials, project_id = load_credentials() + if not cluster_exists(cluster_name, region): print( f"Cluster {cluster_name} does not exist in project {project_id}, region {region}. Exiting gracefully." ) return - client = container_v1.ClusterManagerClient() - request = client.DeleteClusterRequest() + client = container_v1.ClusterManagerClient(credentials=credentials) try: - client.delete_cluster(request=request) - except google.api_core.exceptions.GoogleAPICallError as e: - if e.status_code == 200: - print("Cluster deleted successfully!") - else: - print("error deleting cluster!") + client.delete_cluster( + name=f"projects/{project_id}/locations/{region}/clusters/{cluster_name}" + ) + print(f"Successfully deleted cluster {cluster_name}.") + except google.api_core.exceptions.GoogleAPIError as e: + print(f"Failed to delete bucket {bucket_name}. Error: {e}") -def delete_storage_bucket(bucket_name: str, project_id: str): +def delete_storage_bucket(bucket_name: str): """Delete a storage bucket if it exists.""" - check_credentials() + credentials, project_id = load_credentials() - if not bucket_exists(bucket_name, project_id): + if not bucket_exists(bucket_name): print( f"Bucket {bucket_name} does not exist in project {project_id}. Exiting gracefully." ) return - client = storage.Client(project=project_id) + client = storage.Client(credentials=credentials) bucket = client.get_bucket(bucket_name) try: - bucket.delete() + bucket.delete(force=True) print(f"Successfully deleted bucket {bucket_name}.") - except storage.exceptions.BucketNotFoundError as e: + except google.api_core.exceptions.GoogleAPIError as e: print(f"Failed to delete bucket {bucket_name}. Error: {e}") -def delete_service_account(service_account_name: str, project_id: str): +def delete_service_account(service_account_name: str): """Delete a service account if it exists.""" - check_credentials() + credentials, project_id = load_credentials() - if not service_account_exists(service_account_name, project_id): + if not service_account_exists(service_account_name): print( f"Service account {service_account_name} does not exist in project {project_id}. Exiting gracefully." ) return - client = iam_credentials_v1.IAMCredentialsClient() - client.service_account_path(project_id, service_account_name) + + client = iam_admin_v1.IAMClient(credentials=credentials) + service_account_path = client.service_account_path(project_id, service_account_name) try: - client.delete_service_account(service_account_name) + client.delete_service_account(name=service_account_path) print(f"Successfully deleted service account {service_account_name}.") - except iam_credentials_v1.exceptions.IamServiceAccountNotFoundError as e: + except google.api_core.exceptions.GoogleAPIError as e: print(f"Failed to delete service account {service_account_name}. 
Error: {e}") @@ -148,6 +163,6 @@ def gcp_cleanup(config: schema.Main): f"{project_name}-{namespace}@{project_id}.iam.gserviceaccount.com" ) - delete_cluster(cluster_name, project_id, region) - delete_storage_bucket(bucket_name, project_id) - delete_service_account(service_account_name, project_id) + delete_cluster(cluster_name, region) + delete_storage_bucket(bucket_name) + delete_service_account(service_account_name) From 1c2fec31dd61fab278fb9705f1ae2f6b1d089dde Mon Sep 17 00:00:00 2001 From: Marcelo Villa Date: Thu, 8 Aug 2024 20:08:29 -0500 Subject: [PATCH 16/16] Add explicit google auth scopes --- .github/workflows/test_gcp_integration.yaml | 1 - src/_nebari/provider/cloud/google_cloud.py | 14 ++++++++++++-- 2 files changed, 12 insertions(+), 3 deletions(-) diff --git a/.github/workflows/test_gcp_integration.yaml b/.github/workflows/test_gcp_integration.yaml index 1fe9e4f8d..f67557047 100644 --- a/.github/workflows/test_gcp_integration.yaml +++ b/.github/workflows/test_gcp_integration.yaml @@ -93,7 +93,6 @@ jobs: - name: 'Authenticate to GCP' uses: 'google-github-actions/auth@v1' with: - token_format: access_token workload_identity_provider: ${{ env.GCP_WORKFLOW_PROVIDER }} service_account: ${{ env.GCP_SERVICE_ACCOUNT }} diff --git a/src/_nebari/provider/cloud/google_cloud.py b/src/_nebari/provider/cloud/google_cloud.py index bbd98c040..6b54e40e9 100644 --- a/src/_nebari/provider/cloud/google_cloud.py +++ b/src/_nebari/provider/cloud/google_cloud.py @@ -22,13 +22,23 @@ def check_credentials() -> None: def load_credentials(): check_credentials() credentials = os.environ["GOOGLE_CREDENTIALS"] + project_id = os.environ["PROJECT_ID"] + + # Scopes need to be explicitly defined when using workload identity + # federation. + scopes = ["https://www.googleapis.com/auth/cloud-platform"] + # Google credentials are stored as strings in GHA secrets so we need # to determine if the credentials are stored as a file or not before # reading them if credentials.endswith(".json"): - return load_credentials_from_file(credentials) + loaded_credentials, _ = load_credentials_from_file(credentials, scopes=scopes) else: - return load_credentials_from_dict(json.loads(credentials)) + loaded_credentials, _ = load_credentials_from_dict( + json.loads(credentials), scopes=scopes + ) + + return loaded_credentials, project_id @functools.lru_cache()