From bb321cf1159a950d58333f2f919242f4324755b6 Mon Sep 17 00:00:00 2001 From: Amit Kumar Date: Mon, 6 May 2024 21:52:55 +0100 Subject: [PATCH 01/32] Update allowed admin groups (#2429) Co-authored-by: Vinicius D. Cerutti <51954708+viniciusdc@users.noreply.github.com> --- .../template/modules/kubernetes/services/jupyterhub/main.tf | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/main.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/main.tf index cf86d5a03e..12a942c93a 100644 --- a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/main.tf +++ b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/main.tf @@ -159,8 +159,8 @@ resource "helm_release" "jupyterhub" { login_service = "Keycloak" username_claim = "preferred_username" claim_groups_key = "groups" - allowed_groups = ["/analyst", "/developer", "/admin"] - admin_groups = ["/admin"] + allowed_groups = ["/analyst", "/developer", "/admin", "jupyterhub_admin", "jupyterhub_developer"] + admin_groups = ["/admin", "jupyterhub_admin"] manage_groups = true refresh_pre_spawn = true validate_server_cert = false From 79220be0cef8879ef32fa4ff44eeef816e2792a9 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 6 May 2024 20:53:33 +0000 Subject: [PATCH 02/32] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pre-commit/pre-commit-hooks: v4.5.0 → v4.6.0](https://github.com/pre-commit/pre-commit-hooks/compare/v4.5.0...v4.6.0) - [github.com/psf/black: 24.3.0 → 24.4.2](https://github.com/psf/black/compare/24.3.0...24.4.2) - [github.com/astral-sh/ruff-pre-commit: v0.3.5 → v0.4.3](https://github.com/astral-sh/ruff-pre-commit/compare/v0.3.5...v0.4.3) - 
[github.com/antonbabenko/pre-commit-terraform: v1.88.4 → v1.89.1](https://github.com/antonbabenko/pre-commit-terraform/compare/v1.88.4...v1.89.1) --- .pre-commit-config.yaml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 9e9dcd9147..e26093698a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -22,7 +22,7 @@ ci: repos: # general - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.5.0 + rev: v4.6.0 hooks: - id: end-of-file-fixer exclude: "^docs-sphinx/cli.html" @@ -51,13 +51,13 @@ repos: # python - repo: https://github.com/psf/black - rev: 24.3.0 + rev: 24.4.2 hooks: - id: black args: ["--line-length=88", "--exclude=/src/_nebari/template/"] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.3.5 + rev: v0.4.3 hooks: - id: ruff args: ["--fix"] @@ -73,7 +73,7 @@ repos: # terraform - repo: https://github.com/antonbabenko/pre-commit-terraform - rev: v1.88.4 + rev: v1.89.1 hooks: - id: terraform_fmt args: From 2ff30120fe2d373d8b9b2acf59a48245944884b5 Mon Sep 17 00:00:00 2001 From: Adam Lewis <23342526+Adam-D-Lewis@users.noreply.github.com> Date: Tue, 7 May 2024 14:41:37 -0500 Subject: [PATCH 03/32] change default gcp instances to cost optimized e2 family instances --- src/_nebari/stages/infrastructure/__init__.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/_nebari/stages/infrastructure/__init__.py b/src/_nebari/stages/infrastructure/__init__.py index f430c49126..170e23ca54 100644 --- a/src/_nebari/stages/infrastructure/__init__.py +++ b/src/_nebari/stages/infrastructure/__init__.py @@ -314,9 +314,9 @@ class GCPNodeGroup(schema.Base): DEFAULT_GCP_NODE_GROUPS = { - "general": GCPNodeGroup(instance="n1-standard-8", min_nodes=1, max_nodes=1), - "user": GCPNodeGroup(instance="n1-standard-4", min_nodes=0, max_nodes=5), - "worker": GCPNodeGroup(instance="n1-standard-4", min_nodes=0, max_nodes=5), + "general": 
GCPNodeGroup(instance="e2-highmem-4", min_nodes=1, max_nodes=1), + "user": GCPNodeGroup(instance="e2-standard-4", min_nodes=0, max_nodes=5), + "worker": GCPNodeGroup(instance="e2-standard-4", min_nodes=0, max_nodes=5), } From 7d0c28ef2815943f528f54541fd55e8db0c62fc9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20Krassowski?= <5832902+krassowski@users.noreply.github.com> Date: Wed, 8 May 2024 21:59:38 +0100 Subject: [PATCH 04/32] Upgrade to JupyterHub 5.0 (#2427) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .../services/jupyterhub/files/jupyterhub/02-spawner.py | 1 - .../template/modules/kubernetes/services/jupyterhub/main.tf | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/files/jupyterhub/02-spawner.py b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/files/jupyterhub/02-spawner.py index c3934aad05..ea9511a4cc 100644 --- a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/files/jupyterhub/02-spawner.py +++ b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/files/jupyterhub/02-spawner.py @@ -72,7 +72,6 @@ def service_for_jhub_apps(name, url): "url": url, "external": True, }, - "oauth_no_confirm": True, } c.JupyterHub.services.extend( diff --git a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/main.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/main.tf index 12a942c93a..86c9d2efc2 100644 --- a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/main.tf +++ b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/main.tf @@ -57,7 +57,7 @@ resource "helm_release" "jupyterhub" { repository = "https://jupyterhub.github.io/helm-chart/" chart = 
"jupyterhub" - version = "3.2.1" + version = "4.0.0-0.dev.git.6586.h0a16e5a0" values = concat([ file("${path.module}/values.yaml"), From 3a4bb878bb23b144c0dd1f07131d737e4b5bde8f Mon Sep 17 00:00:00 2001 From: Pavithra Eswaramoorthy Date: Thu, 9 May 2024 12:37:07 +0530 Subject: [PATCH 05/32] Create CITATION.cff --- CITATION.cff | 8 ++++++++ 1 file changed, 8 insertions(+) create mode 100644 CITATION.cff diff --git a/CITATION.cff b/CITATION.cff new file mode 100644 index 0000000000..dd8bd19621 --- /dev/null +++ b/CITATION.cff @@ -0,0 +1,8 @@ +cff-version: 1.2.0 +message: "If you use this software, please cite it as below." +authors: + - name: "Nebari Development Team" +type: software +title: "Nebari: Your open source data science platform" +url: "https://www.nebari.dev" +repository-code: "https://github.com/nebari-dev/nebari" From 8bcee8fb472ece22f0279bbd12cfe729e2885d99 Mon Sep 17 00:00:00 2001 From: Pavithra Eswaramoorthy Date: Thu, 9 May 2024 12:47:45 +0530 Subject: [PATCH 06/32] Add latest version and release date --- CITATION.cff | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CITATION.cff b/CITATION.cff index dd8bd19621..d511a318db 100644 --- a/CITATION.cff +++ b/CITATION.cff @@ -4,5 +4,7 @@ authors: - name: "Nebari Development Team" type: software title: "Nebari: Your open source data science platform" +version: 2024.4.1 +date-released: 2024-04-20 url: "https://www.nebari.dev" repository-code: "https://github.com/nebari-dev/nebari" From a65e595600b2f0b38eeb6977f75c39922f103ffe Mon Sep 17 00:00:00 2001 From: Pavithra Eswaramoorthy Date: Thu, 9 May 2024 12:49:26 +0530 Subject: [PATCH 07/32] Remove tagline --- CITATION.cff | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CITATION.cff b/CITATION.cff index d511a318db..5294ea08d0 100644 --- a/CITATION.cff +++ b/CITATION.cff @@ -3,7 +3,7 @@ message: "If you use this software, please cite it as below." 
authors: - name: "Nebari Development Team" type: software -title: "Nebari: Your open source data science platform" +title: "Nebari" version: 2024.4.1 date-released: 2024-04-20 url: "https://www.nebari.dev" From f32d1bb056e9de639a658759ea2d61fa0ef2c61b Mon Sep 17 00:00:00 2001 From: "Vinicius D. Cerutti" <51954708+viniciusdc@users.noreply.github.com> Date: Mon, 13 May 2024 18:25:19 -0300 Subject: [PATCH 08/32] Release 2024.5.1 updates (#2461) --- RELEASE.md | 15 +++++++++++++++ src/_nebari/constants.py | 2 +- .../jupyterhub/files/jupyterhub/02-spawner.py | 1 + .../kubernetes/services/jupyterhub/main.tf | 2 +- src/_nebari/upgrade.py | 11 +++++++++++ 5 files changed, 29 insertions(+), 2 deletions(-) diff --git a/RELEASE.md b/RELEASE.md index c3ea55dda9..e1bac3acc9 100644 --- a/RELEASE.md +++ b/RELEASE.md @@ -9,6 +9,21 @@ This file is copied to nebari-dev/nebari-docs using a GitHub Action. --> --- +### Release 2024.5.1 - May 13, 2024 + +## What's Changed + +* make userscheduler run on general node group by @Adam-D-Lewis in +* Upgrade to Pydantic V2 by @Adam-D-Lewis in +* Pydantic2 PR fix by @Adam-D-Lewis in +* remove redundant pydantic class, fix bug by @Adam-D-Lewis in +* Update `python-keycloak` version pins constraints by @viniciusdc in +* add HERA_TOKEN env var to user pods by @Adam-D-Lewis in +* fix docs link by @Adam-D-Lewis in +* Update allowed admin groups by @aktech in + +**Full Changelog**: + ## Release 2024.4.1 - April 20, 2024 ### What's Changed diff --git a/src/_nebari/constants.py b/src/_nebari/constants.py index 7ca8df28b4..d0e3f37444 100644 --- a/src/_nebari/constants.py +++ b/src/_nebari/constants.py @@ -1,4 +1,4 @@ -CURRENT_RELEASE = "2024.4.1" +CURRENT_RELEASE = "2024.5.1" # NOTE: Terraform cannot be upgraded further due to Hashicorp licensing changes # implemented in August 2023. 
diff --git a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/files/jupyterhub/02-spawner.py b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/files/jupyterhub/02-spawner.py index ea9511a4cc..c3934aad05 100644 --- a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/files/jupyterhub/02-spawner.py +++ b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/files/jupyterhub/02-spawner.py @@ -72,6 +72,7 @@ def service_for_jhub_apps(name, url): "url": url, "external": True, }, + "oauth_no_confirm": True, } c.JupyterHub.services.extend( diff --git a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/main.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/main.tf index 86c9d2efc2..12a942c93a 100644 --- a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/main.tf +++ b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/main.tf @@ -57,7 +57,7 @@ resource "helm_release" "jupyterhub" { repository = "https://jupyterhub.github.io/helm-chart/" chart = "jupyterhub" - version = "4.0.0-0.dev.git.6586.h0a16e5a0" + version = "3.2.1" values = concat([ file("${path.module}/values.yaml"), diff --git a/src/_nebari/upgrade.py b/src/_nebari/upgrade.py index 64e593be66..2a98ff6f4b 100644 --- a/src/_nebari/upgrade.py +++ b/src/_nebari/upgrade.py @@ -773,6 +773,17 @@ def _version_specific_upgrade( return config +class Upgrade_2024_5_1(UpgradeStep): + version = "2024.5.1" + + def _version_specific_upgrade( + self, config, start_version, config_filename: Path, *args, **kwargs + ): + rich.print("Ready to upgrade to Nebari version [green]2024.5.1[/green].") + + return config + + __rounded_version__ = str(rounded_ver_parse(__version__)) # Manually-added upgrade steps must go above this line From 
042c2c3c1502b7562aa51cca76e31d65cec602d9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20Krassowski?= <5832902+krassowski@users.noreply.github.com> Date: Tue, 14 May 2024 10:43:56 +0100 Subject: [PATCH 09/32] Fetch JupyterHub roles from Keycloak (#2447) --- .../jupyterhub/files/jupyterhub/04-auth.py | 133 ++++++++++++++++++ .../kubernetes/services/jupyterhub/main.tf | 13 +- .../services/keycloak-client/main.tf | 30 +++- .../services/keycloak-client/outputs.tf | 6 +- .../services/keycloak-client/variables.tf | 13 ++ tests/tests_deployment/test_jupyterhub_api.py | 42 ++++++ 6 files changed, 230 insertions(+), 7 deletions(-) create mode 100644 src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/files/jupyterhub/04-auth.py create mode 100644 tests/tests_deployment/test_jupyterhub_api.py diff --git a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/files/jupyterhub/04-auth.py b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/files/jupyterhub/04-auth.py new file mode 100644 index 0000000000..082268a107 --- /dev/null +++ b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/files/jupyterhub/04-auth.py @@ -0,0 +1,133 @@ +import json +import os +import urllib +from functools import reduce + +from jupyterhub.traitlets import Callable +from oauthenticator.generic import GenericOAuthenticator +from traitlets import Bool, Unicode, Union + + +class KeyCloakOAuthenticator(GenericOAuthenticator): + """ + Since `oauthenticator` 16.3 `GenericOAuthenticator` supports group management. + This subclass adds role management on top of it, building on the new `manage_roles` + feature added in JupyterHub 5.0 (https://github.com/jupyterhub/jupyterhub/pull/4748). 
+ """ + + claim_roles_key = Union( + [Unicode(os.environ.get("OAUTH2_ROLES_KEY", "groups")), Callable()], + config=True, + help="""As `claim_groups_key` but for roles.""", + ) + + realm_api_url = Unicode( + config=True, help="""The keycloak REST API URL for the realm.""" + ) + + reset_managed_roles_on_startup = Bool(True) + + async def update_auth_model(self, auth_model): + auth_model = await super().update_auth_model(auth_model) + user_info = auth_model["auth_state"][self.user_auth_state_key] + user_roles = self._get_user_roles(user_info) + auth_model["roles"] = [{"name": role_name} for role_name in user_roles] + # note: because the roles check is comprehensive, we need to re-add the admin and user roles + if auth_model["admin"]: + auth_model["roles"].append({"name": "admin"}) + if self.check_allowed(auth_model["name"], auth_model): + auth_model["roles"].append({"name": "user"}) + return auth_model + + async def load_managed_roles(self): + if not self.manage_roles: + raise ValueError( + "Managed roles can only be loaded when `manage_roles` is True" + ) + token = await self._get_token() + + # Get the clients list to find the "id" of "jupyterhub" client. + clients_data = await self._fetch_api(endpoint="clients/", token=token) + jupyterhub_clients = [ + client for client in clients_data if client["clientId"] == "jupyterhub" + ] + assert len(jupyterhub_clients) == 1 + jupyterhub_client_id = jupyterhub_clients[0]["id"] + + # Includes roles like "jupyterhub_admin", "jupyterhub_developer", "dask_gateway_developer" + client_roles = await self._fetch_api( + endpoint=f"clients/{jupyterhub_client_id}/roles", token=token + ) + # Includes roles like "default-roles-nebari", "offline_access", "uma_authorization" + realm_roles = await self._fetch_api(endpoint="roles", token=token) + roles = { + role["name"]: {"name": role["name"], "description": role["description"]} + for role in [*realm_roles, *client_roles] + } + # we could use either `name` (e.g. 
"developer") or `path` ("/developer"); + # since the default claim key returns `path`, it seems preferable. + group_name_key = "path" + for realm_role in realm_roles: + role_name = realm_role["name"] + role = roles[role_name] + # fetch role assignments to groups + groups = await self._fetch_api(f"roles/{role_name}/groups", token=token) + role["groups"] = [group[group_name_key] for group in groups] + # fetch role assignments to users + users = await self._fetch_api(f"roles/{role_name}/users", token=token) + role["users"] = [user["username"] for user in users] + for client_role in client_roles: + role_name = client_role["name"] + role = roles[role_name] + # fetch role assignments to groups + groups = await self._fetch_api( + f"clients/{jupyterhub_client_id}/roles/{role_name}/groups", token=token + ) + role["groups"] = [group[group_name_key] for group in groups] + # fetch role assignments to users + users = await self._fetch_api( + f"clients/{jupyterhub_client_id}/roles/{role_name}/users", token=token + ) + role["users"] = [user["username"] for user in users] + + return list(roles.values()) + + def _get_user_roles(self, user_info): + if callable(self.claim_roles_key): + return set(self.claim_roles_key(user_info)) + try: + return set(reduce(dict.get, self.claim_roles_key.split("."), user_info)) + except TypeError: + self.log.error( + f"The claim_roles_key {self.claim_roles_key} does not exist in the user token" + ) + return set() + + async def _get_token(self) -> str: + http = self.http_client + + body = urllib.parse.urlencode( + { + "client_id": self.client_id, + "client_secret": self.client_secret, + "grant_type": "client_credentials", + } + ) + response = await http.fetch( + self.token_url, + method="POST", + body=body, + ) + data = json.loads(response.body) + return data["access_token"] # type: ignore[no-any-return] + + async def _fetch_api(self, endpoint: str, token: str): + response = await self.http_client.fetch( + f"{self.realm_api_url}/{endpoint}", + 
method="GET", + headers={"Authorization": f"Bearer {token}"}, + ) + return json.loads(response.body) + + +c.JupyterHub.authenticator_class = KeyCloakOAuthenticator diff --git a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/main.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/main.tf index 12a942c93a..8b5eb6875e 100644 --- a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/main.tf +++ b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/main.tf @@ -130,6 +130,7 @@ resource "helm_release" "jupyterhub" { "01-theme.py" = file("${path.module}/files/jupyterhub/01-theme.py") "02-spawner.py" = file("${path.module}/files/jupyterhub/02-spawner.py") "03-profiles.py" = file("${path.module}/files/jupyterhub/03-profiles.py") + "04-auth.py" = file("${path.module}/files/jupyterhub/04-auth.py") } services = { @@ -143,25 +144,25 @@ resource "helm_release" "jupyterhub" { # for simple key value configuration with jupyterhub traitlets # this hub.config property should be used config = { - JupyterHub = { - authenticator_class = "generic-oauth" - } Authenticator = { enable_auth_state = true } - GenericOAuthenticator = { + KeyCloakOAuthenticator = { client_id = module.jupyterhub-openid-client.config.client_id client_secret = module.jupyterhub-openid-client.config.client_secret oauth_callback_url = "https://${var.external-url}/hub/oauth_callback" authorize_url = module.jupyterhub-openid-client.config.authentication_url token_url = module.jupyterhub-openid-client.config.token_url userdata_url = module.jupyterhub-openid-client.config.userinfo_url + realm_api_url = module.jupyterhub-openid-client.config.realm_api_url login_service = "Keycloak" username_claim = "preferred_username" claim_groups_key = "groups" + claim_roles_key = "roles" allowed_groups = ["/analyst", "/developer", "/admin", "jupyterhub_admin", "jupyterhub_developer"] 
admin_groups = ["/admin", "jupyterhub_admin"] manage_groups = true + manage_roles = true refresh_pre_spawn = true validate_server_cert = false @@ -283,6 +284,10 @@ module "jupyterhub-openid-client" { var.jupyterhub-logout-redirect-url ] jupyterlab_profiles_mapper = true + service-accounts-enabled = true + service-account-roles = [ + "view-realm", "view-users", "view-clients" + ] } diff --git a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/keycloak-client/main.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/keycloak-client/main.tf index fd85eeb7a0..7a2c3e648d 100644 --- a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/keycloak-client/main.tf +++ b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/keycloak-client/main.tf @@ -15,7 +15,8 @@ resource "keycloak_openid_client" "main" { access_type = "CONFIDENTIAL" standard_flow_enabled = true - valid_redirect_uris = var.callback-url-paths + valid_redirect_uris = var.callback-url-paths + service_accounts_enabled = var.service-accounts-enabled } @@ -62,6 +63,33 @@ resource "keycloak_openid_user_attribute_protocol_mapper" "jupyterlab_profiles" aggregate_attributes = true } +data "keycloak_realm" "master" { + realm = "nebari" +} + +data "keycloak_openid_client" "realm_management" { + realm_id = var.realm_id + client_id = "realm-management" +} + +data "keycloak_role" "main-service" { + for_each = toset(var.service-account-roles) + + realm_id = data.keycloak_realm.master.id + client_id = data.keycloak_openid_client.realm_management.id + name = each.key +} + +resource "keycloak_openid_client_service_account_role" "main" { + for_each = toset(var.service-account-roles) + + realm_id = var.realm_id + service_account_user_id = keycloak_openid_client.main.service_account_user_id + client_id = data.keycloak_openid_client.realm_management.id + role = data.keycloak_role.main-service[each.key].name +} + + resource 
"keycloak_role" "main" { for_each = toset(flatten(values(var.role_mapping))) diff --git a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/keycloak-client/outputs.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/keycloak-client/outputs.tf index bd1978bd4b..6077c22b0e 100644 --- a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/keycloak-client/outputs.tf +++ b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/keycloak-client/outputs.tf @@ -1,12 +1,14 @@ output "config" { description = "configuration credentials for connecting to openid client" value = { - client_id = keycloak_openid_client.main.client_id - client_secret = keycloak_openid_client.main.client_secret + client_id = keycloak_openid_client.main.client_id + client_secret = keycloak_openid_client.main.client_secret + service_account_user_id = keycloak_openid_client.main.service_account_user_id authentication_url = "https://${var.external-url}/auth/realms/${var.realm_id}/protocol/openid-connect/auth" token_url = "https://${var.external-url}/auth/realms/${var.realm_id}/protocol/openid-connect/token" userinfo_url = "https://${var.external-url}/auth/realms/${var.realm_id}/protocol/openid-connect/userinfo" + realm_api_url = "https://${var.external-url}/auth/admin/realms/${var.realm_id}" callback_urls = var.callback-url-paths } } diff --git a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/keycloak-client/variables.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/keycloak-client/variables.tf index d20ecca48a..b4e709c6a5 100644 --- a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/keycloak-client/variables.tf +++ b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/keycloak-client/variables.tf @@ -16,6 +16,19 @@ variable "external-url" { } +variable "service-accounts-enabled" { + description = 
"Whether the client should have a service account created" + type = bool + default = false +} + +variable "service-account-roles" { + description = "Roles to be granted to the service account. Requires setting service-accounts-enabled to true." + type = list(string) + default = [] +} + + variable "role_mapping" { description = "Group to role mapping to establish for client" type = map(list(string)) diff --git a/tests/tests_deployment/test_jupyterhub_api.py b/tests/tests_deployment/test_jupyterhub_api.py new file mode 100644 index 0000000000..68fa70c1d7 --- /dev/null +++ b/tests/tests_deployment/test_jupyterhub_api.py @@ -0,0 +1,42 @@ +import pytest + +from tests.tests_deployment import constants +from tests.tests_deployment.utils import get_jupyterhub_session + + +@pytest.mark.filterwarnings("ignore::urllib3.exceptions.InsecureRequestWarning") +def test_jupyterhub_loads_roles_from_keycloak(): + session = get_jupyterhub_session() + xsrf_token = session.cookies.get("_xsrf") + response = session.get( + f"https://{constants.NEBARI_HOSTNAME}/hub/api/users/{constants.KEYCLOAK_USERNAME}", + headers={"X-XSRFToken": xsrf_token}, + verify=False, + ) + user = response.json() + assert set(user["roles"]) == { + "user", + "manage-account", + "jupyterhub_developer", + "argo-developer", + "dask_gateway_developer", + "grafana_viewer", + "conda_store_developer", + "argo-viewer", + "grafana_developer", + "manage-account-links", + "view-profile", + } + + +@pytest.mark.filterwarnings("ignore::urllib3.exceptions.InsecureRequestWarning") +def test_jupyterhub_loads_groups_from_keycloak(): + session = get_jupyterhub_session() + xsrf_token = session.cookies.get("_xsrf") + response = session.get( + f"https://{constants.NEBARI_HOSTNAME}/hub/api/users/{constants.KEYCLOAK_USERNAME}", + headers={"X-XSRFToken": xsrf_token}, + verify=False, + ) + user = response.json() + assert set(user["groups"]) == {"/analyst", "/developer", "/users"} From 2c7f09282a06ff96a6adfbb837330ea69932c780 Mon Sep 17 
00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20Krassowski?= <5832902+krassowski@users.noreply.github.com> Date: Tue, 14 May 2024 14:22:51 +0100 Subject: [PATCH 10/32] Update selector for Start server button to use button tag (#2464) --- tests/tests_e2e/cypress/integration/main.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/tests_e2e/cypress/integration/main.js b/tests/tests_e2e/cypress/integration/main.js index 1184ba76d6..e25d60fd8d 100644 --- a/tests/tests_e2e/cypress/integration/main.js +++ b/tests/tests_e2e/cypress/integration/main.js @@ -52,8 +52,8 @@ describe('First Test', () => { cy.get('h1') .should('contain', 'Server Options'); - cy.get('input.btn.btn-jupyter') - .should('have.attr', 'value', 'Start').click(); + cy.get('button.btn.btn-jupyter') + .should('contain', 'Start').click(); // Minimal check that JupyterLab has opened cy.get('div#jp-MainLogo', { timeout: 60000 }).should('exist').wait(4000); From 6bbb3b70f598b253ca720935b23341c604095745 Mon Sep 17 00:00:00 2001 From: Adam Lewis <23342526+Adam-D-Lewis@users.noreply.github.com> Date: Tue, 14 May 2024 11:43:11 -0500 Subject: [PATCH 11/32] add upgrade message --- src/_nebari/upgrade.py | 47 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 47 insertions(+) diff --git a/src/_nebari/upgrade.py b/src/_nebari/upgrade.py index 2a98ff6f4b..f53377c0a7 100644 --- a/src/_nebari/upgrade.py +++ b/src/_nebari/upgrade.py @@ -784,6 +784,53 @@ def _version_specific_upgrade( return config +class Upgrade_2024_6_1(UpgradeStep): + version = "2024.6.1" + + def _version_specific_upgrade( + self, config, start_version, config_filename: Path, *args, **kwargs + ): + if provider := config.get("provider", ""): + if provider == ProviderEnum.gcp.value: + provider_full_name = provider_enum_name_map[provider] + if not config.get(provider_full_name, {}).get("node_groups", {}): + try: + continue_ = Prompt.ask( + f"""The default node groups for GCP have been changed to cost efficient e2 family 
nodes reducing the running cost of Nebari on GCP by ~50%. + This change will affect your current deployment, and will result in ~15 minutes of downtime during the upgrade step as the node groups are switched out, but shouldn't result in data loss. + + As always, make sure to [backup data](https://www.nebari.dev/docs/how-tos/manual-backup/) before upgrading. + + Would you like to upgrade to the cost effective node groups [purple]{config_filename}[/purple]? + If not, select "N" and the old default node groups will be added to the nebari config file. + """, + choices=["y", "N"], + default="y", + ) + if continue_ == "N": + config[provider_full_name]["node_groups"] = { + "general": { + "instance": "n1-standard-8", + "min_nodes": 1, + "max_nodes": 1, + }, + "user": { + "instance": "n1-standard-4", + "min_nodes": 0, + "max_nodes": 5, + }, + "worker": { + "instance": "n1-standard-4", + "min_nodes": 0, + "max_nodes": 5, + }, + } + except KeyError: + pass + + return config + + __rounded_version__ = str(rounded_ver_parse(__version__)) # Manually-added upgrade steps must go above this line From 75270ba13d5d1e58f106787f1bc28f8fb1f0290f Mon Sep 17 00:00:00 2001 From: Adam Lewis <23342526+Adam-D-Lewis@users.noreply.github.com> Date: Tue, 14 May 2024 11:57:29 -0500 Subject: [PATCH 12/32] make upgrade step 2024.5.2 --- src/_nebari/upgrade.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/_nebari/upgrade.py b/src/_nebari/upgrade.py index f53377c0a7..11d14d3ad8 100644 --- a/src/_nebari/upgrade.py +++ b/src/_nebari/upgrade.py @@ -784,8 +784,8 @@ def _version_specific_upgrade( return config -class Upgrade_2024_6_1(UpgradeStep): - version = "2024.6.1" +class Upgrade_2024_5_2(UpgradeStep): + version = "2024.5.2" def _version_specific_upgrade( self, config, start_version, config_filename: Path, *args, **kwargs From c45b0832a5e8418bc28abef3007930401f5a60c1 Mon Sep 17 00:00:00 2001 From: Adam Lewis <23342526+Adam-D-Lewis@users.noreply.github.com> Date: Tue, 14 May 
2024 12:57:25 -0500 Subject: [PATCH 13/32] add upgrade message --- src/_nebari/upgrade.py | 37 ++++++++++++++++++++++++++++++------- 1 file changed, 30 insertions(+), 7 deletions(-) diff --git a/src/_nebari/upgrade.py b/src/_nebari/upgrade.py index 11d14d3ad8..df3ae66c19 100644 --- a/src/_nebari/upgrade.py +++ b/src/_nebari/upgrade.py @@ -3,6 +3,7 @@ import re import secrets import string +import textwrap from abc import ABC from pathlib import Path from typing import Any, ClassVar, Dict @@ -787,6 +788,10 @@ def _version_specific_upgrade( class Upgrade_2024_5_2(UpgradeStep): version = "2024.5.2" + @staticmethod + def _wrap(s): + return "\n".join("\n".join(textwrap.wrap(x)) for x in s.splitlines()) + def _version_specific_upgrade( self, config, start_version, config_filename: Path, *args, **kwargs ): @@ -795,15 +800,18 @@ def _version_specific_upgrade( provider_full_name = provider_enum_name_map[provider] if not config.get(provider_full_name, {}).get("node_groups", {}): try: - continue_ = Prompt.ask( - f"""The default node groups for GCP have been changed to cost efficient e2 family nodes reducing the running cost of Nebari on GCP by ~50%. - This change will affect your current deployment, and will result in ~15 minutes of downtime during the upgrade step as the node groups are switched out, but shouldn't result in data loss. + text = f""" +The default node groups for GCP have been changed to cost efficient e2 family nodes reducing the running cost of Nebari on GCP by ~50%. \ +This change will affect your current deployment, and will result in ~15 minutes of downtime during the upgrade step as the node groups are switched out, but shouldn't result in data loss. - As always, make sure to [backup data](https://www.nebari.dev/docs/how-tos/manual-backup/) before upgrading. +As always, make sure to [link=https://www.nebari.dev/docs/how-tos/manual-backup]backup data[/link] before upgrading. 
- Would you like to upgrade to the cost effective node groups [purple]{config_filename}[/purple]? - If not, select "N" and the old default node groups will be added to the nebari config file. - """, +Would you like to upgrade to the cost effective node groups [purple]{config_filename}[/purple]? +If not, select "N" and the old default node groups will be added to the nebari config file. +""".rstrip() + wrapped_text = self._wrap(text) + continue_ = Prompt.ask( + wrapped_text, choices=["y", "N"], default="y", ) @@ -827,7 +835,22 @@ def _version_specific_upgrade( } except KeyError: pass + else: + text = f""" +The default node groups for GCP have been changed to cost efficient e2 family nodes reducing the running cost of Nebari on GCP by ~50%. +Consider upgrading your node group instance types to the new default configuration. + +Upgrading your general node will result in ~15 minutes of downtime during the upgrade step as the node groups are switched out, but shouldn't result in data loss. + +As always, make sure to [link=https://www.nebari.dev/docs/how-tos/manual-backup]backup data[/link] before upgrading. 
+ +The new default node groups instances are: +{json.dumps({'general': {'instance': 'e2-highmem-4'}, 'user': {'instance': 'e2-standard-4'}, 'worker': {'instance': 'e2-standard-4'}}, indent=4)} +Hit enter to continue +""".rstrip() + wrapped_text = self._wrap(text) + Prompt.ask(wrapped_text) return config From 647104851d340a1b413c9b63e026a017b43f37bf Mon Sep 17 00:00:00 2001 From: Adam Lewis <23342526+Adam-D-Lewis@users.noreply.github.com> Date: Tue, 14 May 2024 13:05:52 -0500 Subject: [PATCH 14/32] add link --- src/_nebari/upgrade.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/_nebari/upgrade.py b/src/_nebari/upgrade.py index df3ae66c19..013bf3f64b 100644 --- a/src/_nebari/upgrade.py +++ b/src/_nebari/upgrade.py @@ -804,7 +804,7 @@ def _version_specific_upgrade( The default node groups for GCP have been changed to cost efficient e2 family nodes reducing the running cost of Nebari on GCP by ~50%. \ This change will affect your current deployment, and will result in ~15 minutes of downtime during the upgrade step as the node groups are switched out, but shouldn't result in data loss. -As always, make sure to [link=https://www.nebari.dev/docs/how-tos/manual-backup]backup data[/link] before upgrading. +As always, make sure to backup data before upgrading. See https://www.nebari.dev/docs/how-tos/manual-backup for more information. Would you like to upgrade to the cost effective node groups [purple]{config_filename}[/purple]? If not, select "N" and the old default node groups will be added to the nebari config file. @@ -842,7 +842,7 @@ def _version_specific_upgrade( Upgrading your general node will result in ~15 minutes of downtime during the upgrade step as the node groups are switched out, but shouldn't result in data loss. -As always, make sure to [link=https://www.nebari.dev/docs/how-tos/manual-backup]backup data[/link] before upgrading. +As always, make sure to backup data before upgrading. 
See https://www.nebari.dev/docs/how-tos/manual-backup for more information. The new default node groups instances are: {json.dumps({'general': {'instance': 'e2-highmem-4'}, 'user': {'instance': 'e2-standard-4'}, 'worker': {'instance': 'e2-standard-4'}}, indent=4)} From 02f04c339f880540064d2223176830afdd02f5fa Mon Sep 17 00:00:00 2001 From: Adam Lewis <23342526+Adam-D-Lewis@users.noreply.github.com> Date: Tue, 14 May 2024 13:08:35 -0500 Subject: [PATCH 15/32] reset to develop --- src/_nebari/stages/infrastructure/__init__.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/_nebari/stages/infrastructure/__init__.py b/src/_nebari/stages/infrastructure/__init__.py index 170e23ca54..f430c49126 100644 --- a/src/_nebari/stages/infrastructure/__init__.py +++ b/src/_nebari/stages/infrastructure/__init__.py @@ -314,9 +314,9 @@ class GCPNodeGroup(schema.Base): DEFAULT_GCP_NODE_GROUPS = { - "general": GCPNodeGroup(instance="e2-highmem-4", min_nodes=1, max_nodes=1), - "user": GCPNodeGroup(instance="e2-standard-4", min_nodes=0, max_nodes=5), - "worker": GCPNodeGroup(instance="e2-standard-4", min_nodes=0, max_nodes=5), + "general": GCPNodeGroup(instance="n1-standard-8", min_nodes=1, max_nodes=1), + "user": GCPNodeGroup(instance="n1-standard-4", min_nodes=0, max_nodes=5), + "worker": GCPNodeGroup(instance="n1-standard-4", min_nodes=0, max_nodes=5), } From 2a2f2ee779ac21b70339da6551c2f6b0b00f6efe Mon Sep 17 00:00:00 2001 From: Adam Lewis <23342526+Adam-D-Lewis@users.noreply.github.com> Date: Tue, 14 May 2024 13:13:49 -0500 Subject: [PATCH 16/32] Reduce GCP Fixed Costs by 50% (#2453) --- src/_nebari/stages/infrastructure/__init__.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/_nebari/stages/infrastructure/__init__.py b/src/_nebari/stages/infrastructure/__init__.py index f430c49126..170e23ca54 100644 --- a/src/_nebari/stages/infrastructure/__init__.py +++ b/src/_nebari/stages/infrastructure/__init__.py @@ -314,9 +314,9 @@ 
class GCPNodeGroup(schema.Base): DEFAULT_GCP_NODE_GROUPS = { - "general": GCPNodeGroup(instance="n1-standard-8", min_nodes=1, max_nodes=1), - "user": GCPNodeGroup(instance="n1-standard-4", min_nodes=0, max_nodes=5), - "worker": GCPNodeGroup(instance="n1-standard-4", min_nodes=0, max_nodes=5), + "general": GCPNodeGroup(instance="e2-highmem-4", min_nodes=1, max_nodes=1), + "user": GCPNodeGroup(instance="e2-standard-4", min_nodes=0, max_nodes=5), + "worker": GCPNodeGroup(instance="e2-standard-4", min_nodes=0, max_nodes=5), } From fa075661cb4860797597fbaf853f3a87a090a3c1 Mon Sep 17 00:00:00 2001 From: "Vinicius D. Cerutti" <51954708+viniciusdc@users.noreply.github.com> Date: Thu, 16 May 2024 15:35:41 -0300 Subject: [PATCH 17/32] Restore JupyterHub updates from PR-2427 (#2465) --- .../services/jupyterhub/files/jupyterhub/02-spawner.py | 1 - .../template/modules/kubernetes/services/jupyterhub/main.tf | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/files/jupyterhub/02-spawner.py b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/files/jupyterhub/02-spawner.py index c3934aad05..ea9511a4cc 100644 --- a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/files/jupyterhub/02-spawner.py +++ b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/files/jupyterhub/02-spawner.py @@ -72,7 +72,6 @@ def service_for_jhub_apps(name, url): "url": url, "external": True, }, - "oauth_no_confirm": True, } c.JupyterHub.services.extend( diff --git a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/main.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/main.tf index 8b5eb6875e..46a40c87dd 100644 --- a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/main.tf +++ 
b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/main.tf @@ -57,7 +57,7 @@ resource "helm_release" "jupyterhub" { repository = "https://jupyterhub.github.io/helm-chart/" chart = "jupyterhub" - version = "3.2.1" + version = "4.0.0-0.dev.git.6586.h0a16e5a0" values = concat([ file("${path.module}/values.yaml"), From 77c8a2d442d0614fedff308ffaba07d5bae6d688 Mon Sep 17 00:00:00 2001 From: krassowski <5832902+krassowski@users.noreply.github.com> Date: Thu, 16 May 2024 19:38:11 +0100 Subject: [PATCH 18/32] Use JupyterHub 5.0.0b2 --- .../template/modules/kubernetes/services/jupyterhub/main.tf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/main.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/main.tf index 46a40c87dd..c622ee9a54 100644 --- a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/main.tf +++ b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/main.tf @@ -57,7 +57,7 @@ resource "helm_release" "jupyterhub" { repository = "https://jupyterhub.github.io/helm-chart/" chart = "jupyterhub" - version = "4.0.0-0.dev.git.6586.h0a16e5a0" + version = "4.0.0-0.dev.git.6607.hd1a1130e" values = concat([ file("${path.module}/values.yaml"), From 43fb770c822dab7c4b27d507f756aee971cd8983 Mon Sep 17 00:00:00 2001 From: Adam Lewis <23342526+Adam-D-Lewis@users.noreply.github.com> Date: Mon, 20 May 2024 16:21:43 -0500 Subject: [PATCH 19/32] Workload identity (#2460) --- pyproject.toml | 18 ++++++++++-------- src/_nebari/stages/infrastructure/__init__.py | 3 +++ .../infrastructure/template/azure/main.tf | 5 +++-- .../template/azure/modules/kubernetes/main.tf | 7 +++++++ .../azure/modules/kubernetes/outputs.tf | 10 ++++++++++ .../azure/modules/kubernetes/variables.tf | 6 ++++++ .../infrastructure/template/azure/outputs.tf | 10 
++++++++++ .../infrastructure/template/azure/variables.tf | 6 ++++++ .../stages/kubernetes_services/__init__.py | 5 +++++ .../template/dask_gateway.tf | 2 ++ .../template/forward-auth.tf | 18 +++++++++++++++++- .../modules/kubernetes/forwardauth/main.tf | 6 +++--- .../modules/kubernetes/forwardauth/outputs.tf | 13 +++++++++++++ .../kubernetes/forwardauth/variables.tf | 5 +++++ .../services/dask-gateway/middleware.tf | 2 +- .../services/dask-gateway/variables.tf | 4 ++++ .../stages/nebari_tf_extensions/__init__.py | 3 +++ .../template/modules/nebariextension/locals.tf | 2 +- .../modules/nebariextension/variables.tf | 5 +++++ .../template/tf-extensions.tf | 1 + .../nebari_tf_extensions/template/variables.tf | 5 +++++ 21 files changed, 120 insertions(+), 16 deletions(-) create mode 100644 src/_nebari/stages/kubernetes_services/template/modules/kubernetes/forwardauth/outputs.tf diff --git a/pyproject.toml b/pyproject.toml index 1731611781..91b0fe4eda 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -136,21 +136,23 @@ module = [ ignore_missing_imports = true [tool.ruff] +extend-exclude = [ + "src/_nebari/template", + "home", + "__pycache__" +] + +[tool.ruff.lint] select = [ - "E", - "F", - "PTH", + "E", # E: pycodestyle rules + "F", # F: pyflakes rules + "PTH", # PTH: flake8-use-pathlib rules ] ignore = [ "E501", # Line too long "F821", # Undefined name "PTH123", # open() should be replaced by Path.open() ] -extend-exclude = [ - "src/_nebari/template", - "home", - "__pycache__" -] [tool.coverage.run] branch = true diff --git a/src/_nebari/stages/infrastructure/__init__.py b/src/_nebari/stages/infrastructure/__init__.py index 170e23ca54..8b188a720b 100644 --- a/src/_nebari/stages/infrastructure/__init__.py +++ b/src/_nebari/stages/infrastructure/__init__.py @@ -112,6 +112,7 @@ class AzureInputVars(schema.Base): tags: Dict[str, str] = {} max_pods: Optional[int] = None network_profile: Optional[Dict[str, str]] = None + workload_identity_enabled: bool = False class 
AWSNodeGroupInputVars(schema.Base): @@ -380,6 +381,7 @@ class AzureProvider(schema.Base): tags: Optional[Dict[str, str]] = {} network_profile: Optional[Dict[str, str]] = None max_pods: Optional[int] = None + workload_identity_enabled: bool = False @model_validator(mode="before") @classmethod @@ -781,6 +783,7 @@ def input_vars(self, stage_outputs: Dict[str, Dict[str, Any]]): tags=self.config.azure.tags, network_profile=self.config.azure.network_profile, max_pods=self.config.azure.max_pods, + workload_identity_enabled=self.config.azure.workload_identity_enabled, ).model_dump() elif self.config.provider == schema.ProviderEnum.aws: return AWSInputVars( diff --git a/src/_nebari/stages/infrastructure/template/azure/main.tf b/src/_nebari/stages/infrastructure/template/azure/main.tf index 2ee687cc0f..2d6e2e2afa 100644 --- a/src/_nebari/stages/infrastructure/template/azure/main.tf +++ b/src/_nebari/stages/infrastructure/template/azure/main.tf @@ -40,6 +40,7 @@ module "kubernetes" { max_size = config.max_nodes } ] - vnet_subnet_id = var.vnet_subnet_id - private_cluster_enabled = var.private_cluster_enabled + vnet_subnet_id = var.vnet_subnet_id + private_cluster_enabled = var.private_cluster_enabled + workload_identity_enabled = var.workload_identity_enabled } diff --git a/src/_nebari/stages/infrastructure/template/azure/modules/kubernetes/main.tf b/src/_nebari/stages/infrastructure/template/azure/modules/kubernetes/main.tf index 5f2bad6561..cd39488309 100644 --- a/src/_nebari/stages/infrastructure/template/azure/modules/kubernetes/main.tf +++ b/src/_nebari/stages/infrastructure/template/azure/modules/kubernetes/main.tf @@ -5,6 +5,10 @@ resource "azurerm_kubernetes_cluster" "main" { resource_group_name = var.resource_group_name tags = var.tags + # To enable Azure AD Workload Identity oidc_issuer_enabled must be set to true. 
+ oidc_issuer_enabled = var.workload_identity_enabled + workload_identity_enabled = var.workload_identity_enabled + # DNS prefix specified when creating the managed cluster. Changing this forces a new resource to be created. dns_prefix = "Nebari" # required @@ -39,6 +43,9 @@ resource "azurerm_kubernetes_cluster" "main" { "azure-node-pool" = var.node_groups[0].name } tags = var.tags + + # temparory_name_for_rotation must be <= 12 characters + temporary_name_for_rotation = "${substr(var.node_groups[0].name, 0, 9)}tmp" } sku_tier = "Free" # "Free" [Default] or "Paid" diff --git a/src/_nebari/stages/infrastructure/template/azure/modules/kubernetes/outputs.tf b/src/_nebari/stages/infrastructure/template/azure/modules/kubernetes/outputs.tf index 35d7b048b9..e96187bcd6 100644 --- a/src/_nebari/stages/infrastructure/template/azure/modules/kubernetes/outputs.tf +++ b/src/_nebari/stages/infrastructure/template/azure/modules/kubernetes/outputs.tf @@ -17,3 +17,13 @@ output "kubeconfig" { sensitive = true value = azurerm_kubernetes_cluster.main.kube_config_raw } + +output "cluster_oidc_issuer_url" { + description = "The OpenID Connect issuer URL that is associated with the AKS cluster" + value = azurerm_kubernetes_cluster.main.oidc_issuer_url +} + +output "resource_group_name" { + description = "The name of the resource group in which the AKS cluster is created" + value = azurerm_kubernetes_cluster.main.resource_group_name +} diff --git a/src/_nebari/stages/infrastructure/template/azure/modules/kubernetes/variables.tf b/src/_nebari/stages/infrastructure/template/azure/modules/kubernetes/variables.tf index b7159dad9b..b93a9fae2d 100644 --- a/src/_nebari/stages/infrastructure/template/azure/modules/kubernetes/variables.tf +++ b/src/_nebari/stages/infrastructure/template/azure/modules/kubernetes/variables.tf @@ -70,3 +70,9 @@ variable "max_pods" { type = number default = 60 } + +variable "workload_identity_enabled" { + description = "Enable Workload Identity" + type = bool + 
default = false +} diff --git a/src/_nebari/stages/infrastructure/template/azure/outputs.tf b/src/_nebari/stages/infrastructure/template/azure/outputs.tf index 352e52e3c5..d904e3ec1e 100644 --- a/src/_nebari/stages/infrastructure/template/azure/outputs.tf +++ b/src/_nebari/stages/infrastructure/template/azure/outputs.tf @@ -22,3 +22,13 @@ output "kubeconfig_filename" { description = "filename for nebari kubeconfig" value = var.kubeconfig_filename } + +output "cluster_oidc_issuer_url" { + description = "The OpenID Connect issuer URL that is associated with the AKS cluster" + value = module.kubernetes.cluster_oidc_issuer_url +} + +output "resource_group_name" { + description = "The name of the resource group in which the AKS cluster is created" + value = module.kubernetes.resource_group_name +} diff --git a/src/_nebari/stages/infrastructure/template/azure/variables.tf b/src/_nebari/stages/infrastructure/template/azure/variables.tf index 4d9e6440eb..dcef2c97cb 100644 --- a/src/_nebari/stages/infrastructure/template/azure/variables.tf +++ b/src/_nebari/stages/infrastructure/template/azure/variables.tf @@ -76,3 +76,9 @@ variable "max_pods" { type = number default = 60 } + +variable "workload_identity_enabled" { + description = "Enable Workload Identity" + type = bool + default = false +} diff --git a/src/_nebari/stages/kubernetes_services/__init__.py b/src/_nebari/stages/kubernetes_services/__init__.py index cdc1ae9151..3c9f19a064 100644 --- a/src/_nebari/stages/kubernetes_services/__init__.py +++ b/src/_nebari/stages/kubernetes_services/__init__.py @@ -24,6 +24,9 @@ TIMEOUT = 10 # seconds +_forwardauth_middleware_name = "traefik-forward-auth" + + @schema.yaml_object(schema.yaml) class AccessEnum(str, enum.Enum): all = "all" @@ -327,6 +330,7 @@ class KubernetesServicesInputVars(schema.Base): realm_id: str node_groups: Dict[str, Dict[str, str]] jupyterhub_logout_redirect_url: str = Field(alias="jupyterhub-logout-redirect-url") + forwardauth_middleware_name: str = 
_forwardauth_middleware_name def _split_docker_image_name(image_name): @@ -383,6 +387,7 @@ class DaskGatewayInputVars(schema.Base): dask_worker_image: ImageNameTag = Field(alias="dask-worker-image") dask_gateway_profiles: Dict[str, Any] = Field(alias="dask-gateway-profiles") cloud_provider: str = Field(alias="cloud-provider") + forwardauth_middleware_name: str = _forwardauth_middleware_name class MonitoringInputVars(schema.Base): diff --git a/src/_nebari/stages/kubernetes_services/template/dask_gateway.tf b/src/_nebari/stages/kubernetes_services/template/dask_gateway.tf index b9b0a9c6c3..fb2fdc71fc 100644 --- a/src/_nebari/stages/kubernetes_services/template/dask_gateway.tf +++ b/src/_nebari/stages/kubernetes_services/template/dask_gateway.tf @@ -40,4 +40,6 @@ module "dask-gateway" { profiles = var.dask-gateway-profiles cloud-provider = var.cloud-provider + + forwardauth_middleware_name = var.forwardauth_middleware_name } diff --git a/src/_nebari/stages/kubernetes_services/template/forward-auth.tf b/src/_nebari/stages/kubernetes_services/template/forward-auth.tf index 3cb4e827e2..6ff9ac45b1 100644 --- a/src/_nebari/stages/kubernetes_services/template/forward-auth.tf +++ b/src/_nebari/stages/kubernetes_services/template/forward-auth.tf @@ -5,5 +5,21 @@ module "forwardauth" { external-url = var.endpoint realm_id = var.realm_id - node-group = var.node_groups.general + node-group = var.node_groups.general + forwardauth_middleware_name = var.forwardauth_middleware_name +} + +variable "forwardauth_middleware_name" { + description = "Name of the traefik forward auth middleware" + type = string +} + +output "forward-auth-middleware" { + description = "middleware name for use with forward auth" + value = module.forwardauth.forward-auth-middleware +} + +output "forward-auth-service" { + description = "middleware name for use with forward auth" + value = module.forwardauth.forward-auth-service } diff --git 
a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/forwardauth/main.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/forwardauth/main.tf index 6d9eb126ea..2fe1f2d0a0 100644 --- a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/forwardauth/main.tf +++ b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/forwardauth/main.tf @@ -144,12 +144,12 @@ resource "kubernetes_manifest" "forwardauth-middleware" { apiVersion = "traefik.containo.us/v1alpha1" kind = "Middleware" metadata = { - name = "traefik-forward-auth" + name = var.forwardauth_middleware_name namespace = var.namespace } spec = { forwardAuth = { - address = "http://forwardauth-service:4181" + address = "http://${kubernetes_service.forwardauth-service.metadata.0.name}:4181" authResponseHeaders = [ "X-Forwarded-User" ] @@ -175,7 +175,7 @@ resource "kubernetes_manifest" "forwardauth-ingressroute" { middlewares = [ { - name = "traefik-forward-auth" + name = kubernetes_manifest.forwardauth-middleware.manifest.metadata.name namespace = var.namespace } ] diff --git a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/forwardauth/outputs.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/forwardauth/outputs.tf new file mode 100644 index 0000000000..9280da29e9 --- /dev/null +++ b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/forwardauth/outputs.tf @@ -0,0 +1,13 @@ +output "forward-auth-middleware" { + description = "middleware name for use with forward auth" + value = { + name = kubernetes_manifest.forwardauth-middleware.manifest.metadata.name + } +} + +output "forward-auth-service" { + description = "middleware name for use with forward auth" + value = { + name = kubernetes_service.forwardauth-service.metadata.0.name + } +} diff --git a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/forwardauth/variables.tf 
b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/forwardauth/variables.tf index 3674b1db75..212238bc76 100644 --- a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/forwardauth/variables.tf +++ b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/forwardauth/variables.tf @@ -26,3 +26,8 @@ variable "node-group" { value = string }) } + +variable "forwardauth_middleware_name" { + description = "Name of the traefik forward auth middleware" + type = string +} diff --git a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/dask-gateway/middleware.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/dask-gateway/middleware.tf index 01680129b8..389127d06e 100644 --- a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/dask-gateway/middleware.tf +++ b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/dask-gateway/middleware.tf @@ -32,7 +32,7 @@ resource "kubernetes_manifest" "chain-middleware" { chain = { middlewares = [ { - name = "traefik-forward-auth" + name = var.forwardauth_middleware_name namespace = var.namespace }, { diff --git a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/dask-gateway/variables.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/dask-gateway/variables.tf index 7f8a4aa978..074e1214d0 100644 --- a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/dask-gateway/variables.tf +++ b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/dask-gateway/variables.tf @@ -204,3 +204,7 @@ variable "cloud-provider" { description = "Name of the cloud provider to deploy to." 
type = string } + +variable "forwardauth_middleware_name" { + type = string +} diff --git a/src/_nebari/stages/nebari_tf_extensions/__init__.py b/src/_nebari/stages/nebari_tf_extensions/__init__.py index eaaf131117..b589f5fb8f 100644 --- a/src/_nebari/stages/nebari_tf_extensions/__init__.py +++ b/src/_nebari/stages/nebari_tf_extensions/__init__.py @@ -72,6 +72,9 @@ def input_vars(self, stage_outputs: Dict[str, Dict[str, Any]]): "stages/05-kubernetes-keycloak" ]["keycloak_nebari_bot_password"]["value"], "helm_extensions": [_.model_dump() for _ in self.config.helm_extensions], + "forwardauth_middleware_name": stage_outputs[ + "stages/07-kubernetes-services" + ]["forward-auth-middleware"]["value"]["name"], } diff --git a/src/_nebari/stages/nebari_tf_extensions/template/modules/nebariextension/locals.tf b/src/_nebari/stages/nebari_tf_extensions/template/modules/nebariextension/locals.tf index 4c5f0de3e7..b3616d4d29 100644 --- a/src/_nebari/stages/nebari_tf_extensions/template/modules/nebariextension/locals.tf +++ b/src/_nebari/stages/nebari_tf_extensions/template/modules/nebariextension/locals.tf @@ -1,6 +1,6 @@ locals { middlewares = (var.private) ? 
([{ - name = "traefik-forward-auth" + name = var.forwardauth_middleware_name namespace = var.namespace }]) : ([]) diff --git a/src/_nebari/stages/nebari_tf_extensions/template/modules/nebariextension/variables.tf b/src/_nebari/stages/nebari_tf_extensions/template/modules/nebariextension/variables.tf index 071c11ffbd..9a255ff5e1 100644 --- a/src/_nebari/stages/nebari_tf_extensions/template/modules/nebariextension/variables.tf +++ b/src/_nebari/stages/nebari_tf_extensions/template/modules/nebariextension/variables.tf @@ -70,3 +70,8 @@ variable "keycloak_nebari_bot_password" { type = string default = "" } + +variable "forwardauth_middleware_name" { + description = "Name of the traefik forward auth middleware" + type = string +} diff --git a/src/_nebari/stages/nebari_tf_extensions/template/tf-extensions.tf b/src/_nebari/stages/nebari_tf_extensions/template/tf-extensions.tf index dd87639393..915b78879e 100644 --- a/src/_nebari/stages/nebari_tf_extensions/template/tf-extensions.tf +++ b/src/_nebari/stages/nebari_tf_extensions/template/tf-extensions.tf @@ -16,6 +16,7 @@ module "extension" { nebari-realm-id = var.realm_id keycloak_nebari_bot_password = each.value.keycloakadmin ? 
var.keycloak_nebari_bot_password : "" + forwardauth_middleware_name = var.forwardauth_middleware_name envs = lookup(each.value, "envs", []) } diff --git a/src/_nebari/stages/nebari_tf_extensions/template/variables.tf b/src/_nebari/stages/nebari_tf_extensions/template/variables.tf index 144a6049cb..e17d86ffca 100644 --- a/src/_nebari/stages/nebari_tf_extensions/template/variables.tf +++ b/src/_nebari/stages/nebari_tf_extensions/template/variables.tf @@ -31,3 +31,8 @@ variable "helm_extensions" { variable "keycloak_nebari_bot_password" { description = "Keycloak password for nebari-bot" } + +variable "forwardauth_middleware_name" { + description = "Name of the traefik forward auth middleware" + type = string +} From e59a90fda0cc01802c48549b9b714dbe9a5b9b5d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20Krassowski?= <5832902+krassowski@users.noreply.github.com> Date: Thu, 23 May 2024 14:13:41 +0100 Subject: [PATCH 20/32] Fix test using a non-specific selector (#2475) --- tests/common/navigator.py | 29 +++++++++++++++++++---------- tests/common/run_notebook.py | 2 +- 2 files changed, 20 insertions(+), 11 deletions(-) diff --git a/tests/common/navigator.py b/tests/common/navigator.py index 12a1445bd5..f846d9a545 100644 --- a/tests/common/navigator.py +++ b/tests/common/navigator.py @@ -256,7 +256,7 @@ def reset_workspace(self): self._set_environment_via_popup(kernel=None) # go to Kernel menu - kernel_menuitem = self.page.get_by_text("Kernel", exact=True) + kernel_menuitem = self.page.get_by_role("menuitem", name="Kernel", exact=True) kernel_menuitem.click() # shut down multiple running kernels with contextlib.suppress(Exception): @@ -320,14 +320,23 @@ def _set_environment_via_popup(self, kernel=None): # failure here indicates that the environment doesn't exist either # because of incorrect naming syntax or because the env is still # being built - self.page.get_by_role("combobox").nth(1).select_option(kernel) - # click Select to close popup (deal with the two 
formats of this dialog) - try: - self.page.get_by_role("button", name="Select Kernel").click() - except Exception: - self.page.locator("div").filter(has_text="No KernelSelect").get_by_role( - "button", name="Select Kernel" - ).click() + + new_launcher_popup = self.page.locator( + ".jp-KernelSelector-Dialog .jp-NewLauncher-table table" + ).nth(0) + if new_launcher_popup.is_visible(): + # for when the jupyterlab-new-launcher extension is installed + new_launcher_popup.locator("td").nth(0).click() + else: + # for when only the native launcher is available + self.page.get_by_role("combobox").nth(1).select_option(kernel) + # click Select to close popup (deal with the two formats of this dialog) + try: + self.page.get_by_role("button", name="Select Kernel").click() + except Exception: + self.page.locator("div").filter( + has_text="No KernelSelect" + ).get_by_role("button", name="Select Kernel").click() def set_environment(self, kernel): """Set environment of a jupyter notebook. @@ -350,7 +359,7 @@ def set_environment(self, kernel): popup = self._check_for_kernel_popup() # if there is not a kernel popup, make it appear if not popup: - self.page.get_by_text("Kernel", exact=True).click() + self.page.get_by_role("menuitem", name="Kernel", exact=True).click() self.page.get_by_role("menuitem", name="Change Kernel…").get_by_text( "Change Kernel…" ).click() diff --git a/tests/common/run_notebook.py b/tests/common/run_notebook.py index 10d28d6637..019fd26710 100644 --- a/tests/common/run_notebook.py +++ b/tests/common/run_notebook.py @@ -212,7 +212,7 @@ def _get_outputs(self) -> List[str]: def _restart_run_all(self): # restart run all cells - self.nav.page.get_by_text("Kernel", exact=True).click() + self.nav.page.get_by_role("menuitem", name="Kernel", exact=True).click() self.nav.page.get_by_role( "menuitem", name="Restart Kernel and Run All Cells…" ).get_by_text("Restart Kernel and Run All Cells…").click() From a503572b01aeb7834c48d2c6c913bd5eb1b3463a Mon Sep 17 00:00:00 2001 
From: Adam Lewis <23342526+Adam-D-Lewis@users.noreply.github.com> Date: Thu, 23 May 2024 09:33:01 -0500 Subject: [PATCH 21/32] add verify=false since we use self signed cert in tests (#2481) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Michał Krassowski <5832902+krassowski@users.noreply.github.com> --- tests/tests_deployment/utils.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/tests_deployment/utils.py b/tests/tests_deployment/utils.py index d175a2dd05..f37523d920 100644 --- a/tests/tests_deployment/utils.py +++ b/tests/tests_deployment/utils.py @@ -37,6 +37,7 @@ def get_jupyterhub_token(note="jupyterhub-tests-deployment"): f"https://{constants.NEBARI_HOSTNAME}/hub/api/users/{constants.KEYCLOAK_USERNAME}/tokens", headers=headers, json=data, + verify=False, ) return r.json()["token"] From 86e50fbd86c0f2ff9b9d6f413e695c2dfc46db62 Mon Sep 17 00:00:00 2001 From: Marcelo Villa Date: Thu, 23 May 2024 11:49:49 -0400 Subject: [PATCH 22/32] Use textwrap.dedent instead of custom wrapper --- src/_nebari/upgrade.py | 102 +++++++++++++++++++---------------------- 1 file changed, 48 insertions(+), 54 deletions(-) diff --git a/src/_nebari/upgrade.py b/src/_nebari/upgrade.py index 013bf3f64b..e812f3cb40 100644 --- a/src/_nebari/upgrade.py +++ b/src/_nebari/upgrade.py @@ -788,69 +788,63 @@ def _version_specific_upgrade( class Upgrade_2024_5_2(UpgradeStep): version = "2024.5.2" - @staticmethod - def _wrap(s): - return "\n".join("\n".join(textwrap.wrap(x)) for x in s.splitlines()) def _version_specific_upgrade( self, config, start_version, config_filename: Path, *args, **kwargs ): - if provider := config.get("provider", ""): - if provider == ProviderEnum.gcp.value: - provider_full_name = provider_enum_name_map[provider] - if not config.get(provider_full_name, {}).get("node_groups", {}): - try: - text = f""" -The default node groups for GCP have been changed to cost efficient e2 family nodes reducing the running 
cost of Nebari on GCP by ~50%. \ -This change will affect your current deployment, and will result in ~15 minutes of downtime during the upgrade step as the node groups are switched out, but shouldn't result in data loss. - -As always, make sure to backup data before upgrading. See https://www.nebari.dev/docs/how-tos/manual-backup for more information. - -Would you like to upgrade to the cost effective node groups [purple]{config_filename}[/purple]? -If not, select "N" and the old default node groups will be added to the nebari config file. -""".rstrip() - wrapped_text = self._wrap(text) - continue_ = Prompt.ask( - wrapped_text, - choices=["y", "N"], - default="y", - ) - if continue_ == "N": - config[provider_full_name]["node_groups"] = { - "general": { - "instance": "n1-standard-8", - "min_nodes": 1, - "max_nodes": 1, - }, - "user": { - "instance": "n1-standard-4", - "min_nodes": 0, - "max_nodes": 5, - }, - "worker": { - "instance": "n1-standard-4", - "min_nodes": 0, - "max_nodes": 5, - }, - } - except KeyError: - pass - else: + if (provider := config.get("provider", "")) == ProviderEnum.gcp.value: + provider_full_name = provider_enum_name_map[provider] + if not config.get(provider_full_name, {}).get("node_groups", {}): + try: text = f""" -The default node groups for GCP have been changed to cost efficient e2 family nodes reducing the running cost of Nebari on GCP by ~50%. -Consider upgrading your node group instance types to the new default configuration. + The default node groups for GCP have been changed to cost efficient e2 family nodes reducing the running cost of Nebari on GCP by ~50%. \ + This change will affect your current deployment, and will result in ~15 minutes of downtime during the upgrade step as the node groups are switched out, but shouldn't result in data loss. + + As always, make sure to backup data before upgrading. See https://www.nebari.dev/docs/how-tos/manual-backup for more information. 
+ + Would you like to upgrade to the cost effective node groups [purple]{config_filename}[/purple]? + If not, select "N" and the old default node groups will be added to the nebari config file. + """ + continue_ = Prompt.ask( + textwrap.dedent(text), + choices=["y", "N"], + default="y", + ) + if continue_ == "N": + config[provider_full_name]["node_groups"] = { + "general": { + "instance": "n1-standard-8", + "min_nodes": 1, + "max_nodes": 1, + }, + "user": { + "instance": "n1-standard-4", + "min_nodes": 0, + "max_nodes": 5, + }, + "worker": { + "instance": "n1-standard-4", + "min_nodes": 0, + "max_nodes": 5, + }, + } + except KeyError: + pass + else: + text = f""" + The default node groups for GCP have been changed to cost efficient e2 family nodes reducing the running cost of Nebari on GCP by ~50%. + Consider upgrading your node group instance types to the new default configuration. -Upgrading your general node will result in ~15 minutes of downtime during the upgrade step as the node groups are switched out, but shouldn't result in data loss. + Upgrading your general node will result in ~15 minutes of downtime during the upgrade step as the node groups are switched out, but shouldn't result in data loss. -As always, make sure to backup data before upgrading. See https://www.nebari.dev/docs/how-tos/manual-backup for more information. + As always, make sure to backup data before upgrading. See https://www.nebari.dev/docs/how-tos/manual-backup for more information. 
-The new default node groups instances are: -{json.dumps({'general': {'instance': 'e2-highmem-4'}, 'user': {'instance': 'e2-standard-4'}, 'worker': {'instance': 'e2-standard-4'}}, indent=4)} + The new default node groups instances are: + {json.dumps({'general': {'instance': 'e2-highmem-4'}, 'user': {'instance': 'e2-standard-4'}, 'worker': {'instance': 'e2-standard-4'}}, indent=4)} -Hit enter to continue -""".rstrip() - wrapped_text = self._wrap(text) - Prompt.ask(wrapped_text) + Hit enter to continue + """ + Prompt.ask(textwrap.dedent(text)) return config From 9555dbce34f43500a25ddbea7c7c7c0223914e3e Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 23 May 2024 15:49:58 +0000 Subject: [PATCH 23/32] [pre-commit.ci] Apply automatic pre-commit fixes --- src/_nebari/upgrade.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/_nebari/upgrade.py b/src/_nebari/upgrade.py index e812f3cb40..947bb51fc1 100644 --- a/src/_nebari/upgrade.py +++ b/src/_nebari/upgrade.py @@ -788,7 +788,6 @@ def _version_specific_upgrade( class Upgrade_2024_5_2(UpgradeStep): version = "2024.5.2" - def _version_specific_upgrade( self, config, start_version, config_filename: Path, *args, **kwargs ): From fb3bbd3dee2be5a226932a6486663b4ce918a65b Mon Sep 17 00:00:00 2001 From: Adam Lewis <23342526+Adam-D-Lewis@users.noreply.github.com> Date: Thu, 23 May 2024 14:31:35 -0500 Subject: [PATCH 24/32] fix forward auth when using custom cert (#2479) Co-authored-by: Vinicius D. 
Cerutti <51954708+viniciusdc@users.noreply.github.com> --- .../stages/kubernetes_services/__init__.py | 6 ++++ .../template/forward-auth.tf | 6 ++++ .../modules/kubernetes/forwardauth/main.tf | 30 ++++++++++++++++++- .../kubernetes/forwardauth/variables.tf | 5 ++++ 4 files changed, 46 insertions(+), 1 deletion(-) diff --git a/src/_nebari/stages/kubernetes_services/__init__.py b/src/_nebari/stages/kubernetes_services/__init__.py index 3c9f19a064..fae8955de1 100644 --- a/src/_nebari/stages/kubernetes_services/__init__.py +++ b/src/_nebari/stages/kubernetes_services/__init__.py @@ -331,6 +331,7 @@ class KubernetesServicesInputVars(schema.Base): node_groups: Dict[str, Dict[str, str]] jupyterhub_logout_redirect_url: str = Field(alias="jupyterhub-logout-redirect-url") forwardauth_middleware_name: str = _forwardauth_middleware_name + cert_secret_name: Optional[str] = None def _split_docker_image_name(image_name): @@ -491,6 +492,11 @@ def input_vars(self, stage_outputs: Dict[str, Dict[str, Any]]): realm_id=realm_id, node_groups=stage_outputs["stages/02-infrastructure"]["node_selectors"], jupyterhub_logout_redirect_url=final_logout_uri, + cert_secret_name=( + self.config.certificate.secret_name + if self.config.certificate.type == "existing" + else None + ), ) conda_store_vars = CondaStoreInputVars( diff --git a/src/_nebari/stages/kubernetes_services/template/forward-auth.tf b/src/_nebari/stages/kubernetes_services/template/forward-auth.tf index 6ff9ac45b1..2d98bf3e6a 100644 --- a/src/_nebari/stages/kubernetes_services/template/forward-auth.tf +++ b/src/_nebari/stages/kubernetes_services/template/forward-auth.tf @@ -7,6 +7,7 @@ module "forwardauth" { node-group = var.node_groups.general forwardauth_middleware_name = var.forwardauth_middleware_name + cert_secret_name = var.cert_secret_name } variable "forwardauth_middleware_name" { @@ -14,6 +15,11 @@ variable "forwardauth_middleware_name" { type = string } +variable "cert_secret_name" { + description = "Name of the secret 
containing the certificate" + type = string +} + output "forward-auth-middleware" { description = "middleware name for use with forward auth" value = module.forwardauth.forward-auth-middleware diff --git a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/forwardauth/main.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/forwardauth/main.tf index 2fe1f2d0a0..564d397d1a 100644 --- a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/forwardauth/main.tf +++ b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/forwardauth/main.tf @@ -59,7 +59,19 @@ resource "kubernetes_deployment" "forwardauth-deployment" { node_selector = { "${var.node-group.key}" = var.node-group.value } - + dynamic "volume" { + for_each = var.cert_secret_name == null ? [] : [1] + content { + name = "cert-volume" + secret { + secret_name = var.cert_secret_name + items { + key = "tls.crt" + path = "tls.crt" + } + } + } + } container { # image = "thomseddon/traefik-forward-auth:2.2.0" # Use PR #159 https://github.com/thomseddon/traefik-forward-auth/pull/159 @@ -125,10 +137,26 @@ resource "kubernetes_deployment" "forwardauth-deployment" { value = var.external-url } + dynamic "env" { + for_each = var.cert_secret_name == null ? [] : [1] + content { + name = "SSL_CERT_FILE" + value = "/config/tls.crt" + } + } + port { container_port = 4181 } + dynamic "volume_mount" { + for_each = var.cert_secret_name == null ? 
[] : [1] + content { + name = "cert-volume" + mount_path = "/config" + read_only = true + } + } } } diff --git a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/forwardauth/variables.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/forwardauth/variables.tf index 212238bc76..ae53c5b3a1 100644 --- a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/forwardauth/variables.tf +++ b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/forwardauth/variables.tf @@ -31,3 +31,8 @@ variable "forwardauth_middleware_name" { description = "Name of the traefik forward auth middleware" type = string } + +variable "cert_secret_name" { + description = "Name of the secret containing the certificate" + type = string +} From fbb888fdd4562d59d04d94190e413229840acb22 Mon Sep 17 00:00:00 2001 From: Marcelo Villa Date: Thu, 23 May 2024 17:53:50 -0500 Subject: [PATCH 25/32] Add json dump after dedenting text. --- src/_nebari/upgrade.py | 26 ++++++++++++++++++-------- 1 file changed, 18 insertions(+), 8 deletions(-) diff --git a/src/_nebari/upgrade.py b/src/_nebari/upgrade.py index 947bb51fc1..e1379df07b 100644 --- a/src/_nebari/upgrade.py +++ b/src/_nebari/upgrade.py @@ -795,8 +795,9 @@ def _version_specific_upgrade( provider_full_name = provider_enum_name_map[provider] if not config.get(provider_full_name, {}).get("node_groups", {}): try: - text = f""" - The default node groups for GCP have been changed to cost efficient e2 family nodes reducing the running cost of Nebari on GCP by ~50%. \ + text = textwrap.dedent( + f""" + The default node groups for GCP have been changed to cost efficient e2 family nodes reducing the running cost of Nebari on GCP by ~50%. This change will affect your current deployment, and will result in ~15 minutes of downtime during the upgrade step as the node groups are switched out, but shouldn't result in data loss. As always, make sure to backup data before upgrading. 
See https://www.nebari.dev/docs/how-tos/manual-backup for more information. @@ -804,8 +805,9 @@ def _version_specific_upgrade( Would you like to upgrade to the cost effective node groups [purple]{config_filename}[/purple]? If not, select "N" and the old default node groups will be added to the nebari config file. """ + ) continue_ = Prompt.ask( - textwrap.dedent(text), + text, choices=["y", "N"], default="y", ) @@ -830,7 +832,8 @@ def _version_specific_upgrade( except KeyError: pass else: - text = f""" + text = textwrap.dedent( + """ The default node groups for GCP have been changed to cost efficient e2 family nodes reducing the running cost of Nebari on GCP by ~50%. Consider upgrading your node group instance types to the new default configuration. @@ -839,11 +842,18 @@ def _version_specific_upgrade( As always, make sure to backup data before upgrading. See https://www.nebari.dev/docs/how-tos/manual-backup for more information. The new default node groups instances are: - {json.dumps({'general': {'instance': 'e2-highmem-4'}, 'user': {'instance': 'e2-standard-4'}, 'worker': {'instance': 'e2-standard-4'}}, indent=4)} - - Hit enter to continue """ - Prompt.ask(textwrap.dedent(text)) + ) + text += json.dumps( + { + "general": {"instance": "e2-highmem-4"}, + "user": {"instance": "e2-standard-4"}, + "worker": {"instance": "e2-standard-4"}, + }, + indent=4, + ) + text += "\n\nHit enter to continue" + Prompt.ask(text) return config From 4b5ff03cd8a6c5eca91947d9f2bd70c93a67986f Mon Sep 17 00:00:00 2001 From: Marcelo Villa Date: Thu, 23 May 2024 17:55:52 -0500 Subject: [PATCH 26/32] Update upgrade version number as it will likely be happening in June and not May. 
--- src/_nebari/upgrade.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/_nebari/upgrade.py b/src/_nebari/upgrade.py index e1379df07b..e35d7ea309 100644 --- a/src/_nebari/upgrade.py +++ b/src/_nebari/upgrade.py @@ -785,8 +785,8 @@ def _version_specific_upgrade( return config -class Upgrade_2024_5_2(UpgradeStep): - version = "2024.5.2" +class Upgrade_2024_6_1(UpgradeStep): + version = "2024.6.1" def _version_specific_upgrade( self, config, start_version, config_filename: Path, *args, **kwargs From 3c2d26dcdad207718f6d0360a33f878fd1a6aced Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20Krassowski?= <5832902+krassowski@users.noreply.github.com> Date: Tue, 28 May 2024 09:04:22 +0100 Subject: [PATCH 27/32] Use Helm Chart for JupyterHub 5.0.0 final (#2484) --- .../template/modules/kubernetes/services/jupyterhub/main.tf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/main.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/main.tf index c622ee9a54..fe7716cf88 100644 --- a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/main.tf +++ b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/main.tf @@ -57,7 +57,7 @@ resource "helm_release" "jupyterhub" { repository = "https://jupyterhub.github.io/helm-chart/" chart = "jupyterhub" - version = "4.0.0-0.dev.git.6607.hd1a1130e" + version = "4.0.0-0.dev.git.6619.hd126b1bd" values = concat([ file("${path.module}/values.yaml"), From 363cb0d92c72e13fee84312a7ae19e1054d95326 Mon Sep 17 00:00:00 2001 From: Amit Kumar Date: Tue, 28 May 2024 14:59:45 +0100 Subject: [PATCH 28/32] Parse and insert keycloak roles scopes into JupyterHub (#2471) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: pre-commit-ci[bot] 
<66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Michał Krassowski <5832902+krassowski@users.noreply.github.com> --- .../jupyterhub/files/jupyterhub/04-auth.py | 135 ++++++++++++++++-- tests/tests_deployment/conftest.py | 11 ++ tests/tests_deployment/keycloak_utils.py | 96 +++++++++++++ tests/tests_deployment/test_jupyterhub_api.py | 54 ++++++- tests/tests_deployment/utils.py | 9 +- 5 files changed, 286 insertions(+), 19 deletions(-) create mode 100644 tests/tests_deployment/conftest.py create mode 100644 tests/tests_deployment/keycloak_utils.py diff --git a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/files/jupyterhub/04-auth.py b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/files/jupyterhub/04-auth.py index 082268a107..bc6fb6a721 100644 --- a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/files/jupyterhub/04-auth.py +++ b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/files/jupyterhub/04-auth.py @@ -1,8 +1,10 @@ import json import os +import time import urllib from functools import reduce +from jupyterhub import scopes from jupyterhub.traitlets import Callable from oauthenticator.generic import GenericOAuthenticator from traitlets import Bool, Unicode, Union @@ -28,24 +30,72 @@ class KeyCloakOAuthenticator(GenericOAuthenticator): reset_managed_roles_on_startup = Bool(True) async def update_auth_model(self, auth_model): + """Updates and returns the auth_model dict. + This function is called every time a user authenticates with JupyterHub, as in + every time a user login to Nebari. + + It will fetch the roles and their corresponding scopes from keycloak + and return updated auth model which will updates roles/scopes for the + user. When a user's roles/scopes are updated, they take in-affect only + after they log in to Nebari. 
+ """ + start = time.time() + self.log.info("Updating user auth model") auth_model = await super().update_auth_model(auth_model) + user_id = auth_model["auth_state"]["oauth_user"]["sub"] + token = await self._get_token() + + jupyterhub_client_id = await self._get_jupyterhub_client_id(token=token) user_info = auth_model["auth_state"][self.user_auth_state_key] - user_roles = self._get_user_roles(user_info) - auth_model["roles"] = [{"name": role_name} for role_name in user_roles] + user_roles_from_claims = self._get_user_roles(user_info=user_info) + keycloak_api_call_start = time.time() + user_roles = await self._get_client_roles_for_user( + user_id=user_id, client_id=jupyterhub_client_id, token=token + ) + user_roles_rich = await self._get_roles_with_attributes( + roles=user_roles, client_id=jupyterhub_client_id, token=token + ) + keycloak_api_call_time_taken = time.time() - keycloak_api_call_start + user_roles_rich_names = {role["name"] for role in user_roles_rich} + user_roles_non_jhub_client = [ + {"name": role} + for role in user_roles_from_claims + if role in (user_roles_from_claims - user_roles_rich_names) + ] + auth_model["roles"] = [ + { + "name": role["name"], + "description": role.get("description"), + "scopes": self._get_scope_from_role(role), + } + for role in [*user_roles_rich, *user_roles_non_jhub_client] + ] # note: because the roles check is comprehensive, we need to re-add the admin and user roles if auth_model["admin"]: auth_model["roles"].append({"name": "admin"}) - if self.check_allowed(auth_model["name"], auth_model): + if await self.check_allowed(auth_model["name"], auth_model): auth_model["roles"].append({"name": "user"}) + execution_time = time.time() - start + self.log.info( + f"Auth model update complete, time taken: {execution_time}s " + f"time taken for keycloak api call: {keycloak_api_call_time_taken}s " + f"delta between full execution and keycloak call: {execution_time - keycloak_api_call_time_taken}s" + ) return auth_model - async def 
load_managed_roles(self): - if not self.manage_roles: - raise ValueError( - "Managed roles can only be loaded when `manage_roles` is True" - ) - token = await self._get_token() + async def _get_jupyterhub_client_roles(self, jupyterhub_client_id, token): + """Get roles for the client named 'jupyterhub'.""" + # Includes roles like "jupyterhub_admin", "jupyterhub_developer", "dask_gateway_developer" + + client_roles = await self._fetch_api( + endpoint=f"clients/{jupyterhub_client_id}/roles", token=token + ) + client_roles_rich = await self._get_roles_with_attributes( + client_roles, client_id=jupyterhub_client_id, token=token + ) + return client_roles_rich + async def _get_jupyterhub_client_id(self, token): # Get the clients list to find the "id" of "jupyterhub" client. clients_data = await self._fetch_api(endpoint="clients/", token=token) jupyterhub_clients = [ @@ -53,16 +103,28 @@ async def load_managed_roles(self): ] assert len(jupyterhub_clients) == 1 jupyterhub_client_id = jupyterhub_clients[0]["id"] + return jupyterhub_client_id - # Includes roles like "jupyterhub_admin", "jupyterhub_developer", "dask_gateway_developer" - client_roles = await self._fetch_api( - endpoint=f"clients/{jupyterhub_client_id}/roles", token=token + async def load_managed_roles(self): + self.log.info("Loading managed roles") + if not self.manage_roles: + raise ValueError( + "Managed roles can only be loaded when `manage_roles` is True" + ) + token = await self._get_token() + jupyterhub_client_id = await self._get_jupyterhub_client_id(token=token) + client_roles_rich = await self._get_jupyterhub_client_roles( + jupyterhub_client_id=jupyterhub_client_id, token=token ) # Includes roles like "default-roles-nebari", "offline_access", "uma_authorization" realm_roles = await self._fetch_api(endpoint="roles", token=token) roles = { - role["name"]: {"name": role["name"], "description": role["description"]} - for role in [*realm_roles, *client_roles] + role["name"]: { + "name": role["name"], + 
"description": role["description"], + "scopes": self._get_scope_from_role(role), + } + for role in [*realm_roles, *client_roles_rich] } # we could use either `name` (e.g. "developer") or `path` ("/developer"); # since the default claim key returns `path`, it seems preferable. @@ -76,7 +138,7 @@ async def load_managed_roles(self): # fetch role assignments to users users = await self._fetch_api(f"roles/{role_name}/users", token=token) role["users"] = [user["username"] for user in users] - for client_role in client_roles: + for client_role in client_roles_rich: role_name = client_role["name"] role = roles[role_name] # fetch role assignments to groups @@ -92,6 +154,49 @@ async def load_managed_roles(self): return list(roles.values()) + def _get_scope_from_role(self, role): + """Return scopes from role if the component is jupyterhub""" + role_scopes = role.get("attributes", {}).get("scopes", []) + component = role.get("attributes", {}).get("component") + # Attributes are returned as a single-element array, unless `##` delimiter is used in Keycloak + # See this: https://stackoverflow.com/questions/68954733/keycloak-client-role-attribute-array + if component == ["jupyterhub"] and role_scopes: + return self.validate_scopes(role_scopes[0].split(",")) + else: + return [] + + def validate_scopes(self, role_scopes): + """Validate role scopes to sanity check user provided scopes from keycloak""" + self.log.info(f"Validating role scopes: {role_scopes}") + try: + # This is not a public function, but there isn't any alternative + # method to verify scopes, and we do need to do this sanity check + # as a invalid scopes could cause hub pod to fail + scopes._check_scopes_exist(role_scopes) + return role_scopes + except scopes.ScopeNotFound as e: + self.log.error(f"Invalid scopes, skipping: {role_scopes} ({e})") + return [] + + async def _get_roles_with_attributes(self, roles: dict, client_id: str, token: str): + """This fetches all roles by id to fetch there attributes.""" + 
roles_rich = [] + for role in roles: + # If this takes too much time, which isn't the case right now, we can + # also do multi-threaded requests + role_rich = await self._fetch_api( + endpoint=f"roles-by-id/{role['id']}?client={client_id}", token=token + ) + roles_rich.append(role_rich) + return roles_rich + + async def _get_client_roles_for_user(self, user_id, client_id, token): + user_roles = await self._fetch_api( + endpoint=f"users/{user_id}/role-mappings/clients/{client_id}/composite", + token=token, + ) + return user_roles + def _get_user_roles(self, user_info): if callable(self.claim_roles_key): return set(self.claim_roles_key(user_info)) diff --git a/tests/tests_deployment/conftest.py b/tests/tests_deployment/conftest.py new file mode 100644 index 0000000000..fa71302823 --- /dev/null +++ b/tests/tests_deployment/conftest.py @@ -0,0 +1,11 @@ +import pytest + +from tests.tests_deployment.keycloak_utils import delete_client_keycloak_test_roles + + +@pytest.fixture() +def cleanup_keycloak_roles(): + # setup + yield + # teardown + delete_client_keycloak_test_roles(client_name="jupyterhub") diff --git a/tests/tests_deployment/keycloak_utils.py b/tests/tests_deployment/keycloak_utils.py new file mode 100644 index 0000000000..6e6f6c21e6 --- /dev/null +++ b/tests/tests_deployment/keycloak_utils.py @@ -0,0 +1,96 @@ +import os +import pathlib + +from _nebari.config import read_configuration +from _nebari.keycloak import get_keycloak_admin_from_config +from nebari.plugins import nebari_plugin_manager + + +def get_keycloak_client_details_by_name(client_name, keycloak_admin=None): + if not keycloak_admin: + keycloak_admin = get_keycloak_admin() + clients = keycloak_admin.get_clients() + for client in clients: + if client["clientId"] == client_name: + return client + + +def get_keycloak_user_details_by_name(username, keycloak_admin=None): + if not keycloak_admin: + keycloak_admin = get_keycloak_admin() + users = keycloak_admin.get_users() + for user in users: + if 
user["username"] == username: + return user + + +def get_keycloak_role_details_by_name(roles, role_name): + for role in roles: + if role["name"] == role_name: + return role + + +def get_keycloak_admin(): + config_schema = nebari_plugin_manager.config_schema + config_filepath = os.environ.get("NEBARI_CONFIG_PATH", "nebari-config.yaml") + assert pathlib.Path(config_filepath).exists() + config = read_configuration(config_filepath, config_schema) + return get_keycloak_admin_from_config(config) + + +def create_keycloak_client_role( + client_id: str, role_name: str, scopes: str, component: str +): + keycloak_admin = get_keycloak_admin() + keycloak_admin.create_client_role( + client_id, + payload={ + "name": role_name, + "description": f"{role_name} description", + "attributes": {"scopes": [scopes], "component": [component]}, + }, + ) + client_roles = keycloak_admin.get_client_roles(client_id=client_id) + return get_keycloak_role_details_by_name(client_roles, role_name) + + +def assign_keycloak_client_role_to_user(username: str, client_name: str, role: dict): + """Given a keycloak role and client name, assign that to the user""" + keycloak_admin = get_keycloak_admin() + user_details = get_keycloak_user_details_by_name( + username=username, keycloak_admin=keycloak_admin + ) + client_details = get_keycloak_client_details_by_name( + client_name=client_name, keycloak_admin=keycloak_admin + ) + keycloak_admin.assign_client_role( + user_id=user_details["id"], client_id=client_details["id"], roles=[role] + ) + + +def create_keycloak_role(client_name: str, role_name: str, scopes: str, component: str): + """Create a role keycloak role for the given client with scopes and + component set in attributes + """ + keycloak_admin = get_keycloak_admin() + client_details = get_keycloak_client_details_by_name( + client_name=client_name, keycloak_admin=keycloak_admin + ) + return create_keycloak_client_role( + client_details["id"], role_name=role_name, scopes=scopes, component=component + ) 
+ + +def delete_client_keycloak_test_roles(client_name): + keycloak_admin = get_keycloak_admin() + client_details = get_keycloak_client_details_by_name( + client_name=client_name, keycloak_admin=keycloak_admin + ) + client_roles = keycloak_admin.get_client_roles(client_id=client_details["id"]) + for role in client_roles: + if not role["name"].startswith("test"): + continue + keycloak_admin.delete_client_role( + client_role_id=client_details["id"], + role_name=role["name"], + ) diff --git a/tests/tests_deployment/test_jupyterhub_api.py b/tests/tests_deployment/test_jupyterhub_api.py index 68fa70c1d7..5e1a54562b 100644 --- a/tests/tests_deployment/test_jupyterhub_api.py +++ b/tests/tests_deployment/test_jupyterhub_api.py @@ -1,7 +1,11 @@ import pytest from tests.tests_deployment import constants -from tests.tests_deployment.utils import get_jupyterhub_session +from tests.tests_deployment.keycloak_utils import ( + assign_keycloak_client_role_to_user, + create_keycloak_role, +) +from tests.tests_deployment.utils import create_jupyterhub_token, get_jupyterhub_session @pytest.mark.filterwarnings("ignore::urllib3.exceptions.InsecureRequestWarning") @@ -29,6 +33,54 @@ def test_jupyterhub_loads_roles_from_keycloak(): } +@pytest.mark.parametrize( + "component,scopes,expected_scopes_difference", + ( + [ + "jupyterhub", + "read:users:shares,read:groups:shares,users:shares", + {"read:groups:shares", "users:shares", "read:users:shares"}, + ], + ["invalid-component", "read:users:shares,read:groups:shares,users:shares", {}], + ["invalid-component", "admin:invalid-scope", {}], + ), +) +@pytest.mark.filterwarnings("ignore::urllib3.exceptions.InsecureRequestWarning") +@pytest.mark.filterwarnings( + "ignore:.*auto_refresh_token is deprecated:DeprecationWarning" +) +def test_keycloak_roles_attributes_parsed_as_jhub_scopes( + component, scopes, expected_scopes_difference, cleanup_keycloak_roles +): + # check token scopes before role creation and assignment + token_response_before = 
create_jupyterhub_token( + note="before-role-creation-and-assignment" + ) + token_scopes_before = set(token_response_before.json()["scopes"]) + # create keycloak role with jupyterhub scopes in attributes + role = create_keycloak_role( + client_name="jupyterhub", + # Note: we're clearing this role after every test case, and we're clearing + # it by name, so it must start with test- to be deleted afterward + role_name="test-custom-role", + scopes=scopes, + component=component, + ) + assert role + # assign created role to the user + assign_keycloak_client_role_to_user( + constants.KEYCLOAK_USERNAME, client_name="jupyterhub", role=role + ) + token_response_after = create_jupyterhub_token( + note="after-role-creation-and-assignment" + ) + token_scopes_after = set(token_response_after.json()["scopes"]) + # verify new scopes added/removed + expected_scopes_difference = token_scopes_after - token_scopes_before + # Comparing token scopes for the user before and after role assignment + assert expected_scopes_difference == expected_scopes_difference + + @pytest.mark.filterwarnings("ignore::urllib3.exceptions.InsecureRequestWarning") def test_jupyterhub_loads_groups_from_keycloak(): session = get_jupyterhub_session() diff --git a/tests/tests_deployment/utils.py b/tests/tests_deployment/utils.py index f37523d920..b0965dd1ae 100644 --- a/tests/tests_deployment/utils.py +++ b/tests/tests_deployment/utils.py @@ -26,21 +26,24 @@ def get_jupyterhub_session(): return session -def get_jupyterhub_token(note="jupyterhub-tests-deployment"): +def create_jupyterhub_token(note): session = get_jupyterhub_session() xsrf_token = session.cookies.get("_xsrf") headers = {"Referer": f"https://{constants.NEBARI_HOSTNAME}/hub/token"} if xsrf_token: headers["X-XSRFToken"] = xsrf_token data = {"note": note, "expires_in": None} - r = session.post( + return session.post( f"https://{constants.NEBARI_HOSTNAME}/hub/api/users/{constants.KEYCLOAK_USERNAME}/tokens", headers=headers, json=data, verify=False, ) 
- return r.json()["token"] + +def get_jupyterhub_token(note="jupyterhub-tests-deployment"): + response = create_jupyterhub_token(note=note) + return response.json()["token"] def monkeypatch_ssl_context(): From 4b301a85c06920f8cfbc31e7ef4798a1b7c19a62 Mon Sep 17 00:00:00 2001 From: Fangchen Li Date: Thu, 30 May 2024 15:52:10 -0700 Subject: [PATCH 29/32] CI: add azure integration (#2061) Co-authored-by: Chuck McAndrew <6248903+dcmcand@users.noreply.github.com> Co-authored-by: Vinicius D. Cerutti <51954708+viniciusdc@users.noreply.github.com> Co-authored-by: Adam Lewis <23342526+Adam-D-Lewis@users.noreply.github.com> --- .github/workflows/test_azure_integration.yaml | 94 +++++++++++++++++++ 1 file changed, 94 insertions(+) create mode 100644 .github/workflows/test_azure_integration.yaml diff --git a/.github/workflows/test_azure_integration.yaml b/.github/workflows/test_azure_integration.yaml new file mode 100644 index 0000000000..0d3c8bd435 --- /dev/null +++ b/.github/workflows/test_azure_integration.yaml @@ -0,0 +1,94 @@ +name: test-azure-integration + +on: + schedule: + - cron: "0 0 * * MON" + pull_request: + branches: + - develop + workflow_dispatch: + inputs: + branch: + description: 'Nebari branch to deploy, test, destroy' + required: true + default: develop + type: string + image-tag: + description: 'Nebari image tag created by the nebari-docker-images repo' + required: true + default: main + type: string + tf-log-level: + description: 'Change Terraform log levels' + required: false + default: info + type: choice + options: + - info + - warn + - debug + - trace + - error + +env: + NEBARI_GH_BRANCH: ${{ github.event.inputs.branch || 'develop' }} + NEBARI_IMAGE_TAG: ${{ github.event.inputs.image-tag || 'main' }} + TF_LOG: ${{ github.event.inputs.tf-log-level || 'info' }} + +jobs: + test-azure-integration: + runs-on: ubuntu-latest + permissions: + id-token: write + contents: read + steps: + - name: Checkout + uses: actions/checkout@v3 + with: + ref: ${{ 
env.NEBARI_GH_BRANCH }} + fetch-depth: 0 + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: 3.11 + + - name: Install Nebari + run: | + pip install .[dev] + conda install --quiet --yes conda-build + playwright install + + - name: Retrieve secret from Vault + uses: hashicorp/vault-action@v2.5.0 + with: + method: jwt + url: "https://quansight-vault-public-vault-b2379fa7.d415e30e.z1.hashicorp.cloud:8200" + namespace: "admin/quansight" + role: "repository-nebari-dev-nebari-role" + secrets: | + kv/data/repository/nebari-dev/nebari/azure/nebari-dev-ci/github-nebari-dev-repo-ci client_id | ARM_CLIENT_ID; + kv/data/repository/nebari-dev/nebari/azure/nebari-dev-ci/github-nebari-dev-repo-ci tenant_id | ARM_TENANT_ID; + kv/data/repository/nebari-dev/nebari/azure/nebari-dev-ci/github-nebari-dev-repo-ci subscription_id | ARM_SUBSCRIPTION_ID; + kv/data/repository/nebari-dev/nebari/cloudflare/internal-devops@quansight.com/nebari-dev-ci token | CLOUDFLARE_TOKEN; + + - name: 'Azure login' + uses: azure/login@v2 + with: + client-id: ${{ env.ARM_CLIENT_ID }} + tenant-id: ${{ env.ARM_TENANT_ID }} + subscription-id: ${{ env.ARM_SUBSCRIPTION_ID }} + + - name: Integration Tests + run: | + pytest --version + pytest tests/tests_integration/ -vvv -s --cloud azure + env: + NEBARI_SECRET__default_images__jupyterhub: "quay.io/nebari/nebari-jupyterhub:${{ env.NEBARI_IMAGE_TAG }}" + NEBARI_SECRET__default_images__jupyterlab: "quay.io/nebari/nebari-jupyterlab:${{ env.NEBARI_IMAGE_TAG }}" + NEBARI_SECRET__default_images__dask_worker: "quay.io/nebari/nebari-dask-worker:${{ env.NEBARI_IMAGE_TAG }}" + ARM_CLIENT_ID: ${{ env.ARM_CLIENT_ID }} + ARM_TENANT_ID: ${{ env.ARM_TENANT_ID }} + ARM_SUBSCRIPTION_ID: ${{ env.ARM_SUBSCRIPTION_ID }} + ARM_USE_OIDC: "true" + CLOUDFLARE_TOKEN: ${{ env.CLOUDFLARE_TOKEN }} From 6c8b9739cb49c75632435e1a4e6deb939ff62159 Mon Sep 17 00:00:00 2001 From: Chuck McAndrew <6248903+dcmcand@users.noreply.github.com> Date: Fri, 31 May 2024 08:58:41 
-0500 Subject: [PATCH 30/32] Create trivy.yml (#2458) Co-authored-by: Vinicius D. Cerutti <51954708+viniciusdc@users.noreply.github.com> --- .github/workflows/trivy.yml | 46 +++++++++++++++++++++++++++++++++++++ 1 file changed, 46 insertions(+) create mode 100644 .github/workflows/trivy.yml diff --git a/.github/workflows/trivy.yml b/.github/workflows/trivy.yml new file mode 100644 index 0000000000..2a8bf120fa --- /dev/null +++ b/.github/workflows/trivy.yml @@ -0,0 +1,46 @@ +# This workflow uses actions that are not certified by GitHub. +# They are provided by a third-party and are governed by +# separate terms of service, privacy policy, and support +# documentation. + +name: Code Scanning + +on: + push: + branches: [ "develop", "release/*" ] + pull_request: + # The branches below must be a subset of the branches above + branches: [ "develop" ] + schedule: + - cron: '19 23 * * 6' + +permissions: + contents: read + +jobs: + SAST: + permissions: + contents: read # for actions/checkout to fetch code + security-events: write # for github/codeql-action/upload-sarif to upload SARIF results + actions: read # only required for a private repository by github/codeql-action/upload-sarif to get the Action run status + name: Trivy config Scan + runs-on: "ubuntu-20.04" + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Run Trivy vulnerability scanner in fs mode + uses: aquasecurity/trivy-action@master + with: + scan-type: 'config' + hide-progress: true + format: 'sarif' + output: 'trivy-results.sarif' + ignore-unfixed: true + severity: 'CRITICAL,HIGH' + + - name: Upload Trivy scan results to GitHub Security tab + uses: github/codeql-action/upload-sarif@v3 + if: always() + with: + sarif_file: 'trivy-results.sarif' From a7688b78311f146dad6fe59b81b519ef0db7735d Mon Sep 17 00:00:00 2001 From: Adam Lewis <23342526+Adam-D-Lewis@users.noreply.github.com> Date: Fri, 31 May 2024 10:31:21 -0500 Subject: [PATCH 31/32] don't run azure deployment on PRs, only on 
schedule and manual trigger (#2498) --- .github/workflows/test_azure_integration.yaml | 3 --- 1 file changed, 3 deletions(-) diff --git a/.github/workflows/test_azure_integration.yaml b/.github/workflows/test_azure_integration.yaml index 0d3c8bd435..a639d915f9 100644 --- a/.github/workflows/test_azure_integration.yaml +++ b/.github/workflows/test_azure_integration.yaml @@ -3,9 +3,6 @@ name: test-azure-integration on: schedule: - cron: "0 0 * * MON" - pull_request: - branches: - - develop workflow_dispatch: inputs: branch: From 2bac8aae4e7755b1a3e0a2d56b58d4b52a86b81d Mon Sep 17 00:00:00 2001 From: Adam Lewis <23342526+Adam-D-Lewis@users.noreply.github.com> Date: Fri, 31 May 2024 16:32:33 -0500 Subject: [PATCH 32/32] add cloud provider deployment status badges to README.md (#2407) Co-authored-by: Vinicius D. Cerutti <51954708+viniciusdc@users.noreply.github.com> --- .github/workflows/test_aws_integration.yaml | 2 +- .github/workflows/test_azure_integration.yaml | 2 +- .github/workflows/test_do_integration.yaml | 2 +- .github/workflows/test_gcp_integration.yaml | 2 +- README.md | 3 ++- 5 files changed, 6 insertions(+), 5 deletions(-) diff --git a/.github/workflows/test_aws_integration.yaml b/.github/workflows/test_aws_integration.yaml index 36112ccd50..b6c27c938d 100644 --- a/.github/workflows/test_aws_integration.yaml +++ b/.github/workflows/test_aws_integration.yaml @@ -1,4 +1,4 @@ -name: test-aws-integration +name: AWS Deployment on: schedule: diff --git a/.github/workflows/test_azure_integration.yaml b/.github/workflows/test_azure_integration.yaml index a639d915f9..4579fe8471 100644 --- a/.github/workflows/test_azure_integration.yaml +++ b/.github/workflows/test_azure_integration.yaml @@ -1,4 +1,4 @@ -name: test-azure-integration +name: Azure Deployment on: schedule: diff --git a/.github/workflows/test_do_integration.yaml b/.github/workflows/test_do_integration.yaml index dcfacf3175..ef0cbb2352 100644 --- a/.github/workflows/test_do_integration.yaml +++ 
b/.github/workflows/test_do_integration.yaml @@ -1,4 +1,4 @@ -name: test-gcp-integration +name: Digital Ocean Deployment on: schedule: diff --git a/.github/workflows/test_gcp_integration.yaml b/.github/workflows/test_gcp_integration.yaml index 0418e0af40..6ba1921f41 100644 --- a/.github/workflows/test_gcp_integration.yaml +++ b/.github/workflows/test_gcp_integration.yaml @@ -1,4 +1,4 @@ -name: test-gcp-integration +name: GCP Deployment on: schedule: diff --git a/README.md b/README.md index c693dfb22f..c6a81a17c4 100644 --- a/README.md +++ b/README.md @@ -14,7 +14,8 @@ | :---------- | :-----| | Project | [![License](https://img.shields.io/badge/License-BSD%203--Clause-gray.svg?colorA=2D2A56&colorB=5936D9&style=flat.svg)](https://opensource.org/licenses/BSD-3-Clause) [![Nebari documentation](https://img.shields.io/badge/%F0%9F%93%96%20Read-the%20docs-gray.svg?colorA=2D2A56&colorB=5936D9&style=flat.svg)](https://www.nebari.dev/docs/welcome) [![PyPI](https://img.shields.io/pypi/v/nebari)](https://badge.fury.io/py/nebari) [![conda version](https://img.shields.io/conda/vn/conda-forge/nebari)]((https://anaconda.org/conda-forge/nebari)) | | Community | [![GH discussions](https://img.shields.io/badge/%F0%9F%92%AC%20-Participate%20in%20discussions-gray.svg?colorA=2D2A56&colorB=5936D9&style=flat.svg)](https://github.com/nebari-dev/nebari/discussions) [![Open an issue](https://img.shields.io/badge/%F0%9F%93%9D%20Open-an%20issue-gray.svg?colorA=2D2A56&colorB=5936D9&style=flat.svg)](https://github.com/nebari-dev/nebari/issues/new/choose) [![Community guidelines](https://img.shields.io/badge/🤝%20Community-guidelines-gray.svg?colorA=2D2A56&colorB=5936D9&style=flat.svg)](https://www.nebari.dev/docs/community/) | -| CI | [![Kubernetes Tests](https://github.com/nebari-dev/nebari/actions/workflows/test_local_integration.yaml/badge.svg)](https://github.com/nebari-dev/nebari/actions/workflows/kubernetes_test.yaml) 
[![Tests](https://github.com/nebari-dev/nebari/actions/workflows/test.yaml/badge.svg)](https://github.com/nebari-dev/nebari/actions/workflows/test.yaml) [![Test Nebari Provider](https://github.com/nebari-dev/nebari/actions/workflows/test-provider.yaml/badge.svg)](https://github.com/nebari-dev/nebari/actions/workflows/test-provider.yaml) | +| CI | [![Kubernetes Tests](https://github.com/nebari-dev/nebari/actions/workflows/test_local_integration.yaml/badge.svg)](https://github.com/nebari-dev/nebari/actions/workflows/kubernetes_test.yaml) [![Tests](https://github.com/nebari-dev/nebari/actions/workflows/test.yaml/badge.svg)](https://github.com/nebari-dev/nebari/actions/workflows/test.yaml) [![Test Nebari Provider](https://github.com/nebari-dev/nebari/actions/workflows/test-provider.yaml/badge.svg)](https://github.com/nebari-dev/nebari/actions/workflows/test-provider.yaml)| +| Cloud Providers | [![AWS Deployment Status](https://github.com/nebari-dev/nebari/actions/workflows/test_aws_integration.yaml/badge.svg)](https://github.com/nebari-dev/nebari/actions/workflows/test_aws_integration.yaml) [![Azure Deployment Status](https://github.com/nebari-dev/nebari/actions/workflows/test_azure_integration.yaml/badge.svg)](https://github.com/nebari-dev/nebari/actions/workflows/test_azure_integration.yaml) [![GCP Deployment Status](https://github.com/nebari-dev/nebari/actions/workflows/test_gcp_integration.yaml/badge.svg)](https://github.com/nebari-dev/nebari/actions/workflows/test_gcp_integration.yaml) [![Digital Ocean Deployment Status](https://github.com/nebari-dev/nebari/actions/workflows/test_do_integration.yaml/badge.svg)](https://github.com/nebari-dev/nebari/actions/workflows/test_do_integration.yaml)| ## Table of contents