diff --git a/.github/workflows/generate_cli_doc.yml b/.github/workflows/generate_cli_doc.yml index c0ae6ee7d2..bc9aa6c4d5 100644 --- a/.github/workflows/generate_cli_doc.yml +++ b/.github/workflows/generate_cli_doc.yml @@ -25,10 +25,10 @@ jobs: working-directory: ./docs-sphinx steps: - name: Check out repository 🛎️ - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Set up Python - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: "3.10" diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 655bd96206..771999f51d 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -8,10 +8,11 @@ jobs: test-pypi: name: Test PyPi release runs-on: ubuntu-latest - + permissions: + id-token: write # IMPORTANT: this permission is mandatory for trusted publishing steps: - name: Set up python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.10" @@ -19,7 +20,7 @@ jobs: run: python -m pip install --upgrade pip build - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: 0 @@ -35,8 +36,6 @@ jobs: - name: Publish to test PyPI uses: pypa/gh-action-pypi-publish@release/v1 with: - user: __token__ - password: ${{ secrets.TEST_PYPI_API_TOKEN }} repository-url: https://test.pypi.org/legacy/ - name: Sleep @@ -59,7 +58,7 @@ jobs: steps: - name: Set up python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.10" @@ -67,26 +66,12 @@ jobs: run: python -m pip install --upgrade pip build - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: 0 - name: Build source and binary run: python -m build --sdist --wheel . 
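The release workflow changes above and below replace Vault-retrieved PyPI credentials with PyPI trusted publishing: the new `id-token: write` permission lets the job request a short-lived OIDC token that PyPI exchanges for upload rights, so no long-lived secrets need to be stored. A rough sketch of the token request the publish action performs under the hood (a simplification for illustration; the real `pypa/gh-action-pypi-publish` also mints the upload token and performs the upload):

```python
# Sketch: how a GitHub Actions job with `id-token: write` obtains an OIDC
# token. The two ACTIONS_* environment variables are injected by the runner
# only when that permission is granted.
import os

import requests


def fetch_github_oidc_token(audience: str = "pypi") -> str:
    url = os.environ["ACTIONS_ID_TOKEN_REQUEST_URL"]
    bearer = os.environ["ACTIONS_ID_TOKEN_REQUEST_TOKEN"]
    response = requests.get(
        url,
        params={"audience": audience},
        headers={"Authorization": f"Bearer {bearer}"},
        timeout=30,
    )
    response.raise_for_status()
    return response.json()["value"]
```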
- - name: Retrieve secret from Vault - uses: hashicorp/vault-action@v2.5.0 - with: - method: jwt - url: "https://quansight-vault-public-vault-b2379fa7.d415e30e.z1.hashicorp.cloud:8200" - namespace: "admin/quansight" - role: "repository-nebari-dev-nebari-role" - secrets: | - kv/data/repository/nebari-dev/nebari/shared_secrets PYPI_USERNAME | PYPI_USERNAME; - kv/data/repository/nebari-dev/nebari/shared_secrets PYPI_PASSWORD | PYPI_PASSWORD; - - name: Publish package uses: pypa/gh-action-pypi-publish@release/v1 - with: - user: ${{ env.PYPI_USERNAME }} - password: ${{ env.PYPI_PASSWORD }} diff --git a/.github/workflows/run-precommit.yaml b/.github/workflows/run-precommit.yaml index 50904ae178..9592a58373 100644 --- a/.github/workflows/run-precommit.yaml +++ b/.github/workflows/run-precommit.yaml @@ -17,9 +17,9 @@ jobs: shell: bash -l {0} steps: - name: Checkout repository 🔔 - uses: actions/checkout@v3 + uses: actions/checkout@v4.1.1 - name: Run terraform pre-commit ⚡️ - uses: pre-commit/action@v3.0.0 + uses: pre-commit/action@v3.0.1 with: extra_args: --all-files terraform_fmt diff --git a/.github/workflows/test-provider.yaml b/.github/workflows/test-provider.yaml index 3c0a3fa89c..717a332482 100644 --- a/.github/workflows/test-provider.yaml +++ b/.github/workflows/test-provider.yaml @@ -56,7 +56,7 @@ jobs: fail-fast: false steps: - name: "Checkout Infrastructure" - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Checkout the branch from the PR that triggered the job if: ${{ github.event_name == 'issue_comment' }} @@ -65,9 +65,9 @@ jobs: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: - python-version: 3.8 + python-version: "3.11" - name: Retrieve secret from Vault uses: hashicorp/vault-action@v2.5.0 diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 6a8fa4a446..5e527e9aa5 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -32,22 +32,21 @@ jobs: strategy: matrix: python-version: - - "3.8" - - "3.9" - "3.10" - "3.11" + - "3.12" fail-fast: false concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}-${{ matrix.python-version }} cancel-in-progress: true steps: - name: "Checkout Infrastructure" - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: 0 - name: Setup miniconda - uses: conda-incubator/setup-miniconda@v2 + uses: conda-incubator/setup-miniconda@v3 with: auto-update-conda: true python-version: ${{ matrix.python-version }} diff --git a/.github/workflows/test_aws_integration.yaml b/.github/workflows/test_aws_integration.yaml index fa1a2332df..36112ccd50 100644 --- a/.github/workflows/test_aws_integration.yaml +++ b/.github/workflows/test_aws_integration.yaml @@ -43,13 +43,13 @@ jobs: contents: read steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: ref: ${{ env.NEBARI_GH_BRANCH }} fetch-depth: 0 - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: 3.11 diff --git a/.github/workflows/test_conda_build.yaml b/.github/workflows/test_conda_build.yaml index e34363d9a3..2a959cdb6b 100644 --- a/.github/workflows/test_conda_build.yaml +++ b/.github/workflows/test_conda_build.yaml @@ -25,21 +25,21 @@ jobs: cancel-in-progress: true steps: - name: "Checkout Infrastructure" - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: 0 - name: Setup miniconda - uses: conda-incubator/setup-miniconda@v2 + 
uses: conda-incubator/setup-miniconda@v3 with: auto-update-conda: true - python-version: 3.8 + python-version: "3.10" channels: conda-forge activate-environment: nebari-dev - name: Install dependencies run: | - conda install build grayskull conda-build + conda install build grayskull conda-build conda-verify - name: Generate sdist run: | @@ -52,3 +52,8 @@ jobs: - name: Build conda package run: | conda build nebari + + - name: Test conda package + run: | + conda install --use-local nebari + nebari --version diff --git a/.github/workflows/test_do_integration.yaml b/.github/workflows/test_do_integration.yaml index dbe10a3028..dcfacf3175 100644 --- a/.github/workflows/test_do_integration.yaml +++ b/.github/workflows/test_do_integration.yaml @@ -42,12 +42,12 @@ jobs: pull-requests: write steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: ref: ${{ env.NEBARI_GH_BRANCH }} fetch-depth: 0 - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: 3.11 diff --git a/.github/workflows/test_gcp_integration.yaml b/.github/workflows/test_gcp_integration.yaml index 57ef84288f..0418e0af40 100644 --- a/.github/workflows/test_gcp_integration.yaml +++ b/.github/workflows/test_gcp_integration.yaml @@ -42,13 +42,13 @@ jobs: pull-requests: write steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: ref: ${{ env.NEBARI_GH_BRANCH }} fetch-depth: 0 - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: 3.11 diff --git a/.github/workflows/test_helm_charts.yaml b/.github/workflows/test_helm_charts.yaml index daf9abb6da..1d86eb92e4 100644 --- a/.github/workflows/test_helm_charts.yaml +++ b/.github/workflows/test_helm_charts.yaml @@ -23,13 +23,13 @@ jobs: runs-on: ubuntu-latest steps: - name: "Checkout Infrastructure" - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: 0 - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: - python-version: "3.8" + python-version: "3.11" - name: Install additional Python dependencies run: | pip install python-hcl2 diff --git a/.github/workflows/test_local_integration.yaml b/.github/workflows/test_local_integration.yaml index 05dec384b0..67e2a7108e 100644 --- a/.github/workflows/test_local_integration.yaml +++ b/.github/workflows/test_local_integration.yaml @@ -57,12 +57,12 @@ jobs: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Set up Python - uses: conda-incubator/setup-miniconda@v2 + uses: conda-incubator/setup-miniconda@v3 env: CONDA: /home/runnerx/miniconda3 with: auto-update-conda: true - python-version: 3.8 + python-version: "3.11" miniconda-version: "latest" - name: Install Nebari and playwright @@ -70,7 +70,7 @@ jobs: pip install .[dev] playwright install - - uses: azure/setup-kubectl@v3 + - uses: azure/setup-kubectl@v4.0.0 with: version: v1.19.16 @@ -140,9 +140,9 @@ jobs: nebari keycloak adduser --user "${TEST_USERNAME}" "${TEST_PASSWORD}" --config nebari-config.yaml nebari keycloak listusers --config nebari-config.yaml - - uses: actions/setup-node@v3 + - uses: actions/setup-node@v4 with: - node-version: 16 + node-version: 20 - name: Get nebari-config.yaml full path run: echo "NEBARI_CONFIG_PATH=`realpath ./local-deployment/nebari-config.yaml`" >> "$GITHUB_ENV" @@ -170,7 +170,7 @@ jobs: - name: Save Cypress screenshots and videos if: always() - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4.3.1 with: name: e2e-cypress path: | 
diff --git a/.github/workflows/typing.yaml b/.github/workflows/typing.yaml index ae3fa18b93..de70d69483 100644 --- a/.github/workflows/typing.yaml +++ b/.github/workflows/typing.yaml @@ -24,12 +24,12 @@ jobs: cancel-in-progress: true steps: - name: "Checkout Repository" - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: 0 - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.11" cache: "pip" diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index ccda1916e6..9e9dcd9147 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -51,13 +51,13 @@ repos: # python - repo: https://github.com/psf/black - rev: 23.12.1 + rev: 24.3.0 hooks: - id: black args: ["--line-length=88", "--exclude=/src/_nebari/template/"] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.1.9 + rev: v0.3.5 hooks: - id: ruff args: ["--fix"] @@ -73,7 +73,7 @@ repos: # terraform - repo: https://github.com/antonbabenko/pre-commit-terraform - rev: v1.86.0 + rev: v1.88.4 hooks: - id: terraform_fmt args: diff --git a/README.md b/README.md index 1787360b84..c693dfb22f 100644 --- a/README.md +++ b/README.md @@ -79,7 +79,7 @@ Amazon [AWS](https://aws.amazon.com/), [GCP](https://cloud.google.com/ "Google C - Operating System: Currently, Nebari supports development on macOS and Linux operating systems. Windows is NOT supported. However, we would welcome contributions that add and improve support for Windows. -- You need Python >= 3.8 on your local machine or virtual environment to work on Nebari. +- You need Python >= 3.10 on your local machine or virtual environment to work on Nebari. - Adopting virtual environments ([`conda`](https://docs.conda.io/en/latest/), [`pipenv`](https://github.com/pypa/pipenv) or [`venv`](https://docs.python.org/3/library/venv.html)) is also encouraged. diff --git a/RELEASE.md b/RELEASE.md index 076754b3a6..d8c9bd5b43 100644 --- a/RELEASE.md +++ b/RELEASE.md @@ -9,9 +9,62 @@ This file is copied to nebari-dev/nebari-docs using a GitHub Action. 
--> --- -## Upcoming Release +## Release 2024.3.3 - March 27, 2024 + +### What's Changed +* get default variable value when following a terraform variable by @Adam-D-Lewis in https://github.com/nebari-dev/nebari/pull/2322 +* Upgrade Actions versions by @isumitjha in https://github.com/nebari-dev/nebari/pull/2291 +* Cleanup spawner logs by @krassowski in https://github.com/nebari-dev/nebari/pull/2328 +* Fix loki gateway url when deployed on non-dev namespace by @aktech in https://github.com/nebari-dev/nebari/pull/2327 +* Dmcandrew update ruamel.yaml by @dcmcand in https://github.com/nebari-dev/nebari/pull/2315 +* upgrade auth0-python version to ultimately resolve CVE-2024-26130 by @tylergraff in https://github.com/nebari-dev/nebari/pull/2314 +* remove deprecated code paths by @Adam-D-Lewis in https://github.com/nebari-dev/nebari/pull/2349 +* Create SECURITY.md by @dcmcand in https://github.com/nebari-dev/nebari/pull/2354 +* Set node affinity for more pods to ensure they run on general node pool by @Adam-D-Lewis in https://github.com/nebari-dev/nebari/pull/2353 +* Deduplicate conda-store in JupyterLab main menu by @krassowski in https://github.com/nebari-dev/nebari/pull/2347 +* Pass current namespace to argo via environment variable by @krassowski in https://github.com/nebari-dev/nebari/pull/2317 +* PVC for Traefik Ingress (prevent LetsEncrypt throttling) by @kenafoster in https://github.com/nebari-dev/nebari/pull/2352 + +### New Contributors +* @isumitjha made their first contribution in https://github.com/nebari-dev/nebari/pull/2291 +* @tylergraff made their first contribution in https://github.com/nebari-dev/nebari/pull/2314 + +**Full Changelog**: https://github.com/nebari-dev/nebari/compare/2024.3.2...2024.3.3 + +## Release 2024.3.2 - March 14, 2024 + +### What's Changed +* update max k8s versions and remove depreciated api usage in local deploy by @dcmcand in https://github.com/nebari-dev/nebari/pull/2276 +* update keycloak image repo by @Adam-D-Lewis in https://github.com/nebari-dev/nebari/pull/2312 +* Generate random password for Grafana by @aktech in https://github.com/nebari-dev/nebari/pull/2289 +* update conda store to 2024.3.1 by @Adam-D-Lewis in https://github.com/nebari-dev/nebari/pull/2316 +* Switch PyPI release workflow to use trusted publishing by @viniciusdc in https://github.com/nebari-dev/nebari/pull/2323 + + +**Full Changelog**: https://github.com/nebari-dev/nebari/compare/2024.3.1...2024.3.2 + +## Release 2024.3.1 - March 11, 2024 + +### What's Changed +* Modify Playwright test to account for changes in JupyterLab UI. by @marcelovilla in https://github.com/nebari-dev/nebari/pull/2232 +* Add favicon to jupyterhub theme. by @jbouder in https://github.com/nebari-dev/nebari/pull/2222 +* Set min nodes to 0 for worker and user. 
by @pt247 in https://github.com/nebari-dev/nebari/pull/2168 +* Remove `jhub-client` from pyproject.toml by @pavithraes in https://github.com/nebari-dev/nebari/pull/2242 +* Include permission validation step to programmatically cloned repos by @viniciusdc in https://github.com/nebari-dev/nebari/pull/2258 +* Expose jupyter's preferred dir as a config option by @krassowski in https://github.com/nebari-dev/nebari/pull/2251 +* Allow to configure default settings for JupyterLab (`overrides.json`) by @krassowski in https://github.com/nebari-dev/nebari/pull/2249 +* Feature/jlab menu customization by @marcelovilla in https://github.com/nebari-dev/nebari/pull/2259 +* Add cloud provider to the dask config.json file by @marcelovilla in https://github.com/nebari-dev/nebari/pull/2266 +* Fix syntax error in jupyter-server-config Python file by @krassowski in https://github.com/nebari-dev/nebari/pull/2286 +* Add "Open VS Code" entry in services by @krassowski in https://github.com/nebari-dev/nebari/pull/2267 +* Add Grafana Loki integration by @aktech in https://github.com/nebari-dev/nebari/pull/2156 + +### New Contributors +* @jbouder made their first contribution in https://github.com/nebari-dev/nebari/pull/2222 +* @krassowski made their first contribution in https://github.com/nebari-dev/nebari/pull/2251 + +**Full Changelog**: https://github.com/nebari-dev/nebari/compare/2024.1.1...2024.3.1 -* Added Grafana Loki to aggregate, index and search logs ## Release 2024.1.1 - January 17, 2024 diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 0000000000..76f80ef924 --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,11 @@ +# Security Policy + +## Supported Versions + +We support only the latest version, and we use [CalVer](https://calver.org/) for versioning. + +You should feel comfortable upgrading if you're using our documented public APIs and pay attention to `DeprecationWarning`s. Whenever we need to break compatibility, we announce it in the [Changelog](https://www.nebari.dev/docs/references/RELEASE) and raise a `DeprecationWarning` before the old behavior is finally removed. + +## Reporting a Vulnerability + +If you think you have found a vulnerability, please report it at [nebari/security](https://github.com/nebari-dev/nebari/security/new). Please do not report security vulnerabilities on our public issue tracker. Exposing vulnerabilities publicly without giving maintainers a chance to release a fix puts users at risk. diff --git a/pyproject.toml b/pyproject.toml index cb90bc52d0..7bfa0a59c3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,7 +23,7 @@ name = "nebari" dynamic = ["version"] description = "A Jupyter and Dask-powered open source data science platform."
readme = "README.md" -requires-python = ">=3.8" +requires-python = ">=3.10" license = "BSD-3-Clause" authors = [ { name = "Nebari development team", email = "internal-it@quansight.com" }, @@ -42,8 +42,6 @@ classifiers = [ "Intended Audience :: Developers", "Topic :: Software Development :: Build Tools", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Intended Audience :: Developers", @@ -55,23 +53,23 @@ classifiers = [ ] dependencies = [ - "auth0-python==4.4.2", + "auth0-python==4.7.1", "azure-identity==1.12.0", "azure-mgmt-containerservice==26.0.0", "azure-mgmt-resource==23.0.1", "bcrypt==4.0.1", - "boto3==1.28.40", + "boto3==1.34.63", "cloudflare==2.11.7", "kubernetes==27.2.0", "pluggy==1.3.0", "prompt-toolkit==3.0.36", - "pydantic==1.10.12", + "pydantic==2.4.2", "pynacl==1.5.0", - "python-keycloak==3.3.0", + "python-keycloak>=3.9.0", "questionary==2.0.0", "requests-toolbelt==1.0.0", "rich==13.5.1", - "ruamel.yaml==0.17.32", + "ruamel.yaml==0.18.6", "typer==0.9.0", "packaging==23.2", ] diff --git a/scripts/helm-validate.py b/scripts/helm-validate.py index a916d2a2e5..c623ef0620 100644 --- a/scripts/helm-validate.py +++ b/scripts/helm-validate.py @@ -67,7 +67,7 @@ def _load_variable_value(self, argument, parent_contents): var_name = self._clean_var_name(argument, "var") for var in parent_contents.get("variable", {}): if var_name in var: - return var[var_name] + return var[var_name]["default"] else: raise ValueError(f"Could not find variable {var_name}") diff --git a/src/_nebari/config.py b/src/_nebari/config.py index 5602de0b1b..c1bb0e8ef0 100644 --- a/src/_nebari/config.py +++ b/src/_nebari/config.py @@ -2,19 +2,19 @@ import pathlib import re import sys -import typing +from typing import Any, Dict, List, Union import pydantic from _nebari.utils import yaml -def set_nested_attribute(data: typing.Any, attrs: typing.List[str], value: typing.Any): +def set_nested_attribute(data: Any, attrs: List[str], value: Any): """Takes an arbitrary set of attributes and accesses the deep nested object config to set value """ - def _get_attr(d: typing.Any, attr: str): + def _get_attr(d: Any, attr: str): if isinstance(d, list) and re.fullmatch(r"\d+", attr): return d[int(attr)] elif hasattr(d, "__getitem__"): @@ -22,7 +22,7 @@ def _get_attr(d: typing.Any, attr: str): else: return getattr(d, attr) - def _set_attr(d: typing.Any, attr: str, value: typing.Any): + def _set_attr(d: Any, attr: str, value: Any): if isinstance(d, list) and re.fullmatch(r"\d+", attr): d[int(attr)] = value elif hasattr(d, "__getitem__"): @@ -63,6 +63,15 @@ def set_config_from_environment_variables( return config +def dump_nested_model(model_dict: Dict[str, Union[pydantic.BaseModel, str]]): + result = {} + for key, value in model_dict.items(): + result[key] = ( + value.model_dump() if isinstance(value, pydantic.BaseModel) else value + ) + return result + + def read_configuration( config_filename: pathlib.Path, config_schema: pydantic.BaseModel, @@ -77,7 +86,8 @@ def read_configuration( ) with filename.open() as f: - config = config_schema(**yaml.load(f.read())) + config_dict = yaml.load(f) + config = config_schema(**config_dict) if read_environment: config = set_config_from_environment_variables(config) @@ -87,7 +97,7 @@ def read_configuration( def write_configuration( config_filename: pathlib.Path, - config: typing.Union[pydantic.BaseModel, typing.Dict], + config: 
Union[pydantic.BaseModel, Dict], mode: str = "w", ): """Write the nebari configuration file to disk""" @@ -97,6 +107,7 @@ def write_configuration( rev_config_dict = {k: config_dict[k] for k in reversed(config_dict)} yaml.dump(rev_config_dict, f) else: + config = dump_nested_model(config) yaml.dump(config, f) diff --git a/src/_nebari/constants.py b/src/_nebari/constants.py index 229d7957a4..46df430e78 100644 --- a/src/_nebari/constants.py +++ b/src/_nebari/constants.py @@ -1,4 +1,4 @@ -CURRENT_RELEASE = "2024.1.1" +CURRENT_RELEASE = "2024.3.3" # NOTE: Terraform cannot be upgraded further due to Hashicorp licensing changes # implemented in August 2023. @@ -8,14 +8,14 @@ # 04-kubernetes-ingress DEFAULT_TRAEFIK_IMAGE_TAG = "2.9.1" -HIGHEST_SUPPORTED_K8S_VERSION = ("1", "26", "9") +HIGHEST_SUPPORTED_K8S_VERSION = ("1", "29", "2") DEFAULT_GKE_RELEASE_CHANNEL = "UNSPECIFIED" DEFAULT_NEBARI_DASK_VERSION = CURRENT_RELEASE -DEFAULT_NEBARI_IMAGE_TAG = "2024.2.1rc2" -DEFAULT_NEBARI_WORKFLOW_CONTROLLER_IMAGE_TAG = "2024.2.1rc2" +DEFAULT_NEBARI_IMAGE_TAG = CURRENT_RELEASE +DEFAULT_NEBARI_WORKFLOW_CONTROLLER_IMAGE_TAG = CURRENT_RELEASE -DEFAULT_CONDA_STORE_IMAGE_TAG = "2024.1.1" +DEFAULT_CONDA_STORE_IMAGE_TAG = "2024.3.1" LATEST_SUPPORTED_PYTHON_VERSION = "3.10" diff --git a/src/_nebari/initialize.py b/src/_nebari/initialize.py index a5f4e3250f..19a2bccea8 100644 --- a/src/_nebari/initialize.py +++ b/src/_nebari/initialize.py @@ -3,6 +3,7 @@ import re import tempfile from pathlib import Path +from typing import Any, Dict import pydantic import requests @@ -46,7 +47,7 @@ def render_config( region: str = None, disable_prompt: bool = False, ssl_cert_email: str = None, -): +) -> Dict[str, Any]: config = { "provider": cloud_provider, "namespace": namespace, @@ -113,7 +114,7 @@ def render_config( if cloud_provider == ProviderEnum.do: do_region = region or constants.DO_DEFAULT_REGION do_kubernetes_versions = kubernetes_version or get_latest_kubernetes_version( - digital_ocean.kubernetes_versions(do_region) + digital_ocean.kubernetes_versions() ) config["digital_ocean"] = { "kubernetes_version": do_kubernetes_versions, @@ -190,7 +191,7 @@ def render_config( from nebari.plugins import nebari_plugin_manager try: - config_model = nebari_plugin_manager.config_schema.parse_obj(config) + config_model = nebari_plugin_manager.config_schema.model_validate(config) except pydantic.ValidationError as e: print(str(e)) diff --git a/src/_nebari/provider/cicd/github.py b/src/_nebari/provider/cicd/github.py index 7b58464c43..2563af6ad9 100644 --- a/src/_nebari/provider/cicd/github.py +++ b/src/_nebari/provider/cicd/github.py @@ -4,7 +4,7 @@ import requests from nacl import encoding, public -from pydantic import BaseModel, Field +from pydantic import BaseModel, ConfigDict, Field, RootModel from _nebari.constants import LATEST_SUPPORTED_PYTHON_VERSION from _nebari.provider.cicd.common import pip_install_nebari @@ -143,49 +143,34 @@ class GHA_on_extras(BaseModel): paths: List[str] -class GHA_on(BaseModel): - # to allow for dynamic key names - __root__: Dict[str, GHA_on_extras] - - # TODO: validate __root__ values - # `push`, `pull_request`, etc. 
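The hunks above and below migrate models with dynamic key names from pydantic v1's `__root__` field to pydantic v2's `RootModel`. A minimal sketch of the pattern, using a hypothetical model rather than the project's actual classes:

```python
# pydantic v1 required a wrapper model with a magic __root__ field:
#
#     class Jobs(BaseModel):
#         __root__: Dict[str, str]
#
#     Jobs(__root__={"build": "echo hi"}).dict()
#
# pydantic v2 replaces this with RootModel; the root value is passed
# positionally, and .dict() becomes .model_dump().
from typing import Dict

from pydantic import RootModel

Jobs = RootModel[Dict[str, str]]

jobs = Jobs({"build": "echo hi"})
assert jobs.model_dump() == {"build": "echo hi"}
```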
- - -class GHA_job_steps_extras(BaseModel): - # to allow for dynamic key names - __root__: Union[str, float, int] +GHA_on = RootModel[Dict[str, GHA_on_extras]] +GHA_job_steps_extras = RootModel[Union[str, float, int]] class GHA_job_step(BaseModel): name: str - uses: Optional[str] - with_: Optional[Dict[str, GHA_job_steps_extras]] = Field(alias="with") - run: Optional[str] - env: Optional[Dict[str, GHA_job_steps_extras]] - - class Config: - allow_population_by_field_name = True + uses: Optional[str] = None + with_: Optional[Dict[str, GHA_job_steps_extras]] = Field(alias="with", default=None) + run: Optional[str] = None + env: Optional[Dict[str, GHA_job_steps_extras]] = None + model_config = ConfigDict(populate_by_name=True) class GHA_job_id(BaseModel): name: str runs_on_: str = Field(alias="runs-on") - permissions: Optional[Dict[str, str]] + permissions: Optional[Dict[str, str]] = None steps: List[GHA_job_step] - - class Config: - allow_population_by_field_name = True + model_config = ConfigDict(populate_by_name=True) -class GHA_jobs(BaseModel): - # to allow for dynamic key names - __root__: Dict[str, GHA_job_id] +GHA_jobs = RootModel[Dict[str, GHA_job_id]] class GHA(BaseModel): name: str on: GHA_on - env: Optional[Dict[str, str]] + env: Optional[Dict[str, str]] = None jobs: GHA_jobs @@ -204,11 +189,7 @@ def checkout_image_step(): return GHA_job_step( name="Checkout Image", uses="actions/checkout@v3", - with_={ - "token": GHA_job_steps_extras( - __root__="${{ secrets.REPOSITORY_ACCESS_TOKEN }}" - ) - }, + with_={"token": GHA_job_steps_extras("${{ secrets.REPOSITORY_ACCESS_TOKEN }}")}, ) @@ -216,11 +197,7 @@ def setup_python_step(): return GHA_job_step( name="Set up Python", uses="actions/setup-python@v4", - with_={ - "python-version": GHA_job_steps_extras( - __root__=LATEST_SUPPORTED_PYTHON_VERSION - ) - }, + with_={"python-version": GHA_job_steps_extras(LATEST_SUPPORTED_PYTHON_VERSION)}, ) @@ -242,7 +219,7 @@ def gen_nebari_ops(config): env_vars = gha_env_vars(config) push = GHA_on_extras(branches=[config.ci_cd.branch], paths=["nebari-config.yaml"]) - on = GHA_on(__root__={"push": push}) + on = GHA_on({"push": push}) step1 = checkout_image_step() step2 = setup_python_step() @@ -272,7 +249,7 @@ def gen_nebari_ops(config): ), env={ "COMMIT_MSG": GHA_job_steps_extras( - __root__="nebari-config.yaml automated commit: ${{ github.sha }}" + "nebari-config.yaml automated commit: ${{ github.sha }}" ) }, ) @@ -291,7 +268,7 @@ def gen_nebari_ops(config): }, steps=gha_steps, ) - jobs = GHA_jobs(__root__={"build": job1}) + jobs = GHA_jobs({"build": job1}) return NebariOps( name="nebari auto update", @@ -312,18 +289,16 @@ def gen_nebari_linter(config): pull_request = GHA_on_extras( branches=[config.ci_cd.branch], paths=["nebari-config.yaml"] ) - on = GHA_on(__root__={"pull_request": pull_request}) + on = GHA_on({"pull_request": pull_request}) step1 = checkout_image_step() step2 = setup_python_step() step3 = install_nebari_step(config.nebari_version) step4_envs = { - "PR_NUMBER": GHA_job_steps_extras(__root__="${{ github.event.number }}"), - "REPO_NAME": GHA_job_steps_extras(__root__="${{ github.repository }}"), - "GITHUB_TOKEN": GHA_job_steps_extras( - __root__="${{ secrets.REPOSITORY_ACCESS_TOKEN }}" - ), + "PR_NUMBER": GHA_job_steps_extras("${{ github.event.number }}"), + "REPO_NAME": GHA_job_steps_extras("${{ github.repository }}"), + "GITHUB_TOKEN": GHA_job_steps_extras("${{ secrets.REPOSITORY_ACCESS_TOKEN }}"), } step4 = GHA_job_step( @@ -336,7 +311,7 @@ def gen_nebari_linter(config): 
name="nebari", runs_on_="ubuntu-latest", steps=[step1, step2, step3, step4] ) jobs = GHA_jobs( - __root__={ + { "nebari-validate": job1, } ) diff --git a/src/_nebari/provider/cicd/gitlab.py b/src/_nebari/provider/cicd/gitlab.py index e2d02b388b..d5e944f36d 100644 --- a/src/_nebari/provider/cicd/gitlab.py +++ b/src/_nebari/provider/cicd/gitlab.py @@ -1,40 +1,34 @@ from typing import Dict, List, Optional, Union -from pydantic import BaseModel, Field +from pydantic import BaseModel, ConfigDict, Field, RootModel from _nebari.constants import LATEST_SUPPORTED_PYTHON_VERSION from _nebari.provider.cicd.common import pip_install_nebari - -class GLCI_extras(BaseModel): - # to allow for dynamic key names - __root__: Union[str, float, int] +GLCI_extras = RootModel[Union[str, float, int]] class GLCI_image(BaseModel): name: str - entrypoint: Optional[str] + entrypoint: Optional[str] = None class GLCI_rules(BaseModel): if_: Optional[str] = Field(alias="if") - changes: Optional[List[str]] - - class Config: - allow_population_by_field_name = True + changes: Optional[List[str]] = None + model_config = ConfigDict(populate_by_name=True) class GLCI_job(BaseModel): - image: Optional[Union[str, GLCI_image]] - variables: Optional[Dict[str, str]] - before_script: Optional[List[str]] - after_script: Optional[List[str]] + image: Optional[Union[str, GLCI_image]] = None + variables: Optional[Dict[str, str]] = None + before_script: Optional[List[str]] = None + after_script: Optional[List[str]] = None script: List[str] - rules: Optional[List[GLCI_rules]] + rules: Optional[List[GLCI_rules]] = None -class GLCI(BaseModel): - __root__: Dict[str, GLCI_job] +GLCI = RootModel[Dict[str, GLCI_job]] def gen_gitlab_ci(config): @@ -76,7 +70,7 @@ def gen_gitlab_ci(config): ) return GLCI( - __root__={ + { "render-nebari": render_nebari, } ) diff --git a/src/_nebari/provider/cloud/amazon_web_services.py b/src/_nebari/provider/cloud/amazon_web_services.py index 2bf905bfcb..1123c07fe0 100644 --- a/src/_nebari/provider/cloud/amazon_web_services.py +++ b/src/_nebari/provider/cloud/amazon_web_services.py @@ -7,25 +7,18 @@ import boto3 from botocore.exceptions import ClientError, EndpointConnectionError -from _nebari import constants +from _nebari.constants import AWS_ENV_DOCS from _nebari.provider.cloud.commons import filter_by_highest_supported_k8s_version +from _nebari.utils import check_environment_variables from nebari import schema MAX_RETRIES = 5 DELAY = 5 -def check_credentials(): - """Check for AWS credentials are set in the environment.""" - for variable in { - "AWS_ACCESS_KEY_ID", - "AWS_SECRET_ACCESS_KEY", - }: - if variable not in os.environ: - raise ValueError( - f"""Missing the following required environment variable: {variable}\n - Please see the documentation for more information: {constants.AWS_ENV_DOCS}""" - ) +def check_credentials() -> None: + required_variables = {"AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY"} + check_environment_variables(required_variables, AWS_ENV_DOCS) @functools.lru_cache() diff --git a/src/_nebari/provider/cloud/azure_cloud.py b/src/_nebari/provider/cloud/azure_cloud.py index 992e5c1362..44ebdaaee6 100644 --- a/src/_nebari/provider/cloud/azure_cloud.py +++ b/src/_nebari/provider/cloud/azure_cloud.py @@ -9,10 +9,11 @@ from azure.mgmt.containerservice import ContainerServiceClient from azure.mgmt.resource import ResourceManagementClient -from _nebari import constants +from _nebari.constants import AZURE_ENV_DOCS from _nebari.provider.cloud.commons import filter_by_highest_supported_k8s_version from 
_nebari.utils import ( AZURE_TF_STATE_RESOURCE_GROUP_SUFFIX, + check_environment_variables, construct_azure_resource_group_name, ) from nebari import schema @@ -24,29 +25,18 @@ RETRIES = 10 -def check_credentials(): - """Check if credentials are valid.""" - - required_variables = { - "ARM_CLIENT_ID": os.environ.get("ARM_CLIENT_ID", None), - "ARM_SUBSCRIPTION_ID": os.environ.get("ARM_SUBSCRIPTION_ID", None), - "ARM_TENANT_ID": os.environ.get("ARM_TENANT_ID", None), - } - arm_client_secret = os.environ.get("ARM_CLIENT_SECRET", None) - - if not all(required_variables.values()): - raise ValueError( - f"""Missing the following required environment variables: {required_variables}\n - Please see the documentation for more information: {constants.AZURE_ENV_DOCS}""" - ) +def check_credentials() -> DefaultAzureCredential: + required_variables = {"ARM_CLIENT_ID", "ARM_SUBSCRIPTION_ID", "ARM_TENANT_ID"} + check_environment_variables(required_variables, AZURE_ENV_DOCS) + optional_variable = "ARM_CLIENT_SECRET" + arm_client_secret = os.environ.get(optional_variable, None) if arm_client_secret: logger.info("Authenticating as a service principal.") - return DefaultAzureCredential() else: - logger.info("No ARM_CLIENT_SECRET environment variable found.") + logger.info(f"No {optional_variable} environment variable found.") logger.info("Allowing Azure SDK to authenticate using OIDC or other methods.") - return DefaultAzureCredential() + return DefaultAzureCredential() @functools.lru_cache() diff --git a/src/_nebari/provider/cloud/digital_ocean.py b/src/_nebari/provider/cloud/digital_ocean.py index d64ca4c6de..3e4a507be6 100644 --- a/src/_nebari/provider/cloud/digital_ocean.py +++ b/src/_nebari/provider/cloud/digital_ocean.py @@ -7,24 +7,20 @@ import kubernetes.config import requests -from _nebari import constants +from _nebari.constants import DO_ENV_DOCS from _nebari.provider.cloud.amazon_web_services import aws_delete_s3_bucket from _nebari.provider.cloud.commons import filter_by_highest_supported_k8s_version -from _nebari.utils import set_do_environment +from _nebari.utils import check_environment_variables, set_do_environment from nebari import schema -def check_credentials(): - for variable in { +def check_credentials() -> None: + required_variables = { + "DIGITALOCEAN_TOKEN", "SPACES_ACCESS_KEY_ID", "SPACES_SECRET_ACCESS_KEY", - "DIGITALOCEAN_TOKEN", - }: - if variable not in os.environ: - raise ValueError( - f"""Missing the following required environment variable: {variable}\n - Please see the documentation for more information: {constants.DO_ENV_DOCS}""" - ) + } + check_environment_variables(required_variables, DO_ENV_DOCS) def digital_ocean_request(url, method="GET", json=None): @@ -63,7 +59,7 @@ def regions(): return _kubernetes_options()["options"]["regions"] -def kubernetes_versions(region) -> typing.List[str]: +def kubernetes_versions() -> typing.List[str]: """Return list of available kubernetes supported by cloud provider. 
Sorted from oldest to latest.""" supported_kubernetes_versions = sorted( [_["slug"].split("-")[0] for _ in _kubernetes_options()["options"]["versions"]] diff --git a/src/_nebari/provider/cloud/google_cloud.py b/src/_nebari/provider/cloud/google_cloud.py index ba95f713cf..67d0ebad7a 100644 --- a/src/_nebari/provider/cloud/google_cloud.py +++ b/src/_nebari/provider/cloud/google_cloud.py @@ -1,21 +1,17 @@ import functools import json -import os import subprocess from typing import Dict, List, Set -from _nebari import constants +from _nebari.constants import GCP_ENV_DOCS from _nebari.provider.cloud.commons import filter_by_highest_supported_k8s_version +from _nebari.utils import check_environment_variables from nebari import schema -def check_credentials(): - for variable in {"GOOGLE_CREDENTIALS", "PROJECT_ID"}: - if variable not in os.environ: - raise ValueError( - f"""Missing the following required environment variable: {variable}\n - Please see the documentation for more information: {constants.GCP_ENV_DOCS}""" - ) +def check_credentials() -> None: + required_variables = {"GOOGLE_CREDENTIALS", "PROJECT_ID"} + check_environment_variables(required_variables, GCP_ENV_DOCS) @functools.lru_cache() @@ -282,7 +278,7 @@ def check_missing_service() -> None: if missing: raise ValueError( f"""Missing required services: {missing}\n - Please see the documentation for more information: {constants.GCP_ENV_DOCS}""" + Please see the documentation for more information: {GCP_ENV_DOCS}""" ) diff --git a/src/_nebari/render.py b/src/_nebari/render.py index d46a66852f..daf2fa506d 100644 --- a/src/_nebari/render.py +++ b/src/_nebari/render.py @@ -8,7 +8,6 @@ from rich.table import Table from _nebari.deprecate import DEPRECATED_FILE_PATHS -from _nebari.utils import is_relative_to from nebari import hookspecs, schema @@ -89,7 +88,7 @@ def render_template( for path in deleted: abs_path = (output_directory / path).resolve() - if not is_relative_to(abs_path, output_directory): + if not abs_path.is_relative_to(output_directory): raise Exception( f"[ERROR] SHOULD NOT HAPPEN filename was about to be deleted but path={abs_path} is outside of output_directory" ) diff --git a/src/_nebari/stages/bootstrap/__init__.py b/src/_nebari/stages/bootstrap/__init__.py index 688146999b..97e754d9c8 100644 --- a/src/_nebari/stages/bootstrap/__init__.py +++ b/src/_nebari/stages/bootstrap/__init__.py @@ -96,7 +96,7 @@ def render(self) -> Dict[str, str]: for fn, workflow in gen_cicd(self.config).items(): stream = io.StringIO() schema.yaml.dump( - workflow.dict( + workflow.model_dump( by_alias=True, exclude_unset=True, exclude_defaults=True ), stream, diff --git a/src/_nebari/stages/infrastructure/__init__.py b/src/_nebari/stages/infrastructure/__init__.py index bdd542e08e..8679c780d3 100644 --- a/src/_nebari/stages/infrastructure/__init__.py +++ b/src/_nebari/stages/infrastructure/__init__.py @@ -5,9 +5,9 @@ import re import sys import tempfile -from typing import Any, Dict, List, Optional, Tuple, Type, Union +from typing import Annotated, Any, Dict, List, Optional, Tuple, Type, Union -import pydantic +from pydantic import Field, field_validator, model_validator from _nebari import constants from _nebari.provider import terraform @@ -34,7 +34,7 @@ def get_kubeconfig_filename(): class LocalInputVars(schema.Base): kubeconfig_filename: str = get_kubeconfig_filename() - kube_context: Optional[str] + kube_context: Optional[str] = None class ExistingInputVars(schema.Base): @@ -180,7 +180,7 @@ def _calculate_node_groups(config: schema.Main): elif 
config.provider == schema.ProviderEnum.existing: return config.existing.node_selectors else: - return config.local.dict()["node_selectors"] + return config.local.model_dump()["node_selectors"] @@ -219,13 +219,13 @@ class DigitalOceanNodeGroup(schema.Base): """ instance: str - min_nodes: pydantic.conint(ge=1) = 1 - max_nodes: pydantic.conint(ge=1) = 1 + min_nodes: Annotated[int, Field(ge=1)] = 1 + max_nodes: Annotated[int, Field(ge=1)] = 1 class DigitalOceanProvider(schema.Base): region: str - kubernetes_version: str + kubernetes_version: Optional[str] = None # Digital Ocean image slugs are listed here https://slugs.do-api.dev/ node_groups: Dict[str, DigitalOceanNodeGroup] = { "general": DigitalOceanNodeGroup( @@ -240,51 +240,39 @@ } tags: Optional[List[str]] = [] - @pydantic.validator("region") - def _validate_region(cls, value): + @model_validator(mode="before") + @classmethod + def _check_input(cls, data: Any) -> Any: digital_ocean.check_credentials() + # check if region is valid available_regions = set(_["slug"] for _ in digital_ocean.regions()) - if value not in available_regions: + if data["region"] not in available_regions: raise ValueError( - f"Digital Ocean region={value} is not one of {available_regions}" + f"Digital Ocean region={data['region']} is not one of {available_regions}" ) - return value - - @pydantic.validator("node_groups") - def _validate_node_group(cls, value): - digital_ocean.check_credentials() - available_instances = {_["slug"] for _ in digital_ocean.instances()} - for name, node_group in value.items(): - if node_group.instance not in available_instances: - raise ValueError( - f"Digital Ocean instance {node_group.instance} not one of available instance types={available_instances}" - ) - - return value - - @pydantic.root_validator - def _validate_kubernetes_version(cls, values): - digital_ocean.check_credentials() - - if "region" not in values: - raise ValueError("Region required in order to set kubernetes_version") - - available_kubernetes_versions = digital_ocean.kubernetes_versions( - values["region"] - ) - assert available_kubernetes_versions - if ( - values["kubernetes_version"] is not None - and values["kubernetes_version"] not in available_kubernetes_versions - ): + # check if kubernetes version is valid + available_kubernetes_versions = digital_ocean.kubernetes_versions() + if len(available_kubernetes_versions) == 0: raise ValueError( - f"\nInvalid `kubernetes-version` provided: {values['kubernetes_version']}.\nPlease select from one of the following supported Kubernetes versions: {available_kubernetes_versions} or omit flag to use latest Kubernetes version available." + "Request to Digital Ocean for available Kubernetes versions failed." + ) + if data.get("kubernetes_version") is None: + data["kubernetes_version"] = available_kubernetes_versions[-1] + elif data["kubernetes_version"] not in available_kubernetes_versions: + raise ValueError( + f"\nInvalid `kubernetes-version` provided: {data['kubernetes_version']}.\nPlease select from one of the following supported Kubernetes versions: {available_kubernetes_versions} or omit flag to use latest Kubernetes version available."
) - else: - values["kubernetes_version"] = available_kubernetes_versions[-1] - return values + + available_instances = {_["slug"] for _ in digital_ocean.instances()} + if "node_groups" in data: + for _, node_group in data["node_groups"].items(): + if node_group["instance"] not in available_instances: + raise ValueError( + f"Digital Ocean instance {node_group['instance']} not one of available instance types={available_instances}" + ) + return data class GCPIPAllocationPolicy(schema.Base): @@ -317,13 +305,13 @@ class GCPGuestAccelerator(schema.Base): """ name: str - count: pydantic.conint(ge=1) = 1 + count: Annotated[int, Field(ge=1)] = 1 class GCPNodeGroup(schema.Base): instance: str - min_nodes: pydantic.conint(ge=0) = 0 - max_nodes: pydantic.conint(ge=1) = 1 + min_nodes: Annotated[int, Field(ge=0)] = 0 + max_nodes: Annotated[int, Field(ge=1)] = 1 preemptible: bool = False labels: Dict[str, str] = {} guest_accelerators: List[GCPGuestAccelerator] = [] @@ -348,31 +336,23 @@ class GoogleCloudPlatformProvider(schema.Base): master_authorized_networks_config: Optional[Union[GCPCIDRBlock, None]] = None private_cluster_config: Optional[Union[GCPPrivateClusterConfig, None]] = None - @pydantic.root_validator - def validate_all(cls, values): - region = values.get("region") - project_id = values.get("project") - - if project_id is None: - raise ValueError("The `google_cloud_platform.project` field is required.") - - if region is None: - raise ValueError("The `google_cloud_platform.region` field is required.") - - # validate region - google_cloud.validate_region(region) - - # validate kubernetes version - kubernetes_version = values.get("kubernetes_version") - available_kubernetes_versions = google_cloud.kubernetes_versions(region) - if kubernetes_version is None: - values["kubernetes_version"] = available_kubernetes_versions[-1] - elif kubernetes_version not in available_kubernetes_versions: + @model_validator(mode="before") + @classmethod + def _check_input(cls, data: Any) -> Any: + google_cloud.check_credentials() + available_regions = google_cloud.regions() + if data["region"] not in available_regions: raise ValueError( - f"\nInvalid `kubernetes-version` provided: {values['kubernetes_version']}.\nPlease select from one of the following supported Kubernetes versions: {available_kubernetes_versions} or omit flag to use latest Kubernetes version available." + f"Google Cloud region={data['region']} is not one of {available_regions}" ) - return values + available_kubernetes_versions = google_cloud.kubernetes_versions(data["region"]) + if data.get("kubernetes_version") is None: + data["kubernetes_version"] = available_kubernetes_versions[-1] + elif data["kubernetes_version"] not in available_kubernetes_versions: + raise ValueError( + f"\nInvalid `kubernetes-version` provided: {data['kubernetes_version']}.\nPlease select from one of the following supported Kubernetes versions: {available_kubernetes_versions} or omit flag to use latest Kubernetes version available."
+ ) + return data class AzureNodeGroup(schema.Base): @@ -383,24 +363,31 @@ class AzureNodeGroup(schema.Base): class AzureProvider(schema.Base): region: str - kubernetes_version: str + kubernetes_version: Optional[str] = None storage_account_postfix: str - resource_group_name: str = None + resource_group_name: Optional[str] = None node_groups: Dict[str, AzureNodeGroup] = { "general": AzureNodeGroup(instance="Standard_D8_v3", min_nodes=1, max_nodes=1), "user": AzureNodeGroup(instance="Standard_D4_v3", min_nodes=0, max_nodes=5), "worker": AzureNodeGroup(instance="Standard_D4_v3", min_nodes=0, max_nodes=5), } storage_account_postfix: str - vnet_subnet_id: Optional[Union[str, None]] = None + vnet_subnet_id: Optional[str] = None private_cluster_enabled: bool = False resource_group_name: Optional[str] = None tags: Optional[Dict[str, str]] = {} network_profile: Optional[Dict[str, str]] = None max_pods: Optional[int] = None - @pydantic.validator("kubernetes_version") - def _validate_kubernetes_version(cls, value): + @model_validator(mode="before") + @classmethod + def _check_credentials(cls, data: Any) -> Any: + azure_cloud.check_credentials() + return data + + @field_validator("kubernetes_version") + @classmethod + def _validate_kubernetes_version(cls, value: Optional[str]) -> str: available_kubernetes_versions = azure_cloud.kubernetes_versions() if value is None: value = available_kubernetes_versions[-1] @@ -410,7 +397,8 @@ def _validate_kubernetes_version(cls, value): ) return value - @pydantic.validator("resource_group_name") + @field_validator("resource_group_name") + @classmethod def _validate_resource_group_name(cls, value): if value is None: return value @@ -428,9 +416,10 @@ def _validate_resource_group_name(cls, value): return value - @pydantic.validator("tags") - def _validate_tags(cls, tags): - return azure_cloud.validate_tags(tags) + @field_validator("tags") + @classmethod + def _validate_tags(cls, value: Optional[Dict[str, str]]) -> Dict[str, str]: + return value if value is None else azure_cloud.validate_tags(value) class AWSNodeGroup(schema.Base): @@ -455,49 +444,66 @@ class AmazonWebServicesProvider(schema.Base): instance="m5.xlarge", min_nodes=0, max_nodes=5, single_subnet=False ), } - existing_subnet_ids: List[str] = None - existing_security_group_id: str = None + existing_subnet_ids: Optional[List[str]] = None + existing_security_group_id: Optional[str] = None vpc_cidr_block: str = "10.10.0.0/16" permissions_boundary: Optional[str] = None tags: Optional[Dict[str, str]] = {} - @pydantic.root_validator - def validate_all(cls, values): - region = values.get("region") - if region is None: - raise ValueError("The `amazon_web_services.region` field is required.") - - # validate region - amazon_web_services.validate_region(region) - - # validate kubernetes version - kubernetes_version = values.get("kubernetes_version") - available_kubernetes_versions = amazon_web_services.kubernetes_versions(region) - if kubernetes_version is None: - values["kubernetes_version"] = available_kubernetes_versions[-1] - elif kubernetes_version not in available_kubernetes_versions: + @model_validator(mode="before") + @classmethod + def _check_input(cls, data: Any) -> Any: + amazon_web_services.check_credentials() + + # check if region is valid + available_regions = amazon_web_services.regions(data["region"]) + if data["region"] not in available_regions: raise ValueError( - f"\nInvalid `kubernetes-version` provided: {values['kubernetes_version']}.\nPlease select from one of the following supported Kubernetes 
versions: {available_kubernetes_versions} or omit flag to use latest Kubernetes version available." + f"Amazon Web Services region={data['region']} is not one of {available_regions}" ) - # validate node groups - node_groups = values["node_groups"] - available_instances = amazon_web_services.instances(region) - for name, node_group in node_groups.items(): - if node_group.instance not in available_instances: - raise ValueError( - f"Instance {node_group.instance} not available out of available instances {available_instances.keys()}" - ) + # check if kubernetes version is valid + available_kubernetes_versions = amazon_web_services.kubernetes_versions( + data["region"] + ) + if len(available_kubernetes_versions) == 0: + raise ValueError("Request to AWS for available Kubernetes versions failed.") + if data.get("kubernetes_version") is None: + data["kubernetes_version"] = available_kubernetes_versions[-1] + elif data["kubernetes_version"] not in available_kubernetes_versions: + raise ValueError( + f"\nInvalid `kubernetes-version` provided: {data['kubernetes_version']}.\nPlease select from one of the following supported Kubernetes versions: {available_kubernetes_versions} or omit flag to use latest Kubernetes version available." + ) - if values["availability_zones"] is None: - zones = amazon_web_services.zones(region) - values["availability_zones"] = list(sorted(zones))[:2] + # check if availability zones are valid + available_zones = amazon_web_services.zones(data["region"]) + if "availability_zones" not in data: + data["availability_zones"] = list(sorted(available_zones))[:2] + else: + for zone in data["availability_zones"]: + if zone not in available_zones: + raise ValueError( + f"Amazon Web Services availability zone={zone} is not one of {available_zones}" + ) - return values + # check if instances are valid + available_instances = amazon_web_services.instances(data["region"]) + if "node_groups" in data: + for _, node_group in data["node_groups"].items(): + instance = ( + node_group["instance"] + if hasattr(node_group, "__getitem__") + else node_group.instance + ) + if instance not in available_instances: + raise ValueError( + f"Amazon Web Services instance {instance} not one of available instance types={available_instances}" + ) + return data class LocalProvider(schema.Base): - kube_context: Optional[str] + kube_context: Optional[str] = None node_selectors: Dict[str, KeyValueDict] = { "general": KeyValueDict(key="kubernetes.io/os", value="linux"), "user": KeyValueDict(key="kubernetes.io/os", value="linux"), @@ -506,7 +512,7 @@ class ExistingProvider(schema.Base): - kube_context: Optional[str] + kube_context: Optional[str] = None node_selectors: Dict[str, KeyValueDict] = { "general": KeyValueDict(key="kubernetes.io/os", value="linux"), "user": KeyValueDict(key="kubernetes.io/os", value="linux"), @@ -538,30 +544,31 @@ class ExistingProvider(schema.Base): class InputSchema(schema.Base): - local: Optional[LocalProvider] - existing: Optional[ExistingProvider] - google_cloud_platform: Optional[GoogleCloudPlatformProvider] - amazon_web_services: Optional[AmazonWebServicesProvider] - azure: Optional[AzureProvider] - digital_ocean: Optional[DigitalOceanProvider] + local: Optional[LocalProvider] = None + existing: Optional[ExistingProvider] = None + google_cloud_platform: Optional[GoogleCloudPlatformProvider] = None + amazon_web_services: Optional[AmazonWebServicesProvider] = None + azure: Optional[AzureProvider] = None + digital_ocean:
Optional[DigitalOceanProvider] = None def exclude_from_config(self): exclude = set() - for provider in InputSchema.__fields__: + for provider in InputSchema.model_fields: if getattr(self, provider) is None: exclude.add(provider) return exclude - @pydantic.root_validator(pre=True) - def check_provider(cls, values): - if "provider" in values: - provider: str = values["provider"] + @model_validator(mode="before") + @classmethod + def check_provider(cls, data: Any) -> Any: + if "provider" in data: + provider: str = data["provider"] if hasattr(schema.ProviderEnum, provider): # TODO: all cloud providers has required fields, but local and existing don't. # And there is no way to initialize a model without user input here. # We preserve the original behavior here, but we should find a better way to do this. - if provider in ["local", "existing"] and provider not in values: - values[provider] = provider_enum_model_map[provider]() + if provider in ["local", "existing"] and provider not in data: + data[provider] = provider_enum_model_map[provider]() else: # if the provider field is invalid, it won't be set when this validator is called # so we need to check for it explicitly here, and set the `pre` to True @@ -573,16 +580,16 @@ def check_provider(cls, values): setted_providers = [ provider for provider in provider_name_abbreviation_map.keys() - if provider in values + if provider in data ] num_providers = len(setted_providers) if num_providers > 1: raise ValueError(f"Multiple providers set: {setted_providers}") elif num_providers == 1: - values["provider"] = provider_name_abbreviation_map[setted_providers[0]] + data["provider"] = provider_name_abbreviation_map[setted_providers[0]] elif num_providers == 0: - values["provider"] = schema.ProviderEnum.local.value - return values + data["provider"] = schema.ProviderEnum.local.value + return data class NodeSelectorKeyValue(schema.Base): @@ -593,20 +600,20 @@ class NodeSelectorKeyValue(schema.Base): class KubernetesCredentials(schema.Base): host: str cluster_ca_certifiate: str - token: Optional[str] - username: Optional[str] - password: Optional[str] - client_certificate: Optional[str] - client_key: Optional[str] - config_path: Optional[str] - config_context: Optional[str] + token: Optional[str] = None + username: Optional[str] = None + password: Optional[str] = None + client_certificate: Optional[str] = None + client_key: Optional[str] = None + config_path: Optional[str] = None + config_context: Optional[str] = None class OutputSchema(schema.Base): node_selectors: Dict[str, NodeSelectorKeyValue] kubernetes_credentials: KubernetesCredentials kubeconfig_filename: str - nfs_endpoint: Optional[str] + nfs_endpoint: Optional[str] = None class KubernetesInfrastructureStage(NebariTerraformStage): @@ -694,11 +701,13 @@ def tf_objects(self) -> List[Dict]: def input_vars(self, stage_outputs: Dict[str, Dict[str, Any]]): if self.config.provider == schema.ProviderEnum.local: - return LocalInputVars(kube_context=self.config.local.kube_context).dict() + return LocalInputVars( + kube_context=self.config.local.kube_context + ).model_dump() elif self.config.provider == schema.ProviderEnum.existing: return ExistingInputVars( kube_context=self.config.existing.kube_context - ).dict() + ).model_dump() elif self.config.provider == schema.ProviderEnum.do: return DigitalOceanInputVars( name=self.config.escaped_project_name, @@ -707,7 +716,7 @@ def input_vars(self, stage_outputs: Dict[str, Dict[str, Any]]): tags=self.config.digital_ocean.tags, 
kubernetes_version=self.config.digital_ocean.kubernetes_version, node_groups=self.config.digital_ocean.node_groups, - ).dict() + ).model_dump() elif self.config.provider == schema.ProviderEnum.gcp: return GCPInputVars( name=self.config.escaped_project_name, @@ -736,7 +745,7 @@ def input_vars(self, stage_outputs: Dict[str, Dict[str, Any]]): ip_allocation_policy=self.config.google_cloud_platform.ip_allocation_policy, master_authorized_networks_config=self.config.google_cloud_platform.master_authorized_networks_config, private_cluster_config=self.config.google_cloud_platform.private_cluster_config, - ).dict() + ).model_dump() elif self.config.provider == schema.ProviderEnum.azure: return AzureInputVars( name=self.config.escaped_project_name, @@ -767,7 +776,7 @@ def input_vars(self, stage_outputs: Dict[str, Dict[str, Any]]): tags=self.config.azure.tags, network_profile=self.config.azure.network_profile, max_pods=self.config.azure.max_pods, - ).dict() + ).model_dump() elif self.config.provider == schema.ProviderEnum.aws: return AWSInputVars( name=self.config.escaped_project_name, @@ -793,7 +802,7 @@ def input_vars(self, stage_outputs: Dict[str, Dict[str, Any]]): vpc_cidr_block=self.config.amazon_web_services.vpc_cidr_block, permissions_boundary=self.config.amazon_web_services.permissions_boundary, tags=self.config.amazon_web_services.tags, - ).dict() + ).model_dump() else: raise ValueError(f"Unknown provider: {self.config.provider}") diff --git a/src/_nebari/stages/infrastructure/template/aws/modules/kubernetes/main.tf b/src/_nebari/stages/infrastructure/template/aws/modules/kubernetes/main.tf index 848d1c0471..43e5538507 100644 --- a/src/_nebari/stages/infrastructure/template/aws/modules/kubernetes/main.tf +++ b/src/_nebari/stages/infrastructure/template/aws/modules/kubernetes/main.tf @@ -57,7 +57,6 @@ resource "aws_eks_node_group" "main" { ] tags = merge({ - # "kubernetes.io/cluster/${var.name}" = "shared" "k8s.io/cluster-autoscaler/node-template/label/dedicated" = var.node_groups[count.index].name propagate_at_launch = true }, var.tags) @@ -69,9 +68,36 @@ data "aws_eks_cluster_auth" "main" { resource "aws_eks_addon" "aws-ebs-csi-driver" { # required for Kubernetes v1.23+ on AWS - addon_name = "aws-ebs-csi-driver" - cluster_name = aws_eks_cluster.main.name - resolve_conflicts = "OVERWRITE" + addon_name = "aws-ebs-csi-driver" + cluster_name = aws_eks_cluster.main.name + resolve_conflicts_on_create = "OVERWRITE" + resolve_conflicts_on_update = "OVERWRITE" + + configuration_values = jsonencode({ + controller = { + nodeSelector = { + "eks.amazonaws.com/nodegroup" = "general" + } + } + }) + + # Ensure cluster and node groups are created + depends_on = [ + aws_eks_cluster.main, + aws_eks_node_group.main, + ] +} + +resource "aws_eks_addon" "coredns" { + addon_name = "coredns" + cluster_name = aws_eks_cluster.main.name + + configuration_values = jsonencode({ + nodeSelector = { + "eks.amazonaws.com/nodegroup" = "general" + } + }) + # Ensure cluster and node groups are created depends_on = [ aws_eks_cluster.main, diff --git a/src/_nebari/stages/infrastructure/template/aws/modules/network/main.tf b/src/_nebari/stages/infrastructure/template/aws/modules/network/main.tf index b3601a2a91..da42767976 100644 --- a/src/_nebari/stages/infrastructure/template/aws/modules/network/main.tf +++ b/src/_nebari/stages/infrastructure/template/aws/modules/network/main.tf @@ -3,7 +3,6 @@ resource "aws_vpc" "main" { enable_dns_support = true enable_dns_hostnames = true - enable_classiclink = false tags = merge({ Name = 
var.name }, var.tags, var.vpc_tags) } diff --git a/src/_nebari/stages/infrastructure/template/aws/versions.tf b/src/_nebari/stages/infrastructure/template/aws/versions.tf index 54fc973d6a..68c0faf27b 100644 --- a/src/_nebari/stages/infrastructure/template/aws/versions.tf +++ b/src/_nebari/stages/infrastructure/template/aws/versions.tf @@ -2,7 +2,7 @@ terraform { required_providers { aws = { source = "hashicorp/aws" - version = "3.73.0" + version = "5.33.0" } } required_version = ">= 1.0" diff --git a/src/_nebari/stages/infrastructure/template/azure/versions.tf b/src/_nebari/stages/infrastructure/template/azure/versions.tf index 60343ba4d0..a3753935d2 100644 --- a/src/_nebari/stages/infrastructure/template/azure/versions.tf +++ b/src/_nebari/stages/infrastructure/template/azure/versions.tf @@ -2,7 +2,7 @@ terraform { required_providers { azurerm = { source = "hashicorp/azurerm" - version = "=3.22.0" + version = "=3.97.1" } } required_version = ">= 1.0" diff --git a/src/_nebari/stages/infrastructure/template/local/main.tf b/src/_nebari/stages/infrastructure/template/local/main.tf index 00c1ca97b0..fb0d0997e1 100644 --- a/src/_nebari/stages/infrastructure/template/local/main.tf +++ b/src/_nebari/stages/infrastructure/template/local/main.tf @@ -1,8 +1,8 @@ terraform { required_providers { kind = { - source = "kyma-incubator/kind" - version = "0.0.11" + source = "tehcyx/kind" + version = "0.4.0" } docker = { source = "kreuzwerker/docker" @@ -48,7 +48,7 @@ resource "kind_cluster" "default" { node { role = "general" - image = "kindest/node:v1.23.13" + image = "kindest/node:v1.29.2" } } } diff --git a/src/_nebari/stages/infrastructure/template/local/metallb.yaml b/src/_nebari/stages/infrastructure/template/local/metallb.yaml index 9d6b6833c8..c832baebde 100644 --- a/src/_nebari/stages/infrastructure/template/local/metallb.yaml +++ b/src/_nebari/stages/infrastructure/template/local/metallb.yaml @@ -1,82 +1,3 @@ -apiVersion: policy/v1beta1 -kind: PodSecurityPolicy -metadata: - labels: - app: metallb - name: controller -spec: - allowPrivilegeEscalation: false - allowedCapabilities: [] - allowedHostPaths: [] - defaultAddCapabilities: [] - defaultAllowPrivilegeEscalation: false - fsGroup: - ranges: - - max: 65535 - min: 1 - rule: MustRunAs - hostIPC: false - hostNetwork: false - hostPID: false - privileged: false - readOnlyRootFilesystem: true - requiredDropCapabilities: - - ALL - runAsUser: - ranges: - - max: 65535 - min: 1 - rule: MustRunAs - seLinux: - rule: RunAsAny - supplementalGroups: - ranges: - - max: 65535 - min: 1 - rule: MustRunAs - volumes: - - configMap - - secret - - emptyDir ---- -apiVersion: policy/v1beta1 -kind: PodSecurityPolicy -metadata: - labels: - app: metallb - name: speaker -spec: - allowPrivilegeEscalation: false - allowedCapabilities: - - NET_RAW - allowedHostPaths: [] - defaultAddCapabilities: [] - defaultAllowPrivilegeEscalation: false - fsGroup: - rule: RunAsAny - hostIPC: false - hostNetwork: true - hostPID: false - hostPorts: - - max: 7472 - min: 7472 - - max: 7946 - min: 7946 - privileged: true - readOnlyRootFilesystem: true - requiredDropCapabilities: - - ALL - runAsUser: - rule: RunAsAny - seLinux: - rule: RunAsAny - supplementalGroups: - rule: RunAsAny - volumes: - - configMap - - secret - - emptyDir ---- apiVersion: v1 kind: ServiceAccount metadata: diff --git a/src/_nebari/stages/kubernetes_ingress/__init__.py b/src/_nebari/stages/kubernetes_ingress/__init__.py index 99e241f65d..6436df0ba5 100644 --- a/src/_nebari/stages/kubernetes_ingress/__init__.py +++ 
b/src/_nebari/stages/kubernetes_ingress/__init__.py
@@ -5,8 +5,7 @@
 import socket
 import sys
 import time
-import typing
-from typing import Any, Dict, List, Type
+from typing import Any, Dict, List, Optional, Type, Union

 from _nebari import constants
 from _nebari.provider.dns.cloudflare import update_record
@@ -133,20 +132,20 @@ class SelfSignedCertificate(schema.Base):

 class LetsEncryptCertificate(schema.Base):
     type: str = CertificateEnum.letsencrypt
-    acme_email: str
+    acme_email: Optional[str] = None
     acme_server: str = "https://acme-v02.api.letsencrypt.org/directory"


 class ExistingCertificate(schema.Base):
     type: str = CertificateEnum.existing
-    secret_name: str
+    secret_name: Optional[str] = None


 class DisabledCertificate(schema.Base):
     type: str = CertificateEnum.disabled


-Certificate = typing.Union[
+Certificate = Union[
     SelfSignedCertificate,
     LetsEncryptCertificate,
     ExistingCertificate,
@@ -155,16 +154,16 @@ class DisabledCertificate(schema.Base):


 class DnsProvider(schema.Base):
-    provider: typing.Optional[str]
-    auto_provision: typing.Optional[bool] = False
+    provider: Optional[str] = None
+    auto_provision: Optional[bool] = False


 class Ingress(schema.Base):
-    terraform_overrides: typing.Dict = {}
+    terraform_overrides: Dict = {}


 class InputSchema(schema.Base):
-    domain: typing.Optional[str]
+    domain: Optional[str] = None
     certificate: Certificate = SelfSignedCertificate()
     ingress: Ingress = Ingress()
     dns: DnsProvider = DnsProvider()
@@ -176,7 +175,7 @@ class IngressEndpoint(schema.Base):


 class OutputSchema(schema.Base):
-    load_balancer_address: typing.List[IngressEndpoint]
+    load_balancer_address: List[IngressEndpoint]
     domain: str


@@ -201,9 +200,9 @@ def input_vars(self, stage_outputs: Dict[str, Dict[str, Any]]):
             cert_details["acme-email"] = self.config.certificate.acme_email
             cert_details["acme-server"] = self.config.certificate.acme_server
         elif cert_type == "existing":
-            cert_details[
-                "certificate-secret-name"
-            ] = self.config.certificate.secret_name
+            cert_details["certificate-secret-name"] = (
+                self.config.certificate.secret_name
+            )

         return {
             **{
diff --git a/src/_nebari/stages/kubernetes_ingress/template/modules/kubernetes/ingress/main.tf b/src/_nebari/stages/kubernetes_ingress/template/modules/kubernetes/ingress/main.tf
index 08bb5b295d..217039f420 100644
--- a/src/_nebari/stages/kubernetes_ingress/template/modules/kubernetes/ingress/main.tf
+++ b/src/_nebari/stages/kubernetes_ingress/template/modules/kubernetes/ingress/main.tf
@@ -9,7 +9,7 @@ locals {
     "--entrypoints.minio.http.tls.certResolver=letsencrypt",
     "--certificatesresolvers.letsencrypt.acme.tlschallenge",
     "--certificatesresolvers.letsencrypt.acme.email=${var.acme-email}",
-    "--certificatesresolvers.letsencrypt.acme.storage=acme.json",
+    "--certificatesresolvers.letsencrypt.acme.storage=/mnt/acme-certificates/acme.json",
     "--certificatesresolvers.letsencrypt.acme.caserver=${var.acme-server}",
   ]
   self-signed = local.default_cert
@@ -27,6 +27,22 @@ resource "kubernetes_service_account" "main" {
   }
 }

+resource "kubernetes_persistent_volume_claim" "traefik_certs_pvc" {
+  metadata {
+    name      = "traefik-ingress-certs"
+    namespace = var.namespace
+  }
+  spec {
+    access_modes = ["ReadWriteOnce"]
+    resources {
+      requests = {
+        storage = "5Gi"
+      }
+    }
+  }
+  wait_until_bound = false
+}
+
 resource "kubernetes_cluster_role" "main" {
   metadata {
@@ -215,6 +231,10 @@ resource "kubernetes_deployment" "main" {
           image = "${var.traefik-image.image}:${var.traefik-image.tag}"
           name  = var.name

+          volume_mount {
+            mount_path = "/mnt/acme-certificates"
+            name =
"acme-certificates" + } security_context { capabilities { drop = ["ALL"] @@ -326,6 +346,12 @@ resource "kubernetes_deployment" "main" { success_threshold = 1 } } + volume { + name = "acme-certificates" + persistent_volume_claim { + claim_name = kubernetes_persistent_volume_claim.traefik_certs_pvc.metadata.0.name + } + } } } } diff --git a/src/_nebari/stages/kubernetes_initialize/__init__.py b/src/_nebari/stages/kubernetes_initialize/__init__.py index e40a69ed0f..7afd69b547 100644 --- a/src/_nebari/stages/kubernetes_initialize/__init__.py +++ b/src/_nebari/stages/kubernetes_initialize/__init__.py @@ -1,8 +1,7 @@ import sys -import typing -from typing import Any, Dict, List, Type, Union +from typing import Any, Dict, List, Optional, Type -import pydantic +from pydantic import model_validator from _nebari.stages.base import NebariTerraformStage from _nebari.stages.tf_objects import ( @@ -16,37 +15,34 @@ class ExtContainerReg(schema.Base): enabled: bool = False - access_key_id: typing.Optional[str] - secret_access_key: typing.Optional[str] - extcr_account: typing.Optional[str] - extcr_region: typing.Optional[str] - - @pydantic.root_validator - def enabled_must_have_fields(cls, values): - if values["enabled"]: + access_key_id: Optional[str] = None + secret_access_key: Optional[str] = None + extcr_account: Optional[str] = None + extcr_region: Optional[str] = None + + @model_validator(mode="after") + def enabled_must_have_fields(self): + if self.enabled: for fldname in ( "access_key_id", "secret_access_key", "extcr_account", "extcr_region", ): - if ( - fldname not in values - or values[fldname] is None - or values[fldname].strip() == "" - ): + value = getattr(self, fldname) + if value is None or value.strip() == "": raise ValueError( f"external_container_reg must contain a non-blank {fldname} when enabled is true" ) - return values + return self class InputVars(schema.Base): name: str environment: str cloud_provider: str - aws_region: Union[str, None] = None - external_container_reg: Union[ExtContainerReg, None] = None + aws_region: Optional[str] = None + external_container_reg: Optional[ExtContainerReg] = None gpu_enabled: bool = False gpu_node_group_names: List[str] = [] @@ -78,7 +74,7 @@ def input_vars(self, stage_outputs: Dict[str, Dict[str, Any]]): name=self.config.project_name, environment=self.config.namespace, cloud_provider=self.config.provider.value, - external_container_reg=self.config.external_container_reg.dict(), + external_container_reg=self.config.external_container_reg.model_dump(), ) if self.config.provider == schema.ProviderEnum.gcp: @@ -97,7 +93,7 @@ def input_vars(self, stage_outputs: Dict[str, Dict[str, Any]]): ] input_vars.aws_region = self.config.amazon_web_services.region - return input_vars.dict() + return input_vars.model_dump() def check( self, stage_outputs: Dict[str, Dict[str, Any]], disable_prompt: bool = False diff --git a/src/_nebari/stages/kubernetes_initialize/template/modules/cluster-autoscaler/main.tf b/src/_nebari/stages/kubernetes_initialize/template/modules/cluster-autoscaler/main.tf index 29f982c86a..c07edd70dd 100644 --- a/src/_nebari/stages/kubernetes_initialize/template/modules/cluster-autoscaler/main.tf +++ b/src/_nebari/stages/kubernetes_initialize/template/modules/cluster-autoscaler/main.tf @@ -19,6 +19,24 @@ resource "helm_release" "autoscaler" { clusterName = var.cluster-name enabled = true } + + affinity = { + nodeAffinity = { + requiredDuringSchedulingIgnoredDuringExecution = { + nodeSelectorTerms = [ + { + matchExpressions = [ + { + key = 
"eks.amazonaws.com/nodegroup" + operator = "In" + values = ["general"] + } + ] + } + ] + } + } + } }) ], var.overrides) } diff --git a/src/_nebari/stages/kubernetes_keycloak/__init__.py b/src/_nebari/stages/kubernetes_keycloak/__init__.py index 767c83189b..7ded0f1f57 100644 --- a/src/_nebari/stages/kubernetes_keycloak/__init__.py +++ b/src/_nebari/stages/kubernetes_keycloak/__init__.py @@ -6,11 +6,9 @@ import string import sys import time -import typing -from abc import ABC -from typing import Any, Dict, List, Type +from typing import Any, Dict, List, Optional, Type, Union -import pydantic +from pydantic import Field, ValidationInfo, field_validator from _nebari.stages.base import NebariTerraformStage from _nebari.stages.tf_objects import ( @@ -62,93 +60,79 @@ def to_yaml(cls, representer, node): class GitHubConfig(schema.Base): - client_id: str = pydantic.Field( - default_factory=lambda: os.environ.get("GITHUB_CLIENT_ID") + client_id: str = Field( + default_factory=lambda: os.environ.get("GITHUB_CLIENT_ID"), + validate_default=True, ) - client_secret: str = pydantic.Field( - default_factory=lambda: os.environ.get("GITHUB_CLIENT_SECRET") + client_secret: str = Field( + default_factory=lambda: os.environ.get("GITHUB_CLIENT_SECRET"), + validate_default=True, ) - @pydantic.root_validator(allow_reuse=True) - def validate_required(cls, values): - missing = [] - for k, v in { + @field_validator("client_id", "client_secret", mode="before") + @classmethod + def validate_credentials(cls, value: Optional[str], info: ValidationInfo) -> str: + variable_mapping = { "client_id": "GITHUB_CLIENT_ID", "client_secret": "GITHUB_CLIENT_SECRET", - }.items(): - if not values.get(k): - missing.append(v) - - if len(missing) > 0: + } + if value is None: raise ValueError( - f"Missing the following required environment variable(s): {', '.join(missing)}" + f"Missing the following required environment variable: {variable_mapping[info.field_name]}" ) - - return values + return value class Auth0Config(schema.Base): - client_id: str = pydantic.Field( - default_factory=lambda: os.environ.get("AUTH0_CLIENT_ID") + client_id: str = Field( + default_factory=lambda: os.environ.get("AUTH0_CLIENT_ID"), + validate_default=True, ) - client_secret: str = pydantic.Field( - default_factory=lambda: os.environ.get("AUTH0_CLIENT_SECRET") + client_secret: str = Field( + default_factory=lambda: os.environ.get("AUTH0_CLIENT_SECRET"), + validate_default=True, ) - auth0_subdomain: str = pydantic.Field( - default_factory=lambda: os.environ.get("AUTH0_DOMAIN") + auth0_subdomain: str = Field( + default_factory=lambda: os.environ.get("AUTH0_DOMAIN"), + validate_default=True, ) - @pydantic.root_validator(allow_reuse=True) - def validate_required(cls, values): - missing = [] - for k, v in { + @field_validator("client_id", "client_secret", "auth0_subdomain", mode="before") + @classmethod + def validate_credentials(cls, value: Optional[str], info: ValidationInfo) -> str: + variable_mapping = { "client_id": "AUTH0_CLIENT_ID", "client_secret": "AUTH0_CLIENT_SECRET", "auth0_subdomain": "AUTH0_DOMAIN", - }.items(): - if not values.get(k): - missing.append(v) - - if len(missing) > 0: + } + if value is None: raise ValueError( - f"Missing the following required environment variable(s): {', '.join(missing)}" + f"Missing the following required environment variable: {variable_mapping[info.field_name]} " ) + return value - return values - - -class Authentication(schema.Base, ABC): - _types: typing.Dict[str, type] = {} +class BaseAuthentication(schema.Base): 
type: AuthenticationEnum - # Based on https://github.com/samuelcolvin/pydantic/issues/2177#issuecomment-739578307 - # This allows type field to determine which subclass of Authentication should be used for validation. +class PasswordAuthentication(BaseAuthentication): + type: AuthenticationEnum = AuthenticationEnum.password - # Used to register automatically all the submodels in `_types`. - def __init_subclass__(cls): - cls._types[cls._typ.value] = cls - @classmethod - def __get_validators__(cls): - yield cls.validate +class Auth0Authentication(BaseAuthentication): + type: AuthenticationEnum = AuthenticationEnum.auth0 + config: Auth0Config = Field(default_factory=lambda: Auth0Config()) - @classmethod - def validate(cls, value: typing.Dict[str, typing.Any]) -> "Authentication": - if "type" not in value: - raise ValueError("type field is missing from security.authentication") - specified_type = value.get("type") - sub_class = cls._types.get(specified_type, None) +class GitHubAuthentication(BaseAuthentication): + type: AuthenticationEnum = AuthenticationEnum.github + config: GitHubConfig = Field(default_factory=lambda: GitHubConfig()) - if not sub_class: - raise ValueError( - f"No registered Authentication type called {specified_type}" - ) - # init with right submodel - return sub_class(**value) +Authentication = Union[ + PasswordAuthentication, Auth0Authentication, GitHubAuthentication +] def random_secure_string( @@ -157,33 +141,56 @@ def random_secure_string( return "".join(secrets.choice(chars) for i in range(length)) -class PasswordAuthentication(Authentication): - _typ = AuthenticationEnum.password - +class Keycloak(schema.Base): + initial_root_password: str = Field(default_factory=random_secure_string) + overrides: Dict = {} + realm_display_name: str = "Nebari" -class Auth0Authentication(Authentication): - _typ = AuthenticationEnum.auth0 - config: Auth0Config = pydantic.Field(default_factory=lambda: Auth0Config()) +auth_enum_to_model = { + AuthenticationEnum.password: PasswordAuthentication, + AuthenticationEnum.auth0: Auth0Authentication, + AuthenticationEnum.github: GitHubAuthentication, +} -class GitHubAuthentication(Authentication): - _typ = AuthenticationEnum.github - config: GitHubConfig = pydantic.Field(default_factory=lambda: GitHubConfig()) - - -class Keycloak(schema.Base): - initial_root_password: str = pydantic.Field(default_factory=random_secure_string) - overrides: typing.Dict = {} - realm_display_name: str = "Nebari" +auth_enum_to_config = { + AuthenticationEnum.auth0: Auth0Config, + AuthenticationEnum.github: GitHubConfig, +} class Security(schema.Base): - authentication: Authentication = PasswordAuthentication( - type=AuthenticationEnum.password - ) + authentication: Authentication = PasswordAuthentication() shared_users_group: bool = True keycloak: Keycloak = Keycloak() + @field_validator("authentication", mode="before") + @classmethod + def validate_authentication(cls, value: Optional[Dict]) -> Authentication: + if value is None: + return PasswordAuthentication() + if "type" not in value: + raise ValueError( + "Authentication type must be specified if authentication is set" + ) + auth_type = value["type"] if hasattr(value, "__getitem__") else value.type + if auth_type in auth_enum_to_model: + if auth_type == AuthenticationEnum.password: + return auth_enum_to_model[auth_type]() + else: + if "config" in value: + config_dict = ( + value["config"] + if hasattr(value, "__getitem__") + else value.config + ) + config = auth_enum_to_config[auth_type](**config_dict) + else: + 
config = auth_enum_to_config[auth_type]() + return auth_enum_to_model[auth_type](config=config) + else: + raise ValueError(f"Unsupported authentication type {auth_type}") + class InputSchema(schema.Base): security: Security = Security() @@ -226,7 +233,7 @@ def input_vars(self, stage_outputs: Dict[str, Dict[str, Any]]): node_group=stage_outputs["stages/02-infrastructure"]["node_selectors"][ "general" ], - ).dict() + ).model_dump() def check( self, stage_outputs: Dict[str, Dict[str, Any]], disable_check: bool = False diff --git a/src/_nebari/stages/kubernetes_keycloak/template/modules/kubernetes/keycloak-helm/values.yaml b/src/_nebari/stages/kubernetes_keycloak/template/modules/kubernetes/keycloak-helm/values.yaml index 94359cf451..abe7d4d3e3 100644 --- a/src/_nebari/stages/kubernetes_keycloak/template/modules/kubernetes/keycloak-helm/values.yaml +++ b/src/_nebari/stages/kubernetes_keycloak/template/modules/kubernetes/keycloak-helm/values.yaml @@ -4,6 +4,9 @@ ingress: # we will need to define our own IngressRoute elsewhere. enabled: false +image: + repository: quay.io/keycloak/keycloak + imagePullSecrets: - name: "extcrcreds" diff --git a/src/_nebari/stages/kubernetes_keycloak_configuration/__init__.py b/src/_nebari/stages/kubernetes_keycloak_configuration/__init__.py index 4cb0c23aeb..1c33429e37 100644 --- a/src/_nebari/stages/kubernetes_keycloak_configuration/__init__.py +++ b/src/_nebari/stages/kubernetes_keycloak_configuration/__init__.py @@ -3,6 +3,7 @@ from typing import Any, Dict, List, Type from _nebari.stages.base import NebariTerraformStage +from _nebari.stages.kubernetes_keycloak import Authentication from _nebari.stages.tf_objects import NebariTerraformState from nebari import schema from nebari.hookspecs import NebariStage, hookimpl @@ -14,7 +15,7 @@ class InputVars(schema.Base): realm: str = "nebari" realm_display_name: str - authentication: Dict[str, Any] + authentication: Authentication keycloak_groups: List[str] = ["superadmin", "admin", "developer", "analyst"] default_groups: List[str] = ["analyst"] @@ -39,7 +40,7 @@ def input_vars(self, stage_outputs: Dict[str, Dict[str, Any]]): input_vars.keycloak_groups += users_group input_vars.default_groups += users_group - return input_vars.dict() + return input_vars.model_dump() def check( self, stage_outputs: Dict[str, Dict[str, Any]], disable_prompt: bool = False diff --git a/src/_nebari/stages/kubernetes_services/__init__.py b/src/_nebari/stages/kubernetes_services/__init__.py index 9c47fee6ec..cdc1ae9151 100644 --- a/src/_nebari/stages/kubernetes_services/__init__.py +++ b/src/_nebari/stages/kubernetes_services/__init__.py @@ -2,12 +2,10 @@ import json import sys import time -import typing -from typing import Any, Dict, List, Type +from typing import Any, Dict, List, Optional, Type, Union from urllib.parse import urlencode -import pydantic -from pydantic import Field +from pydantic import ConfigDict, Field, field_validator, model_validator from _nebari import constants from _nebari.stages.base import NebariTerraformStage @@ -81,13 +79,11 @@ class Theme(schema.Base): class KubeSpawner(schema.Base): - cpu_limit: int - cpu_guarantee: int + cpu_limit: float + cpu_guarantee: float mem_limit: str mem_guarantee: str - - class Config: - extra = "allow" + model_config = ConfigDict(extra="allow") class JupyterLabProfile(schema.Base): @@ -95,36 +91,31 @@ class JupyterLabProfile(schema.Base): display_name: str description: str default: bool = False - users: typing.Optional[typing.List[str]] - groups: typing.Optional[typing.List[str]] - 
kubespawner_override: typing.Optional[KubeSpawner] - - @pydantic.root_validator - def only_yaml_can_have_groups_and_users(cls, values): - if values["access"] != AccessEnum.yaml: - if ( - values.get("users", None) is not None - or values.get("groups", None) is not None - ): + users: Optional[List[str]] = None + groups: Optional[List[str]] = None + kubespawner_override: Optional[KubeSpawner] = None + + @model_validator(mode="after") + def only_yaml_can_have_groups_and_users(self): + if self.access != AccessEnum.yaml: + if self.users is not None or self.groups is not None: raise ValueError( "Profile must not contain groups or users fields unless access = yaml" ) - return values + return self class DaskWorkerProfile(schema.Base): - worker_cores_limit: int - worker_cores: int + worker_cores_limit: float + worker_cores: float worker_memory_limit: str worker_memory: str worker_threads: int = 1 - - class Config: - extra = "allow" + model_config = ConfigDict(extra="allow") class Profiles(schema.Base): - jupyterlab: typing.List[JupyterLabProfile] = [ + jupyterlab: List[JupyterLabProfile] = [ JupyterLabProfile( display_name="Small Instance", description="Stable environment with 2 cpu / 8 GB ram", @@ -147,7 +138,7 @@ class Profiles(schema.Base): ), ), ] - dask_worker: typing.Dict[str, DaskWorkerProfile] = { + dask_worker: Dict[str, DaskWorkerProfile] = { "Small Worker": DaskWorkerProfile( worker_cores_limit=2, worker_cores=1.5, @@ -164,25 +155,26 @@ class Profiles(schema.Base): ), } - @pydantic.validator("jupyterlab") - def check_default(cls, v, values): + @field_validator("jupyterlab") + @classmethod + def check_default(cls, value): """Check if only one default value is present.""" - default = [attrs["default"] for attrs in v if "default" in attrs] + default = [attrs["default"] for attrs in value if "default" in attrs] if default.count(True) > 1: raise TypeError( "Multiple default Jupyterlab profiles may cause unexpected problems." 
) - return v + return value class CondaEnvironment(schema.Base): name: str - channels: typing.Optional[typing.List[str]] - dependencies: typing.List[typing.Union[str, typing.Dict[str, typing.List[str]]]] + channels: Optional[List[str]] = None + dependencies: List[Union[str, Dict[str, List[str]]]] class CondaStore(schema.Base): - extra_settings: typing.Dict[str, typing.Any] = {} + extra_settings: Dict[str, Any] = {} extra_config: str = "" image: str = "quansight/conda-store-server" image_tag: str = constants.DEFAULT_CONDA_STORE_IMAGE_TAG @@ -197,7 +189,7 @@ class NebariWorkflowController(schema.Base): class ArgoWorkflows(schema.Base): enabled: bool = True - overrides: typing.Dict = {} + overrides: Dict = {} nebari_workflow_controller: NebariWorkflowController = NebariWorkflowController() @@ -206,9 +198,9 @@ class JHubApps(schema.Base): class MonitoringOverrides(schema.Base): - loki: typing.Dict = {} - promtail: typing.Dict = {} - minio: typing.Dict = {} + loki: Dict = {} + promtail: Dict = {} + minio: Dict = {} class Monitoring(schema.Base): @@ -219,7 +211,7 @@ class Monitoring(schema.Base): class JupyterLabPioneer(schema.Base): enabled: bool = False - log_format: typing.Optional[str] = None + log_format: Optional[str] = None class Telemetry(schema.Base): @@ -227,7 +219,7 @@ class Telemetry(schema.Base): class JupyterHub(schema.Base): - overrides: typing.Dict = {} + overrides: Dict = {} class IdleCuller(schema.Base): @@ -241,10 +233,10 @@ class IdleCuller(schema.Base): class JupyterLab(schema.Base): - default_settings: typing.Dict[str, typing.Any] = {} + default_settings: Dict[str, Any] = {} idle_culler: IdleCuller = IdleCuller() - initial_repositories: typing.List[typing.Dict[str, str]] = [] - preferred_dir: typing.Optional[str] = None + initial_repositories: List[Dict[str, str]] = [] + preferred_dir: Optional[str] = None class InputSchema(schema.Base): @@ -252,7 +244,7 @@ class InputSchema(schema.Base): storage: Storage = Storage() theme: Theme = Theme() profiles: Profiles = Profiles() - environments: typing.Dict[str, CondaEnvironment] = { + environments: Dict[str, CondaEnvironment] = { "environment-dask.yaml": CondaEnvironment( name="dask", channels=["conda-forge"], @@ -374,7 +366,9 @@ class JupyterhubInputVars(schema.Base): initial_repositories: str = Field(alias="initial-repositories") jupyterhub_overrides: List[str] = Field(alias="jupyterhub-overrides") jupyterhub_stared_storage: str = Field(alias="jupyterhub-shared-storage") - jupyterhub_shared_endpoint: str = Field(None, alias="jupyterhub-shared-endpoint") + jupyterhub_shared_endpoint: Optional[str] = Field( + alias="jupyterhub-shared-endpoint", default=None + ) jupyterhub_profiles: List[JupyterLabProfile] = Field(alias="jupyterlab-profiles") jupyterhub_image: ImageNameTag = Field(alias="jupyterhub-image") jupyterhub_hub_extraEnv: str = Field(alias="jupyterhub-hub-extraEnv") @@ -382,9 +376,7 @@ class JupyterhubInputVars(schema.Base): argo_workflows_enabled: bool = Field(alias="argo-workflows-enabled") jhub_apps_enabled: bool = Field(alias="jhub-apps-enabled") cloud_provider: str = Field(alias="cloud-provider") - jupyterlab_preferred_dir: typing.Optional[str] = Field( - alias="jupyterlab-preferred-dir" - ) + jupyterlab_preferred_dir: Optional[str] = Field(alias="jupyterlab-preferred-dir") class DaskGatewayInputVars(schema.Base): @@ -405,7 +397,7 @@ class MonitoringInputVars(schema.Base): class TelemetryInputVars(schema.Base): jupyterlab_pioneer_enabled: bool = Field(alias="jupyterlab-pioneer-enabled") - jupyterlab_pioneer_log_format: 
typing.Optional[str] = Field( + jupyterlab_pioneer_log_format: Optional[str] = Field( alias="jupyterlab-pioneer-log-format" ) @@ -498,7 +490,7 @@ def input_vars(self, stage_outputs: Dict[str, Dict[str, Any]]): conda_store_vars = CondaStoreInputVars( conda_store_environments={ - k: v.dict() for k, v in self.config.environments.items() + k: v.model_dump() for k, v in self.config.environments.items() }, conda_store_default_namespace=self.config.conda_store.default_namespace, conda_store_filesystem_storage=self.config.storage.conda_store, @@ -511,14 +503,14 @@ def input_vars(self, stage_outputs: Dict[str, Dict[str, Any]]): ) jupyterhub_vars = JupyterhubInputVars( - jupyterhub_theme=jupyterhub_theme.dict(), + jupyterhub_theme=jupyterhub_theme.model_dump(), jupyterlab_image=_split_docker_image_name( self.config.default_images.jupyterlab ), jupyterhub_stared_storage=self.config.storage.shared_filesystem, jupyterhub_shared_endpoint=jupyterhub_shared_endpoint, cloud_provider=cloud_provider, - jupyterhub_profiles=self.config.profiles.dict()["jupyterlab"], + jupyterhub_profiles=self.config.profiles.model_dump()["jupyterlab"], jupyterhub_image=_split_docker_image_name( self.config.default_images.jupyterhub ), @@ -526,7 +518,7 @@ def input_vars(self, stage_outputs: Dict[str, Dict[str, Any]]): jupyterhub_hub_extraEnv=json.dumps( self.config.jupyterhub.overrides.get("hub", {}).get("extraEnv", []) ), - idle_culler_settings=self.config.jupyterlab.idle_culler.dict(), + idle_culler_settings=self.config.jupyterlab.idle_culler.model_dump(), argo_workflows_enabled=self.config.argo_workflows.enabled, jhub_apps_enabled=self.config.jhub_apps.enabled, initial_repositories=str(self.config.jupyterlab.initial_repositories), @@ -538,7 +530,7 @@ def input_vars(self, stage_outputs: Dict[str, Dict[str, Any]]): dask_worker_image=_split_docker_image_name( self.config.default_images.dask_worker ), - dask_gateway_profiles=self.config.profiles.dict()["dask_worker"], + dask_gateway_profiles=self.config.profiles.model_dump()["dask_worker"], cloud_provider=cloud_provider, ) @@ -568,13 +560,13 @@ def input_vars(self, stage_outputs: Dict[str, Dict[str, Any]]): ) return { - **kubernetes_services_vars.dict(by_alias=True), - **conda_store_vars.dict(by_alias=True), - **jupyterhub_vars.dict(by_alias=True), - **dask_gateway_vars.dict(by_alias=True), - **monitoring_vars.dict(by_alias=True), - **argo_workflows_vars.dict(by_alias=True), - **telemetry_vars.dict(by_alias=True), + **kubernetes_services_vars.model_dump(by_alias=True), + **conda_store_vars.model_dump(by_alias=True), + **jupyterhub_vars.model_dump(by_alias=True), + **dask_gateway_vars.model_dump(by_alias=True), + **monitoring_vars.model_dump(by_alias=True), + **argo_workflows_vars.model_dump(by_alias=True), + **telemetry_vars.model_dump(by_alias=True), } def check( diff --git a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/argo-workflows/main.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/argo-workflows/main.tf index 29f27da26a..92bcad6eae 100644 --- a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/argo-workflows/main.tf +++ b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/argo-workflows/main.tf @@ -557,6 +557,23 @@ resource "kubernetes_manifest" "deployment_admission_controller" { } }, ] + affinity = { + nodeAffinity = { + requiredDuringSchedulingIgnoredDuringExecution = { + nodeSelectorTerms = [ + { + matchExpressions = [ + { + key = var.node-group.key + 
operator = "In" + values = [var.node-group.value] + } + ] + } + ] + } + } + } } } } diff --git a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/files/jupyterhub/03-profiles.py b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/files/jupyterhub/03-profiles.py index 06aa97287a..ddbc3b6be2 100644 --- a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/files/jupyterhub/03-profiles.py +++ b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/files/jupyterhub/03-profiles.py @@ -208,12 +208,14 @@ def base_profile_extra_mounts(): extra_pod_config = { "volumes": [ - { - "name": volume["name"], - "persistentVolumeClaim": {"claimName": volume["name"]}, - } - if volume["kind"] == "persistentvolumeclaim" - else {"name": volume["name"], "configMap": {"name": volume["name"]}} + ( + { + "name": volume["name"], + "persistentVolumeClaim": {"claimName": volume["name"]}, + } + if volume["kind"] == "persistentvolumeclaim" + else {"name": volume["name"], "configMap": {"name": volume["name"]}} + ) for mount_path, volume in extra_mounts.items() ] } @@ -367,9 +369,11 @@ def configure_user(username, groups, uid=1000, gid=100): # mount the shared directories for user only if there are # shared folders (groups) that the user is a member of # else ensure that the `shared` folder symlink does not exist - f"ln -sfn /shared /home/{username}/shared" - if groups - else f"rm -f /home/{username}/shared", + ( + f"ln -sfn /shared /home/{username}/shared" + if groups + else f"rm -f /home/{username}/shared" + ), # conda-store environment configuration f"printf '{condarc}' > /home/{username}/.condarc", # jupyter configuration @@ -451,6 +455,14 @@ def profile_conda_store_viewer_token(): } } }, + "CONDA_STORE_SERVICE_NAMESPACE": { + "valueFrom": { + "secretKeyRef": { + "name": "argo-workflows-conda-store-token", + "key": "conda-store-service-namespace", + } + } + }, } @@ -529,14 +541,13 @@ def render_profiles(spawner): # userinfo request to have the groups in the key # "auth_state.oauth_user.groups" auth_state = yield spawner.user.get_auth_state() - spawner.log.error(str(auth_state)) username = auth_state["oauth_user"]["preferred_username"] # only return the lowest level group name # e.g. 
/projects/myproj -> myproj # and /developers -> developers groups = [Path(group).name for group in auth_state["oauth_user"]["groups"]] - spawner.log.error(f"user info: {username} {groups}") + spawner.log.info(f"user info: {username} {groups}") keycloak_profilenames = auth_state["oauth_user"].get("jupyterlab_profiles", []) diff --git a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/files/jupyterlab/overrides.json b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/files/jupyterlab/overrides.json index fd6cafc624..02d6564455 100644 --- a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/files/jupyterlab/overrides.json +++ b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/files/jupyterlab/overrides.json @@ -6,7 +6,8 @@ "apiUrl": "/conda-store/", "authMethod": "cookie", "loginUrl": "/conda-store/login?next=", - "authToken": "" + "authToken": "", + "addMainMenuItem": false }, "@jupyterlab/apputils-extension:notification": { "checkForUpdates": false, @@ -50,13 +51,24 @@ "rank": 1000, "items": [ { - "command": "help:open", - "rank": 1, + "command": "nebari:run-first-enabled", "args": { - "url": "/conda-store", - "text": "Environment Management", - "newBrowserTab": true - } + "commands": [ + { + "id": "condastore:open", + "label": "Environment Management" + }, + { + "id": "help:open", + "args": { + "url": "/conda-store", + "text": "Environment Management", + "newBrowserTab": true + } + } + ] + }, + "rank": 1 }, { "command": "help:open", diff --git a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/main.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/main.tf index af690112f6..e2ddf02f3b 100644 --- a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/main.tf +++ b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/main.tf @@ -150,18 +150,25 @@ resource "helm_release" "jupyterhub" { enable_auth_state = true } GenericOAuthenticator = { - client_id = module.jupyterhub-openid-client.config.client_id - client_secret = module.jupyterhub-openid-client.config.client_secret - oauth_callback_url = "https://${var.external-url}/hub/oauth_callback" - authorize_url = module.jupyterhub-openid-client.config.authentication_url - token_url = module.jupyterhub-openid-client.config.token_url - userdata_url = module.jupyterhub-openid-client.config.userinfo_url - login_service = "Keycloak" - username_key = "preferred_username" - claim_groups_key = "roles" - allowed_groups = ["jupyterhub_admin", "jupyterhub_developer"] - admin_groups = ["jupyterhub_admin"] - tls_verify = false + client_id = module.jupyterhub-openid-client.config.client_id + client_secret = module.jupyterhub-openid-client.config.client_secret + oauth_callback_url = "https://${var.external-url}/hub/oauth_callback" + authorize_url = module.jupyterhub-openid-client.config.authentication_url + token_url = module.jupyterhub-openid-client.config.token_url + userdata_url = module.jupyterhub-openid-client.config.userinfo_url + login_service = "Keycloak" + username_claim = "preferred_username" + claim_groups_key = "groups" + allowed_groups = ["/analyst", "/developer", "/admin"] + admin_groups = ["/admin"] + manage_groups = true + refresh_pre_spawn = true + validate_server_cert = false + + # deprecated, to be removed (replaced by validate_server_cert) + tls_verify 
= false + # deprecated, to be removed (replaced by username_claim) + username_key = "preferred_username" } } } @@ -231,6 +238,28 @@ resource "kubernetes_manifest" "jupyterhub" { port = 80 } ] + middlewares = [ + { + name = kubernetes_manifest.jupyterhub-proxy-add-slash.manifest.metadata.name + namespace = var.namespace + } + ] + }, + { + kind = "Rule" + match = "Host(`${var.external-url}`) && (PathPrefix(`/home`) || PathPrefix(`/token`) || PathPrefix(`/admin`))" + middlewares = [ + { + name = kubernetes_manifest.jupyterhub-middleware-addprefix.manifest.metadata.name + namespace = var.namespace + } + ] + services = [ + { + name = "proxy-public" + port = 80 + } + ] } ] } @@ -264,8 +293,9 @@ resource "kubernetes_secret" "argo-workflows-conda-store-token" { } data = { - "conda-store-api-token" = var.conda-store-argo-workflows-jupyter-scheduler-token - "conda-store-service-name" = var.conda-store-service-name + "conda-store-api-token" = var.conda-store-argo-workflows-jupyter-scheduler-token + "conda-store-service-name" = var.conda-store-service-name + "conda-store-service-namespace" = var.namespace } type = "Opaque" diff --git a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/middleware.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/middleware.tf new file mode 100644 index 0000000000..6d01252295 --- /dev/null +++ b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/middleware.tf @@ -0,0 +1,33 @@ +resource "kubernetes_manifest" "jupyterhub-middleware-addprefix" { + manifest = { + apiVersion = "traefik.containo.us/v1alpha1" + kind = "Middleware" + metadata = { + name = "nebari-jupyterhub-add-prefix" + namespace = var.namespace + } + spec = { + addPrefix = { + prefix = "/hub" + } + } + } +} + +resource "kubernetes_manifest" "jupyterhub-proxy-add-slash" { + manifest = { + apiVersion = "traefik.containo.us/v1alpha1" + kind = "Middleware" + metadata = { + name = "nebari-jupyterhub-proxy-add-slash" + namespace = var.namespace + } + spec = { + redirectRegex = { + regex = "^https://${var.external-url}/user/([^/]+)/proxy/(\\d+)$" + replacement = "https://${var.external-url}/user/$${1}/proxy/$${2}/" + permanent = true + } + } + } +} diff --git a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/monitoring/main.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/monitoring/main.tf index 7ba919ec54..869f616c71 100644 --- a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/monitoring/main.tf +++ b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/monitoring/main.tf @@ -1,3 +1,8 @@ +resource "random_password" "grafana_admin_password" { + length = 32 + special = false +} + resource "helm_release" "prometheus-grafana" { name = "nebari" namespace = var.namespace @@ -176,6 +181,17 @@ resource "helm_release" "prometheus-grafana" { "${var.node-group.key}" = var.node-group.value } + additionalDataSources = [ + { + name = "Loki" + type = "loki" + url = "http://loki-gateway.${var.namespace}" + } + ] + + # Avoid using the default password, as that's a security risk + adminPassword : random_password.grafana_admin_password.result + sidecar = { dashboards = { annotations = { diff --git a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/monitoring/values.yaml b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/monitoring/values.yaml index 
f3cf47c88d..ada868882f 100644 --- a/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/monitoring/values.yaml +++ b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/monitoring/values.yaml @@ -1,7 +1 @@ # https://github.com/prometheus-community/helm-charts/blob/main/charts/kube-prometheus-stack/values.yaml - -grafana: - additionalDataSources: - - name: Loki - type: loki - url: http://loki-gateway.dev diff --git a/src/_nebari/stages/nebari_tf_extensions/__init__.py b/src/_nebari/stages/nebari_tf_extensions/__init__.py index b5bfdbec4f..eaaf131117 100644 --- a/src/_nebari/stages/nebari_tf_extensions/__init__.py +++ b/src/_nebari/stages/nebari_tf_extensions/__init__.py @@ -1,5 +1,4 @@ -import typing -from typing import Any, Dict, List, Type +from typing import Any, Dict, List, Optional, Type from _nebari.stages.base import NebariTerraformStage from _nebari.stages.tf_objects import ( @@ -25,8 +24,8 @@ class NebariExtension(schema.Base): keycloakadmin: bool = False jwt: bool = False nebariconfigyaml: bool = False - logout: typing.Optional[str] - envs: typing.Optional[typing.List[NebariExtensionEnv]] + logout: Optional[str] = None + envs: Optional[List[NebariExtensionEnv]] = None class HelmExtension(schema.Base): @@ -34,12 +33,12 @@ class HelmExtension(schema.Base): repository: str chart: str version: str - overrides: typing.Dict = {} + overrides: Dict = {} class InputSchema(schema.Base): - helm_extensions: typing.List[HelmExtension] = [] - tf_extensions: typing.List[NebariExtension] = [] + helm_extensions: List[HelmExtension] = [] + tf_extensions: List[NebariExtension] = [] class OutputSchema(schema.Base): @@ -67,12 +66,12 @@ def input_vars(self, stage_outputs: Dict[str, Dict[str, Any]]): "realm_id": stage_outputs["stages/06-kubernetes-keycloak-configuration"][ "realm_id" ]["value"], - "tf_extensions": [_.dict() for _ in self.config.tf_extensions], - "nebari_config_yaml": self.config.dict(), + "tf_extensions": [_.model_dump() for _ in self.config.tf_extensions], + "nebari_config_yaml": self.config.model_dump(), "keycloak_nebari_bot_password": stage_outputs[ "stages/05-kubernetes-keycloak" ]["keycloak_nebari_bot_password"]["value"], - "helm_extensions": [_.dict() for _ in self.config.helm_extensions], + "helm_extensions": [_.model_dump() for _ in self.config.helm_extensions], } diff --git a/src/_nebari/stages/terraform_state/__init__.py b/src/_nebari/stages/terraform_state/__init__.py index 094231e967..edd4b9ed8a 100644 --- a/src/_nebari/stages/terraform_state/__init__.py +++ b/src/_nebari/stages/terraform_state/__init__.py @@ -4,10 +4,9 @@ import os import pathlib import re -import typing -from typing import Any, Dict, List, Tuple, Type +from typing import Any, Dict, List, Optional, Tuple, Type -import pydantic +from pydantic import field_validator from _nebari.provider import terraform from _nebari.provider.cloud import azure_cloud @@ -39,10 +38,11 @@ class AzureInputVars(schema.Base): region: str storage_account_postfix: str state_resource_group_name: str - tags: Dict[str, str] = {} + tags: Dict[str, str] - @pydantic.validator("state_resource_group_name") - def _validate_resource_group_name(cls, value): + @field_validator("state_resource_group_name") + @classmethod + def _validate_resource_group_name(cls, value: str) -> str: if value is None: return value length = len(value) + len(AZURE_TF_STATE_RESOURCE_GROUP_SUFFIX) @@ -59,9 +59,10 @@ def _validate_resource_group_name(cls, value): return value - @pydantic.validator("tags") - def 
_validate_tags(cls, tags): - return azure_cloud.validate_tags(tags) + @field_validator("tags") + @classmethod + def _validate_tags(cls, value: Dict[str, str]) -> Dict[str, str]: + return azure_cloud.validate_tags(value) class AWSInputVars(schema.Base): @@ -82,8 +83,8 @@ def to_yaml(cls, representer, node): class TerraformState(schema.Base): type: TerraformStateEnum = TerraformStateEnum.remote - backend: typing.Optional[str] - config: typing.Dict[str, str] = {} + backend: Optional[str] = None + config: Dict[str, str] = {} class InputSchema(schema.Base): @@ -192,18 +193,18 @@ def input_vars(self, stage_outputs: Dict[str, Dict[str, Any]]): name=self.config.project_name, namespace=self.config.namespace, region=self.config.digital_ocean.region, - ).dict() + ).model_dump() elif self.config.provider == schema.ProviderEnum.gcp: return GCPInputVars( name=self.config.project_name, namespace=self.config.namespace, region=self.config.google_cloud_platform.region, - ).dict() + ).model_dump() elif self.config.provider == schema.ProviderEnum.aws: return AWSInputVars( name=self.config.project_name, namespace=self.config.namespace, - ).dict() + ).model_dump() elif self.config.provider == schema.ProviderEnum.azure: return AzureInputVars( name=self.config.project_name, @@ -217,7 +218,7 @@ def input_vars(self, stage_outputs: Dict[str, Dict[str, Any]]): suffix=AZURE_TF_STATE_RESOURCE_GROUP_SUFFIX, ), tags=self.config.azure.tags, - ).dict() + ).model_dump() elif ( self.config.provider == schema.ProviderEnum.local or self.config.provider == schema.ProviderEnum.existing diff --git a/src/_nebari/stages/terraform_state/template/azure/main.tf b/src/_nebari/stages/terraform_state/template/azure/main.tf index ecea10196d..c4efa43d3c 100644 --- a/src/_nebari/stages/terraform_state/template/azure/main.tf +++ b/src/_nebari/stages/terraform_state/template/azure/main.tf @@ -47,7 +47,7 @@ terraform { required_providers { azurerm = { source = "hashicorp/azurerm" - version = "=3.22.0" + version = "=3.97.1" } } required_version = ">= 1.0" diff --git a/src/_nebari/subcommands/init.py b/src/_nebari/subcommands/init.py index 05e7d712b9..44ec904c16 100644 --- a/src/_nebari/subcommands/init.py +++ b/src/_nebari/subcommands/init.py @@ -75,6 +75,15 @@ LATEST = "latest" +CLOUD_PROVIDER_FULL_NAME = { + "Local": ProviderEnum.local.name, + "Existing": ProviderEnum.existing.name, + "Digital Ocean": ProviderEnum.do.name, + "Amazon Web Services": ProviderEnum.aws.name, + "Google Cloud Platform": ProviderEnum.gcp.name, + "Microsoft Azure": ProviderEnum.azure.name, +} + class GitRepoEnum(str, enum.Enum): github = "github.com" @@ -410,7 +419,7 @@ def check_cloud_provider_kubernetes_version( f"Invalid Kubernetes version `{kubernetes_version}`. 
Please refer to the GCP docs for a list of valid versions: {versions}" ) elif cloud_provider == ProviderEnum.do.value.lower(): - versions = digital_ocean.kubernetes_versions(region) + versions = digital_ocean.kubernetes_versions() if not kubernetes_version or kubernetes_version == LATEST: kubernetes_version = get_latest_kubernetes_version(versions) @@ -647,12 +656,14 @@ def guided_init_wizard(ctx: typer.Context, guided_init: str): ) ) # try: - inputs.cloud_provider = questionary.select( + cloud_provider: str = questionary.select( "Where would you like to deploy your Nebari cluster?", - choices=enum_to_list(ProviderEnum), + choices=CLOUD_PROVIDER_FULL_NAME.keys(), qmark=qmark, ).unsafe_ask() + inputs.cloud_provider = CLOUD_PROVIDER_FULL_NAME.get(cloud_provider) + if not disable_checks: check_cloud_provider_creds( cloud_provider=inputs.cloud_provider, @@ -910,7 +921,11 @@ def if_used(key, model=inputs, ignore_list=["cloud_provider"]): return b.format(key=key, value=value).replace("_", "-") cmds = " ".join( - [_ for _ in [if_used(_) for _ in inputs.dict().keys()] if _ is not None] + [ + _ + for _ in [if_used(_) for _ in inputs.model_dump().keys()] + if _ is not None + ] ) rich.print( diff --git a/src/_nebari/upgrade.py b/src/_nebari/upgrade.py index 5c095f04a2..ee174a735c 100644 --- a/src/_nebari/upgrade.py +++ b/src/_nebari/upgrade.py @@ -9,7 +9,7 @@ import rich from packaging.version import Version -from pydantic.error_wrappers import ValidationError +from pydantic import ValidationError from rich.prompt import Prompt from _nebari.config import backup_configuration @@ -716,6 +716,28 @@ def _version_specific_upgrade( return config +class Upgrade_2024_3_2(UpgradeStep): + version = "2024.3.2" + + def _version_specific_upgrade( + self, config, start_version, config_filename: Path, *args, **kwargs + ): + rich.print("Ready to upgrade to Nebari version [green]2024.3.2[/green].") + + return config + + +class Upgrade_2024_3_3(UpgradeStep): + version = "2024.3.3" + + def _version_specific_upgrade( + self, config, start_version, config_filename: Path, *args, **kwargs + ): + rich.print("Ready to upgrade to Nebari version [green]2024.3.3[/green].") + + return config + + __rounded_version__ = str(rounded_ver_parse(__version__)) # Manually-added upgrade steps must go above this line diff --git a/src/_nebari/utils.py b/src/_nebari/utils.py index 3378116a1d..3ae4ad4bd8 100644 --- a/src/_nebari/utils.py +++ b/src/_nebari/utils.py @@ -11,7 +11,7 @@ import time import warnings from pathlib import Path -from typing import Dict, List +from typing import Dict, List, Set from ruamel.yaml import YAML @@ -268,18 +268,6 @@ def random_secure_string( return "".join(secrets.choice(chars) for i in range(length)) -def is_relative_to(self: Path, other: Path, /) -> bool: - """Compatibility function to bring ``Path.is_relative_to`` to Python 3.8""" - if sys.version_info[:2] >= (3, 9): - return self.is_relative_to(other) - - try: - self.relative_to(other) - return True - except ValueError: - return False - - def set_do_environment(): os.environ["AWS_ACCESS_KEY_ID"] = os.environ["SPACES_ACCESS_KEY_ID"] os.environ["AWS_SECRET_ACCESS_KEY"] = os.environ["SPACES_SECRET_ACCESS_KEY"] @@ -350,3 +338,18 @@ def get_provider_config_block_name(provider): return PROVIDER_CONFIG_NAMES[provider] else: return provider + + +def check_environment_variables(variables: Set[str], reference: str) -> None: + """Check that environment variables are set.""" + required_variables = { + variable: os.environ.get(variable, None) for variable in variables 
+    }
+    missing_variables = {
+        variable for variable, value in required_variables.items() if value is None
+    }
+    if missing_variables:
+        raise ValueError(
+            f"""Missing the following required environment variables: {missing_variables}\n
+            Please see the documentation for more information: {reference}"""
+        )
diff --git a/src/nebari/schema.py b/src/nebari/schema.py
index 313364f811..70b9589e6f 100644
--- a/src/nebari/schema.py
+++ b/src/nebari/schema.py
@@ -1,6 +1,8 @@
 import enum
+from typing import Annotated

 import pydantic
+from pydantic import ConfigDict, Field, StringConstraints, field_validator
 from ruamel.yaml import yaml_object

 from _nebari.utils import escape_string, yaml
@@ -8,27 +10,23 @@
 # Regex for suitable project names
 project_name_regex = r"^[A-Za-z][A-Za-z0-9\-_]{1,14}[A-Za-z0-9]$"
-project_name_pydantic = pydantic.constr(regex=project_name_regex)
+project_name_pydantic = Annotated[str, StringConstraints(pattern=project_name_regex)]

 # Regex for suitable namespaces
 namespace_regex = r"^[A-Za-z][A-Za-z\-_]*[A-Za-z]$"
-namespace_pydantic = pydantic.constr(regex=namespace_regex)
+namespace_pydantic = Annotated[str, StringConstraints(pattern=namespace_regex)]

 email_regex = "^[^ @]+@[^ @]+\\.[^ @]+$"
-email_pydantic = pydantic.constr(regex=email_regex)
+email_pydantic = Annotated[str, StringConstraints(pattern=email_regex)]

 github_url_regex = "^(https://)?github.com/([^/]+)/([^/]+)/?$"
-github_url_pydantic = pydantic.constr(regex=github_url_regex)
+github_url_pydantic = Annotated[str, StringConstraints(pattern=github_url_regex)]


 class Base(pydantic.BaseModel):
-    ...
-
-    class Config:
-        extra = "forbid"
-        validate_assignment = True
-        allow_population_by_field_name = True
-        smart_union = True
+    model_config = ConfigDict(
+        extra="forbid", validate_assignment=True, populate_by_name=True
+    )


 @yaml_object(yaml)
@@ -50,7 +48,7 @@ class Main(Base):
     namespace: namespace_pydantic = "dev"
     provider: ProviderEnum = ProviderEnum.local
     # In nebari_version only use major.minor.patch version - drop any pre/post/dev suffixes
-    nebari_version: str = __version__
+    nebari_version: Annotated[str, Field(validate_default=True)] = __version__

     prevent_deploy: bool = (
         False  # Optional, but will be given default value if not present
     )

     # If the nebari_version in the schema is old
     # we must tell the user to first run nebari upgrade
-    @pydantic.validator("nebari_version", pre=True, always=True)
-    def check_default(cls, v):
-        """
-        Always called even if nebari_version is not supplied at all (so defaults to ''). That way we can give a more helpful error message.
-        """
-        if not cls.is_version_accepted(v):
-            if v == "":
-                v = "not supplied"
-            raise ValueError(
-                f"nebari_version in the config file must be equivalent to {__version__} to be processed by this version of nebari (your config file version is {v})."
-                " Install a different version of nebari or run nebari upgrade to ensure your config file is compatible."
-            )
-        return v
+    @field_validator("nebari_version")
+    @classmethod
+    def check_default(cls, value):
+        assert cls.is_version_accepted(
+            value
+        ), f"nebari_version={value} is not an accepted version, it must be equivalent to {__version__}.\nInstall a different version of nebari or run nebari upgrade to ensure your config file is compatible."
+ return value @classmethod def is_version_accepted(cls, v): diff --git a/tests/tests_deployment/test_grafana_api.py b/tests/tests_deployment/test_grafana_api.py new file mode 100644 index 0000000000..cdb489f349 --- /dev/null +++ b/tests/tests_deployment/test_grafana_api.py @@ -0,0 +1,18 @@ +import base64 + +import pytest +import requests + +from tests.tests_deployment import constants + + +@pytest.mark.filterwarnings("ignore::urllib3.exceptions.InsecureRequestWarning") +def test_grafana_api_not_accessible_with_default_credentials(): + """Making sure that Grafana's API is not accessible on default user/pass""" + user_pass_b64_encoded = base64.b64encode(b"admin:prom-operator").decode() + response = requests.get( + f"https://{constants.NEBARI_HOSTNAME}/monitoring/api/datasources", + headers={"Authorization": f"Basic {user_pass_b64_encoded}"}, + verify=False, + ) + assert response.status_code == 401 diff --git a/tests/tests_integration/deployment_fixtures.py b/tests/tests_integration/deployment_fixtures.py index 1709bd7262..f5752d4c24 100644 --- a/tests/tests_integration/deployment_fixtures.py +++ b/tests/tests_integration/deployment_fixtures.py @@ -167,7 +167,7 @@ def deploy(request): config = add_preemptible_node_group(config, cloud=cloud) print("*" * 100) - pprint.pprint(config.dict()) + pprint.pprint(config.model_dump()) print("*" * 100) # render diff --git a/tests/tests_unit/cli_validate/local.error.authentication-type-called-custom.yaml b/tests/tests_unit/cli_validate/local.error.authentication-type-custom.yaml similarity index 100% rename from tests/tests_unit/cli_validate/local.error.authentication-type-called-custom.yaml rename to tests/tests_unit/cli_validate/local.error.authentication-type-custom.yaml diff --git a/tests/tests_unit/cli_validate/local.error.extra-fields.yaml b/tests/tests_unit/cli_validate/local.error.extra-inputs.yaml similarity index 100% rename from tests/tests_unit/cli_validate/local.error.extra-fields.yaml rename to tests/tests_unit/cli_validate/local.error.extra-inputs.yaml diff --git a/tests/tests_unit/conftest.py b/tests/tests_unit/conftest.py index e98661c214..d78dfdf1ec 100644 --- a/tests/tests_unit/conftest.py +++ b/tests/tests_unit/conftest.py @@ -172,7 +172,7 @@ def nebari_config_options(request) -> schema.Main: @pytest.fixture def nebari_config(nebari_config_options): - return nebari_plugin_manager.config_schema.parse_obj( + return nebari_plugin_manager.config_schema.model_validate( render_config(**nebari_config_options) ) diff --git a/tests/tests_unit/test_cli_validate.py b/tests/tests_unit/test_cli_validate.py index 00c46c2cd6..faf2efa8a1 100644 --- a/tests/tests_unit/test_cli_validate.py +++ b/tests/tests_unit/test_cli_validate.py @@ -134,7 +134,13 @@ def test_cli_validate_from_env(): "key, value, provider, expected_message, addl_config", [ ("NEBARI_SECRET__project_name", "123invalid", "local", "validation error", {}), - ("NEBARI_SECRET__this_is_an_error", "true", "local", "object has no field", {}), + ( + "NEBARI_SECRET__this_is_an_error", + "true", + "local", + "Object has no attribute", + {}, + ), ( "NEBARI_SECRET__amazon_web_services__kubernetes_version", "1.0", diff --git a/tests/tests_unit/test_config.py b/tests/tests_unit/test_config.py index ccc52543d7..f20eb3f671 100644 --- a/tests/tests_unit/test_config.py +++ b/tests/tests_unit/test_config.py @@ -97,7 +97,7 @@ def test_read_configuration_non_existent_file(nebari_config): def test_write_configuration_with_dict(nebari_config, tmp_path): config_file = tmp_path / "nebari-config-dict.yaml" - 
config_dict = nebari_config.dict() + config_dict = nebari_config.model_dump() write_configuration(config_file, config_dict) read_config = read_configuration(config_file, nebari_config.__class__) diff --git a/tests/tests_unit/test_render.py b/tests/tests_unit/test_render.py index 73c4fb5ca1..e0fd6636fe 100644 --- a/tests/tests_unit/test_render.py +++ b/tests/tests_unit/test_render.py @@ -1,7 +1,6 @@ import os from _nebari.stages.bootstrap import CiEnum -from nebari import schema from nebari.plugins import nebari_plugin_manager @@ -22,18 +21,12 @@ def test_render_config(nebari_render): "03-kubernetes-initialize", }.issubset(os.listdir(output_directory / "stages")) - if config.provider == schema.ProviderEnum.do: - assert (output_directory / "stages" / "01-terraform-state/do").is_dir() - assert (output_directory / "stages" / "02-infrastructure/do").is_dir() - elif config.provider == schema.ProviderEnum.aws: - assert (output_directory / "stages" / "01-terraform-state/aws").is_dir() - assert (output_directory / "stages" / "02-infrastructure/aws").is_dir() - elif config.provider == schema.ProviderEnum.gcp: - assert (output_directory / "stages" / "01-terraform-state/gcp").is_dir() - assert (output_directory / "stages" / "02-infrastructure/gcp").is_dir() - elif config.provider == schema.ProviderEnum.azure: - assert (output_directory / "stages" / "01-terraform-state/azure").is_dir() - assert (output_directory / "stages" / "02-infrastructure/azure").is_dir() + assert ( + output_directory / "stages" / f"01-terraform-state/{config.provider.value}" + ).is_dir() + assert ( + output_directory / "stages" / f"02-infrastructure/{config.provider.value}" + ).is_dir() if config.ci_cd.type == CiEnum.github_actions: assert (output_directory / ".github/workflows/").is_dir() diff --git a/tests/tests_unit/test_schema.py b/tests/tests_unit/test_schema.py index b4fb58bc62..446b6d1085 100644 --- a/tests/tests_unit/test_schema.py +++ b/tests/tests_unit/test_schema.py @@ -1,7 +1,7 @@ from contextlib import nullcontext import pytest -from pydantic.error_wrappers import ValidationError +from pydantic import ValidationError from nebari import schema from nebari.plugins import nebari_plugin_manager @@ -125,7 +125,7 @@ def test_no_provider(config_schema, provider, full_name, default_fields): } config = config_schema(**config_dict) assert config.provider == provider - assert full_name in config.dict() + assert full_name in config.model_dump() def test_multiple_providers(config_schema): @@ -164,6 +164,6 @@ def test_setted_provider(config_schema, provider): } config = config_schema(**config_dict) assert config.provider == provider - result_config_dict = config.dict() + result_config_dict = config.model_dump() assert provider in result_config_dict assert result_config_dict[provider]["kube_context"] == "some_context"
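
The Python side of this diff follows one mechanical recipe for the Pydantic v1 to v2 migration. As a quick reference for reviewers, here is a minimal, self-contained sketch of that mapping; the `Cluster` model below is hypothetical, not from the Nebari codebase, and assumes `pydantic>=2` is installed:

```python
from typing import Optional

from pydantic import BaseModel, ConfigDict, field_validator, model_validator


class Cluster(BaseModel):
    # v1 `class Config: extra = "allow"` becomes `model_config` in v2
    model_config = ConfigDict(extra="allow")

    name: str
    # v1 tolerated a bare `Optional[str]`; v2 needs the explicit default
    region: Optional[str] = None

    # v1 `@pydantic.validator("name")` becomes `@field_validator("name")`
    @field_validator("name")
    @classmethod
    def check_name(cls, value: str) -> str:
        if not value.islower():
            raise ValueError("name must be lowercase")
        return value

    # v1 `@pydantic.root_validator` becomes `@model_validator`; mode="after"
    # receives the validated instance instead of a `values` dict
    @model_validator(mode="after")
    def check_region(self):
        if self.region is not None and not self.region.islower():
            raise ValueError("region must be lowercase")
        return self


cluster = Cluster(name="dev")
print(cluster.model_dump())  # v1 spelled this `cluster.dict()`
```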
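The `*InputVars` models keep feeding Terraform through kebab-case aliases, now dumped with `model_dump(by_alias=True)` instead of `dict(by_alias=True)`. A small sketch of that alias round-trip, using a hypothetical one-field model that mirrors the `populate_by_name=True` setting in `schema.Base`:

```python
from pydantic import BaseModel, ConfigDict, Field


class JupyterhubVars(BaseModel):
    # populate_by_name lets callers construct via the Python field name
    model_config = ConfigDict(populate_by_name=True)

    cloud_provider: str = Field(alias="cloud-provider")


vars_ = JupyterhubVars(cloud_provider="aws")  # set via the Python name...
print(vars_.model_dump(by_alias=True))        # ...dumped under the Terraform alias
# -> {'cloud-provider': 'aws'}
```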
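Finally, the diff replaces the custom `__get_validators__` registry on `Authentication` with a plain `Union` plus a `field_validator` on `Security`. Pydantic v2 also ships a built-in alternative, discriminated unions, which dispatch on a `Literal` tag field; the sketch below uses simplified stand-ins for the real models (which tag with `AuthenticationEnum` rather than `Literal`, hence the validator approach taken in the diff):

```python
from typing import Literal, Union

from pydantic import BaseModel, Field


class PasswordAuthentication(BaseModel):
    type: Literal["password"] = "password"


class GitHubAuthentication(BaseModel):
    type: Literal["github"] = "github"
    client_id: str


class Security(BaseModel):
    # v2 picks the right submodel from the `type` key, no custom validator needed
    authentication: Union[PasswordAuthentication, GitHubAuthentication] = Field(
        default_factory=PasswordAuthentication, discriminator="type"
    )


s = Security.model_validate({"authentication": {"type": "github", "client_id": "x"}})
assert isinstance(s.authentication, GitHubAuthentication)
```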