Skip to content

Commit

Permalink
Commit triggered by a change on the main branch of helm-charts-dev
Browse files Browse the repository at this point in the history
  • Loading branch information
bbrauzzi committed Feb 9, 2023
1 parent 8ce35a4 commit d587a11
Show file tree
Hide file tree
Showing 3 changed files with 240 additions and 1 deletion.
2 changes: 1 addition & 1 deletion charts/application-hub/Chart.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ type: application
# This is the chart version. This version number should be incremented each time you make changes
# to the chart and its templates, including the app version.
# Versions are expected to follow Semantic Versioning (https://semver.org/)
version: 2.0.3
version: 2.0.4

# This is the version number of the application being deployed. This version number should be
# incremented each time you make changes to the application. Versions are not expected to
Expand Down
119 changes: 119 additions & 0 deletions charts/application-hub/files/hub/jupyter_config.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,119 @@
import os
import sys

from tornado.httpclient import AsyncHTTPClient

# Make the z2jh.py helper module that ships alongside this file importable.
configuration_directory = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, configuration_directory)

from z2jh import (
    get_name,
    get_name_env
)


# Configure JupyterHub to use the curl backend for making HTTP requests,
# rather than the pure-python implementations. The default one starts
# being too slow to make a large number of requests to the proxy API
# at the rate required.
AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient")

# The proxy runs in its own pod: point the hub at its API service and
# stop the hub from launching (or cleaning up) a proxy of its own.
c.ConfigurableHTTPProxy.api_url = (
    f'http://{get_name("proxy-api")}:{get_name_env("proxy-api", "_SERVICE_PORT")}'
)
c.ConfigurableHTTPProxy.should_start = False

# Don't wait at all before redirecting a spawning user to the progress page
c.JupyterHub.tornado_settings = {
    "slow_spawn_timeout": 0,
}

#c.KubeSpawner.image_pull_secrets =

# Required environment: the hub pod spec must provide these three.
jupyterhub_env = os.environ["JUPYTERHUB_ENV"].upper()
# Lower-cased variant, computed once and reused by every name template below.
jupyterhub_env_lower = os.environ["JUPYTERHUB_ENV"].lower()
jupyterhub_hub_pod_namespace = os.environ["POD_NAMESPACE"].split(" ")[0]
jupyterhub_single_user_image = os.environ["JUPYTERHUB_SINGLE_USER_IMAGE"]
# Optional auth settings with safe defaults.
jupyterhub_auth_method = os.environ.get("JUPYTERHUB_AUTH_METHOD", "pam")
jupyterhub_oauth_callback_url = os.environ.get("JUPYTERHUB_OAUTH_CALLBACK_URL", "")
jupyterhub_oauth_client_id = os.environ.get("JUPYTERHUB_OAUTH_CLIENT_ID", "")
jupyterhub_oauth_client_secret = os.environ.get("JUPYTERHUB_OAUTH_CLIENT_SECRET", "")


jupyterhub_hub_host = f"hub.{jupyterhub_hub_pod_namespace}"
c.JupyterHub.authenticator_class = "jupyterhub.auth.DummyAuthenticator"
c.JupyterHub.cookie_secret_file = "/srv/jupyterhub/cookie_secret"
# Misc
# Keep spawned single-user servers alive across hub restarts.
# (Previously set twice — once under "Proxy config" and once under "Misc".)
c.JupyterHub.cleanup_servers = False
# Network
c.JupyterHub.allow_named_servers = False
c.JupyterHub.ip = "0.0.0.0"
c.JupyterHub.hub_ip = "0.0.0.0"
c.JupyterHub.hub_connect_ip = jupyterhub_hub_host

# Culling: stop single-user servers idle for more than an hour.
c.JupyterHub.services = [
    {
        "name": "idle-culler",
        "admin": True,
        "command": [sys.executable, "-m", "jupyterhub_idle_culler", "--timeout=3600"],
    }
]

# Logs
c.JupyterHub.log_level = "DEBUG"

# Spawner
c.JupyterHub.spawner_class = "kubespawner.KubeSpawner"
c.KubeSpawner.environment = {
    "JUPYTER_ENABLE_LAB": "true",
}

c.KubeSpawner.uid = 1001
c.KubeSpawner.fs_gid = 100
c.KubeSpawner.hub_connect_ip = jupyterhub_hub_host

# SecurityContext
# NOTE(review): privileged user pods are a large attack surface — confirm
# the single-user image actually needs this.
c.KubeSpawner.privileged = True

# ServiceAccount
c.KubeSpawner.service_account = "default"
c.KubeSpawner.start_timeout = 60 * 5
c.KubeSpawner.image = jupyterhub_single_user_image
# Was ``c.KubernetesSpawner.verify_ssl``: no class of that name exists, so
# traitlets silently ignored the setting. The configured spawner class is
# KubeSpawner (see spawner_class above).
c.KubeSpawner.verify_ssl = True
c.KubeSpawner.pod_name_template = (
    "jupyter-{username}-" + jupyterhub_env_lower
)

# NodeSelector
#c.KubeSpawner.node_selector = {"jupyter": "prod"}

# Namespace
c.KubeSpawner.namespace = jupyterhub_hub_pod_namespace

# User namespace
c.KubeSpawner.enable_user_namespaces = True

# Volumes: one NFS-backed 10Gi PVC per user, mounted at /workspace.
c.KubeSpawner.storage_capacity = "10Gi"
c.KubeSpawner.storage_class = "managed-nfs-storage"
c.KubeSpawner.storage_pvc_ensure = True
c.KubeSpawner.pvc_name_template = (
    "claim-{username}-" + jupyterhub_env_lower
)
c.KubeSpawner.volumes = [
    {
        "name": "volume-workspace-{username}-" + jupyterhub_env_lower,
        "persistentVolumeClaim": {
            "claimName": "claim-{username}-" + jupyterhub_env_lower
        },
    },
]
c.KubeSpawner.volume_mounts = [
    {
        "name": "volume-workspace-{username}-" + jupyterhub_env_lower,
        "mountPath": "/workspace",
    }
]
120 changes: 120 additions & 0 deletions charts/application-hub/files/hub/z2jh.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,120 @@
"""
Utility methods for use in jupyterhub_config.py and dynamic subconfigs.
Methods here can be imported by extraConfig in values.yaml
"""
from collections.abc import Mapping
from functools import lru_cache
import os

import yaml

# memoize so we only load config once
@lru_cache()
def _load_config():
"""Load the Helm chart configuration used to render the Helm templates of
the chart from a mounted k8s Secret, and merge in values from an optionally
mounted secret (hub.existingSecret)."""

cfg = {}
for source in ("secret/values.yaml", "existing-secret/values.yaml"):
path = f"/usr/local/etc/jupyterhub/{source}"
if os.path.exists(path):
print(f"Loading {path}")
with open(path) as f:
values = yaml.safe_load(f)
cfg = _merge_dictionaries(cfg, values)
else:
print(f"No config at {path}")
return cfg


@lru_cache()
def _get_config_value(key):
"""Load value from the k8s ConfigMap given a key."""

path = f"/usr/local/etc/jupyterhub/config/{key}"
if os.path.exists(path):
with open(path) as f:
return f.read()
else:
raise Exception(f"{path} not found!")


@lru_cache()
def get_secret_value(key, default="never-explicitly-set"):
    """Look up *key* in the user-managed Secret, then the chart Secret.

    The user-managed mount wins when both define the key. Falls back to
    *default* when neither does; raises if no default was supplied (the
    sentinel string marks "no default passed").
    """
    candidate_paths = (
        f"/usr/local/etc/jupyterhub/{source}/{key}"
        for source in ("existing-secret", "secret")
    )
    for path in candidate_paths:
        if os.path.exists(path):
            with open(path) as f:
                return f.read()
    if default != "never-explicitly-set":
        return default
    raise Exception(f"{key} not found in either k8s Secret!")


def get_name(name):
    """Resolve a resource's short *name* to its chart fullname (from the ConfigMap)."""
    return _get_config_value(name)


def get_name_env(name, suffix=""):
    """Return the environment-variable value derived from a resource's fullname.

    The fullname plus *suffix* is upper-cased with dashes mapped to
    underscores — the form Kubernetes uses for service env vars such as
    PROXY_PUBLIC_SERVICE_PORT — and looked up in the process environment.
    """
    variable = (_get_config_value(name) + suffix).upper().replace("-", "_")
    return os.environ[variable]


def _merge_dictionaries(a, b):
"""Merge two dictionaries recursively.
Simplified From https://stackoverflow.com/a/7205107
"""
merged = a.copy()
for key in b:
if key in a:
if isinstance(a[key], Mapping) and isinstance(b[key], Mapping):
merged[key] = _merge_dictionaries(a[key], b[key])
else:
merged[key] = b[key]
else:
merged[key] = b[key]
return merged


def get_config(key, default=None):
    """Return the config item at dotted path *key*, or *default* if absent.

    Values are whatever YAML parsed them to, so lists and dicts work too;
    get_config("a.b.c") returns config['a']['b']['c'].
    """
    node = _load_config()
    for part in key.split("."):
        # Stop if a parent is a scalar/null or the segment is missing.
        if not isinstance(node, dict) or part not in node:
            return default
        node = node[part]
    return node


def set_config_if_not_none(cparent, name, key):
    """Set attribute *name* on *cparent* to the config value at *key*, skipping None."""
    value = get_config(key)
    if value is None:
        return
    setattr(cparent, name, value)

0 comments on commit d587a11

Please sign in to comment.