Bump opentelemetry from 0.26.0 to 0.27.1 #89

Open · wants to merge 2 commits into main
125 changes: 71 additions & 54 deletions Cargo.lock

Large diffs are not rendered by default.

3 changes: 2 additions & 1 deletion Cargo.toml
@@ -28,7 +28,7 @@ version = "0.96.0"


[workspace.package]
version = "0.3.2"
version = "0.3.3"
authors = ["Sébastien Huss <[email protected]>"]
edition = "2021"
license = "BSD-3-Clause"
@@ -77,6 +77,7 @@ operator = { cmd=[
"podman build . -f operator/Dockerfile -t docker.io/sebt3/vynil-operator:$(cargo run --bin agent -- version) && podman push docker.io/sebt3/vynil-operator:$(cargo run --bin agent -- version)",
]}
box = { cmd=[
"cargo cmd generate_crd",
"cargo run --bin agent -- package update --source ./box/vynil/",
"cargo run --bin agent -- package build -o ./box/vynil/ --tag $(cargo run --bin agent -- version) -r docker.io -n sebt3/vynil -u $(jq -r '.auths[\"docker.io\"].auth' </run/user/$(id -u)/containers/auth.json |base64 -d|awk -F: '{print $1}') -p $(jq -r '.auths[\"docker.io\"].auth' </run/user/$(id -u)/containers/auth.json |base64 -d|awk -F: '{print $2}')",
]}
4 changes: 3 additions & 1 deletion agent/Dockerfile
@@ -58,10 +58,12 @@ COPY agent/providers.tf .
RUN tofu init
# Finally assemble everything together
FROM middle AS target
RUN mkdir -p /var/cache/restic /backup /secrets && chown nobody:nogroup /var/cache/restic /backup /secrets
COPY --from=builder /usr/src/agent/target/release/agent /usr/bin/agent
COPY agent/scripts /usr/lib/vynil/scripts
COPY agent/templates /usr/lib/vynil/templates
USER nobody
COPY --from=downloader /src/.terraform/providers /nonexistent/.terraform.d/plugins
WORKDIR /work
ENV SCRIPT_DIRECTORY=/usr/lib/vynil/scripts PACKAGE_DIRECTORY=/package CONFIG_DIR=/etc/vynil
ENV SCRIPT_DIRECTORY=/usr/lib/vynil/scripts TEMPLATE_DIRECTORY=/usr/lib/vynil/templates PACKAGE_DIRECTORY=/package CONFIG_DIR=/etc/vynil XDG_CACHE_HOME=/var/cache/restic
ENTRYPOINT ["agent"]
2 changes: 1 addition & 1 deletion agent/parent.toml
@@ -26,7 +26,7 @@ version = "0.96.0"


[workspace.package]
version = "0.3.2"
version = "0.3.3"
authors = ["Sébastien Huss <[email protected]>"]
edition = "2021"
license = "BSD-3-Clause"
247 changes: 247 additions & 0 deletions agent/scripts/lib/backup_context.rhai
@@ -0,0 +1,247 @@
// create the run context for the backup/restore pod from the environment variables prepared by the "run" function below
fn from_args(context) {
context["deployment_list"] = get_env("DEPLOYMENT_LIST").split(" ").filter(|x| x!="");
context["statefulset_list"] = get_env("STATEFULSET_LIST").split(" ").filter(|x| x!="");
context["secret_list"] = get_env("SECRET_LIST").split(" ").filter(|x| x!="");
context["pg_list"] = get_env("PG_LIST").split(" ").filter(|x| x!="");
context["restic_tags"] = get_env("RESTIC_TAGS");
context["snapshot"] = get_env("RESTIC_SNAPSHOT");
context["max_dayly"] = get_env("RESTIC_MAX_DAYLY");
if context["max_dayly"] == "" {context["max_dayly"] = "1";}
context["max_weekly"] = get_env("RESTIC_MAX_WEEKLY");
if context["max_weekly"] == "" {context["max_weekly"] = "5";}
context["max_monthly"] = get_env("RESTIC_MAX_MONTHLY");
if context["max_monthly"] == "" {context["max_monthly"] = "12";}
context["max_yearly"] = get_env("RESTIC_MAX_YEARLY");
if context["max_yearly"] == "" {context["max_yearly"] = "4";}
context["namespace"] = get_env("NAMESPACE");
try {
context["scale_target"] = parse_int(get_env("SCALE_TARGET"));
} catch {
context["scale_target"] = 1;
}
let sub_path = get_env("SUB_PATH");
let base_path = get_env("BASE_REPO_URL");
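// strip leading/trailing "/" from both paths: "/"→" ", trim, then " "→"/" (Rhai string methods mutate in place)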
sub_path.replace("/"," ");
sub_path.trim();
sub_path.replace(" ","/");
base_path.replace("/"," ");
base_path.trim();
base_path.replace(" ","/");
context["sub_path"] = sub_path;
context["base_path"] = base_path;
context["s3_url"] = `s3:${base_path}/${sub_path}`;
context["restic_args"] = "";
if get_env("INSECURE_TLS") == "true" {
context["restic_args"] += " --insecure-tls";
}
context
}

fn run(instance, context, use_init_from) {
// TODO: Prepare MongoDB backup
// TODO: Prepare Redis backup
let secret_name = "backup-settings";
let sub_path = `${context.instance.namespace}/${context.instance.appslug}`;
if use_init_from {
if instance.spec.initFrom.secretName != () {
secret_name = instance.spec.initFrom.secretName;
}
if instance.spec.initFrom.subPath != () {
sub_path = instance.spec.initFrom.subPath;
}
}
context["volumes"] = [#{
name: "empty-dir",
emptyDir: #{}
}];
context["mounts"] = [#{
name: "empty-dir",
mountPath: `/backup`
}];
context["envs_from"] = [#{
secretRef: #{
name: secret_name
}
}];
context["envs"] = [#{
name: "RESTIC_TAGS",
value: `tenant:${context.tenant.name},namespace:${context.instance.namespace},instance:${context.instance.name},component:${context.instance["package"].name},version:${context.instance.requested}`
}, #{
name: "RESTIC_HOST",
value: context.instance.appslug
}, #{
name: "AGENT_IMAGE",
value: context.agent_image
}, #{
name: "SCALE_TARGET",
value: if context.namespace.ha { "2" } else { "1" }
}, #{
name: "VYNIL_NAMESPACE",
value: context.cluster.vynil_namespace
}, #{
name: "INSTANCE",
value: context.instance.name
}, #{
name: "TAG",
value: context.instance.requested
}, #{
name: "SUB_PATH",
value: sub_path
}, #{
name: "APPSLUG",
value: context.instance.appslug
}, #{
name: "NAMESPACE",
valueFrom: #{
fieldRef: #{
apiVersion: "v1",
fieldPath: "metadata.namespace"
}
}
}, #{
name: "POD_NAME",
valueFrom: #{
fieldRef: #{
apiVersion: "v1",
fieldPath: "metadata.name"
}
}
}];
if use_init_from {
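// when seeding from another instance, pin the restic snapshot to restore from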
context["envs"] += #{
name: "RESTIC_SNAPSHOT",
value: instance.spec.initFrom.snapshot
};
}
context["files"] = [];
if is_dir(`${context.package_dir}/scripts`) {
for f in read_dir(`${context.package_dir}/scripts`) {
let base = basename(f);
if base.starts_with("restore") || base.starts_with("backup") || base.starts_with("maintenance") {
context["files"] += #{
name: base,
content: file_read(f)
};
}
}
}
context["has_files"] = context["files"].len() > 0;
if context["has_files"] {
context["volumes"] += #{
name: "backups-scripts",
configMap: #{
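// 493 == 0o755, so the mounted scripts are executable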
defaultMode: 493,
name: `${context.instance.appslug}-backups-scripts`
}
};
context["mounts"] += #{
name: "backups-scripts",
mountPath: "/package/scripts"
};
}
context["schedule"] = `${context.namespace.maintenance_start_minut} ${context.namespace.maintenance_start_hour} * * *`;
context["service_account"] = `${context.instance.appslug}-backup`;
let pgs = [];
let secrets = [];
for v in instance.status.vitals {
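// derive a short mount/env name by stripping the appslug and dangling dashes from the vital name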
let name = v.name;
name.replace(context.instance.appslug,"");
name.replace("-"," ");
name.trim();
name.replace(" ","-");
if v.kind == "PersistentVolumeClaim" {
if name.is_empty() {
name = "data";
}
context["volumes"] += #{
name: name,
persistentVolumeClaim: #{
claimName: v.name
}
};
context["mounts"] += #{
name: name,
mountPath: `/backup/${name}`
};
} else if v.kind == "Secret" {
if name.is_empty() {
name = "secret";
}
secrets += name;
if ! use_init_from {
context["volumes"] += #{
name: name,
secret: #{
secretName: v.name
}
};
context["mounts"] += #{
name: name,
mountPath: `/secrets/${name}`
};
}
context["envs"] += #{
name: `secret_${name}_target`,
value: v.name
};
} else if v.kind == "Cluster" {
if name.is_empty() {
name = "postgres";
}
pgs += name;
let lst = k8s_resource("Secrets", context.instance.namespace).list_meta().items;
let secret = `${v.name}-app`;
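// prefer the "<cluster>-superuser" secret when it exists, otherwise fall back to "<cluster>-app"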
if lst.filter(|s| s.metadata.name == `${v.name}-superuser`).len() > 0 {
secret = `${v.name}-superuser`;
}
for i in ["host", "username", "password", "dbname"] {
context["envs"] += #{
name: `${name}_${i}`,
valueFrom: #{
secretKeyRef: #{
name: secret,
key: i
}
}
};
}
}
}
let deploy = [];
let sts = [];
if instance.status.scalables != () {
for s in instance.status.scalables {
if s.kind == "Deployment" {
deploy += s.name;
} else if s.kind == "StatefulSet" {
sts += s.name;
}
}
}
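// space-join each collected list into its env variable; reduce() seeds with () so the first item is kept verbatim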
if deploy.len() > 0 {
context["envs"] += #{
name: `DEPLOYMENT_LIST`,
value: deploy.reduce(|sum, v| if sum.type_of() == "()" { v } else { `${sum} ${v}` })
};
}
if sts.len() > 0 {
context["envs"] += #{
name: `STATEFULSET_LIST`,
value: sts.reduce(|sum, v| if sum.type_of() == "()" { v } else { `${sum} ${v}` })
};
}
if pgs.len() > 0 {
context["envs"] += #{
name: `PG_LIST`,
value: pgs.reduce(|sum, v| if sum.type_of() == "()" { v } else { `${sum} ${v}` })
};
}
if secrets.len() > 0 {
context["envs"] += #{
name: `SECRET_LIST`,
value: secrets.reduce(|sum, v| if sum.type_of() == "()" { v } else { `${sum} ${v}` })
};
}
context
}
2 changes: 1 addition & 1 deletion agent/scripts/lib/build_context.rhai
@@ -94,6 +94,6 @@ fn run(instance, args) {
},
values: get_values(instance.spec.options, defaults),
defaults: defaults,
package_dir: args.package_dir
package_dir: args.package_dir,
}
}
16 changes: 11 additions & 5 deletions agent/scripts/lib/install_from_dir.rhai
@@ -34,11 +34,17 @@ fn get_objects(context, dir) {
hbs.register_helper_dir(`${context.package_dir}/handlebars/helpers`);
let ret = [];
for file in read_dir(dir) {
let objects = if file.ends_with(".yaml") || file.ends_with(".yml") {
yaml_decode_multi(file_read(file))
} else if file.ends_with(".yaml.hbs") || file.ends_with(".yml.hbs") {
yaml_decode_multi(hbs.render_from(file_read(file), context))
} else {[]};
let objects = [];
try {
objects = if file.ends_with(".yaml") || file.ends_with(".yml") {
yaml_decode_multi(file_read(file))
} else if file.ends_with(".yaml.hbs") || file.ends_with(".yml.hbs") {
yaml_decode_multi(hbs.render_from(file_read(file), context))
} else {[]};
} catch(e) {
log_error(`While parsing ${file}:`);
throw e;
}
for obj in objects.filter(|obj| type_of(obj) != "map" || ! obj.keys().contains("kind") || ! obj.keys().contains("metadata") || type_of(obj.metadata) != "map") {
log_warn(`No kind for an object in file ${file}`);
log_debug(yaml_encode(obj));
10 changes: 10 additions & 0 deletions agent/scripts/lib/wait.rhai
@@ -52,6 +52,16 @@ fn vital(lst, duration) {
log_info(`Waiting for ${v.kind} ${v.namespace}/${v.name} to be available`);
let sts = get_statefulset(v.namespace, v.name);
sts.wait_available(duration);
} else if ["BucketClaim"].contains(v.kind) {
log_info(`Waiting for ${v.kind} ${v.namespace}/${v.name} to be available`);
let api = k8s_resource(v.kind, v.namespace);
let obj = api.get_obj(v.name);
obj.wait_status("bucketReady", duration);
} else if ["BucketAccess"].contains(v.kind) {
log_info(`Waiting for ${v.kind} ${v.namespace}/${v.name} to be available`);
let api = k8s_resource(v.kind, v.namespace);
let obj = api.get_obj(v.name);
obj.wait_status("accessGranted", duration);
}
}
}
2 changes: 2 additions & 0 deletions agent/scripts/packages/build.rhai
@@ -21,6 +21,8 @@ fn build(args) {
copy::dir_exts(item, args.temp+"/"+base, [".hbs", ".yaml", ".yml"]);
} else if base in ["vitals", "scalables", "others"] && is_dir(item) && valid.metadata.type == "tenant" {
copy::dir_exts(item, args.temp+"/"+base, [".hbs", ".yaml", ".yml"]);
} else if base == "pods" && is_dir(item) && valid.metadata.type == "tenant" {
copy::dir_exts(item, args.temp+"/"+base, [".hbs"]);
} else if base == "handlebars" && is_dir(item) {
for sub in read_dir(item) {
let subbase = basename(sub);
52 changes: 52 additions & 0 deletions agent/scripts/tenant/backup.rhai
@@ -0,0 +1,52 @@
import "backup_context" as ctx;
fn run(args) {
let context = ctx::from_args(args);
log_info(`Starting backup using target: ${context.s3_url}`);

import_run("backup_pre", context);
if is_file(`${args.package_dir}/scripts/backup.sh`) {
let rc = shell_run(`export RESTIC_REPOSITORY="${context.s3_url}";${context.package_dir}/scripts/backup.sh`);
if rc != 0 {
throw `${context.package_dir}/scripts/backup.sh FAILED returning ${rc}`;
} else {
import_run("backup_post", context);
return rc;
}
}
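// standard flow: init the repo, enter maintenance, dump secrets and PostgreSQL, snapshot, then check and prune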
import_run("backup_init", context);
import_run("maintenance_start", context);
if is_file(`${args.package_dir}/scripts/backup_prepare.sh`) {
let rc = shell_run(`export RESTIC_REPOSITORY="${context.s3_url}";${context.package_dir}/scripts/backup_prepare.sh`);
if rc != 0 {
throw `${context.package_dir}/scripts/backup_prepare.sh FAILED returning ${rc}`;
}
} else {
if is_file(`${args.package_dir}/scripts/backup_prepare_secret.sh`) {
let rc = shell_run(`export RESTIC_REPOSITORY="${context.s3_url}";${context.package_dir}/scripts/backup_prepare_secret.sh`);
if rc != 0 {
throw `${context.package_dir}/scripts/backup_prepare_secret.sh FAILED returning ${rc}`;
}
} else {
import_run("backup_prepare_secret", context);
}
if is_file(`${args.package_dir}/scripts/backup_prepare_postgresql.sh`) {
let rc = shell_run(`export RESTIC_REPOSITORY="${context.s3_url}";${context.package_dir}/scripts/backup_prepare_postgresql.sh`);
if rc != 0 {
throw `${context.package_dir}/scripts/backup_prepare_postgresql.sh FAILED returning ${rc}`;
}
} else {
import_run("backup_prepare_postgresql", context);
}
}
if is_file(`${args.package_dir}/scripts/backup_before.sh`) {
let rc = shell_run(`export RESTIC_REPOSITORY="${context.s3_url}";${context.package_dir}/scripts/backup_before.sh`);
if rc != 0 {
throw `${context.package_dir}/scripts/backup_before.sh FAILED returning ${rc}`;
}
}
import_run("backup_run", context);
import_run("maintenance_stop", context);
import_run("backup_check", context);
import_run("backup_prune", context);
import_run("backup_post", context);
}