From edda4fba45724ba690e58163e53c30d692c19053 Mon Sep 17 00:00:00 2001 From: Giulio Frasca Date: Mon, 5 Jun 2023 19:18:19 -0400 Subject: [PATCH 01/47] Add Database and ObjectStorage Health Checks - Delays deploying DSPA Manifests until DB and ObjStore Connections are healthy - Introduces status condition of "ComponentDeploymentNotFound" for Required Components with missing Deployments --- controllers/config/defaults.go | 9 ++- controllers/database.go | 47 +++++++++++ controllers/dspipeline_controller.go | 112 +++++++++++++++++---------- controllers/metrics.go | 20 +++++ controllers/storage.go | 68 ++++++++++++++++ go.mod | 23 ++++-- go.sum | 35 +++++++++ 7 files changed, 264 insertions(+), 50 deletions(-) diff --git a/controllers/config/defaults.go b/controllers/config/defaults.go index fdf841b9d..15db8ece0 100644 --- a/controllers/config/defaults.go +++ b/controllers/config/defaults.go @@ -69,6 +69,8 @@ const ( // DSPA Status Condition Types const ( + DatabaseAvailable = "DatabaseAvailable" + ObjectStoreAvailable = "ObjectStoreAvailable" APIServerReady = "APIServerReady" PersistenceAgentReady = "PersistenceAgentReady" ScheduledWorkflowReady = "ScheduledWorkflowReady" @@ -81,9 +83,10 @@ const ( // kubectl get output, and in summarizing // occurrences of causes const ( - MinimumReplicasAvailable = "MinimumReplicasAvailable" - FailingToDeploy = "FailingToDeploy" - Deploying = "Deploying" + MinimumReplicasAvailable = "MinimumReplicasAvailable" + FailingToDeploy = "FailingToDeploy" + Deploying = "Deploying" + ComponentDeploymentNotFound = "ComponentDeploymentNotFound" ) // Any required Configmap paths can be added here, diff --git a/controllers/database.go b/controllers/database.go index 385cfc010..24b50d7a3 100644 --- a/controllers/database.go +++ b/controllers/database.go @@ -17,6 +17,11 @@ package controllers import ( "context" + "database/sql" + b64 "encoding/base64" + "fmt" + + _ "github.com/go-sql-driver/mysql" dspav1alpha1 
"github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1" ) @@ -31,6 +36,48 @@ var dbTemplates = []string{ dbSecret, } +func (r *DSPAReconciler) VerifyMySQLDBConnection(host, port, username, password, dbname string) bool { + connectionString := fmt.Sprintf("%s:%s@tcp(%s:%s)/%s", username, password, host, port, dbname) + db, err := sql.Open("mysql", connectionString) + if err != nil { + return false + } + defer db.Close() + + testStatement := "SELECT 1;" + _, err = db.Exec(testStatement) + return err == nil +} + +func (r *DSPAReconciler) isDatabaseAccessible(ctx context.Context, dsp *dspav1alpha1.DataSciencePipelinesApplication, + params *DSPAParams) bool { + log := r.Log.WithValues("namespace", dsp.Namespace).WithValues("dspa_name", dsp.Name) + + log.Info("Performing Database Health Check") + databaseSpecified := dsp.Spec.Database != nil + usingExternalDB := params.UsingExternalDB(dsp) + usingMariaDB := !databaseSpecified || dsp.Spec.Database.MariaDB != nil + if usingMariaDB || usingExternalDB { + decodePass, _ := b64.StdEncoding.DecodeString(params.DBConnection.Password) + db_connect := r.VerifyMySQLDBConnection(params.DBConnection.Host, + params.DBConnection.Port, + params.DBConnection.Username, + string(decodePass), + params.DBConnection.DBName) + if db_connect { + log.Info("Database Health Check Successful") + } else { + log.Info("Unable to connect to Database") + } + return db_connect + + } + + log.Info(fmt.Sprintf("Could not connect to Database: Unsupported Type")) + // Only MariaDB and Mysql-Compliant Database supported. 
+ return false +} + func (r *DSPAReconciler) ReconcileDatabase(ctx context.Context, dsp *dspav1alpha1.DataSciencePipelinesApplication, params *DSPAParams) error { diff --git a/controllers/dspipeline_controller.go b/controllers/dspipeline_controller.go index ed5a13976..dd460483e 100644 --- a/controllers/dspipeline_controller.go +++ b/controllers/dspipeline_controller.go @@ -19,6 +19,7 @@ package controllers import ( "context" "fmt" + "github.com/go-logr/logr" mf "github.com/manifestival/manifestival" dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1" @@ -217,34 +218,42 @@ func (r *DSPAReconciler) Reconcile(ctx context.Context, req ctrl.Request) (ctrl. return ctrl.Result{}, err } - err = r.ReconcileCommon(dspa, params) - if err != nil { - return ctrl.Result{}, err - } + // Get Prereq Status (DB and ObjStore Ready) + dbAvailable := r.isDatabaseAccessible(ctx, dspa, params) + objStoreAvailable := r.isObjectStorageAccessible(ctx, dspa, params) + dspa_prereqs_ready := (dbAvailable && objStoreAvailable) - err = r.ReconcileAPIServer(ctx, dspa, params) - if err != nil { - return ctrl.Result{}, err - } + if dspa_prereqs_ready { + // Manage Common Manifests + err = r.ReconcileCommon(dspa, params) + if err != nil { + return ctrl.Result{}, err + } - err = r.ReconcilePersistenceAgent(dspa, params) - if err != nil { - return ctrl.Result{}, err - } + err = r.ReconcileAPIServer(ctx, dspa, params) + if err != nil { + return ctrl.Result{}, err + } - err = r.ReconcileScheduledWorkflow(dspa, params) - if err != nil { - return ctrl.Result{}, err - } + err = r.ReconcilePersistenceAgent(dspa, params) + if err != nil { + return ctrl.Result{}, err + } - err = r.ReconcileUI(dspa, params) - if err != nil { - return ctrl.Result{}, err - } + err = r.ReconcileScheduledWorkflow(dspa, params) + if err != nil { + return ctrl.Result{}, err + } - err = r.ReconcileMLMD(dspa, params) - if err != nil { - return ctrl.Result{}, err + err = r.ReconcileUI(dspa, params) + 
if err != nil { + return ctrl.Result{}, err + } + + err = r.ReconcileMLMD(dspa, params) + if err != nil { + return ctrl.Result{}, err + } } log.Info("Updating CR status") @@ -255,7 +264,7 @@ func (r *DSPAReconciler) Reconcile(ctx context.Context, req ctrl.Request) (ctrl. return ctrl.Result{}, err } - conditions, err := r.GenerateStatus(ctx, dspa) + conditions, err := r.GenerateStatus(ctx, dspa, params) if err != nil { log.Info(err.Error()) return ctrl.Result{}, err @@ -271,24 +280,20 @@ func (r *DSPAReconciler) Reconcile(ctx context.Context, req ctrl.Request) (ctrl. r.PublishMetrics( dspa, + util.GetConditionByType(config.DatabaseAvailable, conditions), + util.GetConditionByType(config.ObjectStoreAvailable, conditions), util.GetConditionByType(config.APIServerReady, conditions), util.GetConditionByType(config.PersistenceAgentReady, conditions), util.GetConditionByType(config.ScheduledWorkflowReady, conditions), util.GetConditionByType(config.CrReady, conditions), ) - return ctrl.Result{}, nil } // handleReadyCondition evaluates if condition with "name" is in condition of type "conditionType". // this procedure is valid only for conditions with bool status type, for conditions of non bool type // results are undefined. 
-func (r *DSPAReconciler) handleReadyCondition( - ctx context.Context, - dspa *dspav1alpha1.DataSciencePipelinesApplication, - name string, - condition string, -) (metav1.Condition, error) { +func (r *DSPAReconciler) handleReadyCondition(ctx context.Context, dspa *dspav1alpha1.DataSciencePipelinesApplication, name string, condition string) (metav1.Condition, error) { readyCondition := r.buildCondition(condition, dspa, config.MinimumReplicasAvailable) deployment := &appsv1.Deployment{} @@ -297,7 +302,14 @@ func (r *DSPAReconciler) handleReadyCondition( err := r.Get(ctx, types.NamespacedName{Name: component, Namespace: dspa.Namespace}, deployment) if err != nil { - return metav1.Condition{}, err + if apierrs.IsNotFound(err) { + readyCondition.Reason = config.ComponentDeploymentNotFound + readyCondition.Status = metav1.ConditionFalse + readyCondition.Message = fmt.Sprintf("Deployment for component \"%s\" is missing", component) + return readyCondition, nil + } else { + return metav1.Condition{}, err + } } // First check if deployment is scaled down, if it is, component is deemed not ready @@ -397,7 +409,16 @@ func (r *DSPAReconciler) handleReadyCondition( } -func (r *DSPAReconciler) GenerateStatus(ctx context.Context, dspa *dspav1alpha1.DataSciencePipelinesApplication) ([]metav1.Condition, error) { +func (r *DSPAReconciler) GenerateStatus(ctx context.Context, dspa *dspav1alpha1.DataSciencePipelinesApplication, params *DSPAParams) ([]metav1.Condition, error) { + databaseAvailable := r.buildCondition(config.DatabaseAvailable, dspa, config.DatabaseAvailable) + if r.isDatabaseAccessible(ctx, dspa, params) { + databaseAvailable.Status = metav1.ConditionTrue + } + + objStoreAvailable := r.buildCondition(config.ObjectStoreAvailable, dspa, config.ObjectStoreAvailable) + if r.isObjectStorageAccessible(ctx, dspa, params) { + objStoreAvailable.Status = metav1.ConditionTrue + } apiServerReady, err := r.handleReadyCondition(ctx, dspa, "ds-pipeline", config.APIServerReady) if err 
!= nil { @@ -411,7 +432,10 @@ func (r *DSPAReconciler) GenerateStatus(ctx context.Context, dspa *dspav1alpha1. if err != nil { return []metav1.Condition{}, err } + var conditions []metav1.Condition + conditions = append(conditions, databaseAvailable) + conditions = append(conditions, objStoreAvailable) conditions = append(conditions, apiServerReady) conditions = append(conditions, persistenceAgentReady) conditions = append(conditions, scheduledWorkflowReady) @@ -420,7 +444,7 @@ func (r *DSPAReconciler) GenerateStatus(ctx context.Context, dspa *dspav1alpha1. crReady := r.buildCondition(config.CrReady, dspa, config.MinimumReplicasAvailable) crReady.Type = config.CrReady - componentConditions := []metav1.Condition{apiServerReady, persistenceAgentReady, scheduledWorkflowReady} + componentConditions := []metav1.Condition{databaseAvailable, objStoreAvailable, apiServerReady, persistenceAgentReady, scheduledWorkflowReady} allReady := true failureMessages := "" for _, c := range componentConditions { @@ -449,16 +473,24 @@ func (r *DSPAReconciler) GenerateStatus(ctx context.Context, dspa *dspav1alpha1. 
} func (r *DSPAReconciler) PublishMetrics(dspa *dspav1alpha1.DataSciencePipelinesApplication, - apiServerReady, persistenceAgentReady, scheduledWorkflowReady, + dbAvailable, objStoreAvailable, apiServerReady, persistenceAgentReady, scheduledWorkflowReady, crReady metav1.Condition) { log := r.Log.WithValues("namespace", dspa.Namespace).WithValues("dspa_name", dspa.Name) log.Info("Publishing Ready Metrics") - if apiServerReady.Status == metav1.ConditionTrue { - log.Info("APIServer Ready") - APIServerReadyMetric.WithLabelValues(dspa.Name, dspa.Namespace).Set(1) + if dbAvailable.Status == metav1.ConditionTrue { + log.Info("Database Accessible") + DBAvailableMetric.WithLabelValues(dspa.Name, dspa.Namespace).Set(1) + } else { + log.Info("Database Not Yet Accessible") + DBAvailableMetric.WithLabelValues(dspa.Name, dspa.Namespace).Set(0) + } + + if objStoreAvailable.Status == metav1.ConditionTrue { + log.Info("Object Store Accessible") + ObjectStoreAvailableMetric.WithLabelValues(dspa.Name, dspa.Namespace).Set(1) } else { - log.Info("APIServer Not Ready") - APIServerReadyMetric.WithLabelValues(dspa.Name, dspa.Namespace).Set(0) + log.Info("Object Store Not Yet Accessible") + ObjectStoreAvailableMetric.WithLabelValues(dspa.Name, dspa.Namespace).Set(0) } if persistenceAgentReady.Status == metav1.ConditionTrue { diff --git a/controllers/metrics.go b/controllers/metrics.go index 3a4bd1f99..eed541be1 100644 --- a/controllers/metrics.go +++ b/controllers/metrics.go @@ -23,6 +23,26 @@ import ( // Prometheus metrics gauges var ( + DBAvailableMetric = prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "data_science_pipelines_application_database_available", + Help: "Data Science Pipelines Application - Database Availability Status", + }, + []string{ + "dspa_name", + "dspa_namespace", + }, + ) + ObjectStoreAvailableMetric = prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "data_science_pipelines_application_object_store_available", + Help: "Data Science Pipelines 
Application - Object Store Availability Status", + }, + []string{ + "dspa_name", + "dspa_namespace", + }, + ) APIServerReadyMetric = prometheus.NewGaugeVec( prometheus.GaugeOpts{ Name: "data_science_pipelines_application_apiserver_ready", diff --git a/controllers/storage.go b/controllers/storage.go index 9fb3d3a9f..97f5cf1c0 100644 --- a/controllers/storage.go +++ b/controllers/storage.go @@ -18,7 +18,12 @@ package controllers import ( "context" + "encoding/base64" + "fmt" + "net/http" + minio "github.com/minio/minio-go/v7" + "github.com/minio/minio-go/v7/pkg/credentials" dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1" ) @@ -31,6 +36,69 @@ var storageTemplates = []string{ storageSecret, } +func joinHostPort(host, port string) string { + if port == "" { + return host + } + return fmt.Sprintf("%s:%s", host, port) +} + +func createCredentialProvidersChain(endpoint, accessKey, secretKey string) *credentials.Credentials { + // first try with static api key + if accessKey != "" && secretKey != "" { + return credentials.NewStaticV4(accessKey, secretKey, "") + } + // otherwise use a chained provider: minioEnv -> awsEnv -> IAM + providers := []credentials.Provider{ + &credentials.EnvMinio{}, + &credentials.EnvAWS{}, + &credentials.IAM{ + Client: &http.Client{ + Transport: http.DefaultTransport, + }, + }, + } + return credentials.New(&credentials.Chain{Providers: providers}) +} + +func (r *DSPAReconciler) isObjectStorageAccessible(ctx context.Context, dsp *dspav1alpha1.DataSciencePipelinesApplication, + params *DSPAParams) bool { + log := r.Log.WithValues("namespace", dsp.Namespace).WithValues("dspa_name", dsp.Name) + log.Info("Performing Object Storage Health Check") + + endpoint := joinHostPort(params.ObjectStorageConnection.Host, params.ObjectStorageConnection.Port) + accesskey, err := base64.StdEncoding.DecodeString(params.ObjectStorageConnection.AccessKeyID) + if err != nil { + log.Error(err, "Could not decode Object Storage Access 
Key ID") + return false + } + + secretkey, err := base64.StdEncoding.DecodeString(params.ObjectStorageConnection.SecretAccessKey) + if err != nil { + log.Error(err, "Could not decode Object Storage Secret Access Key") + return false + } + + cred := createCredentialProvidersChain(endpoint, string(accesskey), string(secretkey)) + minioClient, err := minio.New(endpoint, &minio.Options{ + Creds: cred, + Secure: params.ObjectStorageConnection.Secure, + }) + if err != nil { + log.Info(fmt.Sprintf("Could not connect to object storage endpoint: %s", endpoint)) + return false + } + + _, err = minioClient.ListBuckets(ctx) + if err != nil { + log.Info(fmt.Sprintf("Could not perform ListBuckets health check on object storage endpoint: %s", endpoint)) + return false + } + + log.Info("Object Storage Health Check Successful") + return true +} + // ReconcileStorage will set up Storage Connection. func (r *DSPAReconciler) ReconcileStorage(ctx context.Context, dsp *dspav1alpha1.DataSciencePipelinesApplication, params *DSPAParams) error { diff --git a/go.mod b/go.mod index 5a7d7276c..270907ef2 100644 --- a/go.mod +++ b/go.mod @@ -5,6 +5,7 @@ go 1.18 require ( github.com/fsnotify/fsnotify v1.5.4 github.com/go-logr/logr v1.2.3 + github.com/go-sql-driver/mysql v1.7.1 github.com/golang/glog v1.0.0 github.com/manifestival/controller-runtime-client v0.4.0 github.com/manifestival/manifestival v0.7.2 @@ -33,6 +34,7 @@ require ( github.com/beorn7/perks v1.0.1 // indirect github.com/cespare/xxhash/v2 v2.1.2 // indirect github.com/davecgh/go-spew v1.1.1 // indirect + github.com/dustin/go-humanize v1.0.1 // indirect github.com/emicklei/go-restful/v3 v3.8.0 // indirect github.com/evanphx/json-patch v4.12.0+incompatible // indirect github.com/evanphx/json-patch/v5 v5.6.0 // indirect @@ -49,15 +51,20 @@ require ( github.com/google/go-cmp v0.5.9 // indirect github.com/google/gofuzz v1.1.0 // indirect github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1 // indirect - github.com/google/uuid 
v1.1.2 // indirect + github.com/google/uuid v1.3.0 // indirect github.com/hashicorp/hcl v1.0.0 // indirect github.com/imdario/mergo v0.3.12 // indirect github.com/josharian/intern v1.0.0 // indirect github.com/json-iterator/go v1.1.12 // indirect + github.com/klauspost/compress v1.16.5 // indirect + github.com/klauspost/cpuid/v2 v2.2.4 // indirect github.com/kr/pretty v0.3.0 // indirect github.com/magiconair/properties v1.8.1 // indirect github.com/mailru/easyjson v0.7.6 // indirect github.com/matttproud/golang_protobuf_extensions v1.0.2-0.20181231171920-c182affec369 // indirect + github.com/minio/md5-simd v1.1.2 // indirect + github.com/minio/minio-go/v7 v7.0.56 // indirect + github.com/minio/sha256-simd v1.0.1 // indirect github.com/mitchellh/mapstructure v1.4.1 // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/reflect2 v1.0.2 // indirect @@ -68,6 +75,8 @@ require ( github.com/prometheus/common v0.32.1 // indirect github.com/prometheus/procfs v0.7.3 // indirect github.com/rogpeppe/go-internal v1.8.0 // indirect + github.com/rs/xid v1.5.0 // indirect + github.com/sirupsen/logrus v1.9.2 // indirect github.com/spf13/afero v1.2.2 // indirect github.com/spf13/cast v1.3.0 // indirect github.com/spf13/jwalterweatherman v1.0.0 // indirect @@ -75,12 +84,12 @@ require ( github.com/subosito/gotenv v1.2.0 // indirect go.uber.org/atomic v1.7.0 // indirect go.uber.org/multierr v1.6.0 // indirect - golang.org/x/crypto v0.5.0 // indirect - golang.org/x/net v0.7.0 // indirect + golang.org/x/crypto v0.9.0 // indirect + golang.org/x/net v0.10.0 // indirect golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8 // indirect - golang.org/x/sys v0.5.0 // indirect - golang.org/x/term v0.5.0 // indirect - golang.org/x/text v0.7.0 // indirect + golang.org/x/sys v0.8.0 // indirect + golang.org/x/term v0.8.0 // indirect + golang.org/x/text v0.9.0 // indirect golang.org/x/time v0.0.0-20220609170525-579cf78fd858 // indirect 
golang.org/x/tools v0.6.0 // indirect gomodules.xyz/jsonpatch/v2 v2.2.0 // indirect @@ -88,7 +97,7 @@ require ( google.golang.org/protobuf v1.28.0 // indirect gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect gopkg.in/inf.v0 v0.9.1 // indirect - gopkg.in/ini.v1 v1.51.0 // indirect + gopkg.in/ini.v1 v1.67.0 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect k8s.io/apiextensions-apiserver v0.25.0 // indirect diff --git a/go.sum b/go.sum index 86b077305..e21f8a2c1 100644 --- a/go.sum +++ b/go.sum @@ -151,6 +151,8 @@ github.com/docker/spdystream v0.0.0-20160310174837-449fdfce4d96/go.mod h1:Qh8CwZ github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815/go.mod h1:WwZ+bS3ebgob9U8Nd0kOddGdZWjyMGR8Wziv+TBNwSE= github.com/dustin/go-humanize v0.0.0-20171111073723-bb3d318650d4/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= +github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY= +github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= github.com/elazarl/goproxy v0.0.0-20170405201442-c4fc26588b6e/go.mod h1:/Zj4wYkgs4iZTTu3o/KG3Itv/qCCa8VVMlb3i9OVuzc= github.com/elazarl/goproxy v0.0.0-20180725130230-947c36da3153/go.mod h1:/Zj4wYkgs4iZTTu3o/KG3Itv/qCCa8VVMlb3i9OVuzc= github.com/emicklei/go-restful v0.0.0-20170410110728-ff4f55a20633/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs= @@ -256,6 +258,8 @@ github.com/go-openapi/swag v0.19.14/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/ github.com/go-openapi/validate v0.18.0/go.mod h1:Uh4HdOzKt19xGIGm1qHf/ofbX1YQ4Y+MYsct2VUrAJ4= github.com/go-openapi/validate v0.19.2/go.mod h1:1tRCw7m3jtI8eNWEEliiAqUIcBztB2KDnRCRMUi7GTA= github.com/go-openapi/validate v0.19.5/go.mod h1:8DJv2CVJQ6kGNpFW6eV9N3JviE1C85nY1c2z52x1Gk4= +github.com/go-sql-driver/mysql v1.7.1 h1:lUIinVbN1DY0xBg0eMOzmmtGoHwWBbvnWubQUrtU8EI= 
+github.com/go-sql-driver/mysql v1.7.1/go.mod h1:OXbVy3sEdcQ2Doequ6Z5BW6fXNQTmx+9S1MCJN5yJMI= github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0 h1:p104kn46Q8WdvHunIJ9dAyjPVtrBPhSr3KT2yUst43I= github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE= @@ -355,6 +359,8 @@ github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+ github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.1.2 h1:EVhdT+1Kseyi1/pUmXKaFxYsDNy9RQYkMWRH68J/W7Y= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= +github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= github.com/googleapis/gax-go/v2 v2.1.0/go.mod h1:Q3nei7sK6ybPYH7twZdmQpAd1MKb7pfu6SK+H1/DsU0= @@ -430,6 +436,11 @@ github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvW github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00= github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/klauspost/compress v1.16.5 h1:IFV2oUNUzZaz+XyusxpLzpzS8Pt5rh0Z16For/djlyI= +github.com/klauspost/compress v1.16.5/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= +github.com/klauspost/cpuid/v2 v2.0.1/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= +github.com/klauspost/cpuid/v2 v2.2.4 h1:acbojRNwl3o09bUq+yDCtZFc1aiwaAAxtcn8YkZXnvk= +github.com/klauspost/cpuid/v2 v2.2.4/go.mod 
h1:RVVoqg1df56z8g3pUjL/3lE5UfnlrJX8tyFgg4nqhuY= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= @@ -467,6 +478,12 @@ github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5 github.com/matttproud/golang_protobuf_extensions v1.0.2-0.20181231171920-c182affec369 h1:I0XW9+e1XWDxdcEniV4rQAIOPUGDq67JSCiRCgGCZLI= github.com/matttproud/golang_protobuf_extensions v1.0.2-0.20181231171920-c182affec369/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4= github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= +github.com/minio/md5-simd v1.1.2 h1:Gdi1DZK69+ZVMoNHRXJyNcxrMA4dSxoYHZSQbirFg34= +github.com/minio/md5-simd v1.1.2/go.mod h1:MzdKDxYpY2BT9XQFocsiZf/NKVtR7nkE4RoEpN+20RM= +github.com/minio/minio-go/v7 v7.0.56 h1:pkZplIEHu8vinjkmhsexcXpWth2tjVLphrTZx6fBVZY= +github.com/minio/minio-go/v7 v7.0.56/go.mod h1:NUDy4A4oXPq1l2yK6LTSvCEzAMeIcoz9lcj5dbzSrRE= +github.com/minio/sha256-simd v1.0.1 h1:6kaan5IFmwTNynnKKpDHe6FWHohJOHhCPchzK49dzMM= +github.com/minio/sha256-simd v1.0.1/go.mod h1:Pz6AKMiUdngCLpeTL/RJY1M9rUuPMYujV5xJjtbRSN8= github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc= github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= @@ -563,6 +580,8 @@ github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFR github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= github.com/rogpeppe/go-internal v1.8.0 h1:FCbCCtXNOY3UtUuHUYaghJg4y7Fd14rXifAYUAtL9R8= github.com/rogpeppe/go-internal v1.8.0/go.mod 
h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE= +github.com/rs/xid v1.5.0 h1:mKX4bl4iPYJtEIxp6CYiUuLQ/8DYMoz0PUdtGgMFRVc= +github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg= github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= @@ -571,6 +590,8 @@ github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeV github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= +github.com/sirupsen/logrus v1.9.2 h1:oxx1eChJGI6Uks2ZC4W1zpLlVgqB8ner4EuQwV4Ik1Y= +github.com/sirupsen/logrus v1.9.2/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d h1:zE9ykElWQ6/NYmHa3jpm/yHnI4xSofP+UP6SpjHcSeM= github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= github.com/smartystreets/goconvey v1.6.4 h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIKYqbNC9s= @@ -672,6 +693,8 @@ golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5y golang.org/x/crypto v0.0.0-20211215153901-e495a2d5b3d3/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.5.0 h1:U/0M97KRkSFvyD/3FSmdP5W5swImpNgle/EHFhOsQPE= golang.org/x/crypto v0.5.0/go.mod h1:NK/OQwhpMQP3MwtdjgLlYHnH9ebylxKWv3e0fK+mkQU= +golang.org/x/crypto v0.9.0 h1:LF6fAI+IutBocDJ2OT0Q1g8plpYljMZ4+lty+dsqw3g= +golang.org/x/crypto v0.9.0/go.mod h1:yrmDGqONDYtNj3tH8X9dzUun2m2lzPa9ngI6/RUPGR0= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod 
h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -762,6 +785,8 @@ golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qx golang.org/x/net v0.0.0-20211209124913-491a49abca63/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.7.0 h1:rJrUqqhjsgNp7KqAIc25s9pZnjU7TUcSY7HcVZjdn1g= golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.10.0 h1:X2//UzNDwYmtCLn7To6G58Wr6f5ahEAQgKNzv9Y951M= +golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190402181905-9f3314589c9a/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -863,13 +888,19 @@ golang.org/x/sys v0.0.0-20210823070655-63515b42dcdf/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20210908233432-aa78b53d3365/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220704084225-05e143d24a9e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0 h1:MUK/U/4lj1t1oPg0HfuXDN/Z1wv31ZJ/YcPiGccS4DU= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.8.0 h1:EBmGv8NaZBZTWvrbjNoL6HVt+IVy3QDQpJs7VRIw3tU= +golang.org/x/sys 
v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210220032956-6a3ed077a48d/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.5.0 h1:n2a8QNdAb0sZNpU9R1ALUXBbY+w51fCQDN+7EdxNBsY= golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= +golang.org/x/term v0.8.0 h1:n5xxQn2i3PC0yLAbjTpNT85q/Kgzcr2gIoX9OrJUols= +golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= golang.org/x/text v0.0.0-20160726164857-2910a502d2bf/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -882,6 +913,8 @@ golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.7.0 h1:4BRB4x83lYWy72KwLD/qYDuTu7q9PjSagHvijDw7cLo= golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.9.0 h1:2sjJmO8cDvYveuX97RDLsxlyUxLl+GHoLxBiRdHllBE= +golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/time v0.0.0-20161028155119-f51c12702a4d/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= @@ -1115,6 +1148,8 @@ gopkg.in/inf.v0 v0.9.1 h1:73M5CoZyi3ZLMOyDlQh031Cx6N9NDJ2Vvfl76EDAgDc= gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= gopkg.in/ini.v1 v1.51.0 
h1:AQvPpx3LzTDM0AjnIRlVFwFFGC+npRopjZxLJj6gdno= gopkg.in/ini.v1 v1.51.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA= +gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/natefinch/lumberjack.v2 v2.0.0/go.mod h1:l0ndWWf7gzL7RNwBG7wST/UCcT4T24xpD6X8LsfU/+k= gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo= gopkg.in/square/go-jose.v2 v2.2.2/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI= From 4f63f37ee002ccdeb922c86178431fba739f0026 Mon Sep 17 00:00:00 2001 From: Giulio Frasca Date: Mon, 10 Jul 2023 17:46:30 -0400 Subject: [PATCH 02/47] Update Ginkgo UnitTests to Mock/Override DB and Storage checks --- controllers/database.go | 35 ++++++++++---------- controllers/dspipeline_controller.go | 23 ++++++++++--- controllers/metrics.go | 4 ++- controllers/storage.go | 48 +++++++++++++++++----------- controllers/suite_test.go | 11 +++++++ go.mod | 2 +- go.sum | 16 ---------- 7 files changed, 79 insertions(+), 60 deletions(-) diff --git a/controllers/database.go b/controllers/database.go index 24b50d7a3..bbcfdd533 100644 --- a/controllers/database.go +++ b/controllers/database.go @@ -36,7 +36,8 @@ var dbTemplates = []string{ dbSecret, } -func (r *DSPAReconciler) VerifyMySQLDBConnection(host, port, username, password, dbname string) bool { +// extract to var for mocking in testing +var ConnectAndQueryDatabase = func(host, port, username, password, dbname string) bool { connectionString := fmt.Sprintf("%s:%s@tcp(%s:%s)/%s", username, password, host, port, dbname) db, err := sql.Open("mysql", connectionString) if err != nil { @@ -57,25 +58,23 @@ func (r *DSPAReconciler) isDatabaseAccessible(ctx context.Context, dsp *dspav1al databaseSpecified := dsp.Spec.Database != nil usingExternalDB := params.UsingExternalDB(dsp) usingMariaDB := !databaseSpecified || dsp.Spec.Database.MariaDB != nil - if usingMariaDB || usingExternalDB { - 
decodePass, _ := b64.StdEncoding.DecodeString(params.DBConnection.Password) - db_connect := r.VerifyMySQLDBConnection(params.DBConnection.Host, - params.DBConnection.Port, - params.DBConnection.Username, - string(decodePass), - params.DBConnection.DBName) - if db_connect { - log.Info("Database Health Check Successful") - } else { - log.Info("Unable to connect to Database") - } - return db_connect - + if !usingMariaDB && !usingExternalDB { + log.Info("Could not connect to Database: Unsupported Type") + return false } - log.Info(fmt.Sprintf("Could not connect to Database: Unsupported Type")) - // Only MariaDB and Mysql-Compliant Database supported. - return false + decodePass, _ := b64.StdEncoding.DecodeString(params.DBConnection.Password) + db_connect := ConnectAndQueryDatabase(params.DBConnection.Host, + params.DBConnection.Port, + params.DBConnection.Username, + string(decodePass), + params.DBConnection.DBName) + if db_connect { + log.Info("Database Health Check Successful") + } else { + log.Info("Unable to connect to Database") + } + return db_connect } func (r *DSPAReconciler) ReconcileDatabase(ctx context.Context, dsp *dspav1alpha1.DataSciencePipelinesApplication, diff --git a/controllers/dspipeline_controller.go b/controllers/dspipeline_controller.go index dd460483e..c287372e1 100644 --- a/controllers/dspipeline_controller.go +++ b/controllers/dspipeline_controller.go @@ -264,7 +264,7 @@ func (r *DSPAReconciler) Reconcile(ctx context.Context, req ctrl.Request) (ctrl. 
return ctrl.Result{}, err } - conditions, err := r.GenerateStatus(ctx, dspa, params) + conditions, err := r.GenerateStatus(ctx, dspa, params, dbAvailable, objStoreAvailable) if err != nil { log.Info(err.Error()) return ctrl.Result{}, err @@ -305,7 +305,7 @@ func (r *DSPAReconciler) handleReadyCondition(ctx context.Context, dspa *dspav1a if apierrs.IsNotFound(err) { readyCondition.Reason = config.ComponentDeploymentNotFound readyCondition.Status = metav1.ConditionFalse - readyCondition.Message = fmt.Sprintf("Deployment for component \"%s\" is missing", component) + readyCondition.Message = fmt.Sprintf("Deployment for component \"%s\" is missing - pre-requisite component may not yet be available.", component) return readyCondition, nil } else { return metav1.Condition{}, err @@ -409,25 +409,38 @@ func (r *DSPAReconciler) handleReadyCondition(ctx context.Context, dspa *dspav1a } -func (r *DSPAReconciler) GenerateStatus(ctx context.Context, dspa *dspav1alpha1.DataSciencePipelinesApplication, params *DSPAParams) ([]metav1.Condition, error) { +func (r *DSPAReconciler) GenerateStatus(ctx context.Context, dspa *dspav1alpha1.DataSciencePipelinesApplication, params *DSPAParams, dbAvailableStatus, objStoreAvailableStatus bool) ([]metav1.Condition, error) { + // Create Database Availability Condition databaseAvailable := r.buildCondition(config.DatabaseAvailable, dspa, config.DatabaseAvailable) - if r.isDatabaseAccessible(ctx, dspa, params) { + if dbAvailableStatus { databaseAvailable.Status = metav1.ConditionTrue + databaseAvailable.Message = "Database connectivity successfully verified" + } else { + databaseAvailable.Message = "Could not connect to database" } + // Create Object Storage Availability Condition objStoreAvailable := r.buildCondition(config.ObjectStoreAvailable, dspa, config.ObjectStoreAvailable) - if r.isObjectStorageAccessible(ctx, dspa, params) { + if objStoreAvailableStatus { objStoreAvailable.Status = metav1.ConditionTrue + objStoreAvailable.Message = 
"Object Store connectivity successfully verified" + } else { + objStoreAvailable.Message = "Could not connect to Object Store" } + // Create APIServer Readiness Condition apiServerReady, err := r.handleReadyCondition(ctx, dspa, "ds-pipeline", config.APIServerReady) if err != nil { return []metav1.Condition{}, err } + + // Create PersistenceAgent Readiness Condition persistenceAgentReady, err := r.handleReadyCondition(ctx, dspa, "ds-pipeline-persistenceagent", config.PersistenceAgentReady) if err != nil { return []metav1.Condition{}, err } + + // Create ScheduledWorkflow Readiness Condition scheduledWorkflowReady, err := r.handleReadyCondition(ctx, dspa, "ds-pipeline-scheduledworkflow", config.ScheduledWorkflowReady) if err != nil { return []metav1.Condition{}, err diff --git a/controllers/metrics.go b/controllers/metrics.go index eed541be1..22a45e99b 100644 --- a/controllers/metrics.go +++ b/controllers/metrics.go @@ -87,7 +87,9 @@ var ( // InitMetrics initialize prometheus metrics func InitMetrics() { - metrics.Registry.MustRegister(APIServerReadyMetric, + metrics.Registry.MustRegister(DBAvailableMetric, + ObjectStoreAvailableMetric, + APIServerReadyMetric, PersistenceAgentReadyMetric, ScheduledWorkflowReadyMetric, CrReadyMetric) diff --git a/controllers/storage.go b/controllers/storage.go index 97f5cf1c0..a542f8f17 100644 --- a/controllers/storage.go +++ b/controllers/storage.go @@ -22,7 +22,8 @@ import ( "fmt" "net/http" - minio "github.com/minio/minio-go/v7" + "github.com/go-logr/logr" + "github.com/minio/minio-go/v7" "github.com/minio/minio-go/v7/pkg/credentials" dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1" ) @@ -43,7 +44,7 @@ func joinHostPort(host, port string) string { return fmt.Sprintf("%s:%s", host, port) } -func createCredentialProvidersChain(endpoint, accessKey, secretKey string) *credentials.Credentials { +func createCredentialProvidersChain(accessKey, secretKey string) *credentials.Credentials { // first try 
with static api key if accessKey != "" && secretKey != "" { return credentials.NewStaticV4(accessKey, secretKey, "") @@ -61,6 +62,26 @@ func createCredentialProvidersChain(endpoint, accessKey, secretKey string) *cred return credentials.New(&credentials.Chain{Providers: providers}) } +var ConnectAndQueryObjStore = func(ctx context.Context, log logr.Logger, endpoint string, accesskey, secretkey []byte, secure bool) bool { + cred := createCredentialProvidersChain(string(accesskey), string(secretkey)) + minioClient, err := minio.New(endpoint, &minio.Options{ + Creds: cred, + Secure: secure, + }) + if err != nil { + log.Info(fmt.Sprintf("Could not connect to object storage endpoint: %s", endpoint)) + return false + } + + _, err = minioClient.ListBuckets(ctx) + if err != nil { + log.Info(fmt.Sprintf("Could not perform ListBuckets health check on object storage endpoint: %s", endpoint)) + return false + } + + return true +} + func (r *DSPAReconciler) isObjectStorageAccessible(ctx context.Context, dsp *dspav1alpha1.DataSciencePipelinesApplication, params *DSPAParams) bool { log := r.Log.WithValues("namespace", dsp.Namespace).WithValues("dspa_name", dsp.Name) @@ -79,24 +100,13 @@ func (r *DSPAReconciler) isObjectStorageAccessible(ctx context.Context, dsp *dsp return false } - cred := createCredentialProvidersChain(endpoint, string(accesskey), string(secretkey)) - minioClient, err := minio.New(endpoint, &minio.Options{ - Creds: cred, - Secure: params.ObjectStorageConnection.Secure, - }) - if err != nil { - log.Info(fmt.Sprintf("Could not connect to object storage endpoint: %s", endpoint)) - return false - } - - _, err = minioClient.ListBuckets(ctx) - if err != nil { - log.Info(fmt.Sprintf("Could not perform ListBuckets health check on object storage endpoint: %s", endpoint)) - return false + verified := ConnectAndQueryObjStore(ctx, log, endpoint, accesskey, secretkey, params.ObjectStorageConnection.Secure) + if verified { + log.Info("Object Storage Health Check Successful") 
+ } else { + log.Info("Object Storage Health Check Failed") } - - log.Info("Object Storage Health Check Successful") - return true + return verified } // ReconcileStorage will set up Storage Connection. diff --git a/controllers/suite_test.go b/controllers/suite_test.go index a96b8ee04..91fc77154 100644 --- a/controllers/suite_test.go +++ b/controllers/suite_test.go @@ -31,6 +31,7 @@ import ( . "github.com/onsi/ginkgo/v2" . "github.com/onsi/gomega" + "github.com/go-logr/logr" "k8s.io/client-go/kubernetes/scheme" clientgoscheme "k8s.io/client-go/kubernetes/scheme" "k8s.io/client-go/rest" @@ -67,6 +68,16 @@ func TestAPIs(t *testing.T) { RunSpecs(t, "Controller Suite") } +var _ = BeforeEach(func() { + By("Overriding the Database and Object Store live connection functions with trivial stubs") + ConnectAndQueryDatabase = func(host string, port string, username string, password string, dbname string) bool { + return true + } + ConnectAndQueryObjStore = func(ctx context.Context, log logr.Logger, endpoint string, accesskey, secretkey []byte, secure bool) bool { + return true + } +}) + var _ = BeforeSuite(func() { ctx, cancel = context.WithCancel(context.TODO()) diff --git a/go.mod b/go.mod index 270907ef2..31f717865 100644 --- a/go.mod +++ b/go.mod @@ -9,6 +9,7 @@ require ( github.com/golang/glog v1.0.0 github.com/manifestival/controller-runtime-client v0.4.0 github.com/manifestival/manifestival v0.7.2 + github.com/minio/minio-go/v7 v7.0.56 github.com/onsi/ginkgo/v2 v2.8.4 github.com/onsi/gomega v1.27.1 github.com/openshift/api v3.9.0+incompatible @@ -63,7 +64,6 @@ require ( github.com/mailru/easyjson v0.7.6 // indirect github.com/matttproud/golang_protobuf_extensions v1.0.2-0.20181231171920-c182affec369 // indirect github.com/minio/md5-simd v1.1.2 // indirect - github.com/minio/minio-go/v7 v7.0.56 // indirect github.com/minio/sha256-simd v1.0.1 // indirect github.com/mitchellh/mapstructure v1.4.1 // indirect github.com/modern-go/concurrent 
v0.0.0-20180306012644-bacd9c7ef1dd // indirect diff --git a/go.sum b/go.sum index e21f8a2c1..faeb0131f 100644 --- a/go.sum +++ b/go.sum @@ -357,7 +357,6 @@ github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLe github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/google/uuid v1.1.2 h1:EVhdT+1Kseyi1/pUmXKaFxYsDNy9RQYkMWRH68J/W7Y= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= @@ -370,7 +369,6 @@ github.com/googleapis/gnostic v0.4.1/go.mod h1:LRhVm6pbyptWbWbuZ38d1eyptfvIytN3i github.com/googleapis/gnostic v0.5.1/go.mod h1:6U4PtQXGIEt/Z3h5MAT7FNofLnw9vXk2cUuW7uA/OeU= github.com/googleapis/gnostic v0.5.5/go.mod h1:7+EbHbldMins07ALC74bsA81Ovc97DwqyJO1AENw9kA= github.com/gophercloud/gophercloud v0.0.0-20190126172459-c818fa66e4c8/go.mod h1:3WdhXV3rUYy9p6AUW8d94kr+HS62Y4VL9mBnFxsD8q4= -github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1 h1:EGx4pi6eqNxGaHF6qqu48+N2wcFQ5qg5FXgOdqsJ5d8= github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= github.com/gorilla/websocket v0.0.0-20170926233335-4201258b820c/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= github.com/gorilla/websocket v1.4.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= @@ -428,7 +426,6 @@ github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnr github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= 
github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= -github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo= github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= @@ -592,9 +589,7 @@ github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6Mwd github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= github.com/sirupsen/logrus v1.9.2 h1:oxx1eChJGI6Uks2ZC4W1zpLlVgqB8ner4EuQwV4Ik1Y= github.com/sirupsen/logrus v1.9.2/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= -github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d h1:zE9ykElWQ6/NYmHa3jpm/yHnI4xSofP+UP6SpjHcSeM= github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= -github.com/smartystreets/goconvey v1.6.4 h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIKYqbNC9s= github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM= github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= @@ -691,8 +686,6 @@ golang.org/x/crypto v0.0.0-20201002170205-7f63de1d35b0/go.mod h1:LzIPMQfyMNhhGPh golang.org/x/crypto v0.0.0-20210220033148-5ea612d1eb83/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20211215153901-e495a2d5b3d3/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.5.0 h1:U/0M97KRkSFvyD/3FSmdP5W5swImpNgle/EHFhOsQPE= 
-golang.org/x/crypto v0.5.0/go.mod h1:NK/OQwhpMQP3MwtdjgLlYHnH9ebylxKWv3e0fK+mkQU= golang.org/x/crypto v0.9.0 h1:LF6fAI+IutBocDJ2OT0Q1g8plpYljMZ4+lty+dsqw3g= golang.org/x/crypto v0.9.0/go.mod h1:yrmDGqONDYtNj3tH8X9dzUun2m2lzPa9ngI6/RUPGR0= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= @@ -783,8 +776,6 @@ golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qx golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20211209124913-491a49abca63/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.7.0 h1:rJrUqqhjsgNp7KqAIc25s9pZnjU7TUcSY7HcVZjdn1g= -golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/net v0.10.0 h1:X2//UzNDwYmtCLn7To6G58Wr6f5ahEAQgKNzv9Y951M= golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= @@ -890,15 +881,11 @@ golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220704084225-05e143d24a9e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.5.0 h1:MUK/U/4lj1t1oPg0HfuXDN/Z1wv31ZJ/YcPiGccS4DU= -golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.8.0 h1:EBmGv8NaZBZTWvrbjNoL6HVt+IVy3QDQpJs7VRIw3tU= golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod 
h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210220032956-6a3ed077a48d/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= -golang.org/x/term v0.5.0 h1:n2a8QNdAb0sZNpU9R1ALUXBbY+w51fCQDN+7EdxNBsY= -golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= golang.org/x/term v0.8.0 h1:n5xxQn2i3PC0yLAbjTpNT85q/Kgzcr2gIoX9OrJUols= golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= golang.org/x/text v0.0.0-20160726164857-2910a502d2bf/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -911,8 +898,6 @@ golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.7.0 h1:4BRB4x83lYWy72KwLD/qYDuTu7q9PjSagHvijDw7cLo= -golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.9.0 h1:2sjJmO8cDvYveuX97RDLsxlyUxLl+GHoLxBiRdHllBE= golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/time v0.0.0-20161028155119-f51c12702a4d/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= @@ -1146,7 +1131,6 @@ gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMy gopkg.in/inf.v0 v0.9.0/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= gopkg.in/inf.v0 v0.9.1 h1:73M5CoZyi3ZLMOyDlQh031Cx6N9NDJ2Vvfl76EDAgDc= gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= -gopkg.in/ini.v1 v1.51.0 h1:AQvPpx3LzTDM0AjnIRlVFwFFGC+npRopjZxLJj6gdno= gopkg.in/ini.v1 v1.51.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA= 
gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= From a4fba416dd66511ca1d5fe24d32df0ff0a124c57 Mon Sep 17 00:00:00 2001 From: Giulio Frasca Date: Wed, 19 Jul 2023 17:31:13 -0400 Subject: [PATCH 03/47] Add disableHealthCheck functionality for DB/ObjStore - Useful for local testing and debugging --- api/v1alpha1/dspipeline_types.go | 6 ++++++ ...ahub.io_datasciencepipelinesapplications.yaml | 6 ++++++ controllers/database.go | 5 +++++ controllers/dspipeline_params.go | 16 ++++++++++++++++ controllers/storage.go | 7 ++++++- 5 files changed, 39 insertions(+), 1 deletion(-) diff --git a/api/v1alpha1/dspipeline_types.go b/api/v1alpha1/dspipeline_types.go index 44578faa7..ce211113a 100644 --- a/api/v1alpha1/dspipeline_types.go +++ b/api/v1alpha1/dspipeline_types.go @@ -122,6 +122,9 @@ type MlPipelineUI struct { type Database struct { *MariaDB `json:"mariaDB,omitempty"` *ExternalDB `json:"externalDB,omitempty"` + // +kubebuilder:default:=false + // +kubebuilder:validation:Optional + DisableHealthCheck bool `json:"disableHealthCheck"` } type MariaDB struct { @@ -151,6 +154,9 @@ type ExternalDB struct { type ObjectStorage struct { *Minio `json:"minio,omitempty"` *ExternalStorage `json:"externalStorage,omitempty"` + // +kubebuilder:default:=false + // +kubebuilder:validation:Optional + DisableHealthCheck bool `json:"disableHealthCheck"` } type Minio struct { diff --git a/config/crd/bases/datasciencepipelinesapplications.opendatahub.io_datasciencepipelinesapplications.yaml b/config/crd/bases/datasciencepipelinesapplications.opendatahub.io_datasciencepipelinesapplications.yaml index 33e312e40..2f4bb903d 100644 --- a/config/crd/bases/datasciencepipelinesapplications.opendatahub.io_datasciencepipelinesapplications.yaml +++ b/config/crd/bases/datasciencepipelinesapplications.opendatahub.io_datasciencepipelinesapplications.yaml @@ -138,6 +138,9 @@ spec: mariaDB: deploy: true properties: + disableHealthCheck: + default: false + type: boolean externalDB: 
properties: host: @@ -431,6 +434,9 @@ spec: type: object objectStorage: properties: + disableHealthCheck: + default: false + type: boolean externalStorage: properties: bucket: diff --git a/controllers/database.go b/controllers/database.go index bbcfdd533..46cd0ed3d 100644 --- a/controllers/database.go +++ b/controllers/database.go @@ -54,6 +54,11 @@ func (r *DSPAReconciler) isDatabaseAccessible(ctx context.Context, dsp *dspav1al params *DSPAParams) bool { log := r.Log.WithValues("namespace", dsp.Namespace).WithValues("dspa_name", dsp.Name) + if params.DatabaseHealthCheckDisabled(dsp) { + log.V(1).Info("Database health check disabled, assuming database is available and ready.") + return true + } + log.Info("Performing Database Health Check") databaseSpecified := dsp.Spec.Database != nil usingExternalDB := params.UsingExternalDB(dsp) diff --git a/controllers/dspipeline_params.go b/controllers/dspipeline_params.go index e81373647..141b2d1ad 100644 --- a/controllers/dspipeline_params.go +++ b/controllers/dspipeline_params.go @@ -81,6 +81,14 @@ func (p *DSPAParams) UsingExternalDB(dsp *dspa.DataSciencePipelinesApplication) return false } +// StorageHealthCheckDisabled will return the value if the Database has disableHealthCheck specified in the CR, otherwise false. +func (p *DSPAParams) DatabaseHealthCheckDisabled(dsp *dspa.DataSciencePipelinesApplication) bool { + if dsp.Spec.Database != nil { + return dsp.Spec.Database.DisableHealthCheck + } + return false +} + // UsingExternalStorage will return true if an external Object Storage is specified in the CR, otherwise false. 
func (p *DSPAParams) UsingExternalStorage(dsp *dspa.DataSciencePipelinesApplication) bool { if dsp.Spec.ObjectStorage != nil && dsp.Spec.ObjectStorage.ExternalStorage != nil { @@ -89,6 +97,14 @@ func (p *DSPAParams) UsingExternalStorage(dsp *dspa.DataSciencePipelinesApplicat return false } +// ObjectStorageHealthCheckDisabled will return the value if the Object Storage has disableHealthCheck specified in the CR, otherwise false. +func (p *DSPAParams) ObjectStorageHealthCheckDisabled(dsp *dspa.DataSciencePipelinesApplication) bool { + if dsp.Spec.ObjectStorage != nil { + return dsp.Spec.ObjectStorage.DisableHealthCheck + } + return false +} + func (p *DSPAParams) UsingMLMD(dsp *dspa.DataSciencePipelinesApplication) bool { if dsp.Spec.MLMD != nil { return dsp.Spec.MLMD.Deploy diff --git a/controllers/storage.go b/controllers/storage.go index a542f8f17..ecca82086 100644 --- a/controllers/storage.go +++ b/controllers/storage.go @@ -85,6 +85,11 @@ var ConnectAndQueryObjStore = func(ctx context.Context, log logr.Logger, endpoin func (r *DSPAReconciler) isObjectStorageAccessible(ctx context.Context, dsp *dspav1alpha1.DataSciencePipelinesApplication, params *DSPAParams) bool { log := r.Log.WithValues("namespace", dsp.Namespace).WithValues("dspa_name", dsp.Name) + if params.ObjectStorageHealthCheckDisabled(dsp) { + log.V(1).Info("Object Storage health check disabled, assuming object store is available and ready.") + return true + } + log.Info("Performing Object Storage Health Check") endpoint := joinHostPort(params.ObjectStorageConnection.Host, params.ObjectStorageConnection.Port) @@ -100,7 +105,7 @@ func (r *DSPAReconciler) isObjectStorageAccessible(ctx context.Context, dsp *dsp return false } - verified := ConnectAndQueryObjStore(ctx, log, endpoint, accesskey, secretkey, params.ObjectStorageConnection.Secure) + verified := ConnectAndQueryObjStore(ctx, log, endpoint, accesskey, secretkey, *params.ObjectStorageConnection.Secure) if verified { log.Info("Object Storage Health 
Check Successful") } else { From 0563af3fe4c9b72a587e8606496d40c085d8d42d Mon Sep 17 00:00:00 2001 From: Giulio Frasca Date: Wed, 19 Jul 2023 18:07:30 -0400 Subject: [PATCH 04/47] Updated regenerated deepcopy --- api/v1alpha1/zz_generated.deepcopy.go | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/api/v1alpha1/zz_generated.deepcopy.go b/api/v1alpha1/zz_generated.deepcopy.go index 2326c7f27..babfd004f 100644 --- a/api/v1alpha1/zz_generated.deepcopy.go +++ b/api/v1alpha1/zz_generated.deepcopy.go @@ -270,6 +270,11 @@ func (in *ExternalStorage) DeepCopyInto(out *ExternalStorage) { *out = new(S3CredentialSecret) **out = **in } + if in.Secure != nil { + in, out := &in.Secure, &out.Secure + *out = new(bool) + **out = **in + } } // DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ExternalStorage. From 8f1f2b84687fde39cbb0c34685980e92fab7b4fb Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Fri, 21 Jul 2023 17:00:44 -0400 Subject: [PATCH 05/47] Add gh to jira action. Signed-off-by: Humair Khan --- .github/workflows/gh-to-jira.yaml | 77 +++++++++++++++++++++++++++++++ 1 file changed, 77 insertions(+) create mode 100644 .github/workflows/gh-to-jira.yaml diff --git a/.github/workflows/gh-to-jira.yaml b/.github/workflows/gh-to-jira.yaml new file mode 100644 index 000000000..d43e66f9a --- /dev/null +++ b/.github/workflows/gh-to-jira.yaml @@ -0,0 +1,77 @@ +name: Create Jira Issue from GH Release +run-name: GH Release to Jira. 
+on: + workflow_dispatch: + inputs: + component: + default: 'Data Science Pipelines' + description: 'ODH Component' + required: true + target_release: + description: 'Target Downstream Release' + required: true + gh_org: + default: 'opendatahub-io' + description: 'Upstream GH Org' + required: true + repos: + default: | + [{"repo_name":"data-science-pipelines","target_release":"UPDATE","previous_release":"UPDATE"},{"repo_name":"data-science-pipelines-operator","target_release":"UPDATE","previous_release":"UPDATE"}] + description: 'Upstream Source Repos & Tags' + required: true + labels: + default: 'qe/verify' + required: true + description: "" + jira_server: + default: 'https://issues.redhat.com' + required: true + description: "Jira Server" + jira_project: + default: "RHODS" + required: true + description: "Jira Project" + jira_labels: + default: "MLOps" + required: true + description: "Jira Labels to Add" + jira_issue_type: + default: "Story" + required: true + description: "Jira Issue Type" + jira_priority: + default: 'Normal' + required: true + description: "Jira Priority to Set" + +jobs: + gh-to-jira: + runs-on: ubuntu-latest + steps: + - name: Git checkout + uses: actions/checkout@v3 + with: + repository: HumairAK/gh-to-jira + fetch-depth: '0' + - name: Set up Python 3.10 + uses: actions/setup-python@v1 + with: + python-version: '3.10' + - name: Install dependencies + run: | + pip install -r requirements.txt + - name: Submit Jira + env: + GITHUB_TOKEN: ${{ secrets.GTJ_GH_TOKEN }} + JIRA_TOKEN: ${{ secrets.GTJ_JIRA_TOKEN }} + REPOS: ${{ inputs.repos }} + run: | + python src --component="${{ inputs.component }}" \ + --target_release="${{ inputs.target_release }}" \ + --org="${{ inputs.gh_org }}" \ + --labels="${{ inputs.labels }}" \ + --jira_server="${{ inputs.jira_server }}" \ + --jira_project="${{ inputs.jira_project }}" \ + --jira_labels="${{ inputs.jira_labels }}" \ + --jira_issue_type="${{ inputs.jira_issue_type }}" \ + --jira_priority="${{ 
inputs.jira_priority }}" From 9c7b055f23f3949bcc9adff65471ab7c596ec4c7 Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Mon, 24 Jul 2023 12:46:56 -0400 Subject: [PATCH 06/47] Add issue forms for dspo. Signed-off-by: Humair Khan --- .github/ISSUE_TEMPLATE/bug_report.yaml | 91 ++++++++++++++++++++++ .github/ISSUE_TEMPLATE/feature_request.yml | 33 ++++++++ 2 files changed, 124 insertions(+) create mode 100644 .github/ISSUE_TEMPLATE/bug_report.yaml create mode 100644 .github/ISSUE_TEMPLATE/feature_request.yml diff --git a/.github/ISSUE_TEMPLATE/bug_report.yaml b/.github/ISSUE_TEMPLATE/bug_report.yaml new file mode 100644 index 000000000..f4371b6d3 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.yaml @@ -0,0 +1,91 @@ +name: Bug Report +description: File a bug report. +title: "[Bug]: " +labels: ["kind/bug", "priority/normal"] +body: + - type: markdown + attributes: + value: | + Thanks for taking the time to fill out this bug report! Please, fill this form to help us improve the project. + - type: checkboxes + attributes: + label: Is there an existing issue for this? + description: Please search to see if an issue already exists for the bug you encountered. + options: + - label: I have searched the existing issues + required: true + - type: dropdown + id: deploy-type + attributes: + label: Deploy type + description: | + How did you deploy DSPO? + multiple: false + options: + - Standalone DSPO (without ODH) + - Manually deployed Kfdef + - ODH Dashboard UI + validations: + required: true + - type: input + id: version + attributes: + label: Version + description: | + If using ODH, please provide the ODH version. + If using standalone DSPO, please provide the tag version + used for this repo, or state "main" if deploying + directly from main branch. + validations: + required: true + - type: textarea + id: environment + attributes: + label: Environment + description: Describe your environment. 
+ placeholder: | + * Python Version (if relevant): + * SDK Version (if relevant): + * OCP Pipelines Version: + * OCP Version: + validations: + required: true + - type: textarea + attributes: + label: Current Behavior + description: A concise description of what you're experiencing. + validations: + required: true + - type: textarea + attributes: + label: Expected Behavior + description: A concise description of what you expected to happen. + validations: + required: true + - type: textarea + attributes: + label: Steps To Reproduce + description: Steps to reproduce the behavior. + placeholder: | + 1. In this environment... + 2. With this config... + 3. Run '...' + 4. See error... + - type: textarea + id: workaround + attributes: + label: Workaround (if any) + description: Any manual steps that allow you to resolve the issue + placeholder: Tell us the steps you followed to resolve the issue! + validations: + required: false + - type: textarea + id: anything-else + attributes: + label: Anything else + description: | + Any additional information you'd like to share + + Tip: You can attach images or log files by clicking this area to highlight it and then dragging files in. + validations: + required: false diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml new file mode 100644 index 000000000..79f674a0a --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.yml @@ -0,0 +1,33 @@ +name: Feature request +description: Suggest an idea for this project. +title: "[Feature Request]: " +labels: ["kind/enhancement", "priority/normal"] +body: + - type: markdown + attributes: + value: | + Thanks for taking the time to fill out this feature request! Please, fill this form to help us improve the project. + - type: textarea + id: description + attributes: + label: Feature description + description: A clear and concise description of what you want to happen. 
+ validations: + required: true + - type: textarea + id: describe-alternatives + attributes: + label: Describe alternatives you've considered + description: A clear and concise description of any alternative solutions or features you've considered. + placeholder: Tell us about alternatives you've considered... + validations: + required: false + - type: textarea + attributes: + label: Anything else? + description: | + Links? References? Add any other context or screenshots about the feature request here. + + Tip: You can attach images or log files by clicking this area to highlight it and then dragging files in. + validations: + required: false From 2fe988ada01cf647879baa8190e0dcfa8c2b5db9 Mon Sep 17 00:00:00 2001 From: Sven Thoms <21118431+shalberd@users.noreply.github.com> Date: Mon, 24 Jul 2023 20:08:06 +0200 Subject: [PATCH 07/47] change oauth proxy digest value to manifest list digest of tag v4.10 --- config/base/params.env | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/config/base/params.env b/config/base/params.env index e8adc3bc3..ef633b3d9 100644 --- a/config/base/params.env +++ b/config/base/params.env @@ -6,7 +6,7 @@ IMAGES_CACHE=registry.access.redhat.com/ubi8/ubi-minimal@sha256:e52fc1de73dc2879 IMAGES_MOVERESULTSIMAGE=registry.access.redhat.com/ubi8/ubi-micro@sha256:443db9a646aaf9374f95d266ba0c8656a52d70d0ffcc386a782cea28fa32e55d IMAGES_MARIADB=registry.redhat.io/rhel8/mariadb-103@sha256:cafc7364494fb7206c373a1235fd5da74399c19b5c34d87dd02aa07e8f343fa2 IMAGES_DSPO=quay.io/opendatahub/data-science-pipelines-operator@sha256:bd4f3cfc9688aeb4296a5f3f7274557adeca0a8811533da750f05b485a819a8d -IMAGES_OAUTHPROXY=registry.redhat.io/openshift4/ose-oauth-proxy@sha256:d0f2f1ef0bdc3aa1a70794ac8ac779271b634af83e939029ac5224ec0c815d7a +IMAGES_OAUTHPROXY=registry.redhat.io/openshift4/ose-oauth-proxy@sha256:ab112105ac37352a2a4916a39d6736f5db6ab4c29bad4467de8d613e80e9bb33 
IMAGES_MLMDENVOY=quay.io/opendatahub/ds-pipelines-metadata-envoy@sha256:f2d5d430bbc925520f635f35698e604aae391ace39b15a5d601a9c9eb26dec2b IMAGES_MLMDGRPC=quay.io/opendatahub/ds-pipelines-metadata-grpc@sha256:2490aadb2227cc72fd9e698549a8cd3270b669a2faa24bb0603c37f1c71ac8c4 IMAGES_MLMDWRITER=quay.io/opendatahub/ds-pipelines-metadata-writer@sha256:89fc26374f8e58384628f6b178eb9b8e3ebb111fe395c529d0b65ba8adaa89f5 From 7363daf3482b83f4932a21f5198f6fe8f99159db Mon Sep 17 00:00:00 2001 From: Giulio Frasca Date: Mon, 24 Jul 2023 16:51:19 -0400 Subject: [PATCH 08/47] Handle behavior if only database.disableHealthCheck specified - specifying Spec.Database.DsiableHealthCheck but not externalDB/mariaDB leads to an issue where the DSPO does not recognize the need for a default DB deployment. This handles that case by including it in the default-deploy behavior --- controllers/database.go | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/controllers/database.go b/controllers/database.go index 46cd0ed3d..f5a5f2457 100644 --- a/controllers/database.go +++ b/controllers/database.go @@ -91,8 +91,12 @@ func (r *DSPAReconciler) ReconcileDatabase(ctx context.Context, dsp *dspav1alpha // DB field can be specified as an empty obj, confirm that subfields are also specified // By default if Database is empty, we deploy mariadb externalDBSpecified := params.UsingExternalDB(dsp) - mariaDBSpecified := !databaseSpecified || dsp.Spec.Database.MariaDB != nil - deployMariaDB := !databaseSpecified || (mariaDBSpecified && dsp.Spec.Database.MariaDB.Deploy) + mariaDBSpecified := dsp.Spec.Database.MariaDB != nil + defaultDBRequired := (!databaseSpecified || (!externalDBSpecified && !mariaDBSpecified)) + + deployMariaDB := (mariaDBSpecified && dsp.Spec.Database.MariaDB.Deploy) + // Default DB is currently MariaDB as well, but storing these bools seperately in case that changes + deployDefaultDB := (!databaseSpecified || defaultDBRequired) // If external db is specified, it 
takes precedence if externalDBSpecified { @@ -103,7 +107,7 @@ func (r *DSPAReconciler) ReconcileDatabase(ctx context.Context, dsp *dspav1alpha if err != nil { return err } - } else if deployMariaDB { + } else if deployMariaDB || deployDefaultDB { log.Info("Applying mariaDB resources.") for _, template := range dbTemplates { err := r.Apply(dsp, params, template) @@ -116,6 +120,8 @@ func (r *DSPAReconciler) ReconcileDatabase(ctx context.Context, dsp *dspav1alpha // desired state. if !databaseSpecified { dsp.Spec.Database = &dspav1alpha1.Database{} + } + if !databaseSpecified || defaultDBRequired { dsp.Spec.Database.MariaDB = params.MariaDB.DeepCopy() dsp.Spec.Database.MariaDB.Deploy = true if err := r.Update(ctx, dsp); err != nil { From ea490c4c0aec1a64d4ba5a4b3fe8fe9f466a89e5 Mon Sep 17 00:00:00 2001 From: Giulio Frasca Date: Tue, 25 Jul 2023 14:42:35 -0400 Subject: [PATCH 09/47] Add disableHeatlhChecks fields to dspa_all_fields.yaml sample --- config/samples/dspa_all_fields.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/config/samples/dspa_all_fields.yaml b/config/samples/dspa_all_fields.yaml index e765758d8..be0cc8ccd 100644 --- a/config/samples/dspa_all_fields.yaml +++ b/config/samples/dspa_all_fields.yaml @@ -74,6 +74,7 @@ spec: # otherwise operator will not deploy DSPA configMap: ds-pipeline-ui-configmap database: + disableHealthCheck: false mariaDB: # mutually exclusive with externalDB deploy: true image: registry.redhat.io/rhel8/mariadb-103:1-188 @@ -101,6 +102,7 @@ spec: # name: somesecret # key: somekey objectStorage: + disableHealthCheck: false minio: # mutually exclusive with externalStorage deploy: true image: quay.io/opendatahub/minio:RELEASE.2019-08-14T20-37-41Z-license-compliance From 050f4c2a0a8de4b2119e4db0986bfca9d1cfb79a Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Tue, 25 Jul 2023 13:58:22 -0400 Subject: [PATCH 10/47] Add pr template. 
Signed-off-by: Humair Khan --- .github/PULL_REQUEST_TEMPLATE.md | 15 +++++++++++++++ 1 file changed, 15 insertions(+) create mode 100644 .github/PULL_REQUEST_TEMPLATE.md diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 000000000..e2cca413f --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,15 @@ +## Ths issue resolved by this Pull Request: +Resolves # + +## Description of your changes: + + + +## Testing instructions + + +## Checklist +- [ ] The commits are squashed in a cohesive manner and have meaningful messages. +- [ ] Testing instructions have been added in the PR body (for PRs involving changes that are not immediately obvious). +- [ ] The developer has manually tested the changes and verified that the changes work From 133e0170a38b6ddcb758b7cb7408250948afaa29 Mon Sep 17 00:00:00 2001 From: Giulio Frasca Date: Tue, 25 Jul 2023 15:59:27 -0400 Subject: [PATCH 11/47] Code cleanup for DB/ObjStore waiting functionality --- controllers/database.go | 12 ++++++------ controllers/dspipeline_controller.go | 8 ++++---- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/controllers/database.go b/controllers/database.go index f5a5f2457..065730ea8 100644 --- a/controllers/database.go +++ b/controllers/database.go @@ -69,17 +69,17 @@ func (r *DSPAReconciler) isDatabaseAccessible(ctx context.Context, dsp *dspav1al } decodePass, _ := b64.StdEncoding.DecodeString(params.DBConnection.Password) - db_connect := ConnectAndQueryDatabase(params.DBConnection.Host, + dbHealthCheckPassed := ConnectAndQueryDatabase(params.DBConnection.Host, params.DBConnection.Port, params.DBConnection.Username, string(decodePass), params.DBConnection.DBName) - if db_connect { + if dbHealthCheckPassed { log.Info("Database Health Check Successful") } else { log.Info("Unable to connect to Database") } - return db_connect + return dbHealthCheckPassed } func (r *DSPAReconciler) ReconcileDatabase(ctx context.Context, dsp 
*dspav1alpha1.DataSciencePipelinesApplication, @@ -92,11 +92,11 @@ func (r *DSPAReconciler) ReconcileDatabase(ctx context.Context, dsp *dspav1alpha // By default if Database is empty, we deploy mariadb externalDBSpecified := params.UsingExternalDB(dsp) mariaDBSpecified := dsp.Spec.Database.MariaDB != nil - defaultDBRequired := (!databaseSpecified || (!externalDBSpecified && !mariaDBSpecified)) + defaultDBRequired := !databaseSpecified || (!externalDBSpecified && !mariaDBSpecified) - deployMariaDB := (mariaDBSpecified && dsp.Spec.Database.MariaDB.Deploy) + deployMariaDB := mariaDBSpecified && dsp.Spec.Database.MariaDB.Deploy // Default DB is currently MariaDB as well, but storing these bools seperately in case that changes - deployDefaultDB := (!databaseSpecified || defaultDBRequired) + deployDefaultDB := !databaseSpecified || defaultDBRequired // If external db is specified, it takes precedence if externalDBSpecified { diff --git a/controllers/dspipeline_controller.go b/controllers/dspipeline_controller.go index c287372e1..adae055dc 100644 --- a/controllers/dspipeline_controller.go +++ b/controllers/dspipeline_controller.go @@ -221,9 +221,9 @@ func (r *DSPAReconciler) Reconcile(ctx context.Context, req ctrl.Request) (ctrl. // Get Prereq Status (DB and ObjStore Ready) dbAvailable := r.isDatabaseAccessible(ctx, dspa, params) objStoreAvailable := r.isObjectStorageAccessible(ctx, dspa, params) - dspa_prereqs_ready := (dbAvailable && objStoreAvailable) + dspaPrereqsReady := (dbAvailable && objStoreAvailable) - if dspa_prereqs_ready { + if dspaPrereqsReady { // Manage Common Manifests err = r.ReconcileCommon(dspa, params) if err != nil { @@ -264,7 +264,7 @@ func (r *DSPAReconciler) Reconcile(ctx context.Context, req ctrl.Request) (ctrl. 
return ctrl.Result{}, err } - conditions, err := r.GenerateStatus(ctx, dspa, params, dbAvailable, objStoreAvailable) + conditions, err := r.GenerateStatus(ctx, dspa, dbAvailable, objStoreAvailable) if err != nil { log.Info(err.Error()) return ctrl.Result{}, err @@ -409,7 +409,7 @@ func (r *DSPAReconciler) handleReadyCondition(ctx context.Context, dspa *dspav1a } -func (r *DSPAReconciler) GenerateStatus(ctx context.Context, dspa *dspav1alpha1.DataSciencePipelinesApplication, params *DSPAParams, dbAvailableStatus, objStoreAvailableStatus bool) ([]metav1.Condition, error) { +func (r *DSPAReconciler) GenerateStatus(ctx context.Context, dspa *dspav1alpha1.DataSciencePipelinesApplication, dbAvailableStatus, objStoreAvailableStatus bool) ([]metav1.Condition, error) { // Create Database Availability Condition databaseAvailable := r.buildCondition(config.DatabaseAvailable, dspa, config.DatabaseAvailable) if dbAvailableStatus { From 0c406ff29b267d787de91aa8b5fad5ad680ae6df Mon Sep 17 00:00:00 2001 From: Giulio Frasca Date: Tue, 25 Jul 2023 16:12:40 -0400 Subject: [PATCH 12/47] Add Local Development sample DSPA --- config/samples/dspa_local_dev.yaml | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) create mode 100644 config/samples/dspa_local_dev.yaml diff --git a/config/samples/dspa_local_dev.yaml b/config/samples/dspa_local_dev.yaml new file mode 100644 index 000000000..e3c112864 --- /dev/null +++ b/config/samples/dspa_local_dev.yaml @@ -0,0 +1,19 @@ +# A simple DSPA with the Database and ObjectStore Health Checks Disabled +# +# Since the default database and storage options leverage internal Services, +# a locally-run DSPO that manages an external cluster (common development practice) +# would not be able to run the pre-deploy health checks on these prerequisite components +# and therefore the DSPA will never fully deploy without disabling them, as this DSPA sample does +apiVersion: datasciencepipelinesapplications.opendatahub.io/v1alpha1 +kind: 
DataSciencePipelinesApplication +metadata: + name: sample +spec: + database: + disableHealthCheck: true + objectStorage: + disableHealthCheck: true + minio: + image: 'quay.io/opendatahub/minio:RELEASE.2019-08-14T20-37-41Z-license-compliance' + mlpipelineUI: + image: 'quay.io/opendatahub/odh-ml-pipelines-frontend-container:beta-ui' From 33a3004a818449b59b3a43eb592bd007f88cb891 Mon Sep 17 00:00:00 2001 From: ddalvi Date: Tue, 18 Jul 2023 13:13:54 -0400 Subject: [PATCH 13/47] GH action for automated PRs to odh-manifests --- .github/workflows/odh-manifests-PR-sync.yml | 66 +++++++++++++++++++++ 1 file changed, 66 insertions(+) create mode 100644 .github/workflows/odh-manifests-PR-sync.yml diff --git a/.github/workflows/odh-manifests-PR-sync.yml b/.github/workflows/odh-manifests-PR-sync.yml new file mode 100644 index 000000000..280db7403 --- /dev/null +++ b/.github/workflows/odh-manifests-PR-sync.yml @@ -0,0 +1,66 @@ +name: odh-manifests sync + +run-name: Sync manifests in odh-manifests +on: + workflow_dispatch: + push: + tags: + - '*' +jobs: + send-pull-requests: + runs-on: ubuntu-latest + permissions: + pull-requests: write + steps: + - name: Checkout data-science-pipelines-operator repository + uses: actions/checkout@v2 + with: + fetch-depth: 0 + ref: main + repository: opendatahub-io/data-science-pipelines-operator + token: ${{ secrets.GITHUB_TOKEN }} + + - name: Send pull-request + run: | + LATEST_TAG=$(git describe --tags --always --abbrev=0) + REPOSITORY="opendatahub-io/odh-manifests" + FOLDER="bin/$REPOSITORY" + BRANCH_NAME="chore-update-scripts-to-$LATEST_TAG" + + # Clone the remote repository and change working directory to the + # folder it was cloned to. + git clone \ + --depth=1 \ + --branch=master \ + https://opendatahub-io:${{ secrets.ACCESS_TOKEN }}@github.com/$REPOSITORY \ + $FOLDER + cd $FOLDER + + # Setup the committers identity. 
+ git config user.email "140449482+dsp-developers@users.noreply.github.com" + git config user.name "dsp-developers" + + # Create a new feature branch for the changes. + git checkout -b $BRANCH_NAME + echo "Created branch: $BRANCH_NAME" + + # Copy DSPO manifests. Using rsync to allow filtering of paths/files (e.g. like a .gitignore, hidden files, etc) + + rsync -av --exclude={'overlays/','samples/','internal/'} ../../../config/ data-science-pipelines-operator/ + + # Commit the changes and push the feature branch to origin + git add . + git commit -m "Update DSPO to $LATEST_TAG" + # Check if the branch exists and perform rebase if it does + if git ls-remote --exit-code --heads origin $BRANCH_NAME; then + git pull --rebase origin $BRANCH_NAME + fi + git push origin $BRANCH_NAME + + gh pr create \ + --body "This is an automated PR to update Data Science Pipelines Operator manifests to $LATEST_TAG" \ + --title "Update DSP Operator manifests to $LATEST_TAG" \ + --head "$BRANCH_NAME" \ + --base "master" + env: + GH_TOKEN: ${{ secrets.ACCESS_TOKEN }} From eee4412f53a289dbb97cc4a6919cbf5f44b2640c Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Wed, 26 Jul 2023 12:38:20 -0400 Subject: [PATCH 14/47] Fix typo in pr template. 
Signed-off-by: Humair Khan --- .github/PULL_REQUEST_TEMPLATE.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index e2cca413f..41f680ae5 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -1,4 +1,4 @@ -## Ths issue resolved by this Pull Request: +## The issue resolved by this Pull Request: Resolves # ## Description of your changes: From d7441df88911eebe41a5e2c3d3ad63e52060ecf3 Mon Sep 17 00:00:00 2001 From: vaishnavipatil3 <99957726+vaishnavipatil3@users.noreply.github.com> Date: Thu, 27 Jul 2023 12:05:34 +0530 Subject: [PATCH 15/47] Update Makefile for power kustomize change --- Makefile | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/Makefile b/Makefile index f02f0041a..b3253b67e 100644 --- a/Makefile +++ b/Makefile @@ -164,7 +164,13 @@ CONTROLLER_GEN ?= $(LOCALBIN)/controller-gen ENVTEST ?= $(LOCALBIN)/setup-envtest ## Tool Versions +arch:= $(shell uname -m) + +ifeq ($(arch), ppc64le) +KUSTOMIZE_VERSION ?= v5.1.0 +else KUSTOMIZE_VERSION ?= v3.8.7 +endif CONTROLLER_TOOLS_VERSION ?= v0.10.0 KUSTOMIZE_INSTALL_SCRIPT ?= "https://raw.githubusercontent.com/kubernetes-sigs/kustomize/master/hack/install_kustomize.sh" From 304d4e34a5b6596064fe798d69cb6fcfdcc30a31 Mon Sep 17 00:00:00 2001 From: Achyut Madhusudan Date: Wed, 12 Jul 2023 00:43:30 +0530 Subject: [PATCH 16/47] Added RequeueAfter to test the reconciling logic. 
Signed-off-by: Achyut Madhusudan --- controllers/dspipeline_controller.go | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/controllers/dspipeline_controller.go b/controllers/dspipeline_controller.go index adae055dc..d1d4e8658 100644 --- a/controllers/dspipeline_controller.go +++ b/controllers/dspipeline_controller.go @@ -19,6 +19,7 @@ package controllers import ( "context" "fmt" + "time" "github.com/go-logr/logr" mf "github.com/manifestival/manifestival" @@ -161,10 +162,10 @@ func (r *DSPAReconciler) Reconcile(ctx context.Context, req ctrl.Request) (ctrl. dspa := &dspav1alpha1.DataSciencePipelinesApplication{} err := r.Get(ctx, req.NamespacedName, dspa) if err != nil && apierrs.IsNotFound(err) { - log.Info("Stop DSPAParams reconciliation") + log.Info("DSPA resource was not found") return ctrl.Result{}, nil } else if err != nil { - log.Error(err, "Unable to fetch the DSPAParams") + log.Error(err, "Encountered error when fetching DSPA") return ctrl.Result{}, err } @@ -205,7 +206,7 @@ func (r *DSPAReconciler) Reconcile(ctx context.Context, req ctrl.Request) (ctrl. err = params.ExtractParams(ctx, dspa, r.Client, r.Log) if err != nil { log.Info(fmt.Sprintf("Encountered error when parsing CR: [%s]", err)) - return ctrl.Result{}, nil + return ctrl.Result{Requeue: true, RequeueAfter: 2 * time.Minute}, nil } err = r.ReconcileDatabase(ctx, dspa, params) From 08eb98dbb3f6b47740608e050ad40bc93779f740 Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Mon, 24 Jul 2023 08:39:14 -0400 Subject: [PATCH 17/47] Add push artifact work around. The workaround identifies artifacts in workspace and pushes them to s3 storage. 
Signed-off-by: Humair Khan --- config/internal/apiserver/artifact_script.yaml.tmpl | 13 ++++++++++--- .../expected/created/configmap_artifact_script.yaml | 13 ++++++++++--- .../expected/created/configmap_artifact_script.yaml | 13 ++++++++++--- .../expected/created/configmap_artifact_script.yaml | 13 ++++++++++--- .../expected/created/configmap_artifact_script.yaml | 13 ++++++++++--- 5 files changed, 50 insertions(+), 15 deletions(-) diff --git a/config/internal/apiserver/artifact_script.yaml.tmpl b/config/internal/apiserver/artifact_script.yaml.tmpl index f54d55584..9af903b2f 100644 --- a/config/internal/apiserver/artifact_script.yaml.tmpl +++ b/config/internal/apiserver/artifact_script.yaml.tmpl @@ -3,9 +3,16 @@ data: artifact_script: |- #!/usr/bin/env sh push_artifact() { - if [ -f "$2" ]; then - tar -cvzf $1.tgz $2 - aws s3 --endpoint {{.ObjectStorageConnection.Endpoint}} cp $1.tgz s3://{{.ObjectStorageConnection.Bucket}}/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz + workspace_dir=$(echo $(context.taskRun.name) | sed -e "s/$(context.pipeline.name)-//g") + workspace_dest=/workspace/${workspace_dir}/artifacts/$(context.pipelineRun.name)/$(context.taskRun.name) + artifact_name=$(basename $2) + if [ -f "$workspace_dest/$artifact_name" ]; then + echo sending to: ${workspace_dest}/${artifact_name} + tar -cvzf $1.tgz -C ${workspace_dest} ${artifact_name} + aws s3 --endpoint ${ARTIFACT_ENDPOINT} cp $1.tgz s3://${ARTIFACT_BUCKET}/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz + elif [ -f "$2" ]; then + tar -cvzf $1.tgz -C $(dirname $2) ${artifact_name} + aws s3 --endpoint ${ARTIFACT_ENDPOINT} cp $1.tgz s3://${ARTIFACT_BUCKET}/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz else echo "$2 file does not exist. 
Skip artifact tracking for $1" fi diff --git a/controllers/testdata/declarative/case_0/expected/created/configmap_artifact_script.yaml b/controllers/testdata/declarative/case_0/expected/created/configmap_artifact_script.yaml index a6e1e3658..307711088 100644 --- a/controllers/testdata/declarative/case_0/expected/created/configmap_artifact_script.yaml +++ b/controllers/testdata/declarative/case_0/expected/created/configmap_artifact_script.yaml @@ -3,9 +3,16 @@ data: artifact_script: |- #!/usr/bin/env sh push_artifact() { - if [ -f "$2" ]; then - tar -cvzf $1.tgz $2 - aws s3 --endpoint http://minio-testdsp0.default.svc.cluster.local:9000 cp $1.tgz s3://mlpipeline/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz + workspace_dir=$(echo $(context.taskRun.name) | sed -e "s/$(context.pipeline.name)-//g") + workspace_dest=/workspace/${workspace_dir}/artifacts/$(context.pipelineRun.name)/$(context.taskRun.name) + artifact_name=$(basename $2) + if [ -f "$workspace_dest/$artifact_name" ]; then + echo sending to: ${workspace_dest}/${artifact_name} + tar -cvzf $1.tgz -C ${workspace_dest} ${artifact_name} + aws s3 --endpoint ${ARTIFACT_ENDPOINT} cp $1.tgz s3://${ARTIFACT_BUCKET}/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz + elif [ -f "$2" ]; then + tar -cvzf $1.tgz -C $(dirname $2) ${artifact_name} + aws s3 --endpoint ${ARTIFACT_ENDPOINT} cp $1.tgz s3://${ARTIFACT_BUCKET}/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz else echo "$2 file does not exist. 
Skip artifact tracking for $1" fi diff --git a/controllers/testdata/declarative/case_2/expected/created/configmap_artifact_script.yaml b/controllers/testdata/declarative/case_2/expected/created/configmap_artifact_script.yaml index 82048ee18..88659df81 100644 --- a/controllers/testdata/declarative/case_2/expected/created/configmap_artifact_script.yaml +++ b/controllers/testdata/declarative/case_2/expected/created/configmap_artifact_script.yaml @@ -3,9 +3,16 @@ data: artifact_script: |- #!/usr/bin/env sh push_artifact() { - if [ -f "$2" ]; then - tar -cvzf $1.tgz $2 - aws s3 --endpoint http://minio-testdsp2.default.svc.cluster.local:9000 cp $1.tgz s3://mlpipeline/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz + workspace_dir=$(echo $(context.taskRun.name) | sed -e "s/$(context.pipeline.name)-//g") + workspace_dest=/workspace/${workspace_dir}/artifacts/$(context.pipelineRun.name)/$(context.taskRun.name) + artifact_name=$(basename $2) + if [ -f "$workspace_dest/$artifact_name" ]; then + echo sending to: ${workspace_dest}/${artifact_name} + tar -cvzf $1.tgz -C ${workspace_dest} ${artifact_name} + aws s3 --endpoint ${ARTIFACT_ENDPOINT} cp $1.tgz s3://${ARTIFACT_BUCKET}/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz + elif [ -f "$2" ]; then + tar -cvzf $1.tgz -C $(dirname $2) ${artifact_name} + aws s3 --endpoint ${ARTIFACT_ENDPOINT} cp $1.tgz s3://${ARTIFACT_BUCKET}/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz else echo "$2 file does not exist. 
Skip artifact tracking for $1" fi diff --git a/controllers/testdata/declarative/case_4/expected/created/configmap_artifact_script.yaml b/controllers/testdata/declarative/case_4/expected/created/configmap_artifact_script.yaml index b53a24551..b00c143cb 100644 --- a/controllers/testdata/declarative/case_4/expected/created/configmap_artifact_script.yaml +++ b/controllers/testdata/declarative/case_4/expected/created/configmap_artifact_script.yaml @@ -3,9 +3,16 @@ data: artifact_script: |- #!/usr/bin/env sh push_artifact() { - if [ -f "$2" ]; then - tar -cvzf $1.tgz $2 - aws s3 --endpoint http://minio-testdsp4.default.svc.cluster.local:9000 cp $1.tgz s3://mlpipeline/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz + workspace_dir=$(echo $(context.taskRun.name) | sed -e "s/$(context.pipeline.name)-//g") + workspace_dest=/workspace/${workspace_dir}/artifacts/$(context.pipelineRun.name)/$(context.taskRun.name) + artifact_name=$(basename $2) + if [ -f "$workspace_dest/$artifact_name" ]; then + echo sending to: ${workspace_dest}/${artifact_name} + tar -cvzf $1.tgz -C ${workspace_dest} ${artifact_name} + aws s3 --endpoint ${ARTIFACT_ENDPOINT} cp $1.tgz s3://${ARTIFACT_BUCKET}/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz + elif [ -f "$2" ]; then + tar -cvzf $1.tgz -C $(dirname $2) ${artifact_name} + aws s3 --endpoint ${ARTIFACT_ENDPOINT} cp $1.tgz s3://${ARTIFACT_BUCKET}/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz else echo "$2 file does not exist. 
Skip artifact tracking for $1" fi diff --git a/controllers/testdata/declarative/case_5/expected/created/configmap_artifact_script.yaml b/controllers/testdata/declarative/case_5/expected/created/configmap_artifact_script.yaml index 1220718c8..bf1f028c9 100644 --- a/controllers/testdata/declarative/case_5/expected/created/configmap_artifact_script.yaml +++ b/controllers/testdata/declarative/case_5/expected/created/configmap_artifact_script.yaml @@ -3,9 +3,16 @@ data: artifact_script: |- #!/usr/bin/env sh push_artifact() { - if [ -f "$2" ]; then - tar -cvzf $1.tgz $2 - aws s3 --endpoint http://minio-testdsp5.default.svc.cluster.local:9000 cp $1.tgz s3://mlpipeline/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz + workspace_dir=$(echo $(context.taskRun.name) | sed -e "s/$(context.pipeline.name)-//g") + workspace_dest=/workspace/${workspace_dir}/artifacts/$(context.pipelineRun.name)/$(context.taskRun.name) + artifact_name=$(basename $2) + if [ -f "$workspace_dest/$artifact_name" ]; then + echo sending to: ${workspace_dest}/${artifact_name} + tar -cvzf $1.tgz -C ${workspace_dest} ${artifact_name} + aws s3 --endpoint ${ARTIFACT_ENDPOINT} cp $1.tgz s3://${ARTIFACT_BUCKET}/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz + elif [ -f "$2" ]; then + tar -cvzf $1.tgz -C $(dirname $2) ${artifact_name} + aws s3 --endpoint ${ARTIFACT_ENDPOINT} cp $1.tgz s3://${ARTIFACT_BUCKET}/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz else echo "$2 file does not exist. 
Skip artifact tracking for $1" fi From 7aa753a5776da666cf1a42dbb50671ea95ecec4c Mon Sep 17 00:00:00 2001 From: ddalvi Date: Thu, 27 Jul 2023 14:23:13 -0400 Subject: [PATCH 18/47] Turn off on tag push event for PR sync action --- .github/workflows/odh-manifests-PR-sync.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/odh-manifests-PR-sync.yml b/.github/workflows/odh-manifests-PR-sync.yml index 280db7403..e3f6ff56c 100644 --- a/.github/workflows/odh-manifests-PR-sync.yml +++ b/.github/workflows/odh-manifests-PR-sync.yml @@ -3,9 +3,9 @@ name: odh-manifests sync run-name: Sync manifests in odh-manifests on: workflow_dispatch: - push: - tags: - - '*' +# push: +# tags: +# - '*' jobs: send-pull-requests: runs-on: ubuntu-latest From c9f5c1bd3e1d8e7f75b168013efafd0a217d163c Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Thu, 27 Jul 2023 14:40:01 -0400 Subject: [PATCH 19/47] Turn of auto image tagging for dspo. Signed-off-by: Humair Khan --- .github/workflows/tag-release-quay.yml | 23 +++++++++-------------- 1 file changed, 9 insertions(+), 14 deletions(-) diff --git a/.github/workflows/tag-release-quay.yml b/.github/workflows/tag-release-quay.yml index 6e11de620..385de10ad 100644 --- a/.github/workflows/tag-release-quay.yml +++ b/.github/workflows/tag-release-quay.yml @@ -1,14 +1,13 @@ name: Image push per Github Tag -# This GitHub action activates whenever a new tag is created on the repo under "opendatahub-io" -# and creates a copy of the image of the associated commit hash with the -# appropriate tag name. - -run-name: Creating new tag in quay based on pushed tag in Github. +run-name: Creating new tag in quay based on latest pushed tag in Github. 
on: - push: - tags: - - '*' + workflow_dispatch: + inputs: + target_tag: + default: 'vx.y.z' + description: 'DSPO Tag' + required: true env: QUAY_IMAGE_REPO: ${{ secrets.QUAY_IMAGE_REPO }} jobs: @@ -32,19 +31,15 @@ jobs: QUAY_ROBOT_TOKEN: ${{ secrets.QUAY_ROBOT_TOKEN }} run: | skopeo login quay.io -u ${QUAY_ROBOT_USERNAME} -p ${QUAY_ROBOT_TOKEN} - - name: Get latest tag name - id: tag - run: echo "tag=$(git describe --tags --abbrev=0)" >> ${GITHUB_OUTPUT} - name: Get latest tag hash id: hash - run: echo "hash=$(git rev-parse --short ${{ steps.tag.outputs.tag }} )" >> ${GITHUB_OUTPUT} + run: echo "hash=$(git rev-parse --short ${{ inputs.target_tag }} )" >> ${GITHUB_OUTPUT} - name: Create new tag shell: bash env: - TAG: ${{ steps.tag.outputs.tag }} HASH: ${{ steps.hash.outputs.hash }} run: | - skopeo copy docker://${QUAY_IMAGE_REPO}:main-${{ env.HASH }} docker://${QUAY_IMAGE_REPO}:${{ env.TAG }} + skopeo copy docker://${QUAY_IMAGE_REPO}:main-${{ env.HASH }} docker://${QUAY_IMAGE_REPO}:${{ inputs.target_tag }} - name: Create latest tag shell: bash env: From a67b16bd037ae0c2c63de8f86e82298423621df2 Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Thu, 27 Jul 2023 16:28:43 -0400 Subject: [PATCH 20/47] Use unauthenticated images for go-toolset. 
Signed-off-by: Humair Khan --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index d3428ff4f..2a2ae627f 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,5 +1,5 @@ # Build the manager binary -FROM registry.redhat.io/ubi8/go-toolset:1.18.9-8 as builder +FROM registry.access.redhat.com/ubi8/go-toolset:1.18.9-8 as builder ARG TARGETOS ARG TARGETARCH From 596430452ef1a8d7c7efb1a42962af81e09c4c61 Mon Sep 17 00:00:00 2001 From: Achyut Madhusudan Date: Fri, 28 Jul 2023 15:56:06 +0530 Subject: [PATCH 21/47] Added amadhusu as Reviewer Signed-off-by: Achyut Madhusudan --- OWNERS | 1 + 1 file changed, 1 insertion(+) diff --git a/OWNERS b/OWNERS index becd6a539..d865fea16 100644 --- a/OWNERS +++ b/OWNERS @@ -7,6 +7,7 @@ approvers: - HumairAK - rimolive reviewers: + - amadhusu - DharmitD - gmfrasca - gregsheremeta From 8c131cfaedaf8633bda8f0e537841b41a3e52b3e Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Thu, 27 Jul 2023 15:43:03 -0400 Subject: [PATCH 22/47] Add workflow for src image build and push to tag. Signed-off-by: Humair Khan --- .github/workflows/tag-and-build.yml | 283 ++++++++++++++++++++++++++++ 1 file changed, 283 insertions(+) create mode 100644 .github/workflows/tag-and-build.yml diff --git a/.github/workflows/tag-and-build.yml b/.github/workflows/tag-and-build.yml new file mode 100644 index 000000000..89b3e1cd3 --- /dev/null +++ b/.github/workflows/tag-and-build.yml @@ -0,0 +1,283 @@ +name: Build images from sources. +run-name: Build images from sources. 
+on: + workflow_dispatch: + inputs: + src_branch: + default: 'v1.0.x' + description: 'Source branch to build DSPO/DSP from' + required: true + target_tag: + default: 'vx.y.z' + description: 'Target Image Tag' + required: true + quay_org: + default: 'opendatahub' + description: 'Quay Organization' + required: true + dsp_org_repo: + default: 'opendatahub-io/data-science-pipelines' + description: 'DSP org/repo' + required: true +env: + IMAGE_DSPO: data-science-pipelines-operator + IMAGE_SERVER: ds-pipelines-api-server + IMAGE_UI: ds-pipelines-frontend + IMAGE_CACHE: ds-pipelines-cacheserver + IMAGE_PA: ds-pipelines-persistenceagent + IMAGE_SWF: ds-pipelines-scheduledworkflow + IMAGE_VC: ds-pipelines-viewercontroller + IMAGE_ARTIFACT: ds-pipelines-artifact-manager + IMAGE_MLMD_WRITER: ds-pipelines-metadata-writer + IMAGE_MLMD_ENVOY: ds-pipelines-metadata-envoy + IMAGE_MLMD_GRPC: ds-pipelines-metadata-grpc +jobs: + dspo-build: + runs-on: ubuntu-latest + permissions: + contents: read + steps: + - uses: actions/checkout@v3 + with: + ref: ${{ inputs.src_branch }} + + - name: Login to Quay.io + uses: redhat-actions/podman-login@v1 + with: + username: ${{ secrets.QUAY_ID }} + password: ${{ secrets.QUAY_TOKEN }} + registry: quay.io + + - name: Image Build and Push + run: | + make podman-build podman-push -e IMG=quay.io/${{ inputs.quay_org }}/${IMAGE_DSPO}:${{ inputs.target_tag }} + + server-build: + runs-on: ubuntu-latest + permissions: + contents: read + env: + IMAGE_ORG_BASE: quay.io/${{ inputs.quay_org }} + steps: + - uses: actions/checkout@v3 + with: + repository: ${{ inputs.dsp_org_repo }} + ref: ${{ inputs.src_branch }} + - name: Login to Quay.io + uses: redhat-actions/podman-login@v1 + with: + username: ${{ secrets.QUAY_ID }} + password: ${{ secrets.QUAY_TOKEN }} + registry: quay.io + - name: Buid APIServer + env: + API_SERVER: quay.io/${{ inputs.quay_org }}/${IMAGE_SERVER}:${{ inputs.target_tag }} + run: | + podman build . 
-f backend/Dockerfile -t ${{ env.API_SERVER }} && podman push ${{ env.API_SERVER }} + + ui-build: + runs-on: ubuntu-latest + permissions: + contents: read + env: + IMAGE_ORG_BASE: quay.io/${{ inputs.quay_org }} + steps: + - uses: actions/checkout@v3 + with: + repository: ${{ inputs.dsp_org_repo }} + ref: ${{ inputs.src_branch }} + - name: Login to Quay.io + uses: redhat-actions/podman-login@v1 + with: + username: ${{ secrets.QUAY_ID }} + password: ${{ secrets.QUAY_TOKEN }} + registry: quay.io + - name: Build image + env: + UI: quay.io/${{ inputs.quay_org }}/${IMAGE_UI}:${{ inputs.target_tag }} + run: | + podman build . -f frontend/Dockerfile -t ${{ env.UI }} && podman push ${{ env.UI }} + + cache-build: + runs-on: ubuntu-latest + permissions: + contents: read + env: + IMAGE_ORG_BASE: quay.io/${{ inputs.quay_org }} + steps: + - uses: actions/checkout@v3 + with: + repository: ${{ inputs.dsp_org_repo }} + ref: ${{ inputs.src_branch }} + - name: Login to Quay.io + uses: redhat-actions/podman-login@v1 + with: + username: ${{ secrets.QUAY_ID }} + password: ${{ secrets.QUAY_TOKEN }} + registry: quay.io + - name: Build image + env: + CACHE: quay.io/${{ inputs.quay_org }}/${IMAGE_CACHE}:${{ inputs.target_tag }} + run: | + podman build . -f backend/Dockerfile.cacheserver -t ${{ env.CACHE }} && podman push ${{ env.CACHE }} + + PA-build: + runs-on: ubuntu-latest + permissions: + contents: read + env: + IMAGE_ORG_BASE: quay.io/${{ inputs.quay_org }} + steps: + - uses: actions/checkout@v3 + with: + repository: ${{ inputs.dsp_org_repo }} + ref: ${{ inputs.src_branch }} + - name: Login to Quay.io + uses: redhat-actions/podman-login@v1 + with: + username: ${{ secrets.QUAY_ID }} + password: ${{ secrets.QUAY_TOKEN }} + registry: quay.io + - name: Build image + env: + PA: quay.io/${{ inputs.quay_org }}/${IMAGE_PA}:${{ inputs.target_tag }} + run: | + podman build . 
-f backend/Dockerfile.persistenceagent -t ${{ env.PA }} && podman push ${{ env.PA }} + + SWF-build: + runs-on: ubuntu-latest + permissions: + contents: read + env: + IMAGE_ORG_BASE: quay.io/${{ inputs.quay_org }} + steps: + - uses: actions/checkout@v3 + with: + repository: ${{ inputs.dsp_org_repo }} + ref: ${{ inputs.src_branch }} + - name: Login to Quay.io + uses: redhat-actions/podman-login@v1 + with: + username: ${{ secrets.QUAY_ID }} + password: ${{ secrets.QUAY_TOKEN }} + registry: quay.io + - name: Build image + env: + SWF: quay.io/${{ inputs.quay_org }}/${IMAGE_SWF}:${{ inputs.target_tag }} + run: | + podman build . -f backend/Dockerfile.scheduledworkflow -t ${{ env.SWF }} && podman push ${{ env.SWF }} + + VC-build: + runs-on: ubuntu-latest + permissions: + contents: read + env: + IMAGE_ORG_BASE: quay.io/${{ inputs.quay_org }} + steps: + - uses: actions/checkout@v3 + with: + repository: ${{ inputs.dsp_org_repo }} + ref: ${{ inputs.src_branch }} + - name: Login to Quay.io + uses: redhat-actions/podman-login@v1 + with: + username: ${{ secrets.QUAY_ID }} + password: ${{ secrets.QUAY_TOKEN }} + registry: quay.io + - name: Build image + env: + VC: quay.io/${{ inputs.quay_org }}/${IMAGE_VC}:${{ inputs.target_tag }} + run: | + podman build . -f backend/Dockerfile.viewercontroller -t ${{ env.VC }} && podman push ${{ env.VC }} + + ARTIFACT-build: + runs-on: ubuntu-latest + permissions: + contents: read + env: + IMAGE_ORG_BASE: quay.io/${{ inputs.quay_org }} + steps: + - uses: actions/checkout@v3 + with: + repository: ${{ inputs.dsp_org_repo }} + ref: ${{ inputs.src_branch }} + - name: Login to Quay.io + uses: redhat-actions/podman-login@v1 + with: + username: ${{ secrets.QUAY_ID }} + password: ${{ secrets.QUAY_TOKEN }} + registry: quay.io + - name: Build image + env: + ARTIFACT: quay.io/${{ inputs.quay_org }}/${IMAGE_ARTIFACT}:${{ inputs.target_tag }} + run: | + podman build . 
-f backend/artifact_manager/Dockerfile -t ${{ env.ARTIFACT }} && podman push ${{ env.ARTIFACT }} + + MLMD_WRITER-build: + runs-on: ubuntu-latest + permissions: + contents: read + env: + IMAGE_ORG_BASE: quay.io/${{ inputs.quay_org }} + steps: + - uses: actions/checkout@v3 + with: + repository: ${{ inputs.dsp_org_repo }} + ref: ${{ inputs.src_branch }} + - name: Login to Quay.io + uses: redhat-actions/podman-login@v1 + with: + username: ${{ secrets.QUAY_ID }} + password: ${{ secrets.QUAY_TOKEN }} + registry: quay.io + - name: Build image + env: + MLMD_WRITER: quay.io/${{ inputs.quay_org }}/${IMAGE_MLMD_WRITER}:${{ inputs.target_tag }} + run: | + podman build . -f backend/metadata_writer/Dockerfile -t ${{ env.MLMD_WRITER }} && podman push ${{ env.MLMD_WRITER }} + + MLMD_ENVOY-build: + runs-on: ubuntu-latest + permissions: + contents: read + env: + IMAGE_ORG_BASE: quay.io/${{ inputs.quay_org }} + steps: + - uses: actions/checkout@v3 + with: + repository: ${{ inputs.dsp_org_repo }} + ref: ${{ inputs.src_branch }} + - name: Login to Quay.io + uses: redhat-actions/podman-login@v1 + with: + username: ${{ secrets.QUAY_ID }} + password: ${{ secrets.QUAY_TOKEN }} + registry: quay.io + - name: Build image + env: + MLMD_ENVOY: quay.io/${{ inputs.quay_org }}/${IMAGE_MLMD_ENVOY}:${{ inputs.target_tag }} + run: | + podman build . 
-f third-party/metadata_envoy/Dockerfile -t ${{ env.MLMD_ENVOY }} && podman push ${{ env.MLMD_ENVOY }} + + MLMD_GRPC-build: + runs-on: ubuntu-latest + permissions: + contents: read + env: + IMAGE_ORG_BASE: quay.io/${{ inputs.quay_org }} + steps: + - uses: actions/checkout@v3 + with: + repository: ${{ inputs.dsp_org_repo }} + ref: ${{ inputs.src_branch }} + - name: Login to Quay.io + uses: redhat-actions/podman-login@v1 + with: + username: ${{ secrets.QUAY_ID }} + password: ${{ secrets.QUAY_TOKEN }} + registry: quay.io + - name: Build image + env: + MLMD_GRPC: quay.io/${{ inputs.quay_org }}/${IMAGE_MLMD_GRPC}:${{ inputs.target_tag }} + run: | + podman build . -f third-party/ml-metadata/Dockerfile -t ${{ env.MLMD_GRPC }} && podman push ${{ env.MLMD_GRPC }} From 3fdfafe9746d658c1dae8ed9ee375977b5a749db Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Fri, 28 Jul 2023 15:07:56 -0400 Subject: [PATCH 23/47] Check if image tag exists before pushing in build action. Signed-off-by: Humair Khan --- .github/workflows/tag-and-build.yml | 48 ++++++++++------------------- 1 file changed, 16 insertions(+), 32 deletions(-) diff --git a/.github/workflows/tag-and-build.yml b/.github/workflows/tag-and-build.yml index 89b3e1cd3..4cb46507a 100644 --- a/.github/workflows/tag-and-build.yml +++ b/.github/workflows/tag-and-build.yml @@ -56,8 +56,6 @@ jobs: runs-on: ubuntu-latest permissions: contents: read - env: - IMAGE_ORG_BASE: quay.io/${{ inputs.quay_org }} steps: - uses: actions/checkout@v3 with: @@ -69,18 +67,19 @@ jobs: username: ${{ secrets.QUAY_ID }} password: ${{ secrets.QUAY_TOKEN }} registry: quay.io - - name: Buid APIServer + - name: Buid image env: - API_SERVER: quay.io/${{ inputs.quay_org }}/${IMAGE_SERVER}:${{ inputs.target_tag }} + IMAGE: quay.io/${{ inputs.quay_org }}/${IMAGE_SERVER}:${{ inputs.target_tag }} + DOCKERFILE: backend/Dockerfile run: | - podman build . 
-f backend/Dockerfile -t ${{ env.API_SERVER }} && podman push ${{ env.API_SERVER }} + RESULT=$(podman image exists ${{ env.IMAGE }}) + if [ $RESULT -eq 1 ]; then echo "Image already exists" && exit 1; fi + podman build . -f ${{ env.DOCKERFILE }} -t ${{ env.IMAGE }} && podman push ${{ env.IMAGE }} ui-build: runs-on: ubuntu-latest permissions: contents: read - env: - IMAGE_ORG_BASE: quay.io/${{ inputs.quay_org }} steps: - uses: actions/checkout@v3 with: @@ -94,7 +93,8 @@ jobs: registry: quay.io - name: Build image env: - UI: quay.io/${{ inputs.quay_org }}/${IMAGE_UI}:${{ inputs.target_tag }} + IMAGE: quay.io/${{ inputs.quay_org }}/${IMAGE_UI}:${{ inputs.target_tag }} + DOCKERFILE: frontend/Dockerfile run: | podman build . -f frontend/Dockerfile -t ${{ env.UI }} && podman push ${{ env.UI }} @@ -102,8 +102,6 @@ jobs: runs-on: ubuntu-latest permissions: contents: read - env: - IMAGE_ORG_BASE: quay.io/${{ inputs.quay_org }} steps: - uses: actions/checkout@v3 with: @@ -117,7 +115,7 @@ jobs: registry: quay.io - name: Build image env: - CACHE: quay.io/${{ inputs.quay_org }}/${IMAGE_CACHE}:${{ inputs.target_tag }} + IMAGE: quay.io/${{ inputs.quay_org }}/${IMAGE_CACHE}:${{ inputs.target_tag }} run: | podman build . -f backend/Dockerfile.cacheserver -t ${{ env.CACHE }} && podman push ${{ env.CACHE }} @@ -125,8 +123,6 @@ jobs: runs-on: ubuntu-latest permissions: contents: read - env: - IMAGE_ORG_BASE: quay.io/${{ inputs.quay_org }} steps: - uses: actions/checkout@v3 with: @@ -140,7 +136,7 @@ jobs: registry: quay.io - name: Build image env: - PA: quay.io/${{ inputs.quay_org }}/${IMAGE_PA}:${{ inputs.target_tag }} + IMAGE: quay.io/${{ inputs.quay_org }}/${IMAGE_PA}:${{ inputs.target_tag }} run: | podman build . 
-f backend/Dockerfile.persistenceagent -t ${{ env.PA }} && podman push ${{ env.PA }} @@ -148,8 +144,6 @@ jobs: runs-on: ubuntu-latest permissions: contents: read - env: - IMAGE_ORG_BASE: quay.io/${{ inputs.quay_org }} steps: - uses: actions/checkout@v3 with: @@ -163,7 +157,7 @@ jobs: registry: quay.io - name: Build image env: - SWF: quay.io/${{ inputs.quay_org }}/${IMAGE_SWF}:${{ inputs.target_tag }} + IMAGE: quay.io/${{ inputs.quay_org }}/${IMAGE_SWF}:${{ inputs.target_tag }} run: | podman build . -f backend/Dockerfile.scheduledworkflow -t ${{ env.SWF }} && podman push ${{ env.SWF }} @@ -171,8 +165,6 @@ jobs: runs-on: ubuntu-latest permissions: contents: read - env: - IMAGE_ORG_BASE: quay.io/${{ inputs.quay_org }} steps: - uses: actions/checkout@v3 with: @@ -186,7 +178,7 @@ jobs: registry: quay.io - name: Build image env: - VC: quay.io/${{ inputs.quay_org }}/${IMAGE_VC}:${{ inputs.target_tag }} + IMAGE: quay.io/${{ inputs.quay_org }}/${IMAGE_VC}:${{ inputs.target_tag }} run: | podman build . -f backend/Dockerfile.viewercontroller -t ${{ env.VC }} && podman push ${{ env.VC }} @@ -194,8 +186,6 @@ jobs: runs-on: ubuntu-latest permissions: contents: read - env: - IMAGE_ORG_BASE: quay.io/${{ inputs.quay_org }} steps: - uses: actions/checkout@v3 with: @@ -209,7 +199,7 @@ jobs: registry: quay.io - name: Build image env: - ARTIFACT: quay.io/${{ inputs.quay_org }}/${IMAGE_ARTIFACT}:${{ inputs.target_tag }} + IMAGE: quay.io/${{ inputs.quay_org }}/${IMAGE_ARTIFACT}:${{ inputs.target_tag }} run: | podman build . 
-f backend/artifact_manager/Dockerfile -t ${{ env.ARTIFACT }} && podman push ${{ env.ARTIFACT }} @@ -217,8 +207,6 @@ jobs: runs-on: ubuntu-latest permissions: contents: read - env: - IMAGE_ORG_BASE: quay.io/${{ inputs.quay_org }} steps: - uses: actions/checkout@v3 with: @@ -232,7 +220,7 @@ jobs: registry: quay.io - name: Build image env: - MLMD_WRITER: quay.io/${{ inputs.quay_org }}/${IMAGE_MLMD_WRITER}:${{ inputs.target_tag }} + IMAGE: quay.io/${{ inputs.quay_org }}/${IMAGE_MLMD_WRITER}:${{ inputs.target_tag }} run: | podman build . -f backend/metadata_writer/Dockerfile -t ${{ env.MLMD_WRITER }} && podman push ${{ env.MLMD_WRITER }} @@ -240,8 +228,6 @@ jobs: runs-on: ubuntu-latest permissions: contents: read - env: - IMAGE_ORG_BASE: quay.io/${{ inputs.quay_org }} steps: - uses: actions/checkout@v3 with: @@ -255,7 +241,7 @@ jobs: registry: quay.io - name: Build image env: - MLMD_ENVOY: quay.io/${{ inputs.quay_org }}/${IMAGE_MLMD_ENVOY}:${{ inputs.target_tag }} + IMAGE: quay.io/${{ inputs.quay_org }}/${IMAGE_MLMD_ENVOY}:${{ inputs.target_tag }} run: | podman build . -f third-party/metadata_envoy/Dockerfile -t ${{ env.MLMD_ENVOY }} && podman push ${{ env.MLMD_ENVOY }} @@ -263,8 +249,6 @@ jobs: runs-on: ubuntu-latest permissions: contents: read - env: - IMAGE_ORG_BASE: quay.io/${{ inputs.quay_org }} steps: - uses: actions/checkout@v3 with: @@ -278,6 +262,6 @@ jobs: registry: quay.io - name: Build image env: - MLMD_GRPC: quay.io/${{ inputs.quay_org }}/${IMAGE_MLMD_GRPC}:${{ inputs.target_tag }} + IMAGE: quay.io/${{ inputs.quay_org }}/${IMAGE_MLMD_GRPC}:${{ inputs.target_tag }} run: | podman build . -f third-party/ml-metadata/Dockerfile -t ${{ env.MLMD_GRPC }} && podman push ${{ env.MLMD_GRPC }} From 09f3d02261e463c2de51741e9315803b206f7674 Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Fri, 28 Jul 2023 16:45:31 -0400 Subject: [PATCH 24/47] Add image exist checks to tag builds. 
Signed-off-by: Humair Khan --- .github/actions/build/action.yaml | 63 ++++++++ .github/workflows/tag-and-build.yml | 217 ++++++++-------------------- 2 files changed, 123 insertions(+), 157 deletions(-) create mode 100644 .github/actions/build/action.yaml diff --git a/.github/actions/build/action.yaml b/.github/actions/build/action.yaml new file mode 100644 index 000000000..5086dc3de --- /dev/null +++ b/.github/actions/build/action.yaml @@ -0,0 +1,63 @@ +name: "Set up KinD" +description: "Step to start and configure KinD cluster" +inputs: + IMAGE_REPO: + description: "Quay image repo name." + required: true + DOCKERFILE: + description: "Path to Dockerfile." + required: true + GH_REPO: + description: "GH org/repo that contains the dockerfile to source." + required: true + OVERWRITE: + default: "false" + description: "GH org/repo that contains the dockerfile to source." + required: true +runs: + using: "composite" + steps: + - uses: actions/checkout@v3 + with: + repository: ${{ inputs.GH_REPO }} + ref: ${{ env.SOURCE_BRANCH }} + path: build + - name: Login to Quay.io + uses: redhat-actions/podman-login@v1 + with: + username: ${{ env.QUAY_ID }} + password: ${{ env.QUAY_TOKEN }} + registry: quay.io + # Tags in quay stick around as objects in api, when deleted quay adds "end_ts" time stamp on the tag. + # To determine a tag is deleted, we need to check for the presence of this tag. + # Also note there can be multiple tags created/deleted, thus we have 4 cases: + # Case 1: Only 1 tag was ever created "tags: [{name:..},]" -- no end_ts field + # Case 2: No tag was ever created "tags: []" + # Case 3: >1 tags were created, but they were all deleted at some point [{name:..., end_ts,..},....] -- note they all have "end_ts" field. + # Case 4: >1 tags were created, but the most recent one was never deleted (same as case 3, but the latest tag does not have "end_ts". 
+ - name: Check if Image already exists + shell: bash + if: ${{ inputs.OVERWRITE }} == "false" + env: + IMAGE: quay.io/${{ env.QUAY_ORG }}/${{ inputs.IMAGE_REPO }}:${{ env.TARGET_IMAGE_TAG }} + run: | + tags=$(curl --request GET 'https://quay.io/api/v1/repository/${{ env.QUAY_ORG }}/${{ inputs.IMAGE_REPO }}/tag/?specificTag=${{ env.TARGET_IMAGE_TAG }}') + echo $tags | yq .tags - | yq 'sort_by(.start_ts) | reverse' - -P | yq .[0].end_ts - + latest_tag_has_end_ts=$(echo $tags | yq .tags - | yq 'sort_by(.start_ts) | reverse' - -P | yq .[0].end_ts -) + echo $latest_tag_has_end_ts + notempty=$(echo ${tags} | yq .tags - | yq any) + + # Image only exists if there is a tag that does not have "end_ts" (i.e. it is still present). + if [[ "$notempty" == "true" && $latest_tag_has_end_ts == "null" ]]; then + echo "::error::The image ${{ env.IMAGE }} already exists" + exit 1 + else + echo "Image does not exist...proceeding with build & push." + fi + - name: Build image + shell: bash + working-directory: build + env: + IMAGE: quay.io/${{ env.QUAY_ORG }}/${{ inputs.IMAGE_REPO }}:${{ env.TARGET_IMAGE_TAG }} + run: | + podman build . 
-f ${{ inputs.DOCKERFILE }} -t ${{ env.IMAGE }} && podman push ${{ env.IMAGE }} diff --git a/.github/workflows/tag-and-build.yml b/.github/workflows/tag-and-build.yml index 4cb46507a..0e7d9dede 100644 --- a/.github/workflows/tag-and-build.yml +++ b/.github/workflows/tag-and-build.yml @@ -20,37 +20,34 @@ on: description: 'DSP org/repo' required: true env: - IMAGE_DSPO: data-science-pipelines-operator - IMAGE_SERVER: ds-pipelines-api-server - IMAGE_UI: ds-pipelines-frontend - IMAGE_CACHE: ds-pipelines-cacheserver - IMAGE_PA: ds-pipelines-persistenceagent - IMAGE_SWF: ds-pipelines-scheduledworkflow - IMAGE_VC: ds-pipelines-viewercontroller - IMAGE_ARTIFACT: ds-pipelines-artifact-manager - IMAGE_MLMD_WRITER: ds-pipelines-metadata-writer - IMAGE_MLMD_ENVOY: ds-pipelines-metadata-envoy - IMAGE_MLMD_GRPC: ds-pipelines-metadata-grpc + IMAGE_REPO_DSPO: data-science-pipelines-operator + IMAGE_REPO_SERVER: ds-pipelines-api-server + IMAGE_REPO_UI: ds-pipelines-frontend + IMAGE_REPO_CACHE: ds-pipelines-cacheserver + IMAGE_REPO_PA: ds-pipelines-persistenceagent + IMAGE_REPO_SWF: ds-pipelines-scheduledworkflow + IMAGE_REPO_VC: ds-pipelines-viewercontroller + IMAGE_REPO_ARTIFACT: ds-pipelines-artifact-manager + IMAGE_REPO_MLMD_WRITER: ds-pipelines-metadata-writer + IMAGE_REPO_MLMD_ENVOY: ds-pipelines-metadata-envoy + IMAGE_REPO_MLMD_GRPC: ds-pipelines-metadata-grpc + SOURCE_BRANCH: ${{ inputs.src_branch }} + QUAY_ORG: ${{ inputs.quay_org }} + QUAY_ID: ${{ secrets.QUAY_ID }} + QUAY_TOKEN: ${{ secrets.QUAY_TOKEN }} + TARGET_IMAGE_TAG: ${{ inputs.target_tag }} jobs: dspo-build: runs-on: ubuntu-latest permissions: contents: read steps: - - uses: actions/checkout@v3 - with: - ref: ${{ inputs.src_branch }} - - - name: Login to Quay.io - uses: redhat-actions/podman-login@v1 - with: - username: ${{ secrets.QUAY_ID }} - password: ${{ secrets.QUAY_TOKEN }} - registry: quay.io - - - name: Image Build and Push - run: | - make podman-build podman-push -e IMG=quay.io/${{ inputs.quay_org 
}}/${IMAGE_DSPO}:${{ inputs.target_tag }} + - uses: actions/checkout@v3 + - uses: ./.github/actions/build + with: + IMAGE_REPO: ${{ env.IMAGE_REPO_DSPO }} + DOCKERFILE: Dockerfile + GH_REPO: ${{ github.repository }} server-build: runs-on: ubuntu-latest @@ -58,23 +55,11 @@ jobs: contents: read steps: - uses: actions/checkout@v3 + - uses: ./.github/actions/build with: - repository: ${{ inputs.dsp_org_repo }} - ref: ${{ inputs.src_branch }} - - name: Login to Quay.io - uses: redhat-actions/podman-login@v1 - with: - username: ${{ secrets.QUAY_ID }} - password: ${{ secrets.QUAY_TOKEN }} - registry: quay.io - - name: Buid image - env: - IMAGE: quay.io/${{ inputs.quay_org }}/${IMAGE_SERVER}:${{ inputs.target_tag }} + IMAGE_REPO: ${{ env.IMAGE_REPO_SERVER }} DOCKERFILE: backend/Dockerfile - run: | - RESULT=$(podman image exists ${{ env.IMAGE }}) - if [ $RESULT -eq 1 ]; then echo "Image already exists" && exit 1; fi - podman build . -f ${{ env.DOCKERFILE }} -t ${{ env.IMAGE }} && podman push ${{ env.IMAGE }} + GH_REPO: ${{ inputs.dsp_org_repo }} ui-build: runs-on: ubuntu-latest @@ -82,21 +67,11 @@ jobs: contents: read steps: - uses: actions/checkout@v3 + - uses: ./.github/actions/build with: - repository: ${{ inputs.dsp_org_repo }} - ref: ${{ inputs.src_branch }} - - name: Login to Quay.io - uses: redhat-actions/podman-login@v1 - with: - username: ${{ secrets.QUAY_ID }} - password: ${{ secrets.QUAY_TOKEN }} - registry: quay.io - - name: Build image - env: - IMAGE: quay.io/${{ inputs.quay_org }}/${IMAGE_UI}:${{ inputs.target_tag }} + IMAGE_REPO: ${{ env.IMAGE_REPO_UI }} DOCKERFILE: frontend/Dockerfile - run: | - podman build . 
-f frontend/Dockerfile -t ${{ env.UI }} && podman push ${{ env.UI }} + GH_REPO: ${{ inputs.dsp_org_repo }} cache-build: runs-on: ubuntu-latest @@ -104,20 +79,11 @@ jobs: contents: read steps: - uses: actions/checkout@v3 + - uses: ./.github/actions/build with: - repository: ${{ inputs.dsp_org_repo }} - ref: ${{ inputs.src_branch }} - - name: Login to Quay.io - uses: redhat-actions/podman-login@v1 - with: - username: ${{ secrets.QUAY_ID }} - password: ${{ secrets.QUAY_TOKEN }} - registry: quay.io - - name: Build image - env: - IMAGE: quay.io/${{ inputs.quay_org }}/${IMAGE_CACHE}:${{ inputs.target_tag }} - run: | - podman build . -f backend/Dockerfile.cacheserver -t ${{ env.CACHE }} && podman push ${{ env.CACHE }} + IMAGE_REPO: ${{ env.IMAGE_REPO_CACHE }} + DOCKERFILE: backend/Dockerfile.cacheserver + GH_REPO: ${{ inputs.dsp_org_repo }} PA-build: runs-on: ubuntu-latest @@ -125,20 +91,11 @@ jobs: contents: read steps: - uses: actions/checkout@v3 + - uses: ./.github/actions/build with: - repository: ${{ inputs.dsp_org_repo }} - ref: ${{ inputs.src_branch }} - - name: Login to Quay.io - uses: redhat-actions/podman-login@v1 - with: - username: ${{ secrets.QUAY_ID }} - password: ${{ secrets.QUAY_TOKEN }} - registry: quay.io - - name: Build image - env: - IMAGE: quay.io/${{ inputs.quay_org }}/${IMAGE_PA}:${{ inputs.target_tag }} - run: | - podman build . 
-f backend/Dockerfile.persistenceagent -t ${{ env.PA }} && podman push ${{ env.PA }} + IMAGE_REPO: ${{ env.IMAGE_REPO_PA }} + DOCKERFILE: backend/Dockerfile.persistenceagent + GH_REPO: ${{ inputs.dsp_org_repo }} SWF-build: runs-on: ubuntu-latest @@ -146,20 +103,11 @@ jobs: contents: read steps: - uses: actions/checkout@v3 + - uses: ./.github/actions/build with: - repository: ${{ inputs.dsp_org_repo }} - ref: ${{ inputs.src_branch }} - - name: Login to Quay.io - uses: redhat-actions/podman-login@v1 - with: - username: ${{ secrets.QUAY_ID }} - password: ${{ secrets.QUAY_TOKEN }} - registry: quay.io - - name: Build image - env: - IMAGE: quay.io/${{ inputs.quay_org }}/${IMAGE_SWF}:${{ inputs.target_tag }} - run: | - podman build . -f backend/Dockerfile.scheduledworkflow -t ${{ env.SWF }} && podman push ${{ env.SWF }} + IMAGE_REPO: ${{ env.IMAGE_REPO_SWF }} + DOCKERFILE: backend/Dockerfile.scheduledworkflow + GH_REPO: ${{ inputs.dsp_org_repo }} VC-build: runs-on: ubuntu-latest @@ -167,20 +115,11 @@ jobs: contents: read steps: - uses: actions/checkout@v3 + - uses: ./.github/actions/build with: - repository: ${{ inputs.dsp_org_repo }} - ref: ${{ inputs.src_branch }} - - name: Login to Quay.io - uses: redhat-actions/podman-login@v1 - with: - username: ${{ secrets.QUAY_ID }} - password: ${{ secrets.QUAY_TOKEN }} - registry: quay.io - - name: Build image - env: - IMAGE: quay.io/${{ inputs.quay_org }}/${IMAGE_VC}:${{ inputs.target_tag }} - run: | - podman build . 
-f backend/Dockerfile.viewercontroller -t ${{ env.VC }} && podman push ${{ env.VC }} + IMAGE_REPO: ${{ env.IMAGE_REPO_VC }} + DOCKERFILE: backend/Dockerfile.viewercontroller + GH_REPO: ${{ inputs.dsp_org_repo }} ARTIFACT-build: runs-on: ubuntu-latest @@ -188,20 +127,11 @@ jobs: contents: read steps: - uses: actions/checkout@v3 + - uses: ./.github/actions/build with: - repository: ${{ inputs.dsp_org_repo }} - ref: ${{ inputs.src_branch }} - - name: Login to Quay.io - uses: redhat-actions/podman-login@v1 - with: - username: ${{ secrets.QUAY_ID }} - password: ${{ secrets.QUAY_TOKEN }} - registry: quay.io - - name: Build image - env: - IMAGE: quay.io/${{ inputs.quay_org }}/${IMAGE_ARTIFACT}:${{ inputs.target_tag }} - run: | - podman build . -f backend/artifact_manager/Dockerfile -t ${{ env.ARTIFACT }} && podman push ${{ env.ARTIFACT }} + IMAGE_REPO: ${{ env.IMAGE_REPO_ARTIFACT }} + DOCKERFILE: backend/artifact_manager/Dockerfile + GH_REPO: ${{ inputs.dsp_org_repo }} MLMD_WRITER-build: runs-on: ubuntu-latest @@ -209,20 +139,11 @@ jobs: contents: read steps: - uses: actions/checkout@v3 + - uses: ./.github/actions/build with: - repository: ${{ inputs.dsp_org_repo }} - ref: ${{ inputs.src_branch }} - - name: Login to Quay.io - uses: redhat-actions/podman-login@v1 - with: - username: ${{ secrets.QUAY_ID }} - password: ${{ secrets.QUAY_TOKEN }} - registry: quay.io - - name: Build image - env: - IMAGE: quay.io/${{ inputs.quay_org }}/${IMAGE_MLMD_WRITER}:${{ inputs.target_tag }} - run: | - podman build . 
-f backend/metadata_writer/Dockerfile -t ${{ env.MLMD_WRITER }} && podman push ${{ env.MLMD_WRITER }} + IMAGE_REPO: ${{ env.IMAGE_REPO_MLMD_WRITER }} + DOCKERFILE: backend/metadata_writer/Dockerfile + GH_REPO: ${{ inputs.dsp_org_repo }} MLMD_ENVOY-build: runs-on: ubuntu-latest @@ -230,20 +151,11 @@ jobs: contents: read steps: - uses: actions/checkout@v3 + - uses: ./.github/actions/build with: - repository: ${{ inputs.dsp_org_repo }} - ref: ${{ inputs.src_branch }} - - name: Login to Quay.io - uses: redhat-actions/podman-login@v1 - with: - username: ${{ secrets.QUAY_ID }} - password: ${{ secrets.QUAY_TOKEN }} - registry: quay.io - - name: Build image - env: - IMAGE: quay.io/${{ inputs.quay_org }}/${IMAGE_MLMD_ENVOY}:${{ inputs.target_tag }} - run: | - podman build . -f third-party/metadata_envoy/Dockerfile -t ${{ env.MLMD_ENVOY }} && podman push ${{ env.MLMD_ENVOY }} + IMAGE_REPO: ${{ env.IMAGE_REPO_MLMD_ENVOY }} + DOCKERFILE: third-party/metadata_envoy/Dockerfile + GH_REPO: ${{ inputs.dsp_org_repo }} MLMD_GRPC-build: runs-on: ubuntu-latest @@ -251,17 +163,8 @@ jobs: contents: read steps: - uses: actions/checkout@v3 + - uses: ./.github/actions/build with: - repository: ${{ inputs.dsp_org_repo }} - ref: ${{ inputs.src_branch }} - - name: Login to Quay.io - uses: redhat-actions/podman-login@v1 - with: - username: ${{ secrets.QUAY_ID }} - password: ${{ secrets.QUAY_TOKEN }} - registry: quay.io - - name: Build image - env: - IMAGE: quay.io/${{ inputs.quay_org }}/${IMAGE_MLMD_GRPC}:${{ inputs.target_tag }} - run: | - podman build . -f third-party/ml-metadata/Dockerfile -t ${{ env.MLMD_GRPC }} && podman push ${{ env.MLMD_GRPC }} + IMAGE_REPO: ${{ env.IMAGE_REPO_MLMD_GRPC }} + DOCKERFILE: third-party/ml-metadata/Dockerfile + GH_REPO: ${{ inputs.dsp_org_repo }} From ffab0b36d8f6794d740466fc5acc6cf8d34cb420 Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Fri, 28 Jul 2023 16:45:31 -0400 Subject: [PATCH 25/47] Add image exist checks to tag builds. 
Signed-off-by: Humair Khan --- .github/workflows/image-check.yaml | 12 ++++++------ .github/workflows/precommit.yml | 12 ++++++------ 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/.github/workflows/image-check.yaml b/.github/workflows/image-check.yaml index 668bc3589..521bdf94a 100644 --- a/.github/workflows/image-check.yaml +++ b/.github/workflows/image-check.yaml @@ -1,11 +1,11 @@ name: Image-check on: - push: - branches: - - '**' - tags-ignore: - - 'v*' - pull_request: +# push: +# branches: +# - '**' +# tags-ignore: +# - 'v*' +# pull_request: workflow_dispatch: jobs: diff --git a/.github/workflows/precommit.yml b/.github/workflows/precommit.yml index ac0bbb97e..5f6b9c9db 100644 --- a/.github/workflows/precommit.yml +++ b/.github/workflows/precommit.yml @@ -1,11 +1,11 @@ name: Pre-commit on: - push: - branches: - - '**' - tags-ignore: - - 'v*' - pull_request: +# push: +# branches: +# - '**' +# tags-ignore: +# - 'v*' +# pull_request: workflow_dispatch: jobs: From 58e78243a53667e75967d591793ae44a623c1fc3 Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Wed, 2 Aug 2023 10:28:38 -0400 Subject: [PATCH 26/47] Remove unneeded gh actions. 
Signed-off-by: Humair Khan --- .github/workflows/pr-merged-image-delete.yml | 52 -------------------- .github/workflows/tag-release-quay.yml | 48 ------------------ 2 files changed, 100 deletions(-) delete mode 100644 .github/workflows/pr-merged-image-delete.yml delete mode 100644 .github/workflows/tag-release-quay.yml diff --git a/.github/workflows/pr-merged-image-delete.yml b/.github/workflows/pr-merged-image-delete.yml deleted file mode 100644 index a90c2fb11..000000000 --- a/.github/workflows/pr-merged-image-delete.yml +++ /dev/null @@ -1,52 +0,0 @@ -name: Delete quay image of PR once merged -on: - push: - branches: - - 'main' - -permissions: - pull-requests: read -env: - QUAY_IMAGE_REPO: ${{ secrets.QUAY_IMAGE_REPO }} -jobs: - delete-pr-quay-image: - runs-on: ubuntu-latest - steps: - - name: Git checkout - uses: actions/checkout@v3 - with: - fetch-depth: '0' - - name: Install skopeo - shell: bash - run: | - sudo apt-get -y update - sudo apt-get -y install skopeo - - name: Get Pull Request Number - uses: actions/github-script@v6 - id: get_issue_number - with: - script: | - if (context.issue.number) { - // Return issue number if present - return context.issue.number; - } else { - // Otherwise return issue number from commit - return ( - await github.rest.repos.listPullRequestsAssociatedWithCommit({ - commit_sha: context.sha, - owner: context.repo.owner, - repo: context.repo.repo, - }) - ).data[0].number; - } - result-encoding: string - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - name: Delete PR image - shell: bash - env: - PR: ${{steps.get_issue_number.outputs.result}} - QUAY_ROBOT_USERNAME: ${{ secrets.QUAY_ROBOT_USERNAME }} - QUAY_ROBOT_TOKEN: ${{ secrets.QUAY_ROBOT_TOKEN }} - run: | - skopeo delete --creds ${QUAY_ROBOT_USERNAME}:${QUAY_ROBOT_TOKEN} docker://${QUAY_IMAGE_REPO}:pr-${{ env.PR }} diff --git a/.github/workflows/tag-release-quay.yml b/.github/workflows/tag-release-quay.yml deleted file mode 100644 index 385de10ad..000000000 --- 
a/.github/workflows/tag-release-quay.yml +++ /dev/null @@ -1,48 +0,0 @@ -name: Image push per Github Tag - -run-name: Creating new tag in quay based on latest pushed tag in Github. -on: - workflow_dispatch: - inputs: - target_tag: - default: 'vx.y.z' - description: 'DSPO Tag' - required: true -env: - QUAY_IMAGE_REPO: ${{ secrets.QUAY_IMAGE_REPO }} -jobs: - copy-tag-to-quay: - runs-on: ubuntu-latest - if: github.repository == 'opendatahub-io/data-science-pipelines-operator' - steps: - - name: Git checkout - uses: actions/checkout@v3 - with: - fetch-depth: '0' - - name: Install skopeo - shell: bash - run: | - sudo apt-get -y update - sudo apt-get -y install skopeo - - name: Login to quay.io - shell: bash - env: - QUAY_ROBOT_USERNAME: ${{ secrets.QUAY_ROBOT_USERNAME }} - QUAY_ROBOT_TOKEN: ${{ secrets.QUAY_ROBOT_TOKEN }} - run: | - skopeo login quay.io -u ${QUAY_ROBOT_USERNAME} -p ${QUAY_ROBOT_TOKEN} - - name: Get latest tag hash - id: hash - run: echo "hash=$(git rev-parse --short ${{ inputs.target_tag }} )" >> ${GITHUB_OUTPUT} - - name: Create new tag - shell: bash - env: - HASH: ${{ steps.hash.outputs.hash }} - run: | - skopeo copy docker://${QUAY_IMAGE_REPO}:main-${{ env.HASH }} docker://${QUAY_IMAGE_REPO}:${{ inputs.target_tag }} - - name: Create latest tag - shell: bash - env: - HASH: ${{ steps.hash.outputs.hash }} - run: | - skopeo copy docker://${QUAY_IMAGE_REPO}:main-${{ env.HASH }} docker://${QUAY_IMAGE_REPO}:latest From fa6cc96d68273d6cd10e690184bcb9af6f2b294c Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Wed, 2 Aug 2023 10:29:21 -0400 Subject: [PATCH 27/47] Adjust event triggers for ci actions. 
Signed-off-by: Humair Khan --- .github/workflows/image-check.yaml | 9 +-------- .github/workflows/precommit.yml | 9 +-------- 2 files changed, 2 insertions(+), 16 deletions(-) diff --git a/.github/workflows/image-check.yaml b/.github/workflows/image-check.yaml index 521bdf94a..961562614 100644 --- a/.github/workflows/image-check.yaml +++ b/.github/workflows/image-check.yaml @@ -1,13 +1,6 @@ name: Image-check on: -# push: -# branches: -# - '**' -# tags-ignore: -# - 'v*' -# pull_request: - workflow_dispatch: - + pull_request: jobs: test: runs-on: ubuntu-latest diff --git a/.github/workflows/precommit.yml b/.github/workflows/precommit.yml index 5f6b9c9db..bb192425f 100644 --- a/.github/workflows/precommit.yml +++ b/.github/workflows/precommit.yml @@ -1,13 +1,6 @@ name: Pre-commit on: -# push: -# branches: -# - '**' -# tags-ignore: -# - 'v*' -# pull_request: - workflow_dispatch: - + pull_request: jobs: precommit: runs-on: ubuntu-latest From b906d2750765e9118fded379f15cd06a194180b1 Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Wed, 2 Aug 2023 10:30:06 -0400 Subject: [PATCH 28/47] Fix overwrite condition check for composite action. Signed-off-by: Humair Khan --- .github/actions/build/action.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/actions/build/action.yaml b/.github/actions/build/action.yaml index 5086dc3de..66a955439 100644 --- a/.github/actions/build/action.yaml +++ b/.github/actions/build/action.yaml @@ -37,14 +37,14 @@ runs: # Case 4: >1 tags were created, but the most recent one was never deleted (same as case 3, but the latest tag does not have "end_ts". 
- name: Check if Image already exists shell: bash - if: ${{ inputs.OVERWRITE }} == "false" + if: inputs.OVERWRITE == 'false' env: IMAGE: quay.io/${{ env.QUAY_ORG }}/${{ inputs.IMAGE_REPO }}:${{ env.TARGET_IMAGE_TAG }} run: | + echo ${{ inputs.OVERWRITE }} + tags=$(curl --request GET 'https://quay.io/api/v1/repository/${{ env.QUAY_ORG }}/${{ inputs.IMAGE_REPO }}/tag/?specificTag=${{ env.TARGET_IMAGE_TAG }}') - echo $tags | yq .tags - | yq 'sort_by(.start_ts) | reverse' - -P | yq .[0].end_ts - latest_tag_has_end_ts=$(echo $tags | yq .tags - | yq 'sort_by(.start_ts) | reverse' - -P | yq .[0].end_ts -) - echo $latest_tag_has_end_ts notempty=$(echo ${tags} | yq .tags - | yq any) # Image only exists if there is a tag that does not have "end_ts" (i.e. it is still present). From ec8ac48d5e7b69877fba52010cc34c2081c69419 Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Wed, 2 Aug 2023 10:30:34 -0400 Subject: [PATCH 29/47] Rename build tag action. Signed-off-by: Humair Khan --- .github/workflows/{tag-and-build.yml => build-tags.yml} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename .github/workflows/{tag-and-build.yml => build-tags.yml} (100%) diff --git a/.github/workflows/tag-and-build.yml b/.github/workflows/build-tags.yml similarity index 100% rename from .github/workflows/tag-and-build.yml rename to .github/workflows/build-tags.yml From 3e153a535e2ced94d988545074ec9f4efaf224fc Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Wed, 2 Aug 2023 10:30:48 -0400 Subject: [PATCH 30/47] Add gh action to build pr images. 
Signed-off-by: Humair Khan --- .github/workflows/build-prs.yml | 93 +++++++++++++++++++++++++++++++++ 1 file changed, 93 insertions(+) create mode 100644 .github/workflows/build-prs.yml diff --git a/.github/workflows/build-prs.yml b/.github/workflows/build-prs.yml new file mode 100644 index 000000000..e46d64859 --- /dev/null +++ b/.github/workflows/build-prs.yml @@ -0,0 +1,93 @@ +name: Build images for PRs +on: + pull_request: + types: + - opened + - reopened + - closed + - synchronize +permissions: + pull-requests: read +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true +env: + IMAGE_REPO_DSPO: data-science-pipelines-operator + SOURCE_BRANCH: ${{ github.event.pull_request.head.sha }} + QUAY_ORG: opendatahub + QUAY_ID: ${{ secrets.QUAY_ID }} + QUAY_TOKEN: ${{ secrets.QUAY_TOKEN }} + TARGET_IMAGE_TAG: pr-${{ github.event.pull_request.number }} +jobs: + build-pr-image: + if: github.event.pull_request.state == 'open' + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: ./.github/actions/build + with: + OVERWRITE: true + IMAGE_REPO: ${{ env.IMAGE_REPO_DSPO }} + DOCKERFILE: Dockerfile + GH_REPO: ${{ github.repository }} + - name: Post build + shell: bash + env: + GH_TOKEN: ${{ secrets.ACCESS_TOKEN }} + IMG: quay.io/${{ env.QUAY_ORG }}/data-science-pipelines-operator:${{ env.TARGET_IMAGE_TAG }} + run: | + git config user.email "140449482+dsp-developers@users.noreply.github.com" + git config user.name "dsp-developers" + + action=${{ github.event.action }} + + if [[ "$action" == "synchronize" ]]; then + echo "Change to PR detected. A new PR build was completed." >> /tmp/body-file.txt + fi + + if [[ "$action" == "reopened" ]]; then + echo "PR was re-opened." 
>> /tmp/body-file.txt + fi + + cat <<"EOF" >> /tmp/body-file.txt + A new image has been built to help with testing out this PR: `${{ env.IMG }}` + EOF + + if [[ "$action" == "opened" || "$action" == "reopened" ]]; then + cat <<"EOF" >> /tmp/body-file.txt + An OCP cluster where you are logged in as cluster admin is required. + + To use this image run the following: + + ```bash + cd $(mktemp -d) + git clone git@github.com:opendatahub-io/data-science-pipelines-operator.git + cd data-science-pipelines-operator/ + git fetch origin pull/${{ github.event.pull_request.number }}/head + git checkout -b pullrequest ${{ env.SOURCE_BRANCH }} + make deploy IMG="${{ env.IMG }}" + ``` + + More instructions [here](https://github.com/opendatahub-io/data-science-pipelines-operator#deploy-dsp-instance) on how to deploy and test a Data Science Pipelines Application. + + EOF + fi + + gh pr comment ${{ github.event.pull_request.number }} --body-file /tmp/body-file.txt + + clean-pr-images: + if: github.event.pull_request.state == 'closed' + runs-on: ubuntu-latest + steps: + - name: Delete PR image + shell: bash + run: | + tag=$(curl --request GET 'https://quay.io/api/v1/repository/${{ env.QUAY_ORG }}/${{ env.IMAGE_REPO_DSPO }}/tag/?specificTag=${{ env.TARGET_IMAGE_TAG }}') + exists=$(echo ${tag} | yq .tags - | yq any) + IMAGE=quay.io/${{ env.QUAY_ORG }}/${{ env.IMAGE_REPO_DSPO }}:${{ env.TARGET_IMAGE_TAG }} + if [[ "$exists" == "true" ]]; then + echo "PR Closed deleting image...${{ env.IMAGE }}." + skopeo delete --creds ${{ env.QUAY_ID }}:${{ env.QUAY_TOKEN }} docker://${IMAGE} + else + echo "Deletion of image ${IMAGE} skipped because image already does not exist." 
+ fi \ No newline at end of file From fb89ee89bbbe8d1074409026e044c25ac89e3dc0 Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Wed, 2 Aug 2023 10:31:09 -0400 Subject: [PATCH 31/47] Add gh action to build images on main Signed-off-by: Humair Khan --- .github/workflows/build-main.yml | 46 ++++++++++++++++++++++++++++++++ 1 file changed, 46 insertions(+) create mode 100644 .github/workflows/build-main.yml diff --git a/.github/workflows/build-main.yml b/.github/workflows/build-main.yml new file mode 100644 index 000000000..ad0884346 --- /dev/null +++ b/.github/workflows/build-main.yml @@ -0,0 +1,46 @@ +name: Build images for Main branch +on: + push: + branches: + - main +concurrency: + group: ${{ github.workflow }} + cancel-in-progress: true +env: + IMAGE_REPO_DSPO: data-science-pipelines-operator + QUAY_ORG: hukhan + QUAY_ID: ${{ secrets.QUAY_ID }} + QUAY_TOKEN: ${{ secrets.QUAY_TOKEN }} + SOURCE_BRANCH: main +jobs: + build-image: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - name: Generate Tag + shell: bash + id: tags + run: | + commit_sha=${{ github.event.after }} + tag=main-${commit_sha:0:7} + echo "tag=${tag}" >> $GITHUB_OUTPUT + - name: Build Image + uses: ./.github/actions/build + env: + IMG: quay.io/${{ env.QUAY_ORG }}/data-science-pipelines-operator:${{ steps.tags.outputs.tag }} + TARGET_IMAGE_TAG: ${{ steps.tags.outputs.tag }} + with: + OVERWRITE: true + IMAGE_REPO: ${{ env.IMAGE_REPO_DSPO }} + DOCKERFILE: Dockerfile + GH_REPO: ${{ github.repository }} + - name: Tag latest + shell: bash + env: + IMG: quay.io/${{ env.QUAY_ORG }}/data-science-pipelines-operator + NEWEST_TAG: ${{ steps.tags.outputs.tag }} + run: | + podman tag ${IMG}:${NEWEST_TAG} ${IMG}:latest + podman push ${IMG}:latest + podman tag ${IMG}:${NEWEST_TAG} ${IMG}:main + podman push ${IMG}:main From af90121e431205526a9eef63f8ee2b49cb57d7ea Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Wed, 2 Aug 2023 11:11:32 -0400 Subject: [PATCH 32/47] Update build actions 
name/description. Signed-off-by: Humair Khan --- .github/actions/build/action.yaml | 4 ++-- .github/workflows/build-main.yml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/actions/build/action.yaml b/.github/actions/build/action.yaml index 66a955439..82c0958eb 100644 --- a/.github/actions/build/action.yaml +++ b/.github/actions/build/action.yaml @@ -1,5 +1,5 @@ -name: "Set up KinD" -description: "Step to start and configure KinD cluster" +name: "Create a podman build" +description: "This workflow can be used to create a podman build and push to quay.io from source branches." inputs: IMAGE_REPO: description: "Quay image repo name." diff --git a/.github/workflows/build-main.yml b/.github/workflows/build-main.yml index ad0884346..0d33c3b29 100644 --- a/.github/workflows/build-main.yml +++ b/.github/workflows/build-main.yml @@ -8,7 +8,7 @@ concurrency: cancel-in-progress: true env: IMAGE_REPO_DSPO: data-science-pipelines-operator - QUAY_ORG: hukhan + QUAY_ORG: opendatahub QUAY_ID: ${{ secrets.QUAY_ID }} QUAY_TOKEN: ${{ secrets.QUAY_TOKEN }} SOURCE_BRANCH: main From dd4a8ba5cc41abe74bf3146c6c49cf9607a282e5 Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Wed, 2 Aug 2023 11:14:29 -0400 Subject: [PATCH 33/47] Add missing step names for gh actions. 
Signed-off-by: Humair Khan --- .github/actions/build/action.yaml | 2 +- .github/workflows/build-prs.yml | 33 ++++++++++++++++--------------- .github/workflows/build-tags.yml | 11 +++++++++++ .github/workflows/precommit.yml | 2 -- 4 files changed, 29 insertions(+), 19 deletions(-) diff --git a/.github/actions/build/action.yaml b/.github/actions/build/action.yaml index 82c0958eb..ffd5bf20a 100644 --- a/.github/actions/build/action.yaml +++ b/.github/actions/build/action.yaml @@ -42,7 +42,7 @@ runs: IMAGE: quay.io/${{ env.QUAY_ORG }}/${{ inputs.IMAGE_REPO }}:${{ env.TARGET_IMAGE_TAG }} run: | echo ${{ inputs.OVERWRITE }} - + tags=$(curl --request GET 'https://quay.io/api/v1/repository/${{ env.QUAY_ORG }}/${{ inputs.IMAGE_REPO }}/tag/?specificTag=${{ env.TARGET_IMAGE_TAG }}') latest_tag_has_end_ts=$(echo $tags | yq .tags - | yq 'sort_by(.start_ts) | reverse' - -P | yq .[0].end_ts -) notempty=$(echo ${tags} | yq .tags - | yq any) diff --git a/.github/workflows/build-prs.yml b/.github/workflows/build-prs.yml index e46d64859..074f73614 100644 --- a/.github/workflows/build-prs.yml +++ b/.github/workflows/build-prs.yml @@ -24,13 +24,14 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - - uses: ./.github/actions/build + - name: Build Image + uses: ./.github/actions/build with: OVERWRITE: true IMAGE_REPO: ${{ env.IMAGE_REPO_DSPO }} DOCKERFILE: Dockerfile GH_REPO: ${{ github.repository }} - - name: Post build + - name: Send comment shell: bash env: GH_TOKEN: ${{ secrets.ACCESS_TOKEN }} @@ -38,27 +39,27 @@ jobs: run: | git config user.email "140449482+dsp-developers@users.noreply.github.com" git config user.name "dsp-developers" - + action=${{ github.event.action }} - - if [[ "$action" == "synchronize" ]]; then + + if [[ "$action" == "synchronize" ]]; then echo "Change to PR detected. A new PR build was completed." >> /tmp/body-file.txt fi - - if [[ "$action" == "reopened" ]]; then + + if [[ "$action" == "reopened" ]]; then echo "PR was re-opened." 
>> /tmp/body-file.txt fi - + cat <<"EOF" >> /tmp/body-file.txt A new image has been built to help with testing out this PR: `${{ env.IMG }}` EOF - - if [[ "$action" == "opened" || "$action" == "reopened" ]]; then + + if [[ "$action" == "opened" || "$action" == "reopened" ]]; then cat <<"EOF" >> /tmp/body-file.txt - An OCP cluster where you are logged in as cluster admin is required. - + An OCP cluster where you are logged in as cluster admin is required. + To use this image run the following: - + ```bash cd $(mktemp -d) git clone git@github.com:opendatahub-io/data-science-pipelines-operator.git @@ -67,9 +68,9 @@ jobs: git checkout -b pullrequest ${{ env.SOURCE_BRANCH }} make deploy IMG="${{ env.IMG }}" ``` - + More instructions [here](https://github.com/opendatahub-io/data-science-pipelines-operator#deploy-dsp-instance) on how to deploy and test a Data Science Pipelines Application. - + EOF fi @@ -90,4 +91,4 @@ jobs: skopeo delete --creds ${{ env.QUAY_ID }}:${{ env.QUAY_TOKEN }} docker://${IMAGE} else echo "Deletion of image ${IMAGE} skipped because image already does not exist." 
- fi \ No newline at end of file + fi diff --git a/.github/workflows/build-tags.yml b/.github/workflows/build-tags.yml index 0e7d9dede..c06c7f623 100644 --- a/.github/workflows/build-tags.yml +++ b/.github/workflows/build-tags.yml @@ -44,6 +44,7 @@ jobs: steps: - uses: actions/checkout@v3 - uses: ./.github/actions/build + name: Build Image with: IMAGE_REPO: ${{ env.IMAGE_REPO_DSPO }} DOCKERFILE: Dockerfile @@ -56,6 +57,7 @@ jobs: steps: - uses: actions/checkout@v3 - uses: ./.github/actions/build + name: Build Image with: IMAGE_REPO: ${{ env.IMAGE_REPO_SERVER }} DOCKERFILE: backend/Dockerfile @@ -68,6 +70,7 @@ jobs: steps: - uses: actions/checkout@v3 - uses: ./.github/actions/build + name: Build Image with: IMAGE_REPO: ${{ env.IMAGE_REPO_UI }} DOCKERFILE: frontend/Dockerfile @@ -80,6 +83,7 @@ jobs: steps: - uses: actions/checkout@v3 - uses: ./.github/actions/build + name: Build Image with: IMAGE_REPO: ${{ env.IMAGE_REPO_CACHE }} DOCKERFILE: backend/Dockerfile.cacheserver @@ -92,6 +96,7 @@ jobs: steps: - uses: actions/checkout@v3 - uses: ./.github/actions/build + name: Build Image with: IMAGE_REPO: ${{ env.IMAGE_REPO_PA }} DOCKERFILE: backend/Dockerfile.persistenceagent @@ -104,6 +109,7 @@ jobs: steps: - uses: actions/checkout@v3 - uses: ./.github/actions/build + name: Build Image with: IMAGE_REPO: ${{ env.IMAGE_REPO_SWF }} DOCKERFILE: backend/Dockerfile.scheduledworkflow @@ -116,6 +122,7 @@ jobs: steps: - uses: actions/checkout@v3 - uses: ./.github/actions/build + name: Build Image with: IMAGE_REPO: ${{ env.IMAGE_REPO_VC }} DOCKERFILE: backend/Dockerfile.viewercontroller @@ -128,6 +135,7 @@ jobs: steps: - uses: actions/checkout@v3 - uses: ./.github/actions/build + name: Build Image with: IMAGE_REPO: ${{ env.IMAGE_REPO_ARTIFACT }} DOCKERFILE: backend/artifact_manager/Dockerfile @@ -140,6 +148,7 @@ jobs: steps: - uses: actions/checkout@v3 - uses: ./.github/actions/build + name: Build Image with: IMAGE_REPO: ${{ env.IMAGE_REPO_MLMD_WRITER }} DOCKERFILE: 
backend/metadata_writer/Dockerfile @@ -152,6 +161,7 @@ jobs: steps: - uses: actions/checkout@v3 - uses: ./.github/actions/build + name: Build Image with: IMAGE_REPO: ${{ env.IMAGE_REPO_MLMD_ENVOY }} DOCKERFILE: third-party/metadata_envoy/Dockerfile @@ -164,6 +174,7 @@ jobs: steps: - uses: actions/checkout@v3 - uses: ./.github/actions/build + name: Build Image with: IMAGE_REPO: ${{ env.IMAGE_REPO_MLMD_GRPC }} DOCKERFILE: third-party/ml-metadata/Dockerfile diff --git a/.github/workflows/precommit.yml b/.github/workflows/precommit.yml index bb192425f..554090bab 100644 --- a/.github/workflows/precommit.yml +++ b/.github/workflows/precommit.yml @@ -15,12 +15,10 @@ jobs: - /cache steps: - uses: actions/checkout@v2 - - name: Activate cache uses: actions/cache@v2 with: path: /cache key: ${{ runner.os }}-cache-${{ hashFiles('**/go.sum', '.pre-commit-config.yaml') }} - - name: Run pre-commit checks run: pre-commit run --all-files From 810b7180951d3557c62495f1acd8b1a1cdfc4f06 Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Wed, 2 Aug 2023 13:22:26 -0400 Subject: [PATCH 34/47] Clean up env vars. 
Signed-off-by: Humair Khan --- .github/workflows/build-main.yml | 4 ++-- .github/workflows/build-prs.yml | 8 +++++--- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/.github/workflows/build-main.yml b/.github/workflows/build-main.yml index 0d33c3b29..ce4959f37 100644 --- a/.github/workflows/build-main.yml +++ b/.github/workflows/build-main.yml @@ -27,7 +27,7 @@ jobs: - name: Build Image uses: ./.github/actions/build env: - IMG: quay.io/${{ env.QUAY_ORG }}/data-science-pipelines-operator:${{ steps.tags.outputs.tag }} + IMG: quay.io/${{ env.QUAY_ORG }}/${{ env.IMAGE_REPO_DSPO }}:${{ steps.tags.outputs.tag }} TARGET_IMAGE_TAG: ${{ steps.tags.outputs.tag }} with: OVERWRITE: true @@ -37,7 +37,7 @@ jobs: - name: Tag latest shell: bash env: - IMG: quay.io/${{ env.QUAY_ORG }}/data-science-pipelines-operator + IMG: quay.io/${{ env.QUAY_ORG }}/${{ env.IMAGE_REPO_DSPO }} NEWEST_TAG: ${{ steps.tags.outputs.tag }} run: | podman tag ${IMG}:${NEWEST_TAG} ${IMG}:latest diff --git a/.github/workflows/build-prs.yml b/.github/workflows/build-prs.yml index 074f73614..605629cfd 100644 --- a/.github/workflows/build-prs.yml +++ b/.github/workflows/build-prs.yml @@ -18,6 +18,8 @@ env: QUAY_ID: ${{ secrets.QUAY_ID }} QUAY_TOKEN: ${{ secrets.QUAY_TOKEN }} TARGET_IMAGE_TAG: pr-${{ github.event.pull_request.number }} + GH_USER_EMAIL: 140449482+dsp-developers@users.noreply.github.com + GH_USER_NAME: dsp-developers jobs: build-pr-image: if: github.event.pull_request.state == 'open' @@ -35,10 +37,10 @@ jobs: shell: bash env: GH_TOKEN: ${{ secrets.ACCESS_TOKEN }} - IMG: quay.io/${{ env.QUAY_ORG }}/data-science-pipelines-operator:${{ env.TARGET_IMAGE_TAG }} + IMG: quay.io/${{ env.QUAY_ORG }}/${{ env.IMAGE_REPO_DSPO }}:${{ env.TARGET_IMAGE_TAG }} run: | - git config user.email "140449482+dsp-developers@users.noreply.github.com" - git config user.name "dsp-developers" + git config user.email "${{ env.GH_USER_EMAIL }}" + git config user.name "${{ env.GH_USER_NAME }}" action=${{ 
github.event.action }} From a15f084b431bf33affd0faeefcda559a4a7448e3 Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Thu, 3 Aug 2023 13:09:27 -0400 Subject: [PATCH 35/47] Add secure pr build triggers. Signed-off-by: Humair Khan --- .github/workflows/build-prs-trigger.yaml | 27 ++++++++ .github/workflows/build-prs.yml | 87 +++++++++++++++++++----- 2 files changed, 98 insertions(+), 16 deletions(-) create mode 100644 .github/workflows/build-prs-trigger.yaml diff --git a/.github/workflows/build-prs-trigger.yaml b/.github/workflows/build-prs-trigger.yaml new file mode 100644 index 000000000..1e925d27f --- /dev/null +++ b/.github/workflows/build-prs-trigger.yaml @@ -0,0 +1,27 @@ +name: Trigger build images for PRs +on: + pull_request: + types: + - opened + - reopened + - closed + - synchronize +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true +jobs: + upload-data: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Save PR payload + shell: bash + run: | + mkdir -p ./pr + echo ${{ github.event.pull_request.number }} >> ./pr/pr_number + echo ${{ github.event.pull_request.state }} >> ./pr/pr_state + echo ${{ github.event.pull_request.head.sha }} >> ./pr/head_sha + - uses: actions/upload-artifact@v2 + with: + name: pr + path: pr/ diff --git a/.github/workflows/build-prs.yml b/.github/workflows/build-prs.yml index 605629cfd..dee4c0182 100644 --- a/.github/workflows/build-prs.yml +++ b/.github/workflows/build-prs.yml @@ -1,29 +1,69 @@ name: Build images for PRs on: - pull_request: + workflow_run: + workflows: ["Trigger build images for PRs"] types: - - opened - - reopened - - closed - - synchronize -permissions: - pull-requests: read -concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} - cancel-in-progress: true + - completed env: IMAGE_REPO_DSPO: data-science-pipelines-operator - SOURCE_BRANCH: ${{ 
github.event.pull_request.head.sha }} QUAY_ORG: opendatahub QUAY_ID: ${{ secrets.QUAY_ID }} QUAY_TOKEN: ${{ secrets.QUAY_TOKEN }} - TARGET_IMAGE_TAG: pr-${{ github.event.pull_request.number }} GH_USER_EMAIL: 140449482+dsp-developers@users.noreply.github.com GH_USER_NAME: dsp-developers jobs: + fetch-data: + name: Fetch workflow payload + runs-on: ubuntu-latest + if: > + github.event.workflow_run.event == 'pull_request' && + github.event.workflow_run.conclusion == 'success' + outputs: + pr_state: ${{ steps.vars.outputs.pr_state }} + pr_number: ${{ steps.vars.outputs.pr_number }} + head_sha: ${{ steps.vars.outputs.head_sha }} + steps: + - name: 'Download artifact' + uses: actions/github-script@v3.1.0 + with: + script: | + var artifacts = await github.actions.listWorkflowRunArtifacts({ + owner: context.repo.owner, + repo: context.repo.repo, + run_id: ${{github.event.workflow_run.id }}, + }); + var matchArtifact = artifacts.data.artifacts.filter((artifact) => { + return artifact.name == "pr" + })[0]; + var download = await github.actions.downloadArtifact({ + owner: context.repo.owner, + repo: context.repo.repo, + artifact_id: matchArtifact.id, + archive_format: 'zip', + }); + var fs = require('fs'); + fs.writeFileSync('${{github.workspace}}/pr.zip', Buffer.from(download.data)); + - run: unzip pr.zip + - shell: bash + id: vars + run: | + pr_number=$(cat ./pr_number) + pr_state=$(cat ./pr_state) + head_sha=$(cat ./head_sha) + echo "pr_number=${pr_number}" >> $GITHUB_OUTPUT + echo "pr_state=${pr_state}" >> $GITHUB_OUTPUT + echo "head_sha=${head_sha}" >> $GITHUB_OUTPUT + build-pr-image: - if: github.event.pull_request.state == 'open' + if: needs.fetch-data.outputs.pr_state == 'open' runs-on: ubuntu-latest + needs: fetch-data + concurrency: + group: ${{ github.workflow }}-build-pr-image-${{ needs.fetch-data.outputs.pr_number }} + cancel-in-progress: true + env: + SOURCE_BRANCH: ${{ needs.fetch-data.outputs.head_sha }} + TARGET_IMAGE_TAG: pr-${{ 
needs.fetch-data.outputs.pr_number }} steps: - uses: actions/checkout@v3 - name: Build Image @@ -33,6 +73,15 @@ jobs: IMAGE_REPO: ${{ env.IMAGE_REPO_DSPO }} DOCKERFILE: Dockerfile GH_REPO: ${{ github.repository }} + - name: Echo PR metadata + shell: bash + env: + GH_TOKEN: ${{ secrets.ACCESS_TOKEN }} + IMG: quay.io/${{ env.QUAY_ORG }}/${{ env.IMAGE_REPO_DSPO }}:${{ env.TARGET_IMAGE_TAG }} + run: | + echo ${{ needs.fetch-data.outputs.head_sha }} + echo ${{ needs.fetch-data.outputs.pr_number }} + echo ${{ needs.fetch-data.outputs.pr_state }} - name: Send comment shell: bash env: @@ -66,7 +115,7 @@ jobs: cd $(mktemp -d) git clone git@github.com:opendatahub-io/data-science-pipelines-operator.git cd data-science-pipelines-operator/ - git fetch origin pull/${{ github.event.pull_request.number }}/head + git fetch origin pull/${{ needs.fetch-data.outputs.pr_number }}/head git checkout -b pullrequest ${{ env.SOURCE_BRANCH }} make deploy IMG="${{ env.IMG }}" ``` @@ -76,11 +125,17 @@ jobs: EOF fi - gh pr comment ${{ github.event.pull_request.number }} --body-file /tmp/body-file.txt + gh pr comment ${{ needs.fetch-data.outputs.pr_number }} --body-file /tmp/body-file.txt clean-pr-images: - if: github.event.pull_request.state == 'closed' + if: needs.fetch-data.outputs.pr_state == 'closed' runs-on: ubuntu-latest + needs: fetch-data + concurrency: + group: ${{ github.workflow }}-clean-pr-images-${{ needs.fetch-data.outputs.pr_number }} + cancel-in-progress: true + env: + TARGET_IMAGE_TAG: pr-${{ needs.fetch-data.outputs.pr_number }} steps: - name: Delete PR image shell: bash From e0466351107132d6b0dbd7b70cec3b7b6bda363b Mon Sep 17 00:00:00 2001 From: ddalvi Date: Wed, 2 Aug 2023 16:57:28 -0400 Subject: [PATCH 36/47] Add non-default SAs to MariaDB and Minio --- config/internal/mariadb/deployment.yaml.tmpl | 1 + .../mariadb/{sa.yaml.tmpl => mariadb-sa.yaml.tmpl} | 2 +- config/internal/minio/deployment.yaml.tmpl | 1 + config/internal/minio/minio-sa.yaml.tmpl | 8 ++++++++ 
controllers/database.go | 2 +- controllers/storage.go | 1 + 6 files changed, 13 insertions(+), 2 deletions(-) rename config/internal/mariadb/{sa.yaml.tmpl => mariadb-sa.yaml.tmpl} (78%) create mode 100644 config/internal/minio/minio-sa.yaml.tmpl diff --git a/config/internal/mariadb/deployment.yaml.tmpl b/config/internal/mariadb/deployment.yaml.tmpl index 88a9c1c57..d1b9b7147 100644 --- a/config/internal/mariadb/deployment.yaml.tmpl +++ b/config/internal/mariadb/deployment.yaml.tmpl @@ -26,6 +26,7 @@ spec: component: data-science-pipelines dspa: {{.Name}} spec: + serviceAccountName: ds-pipelines-mariadb-sa-{{.Name}} containers: - name: mariadb image: {{.MariaDB.Image}} diff --git a/config/internal/mariadb/sa.yaml.tmpl b/config/internal/mariadb/mariadb-sa.yaml.tmpl similarity index 78% rename from config/internal/mariadb/sa.yaml.tmpl rename to config/internal/mariadb/mariadb-sa.yaml.tmpl index d4514e9e5..597bc1ee0 100644 --- a/config/internal/mariadb/sa.yaml.tmpl +++ b/config/internal/mariadb/mariadb-sa.yaml.tmpl @@ -1,7 +1,7 @@ apiVersion: v1 kind: ServiceAccount metadata: - name: mariadb-{{.Name}} + name: ds-pipelines-mariadb-sa-{{.Name}} namespace: {{.Namespace}} labels: app: mariadb-{{.Name}} diff --git a/config/internal/minio/deployment.yaml.tmpl b/config/internal/minio/deployment.yaml.tmpl index 84a1c2415..b8481d083 100644 --- a/config/internal/minio/deployment.yaml.tmpl +++ b/config/internal/minio/deployment.yaml.tmpl @@ -22,6 +22,7 @@ spec: component: data-science-pipelines dspa: {{.Name}} spec: + serviceAccountName: ds-pipelines-minio-sa-{{.Name}} containers: - args: - server diff --git a/config/internal/minio/minio-sa.yaml.tmpl b/config/internal/minio/minio-sa.yaml.tmpl new file mode 100644 index 000000000..c154310dc --- /dev/null +++ b/config/internal/minio/minio-sa.yaml.tmpl @@ -0,0 +1,8 @@ +apiVersion: v1 +kind: ServiceAccount +metadata: + name: ds-pipelines-minio-sa-{{.Name}} + namespace: {{.Namespace}} + labels: + app: minio-{{.Name}} + component: 
data-science-pipelines diff --git a/controllers/database.go b/controllers/database.go index 065730ea8..810f11c02 100644 --- a/controllers/database.go +++ b/controllers/database.go @@ -31,8 +31,8 @@ const dbSecret = "mariadb/secret.yaml.tmpl" var dbTemplates = []string{ "mariadb/deployment.yaml.tmpl", "mariadb/pvc.yaml.tmpl", - "mariadb/sa.yaml.tmpl", "mariadb/service.yaml.tmpl", + "mariadb/mariadb-sa.yaml.tmpl", dbSecret, } diff --git a/controllers/storage.go b/controllers/storage.go index ecca82086..f0655b358 100644 --- a/controllers/storage.go +++ b/controllers/storage.go @@ -34,6 +34,7 @@ var storageTemplates = []string{ "minio/deployment.yaml.tmpl", "minio/pvc.yaml.tmpl", "minio/service.yaml.tmpl", + "minio/minio-sa.yaml.tmpl", storageSecret, } From 7f51518772f677f214b9e7ba3dbdc7183561bf32 Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Fri, 4 Aug 2023 10:28:48 -0400 Subject: [PATCH 37/47] Update params.env to latest dsp images. Signed-off-by: Humair Khan --- config/base/params.env | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/config/base/params.env b/config/base/params.env index ef633b3d9..6b396a3dc 100644 --- a/config/base/params.env +++ b/config/base/params.env @@ -2,11 +2,11 @@ IMAGES_APISERVER=quay.io/opendatahub/ds-pipelines-api-server@sha256:4650c62254cd IMAGES_ARTIFACT=quay.io/opendatahub/ds-pipelines-artifact-manager@sha256:58a13845901f8aae5421f640eeebee0abf3b12b27c1f96fbc8ff199b7e4f8d8d IMAGES_PERSISTENTAGENT=quay.io/opendatahub/ds-pipelines-persistenceagent@sha256:c8b0953c28fd24180ddd24a30c68df411d299ccc7f6bc18ab15f4dba4a84b7d9 IMAGES_SCHEDULEDWORKFLOW=quay.io/opendatahub/ds-pipelines-scheduledworkflow@sha256:31d049e74ab038f3a6d3ff9fa8953a4d0ddb21b0efc43fbb5b07fbaf83817022 -IMAGES_CACHE=registry.access.redhat.com/ubi8/ubi-minimal@sha256:e52fc1de73dc2879516431ff1865e0fb61b1a32f57b6f914bdcddb13c62f84e6 
-IMAGES_MOVERESULTSIMAGE=registry.access.redhat.com/ubi8/ubi-micro@sha256:443db9a646aaf9374f95d266ba0c8656a52d70d0ffcc386a782cea28fa32e55d -IMAGES_MARIADB=registry.redhat.io/rhel8/mariadb-103@sha256:cafc7364494fb7206c373a1235fd5da74399c19b5c34d87dd02aa07e8f343fa2 -IMAGES_DSPO=quay.io/opendatahub/data-science-pipelines-operator@sha256:bd4f3cfc9688aeb4296a5f3f7274557adeca0a8811533da750f05b485a819a8d -IMAGES_OAUTHPROXY=registry.redhat.io/openshift4/ose-oauth-proxy@sha256:ab112105ac37352a2a4916a39d6736f5db6ab4c29bad4467de8d613e80e9bb33 IMAGES_MLMDENVOY=quay.io/opendatahub/ds-pipelines-metadata-envoy@sha256:f2d5d430bbc925520f635f35698e604aae391ace39b15a5d601a9c9eb26dec2b IMAGES_MLMDGRPC=quay.io/opendatahub/ds-pipelines-metadata-grpc@sha256:2490aadb2227cc72fd9e698549a8cd3270b669a2faa24bb0603c37f1c71ac8c4 IMAGES_MLMDWRITER=quay.io/opendatahub/ds-pipelines-metadata-writer@sha256:89fc26374f8e58384628f6b178eb9b8e3ebb111fe395c529d0b65ba8adaa89f5 +IMAGES_DSPO=quay.io/opendatahub/data-science-pipelines-operator@sha256:c1d77b668149396a4409926eea279647c817a02868a3d21f9a4b5f30c1e86766 +IMAGES_CACHE=registry.access.redhat.com/ubi8/ubi-minimal@sha256:e52fc1de73dc2879516431ff1865e0fb61b1a32f57b6f914bdcddb13c62f84e6 +IMAGES_MOVERESULTSIMAGE=registry.access.redhat.com/ubi8/ubi-micro@sha256:443db9a646aaf9374f95d266ba0c8656a52d70d0ffcc386a782cea28fa32e55d +IMAGES_MARIADB=registry.redhat.io/rhel8/mariadb-103@sha256:6c3ae581b754017b335a70388c0010cf729df8a29daeb6651642ebee4e8abfde +IMAGES_OAUTHPROXY=registry.redhat.io/openshift4/ose-oauth-proxy@sha256:ab112105ac37352a2a4916a39d6736f5db6ab4c29bad4467de8d613e80e9bb33 From 8980ea289ae6c13cf60ce94a79019212c0e7cd26 Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Fri, 4 Aug 2023 12:14:40 -0400 Subject: [PATCH 38/47] Add v1.2.0 shas. 
Signed-off-by: Humair Khan --- config/base/params.env | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/config/base/params.env b/config/base/params.env index 6b396a3dc..79c1b2671 100644 --- a/config/base/params.env +++ b/config/base/params.env @@ -1,11 +1,11 @@ -IMAGES_APISERVER=quay.io/opendatahub/ds-pipelines-api-server@sha256:4650c62254cd79112de3e4f09270130501d0d86a4dea79b74c2fcb8b5ca567e7 -IMAGES_ARTIFACT=quay.io/opendatahub/ds-pipelines-artifact-manager@sha256:58a13845901f8aae5421f640eeebee0abf3b12b27c1f96fbc8ff199b7e4f8d8d -IMAGES_PERSISTENTAGENT=quay.io/opendatahub/ds-pipelines-persistenceagent@sha256:c8b0953c28fd24180ddd24a30c68df411d299ccc7f6bc18ab15f4dba4a84b7d9 -IMAGES_SCHEDULEDWORKFLOW=quay.io/opendatahub/ds-pipelines-scheduledworkflow@sha256:31d049e74ab038f3a6d3ff9fa8953a4d0ddb21b0efc43fbb5b07fbaf83817022 -IMAGES_MLMDENVOY=quay.io/opendatahub/ds-pipelines-metadata-envoy@sha256:f2d5d430bbc925520f635f35698e604aae391ace39b15a5d601a9c9eb26dec2b -IMAGES_MLMDGRPC=quay.io/opendatahub/ds-pipelines-metadata-grpc@sha256:2490aadb2227cc72fd9e698549a8cd3270b669a2faa24bb0603c37f1c71ac8c4 -IMAGES_MLMDWRITER=quay.io/opendatahub/ds-pipelines-metadata-writer@sha256:89fc26374f8e58384628f6b178eb9b8e3ebb111fe395c529d0b65ba8adaa89f5 -IMAGES_DSPO=quay.io/opendatahub/data-science-pipelines-operator@sha256:c1d77b668149396a4409926eea279647c817a02868a3d21f9a4b5f30c1e86766 +IMAGES_APISERVER=quay.io/opendatahub/ds-pipelines-api-server@sha256:5174e8f05f7562b21fe2f88c13b35e3247dc362441811d6478c712987cd83f09 +IMAGES_ARTIFACT=quay.io/opendatahub/ds-pipelines-artifact-manager@sha256:424e07d6802101d5daf26f753d59050e0f075038da51531ff4a34dff0d017721 +IMAGES_PERSISTENTAGENT=quay.io/opendatahub/ds-pipelines-persistenceagent@sha256:da56c4edc5ea5a8783f6ca20bd2acf02bc0c1a7f11e3e1c662da295ea14f9188 +IMAGES_SCHEDULEDWORKFLOW=quay.io/opendatahub/ds-pipelines-scheduledworkflow@sha256:4c42602c2f27ab10a2871b6823becba609420dca36614b1a47a0f3ab19897e03 
+IMAGES_MLMDENVOY=quay.io/opendatahub/ds-pipelines-metadata-envoy@sha256:851386f25bec1051a472e87eb98b3b8016f80e1d2e05a4f5d0c4323cb1c99563 +IMAGES_MLMDGRPC=quay.io/opendatahub/ds-pipelines-metadata-grpc@sha256:f2ff89ac664916789e690f8939b5fb0881e6662211a9c40712779236b862735d +IMAGES_MLMDWRITER=quay.io/opendatahub/ds-pipelines-metadata-writer@sha256:48c80013fd6dd42b9fcd0238fe6c075213d8ae849848cd4208d05a03ea7979c5 +IMAGES_DSPO=quay.io/opendatahub/data-science-pipelines-operator@sha256:2f8515f475578494a0238552a86b6c28f1755ef8998db628aff4efb4e4973056 IMAGES_CACHE=registry.access.redhat.com/ubi8/ubi-minimal@sha256:e52fc1de73dc2879516431ff1865e0fb61b1a32f57b6f914bdcddb13c62f84e6 IMAGES_MOVERESULTSIMAGE=registry.access.redhat.com/ubi8/ubi-micro@sha256:443db9a646aaf9374f95d266ba0c8656a52d70d0ffcc386a782cea28fa32e55d IMAGES_MARIADB=registry.redhat.io/rhel8/mariadb-103@sha256:6c3ae581b754017b335a70388c0010cf729df8a29daeb6651642ebee4e8abfde From 09cecfae9ae7668c9ffadcf362acb3736dd0b740 Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Fri, 4 Aug 2023 13:26:56 -0400 Subject: [PATCH 39/47] Allow odh sync to fork from user fork. 
Signed-off-by: Humair Khan --- .github/workflows/odh-manifests-PR-sync.yml | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/.github/workflows/odh-manifests-PR-sync.yml b/.github/workflows/odh-manifests-PR-sync.yml index e3f6ff56c..b5b5b0e0e 100644 --- a/.github/workflows/odh-manifests-PR-sync.yml +++ b/.github/workflows/odh-manifests-PR-sync.yml @@ -32,7 +32,7 @@ jobs: git clone \ --depth=1 \ --branch=master \ - https://opendatahub-io:${{ secrets.ACCESS_TOKEN }}@github.com/$REPOSITORY \ + https://dsp-developers:${{ secrets.ACCESS_TOKEN }}@github.com/$REPOSITORY \ $FOLDER cd $FOLDER @@ -40,8 +40,11 @@ jobs: git config user.email "140449482+dsp-developers@users.noreply.github.com" git config user.name "dsp-developers" + git remote add upstream https://github.com/opendatahub-io/odh-manifests.git + git fetch upstream + # Create a new feature branch for the changes. - git checkout -b $BRANCH_NAME + git checkout -B $BRANCH_NAME upstream/master echo "Created branch: $BRANCH_NAME" # Copy DSPO manifests. Using rsync to allow filtering of paths/files (e.g. 
like a .gitignore, hidden files, etc) @@ -62,5 +65,6 @@ jobs: --title "Update DSP Operator manifests to $LATEST_TAG" \ --head "$BRANCH_NAME" \ --base "master" + --repo https://github.com/opendatahub-io/data-science-pipelines-operator env: GH_TOKEN: ${{ secrets.ACCESS_TOKEN }} From 8ac5b63a6c7f9d93adf86b05f918ff97488dbed5 Mon Sep 17 00:00:00 2001 From: ddalvi Date: Fri, 4 Aug 2023 14:23:02 -0400 Subject: [PATCH 40/47] Fix GH workflow to clone dsp-developers/odh-manifests fork --- .github/workflows/odh-manifests-PR-sync.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/odh-manifests-PR-sync.yml b/.github/workflows/odh-manifests-PR-sync.yml index b5b5b0e0e..adbc9b065 100644 --- a/.github/workflows/odh-manifests-PR-sync.yml +++ b/.github/workflows/odh-manifests-PR-sync.yml @@ -23,7 +23,7 @@ jobs: - name: Send pull-request run: | LATEST_TAG=$(git describe --tags --always --abbrev=0) - REPOSITORY="opendatahub-io/odh-manifests" + REPOSITORY="dsp-developers/odh-manifests" FOLDER="bin/$REPOSITORY" BRANCH_NAME="chore-update-scripts-to-$LATEST_TAG" From 45b9145b51f5f2ebc1eea2df6a8c5d1dc624c1dc Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Wed, 30 Aug 2023 11:34:42 -0400 Subject: [PATCH 41/47] Hardcode obj store info in artifact script. 
Signed-off-by: Humair Khan --- config/internal/apiserver/artifact_script.yaml.tmpl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/config/internal/apiserver/artifact_script.yaml.tmpl b/config/internal/apiserver/artifact_script.yaml.tmpl index 9af903b2f..ed2d9f7d4 100644 --- a/config/internal/apiserver/artifact_script.yaml.tmpl +++ b/config/internal/apiserver/artifact_script.yaml.tmpl @@ -9,10 +9,10 @@ data: if [ -f "$workspace_dest/$artifact_name" ]; then echo sending to: ${workspace_dest}/${artifact_name} tar -cvzf $1.tgz -C ${workspace_dest} ${artifact_name} - aws s3 --endpoint ${ARTIFACT_ENDPOINT} cp $1.tgz s3://${ARTIFACT_BUCKET}/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz + aws s3 --endpoint {{.ObjectStorageConnection.Endpoint}} cp $1.tgz s3://{{.ObjectStorageConnection.Bucket}}/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz elif [ -f "$2" ]; then tar -cvzf $1.tgz -C $(dirname $2) ${artifact_name} - aws s3 --endpoint ${ARTIFACT_ENDPOINT} cp $1.tgz s3://${ARTIFACT_BUCKET}/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz + aws s3 --endpoint {{.ObjectStorageConnection.Endpoint}} cp $1.tgz s3://{{.ObjectStorageConnection.Bucket}}/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz else echo "$2 file does not exist. Skip artifact tracking for $1" fi From 0c2e07ee3bf7c1665c722c27006c6014dcf8327f Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Thu, 31 Aug 2023 14:54:30 -0400 Subject: [PATCH 42/47] Correct artifact endpoints/bucket for test cases. 
Signed-off-by: Humair Khan --- .../case_0/expected/created/configmap_artifact_script.yaml | 4 ++-- .../case_2/expected/created/configmap_artifact_script.yaml | 4 ++-- .../case_4/expected/created/configmap_artifact_script.yaml | 4 ++-- .../case_5/expected/created/configmap_artifact_script.yaml | 4 ++-- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/controllers/testdata/declarative/case_0/expected/created/configmap_artifact_script.yaml b/controllers/testdata/declarative/case_0/expected/created/configmap_artifact_script.yaml index 307711088..5863fd2d1 100644 --- a/controllers/testdata/declarative/case_0/expected/created/configmap_artifact_script.yaml +++ b/controllers/testdata/declarative/case_0/expected/created/configmap_artifact_script.yaml @@ -9,10 +9,10 @@ data: if [ -f "$workspace_dest/$artifact_name" ]; then echo sending to: ${workspace_dest}/${artifact_name} tar -cvzf $1.tgz -C ${workspace_dest} ${artifact_name} - aws s3 --endpoint ${ARTIFACT_ENDPOINT} cp $1.tgz s3://${ARTIFACT_BUCKET}/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz + aws s3 --endpoint http://minio-testdsp0.default.svc.cluster.local:9000 cp $1.tgz s3://mlpipeline/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz elif [ -f "$2" ]; then tar -cvzf $1.tgz -C $(dirname $2) ${artifact_name} - aws s3 --endpoint ${ARTIFACT_ENDPOINT} cp $1.tgz s3://${ARTIFACT_BUCKET}/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz + aws s3 --endpoint http://minio-testdsp0.default.svc.cluster.local:9000 cp $1.tgz s3://mlpipeline/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz else echo "$2 file does not exist. 
Skip artifact tracking for $1" fi diff --git a/controllers/testdata/declarative/case_2/expected/created/configmap_artifact_script.yaml b/controllers/testdata/declarative/case_2/expected/created/configmap_artifact_script.yaml index 88659df81..beb358966 100644 --- a/controllers/testdata/declarative/case_2/expected/created/configmap_artifact_script.yaml +++ b/controllers/testdata/declarative/case_2/expected/created/configmap_artifact_script.yaml @@ -9,10 +9,10 @@ data: if [ -f "$workspace_dest/$artifact_name" ]; then echo sending to: ${workspace_dest}/${artifact_name} tar -cvzf $1.tgz -C ${workspace_dest} ${artifact_name} - aws s3 --endpoint ${ARTIFACT_ENDPOINT} cp $1.tgz s3://${ARTIFACT_BUCKET}/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz + aws s3 --endpoint http://minio-testdsp2.default.svc.cluster.local:9000 cp $1.tgz s3://mlpipeline/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz elif [ -f "$2" ]; then tar -cvzf $1.tgz -C $(dirname $2) ${artifact_name} - aws s3 --endpoint ${ARTIFACT_ENDPOINT} cp $1.tgz s3://${ARTIFACT_BUCKET}/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz + aws s3 --endpoint http://minio-testdsp2.default.svc.cluster.local:9000 cp $1.tgz s3://mlpipeline/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz else echo "$2 file does not exist. 
Skip artifact tracking for $1" fi diff --git a/controllers/testdata/declarative/case_4/expected/created/configmap_artifact_script.yaml b/controllers/testdata/declarative/case_4/expected/created/configmap_artifact_script.yaml index b00c143cb..cc4ba319b 100644 --- a/controllers/testdata/declarative/case_4/expected/created/configmap_artifact_script.yaml +++ b/controllers/testdata/declarative/case_4/expected/created/configmap_artifact_script.yaml @@ -9,10 +9,10 @@ data: if [ -f "$workspace_dest/$artifact_name" ]; then echo sending to: ${workspace_dest}/${artifact_name} tar -cvzf $1.tgz -C ${workspace_dest} ${artifact_name} - aws s3 --endpoint ${ARTIFACT_ENDPOINT} cp $1.tgz s3://${ARTIFACT_BUCKET}/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz + aws s3 --endpoint http://minio-testdsp4.default.svc.cluster.local:9000 cp $1.tgz s3://mlpipeline/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz elif [ -f "$2" ]; then tar -cvzf $1.tgz -C $(dirname $2) ${artifact_name} - aws s3 --endpoint ${ARTIFACT_ENDPOINT} cp $1.tgz s3://${ARTIFACT_BUCKET}/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz + aws s3 --endpoint http://minio-testdsp4.default.svc.cluster.local:9000 cp $1.tgz s3://mlpipeline/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz else echo "$2 file does not exist. 
Skip artifact tracking for $1" fi diff --git a/controllers/testdata/declarative/case_5/expected/created/configmap_artifact_script.yaml b/controllers/testdata/declarative/case_5/expected/created/configmap_artifact_script.yaml index bf1f028c9..e384c59cb 100644 --- a/controllers/testdata/declarative/case_5/expected/created/configmap_artifact_script.yaml +++ b/controllers/testdata/declarative/case_5/expected/created/configmap_artifact_script.yaml @@ -9,10 +9,10 @@ data: if [ -f "$workspace_dest/$artifact_name" ]; then echo sending to: ${workspace_dest}/${artifact_name} tar -cvzf $1.tgz -C ${workspace_dest} ${artifact_name} - aws s3 --endpoint ${ARTIFACT_ENDPOINT} cp $1.tgz s3://${ARTIFACT_BUCKET}/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz + aws s3 --endpoint http://minio-testdsp5.default.svc.cluster.local:9000 cp $1.tgz s3://mlpipeline/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz elif [ -f "$2" ]; then tar -cvzf $1.tgz -C $(dirname $2) ${artifact_name} - aws s3 --endpoint ${ARTIFACT_ENDPOINT} cp $1.tgz s3://${ARTIFACT_BUCKET}/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz + aws s3 --endpoint http://minio-testdsp5.default.svc.cluster.local:9000 cp $1.tgz s3://mlpipeline/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz else echo "$2 file does not exist. 
Skip artifact tracking for $1" fi From aa909740f98c752e4afefeec17d046963bed15c1 Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Thu, 31 Aug 2023 15:31:52 -0400 Subject: [PATCH 43/47] Add params.env Signed-off-by: Humair Khan --- config/base/params.env | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/config/base/params.env b/config/base/params.env index 79c1b2671..f5c40f565 100644 --- a/config/base/params.env +++ b/config/base/params.env @@ -1,12 +1,12 @@ -IMAGES_APISERVER=quay.io/opendatahub/ds-pipelines-api-server@sha256:5174e8f05f7562b21fe2f88c13b35e3247dc362441811d6478c712987cd83f09 -IMAGES_ARTIFACT=quay.io/opendatahub/ds-pipelines-artifact-manager@sha256:424e07d6802101d5daf26f753d59050e0f075038da51531ff4a34dff0d017721 -IMAGES_PERSISTENTAGENT=quay.io/opendatahub/ds-pipelines-persistenceagent@sha256:da56c4edc5ea5a8783f6ca20bd2acf02bc0c1a7f11e3e1c662da295ea14f9188 -IMAGES_SCHEDULEDWORKFLOW=quay.io/opendatahub/ds-pipelines-scheduledworkflow@sha256:4c42602c2f27ab10a2871b6823becba609420dca36614b1a47a0f3ab19897e03 +IMAGES_APISERVER=quay.io/opendatahub/ds-pipelines-api-server@sha256:05aeba9a47908604c33436caa40f3a2e9f0222808e5f7ae6be1414a4eebc6db5 +IMAGES_ARTIFACT=quay.io/opendatahub/ds-pipelines-artifact-manager@sha256:befff28d238a395e9198cfafdb5363b76a8d2eda09afea541c6ef2749ef9cdd7 +IMAGES_PERSISTENTAGENT=quay.io/opendatahub/ds-pipelines-persistenceagent@sha256:91d1d0e329b5dea8ee6f639dd0a522b1c366b7782062c47e06830a9f2d9478a9 +IMAGES_SCHEDULEDWORKFLOW=quay.io/opendatahub/ds-pipelines-scheduledworkflow@sha256:1271db88c61525b073a38318bc82e247c372272338041991b9d7269e0c733fe5 IMAGES_MLMDENVOY=quay.io/opendatahub/ds-pipelines-metadata-envoy@sha256:851386f25bec1051a472e87eb98b3b8016f80e1d2e05a4f5d0c4323cb1c99563 IMAGES_MLMDGRPC=quay.io/opendatahub/ds-pipelines-metadata-grpc@sha256:f2ff89ac664916789e690f8939b5fb0881e6662211a9c40712779236b862735d 
-IMAGES_MLMDWRITER=quay.io/opendatahub/ds-pipelines-metadata-writer@sha256:48c80013fd6dd42b9fcd0238fe6c075213d8ae849848cd4208d05a03ea7979c5 -IMAGES_DSPO=quay.io/opendatahub/data-science-pipelines-operator@sha256:2f8515f475578494a0238552a86b6c28f1755ef8998db628aff4efb4e4973056 -IMAGES_CACHE=registry.access.redhat.com/ubi8/ubi-minimal@sha256:e52fc1de73dc2879516431ff1865e0fb61b1a32f57b6f914bdcddb13c62f84e6 -IMAGES_MOVERESULTSIMAGE=registry.access.redhat.com/ubi8/ubi-micro@sha256:443db9a646aaf9374f95d266ba0c8656a52d70d0ffcc386a782cea28fa32e55d -IMAGES_MARIADB=registry.redhat.io/rhel8/mariadb-103@sha256:6c3ae581b754017b335a70388c0010cf729df8a29daeb6651642ebee4e8abfde +IMAGES_MLMDWRITER=quay.io/opendatahub/ds-pipelines-metadata-writer@sha256:4db014b53c4564b2b53f3148c8771cc8a45e609bf300cd3504ed4f93ff6318cd +IMAGES_DSPO=quay.io/opendatahub/data-science-pipelines-operator@sha256:a84d9f8a0c4e31dd0cccc1f91079bbc52d29e25f55d365bb173fb5bc2703f8c0 +IMAGES_CACHE=registry.access.redhat.com/ubi8/ubi-minimal@sha256:7394c071ed74ace08cfd51f881c94067fa7a570e7f7e4a0ef0aff1b4f6a2a949 +IMAGES_MOVERESULTSIMAGE=registry.access.redhat.com/ubi8/ubi-micro@sha256:98f8ddc69b6210001351a5fd07993b3a758bc6af3702319493f7a5582dd65a9a +IMAGES_MARIADB=registry.redhat.io/rhel8/mariadb-103@sha256:d0eea30ae4fc8c5bb06d0e4d61d92fba9c0ae40b8023f72702301b70a7537faa IMAGES_OAUTHPROXY=registry.redhat.io/openshift4/ose-oauth-proxy@sha256:ab112105ac37352a2a4916a39d6736f5db6ab4c29bad4467de8d613e80e9bb33 From 62fcb8169f17818bc859099f6316b5deb441a14a Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Fri, 8 Sep 2023 12:56:29 -0400 Subject: [PATCH 44/47] Add timeout for db/storage health checks. 
Signed-off-by: Humair Khan --- controllers/database.go | 9 +++++++-- controllers/storage.go | 4 ++++ 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/controllers/database.go b/controllers/database.go index 810f11c02..ca9130b1d 100644 --- a/controllers/database.go +++ b/controllers/database.go @@ -20,6 +20,7 @@ import ( "database/sql" b64 "encoding/base64" "fmt" + "time" _ "github.com/go-sql-driver/mysql" @@ -38,6 +39,10 @@ var dbTemplates = []string{ // extract to var for mocking in testing var ConnectAndQueryDatabase = func(host, port, username, password, dbname string) bool { + // Create a context with a timeout of 1 second + ctx, cancel := context.WithTimeout(context.Background(), time.Second) + defer cancel() + connectionString := fmt.Sprintf("%s:%s@tcp(%s:%s)/%s", username, password, host, port, dbname) db, err := sql.Open("mysql", connectionString) if err != nil { @@ -46,7 +51,8 @@ var ConnectAndQueryDatabase = func(host, port, username, password, dbname string defer db.Close() testStatement := "SELECT 1;" - _, err = db.Exec(testStatement) + _, err = db.QueryContext(ctx, testStatement) + return err == nil } @@ -86,7 +92,6 @@ func (r *DSPAReconciler) ReconcileDatabase(ctx context.Context, dsp *dspav1alpha params *DSPAParams) error { log := r.Log.WithValues("namespace", dsp.Namespace).WithValues("dspa_name", dsp.Name) - databaseSpecified := dsp.Spec.Database != nil // DB field can be specified as an empty obj, confirm that subfields are also specified // By default if Database is empty, we deploy mariadb diff --git a/controllers/storage.go b/controllers/storage.go index f0655b358..62c42ae08 100644 --- a/controllers/storage.go +++ b/controllers/storage.go @@ -21,6 +21,7 @@ import ( "encoding/base64" "fmt" "net/http" + "time" "github.com/go-logr/logr" "github.com/minio/minio-go/v7" @@ -74,6 +75,9 @@ var ConnectAndQueryObjStore = func(ctx context.Context, log logr.Logger, endpoin return false } + ctx, cancel := context.WithTimeout(ctx, time.Second) + 
defer cancel() + _, err = minioClient.ListBuckets(ctx) if err != nil { log.Info(fmt.Sprintf("Could not perform ListBuckets health check on object storage endpoint: %s", endpoint)) From 243955eef7b84119584705cf737daf08a117019d Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Fri, 8 Sep 2023 12:57:21 -0400 Subject: [PATCH 45/47] Increase concurrent reconciles. Currently we have the default single thread reconcile loop, which is not very scalable and results in blocking reconciles during db/storage healthchecks. It also drastically reduces the time to deploy an increasing number of DSPAs. This change will increase concurrent reconciles to 10 to resolve some of these scalability issues. Signed-off-by: Humair Khan --- controllers/config/defaults.go | 9 +++++++++ controllers/database.go | 7 ++----- controllers/dspipeline_controller.go | 4 ++++ controllers/storage.go | 7 +++---- 4 files changed, 18 insertions(+), 9 deletions(-) diff --git a/controllers/config/defaults.go b/controllers/config/defaults.go index 15db8ece0..4abfdb23a 100644 --- a/controllers/config/defaults.go +++ b/controllers/config/defaults.go @@ -20,6 +20,7 @@ import ( dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1" "github.com/spf13/viper" "k8s.io/apimachinery/pkg/api/resource" + "time" ) const ( @@ -103,6 +104,14 @@ var requiredFields = []string{ OAuthProxyImagePath, } +// DefaultDBConnectionTimeout is the default DB storage healthcheck timeout +const DefaultDBConnectionTimeout = time.Second * 15 + +// DefaultObjStoreConnectionTimeout is the default Object storage healthcheck timeout +const DefaultObjStoreConnectionTimeout = time.Second * 15 + +const DefaultMaxConcurrentReconciles = 10 + func GetConfigRequiredFields() []string { return requiredFields } diff --git a/controllers/database.go b/controllers/database.go index ca9130b1d..33a083a53 100644 --- a/controllers/database.go +++ b/controllers/database.go @@ -20,11 +20,9 @@ import ( "database/sql" b64
"encoding/base64" "fmt" - "time" - _ "github.com/go-sql-driver/mysql" - dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1" + "github.com/opendatahub-io/data-science-pipelines-operator/controllers/config" ) const dbSecret = "mariadb/secret.yaml.tmpl" @@ -40,7 +38,7 @@ var dbTemplates = []string{ // extract to var for mocking in testing var ConnectAndQueryDatabase = func(host, port, username, password, dbname string) bool { // Create a context with a timeout of 1 second - ctx, cancel := context.WithTimeout(context.Background(), time.Second) + ctx, cancel := context.WithTimeout(context.Background(), config.DefaultDBConnectionTimeout) defer cancel() connectionString := fmt.Sprintf("%s:%s@tcp(%s:%s)/%s", username, password, host, port, dbname) @@ -52,7 +50,6 @@ var ConnectAndQueryDatabase = func(host, port, username, password, dbname string testStatement := "SELECT 1;" _, err = db.QueryContext(ctx, testStatement) - return err == nil } diff --git a/controllers/dspipeline_controller.go b/controllers/dspipeline_controller.go index d1d4e8658..1315ae83d 100644 --- a/controllers/dspipeline_controller.go +++ b/controllers/dspipeline_controller.go @@ -19,6 +19,7 @@ package controllers import ( "context" "fmt" + "sigs.k8s.io/controller-runtime/pkg/controller" "time" "github.com/go-logr/logr" @@ -573,6 +574,9 @@ func (r *DSPAReconciler) SetupWithManager(mgr ctrl.Manager) error { return reconcileRequests })). // TODO: Add watcher for ui cluster rbac since it has no owner + WithOptions(controller.Options{ + MaxConcurrentReconciles: config.DefaultMaxConcurrentReconciles, + }). 
Complete(r) } diff --git a/controllers/storage.go b/controllers/storage.go index 62c42ae08..6b6b654bd 100644 --- a/controllers/storage.go +++ b/controllers/storage.go @@ -20,13 +20,12 @@ import ( "context" "encoding/base64" "fmt" - "net/http" - "time" - "github.com/go-logr/logr" "github.com/minio/minio-go/v7" "github.com/minio/minio-go/v7/pkg/credentials" dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1" + "github.com/opendatahub-io/data-science-pipelines-operator/controllers/config" + "net/http" ) const storageSecret = "minio/secret.yaml.tmpl" @@ -75,7 +74,7 @@ var ConnectAndQueryObjStore = func(ctx context.Context, log logr.Logger, endpoin return false } - ctx, cancel := context.WithTimeout(ctx, time.Second) + ctx, cancel := context.WithTimeout(ctx, config.DefaultObjStoreConnectionTimeout) defer cancel() _, err = minioClient.ListBuckets(ctx) From 3a5df7eb03ff11fd519171a23a89f5caaeef8fed Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Fri, 15 Sep 2023 13:42:45 -0400 Subject: [PATCH 46/47] Update params.env for v1.2.2 Signed-off-by: Humair Khan --- config/base/params.env | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/config/base/params.env b/config/base/params.env index f5c40f565..d6fa7dd8c 100644 --- a/config/base/params.env +++ b/config/base/params.env @@ -1,11 +1,11 @@ -IMAGES_APISERVER=quay.io/opendatahub/ds-pipelines-api-server@sha256:05aeba9a47908604c33436caa40f3a2e9f0222808e5f7ae6be1414a4eebc6db5 -IMAGES_ARTIFACT=quay.io/opendatahub/ds-pipelines-artifact-manager@sha256:befff28d238a395e9198cfafdb5363b76a8d2eda09afea541c6ef2749ef9cdd7 -IMAGES_PERSISTENTAGENT=quay.io/opendatahub/ds-pipelines-persistenceagent@sha256:91d1d0e329b5dea8ee6f639dd0a522b1c366b7782062c47e06830a9f2d9478a9 -IMAGES_SCHEDULEDWORKFLOW=quay.io/opendatahub/ds-pipelines-scheduledworkflow@sha256:1271db88c61525b073a38318bc82e247c372272338041991b9d7269e0c733fe5 
-IMAGES_MLMDENVOY=quay.io/opendatahub/ds-pipelines-metadata-envoy@sha256:851386f25bec1051a472e87eb98b3b8016f80e1d2e05a4f5d0c4323cb1c99563 -IMAGES_MLMDGRPC=quay.io/opendatahub/ds-pipelines-metadata-grpc@sha256:f2ff89ac664916789e690f8939b5fb0881e6662211a9c40712779236b862735d -IMAGES_MLMDWRITER=quay.io/opendatahub/ds-pipelines-metadata-writer@sha256:4db014b53c4564b2b53f3148c8771cc8a45e609bf300cd3504ed4f93ff6318cd -IMAGES_DSPO=quay.io/opendatahub/data-science-pipelines-operator@sha256:a84d9f8a0c4e31dd0cccc1f91079bbc52d29e25f55d365bb173fb5bc2703f8c0 +IMAGES_APISERVER=quay.io/opendatahub/ds-pipelines-api-server@sha256:7d754b9ec5d8cc1f645315e2481c1b66d4f242dc2ff61d7c40d659f1ec539945 +IMAGES_ARTIFACT=quay.io/opendatahub/ds-pipelines-artifact-manager@sha256:8ad90b6e2be772ec1ee42149f9c408c6b01574a1a91dfdf96bcd70f783f8b2c7 +IMAGES_PERSISTENTAGENT=quay.io/opendatahub/ds-pipelines-persistenceagent@sha256:f79e855288bf6f7bd3176da3066a33903681cdc441689c813ed2882e25a3d90e +IMAGES_SCHEDULEDWORKFLOW=quay.io/opendatahub/ds-pipelines-scheduledworkflow@sha256:6d4a3744de95d74c7bd32f7ab10b2e67f7c602a77a42aec94b8a20f3ffdecd1b +IMAGES_MLMDENVOY=quay.io/opendatahub/ds-pipelines-metadata-envoy@sha256:c491e63c8885c7d59005f9305b77cd1fa776b50e63db90c4f8ccdee963759630 +IMAGES_MLMDGRPC=quay.io/opendatahub/ds-pipelines-metadata-grpc@sha256:4af88c246d77cce33099489090508734978aafa83a0a5745408ae8d139d5378a +IMAGES_MLMDWRITER=quay.io/opendatahub/ds-pipelines-metadata-writer@sha256:4e8b19b97d09ac9d9b5948d07b5c981468797dba6ab756a1f99b323ec3026e6b +IMAGES_DSPO=quay.io/opendatahub/data-science-pipelines-operator@sha256:739762a28a009d4a1ac039b494c574bad90a2496c2ef47c60e271f36074a00e1 IMAGES_CACHE=registry.access.redhat.com/ubi8/ubi-minimal@sha256:7394c071ed74ace08cfd51f881c94067fa7a570e7f7e4a0ef0aff1b4f6a2a949 IMAGES_MOVERESULTSIMAGE=registry.access.redhat.com/ubi8/ubi-micro@sha256:98f8ddc69b6210001351a5fd07993b3a758bc6af3702319493f7a5582dd65a9a 
IMAGES_MARIADB=registry.redhat.io/rhel8/mariadb-103@sha256:d0eea30ae4fc8c5bb06d0e4d61d92fba9c0ae40b8023f72702301b70a7537faa From 6a51d1c68a0eef9f9182b86fefc010f9a52b7f7b Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Fri, 15 Sep 2023 16:10:57 -0400 Subject: [PATCH 47/47] Add params for v1.2.2. Signed-off-by: Humair Khan --- config/base/params.env | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/config/base/params.env b/config/base/params.env index d6fa7dd8c..4fbc08834 100644 --- a/config/base/params.env +++ b/config/base/params.env @@ -1,11 +1,11 @@ -IMAGES_APISERVER=quay.io/opendatahub/ds-pipelines-api-server@sha256:7d754b9ec5d8cc1f645315e2481c1b66d4f242dc2ff61d7c40d659f1ec539945 -IMAGES_ARTIFACT=quay.io/opendatahub/ds-pipelines-artifact-manager@sha256:8ad90b6e2be772ec1ee42149f9c408c6b01574a1a91dfdf96bcd70f783f8b2c7 -IMAGES_PERSISTENTAGENT=quay.io/opendatahub/ds-pipelines-persistenceagent@sha256:f79e855288bf6f7bd3176da3066a33903681cdc441689c813ed2882e25a3d90e -IMAGES_SCHEDULEDWORKFLOW=quay.io/opendatahub/ds-pipelines-scheduledworkflow@sha256:6d4a3744de95d74c7bd32f7ab10b2e67f7c602a77a42aec94b8a20f3ffdecd1b +IMAGES_APISERVER=quay.io/opendatahub/ds-pipelines-api-server@sha256:b21893e06b2397de4cc882eb1839d5c0e15c406658215cd0e62976a5fde672f1 +IMAGES_ARTIFACT=quay.io/opendatahub/ds-pipelines-artifact-manager@sha256:b618fb0e5e12c9905ed6015956d7c02e9513b13cd35c0ccf885ebcbadc505cc3 +IMAGES_PERSISTENTAGENT=quay.io/opendatahub/ds-pipelines-persistenceagent@sha256:4efeac3b2355af1b672bfafbfc4869af09b7600fbe8e2d717d7d3ef9a6fba7f4 +IMAGES_SCHEDULEDWORKFLOW=quay.io/opendatahub/ds-pipelines-scheduledworkflow@sha256:1407d92448007f315cf772c3cc1edae9385e232ac3aed813d5b7385eb610de0e IMAGES_MLMDENVOY=quay.io/opendatahub/ds-pipelines-metadata-envoy@sha256:c491e63c8885c7d59005f9305b77cd1fa776b50e63db90c4f8ccdee963759630 IMAGES_MLMDGRPC=quay.io/opendatahub/ds-pipelines-metadata-grpc@sha256:4af88c246d77cce33099489090508734978aafa83a0a5745408ae8d139d5378a 
-IMAGES_MLMDWRITER=quay.io/opendatahub/ds-pipelines-metadata-writer@sha256:4e8b19b97d09ac9d9b5948d07b5c981468797dba6ab756a1f99b323ec3026e6b -IMAGES_DSPO=quay.io/opendatahub/data-science-pipelines-operator@sha256:739762a28a009d4a1ac039b494c574bad90a2496c2ef47c60e271f36074a00e1 +IMAGES_MLMDWRITER=quay.io/opendatahub/ds-pipelines-metadata-writer@sha256:42bd9d468254100eeee7cee1c7e90b72ccb3d7e3c3df4b9d3d3d8f8fb60403ae +IMAGES_DSPO=quay.io/opendatahub/data-science-pipelines-operator@sha256:8ddee87c903a20c8c32798e0b208efb67c20a82d3825e0f71ed29c59e6d05cea IMAGES_CACHE=registry.access.redhat.com/ubi8/ubi-minimal@sha256:7394c071ed74ace08cfd51f881c94067fa7a570e7f7e4a0ef0aff1b4f6a2a949 IMAGES_MOVERESULTSIMAGE=registry.access.redhat.com/ubi8/ubi-micro@sha256:98f8ddc69b6210001351a5fd07993b3a758bc6af3702319493f7a5582dd65a9a IMAGES_MARIADB=registry.redhat.io/rhel8/mariadb-103@sha256:d0eea30ae4fc8c5bb06d0e4d61d92fba9c0ae40b8023f72702301b70a7537faa