From 6eb7a2d3c5e1c677798a433143b6e8a7a7f185c5 Mon Sep 17 00:00:00 2001 From: Paul Meyer Date: Tue, 7 Nov 2023 18:19:30 +0100 Subject: [PATCH 01/35] WIP: exports are done async with workers --- go.mod | 4 +- go.sum | 6 + internals/export/utils.go | 49 ++++++ internals/export/worker.go | 160 +++++++++++++++++ internals/export/wrapper.go | 242 ++++++++++++++++++++++++++ internals/handlers/export_handlers.go | 37 ++-- plugins/standalone/plugin.go | 3 + 7 files changed, 476 insertions(+), 25 deletions(-) create mode 100644 internals/export/utils.go create mode 100644 internals/export/worker.go create mode 100644 internals/export/wrapper.go diff --git a/go.mod b/go.mod index 77172d7b..59a82ceb 100644 --- a/go.mod +++ b/go.mod @@ -19,7 +19,7 @@ require ( github.com/gorilla/context v1.1.1 github.com/gorilla/websocket v1.5.0 github.com/hashicorp/go-hclog v1.3.1 - github.com/hashicorp/go-plugin v1.3.0 + github.com/hashicorp/go-plugin v1.5.2 github.com/jmoiron/sqlx v1.2.0 github.com/json-iterator/go v1.1.12 github.com/lestrrat-go/jwx v1.2.6 @@ -38,7 +38,7 @@ require ( golang.org/x/net v0.12.0 golang.org/x/oauth2 v0.6.0 google.golang.org/grpc v1.40.0 - google.golang.org/protobuf v1.28.1 + google.golang.org/protobuf v1.28.2-0.20230222093303-bc1253ad3743 ) require ( diff --git a/go.sum b/go.sum index 0b9694ad..d55ca80c 100644 --- a/go.sum +++ b/go.sum @@ -43,6 +43,8 @@ github.com/PaesslerAG/gval v1.2.2 h1:Y7iBzhgE09IGTt5QgGQ2IdaYYYOU134YGHBThD+wm9E github.com/PaesslerAG/gval v1.2.2/go.mod h1:XRFLwvmkTEdYziLdaCeCa5ImcGVrfQbeNUbVR+C6xac= github.com/PaesslerAG/jsonpath v0.1.0 h1:gADYeifvlqK3R3i2cR5B4DGgxLXIPb3TRTH1mGi0jPI= github.com/PaesslerAG/jsonpath v0.1.0/go.mod h1:4BzmtoM/PI8fPO4aQGIusjGxGir2BzcV0grWtFzq1Y8= +github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= +github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE= github.com/alecthomas/template 
v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= @@ -227,6 +229,8 @@ github.com/hashicorp/go-hclog v1.3.1 h1:vDwF1DFNZhntP4DAjuTpOw3uEgMUpXh1pB5fW9Dq github.com/hashicorp/go-hclog v1.3.1/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M= github.com/hashicorp/go-plugin v1.3.0 h1:4d/wJojzvHV1I4i/rrjVaeuyxWrLzDE1mDCyDy8fXS8= github.com/hashicorp/go-plugin v1.3.0/go.mod h1:F9eH4LrE/ZsRdbwhfjs9k9HoDUwAHnYtXdgmf1AVNs0= +github.com/hashicorp/go-plugin v1.5.2 h1:aWv8eimFqWlsEiMrYZdPYl+FdHaBJSN4AWwGWfT1G2Y= +github.com/hashicorp/go-plugin v1.5.2/go.mod h1:w1sAEES3g3PuV/RzUrgow20W2uErMly84hhD3um1WL4= github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= @@ -753,6 +757,8 @@ google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQ google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.28.1 h1:d0NfwRgPtno5B1Wa6L2DAG+KivqkdutMf1UhdNx175w= google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +google.golang.org/protobuf v1.28.2-0.20230222093303-bc1253ad3743 h1:yqElulDvOF26oZ2O+2/aoX7mQ8DY/6+p39neytrycd8= +google.golang.org/protobuf v1.28.2-0.20230222093303-bc1253ad3743/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 
v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= diff --git a/internals/export/utils.go b/internals/export/utils.go new file mode 100644 index 00000000..c2e76d08 --- /dev/null +++ b/internals/export/utils.go @@ -0,0 +1,49 @@ +package export + +type CSVParameters struct { + Columns []string + ColumnsLabel []string + FormatColumnsData map[string]string + Separator rune + Limit int64 + ChunkSize int64 + FileName string +} + +// Equals compares two CSVParameters +func (p CSVParameters) Equals(Params CSVParameters) bool { + if p.FileName != Params.FileName { + return false + } + if p.Separator != Params.Separator { + return false + } + if p.Limit != Params.Limit { + return false + } + if len(p.Columns) != len(Params.Columns) { + return false + } + for i, column := range p.Columns { + if column != Params.Columns[i] { + return false + } + } + if len(p.ColumnsLabel) != len(Params.ColumnsLabel) { + return false + } + for i, columnLabel := range p.ColumnsLabel { + if columnLabel != Params.ColumnsLabel[i] { + return false + } + } + if len(p.FormatColumnsData) != len(Params.FormatColumnsData) { + return false + } + for key, value := range p.FormatColumnsData { + if value != Params.FormatColumnsData[key] { + return false + } + } + return true +} diff --git a/internals/export/worker.go b/internals/export/worker.go new file mode 100644 index 00000000..f63803c8 --- /dev/null +++ b/internals/export/worker.go @@ -0,0 +1,160 @@ +package export + +import ( + "compress/gzip" + "context" + "encoding/csv" + "errors" + "go.uber.org/zap" + "os" + "path/filepath" + "sync" +) + +type ExportWorker struct { + Mutex sync.Mutex + Available bool + QueueItem *ExportWrapperItem + Context context.Context + BasePath string +} + +func NewExportWorker(basePath string) *ExportWorker { + return &ExportWorker{ + Available: true, + QueueItem: nil, + BasePath: basePath, + } +} + +// SetAvailable sets the worker availability to true and clears the queueItem +func (e 
*ExportWorker) SetAvailable() { + e.Mutex.Lock() + defer e.Mutex.Unlock() + e.Available = true + + // set queueItem status to done + e.QueueItem.Mutex.Lock() + if e.QueueItem.Error == nil { + e.QueueItem.Status = StatusDone + } + e.QueueItem.Mutex.Unlock() + + e.QueueItem = nil +} + +// Start starts the export task +// It handles one queueItem at a time and when finished it stops the goroutine +func (e *ExportWorker) Start(item *ExportWrapperItem) { + defer e.SetAvailable() + e.Mutex.Lock() + e.QueueItem = item + e.QueueItem.SetStatus(StatusRunning) + e.Mutex.Unlock() + + // create file + path := filepath.Join(e.BasePath, item.Params.FileName) + // check if file exists + if _, err := os.Stat(path); errors.Is(err, os.ErrNotExist) { + e.Mutex.Lock() + e.QueueItem.SetError(err) + e.Mutex.Unlock() + return + } + + file, err := os.Create("data.csv.gz") + if err != nil { + e.Mutex.Lock() + e.QueueItem.SetError(err) + e.Mutex.Unlock() + return + } + defer file.Close() + + // Create a gzip writer + gzipWriter := gzip.NewWriter(file) + defer gzipWriter.Close() + + csvWriter := csv.NewWriter(gzipWriter) + + // start streamed export + streamedExport := NewStreamedExport() + var wg sync.WaitGroup + + // Increment the WaitGroup counter + wg.Add(2) // 2 goroutines + + var writerErr error = nil + + /** + * How streamed export works: + * 1. Browser opens connection + * 2. 
Two goroutines are started: + * - Export goroutine: each fact is processed one by one + * Each bulk of data is sent through a channel to the receiver goroutine + * - The receiver handles the incoming channel data and converts them to the CSV format + * After the conversion, the data is written and gzipped to a local file + */ + + go func() { + defer wg.Done() + defer close(streamedExport.Data) + + for _, f := range item.Facts { + writerErr = streamedExport.StreamedExportFactHitsFull(e.Context, f, item.Params.Limit) + if writerErr != nil { + zap.L().Error("Error during export (StreamedExportFactHitsFullV8)", zap.Error(err)) + break // break here when error occurs? + } + } + + }() + + // Chunk handler goroutine + go func() { + defer wg.Done() + first := true + labels := item.Params.ColumnsLabel + + for { + select { + case hits, ok := <-streamedExport.Data: + if !ok { // channel closed + return + } + + data, err := ConvertHitsToCSV(hits, item.Params.Columns, labels, item.Params.FormatColumnsData, item.Params.Separator) + + if err != nil { + zap.L().Error("ConvertHitsToCSV error during export (StreamedExportFactHitsFullV8)", zap.Error(err)) + cancel() + return + } + + // Write data + _, err = csvWriter.Write(data) + if err != nil { + zap.L().Error("Write error during export (StreamedExportFactHitsFullV8)", zap.Error(err)) + cancel() + return + } + // Flush data to be sent directly to browser + flusher.Flush() + + if first { + first = false + labels = []string{} + } + + case <-requestContext.Done(): + // Browser unexpectedly closed connection + writerErr = errors.New("browser unexpectedly closed connection") + cancel() + return + } + } + }() + + wg.Wait() + +} diff --git a/internals/export/wrapper.go b/internals/export/wrapper.go new file mode 100644 index 00000000..f5faae88 --- /dev/null +++ b/internals/export/wrapper.go @@ -0,0 +1,242 @@ +package export + +import ( + "context" + "github.com/myrteametrics/myrtea-engine-api/v5/internals/security/users" + 
"github.com/myrteametrics/myrtea-sdk/v4/engine" + "go.uber.org/zap" + "os" + "path/filepath" + "sync" + "time" +) + +const ( + CodeUserAdded = 1 + CodeAdded = 0 + CodeUserExists = -1 + CodeQueueFull = -2 + + // ExportWrapperItem statuses + StatusPending = 0 + StatusRunning = 1 + StatusDone = 2 + StatusError = 3 +) + +type ExportWrapper struct { + QueueMutex sync.Mutex + DoneMutex sync.Mutex + Workers []*ExportWorker + Queue []*ExportWrapperItem + Done []*ExportWrapperItem + DiskRetentionDays int + BasePath string + QueueMaxSize int +} + +type ExportWrapperItem struct { + Mutex sync.Mutex + Error error + Status int + Users []users.User // handles export ownership + // non mutexed fields + FactID int64 + Params CSVParameters + Date time.Time + Facts []engine.Fact +} + +func NewExportWrapperItem(factID int64, params CSVParameters, user users.User) *ExportWrapperItem { + return &ExportWrapperItem{ + FactID: factID, + Params: params, + Users: append([]users.User{}, user), + Date: time.Now(), + Status: 0, + Error: nil, + } +} + +func (ew *ExportWrapperItem) SetStatus(status int) { + ew.Mutex.Lock() + defer ew.Mutex.Unlock() + ew.Status = status +} + +func (ew *ExportWrapperItem) SetError(err error) { + ew.Mutex.Lock() + defer ew.Mutex.Unlock() + ew.Error = err + ew.Status = StatusError + zap.L().Error("Error happened during export worker execution", zap.Error(err)) +} + +// AddToQueue Adds a new export to the export worker queue +func (ew *ExportWrapper) AddToQueue(factID int64, params CSVParameters, user users.User) int { + ew.QueueMutex.Lock() + defer ew.QueueMutex.Unlock() + + for _, queueItem := range ew.Queue { + if queueItem.FactID == factID && queueItem.Params.Equals(params) { + + // check if user not already in queue.users + for _, u := range queueItem.Users { + if u.ID == user.ID { + return CodeUserExists + } + } + + queueItem.Users = append(queueItem.Users, user) + return CodeUserAdded + } + } + + if len(ew.Queue) >= ew.QueueMaxSize { + return CodeQueueFull + } 
+ + ew.Queue = append(ew.Queue, NewExportWrapperItem(factID, params, user)) + return CodeAdded +} + +func NewExportWrapper(basePath string, diskRetentionDays, queueMaxSize int) *ExportWrapper { + return &ExportWrapper{ + Workers: make([]*ExportWorker, 0), + Queue: make([]*ExportWrapperItem, 0), + Done: make([]*ExportWrapperItem, 0), + QueueMaxSize: queueMaxSize, + BasePath: basePath, + DiskRetentionDays: diskRetentionDays, + } +} + +// FindAvailableWorker finds an available worker and sets it to unavailable +func (ew *ExportWrapper) FindAvailableWorker() *ExportWorker { + ew.QueueMutex.Lock() + defer ew.QueueMutex.Unlock() + + for _, worker := range ew.Workers { + worker.Mutex.Lock() + if worker.Available { + worker.Available = false + worker.Mutex.Unlock() + return worker + } + worker.Mutex.Unlock() + } + + return nil +} + +// Init initializes the export wrapper +func (ew *ExportWrapper) Init(basePath string, workers int) { + // instantiate workers + for i := 0; i < workers; i++ { + ew.Workers = append(ew.Workers, NewExportWorker(basePath)) + } + go ew.StartDispatcher(context.Background()) +} + +// StartDispatcher starts the export tasks dispatcher & the expired files checker +func (ew *ExportWrapper) StartDispatcher(context context.Context) { + zap.L().Info("Starting export tasks dispatcher") + // every 5 seconds check if there is a new task to process in queue then check if there is an available worker + // if yes, start the worker with the task + // if no, continue to check + ticker := time.NewTicker(5 * time.Second) + expiredFileTicker := time.NewTicker(24 * time.Hour) + for { + select { + case <-ticker.C: + ew.QueueMutex.Lock() + if len(ew.Queue) > 0 { + for i := 0; i < len(ew.Queue); i++ { + x := ew.Queue[i] + w := ew.FindAvailableWorker() + + // if no worker available, stop the loop since no worker will be available for the next tasks + if w == nil { + break + } + + // attach the task to the worker and start the worker + go w.Start(x) + + // dequeue the 
task + ew.Queue = ew.Queue[1:] + + // add the task to the done list + ew.DoneMutex.Lock() + ew.Done = append(ew.Done, x) + ew.DoneMutex.Unlock() + + } + } + ew.QueueMutex.Unlock() + case <-expiredFileTicker.C: + err := ew.CheckForExpiredFiles() + + if err != nil { + zap.L().Error("Error during expired files check", zap.Error(err)) + } + + case <-context.Done(): + ticker.Stop() + return + } + } +} + +func (ew *ExportWrapper) CheckForExpiredFiles() error { + // Get all files in directory and check the last edit date + // if last edit date is older than diskRetentionDays, delete the file + zap.L().Info("Checking for expired files") + files, err := os.ReadDir(ew.BasePath) + if err != nil { + return err + } + + // delete all done tasks of ew.Done that are older than diskRetentionDays + ew.DoneMutex.Lock() + for i := 0; i < len(ew.Done); i++ { + x := ew.Done[i] + if time.Since(x.Date).Hours() > float64(ew.DiskRetentionDays*24) { + ew.Done = append(ew.Done[:i], ew.Done[i+1:]...) + i-- + } + } + ew.DoneMutex.Unlock() + + // count the number of deleted files + count := 0 + + for _, file := range files { + if file.IsDir() { + continue + } + + fi, err := os.Stat(file.Name()) + if err != nil { + zap.L().Error("Cannot get file info", zap.String("file", file.Name()), zap.Error(err)) + continue + } + + // skip if file is not a zip + if filepath.Ext(file.Name()) != ".zip" { + continue + } + + if time.Since(fi.ModTime()).Hours() > float64(ew.DiskRetentionDays*24) { + err = os.Remove(file.Name()) + if err != nil { + zap.L().Error("Cannot delete file", zap.String("file", file.Name()), zap.Error(err)) + continue + } + count++ + } + } + + zap.L().Info("Deleted expired files", zap.Int("count", count)) + return nil +} diff --git a/internals/handlers/export_handlers.go b/internals/handlers/export_handlers.go index 662eb00d..5d22f853 100644 --- a/internals/handlers/export_handlers.go +++ b/internals/handlers/export_handlers.go @@ -19,15 +19,6 @@ import ( "go.uber.org/zap" ) -type 
CSVParameters struct { - columns []string - columnsLabel []string - formatColumnsData map[string]string - separator rune - limit int64 - chunkSize int64 -} - // ExportFact godoc // @Summary Export facts // @Description Get all action definitions @@ -109,36 +100,36 @@ func ExportFact(w http.ResponseWriter, r *http.Request) { } -func GetCSVParameters(r *http.Request) CSVParameters { - result := CSVParameters{separator: ','} +func GetCSVParameters(r *http.Request) export.CSVParameters { + result := export.CSVParameters{Separator: ','} limit, err := QueryParamToOptionalInt64(r, "limit", -1) if err != nil { - result.limit = -1 + result.Limit = -1 } else { - result.limit = limit + result.Limit = limit } - result.columns = QueryParamToOptionalStringArray(r, "columns", ",", []string{}) - result.columnsLabel = QueryParamToOptionalStringArray(r, "columnsLabel", ",", []string{}) + result.Columns = QueryParamToOptionalStringArray(r, "columns", ",", []string{}) + result.ColumnsLabel = QueryParamToOptionalStringArray(r, "columnsLabel", ",", []string{}) formatColumnsData := QueryParamToOptionalStringArray(r, "formateColumns", ",", []string{}) - result.formatColumnsData = make(map[string]string) + result.FormatColumnsData = make(map[string]string) for _, formatData := range formatColumnsData { parts := strings.Split(formatData, ";") if len(parts) != 2 { continue } key := strings.TrimSpace(parts[0]) - result.formatColumnsData[key] = parts[1] + result.FormatColumnsData[key] = parts[1] } separator := r.URL.Query().Get("separator") if separator != "" { sep, size := utf8.DecodeRuneInString(separator) if size != 1 { - result.separator = ',' + result.Separator = ',' } else { - result.separator = sep + result.Separator = sep } } @@ -146,7 +137,7 @@ func GetCSVParameters(r *http.Request) CSVParameters { } // HandleStreamedExport actually only handles CSV -func HandleStreamedExport(requestContext context.Context, w http.ResponseWriter, facts []engine.Fact, fileName string, params 
CSVParameters) error { +func HandleStreamedExport(requestContext context.Context, w http.ResponseWriter, facts []engine.Fact, fileName string, params export.CSVParameters) error { w.Header().Set("Connection", "Keep-Alive") w.Header().Set("Transfer-Encoding", "chunked") w.Header().Set("X-Content-Type-Options", "nosniff") @@ -182,7 +173,7 @@ func HandleStreamedExport(requestContext context.Context, w http.ResponseWriter, defer close(streamedExport.Data) for _, f := range facts { - writerErr = streamedExport.StreamedExportFactHitsFull(ctx, f, params.limit) + writerErr = streamedExport.StreamedExportFactHitsFull(ctx, f, params.Limit) if writerErr != nil { zap.L().Error("Error during export (StreamedExportFactHitsFullV8)", zap.Error(err)) break // break here when error occurs? @@ -195,7 +186,7 @@ func HandleStreamedExport(requestContext context.Context, w http.ResponseWriter, go func() { defer wg.Done() first := true - labels := params.columnsLabel + labels := params.ColumnsLabel for { select { @@ -204,7 +195,7 @@ func HandleStreamedExport(requestContext context.Context, w http.ResponseWriter, return } - data, err := export.ConvertHitsToCSV(hits, params.columns, labels, params.formatColumnsData, params.separator) + data, err := export.ConvertHitsToCSV(hits, params.Columns, labels, params.FormatColumnsData, params.Separator) if err != nil { zap.L().Error("ConvertHitsToCSV error during export (StreamedExportFactHitsFullV8)", zap.Error(err)) diff --git a/plugins/standalone/plugin.go b/plugins/standalone/plugin.go index b2cb4bb3..c0c91b45 100644 --- a/plugins/standalone/plugin.go +++ b/plugins/standalone/plugin.go @@ -7,6 +7,7 @@ import ( "os" "os/exec" "runtime" + "time" "github.com/go-chi/chi/v5" "github.com/hashicorp/go-plugin" @@ -60,7 +61,9 @@ func NewPlugin(config pluginutils.PluginConfig) *Plugin { HandshakeConfig: Handshake, Plugins: pluginMap, Cmd: cmd, + StartTimeout: 2 * time.Minute, AllowedProtocols: []plugin.Protocol{plugin.ProtocolNetRPC}, + SkipHostEnv: 
false, }, } } From 06b94eda01f6edd0f3739bcbb50715e2fdaa6417 Mon Sep 17 00:00:00 2001 From: Paul Meyer Date: Wed, 8 Nov 2023 13:58:06 +0100 Subject: [PATCH 02/35] improved export worker --- internals/export/csv.go | 15 +++- internals/export/worker.go | 159 ++++++++++++++++++++---------------- internals/export/wrapper.go | 14 ++-- 3 files changed, 108 insertions(+), 80 deletions(-) diff --git a/internals/export/csv.go b/internals/export/csv.go index 7e746006..dd6c93cb 100644 --- a/internals/export/csv.go +++ b/internals/export/csv.go @@ -11,9 +11,7 @@ import ( "go.uber.org/zap" ) -func ConvertHitsToCSV(hits []reader.Hit, columns []string, columnsLabel []string, formatColumnsData map[string]string, separator rune) ([]byte, error) { - b := new(bytes.Buffer) - w := csv.NewWriter(b) +func WriteConvertHitsToCSV(w *csv.Writer, hits []reader.Hit, columns []string, columnsLabel []string, formatColumnsData map[string]string, separator rune) error { w.Comma = separator // avoid to print header when labels are empty @@ -45,9 +43,18 @@ func ConvertHitsToCSV(hits []reader.Hit, columns []string, columnsLabel []string } w.Flush() - if err := w.Error(); err != nil { + return w.Error() +} + +func ConvertHitsToCSV(hits []reader.Hit, columns []string, columnsLabel []string, formatColumnsData map[string]string, separator rune) ([]byte, error) { + b := new(bytes.Buffer) + w := csv.NewWriter(b) + err := WriteConvertHitsToCSV(w, hits, columns, columnsLabel, formatColumnsData, separator) + + if err != nil { return nil, err } + return b.Bytes(), nil } diff --git a/internals/export/worker.go b/internals/export/worker.go index f63803c8..f1e4374a 100644 --- a/internals/export/worker.go +++ b/internals/export/worker.go @@ -14,59 +14,57 @@ import ( type ExportWorker struct { Mutex sync.Mutex Available bool - QueueItem *ExportWrapperItem - Context context.Context - BasePath string + // + QueueItemId string + Context context.Context + BasePath string } func NewExportWorker(basePath string) 
*ExportWorker { return &ExportWorker{ - Available: true, - QueueItem: nil, - BasePath: basePath, + Available: true, + QueueItemId: "", + BasePath: basePath, } } // SetAvailable sets the worker availability to true and clears the queueItem -func (e *ExportWorker) SetAvailable() { +func (e *ExportWorker) SetAvailable(item *ExportWrapperItem) { e.Mutex.Lock() defer e.Mutex.Unlock() e.Available = true // set queueItem status to done - e.QueueItem.Mutex.Lock() - if e.QueueItem.Error == nil { - e.QueueItem.Status = StatusDone + item.Mutex.Lock() + if item.Error == nil { + item.Status = StatusDone } - e.QueueItem.Mutex.Unlock() + item.Mutex.Unlock() - e.QueueItem = nil + e.QueueItemId = "" } // Start starts the export task // It handles one queueItem at a time and when finished it stops the goroutine func (e *ExportWorker) Start(item *ExportWrapperItem) { - defer e.SetAvailable() + defer e.SetAvailable(item) e.Mutex.Lock() - e.QueueItem = item - e.QueueItem.SetStatus(StatusRunning) + e.QueueItemId = item.Id e.Mutex.Unlock() + item.SetStatus(StatusRunning) + // create file path := filepath.Join(e.BasePath, item.Params.FileName) // check if file exists if _, err := os.Stat(path); errors.Is(err, os.ErrNotExist) { - e.Mutex.Lock() - e.QueueItem.SetError(err) - e.Mutex.Unlock() + item.SetError(err) return } - file, err := os.Create("data.csv.gz") + file, err := os.Create(path) if err != nil { - e.Mutex.Lock() - e.QueueItem.SetError(err) - e.Mutex.Unlock() + item.SetError(err) return } defer file.Close() @@ -80,18 +78,21 @@ func (e *ExportWorker) Start(item *ExportWrapperItem) { // start streamed export streamedExport := NewStreamedExport() var wg sync.WaitGroup + var writerErr error - // Increment the WaitGroup counter - wg.Add(2) // 2 goroutines + // local context handling + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() - var writerErr error = nil + // Increment the WaitGroup counter + wg.Add(1) /** * How streamed export works: * 1. 
Browser opens connection - * 2. Two goroutines are started: + * 2. One goroutine is started: * - Export goroutine: each fact is processed one by one - * Each bulk of data is sent through a channel to the receiver goroutine + * Each bulk of data is sent through a channel to the receiver * - The receiver handles the incoming channel data and converts them to the CSV format * After the conversion, the data is written and gzipped to a local file */ @@ -101,60 +102,76 @@ func (e *ExportWorker) Start(item *ExportWrapperItem) { defer close(streamedExport.Data) for _, f := range item.Facts { - writerErr = streamedExport.StreamedExportFactHitsFull(e.Context, f, item.Params.Limit) + writerErr = streamedExport.StreamedExportFactHitsFull(ctx, f, item.Params.Limit) if writerErr != nil { - zap.L().Error("Error during export (StreamedExportFactHitsFullV8)", zap.Error(err)) break // break here when error occurs? } } - }() - // Chunk handler goroutine - go func() { - defer wg.Done() - first := true - labels := item.Params.ColumnsLabel - - for { - select { - case hits, ok := <-streamedExport.Data: - if !ok { // channel closed - return - } - - data, err := ConvertHitsToCSV(hits, item.Params.Columns, labels, item.Params.FormatColumnsData, item.Params.Separator) - - if err != nil { - zap.L().Error("ConvertHitsToCSV error during export (StreamedExportFactHitsFullV8)", zap.Error(err)) - cancel() - return - } - - // Write data - _, err = csvWriter.Write(data) - if err != nil { - zap.L().Error("Write error during export (StreamedExportFactHitsFullV8)", zap.Error(err)) - cancel() - return - } - // Flush data to be sent directly to browser - flusher.Flush() - - if first { - first = false - labels = []string{} - } - - case <-requestContext.Done(): - // Browser unexpectedly closed connection - writerErr = errors.New("browser unexpectedly closed connection") + // Chunk handler + first := true + labels := item.Params.ColumnsLabel + loop := true + + for loop { + select { + case hits, ok := 
<-streamedExport.Data: + if !ok { // channel closed + loop = false + break + } + + err := WriteConvertHitsToCSV(csvWriter, hits, item.Params.Columns, labels, item.Params.FormatColumnsData, item.Params.Separator) + + if err != nil { + zap.L().Error("WriteConvertHitsToCSV error during export", zap.Error(err)) cancel() - return + loop = false + break + } + + // Flush data + csvWriter.Flush() + + if first { + first = false + labels = []string{} } + + case <-e.Context.Done(): + zap.L().Warn("export was canceled") + cancel() + loop = false + break } - }() + } wg.Wait() + // error occurred, close file and delete + if writerErr != nil || err != nil { + if ctx.Err() != nil { + item.SetStatus(StatusCanceled) + zap.L().Warn("Export worker: canceled, deleting file...", zap.String("filePath", path)) + } else { + if err != nil { + item.SetError(err) + } else { + item.SetError(writerErr) + } + zap.L().Error("Export worker: error, deleting file...", zap.String("filePath", path), + zap.NamedError("err", err), zap.NamedError("writerErr", writerErr)) + } + + // close writer and file access before trying to delete file + _ = gzipWriter.Close() + _ = file.Close() + + err = os.Remove(path) + if err != nil { + zap.L().Error("Export worker: couldn't delete file", zap.String("filePath", path), zap.Error(err)) + } + } + } diff --git a/internals/export/wrapper.go b/internals/export/wrapper.go index f5faae88..7410f513 100644 --- a/internals/export/wrapper.go +++ b/internals/export/wrapper.go @@ -2,6 +2,7 @@ package export import ( "context" + "github.com/google/uuid" "github.com/myrteametrics/myrtea-engine-api/v5/internals/security/users" "github.com/myrteametrics/myrtea-sdk/v4/engine" "go.uber.org/zap" @@ -18,10 +19,11 @@ const ( CodeQueueFull = -2 // ExportWrapperItem statuses - StatusPending = 0 - StatusRunning = 1 - StatusDone = 2 - StatusError = 3 + StatusPending = 0 + StatusRunning = 1 + StatusDone = 2 + StatusError = 3 + StatusCanceled = 4 ) type ExportWrapper struct { @@ -41,6 +43,7 
@@ type ExportWrapperItem struct { Status int Users []users.User // handles export ownership // non mutexed fields + Id string // unique id that represents an export demand FactID int64 Params CSVParameters Date time.Time @@ -49,11 +52,12 @@ type ExportWrapperItem struct { func NewExportWrapperItem(factID int64, params CSVParameters, user users.User) *ExportWrapperItem { return &ExportWrapperItem{ + Id: uuid.New().String(), FactID: factID, Params: params, Users: append([]users.User{}, user), Date: time.Now(), - Status: 0, + Status: StatusPending, Error: nil, } } From f333236235387af17c8948569b3d03e35d8a0563 Mon Sep 17 00:00:00 2001 From: SchawnnDev Date: Wed, 8 Nov 2023 17:10:33 +0100 Subject: [PATCH 03/35] added tests for export package --- internals/export/csv_test.go | 24 ++++ internals/export/utils.go | 3 + internals/export/utils_test.go | 76 ++++++++++++ internals/export/worker.go | 34 +++--- internals/export/worker_test.go | 13 ++ internals/export/wrapper.go | 90 +++++++------- internals/export/wrapper_test.go | 197 +++++++++++++++++++++++++++++++ internals/export/zip_test.go | 15 +++ 8 files changed, 393 insertions(+), 59 deletions(-) create mode 100644 internals/export/utils_test.go create mode 100644 internals/export/worker_test.go create mode 100644 internals/export/wrapper_test.go create mode 100644 internals/export/zip_test.go diff --git a/internals/export/csv_test.go b/internals/export/csv_test.go index f27d08a9..b71c2d6f 100644 --- a/internals/export/csv_test.go +++ b/internals/export/csv_test.go @@ -1,6 +1,8 @@ package export import ( + "bytes" + csv2 "encoding/csv" "testing" "github.com/myrteametrics/myrtea-engine-api/v5/internals/reader" @@ -25,3 +27,25 @@ func TestConvertHitsToCSV(t *testing.T) { } t.Log("\n" + string(csv)) } + +func TestWriteConvertHitsToCSV(t *testing.T) { + hits := []reader.Hit{ + {ID: "1", Fields: map[string]interface{}{"a": "hello", "b": 20, "c": 3.123456, "d": map[string]interface{}{"e": "nested"}, "date": 
"2023-06-30T10:42:59.500"}}, + {ID: "2", Fields: map[string]interface{}{"b": 20, "c": 3.123456, "d": map[string]interface{}{"e": "nested"}, "date": "2023-06-30T10:42:59.500"}}, + {ID: "3", Fields: map[string]interface{}{"a": "hello", "b": 20, "c": 3.123456, "date": "2023-06-30T10:42:59.500"}}, + {ID: "1", Fields: map[string]interface{}{"a": "hello", "b": 20, "c": 3.123456, "d": map[string]interface{}{"zzz": "nested"}, "date": "2023-06-30T10:42:59.500"}}, + } + columns := []string{"a", "b", "c", "d.e", "date"} + columnsLabel := []string{"Label A", "Label B", "Label C", "Label D.E", "Date"} + formatColumnsData := map[string]string{ + "date": "02/01/2006", + } + b := new(bytes.Buffer) + w := csv2.NewWriter(b) + err := WriteConvertHitsToCSV(w, hits, columns, columnsLabel, formatColumnsData, ',') + if err != nil { + t.Log(err) + t.FailNow() + } + t.Log("\n" + string(b.Bytes())) +} diff --git a/internals/export/utils.go b/internals/export/utils.go index c2e76d08..02c7fac4 100644 --- a/internals/export/utils.go +++ b/internals/export/utils.go @@ -21,6 +21,9 @@ func (p CSVParameters) Equals(Params CSVParameters) bool { if p.Limit != Params.Limit { return false } + if p.ChunkSize != Params.ChunkSize { + return false + } if len(p.Columns) != len(Params.Columns) { return false } diff --git a/internals/export/utils_test.go b/internals/export/utils_test.go new file mode 100644 index 00000000..427eb1ec --- /dev/null +++ b/internals/export/utils_test.go @@ -0,0 +1,76 @@ +package export + +import ( + "github.com/myrteametrics/myrtea-sdk/v4/expression" + "testing" +) + +func TestEquals(t *testing.T) { + p1 := CSVParameters{FileName: "bla"} + p2 := CSVParameters{FileName: "bla2"} + expression.AssertEqual(t, p1.Equals(p2), false) + expression.AssertEqual(t, p1.Equals(p1), true) + + // make a full test with all variables in parameters filled + params3 := CSVParameters{ + Columns: []string{"col1", "col2"}, + ColumnsLabel: []string{"col1", "col2"}, + FormatColumnsData: 
map[string]string{"col1": "format1", "col2": "format2"}, + Separator: ';', + Limit: 10, + ChunkSize: 100, + FileName: "bla", + } + expression.AssertEqual(t, params3.Equals(p2), false) + expression.AssertEqual(t, params3.Equals(params3), true) + + // test separator + p1 = CSVParameters{Separator: ';'} + p2 = CSVParameters{Separator: ','} + expression.AssertEqual(t, p1.Equals(p2), false) + + // test limit + p1 = CSVParameters{Limit: 10} + p2 = CSVParameters{Limit: 101} + expression.AssertEqual(t, p1.Equals(p2), false) + + // test chunk size + p1 = CSVParameters{ChunkSize: 100} + p2 = CSVParameters{ChunkSize: 10} + expression.AssertEqual(t, p1.Equals(p2), false) + + // test columns size + p1 = CSVParameters{Columns: []string{"col1", "col2"}} + p2 = CSVParameters{Columns: []string{"col1", "col2", "col3"}} + expression.AssertEqual(t, p1.Equals(p2), false) + + // test columns values + p1 = CSVParameters{Columns: []string{"col1", "col2"}} + p2 = CSVParameters{Columns: []string{"col1", "col3"}} + expression.AssertEqual(t, p1.Equals(p2), false) + + // test columnsLabel size + p1 = CSVParameters{ColumnsLabel: []string{"col1", "col2"}} + p2 = CSVParameters{ColumnsLabel: []string{"col1", "col2", "col3"}} + expression.AssertEqual(t, p1.Equals(p2), false) + + // test columnsLabel values + p1 = CSVParameters{ColumnsLabel: []string{"col1", "col2"}} + p2 = CSVParameters{ColumnsLabel: []string{"col1", "col3"}} + expression.AssertEqual(t, p1.Equals(p2), false) + + // test formatColumnsData size + p1 = CSVParameters{FormatColumnsData: map[string]string{"col1": "format1", "col2": "format2"}} + p2 = CSVParameters{FormatColumnsData: map[string]string{"col1": "format1", "col2": "format2", "col3": "format3"}} + expression.AssertEqual(t, p1.Equals(p2), false) + + // test formatColumnsData values + p1 = CSVParameters{FormatColumnsData: map[string]string{"col1": "format1", "col2": "format2"}} + p2 = CSVParameters{FormatColumnsData: map[string]string{"col1": "format1", "col2": "format3"}} + 
expression.AssertEqual(t, p1.Equals(p2), false) + + // test formatColumnsData keys + p1 = CSVParameters{FormatColumnsData: map[string]string{"col1": "format1", "col2": "format2"}} + p2 = CSVParameters{FormatColumnsData: map[string]string{"col1": "format1", "col3": "format2"}} + expression.AssertEqual(t, p1.Equals(p2), false) +} diff --git a/internals/export/worker.go b/internals/export/worker.go index f1e4374a..b58949ac 100644 --- a/internals/export/worker.go +++ b/internals/export/worker.go @@ -4,7 +4,7 @@ import ( "compress/gzip" "context" "encoding/csv" - "errors" + "fmt" "go.uber.org/zap" "os" "path/filepath" @@ -14,10 +14,10 @@ import ( type ExportWorker struct { Mutex sync.Mutex Available bool + Cancel chan bool // channel to cancel the worker // - QueueItemId string - Context context.Context - BasePath string + QueueItemId string // id of the queueItem currently handled by the worker + BasePath string // base path where the file will be saved } func NewExportWorker(basePath string) *ExportWorker { @@ -25,6 +25,7 @@ func NewExportWorker(basePath string) *ExportWorker { Available: true, QueueItemId: "", BasePath: basePath, + Cancel: make(chan bool), } } @@ -36,7 +37,12 @@ func (e *ExportWorker) SetAvailable(item *ExportWrapperItem) { // set queueItem status to done item.Mutex.Lock() - if item.Error == nil { + // set status to error if error occurred + if item.Error != nil { + item.Status = StatusError + } + // set status to done if no error occurred + if item.Status != StatusError { item.Status = StatusDone } item.Mutex.Unlock() @@ -56,9 +62,9 @@ func (e *ExportWorker) Start(item *ExportWrapperItem) { // create file path := filepath.Join(e.BasePath, item.Params.FileName) - // check if file exists - if _, err := os.Stat(path); errors.Is(err, os.ErrNotExist) { - item.SetError(err) + // check if file not already exists + if _, err := os.Stat(path); err == nil { + item.SetError(fmt.Errorf("file with same name already exists")) return } @@ -69,13 +75,11 @@ func (e 
*ExportWorker) Start(item *ExportWrapperItem) { } defer file.Close() - // Create a gzip writer + // opens a gzip writer gzipWriter := gzip.NewWriter(file) defer gzipWriter.Close() csvWriter := csv.NewWriter(gzipWriter) - - // start streamed export streamedExport := NewStreamedExport() var wg sync.WaitGroup var writerErr error @@ -89,8 +93,6 @@ func (e *ExportWorker) Start(item *ExportWrapperItem) { /** * How streamed export works: - * 1. Browser opens connection - * 2. One goroutine is started: * - Export goroutine: each fact is processed one by one * Each bulk of data is sent through a channel to the receiver * - The receiver handles the incoming channel data and converts them to the CSV format @@ -139,11 +141,9 @@ func (e *ExportWorker) Start(item *ExportWrapperItem) { labels = []string{} } - case <-e.Context.Done(): - zap.L().Warn("export was canceled") + case <-e.Cancel: cancel() loop = false - break } } @@ -155,7 +155,7 @@ func (e *ExportWorker) Start(item *ExportWrapperItem) { item.SetStatus(StatusCanceled) zap.L().Warn("Export worker: canceled, deleting file...", zap.String("filePath", path)) } else { - if err != nil { + if err != nil { // priority to err item.SetError(err) } else { item.SetError(writerErr) diff --git a/internals/export/worker_test.go b/internals/export/worker_test.go new file mode 100644 index 00000000..b4f6ad7b --- /dev/null +++ b/internals/export/worker_test.go @@ -0,0 +1,13 @@ +package export + +import ( + "github.com/myrteametrics/myrtea-sdk/v4/expression" + "testing" +) + +func TestNewExportWorker(t *testing.T) { + worker := NewExportWorker("/tmp") + expression.AssertEqual(t, worker.BasePath, "/tmp") + expression.AssertEqual(t, worker.Available, true) + expression.AssertEqual(t, worker.QueueItemId, "") +} diff --git a/internals/export/wrapper.go b/internals/export/wrapper.go index 7410f513..0311fbc4 100644 --- a/internals/export/wrapper.go +++ b/internals/export/wrapper.go @@ -26,23 +26,12 @@ const ( StatusCanceled = 4 ) -type 
ExportWrapper struct { - QueueMutex sync.Mutex - DoneMutex sync.Mutex - Workers []*ExportWorker - Queue []*ExportWrapperItem - Done []*ExportWrapperItem - DiskRetentionDays int - BasePath string - QueueMaxSize int -} - type ExportWrapperItem struct { Mutex sync.Mutex Error error Status int Users []users.User // handles export ownership - // non mutexed fields + // non-critical fields Id string // unique id that represents an export demand FactID int64 Params CSVParameters @@ -50,6 +39,19 @@ type ExportWrapperItem struct { Facts []engine.Fact } +type ExportWrapper struct { + QueueMutex sync.Mutex + DoneMutex sync.Mutex + Workers []*ExportWorker + Queue []*ExportWrapperItem + // Done contains all tasks that have been taken out of the queue, including the ones that are still running + Done []*ExportWrapperItem + DiskRetentionDays int + BasePath string + QueueMaxSize int +} + +// NewExportWrapperItem creates a new export wrapper item func NewExportWrapperItem(factID int64, params CSVParameters, user users.User) *ExportWrapperItem { return &ExportWrapperItem{ Id: uuid.New().String(), @@ -62,12 +64,26 @@ func NewExportWrapperItem(factID int64, params CSVParameters, user users.User) * } } +// NewExportWrapper creates a new export wrapper +func NewExportWrapper(basePath string, diskRetentionDays, queueMaxSize int) *ExportWrapper { + return &ExportWrapper{ + Workers: make([]*ExportWorker, 0), + Queue: make([]*ExportWrapperItem, 0), + Done: make([]*ExportWrapperItem, 0), + QueueMaxSize: queueMaxSize, + BasePath: basePath, + DiskRetentionDays: diskRetentionDays, + } +} + +// SetStatus sets the status of the export wrapper item func (ew *ExportWrapperItem) SetStatus(status int) { ew.Mutex.Lock() defer ew.Mutex.Unlock() ew.Status = status } +// SetError sets the error and status of the export wrapper item func (ew *ExportWrapperItem) SetError(err error) { ew.Mutex.Lock() defer ew.Mutex.Unlock() @@ -76,6 +92,15 @@ func (ew *ExportWrapperItem) SetError(err error) { 
zap.L().Error("Error happened during export worker execution", zap.Error(err)) } +// Init initializes the export wrapper +func (ew *ExportWrapper) Init(workers int) { + // instantiate workers + for i := 0; i < workers; i++ { + ew.Workers = append(ew.Workers, NewExportWorker(ew.BasePath)) + } + go ew.StartDispatcher(context.Background()) +} + // AddToQueue Adds a new export to the export worker queue func (ew *ExportWrapper) AddToQueue(factID int64, params CSVParameters, user users.User) int { ew.QueueMutex.Lock() @@ -104,22 +129,8 @@ func (ew *ExportWrapper) AddToQueue(factID int64, params CSVParameters, user use return CodeAdded } -func NewExportWrapper(basePath string, diskRetentionDays, queueMaxSize int) *ExportWrapper { - return &ExportWrapper{ - Workers: make([]*ExportWorker, 0), - Queue: make([]*ExportWrapperItem, 0), - Done: make([]*ExportWrapperItem, 0), - QueueMaxSize: queueMaxSize, - BasePath: basePath, - DiskRetentionDays: diskRetentionDays, - } -} - // FindAvailableWorker finds an available worker and sets it to unavailable func (ew *ExportWrapper) FindAvailableWorker() *ExportWorker { - ew.QueueMutex.Lock() - defer ew.QueueMutex.Unlock() - for _, worker := range ew.Workers { worker.Mutex.Lock() if worker.Available { @@ -133,15 +144,6 @@ func (ew *ExportWrapper) FindAvailableWorker() *ExportWorker { return nil } -// Init initializes the export wrapper -func (ew *ExportWrapper) Init(basePath string, workers int) { - // instantiate workers - for i := 0; i < workers; i++ { - ew.Workers = append(ew.Workers, NewExportWorker(basePath)) - } - go ew.StartDispatcher(context.Background()) -} - // StartDispatcher starts the export tasks dispatcher & the expired files checker func (ew *ExportWrapper) StartDispatcher(context context.Context) { zap.L().Info("Starting export tasks dispatcher") @@ -192,6 +194,8 @@ func (ew *ExportWrapper) StartDispatcher(context context.Context) { } } +// CheckForExpiredFiles checks for expired files in the export directory and deletes 
them +// it also deletes the done tasks that are older than diskRetentionDays func (ew *ExportWrapper) CheckForExpiredFiles() error { // Get all files in directory and check the last edit date // if last edit date is older than diskRetentionDays, delete the file @@ -220,21 +224,23 @@ func (ew *ExportWrapper) CheckForExpiredFiles() error { continue } - fi, err := os.Stat(file.Name()) + filePath := filepath.Join(ew.BasePath, file.Name()) + + fi, err := os.Stat(filePath) if err != nil { - zap.L().Error("Cannot get file info", zap.String("file", file.Name()), zap.Error(err)) + zap.L().Error("Cannot get file info", zap.String("file", filePath), zap.Error(err)) continue } // skip if file is not a zip - if filepath.Ext(file.Name()) != ".zip" { - continue - } + //if filepath.Ext(file.Name()) != ".zip" { + // continue + //} if time.Since(fi.ModTime()).Hours() > float64(ew.DiskRetentionDays*24) { - err = os.Remove(file.Name()) + err = os.Remove(filePath) if err != nil { - zap.L().Error("Cannot delete file", zap.String("file", file.Name()), zap.Error(err)) + zap.L().Error("Cannot delete file", zap.String("file", filePath), zap.Error(err)) continue } count++ diff --git a/internals/export/wrapper_test.go b/internals/export/wrapper_test.go new file mode 100644 index 00000000..68de99eb --- /dev/null +++ b/internals/export/wrapper_test.go @@ -0,0 +1,197 @@ +package export + +import ( + "fmt" + "github.com/google/uuid" + "github.com/myrteametrics/myrtea-engine-api/v5/internals/security/users" + "github.com/myrteametrics/myrtea-sdk/v4/expression" + "os" + "path/filepath" + "testing" + "time" +) + +func TestNewExportWrapper(t *testing.T) { + wrapper := NewExportWrapper("/tmp", 1, 1) + expression.AssertEqual(t, wrapper.BasePath, "/tmp") + expression.AssertEqual(t, wrapper.QueueMaxSize, 1) + expression.AssertEqual(t, wrapper.DiskRetentionDays, 1) +} + +func TestNewExportWrapperItem(t *testing.T) { + item := NewExportWrapperItem(1, CSVParameters{}, users.User{ID: uuid.New()}) + 
expression.AssertNotEqual(t, item.Id, "") + expression.AssertEqual(t, item.FactID, int64(1)) + expression.AssertEqual(t, item.Params.Equals(CSVParameters{}), true) + expression.AssertEqual(t, item.Status, StatusPending) +} + +func TestExportWrapperItem_SetError(t *testing.T) { + item := NewExportWrapperItem(1, CSVParameters{}, users.User{ID: uuid.New()}) + expression.AssertEqual(t, item.Status, StatusPending) + item.SetError(fmt.Errorf("error")) + expression.AssertEqual(t, item.Status, StatusError) + expression.AssertNotEqual(t, item.Error, nil) +} + +func TestExportWrapperItem_SetStatus(t *testing.T) { + item := NewExportWrapperItem(1, CSVParameters{}, users.User{ID: uuid.New()}) + expression.AssertEqual(t, item.Status, StatusPending) + item.SetStatus(StatusRunning) + expression.AssertEqual(t, item.Status, StatusRunning) +} + +func TestAddToQueue(t *testing.T) { + wrapper := NewExportWrapper("/tmp", 1, 1) + user1 := users.User{ID: uuid.New()} + user2 := users.User{ID: uuid.New()} + csvParams := CSVParameters{} + expression.AssertEqual(t, wrapper.AddToQueue(1, csvParams, user1), CodeAdded, "AddToQueue should return CodeAdded") + expression.AssertEqual(t, wrapper.AddToQueue(1, csvParams, user1), CodeUserExists, "AddToQueue should return CodeUserExists") + expression.AssertEqual(t, wrapper.AddToQueue(1, csvParams, user2), CodeUserAdded, "AddToQueue should return CodeUserAdded") + expression.AssertEqual(t, wrapper.AddToQueue(2, csvParams, user2), CodeQueueFull, "AddToQueue should return CodeQueueFull") +} + +func TestFindAvailableWorker(t *testing.T) { + wrapper := NewExportWrapper("/tmp", 1, 1) + // since wrapper.Init() starts the dispatcher worker that we don't want to run in this test, we initialize the workers manually + for i := 0; i < 2; i++ { + wrapper.Workers = append(wrapper.Workers, NewExportWorker("/tmp")) + } + w1 := wrapper.FindAvailableWorker() + expression.AssertNotEqual(t, w1, nil) + w2 := wrapper.FindAvailableWorker() + expression.AssertNotEqual(t, 
w2, nil) + w3 := wrapper.FindAvailableWorker() + expression.AssertEqual(t, w3, (*ExportWorker)(nil)) +} + +func TestStartDispatcher(t *testing.T) { + // we don't want that the worker try to export data, therefore we will create a temporary directory with a temp file + // so that the worker will not be able to create the file and will return an error + dname, err := os.MkdirTemp("", "exportdispatcher") + if err != nil { + t.Error(err) + t.FailNow() + } + defer os.RemoveAll(dname) + + // create a file that is 2 days old + file, err := os.CreateTemp(dname, "exportdispatcher") + if err != nil { + t.Error(err) + t.FailNow() + } + fileName := filepath.Base(file.Name()) + _ = file.Close() + + wrapper := NewExportWrapper(dname, 1, 1) + wrapper.Init(1) + expression.AssertEqual(t, len(wrapper.Workers), 1) + // sleep one second to let the goroutine start + fmt.Println("Sleeping 1 second to let the goroutine start") + time.Sleep(1 * time.Second) + + worker := wrapper.Workers[0] + + // check if the worker is available + worker.Mutex.Lock() + expression.AssertEqual(t, worker.Available, true) + worker.Mutex.Unlock() + + // add a task to the queue and check if the task was added to queue + expression.AssertEqual(t, wrapper.AddToQueue(1, CSVParameters{FileName: fileName}, users.User{ID: uuid.New()}), CodeAdded, "AddToQueue should return CodeAdded") + wrapper.QueueMutex.Lock() + item := wrapper.Queue[0] + expression.AssertEqual(t, len(wrapper.Queue), 1) + wrapper.QueueMutex.Unlock() + + // sleep another 5 seconds to let the goroutine handle the task + fmt.Println("Sleeping 5 seconds to let the goroutine handle the task") + time.Sleep(5 * time.Second) + + wrapper.QueueMutex.Lock() + expression.AssertEqual(t, len(wrapper.Queue), 0) + wrapper.QueueMutex.Unlock() + + wrapper.DoneMutex.Lock() + expression.AssertEqual(t, len(wrapper.Done), 1) + foundItem := wrapper.Done[0] + wrapper.DoneMutex.Unlock() + + expression.AssertEqual(t, item.Id, foundItem.Id) + + fmt.Println("Sleeping 1 second 
to wait for status") + time.Sleep(2 * time.Second) + + // could not create file + foundItem.Mutex.Lock() + expression.AssertEqual(t, foundItem.Status, StatusError) + foundItem.Mutex.Unlock() +} + +func TestCheckForExpiredFiles(t *testing.T) { + // first test : check if files are deleted + dname, err := os.MkdirTemp("", "export") + if err != nil { + t.Error(err) + t.FailNow() + } + defer os.RemoveAll(dname) + + // create a file that is 2 days old + file, err := os.CreateTemp(dname, "export") + if err != nil { + t.Error(err) + t.FailNow() + } + file1Name := file.Name() + _ = file.Close() + err = os.Chtimes(file1Name, time.Now().AddDate(0, 0, -2), time.Now().AddDate(0, 0, -2)) + if err != nil { + t.Error(err) + t.FailNow() + } + + // create a freshly created file + file2, err := os.CreateTemp(dname, "export") + if err != nil { + t.Error(err) + t.FailNow() + } + file2Name := file2.Name() + _ = file2.Close() + + wrapper := NewExportWrapper(dname, 1, 1) + err = wrapper.CheckForExpiredFiles() + if err != nil { + t.Error(err) + t.FailNow() + } + + // check that the file has been deleted + _, err = os.Stat(file1Name) + if !os.IsNotExist(err) { + t.Error("File1 should have been deleted") + t.FailNow() + } + + _, err = os.Stat(file2Name) + if os.IsNotExist(err) { + t.Error("File2 should not have been deleted") + t.FailNow() + } + + // second test : check if expired exports are deleted + goodDate := time.Now() + wrapper.Done = append(wrapper.Done, &ExportWrapperItem{Date: time.Now().AddDate(0, 0, -2)}) + wrapper.Done = append(wrapper.Done, &ExportWrapperItem{Date: goodDate}) + expression.AssertEqual(t, len(wrapper.Done), 2) + err = wrapper.CheckForExpiredFiles() + if err != nil { + t.Error(err) + t.FailNow() + } + expression.AssertEqual(t, len(wrapper.Done), 1) + expression.AssertEqual(t, wrapper.Done[0].Date, goodDate) +} diff --git a/internals/export/zip_test.go b/internals/export/zip_test.go new file mode 100644 index 00000000..fc73adeb --- /dev/null +++ 
b/internals/export/zip_test.go @@ -0,0 +1,15 @@ +package export + +import ( + "github.com/myrteametrics/myrtea-sdk/v4/expression" + "testing" +) + +func TestCreatePasswordProtectedZipFile(t *testing.T) { + file, err := CreatePasswordProtectedZipFile("test.zip", []byte("test")) + if err != nil { + t.Error(err) + t.FailNow() + } + expression.AssertNotEqual(t, len(file), 0) +} From fea23da989ed311773b6f84c7a3d258d8eaa0093 Mon Sep 17 00:00:00 2001 From: SchawnnDev Date: Wed, 8 Nov 2023 19:29:50 +0100 Subject: [PATCH 04/35] delete forgotten zip_test --- internals/export/zip_test.go | 15 --------------- 1 file changed, 15 deletions(-) delete mode 100644 internals/export/zip_test.go diff --git a/internals/export/zip_test.go b/internals/export/zip_test.go deleted file mode 100644 index fc73adeb..00000000 --- a/internals/export/zip_test.go +++ /dev/null @@ -1,15 +0,0 @@ -package export - -import ( - "github.com/myrteametrics/myrtea-sdk/v4/expression" - "testing" -) - -func TestCreatePasswordProtectedZipFile(t *testing.T) { - file, err := CreatePasswordProtectedZipFile("test.zip", []byte("test")) - if err != nil { - t.Error(err) - t.FailNow() - } - expression.AssertNotEqual(t, len(file), 0) -} From b4591f6884c9d0e2e3bfc16321de51fd7fe8823a Mon Sep 17 00:00:00 2001 From: Paul Meyer Date: Thu, 9 Nov 2023 15:40:09 +0100 Subject: [PATCH 05/35] Added export routes --- internals/handlers/export_handlers.go | 72 ++++++++++++++++++++++-- internals/handlers/processor_handlers.go | 2 +- internals/router/router.go | 53 +++++++++-------- internals/router/routes.go | 13 +++-- main.go | 20 ++++++- 5 files changed, 124 insertions(+), 36 deletions(-) diff --git a/internals/handlers/export_handlers.go b/internals/handlers/export_handlers.go index 5d22f853..62f1604a 100644 --- a/internals/handlers/export_handlers.go +++ b/internals/handlers/export_handlers.go @@ -19,16 +19,26 @@ import ( "go.uber.org/zap" ) -// ExportFact godoc -// @Summary Export facts -// @Description Get all action 
definitions -// @Tags ExportFact +type ExportHandler struct { + exportWrapper *export.ExportWrapper +} + +func NewExportHandler(exportWrapper *export.ExportWrapper) *ExportHandler { + return &ExportHandler{ + exportWrapper: exportWrapper, + } +} + +// ExportFactStreamed godoc +// @Summary CSV streamed export facts in chunks +// @Description CSV Streamed export for facts in chunks +// @Tags ExportFactStreamed // @Produce octet-stream // @Security Bearer // @Success 200 {file} Returns data to be saved into a file // @Failure 500 "internal server error" // @Router /engine/export/facts/{id} [get] -func ExportFact(w http.ResponseWriter, r *http.Request) { +func ExportFactStreamed(w http.ResponseWriter, r *http.Request) { id := chi.URLParam(r, "id") idFact, err := strconv.ParseInt(id, 10, 64) @@ -236,3 +246,55 @@ func HandleStreamedExport(requestContext context.Context, w http.ResponseWriter, return err } + +// GetFacts godoc +// @Summary Get all user exports +// @Description Get all user exports +// @Tags Exports +// @Produce json +// @Security Bearer +// @Success 200 "Status OK" +// @Failure 500 "internal server error" +// @Router /service/exports [post] +func (e *ExportHandler) GetExports(w http.ResponseWriter, r *http.Request) { + +} + +// GetFacts godoc +// @Summary Get all user exports +// @Description Get all user exports +// @Tags Exports +// @Produce json +// @Security Bearer +// @Success 200 "Status OK" +// @Failure 500 "internal server error" +// @Router /service/exports/{id} [post] +func (e *ExportHandler) GetExport(w http.ResponseWriter, r *http.Request) { + +} + +// GetFacts godoc +// @Summary Get all user exports +// @Description Get all user exports +// @Tags Exports +// @Produce json +// @Security Bearer +// @Success 200 "Status OK" +// @Failure 500 "internal server error" +// @Router /service/exports/{id} [post] +func (e *ExportHandler) DeleteExport(w http.ResponseWriter, r *http.Request) { + +} + +// GetFacts godoc +// @Summary Get all user exports +// 
@Description Get all user exports +// @Tags Exports +// @Produce json +// @Security Bearer +// @Success 200 "Status OK" +// @Failure 500 "internal server error" +// @Router /service/exports/fact/{id} [post] +func (e *ExportHandler) ExportFact(w http.ResponseWriter, r *http.Request) { + +} diff --git a/internals/handlers/processor_handlers.go b/internals/handlers/processor_handlers.go index 81a60c47..cafa02c4 100644 --- a/internals/handlers/processor_handlers.go +++ b/internals/handlers/processor_handlers.go @@ -79,7 +79,7 @@ func PostObjects(w http.ResponseWriter, r *http.Request) { // @Success 200 "Status OK" // @Failure 429 "Processing queue is full please retry later" // @Failure 500 "internal server error" -// @Router /service/ingester [post] +// @Router /service/aggregates [post] func (handler *ProcessorHandler) PostAggregates(w http.ResponseWriter, r *http.Request) { var aggregates []scheduler.ExternalAggregate err := json.NewDecoder(r.Body).Decode(&aggregates) diff --git a/internals/router/router.go b/internals/router/router.go index 7abe0063..fe490f34 100644 --- a/internals/router/router.go +++ b/internals/router/router.go @@ -33,7 +33,12 @@ type Config struct { VerboseError bool AuthenticationMode string LogLevel zap.AtomicLevel - PluginCore *plugin.Core +} + +type Services struct { + PluginCore *plugin.Core + ProcessorHandler *handlers.ProcessorHandler + ExportHandler *handlers.ExportHandler } // Check clean up the configuration and logs comments if required @@ -68,7 +73,7 @@ func (config *Config) Check() { // New returns a new fully configured instance of chi.Mux // It instanciates all middlewares including the security ones, all routes and route groups -func New(config Config) *chi.Mux { +func New(config Config, services Services) *chi.Mux { config.Check() @@ -76,7 +81,7 @@ func New(config Config) *chi.Mux { // Global middleware stack // TODO: Add CORS middleware if config.CORS { - cors := cors.New(cors.Options{ + corsHandler := cors.New(cors.Options{ 
AllowedOrigins: []string{"*"}, AllowedMethods: []string{"GET", "POST", "PUT", "DELETE", "OPTIONS"}, AllowedHeaders: []string{"Accept", "Authorization", "Content-Type", "X-CSRF-Token"}, @@ -84,7 +89,7 @@ func New(config Config) *chi.Mux { AllowCredentials: true, MaxAge: 300, // Maximum value not ignored by any of major browsers }) - r.Use(cors.Handler) + r.Use(corsHandler.Handler) } r.Use(chimiddleware.SetHeader("Strict-Transport-Security", "max-age=63072000; includeSubDomains")) @@ -105,11 +110,11 @@ func New(config Config) *chi.Mux { switch config.AuthenticationMode { case "BASIC": - routes, err = buildRoutesV3Basic(config) + routes, err = buildRoutesV3Basic(config, services) case "SAML": - routes, err = buildRoutesV3SAML(config) + routes, err = buildRoutesV3SAML(config, services) case "OIDC": - routes, err = buildRoutesV3OIDC(config) + routes, err = buildRoutesV3OIDC(config, services) default: zap.L().Panic("Authentication mode not supported", zap.String("AuthenticationMode", config.AuthenticationMode)) return nil @@ -124,7 +129,7 @@ func New(config Config) *chi.Mux { return r } -func buildRoutesV3Basic(config Config) (func(r chi.Router), error) { +func buildRoutesV3Basic(config Config, services Services) (func(r chi.Router), error) { signingKey := []byte(security.RandString(128)) securityMiddleware := security.NewMiddlewareJWT(signingKey, security.NewDatabaseAuth(postgres.DB())) @@ -163,7 +168,7 @@ func buildRoutesV3Basic(config Config) (func(r chi.Router), error) { rg.HandleFunc("/log_level", config.LogLevel.ServeHTTP) rg.Mount("/engine", engineRouter()) - for _, plugin := range config.PluginCore.Plugins { + for _, plugin := range services.PluginCore.Plugins { rg.Mount(plugin.Plugin.HandlerPrefix(), plugin.Plugin.Handler()) rg.HandleFunc(fmt.Sprintf("/plugin%s", plugin.Plugin.HandlerPrefix()), func(w http.ResponseWriter, r *http.Request) { render.JSON(w, r, map[string]interface{}{"loaded": true}) @@ -202,12 +207,12 @@ func buildRoutesV3Basic(config Config) 
(func(r chi.Router), error) { // } // rg.Use(chimiddleware.SetHeader("Content-Type", "application/json")) - rg.Mount("/service", serviceRouter()) + rg.Mount("/service", serviceRouter(services)) }) }, nil } -func buildRoutesV3SAML(config Config) (func(r chi.Router), error) { +func buildRoutesV3SAML(config Config, services Services) (func(r chi.Router), error) { samlConfig := SamlSPMiddlewareConfig{ MetadataMode: viper.GetString("AUTHENTICATION_SAML_METADATA_MODE"), @@ -254,7 +259,7 @@ func buildRoutesV3SAML(config Config) (func(r chi.Router), error) { rg.HandleFunc("/log_level", config.LogLevel.ServeHTTP) rg.Mount("/engine", engineRouter()) - for _, plugin := range config.PluginCore.Plugins { + for _, plugin := range services.PluginCore.Plugins { rg.Mount(plugin.Plugin.HandlerPrefix(), plugin.Plugin.Handler()) rg.HandleFunc(fmt.Sprintf("/plugin%s", plugin.Plugin.HandlerPrefix()), func(w http.ResponseWriter, r *http.Request) { render.JSON(w, r, map[string]interface{}{"loaded": true}) @@ -286,20 +291,12 @@ func buildRoutesV3SAML(config Config) (func(r chi.Router), error) { // } rg.Use(chimiddleware.SetHeader("Content-Type", "application/json")) - rg.Mount("/service", serviceRouter()) + rg.Mount("/service", serviceRouter(services)) }) }, nil } -// ReverseProxy act as a reverse proxy for any plugin http handlers -func ReverseProxy(plugin plugin.MyrteaPlugin) http.HandlerFunc { - url, _ := url.Parse(fmt.Sprintf("http://localhost:%d", plugin.ServicePort())) - return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - httputil.NewSingleHostReverseProxy(url).ServeHTTP(w, r) - }) -} - -func buildRoutesV3OIDC(config Config) (func(r chi.Router), error) { +func buildRoutesV3OIDC(config Config, services Services) (func(r chi.Router), error) { return func(r chi.Router) { // Public routes @@ -326,7 +323,7 @@ func buildRoutesV3OIDC(config Config) (func(r chi.Router), error) { rg.HandleFunc("/log_level", config.LogLevel.ServeHTTP) rg.Mount("/engine", engineRouter()) - 
for _, plugin := range config.PluginCore.Plugins { + for _, plugin := range services.PluginCore.Plugins { rg.Mount(plugin.Plugin.HandlerPrefix(), plugin.Plugin.Handler()) rg.HandleFunc(fmt.Sprintf("/plugin%s", plugin.Plugin.HandlerPrefix()), func(w http.ResponseWriter, r *http.Request) { render.JSON(w, r, map[string]interface{}{"loaded": true}) @@ -350,7 +347,15 @@ func buildRoutesV3OIDC(config Config) (func(r chi.Router), error) { r.Group(func(rg chi.Router) { rg.Use(chimiddleware.SetHeader("Content-Type", "application/json")) - rg.Mount("/service", serviceRouter()) + rg.Mount("/service", serviceRouter(services)) }) }, nil } + +// ReverseProxy act as a reverse proxy for any plugin http handlers +func ReverseProxy(plugin plugin.MyrteaPlugin) http.HandlerFunc { + pluginUrl, _ := url.Parse(fmt.Sprintf("http://localhost:%d", plugin.ServicePort())) + return func(w http.ResponseWriter, r *http.Request) { + httputil.NewSingleHostReverseProxy(pluginUrl).ServeHTTP(w, r) + } +} diff --git a/internals/router/routes.go b/internals/router/routes.go index 30a6c8c3..5d431856 100644 --- a/internals/router/routes.go +++ b/internals/router/routes.go @@ -173,7 +173,7 @@ func engineRouter() http.Handler { r.Get("/connector/{id}/executions/last", handlers.GetlastConnectorExecutionDateTime) - r.Get("/facts/{id}/export", handlers.ExportFact) + r.Get("/facts/{id}/streamedexport", handlers.ExportFactStreamed) r.Get("/variablesconfig", handlers.GetVariablesConfig) r.Get("/variablesconfig/{id}", handlers.GetVariableConfig) @@ -185,16 +185,21 @@ func engineRouter() http.Handler { return r } -func serviceRouter() http.Handler { +func serviceRouter(services Services) http.Handler { r := chi.NewRouter() - processorHandler := handlers.NewProcessorHandler() r.Post("/objects", handlers.PostObjects) - r.Post("/aggregates", processorHandler.PostAggregates) + r.Post("/aggregates", services.ProcessorHandler.PostAggregates) r.Get("/externalconfigs", handlers.GetExternalConfigs) 
r.Get("/externalconfigs/{id}", handlers.GetExternalConfig) r.Get("/externalconfigs/name/{name}", handlers.GetExternalConfigByName) + // exports + r.Get("/exports", services.ExportHandler.GetExports) + r.Get("/exports/{id}", services.ExportHandler.GetExport) + r.Delete("/exports/{id}", services.ExportHandler.DeleteExport) + r.Post("/exports/fact/{id}", services.ExportHandler.ExportFact) + return r } diff --git a/main.go b/main.go index 12fcf053..88823692 100644 --- a/main.go +++ b/main.go @@ -2,6 +2,8 @@ package main import ( "context" + "github.com/myrteametrics/myrtea-engine-api/v5/internals/export" + "github.com/myrteametrics/myrtea-engine-api/v5/internals/handlers" "github.com/myrteametrics/myrtea-engine-api/v5/internals/metrics" "net/http" "os" @@ -68,10 +70,24 @@ func main() { GatewayMode: viper.GetBool("HTTP_SERVER_API_ENABLE_GATEWAY_MODE"), AuthenticationMode: viper.GetString("AUTHENTICATION_MODE"), LogLevel: zapConfig.Level, - PluginCore: core, } - router := router.New(routerConfig) + // basePath string, diskRetentionDays int, queueMaxSize int + basePath := viper.GetString("EXPORT_BASE_PATH") + diskRetentionDays := viper.GetInt("EXPORT_DISK_RETENTION_DAYS") + queueMaxSize := viper.GetInt("EXPORT_QUEUE_MAX_SIZE") + exportWorkersCount := viper.GetInt("EXPORT_WORKERS_COUNT") + + exportWrapper := export.NewExportWrapper(basePath, diskRetentionDays, queueMaxSize) + exportWrapper.Init(exportWorkersCount) + + routerServices := router.Services{ + PluginCore: core, + ProcessorHandler: handlers.NewProcessorHandler(), + ExportHandler: handlers.NewExportHandler(exportWrapper), + } + + router := router.New(routerConfig, routerServices) var srv *http.Server if serverEnableTLS { srv = server.NewSecuredServer(serverPort, serverTLSCert, serverTLSKey, router) From 0833412fe4e461d89d23d8e4c9c229c06867cd99 Mon Sep 17 00:00:00 2001 From: Paul Meyer Date: Mon, 20 Nov 2023 20:20:32 +0100 Subject: [PATCH 06/35] lot of changes, replaced mutexes by channels --- 
internals/export/utils.go | 17 +- internals/export/worker.go | 78 +++--- internals/export/wrapper.go | 358 +++++++++++++++++--------- internals/export/wrapper_test.go | 28 +- internals/handlers/export_handlers.go | 15 +- internals/router/router.go | 7 +- internals/router/routes.go | 14 +- 7 files changed, 334 insertions(+), 183 deletions(-) diff --git a/internals/export/utils.go b/internals/export/utils.go index 02c7fac4..16227976 100644 --- a/internals/export/utils.go +++ b/internals/export/utils.go @@ -7,14 +7,10 @@ type CSVParameters struct { Separator rune Limit int64 ChunkSize int64 - FileName string } // Equals compares two CSVParameters func (p CSVParameters) Equals(Params CSVParameters) bool { - if p.FileName != Params.FileName { - return false - } if p.Separator != Params.Separator { return false } @@ -50,3 +46,16 @@ func (p CSVParameters) Equals(Params CSVParameters) bool { } return true } + +// Int64Equals compares two int64 slices +func Int64Equals(a, b []int64) bool { + if len(a) != len(b) { + return false + } + for i, v := range a { + if v != b[i] { + return false + } + } + return true +} diff --git a/internals/export/worker.go b/internals/export/worker.go index b58949ac..c047aa4b 100644 --- a/internals/export/worker.go +++ b/internals/export/worker.go @@ -5,6 +5,7 @@ import ( "context" "encoding/csv" "fmt" + "github.com/myrteametrics/myrtea-sdk/v4/engine" "go.uber.org/zap" "os" "path/filepath" @@ -12,65 +13,77 @@ import ( ) type ExportWorker struct { + Id int Mutex sync.Mutex - Available bool + Available bool // do not touch this variable inside of worker it is used thread-safely by wrapper + Success chan<- int Cancel chan bool // channel to cancel the worker // - QueueItemId string // id of the queueItem currently handled by the worker - BasePath string // base path where the file will be saved + QueueItem WrapperItem + BasePath string // base path where the file will be saved } -func NewExportWorker(basePath string) *ExportWorker { +func 
NewExportWorker(id int, basePath string, success chan<- int) *ExportWorker { return &ExportWorker{ - Available: true, - QueueItemId: "", - BasePath: basePath, - Cancel: make(chan bool), + Id: id, + Available: true, + BasePath: basePath, + Cancel: make(chan bool), + Success: success, } } -// SetAvailable sets the worker availability to true and clears the queueItem -func (e *ExportWorker) SetAvailable(item *ExportWrapperItem) { +func (e *ExportWorker) SetError(error error) { + e.Mutex.Lock() + defer e.Mutex.Unlock() + e.QueueItem.Status = StatusError + e.QueueItem.Error = error +} + +func (e *ExportWorker) SetStatus(status int) { e.Mutex.Lock() defer e.Mutex.Unlock() - e.Available = true + e.QueueItem.Status = status +} + +// SetAvailable sets the worker availability to true and clears the queueItem +func (e *ExportWorker) SetAvailable() { + e.Mutex.Lock() - // set queueItem status to done - item.Mutex.Lock() // set status to error if error occurred - if item.Error != nil { - item.Status = StatusError + if e.QueueItem.Error != nil { + e.QueueItem.Status = StatusError } // set status to done if no error occurred - if item.Status != StatusError { - item.Status = StatusDone + if e.QueueItem.Status != StatusError { + e.QueueItem.Status = StatusDone } - item.Mutex.Unlock() - e.QueueItemId = "" + e.Mutex.Unlock() + + // notify to the dispatcher that this worker is now available + e.Success <- e.Id } // Start starts the export task // It handles one queueItem at a time and when finished it stops the goroutine -func (e *ExportWorker) Start(item *ExportWrapperItem) { - defer e.SetAvailable(item) +func (e *ExportWorker) Start(item WrapperItem) { + defer e.SetAvailable() e.Mutex.Lock() - e.QueueItemId = item.Id + e.QueueItem = item e.Mutex.Unlock() - item.SetStatus(StatusRunning) - // create file - path := filepath.Join(e.BasePath, item.Params.FileName) + path := filepath.Join(e.BasePath, item.FileName) // check if file not already exists if _, err := os.Stat(path); err == nil 
{ - item.SetError(fmt.Errorf("file with same name already exists")) + e.SetError(fmt.Errorf("file with same name already exists")) return } file, err := os.Create(path) if err != nil { - item.SetError(err) + e.SetError(err) return } defer file.Close() @@ -103,8 +116,9 @@ func (e *ExportWorker) Start(item *ExportWrapperItem) { defer wg.Done() defer close(streamedExport.Data) - for _, f := range item.Facts { - writerErr = streamedExport.StreamedExportFactHitsFull(ctx, f, item.Params.Limit) + for _, f := range item.FactIDs { + _ = f // TODO: + writerErr = streamedExport.StreamedExportFactHitsFull(ctx, engine.Fact{}, item.Params.Limit) if writerErr != nil { break // break here when error occurs? } @@ -152,13 +166,13 @@ func (e *ExportWorker) Start(item *ExportWrapperItem) { // error occurred, close file and delete if writerErr != nil || err != nil { if ctx.Err() != nil { - item.SetStatus(StatusCanceled) + e.SetStatus(StatusCanceled) zap.L().Warn("Export worker: canceled, deleting file...", zap.String("filePath", path)) } else { if err != nil { // priority to err - item.SetError(err) + e.SetError(err) } else { - item.SetError(writerErr) + e.SetError(writerErr) } zap.L().Error("Export worker: error, deleting file...", zap.String("filePath", path), zap.NamedError("err", err), zap.NamedError("writerErr", writerErr)) diff --git a/internals/export/wrapper.go b/internals/export/wrapper.go index 0311fbc4..efc53fa7 100644 --- a/internals/export/wrapper.go +++ b/internals/export/wrapper.go @@ -2,12 +2,13 @@ package export import ( "context" + "fmt" "github.com/google/uuid" "github.com/myrteametrics/myrtea-engine-api/v5/internals/security/users" - "github.com/myrteametrics/myrtea-sdk/v4/engine" "go.uber.org/zap" "os" "path/filepath" + "sort" "sync" "time" ) @@ -18,7 +19,7 @@ const ( CodeUserExists = -1 CodeQueueFull = -2 - // ExportWrapperItem statuses + // WrapperItem statuses StatusPending = 0 StatusRunning = 1 StatusDone = 2 @@ -26,160 +27,156 @@ const ( StatusCanceled = 4 ) 
-type ExportWrapperItem struct { - Mutex sync.Mutex - Error error - Status int - Users []users.User // handles export ownership - // non-critical fields - Id string // unique id that represents an export demand - FactID int64 - Params CSVParameters - Date time.Time - Facts []engine.Fact +type WrapperItem struct { + Id string `json:"id"` // unique id that represents an export demand + FactIDs []int64 `json:"factIds"` + Error error `json:"error"` + Status int `json:"status"` + FileName string `json:"fileName"` + Date time.Time `json:"date"` + Users []string `json:"-"` + Params CSVParameters `json:"-"` } -type ExportWrapper struct { - QueueMutex sync.Mutex - DoneMutex sync.Mutex - Workers []*ExportWorker - Queue []*ExportWrapperItem - // Done contains all tasks that have been taken out of the queue, including the ones that are still running - Done []*ExportWrapperItem +type Wrapper struct { + // Queue handling + QueueItemsMutex sync.RWMutex + QueueItems []*WrapperItem // stores queue to handle duplicates, state + Queue chan *WrapperItem + + // contains also current handled items + // Workers is final, its only instanced once and thus does not change size (ExportWorker have there indexes in this slice stored) + Workers []*ExportWorker + + // Success is passed to all workers, they write on this channel when they've finished with there export + Success chan int + + // Archived WrapperItem's + Archive sync.Map // map of all exports that have been done, key is the id of the export + + // Non-critical fields + // Read-only parameters DiskRetentionDays int BasePath string QueueMaxSize int + WorkerCount int } // NewExportWrapperItem creates a new export wrapper item -func NewExportWrapperItem(factID int64, params CSVParameters, user users.User) *ExportWrapperItem { - return &ExportWrapperItem{ - Id: uuid.New().String(), - FactID: factID, - Params: params, - Users: append([]users.User{}, user), - Date: time.Now(), - Status: StatusPending, - Error: nil, +func 
NewExportWrapperItem(factIDs []int64, fileName string, params CSVParameters, user users.User) *WrapperItem { + // sort slices (for easy comparison) + sort.Slice(factIDs, func(i, j int) bool { return factIDs[i] < factIDs[j] }) + return &WrapperItem{ + Users: append([]string{}, user.Login), + Id: uuid.New().String(), + FactIDs: factIDs, + Date: time.Now(), + Status: StatusPending, + Error: nil, + FileName: fileName, + Params: params, } } // NewExportWrapper creates a new export wrapper -func NewExportWrapper(basePath string, diskRetentionDays, queueMaxSize int) *ExportWrapper { - return &ExportWrapper{ - Workers: make([]*ExportWorker, 0), - Queue: make([]*ExportWrapperItem, 0), - Done: make([]*ExportWrapperItem, 0), +func NewExportWrapper(basePath string, workersCount, diskRetentionDays, queueMaxSize int) *Wrapper { + return &Wrapper{ + Workers: make([]*ExportWorker, workersCount), + Queue: make(chan *WrapperItem, queueMaxSize), + Success: make(chan int), + Archive: sync.Map{}, QueueMaxSize: queueMaxSize, BasePath: basePath, DiskRetentionDays: diskRetentionDays, + WorkerCount: workersCount, } } -// SetStatus sets the status of the export wrapper item -func (ew *ExportWrapperItem) SetStatus(status int) { - ew.Mutex.Lock() - defer ew.Mutex.Unlock() - ew.Status = status -} - -// SetError sets the error and status of the export wrapper item -func (ew *ExportWrapperItem) SetError(err error) { - ew.Mutex.Lock() - defer ew.Mutex.Unlock() - ew.Error = err - ew.Status = StatusError - zap.L().Error("Error happened during export worker execution", zap.Error(err)) +// ContainsFact checks if fact is part of the WrapperItem data +func (it *WrapperItem) ContainsFact(factID int64) bool { + for _, d := range it.FactIDs { + if d == factID { + return true + } + } + return false } // Init initializes the export wrapper -func (ew *ExportWrapper) Init(workers int) { +func (ew *Wrapper) Init() { // instantiate workers - for i := 0; i < workers; i++ { - ew.Workers = append(ew.Workers, 
NewExportWorker(ew.BasePath)) + for i := 0; i < ew.WorkerCount; i++ { + ew.Workers[i] = NewExportWorker(i, ew.BasePath, ew.Success) } go ew.StartDispatcher(context.Background()) } // AddToQueue Adds a new export to the export worker queue -func (ew *ExportWrapper) AddToQueue(factID int64, params CSVParameters, user users.User) int { - ew.QueueMutex.Lock() - defer ew.QueueMutex.Unlock() +func (ew *Wrapper) AddToQueue(factIDs []int64, fileName string, params CSVParameters, user users.User) (*WrapperItem, int) { + // implement anti-spam method? + /* + continue + } + */ + item := NewExportWrapperItem(factIDs, fileName, params, user) - for _, queueItem := range ew.Queue { - if queueItem.FactID == factID && queueItem.Params.Equals(params) { + ew.QueueItemsMutex.Lock() + defer ew.QueueItemsMutex.Unlock() + + for _, queueItem := range ew.QueueItems { + if !Int64Equals(queueItem.FactIDs, factIDs) || !queueItem.Params.Equals(params) { // check if user not already in queue.users for _, u := range queueItem.Users { - if u.ID == user.ID { - return CodeUserExists + if u == user.Login { + return nil, CodeUserExists } } - queueItem.Users = append(queueItem.Users, user) - return CodeUserAdded + //queueItem.Users = append(queueItem.Users, user) + return nil, CodeUserAdded } } - if len(ew.Queue) >= ew.QueueMaxSize { - return CodeQueueFull - } + // TODO: check ongoing exports ?? 
- ew.Queue = append(ew.Queue, NewExportWrapperItem(factID, params, user)) - return CodeAdded -} - -// FindAvailableWorker finds an available worker and sets it to unavailable -func (ew *ExportWrapper) FindAvailableWorker() *ExportWorker { - for _, worker := range ew.Workers { - worker.Mutex.Lock() - if worker.Available { - worker.Available = false - worker.Mutex.Unlock() - return worker - } - worker.Mutex.Unlock() + select { + case ew.Queue <- item: + ew.QueueItems = append(ew.QueueItems, item) + return item, CodeAdded + default: + return nil, CodeQueueFull } - return nil } // StartDispatcher starts the export tasks dispatcher & the expired files checker -func (ew *ExportWrapper) StartDispatcher(context context.Context) { +func (ew *Wrapper) StartDispatcher(context context.Context) { zap.L().Info("Starting export tasks dispatcher") // every 5 seconds check if there is a new task to process in queue then check if there is an available worker // if yes, start the worker with the task // if no, continue to check ticker := time.NewTicker(5 * time.Second) expiredFileTicker := time.NewTicker(24 * time.Hour) + defer ticker.Stop() + defer expiredFileTicker.Stop() + for { select { + case w := <-ew.Success: + worker := ew.Workers[w] + // TODO: send notifications here + + // archive item when finished + worker.Mutex.Lock() + ew.Workers[w].Available = true + item := worker.QueueItem + worker.QueueItem = WrapperItem{} + worker.Mutex.Unlock() + // archive item + ew.Archive.Store(item.Id, item) case <-ticker.C: - ew.QueueMutex.Lock() - if len(ew.Queue) > 0 { - for i := 0; i < len(ew.Queue); i++ { - x := ew.Queue[i] - w := ew.FindAvailableWorker() - - // if no worker available, stop the loop since no worker will be available for the next tasks - if w == nil { - break - } - - // attach the task to the worker and start the worker - go w.Start(x) - - // dequeue the task - ew.Queue = ew.Queue[1:] - - // add the task to the done list - ew.DoneMutex.Lock() - ew.Done = append(ew.Done, x) 
- ew.DoneMutex.Unlock() - - } - } - ew.QueueMutex.Unlock() + ew.DispatchExportQueue() case <-expiredFileTicker.C: err := ew.CheckForExpiredFiles() @@ -188,7 +185,9 @@ func (ew *ExportWrapper) StartDispatcher(context context.Context) { } case <-context.Done(): - ticker.Stop() + for i := 0; i < ew.WorkerCount; i++ { + ew.Workers[i].Cancel <- true + } return } } @@ -196,7 +195,7 @@ func (ew *ExportWrapper) StartDispatcher(context context.Context) { // CheckForExpiredFiles checks for expired files in the export directory and deletes them // it also deletes the done tasks that are older than diskRetentionDays -func (ew *ExportWrapper) CheckForExpiredFiles() error { +func (ew *Wrapper) CheckForExpiredFiles() error { // Get all files in directory and check the last edit date // if last edit date is older than diskRetentionDays, delete the file zap.L().Info("Checking for expired files") @@ -205,16 +204,17 @@ func (ew *ExportWrapper) CheckForExpiredFiles() error { return err } - // delete all done tasks of ew.Done that are older than diskRetentionDays - ew.DoneMutex.Lock() - for i := 0; i < len(ew.Done); i++ { - x := ew.Done[i] - if time.Since(x.Date).Hours() > float64(ew.DiskRetentionDays*24) { - ew.Done = append(ew.Done[:i], ew.Done[i+1:]...) 
- i-- + // delete all done archives of ew.Archive that are older than diskRetentionDays + ew.Archive.Range(func(key, value any) bool { + data, ok := value.(WrapperItem) + if !ok { + return true } - } - ew.DoneMutex.Unlock() + if time.Since(data.Date).Hours() > float64(ew.DiskRetentionDays*24) { + ew.Archive.Delete(key) + } + return true + }) // count the number of deleted files count := 0 @@ -250,3 +250,131 @@ func (ew *ExportWrapper) CheckForExpiredFiles() error { zap.L().Info("Deleted expired files", zap.Int("count", count)) return nil } + +func (ew *Wrapper) GetUserExports(user users.User) []WrapperItem { + var result []WrapperItem + + // first, gather all exports that are in the workers if there are any + for _, worker := range ew.Workers { + worker.Mutex.Lock() + if worker.QueueItem.ContainsUser(user) { + result = append(result, worker.QueueItem) + } + worker.Mutex.Unlock() + } + + // then, gather all exports that are archived + ew.Archive.Range(func(key, value any) bool { + data, ok := value.(WrapperItem) + if !ok { + return true + } + if data.ContainsUser(user) { + result = append(result, data) + } + return true + }) + + return result +} + +func (ew *Wrapper) DequeueWrapperItem(item *WrapperItem) bool { + ew.QueueItemsMutex.Lock() + defer ew.QueueItemsMutex.Unlock() + + for i, queueItem := range ew.QueueItems { + + // comparing pointer should work + if queueItem != item { + continue + } + + ew.QueueItems = append(ew.QueueItems[:i], ew.QueueItems[i+1:]...) 
+ return true + } + + return false +} + +func (ew *Wrapper) DispatchExportQueue() { + if len(ew.Queue) == 0 { + return + } + +out: + for _, worker := range ew.Workers { + if worker.Available { + + // Non blocking reading + select { + case item := <-ew.Queue: + worker.Available = false + ew.DequeueWrapperItem(item) + go worker.Start(*item) + default: + break out // Nothing in queue + } + + } + } + +} + +func (ew *Wrapper) FindArchive(id string, user users.User) (WrapperItem, error) { + item, found := ew.Archive.Load(id) + if found { + if data, ok := item.(WrapperItem); ok && data.ContainsUser(user) { + return data, nil + } + } + return WrapperItem{}, fmt.Errorf("archive not found") +} + +// ContainsUser checks if user is in item +func (it *WrapperItem) ContainsUser(user users.User) bool { + for _, u := range it.Users { + if u == user.Login { + return true + } + } + return false +} + +//func (ew *Wrapper) CancelExport(id string, user users.User) error { +// // first check if the export is in the queue +// // if it is, we check if the user is the only one in the queueItem.users +// // if yes, we remove the queueItem from the queue +// // if no, we remove the user from the queueItem.users +// +// for i, worker := range ew.Workers { +// +// worker.Mutex.Lock() +// if worker.QueueItem == nil || worker.QueueItem.Id != id { +// worker.Mutex.Unlock() +// continue +// } +// worker.Mutex.Lock() +// +// if userIdx == -1 { +// worker.Mutex.Unlock() +// ew.QueueMutex.Unlock() +// return fmt.Errorf("user not found") +// } +// +// if len(worker.Users) == 1 { +// ew.Queue = append(ew.Queue[:userIdx], ew.Queue[userIdx+1:]...) +// worker.Mutex.Unlock() +// ew.QueueMutex.Unlock() +// return nil +// } +// +// worker.Users = append(worker.Users[:i], worker.Users[i+1:]...) 
+// worker.Mutex.Unlock() +// ew.QueueMutex.Unlock() +// return nil +// } +// +// ew.QueueMutex.Unlock() +// +// return nil +//} diff --git a/internals/export/wrapper_test.go b/internals/export/wrapper_test.go index 68de99eb..aec66443 100644 --- a/internals/export/wrapper_test.go +++ b/internals/export/wrapper_test.go @@ -20,25 +20,25 @@ func TestNewExportWrapper(t *testing.T) { func TestNewExportWrapperItem(t *testing.T) { item := NewExportWrapperItem(1, CSVParameters{}, users.User{ID: uuid.New()}) - expression.AssertNotEqual(t, item.Id, "") - expression.AssertEqual(t, item.FactID, int64(1)) - expression.AssertEqual(t, item.Params.Equals(CSVParameters{}), true) - expression.AssertEqual(t, item.Status, StatusPending) + expression.AssertNotEqual(t, item.Data.Id, "") + expression.AssertEqual(t, item.Data.FactID, int64(1)) + expression.AssertEqual(t, item.Data.Params.Equals(CSVParameters{}), true) + expression.AssertEqual(t, item.Data.Status, StatusPending) } func TestExportWrapperItem_SetError(t *testing.T) { item := NewExportWrapperItem(1, CSVParameters{}, users.User{ID: uuid.New()}) - expression.AssertEqual(t, item.Status, StatusPending) + expression.AssertEqual(t, item.Data.Status, StatusPending) item.SetError(fmt.Errorf("error")) - expression.AssertEqual(t, item.Status, StatusError) - expression.AssertNotEqual(t, item.Error, nil) + expression.AssertEqual(t, item.Data.Status, StatusError) + expression.AssertNotEqual(t, item.Data.Error, nil) } func TestExportWrapperItem_SetStatus(t *testing.T) { item := NewExportWrapperItem(1, CSVParameters{}, users.User{ID: uuid.New()}) - expression.AssertEqual(t, item.Status, StatusPending) + expression.AssertEqual(t, item.Data.Status, StatusPending) item.SetStatus(StatusRunning) - expression.AssertEqual(t, item.Status, StatusRunning) + expression.AssertEqual(t, item.Data.Status, StatusRunning) } func TestAddToQueue(t *testing.T) { @@ -119,14 +119,14 @@ func TestStartDispatcher(t *testing.T) { foundItem := wrapper.Done[0] 
wrapper.DoneMutex.Unlock() - expression.AssertEqual(t, item.Id, foundItem.Id) + expression.AssertEqual(t, item.Data.Id, foundItem.Data.Id) fmt.Println("Sleeping 1 second to wait for status") time.Sleep(2 * time.Second) // could not create file foundItem.Mutex.Lock() - expression.AssertEqual(t, foundItem.Status, StatusError) + expression.AssertEqual(t, foundItem.Data.Status, StatusError) foundItem.Mutex.Unlock() } @@ -184,8 +184,8 @@ func TestCheckForExpiredFiles(t *testing.T) { // second test : check if expired exports are deleted goodDate := time.Now() - wrapper.Done = append(wrapper.Done, &ExportWrapperItem{Date: time.Now().AddDate(0, 0, -2)}) - wrapper.Done = append(wrapper.Done, &ExportWrapperItem{Date: goodDate}) + wrapper.Done = append(wrapper.Done, &WrapperItem{Data: ExportWrapperItemData{Date: time.Now().AddDate(0, 0, -2)}}) + wrapper.Done = append(wrapper.Done, &WrapperItem{Data: ExportWrapperItemData{Date: goodDate}}) expression.AssertEqual(t, len(wrapper.Done), 2) err = wrapper.CheckForExpiredFiles() if err != nil { @@ -193,5 +193,5 @@ func TestCheckForExpiredFiles(t *testing.T) { t.FailNow() } expression.AssertEqual(t, len(wrapper.Done), 1) - expression.AssertEqual(t, wrapper.Done[0].Date, goodDate) + expression.AssertEqual(t, wrapper.Done[0].Data.Date, goodDate) } diff --git a/internals/handlers/export_handlers.go b/internals/handlers/export_handlers.go index 62f1604a..6dc3ae83 100644 --- a/internals/handlers/export_handlers.go +++ b/internals/handlers/export_handlers.go @@ -20,10 +20,10 @@ import ( ) type ExportHandler struct { - exportWrapper *export.ExportWrapper + exportWrapper *export.Wrapper } -func NewExportHandler(exportWrapper *export.ExportWrapper) *ExportHandler { +func NewExportHandler(exportWrapper *export.Wrapper) *ExportHandler { return &ExportHandler{ exportWrapper: exportWrapper, } @@ -247,15 +247,14 @@ func HandleStreamedExport(requestContext context.Context, w http.ResponseWriter, return err } -// GetFacts godoc -// @Summary Get all 
user exports -// @Description Get all user exports -// @Tags Exports +// GetExports godoc +// @Summary Get user exports +// @Description Get in memory user exports // @Produce json // @Security Bearer -// @Success 200 "Status OK" +// @Success 200 {json} Returns data to be saved into a file // @Failure 500 "internal server error" -// @Router /service/exports [post] +// @Router /engine/exports [get] func (e *ExportHandler) GetExports(w http.ResponseWriter, r *http.Request) { } diff --git a/internals/router/router.go b/internals/router/router.go index fe490f34..32117930 100644 --- a/internals/router/router.go +++ b/internals/router/router.go @@ -35,6 +35,7 @@ type Config struct { LogLevel zap.AtomicLevel } +// Services is a wrapper for services instances, it is passed through router functions type Services struct { PluginCore *plugin.Core ProcessorHandler *handlers.ProcessorHandler @@ -166,7 +167,7 @@ func buildRoutesV3Basic(config Config, services Services) (func(r chi.Router), e rg.Use(chimiddleware.SetHeader("Content-Type", "application/json")) rg.HandleFunc("/log_level", config.LogLevel.ServeHTTP) - rg.Mount("/engine", engineRouter()) + rg.Mount("/engine", engineRouter(services)) for _, plugin := range services.PluginCore.Plugins { rg.Mount(plugin.Plugin.HandlerPrefix(), plugin.Plugin.Handler()) @@ -257,7 +258,7 @@ func buildRoutesV3SAML(config Config, services Services) (func(r chi.Router), er rg.Use(chimiddleware.SetHeader("Content-Type", "application/json")) rg.HandleFunc("/log_level", config.LogLevel.ServeHTTP) - rg.Mount("/engine", engineRouter()) + rg.Mount("/engine", engineRouter(services)) for _, plugin := range services.PluginCore.Plugins { rg.Mount(plugin.Plugin.HandlerPrefix(), plugin.Plugin.Handler()) @@ -321,7 +322,7 @@ func buildRoutesV3OIDC(config Config, services Services) (func(r chi.Router), er rg.Use(chimiddleware.SetHeader("Content-Type", "application/json")) rg.HandleFunc("/log_level", config.LogLevel.ServeHTTP) - rg.Mount("/engine", 
engineRouter()) + rg.Mount("/engine", engineRouter(services)) for _, plugin := range services.PluginCore.Plugins { rg.Mount(plugin.Plugin.HandlerPrefix(), plugin.Plugin.Handler()) diff --git a/internals/router/routes.go b/internals/router/routes.go index 5d431856..1c694759 100644 --- a/internals/router/routes.go +++ b/internals/router/routes.go @@ -42,7 +42,7 @@ func adminRouter() http.Handler { return r } -func engineRouter() http.Handler { +func engineRouter(services Services) http.Handler { r := chi.NewRouter() r.Get("/security/myself", handlers.GetUserSelf) @@ -175,6 +175,12 @@ func engineRouter() http.Handler { r.Get("/facts/{id}/streamedexport", handlers.ExportFactStreamed) + // exports + r.Get("/exports", services.ExportHandler.GetExports) + r.Get("/exports/{id}", services.ExportHandler.GetExport) + r.Delete("/exports/{id}", services.ExportHandler.DeleteExport) + r.Post("/exports/fact/{id}", services.ExportHandler.ExportFact) + r.Get("/variablesconfig", handlers.GetVariablesConfig) r.Get("/variablesconfig/{id}", handlers.GetVariableConfig) r.Get("/variablesconfig/key/{key}", handlers.GetVariableConfigByKey) @@ -195,11 +201,5 @@ func serviceRouter(services Services) http.Handler { r.Get("/externalconfigs/{id}", handlers.GetExternalConfig) r.Get("/externalconfigs/name/{name}", handlers.GetExternalConfigByName) - // exports - r.Get("/exports", services.ExportHandler.GetExports) - r.Get("/exports/{id}", services.ExportHandler.GetExport) - r.Delete("/exports/{id}", services.ExportHandler.DeleteExport) - r.Post("/exports/fact/{id}", services.ExportHandler.ExportFact) - return r } From e5f69353c46fba0859c3315eabd8172fd613dac5 Mon Sep 17 00:00:00 2001 From: Paul Meyer Date: Tue, 21 Nov 2023 16:54:50 +0100 Subject: [PATCH 07/35] changed queue channel to array with mutex + tests --- internals/export/utils_test.go | 5 +- internals/export/worker.go | 61 ++++--- internals/export/worker_test.go | 32 +++- internals/export/wrapper.go | 119 +++++++------ 
internals/export/wrapper_test.go | 277 +++++++++++++++++++++++-------- main.go | 4 +- 6 files changed, 341 insertions(+), 157 deletions(-) diff --git a/internals/export/utils_test.go b/internals/export/utils_test.go index 427eb1ec..2c7f6f1a 100644 --- a/internals/export/utils_test.go +++ b/internals/export/utils_test.go @@ -6,8 +6,8 @@ import ( ) func TestEquals(t *testing.T) { - p1 := CSVParameters{FileName: "bla"} - p2 := CSVParameters{FileName: "bla2"} + p1 := CSVParameters{} + p2 := CSVParameters{} expression.AssertEqual(t, p1.Equals(p2), false) expression.AssertEqual(t, p1.Equals(p1), true) @@ -19,7 +19,6 @@ func TestEquals(t *testing.T) { Separator: ';', Limit: 10, ChunkSize: 100, - FileName: "bla", } expression.AssertEqual(t, params3.Equals(p2), false) expression.AssertEqual(t, params3.Equals(params3), true) diff --git a/internals/export/worker.go b/internals/export/worker.go index c047aa4b..4d8b8e74 100644 --- a/internals/export/worker.go +++ b/internals/export/worker.go @@ -13,14 +13,14 @@ import ( ) type ExportWorker struct { - Id int - Mutex sync.Mutex - Available bool // do not touch this variable inside of worker it is used thread-safely by wrapper - Success chan<- int - Cancel chan bool // channel to cancel the worker - // + Mutex sync.Mutex + Id int + Success chan<- int + Cancel chan bool // channel to cancel the worker + BasePath string // base path where the file will be saved + // critical fields + Available bool QueueItem WrapperItem - BasePath string // base path where the file will be saved } func NewExportWorker(id int, basePath string, success chan<- int) *ExportWorker { @@ -33,6 +33,7 @@ func NewExportWorker(id int, basePath string, success chan<- int) *ExportWorker } } +// SetError sets the error and the status of the worker func (e *ExportWorker) SetError(error error) { e.Mutex.Lock() defer e.Mutex.Unlock() @@ -40,14 +41,31 @@ func (e *ExportWorker) SetError(error error) { e.QueueItem.Error = error } +// SetStatus sets the status of the 
worker func (e *ExportWorker) SetStatus(status int) { e.Mutex.Lock() defer e.Mutex.Unlock() e.QueueItem.Status = status } -// SetAvailable sets the worker availability to true and clears the queueItem -func (e *ExportWorker) SetAvailable() { +// SwapAvailable swaps the availability of the worker +func (e *ExportWorker) SwapAvailable(available bool) (old bool) { + e.Mutex.Lock() + defer e.Mutex.Unlock() + old = e.Available + e.Available = available + return old +} + +// IsAvailable returns the availability of the worker +func (e *ExportWorker) IsAvailable() bool { + e.Mutex.Lock() + defer e.Mutex.Unlock() + return e.Available +} + +// Finalise sets the worker availability to true and clears the queueItem +func (e *ExportWorker) Finalise() { e.Mutex.Lock() // set status to error if error occurred @@ -67,8 +85,8 @@ func (e *ExportWorker) SetAvailable() { // Start starts the export task // It handles one queueItem at a time and when finished it stops the goroutine -func (e *ExportWorker) Start(item WrapperItem) { - defer e.SetAvailable() +func (e *ExportWorker) Start(item WrapperItem, ctx context.Context) { + defer e.Finalise() e.Mutex.Lock() e.QueueItem = item e.Mutex.Unlock() @@ -98,7 +116,7 @@ func (e *ExportWorker) Start(item WrapperItem) { var writerErr error // local context handling - ctx, cancel := context.WithCancel(context.Background()) + ctx, cancel := context.WithCancel(ctx) defer cancel() // Increment the WaitGroup counter @@ -117,7 +135,7 @@ func (e *ExportWorker) Start(item WrapperItem) { defer close(streamedExport.Data) for _, f := range item.FactIDs { - _ = f // TODO: + _ = f // TODO: facts writerErr = streamedExport.StreamedExportFactHitsFull(ctx, engine.Fact{}, item.Params.Limit) if writerErr != nil { break // break here when error occurs? 
@@ -128,23 +146,21 @@ func (e *ExportWorker) Start(item WrapperItem) { // Chunk handler first := true labels := item.Params.ColumnsLabel - loop := true - for loop { +loop: + for { select { case hits, ok := <-streamedExport.Data: if !ok { // channel closed - loop = false - break + break loop } - err := WriteConvertHitsToCSV(csvWriter, hits, item.Params.Columns, labels, item.Params.FormatColumnsData, item.Params.Separator) + err = WriteConvertHitsToCSV(csvWriter, hits, item.Params.Columns, labels, item.Params.FormatColumnsData, item.Params.Separator) if err != nil { zap.L().Error("WriteConvertHitsToCSV error during export", zap.Error(err)) cancel() - loop = false - break + break loop } // Flush data @@ -154,10 +170,11 @@ func (e *ExportWorker) Start(item WrapperItem) { first = false labels = []string{} } - + case <-ctx.Done(): + break loop case <-e.Cancel: cancel() - loop = false + break loop } } diff --git a/internals/export/worker_test.go b/internals/export/worker_test.go index b4f6ad7b..9023575d 100644 --- a/internals/export/worker_test.go +++ b/internals/export/worker_test.go @@ -6,8 +6,36 @@ import ( ) func TestNewExportWorker(t *testing.T) { - worker := NewExportWorker("/tmp") + worker := NewExportWorker(0, "/tmp", make(chan<- int)) expression.AssertEqual(t, worker.BasePath, "/tmp") expression.AssertEqual(t, worker.Available, true) - expression.AssertEqual(t, worker.QueueItemId, "") + expression.AssertEqual(t, worker.Id, 0) +} + +func TestExportWorker_SetError(t *testing.T) { + worker := NewExportWorker(0, "/tmp", make(chan<- int)) + worker.SetError(nil) + expression.AssertEqual(t, worker.QueueItem.Status, StatusError) + expression.AssertEqual(t, worker.QueueItem.Error, nil) +} + +func TestExportWorker_SetStatus(t *testing.T) { + worker := NewExportWorker(0, "/tmp", make(chan<- int)) + worker.SetStatus(StatusPending) + expression.AssertEqual(t, worker.QueueItem.Status, StatusPending) +} + +func TestExportWorker_SwapAvailable(t *testing.T) { + worker := 
NewExportWorker(0, "/tmp", make(chan<- int)) + expression.AssertEqual(t, worker.SwapAvailable(false), true) + expression.AssertEqual(t, worker.Available, false) + expression.AssertEqual(t, worker.SwapAvailable(true), false) + expression.AssertEqual(t, worker.Available, true) +} + +func TestExportWorker_IsAvailable(t *testing.T) { + worker := NewExportWorker(0, "/tmp", make(chan<- int)) + expression.AssertEqual(t, worker.IsAvailable(), true) + worker.SwapAvailable(false) + expression.AssertEqual(t, worker.IsAvailable(), false) } diff --git a/internals/export/wrapper.go b/internals/export/wrapper.go index efc53fa7..e27a0baa 100644 --- a/internals/export/wrapper.go +++ b/internals/export/wrapper.go @@ -2,7 +2,6 @@ package export import ( "context" - "fmt" "github.com/google/uuid" "github.com/myrteametrics/myrtea-engine-api/v5/internals/security/users" "go.uber.org/zap" @@ -42,7 +41,7 @@ type Wrapper struct { // Queue handling QueueItemsMutex sync.RWMutex QueueItems []*WrapperItem // stores queue to handle duplicates, state - Queue chan *WrapperItem + //Queue chan *WrapperItem // contains also current handled items // Workers is final, its only instanced once and thus does not change size (ExportWorker have there indexes in this slice stored) @@ -62,8 +61,8 @@ type Wrapper struct { WorkerCount int } -// NewExportWrapperItem creates a new export wrapper item -func NewExportWrapperItem(factIDs []int64, fileName string, params CSVParameters, user users.User) *WrapperItem { +// NewWrapperItem creates a new export wrapper item +func NewWrapperItem(factIDs []int64, fileName string, params CSVParameters, user users.User) *WrapperItem { // sort slices (for easy comparison) sort.Slice(factIDs, func(i, j int) bool { return factIDs[i] < factIDs[j] }) return &WrapperItem{ @@ -78,11 +77,11 @@ func NewExportWrapperItem(factIDs []int64, fileName string, params CSVParameters } } -// NewExportWrapper creates a new export wrapper -func NewExportWrapper(basePath string, workersCount, 
diskRetentionDays, queueMaxSize int) *Wrapper { +// NewWrapper creates a new export wrapper +func NewWrapper(basePath string, workersCount, diskRetentionDays, queueMaxSize int) *Wrapper { return &Wrapper{ - Workers: make([]*ExportWorker, workersCount), - Queue: make(chan *WrapperItem, queueMaxSize), + Workers: make([]*ExportWorker, 0), + QueueItems: make([]*WrapperItem, 0), Success: make(chan int), Archive: sync.Map{}, QueueMaxSize: queueMaxSize, @@ -103,51 +102,42 @@ func (it *WrapperItem) ContainsFact(factID int64) bool { } // Init initializes the export wrapper -func (ew *Wrapper) Init() { +func (ew *Wrapper) Init(ctx context.Context) { // instantiate workers for i := 0; i < ew.WorkerCount; i++ { - ew.Workers[i] = NewExportWorker(i, ew.BasePath, ew.Success) + ew.Workers = append(ew.Workers, NewExportWorker(i, ew.BasePath, ew.Success)) } - go ew.StartDispatcher(context.Background()) + go ew.StartDispatcher(ctx) } // AddToQueue Adds a new export to the export worker queue func (ew *Wrapper) AddToQueue(factIDs []int64, fileName string, params CSVParameters, user users.User) (*WrapperItem, int) { - // implement anti-spam method? - /* - continue - } - */ - item := NewExportWrapperItem(factIDs, fileName, params, user) - ew.QueueItemsMutex.Lock() defer ew.QueueItemsMutex.Unlock() for _, queueItem := range ew.QueueItems { if !Int64Equals(queueItem.FactIDs, factIDs) || !queueItem.Params.Equals(params) { + continue + } - // check if user not already in queue.users - for _, u := range queueItem.Users { - if u == user.Login { - return nil, CodeUserExists - } + // check if user not already in queue.users + for _, u := range queueItem.Users { + if u == user.Login { + return nil, CodeUserExists } - - //queueItem.Users = append(queueItem.Users, user) - return nil, CodeUserAdded } - } - // TODO: check ongoing exports ?? 
+ queueItem.Users = append(queueItem.Users, user.Login) + return nil, CodeUserAdded + } - select { - case ew.Queue <- item: - ew.QueueItems = append(ew.QueueItems, item) - return item, CodeAdded - default: + if len(ew.QueueItems) >= ew.QueueMaxSize { return nil, CodeQueueFull } + item := NewWrapperItem(factIDs, fileName, params, user) + ew.QueueItems = append(ew.QueueItems, item) + return item, CodeAdded } // StartDispatcher starts the export tasks dispatcher & the expired files checker @@ -176,18 +166,14 @@ func (ew *Wrapper) StartDispatcher(context context.Context) { // archive item ew.Archive.Store(item.Id, item) case <-ticker.C: - ew.DispatchExportQueue() + ew.dispatchExportQueue(context) case <-expiredFileTicker.C: err := ew.CheckForExpiredFiles() if err != nil { zap.L().Error("Error during expired files check", zap.Error(err)) } - case <-context.Done(): - for i := 0; i < ew.WorkerCount; i++ { - ew.Workers[i].Cancel <- true - } return } } @@ -275,59 +261,72 @@ func (ew *Wrapper) GetUserExports(user users.User) []WrapperItem { return true }) + // finally, gather all exports that are in the queue + ew.QueueItemsMutex.Lock() + defer ew.QueueItemsMutex.Unlock() + + for _, item := range ew.QueueItems { + if item.ContainsUser(user) { + result = append(result, *item) + } + } + return result } -func (ew *Wrapper) DequeueWrapperItem(item *WrapperItem) bool { +// DequeueWrapperItem Dequeues an item, returns size of queue and true if item was found and dequeued +func (ew *Wrapper) DequeueWrapperItem(item *WrapperItem) (int, bool) { ew.QueueItemsMutex.Lock() defer ew.QueueItemsMutex.Unlock() for i, queueItem := range ew.QueueItems { - // comparing pointer should work if queueItem != item { continue } ew.QueueItems = append(ew.QueueItems[:i], ew.QueueItems[i+1:]...) 
- return true + return len(ew.QueueItems), true } - return false + return len(ew.QueueItems), false } -func (ew *Wrapper) DispatchExportQueue() { - if len(ew.Queue) == 0 { - return - } - -out: +// dispatchExportQueue dispatches the export queue to the available workers +func (ew *Wrapper) dispatchExportQueue(ctx context.Context) { for _, worker := range ew.Workers { + worker.Mutex.Lock() if worker.Available { + // check if there is an item in the queue + ew.QueueItemsMutex.Lock() - // Non blocking reading - select { - case item := <-ew.Queue: - worker.Available = false - ew.DequeueWrapperItem(item) - go worker.Start(*item) - default: - break out // Nothing in queue + if len(ew.QueueItems) == 0 { + ew.QueueItemsMutex.Unlock() + worker.Mutex.Unlock() + return // Nothing in queue } + item := *ew.QueueItems[0] + ew.QueueItems = append(ew.QueueItems[:0], ew.QueueItems[1:]...) + ew.QueueItemsMutex.Unlock() + + worker.Available = false + worker.Mutex.Unlock() + go worker.Start(item, ctx) + } else { + worker.Mutex.Unlock() } } - } -func (ew *Wrapper) FindArchive(id string, user users.User) (WrapperItem, error) { +func (ew *Wrapper) FindArchive(id string, user users.User) (WrapperItem, bool) { item, found := ew.Archive.Load(id) if found { if data, ok := item.(WrapperItem); ok && data.ContainsUser(user) { - return data, nil + return data, true } } - return WrapperItem{}, fmt.Errorf("archive not found") + return WrapperItem{}, false } // ContainsUser checks if user is in item diff --git a/internals/export/wrapper_test.go b/internals/export/wrapper_test.go index aec66443..8413b15c 100644 --- a/internals/export/wrapper_test.go +++ b/internals/export/wrapper_test.go @@ -1,6 +1,7 @@ package export import ( + "context" "fmt" "github.com/google/uuid" "github.com/myrteametrics/myrtea-engine-api/v5/internals/security/users" @@ -11,59 +12,59 @@ import ( "time" ) -func TestNewExportWrapper(t *testing.T) { - wrapper := NewExportWrapper("/tmp", 1, 1) +func TestNewWrapper(t *testing.T) { 
+ wrapper := NewWrapper("/tmp", 1, 1, 1) expression.AssertEqual(t, wrapper.BasePath, "/tmp") expression.AssertEqual(t, wrapper.QueueMaxSize, 1) expression.AssertEqual(t, wrapper.DiskRetentionDays, 1) + expression.AssertEqual(t, wrapper.QueueMaxSize, 1) } -func TestNewExportWrapperItem(t *testing.T) { - item := NewExportWrapperItem(1, CSVParameters{}, users.User{ID: uuid.New()}) - expression.AssertNotEqual(t, item.Data.Id, "") - expression.AssertEqual(t, item.Data.FactID, int64(1)) - expression.AssertEqual(t, item.Data.Params.Equals(CSVParameters{}), true) - expression.AssertEqual(t, item.Data.Status, StatusPending) +func TestNewWrapperItem(t *testing.T) { + item := NewWrapperItem([]int64{1}, "test.txt", CSVParameters{}, users.User{Login: "test"}) + expression.AssertNotEqual(t, item.Id, "") + expression.AssertEqual(t, Int64Equals(item.FactIDs, []int64{1}), true) + expression.AssertEqual(t, item.Params.Equals(CSVParameters{}), true) + expression.AssertEqual(t, item.Status, StatusPending) + expression.AssertEqual(t, item.FileName, "test.txt") + expression.AssertNotEqual(t, len(item.Users), 0) + expression.AssertEqual(t, item.Users[0], "test") } -func TestExportWrapperItem_SetError(t *testing.T) { - item := NewExportWrapperItem(1, CSVParameters{}, users.User{ID: uuid.New()}) - expression.AssertEqual(t, item.Data.Status, StatusPending) - item.SetError(fmt.Errorf("error")) - expression.AssertEqual(t, item.Data.Status, StatusError) - expression.AssertNotEqual(t, item.Data.Error, nil) +func TestWrapperItem_ContainsFact(t *testing.T) { + item := NewWrapperItem([]int64{1, 22, 33}, "test.txt", CSVParameters{}, users.User{Login: "test"}) + expression.AssertEqual(t, item.ContainsFact(1), true) + expression.AssertEqual(t, item.ContainsFact(22), true) + expression.AssertEqual(t, item.ContainsFact(3), false) } -func TestExportWrapperItem_SetStatus(t *testing.T) { - item := NewExportWrapperItem(1, CSVParameters{}, users.User{ID: uuid.New()}) - expression.AssertEqual(t, 
item.Data.Status, StatusPending) - item.SetStatus(StatusRunning) - expression.AssertEqual(t, item.Data.Status, StatusRunning) +func TestWrapper_Init(t *testing.T) { + wrapper := NewWrapper("/tmp", 1, 1, 1) + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + wrapper.Init(ctx) + time.Sleep(500 * time.Millisecond) + expression.AssertEqual(t, len(wrapper.Workers), 1) + worker := wrapper.Workers[0] + expression.AssertEqual(t, worker.Id, 0) + worker.Mutex.Lock() + defer worker.Mutex.Unlock() + expression.AssertEqual(t, worker.Available, true) } func TestAddToQueue(t *testing.T) { - wrapper := NewExportWrapper("/tmp", 1, 1) - user1 := users.User{ID: uuid.New()} - user2 := users.User{ID: uuid.New()} + wrapper := NewWrapper("/tmp", 1, 1, 1) + user1 := users.User{Login: "bla"} + user2 := users.User{Login: "blabla"} csvParams := CSVParameters{} - expression.AssertEqual(t, wrapper.AddToQueue(1, csvParams, user1), CodeAdded, "AddToQueue should return CodeAdded") - expression.AssertEqual(t, wrapper.AddToQueue(1, csvParams, user1), CodeUserExists, "AddToQueue should return CodeUserExists") - expression.AssertEqual(t, wrapper.AddToQueue(1, csvParams, user2), CodeUserAdded, "AddToQueue should return CodeUserAdded") - expression.AssertEqual(t, wrapper.AddToQueue(2, csvParams, user2), CodeQueueFull, "AddToQueue should return CodeQueueFull") -} - -func TestFindAvailableWorker(t *testing.T) { - wrapper := NewExportWrapper("/tmp", 1, 1) - // since wrapper.Init() starts the dispatcher worker that we don't want to run in this test, we initialize the workers manually - for i := 0; i < 2; i++ { - wrapper.Workers = append(wrapper.Workers, NewExportWorker("/tmp")) - } - w1 := wrapper.FindAvailableWorker() - expression.AssertNotEqual(t, w1, nil) - w2 := wrapper.FindAvailableWorker() - expression.AssertNotEqual(t, w2, nil) - w3 := wrapper.FindAvailableWorker() - expression.AssertEqual(t, w3, (*ExportWorker)(nil)) + _, result := wrapper.AddToQueue([]int64{1}, "test.txt", 
csvParams, user1) + expression.AssertEqual(t, result, CodeAdded, "AddToQueue should return CodeAdded") + _, result = wrapper.AddToQueue([]int64{1}, "test.txt", csvParams, user1) + expression.AssertEqual(t, result, CodeUserExists, "AddToQueue should return CodeUserExists") + _, result = wrapper.AddToQueue([]int64{1}, "test.txt", csvParams, user2) + expression.AssertEqual(t, result, CodeUserAdded, "AddToQueue should return CodeUserAdded") + _, result = wrapper.AddToQueue([]int64{2}, "test.txt", csvParams, user2) + expression.AssertEqual(t, result, CodeQueueFull, "AddToQueue should return CodeQueueFull") } func TestStartDispatcher(t *testing.T) { @@ -85,8 +86,11 @@ func TestStartDispatcher(t *testing.T) { fileName := filepath.Base(file.Name()) _ = file.Close() - wrapper := NewExportWrapper(dname, 1, 1) - wrapper.Init(1) + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + + wrapper := NewWrapper(dname, 1, 1, 1) + wrapper.Init(ctx) expression.AssertEqual(t, len(wrapper.Workers), 1) // sleep one second to let the goroutine start fmt.Println("Sleeping 1 second to let the goroutine start") @@ -100,34 +104,31 @@ func TestStartDispatcher(t *testing.T) { worker.Mutex.Unlock() // add a task to the queue and check if the task was added to queue - expression.AssertEqual(t, wrapper.AddToQueue(1, CSVParameters{FileName: fileName}, users.User{ID: uuid.New()}), CodeAdded, "AddToQueue should return CodeAdded") - wrapper.QueueMutex.Lock() - item := wrapper.Queue[0] - expression.AssertEqual(t, len(wrapper.Queue), 1) - wrapper.QueueMutex.Unlock() + user := users.User{Login: "test"} + _, result := wrapper.AddToQueue([]int64{1}, fileName, CSVParameters{}, user) + expression.AssertEqual(t, result, CodeAdded, "AddToQueue should return CodeAdded") + wrapper.QueueItemsMutex.Lock() + expression.AssertEqual(t, len(wrapper.QueueItems), 1) + itemId := wrapper.QueueItems[0].Id + wrapper.QueueItemsMutex.Unlock() // sleep another 5 seconds to let the goroutine handle the 
task fmt.Println("Sleeping 5 seconds to let the goroutine handle the task") time.Sleep(5 * time.Second) - wrapper.QueueMutex.Lock() - expression.AssertEqual(t, len(wrapper.Queue), 0) - wrapper.QueueMutex.Unlock() + wrapper.QueueItemsMutex.Lock() + expression.AssertEqual(t, len(wrapper.QueueItems), 0) + wrapper.QueueItemsMutex.Unlock() - wrapper.DoneMutex.Lock() - expression.AssertEqual(t, len(wrapper.Done), 1) - foundItem := wrapper.Done[0] - wrapper.DoneMutex.Unlock() - - expression.AssertEqual(t, item.Data.Id, foundItem.Data.Id) + worker.Mutex.Lock() + expression.AssertEqual(t, worker.Available, true) + worker.Mutex.Unlock() - fmt.Println("Sleeping 1 second to wait for status") - time.Sleep(2 * time.Second) + time.Sleep(50 * time.Millisecond) - // could not create file - foundItem.Mutex.Lock() - expression.AssertEqual(t, foundItem.Data.Status, StatusError) - foundItem.Mutex.Unlock() + item, ok := wrapper.FindArchive(itemId, user) + expression.AssertEqual(t, ok, true) + expression.AssertEqual(t, item.Status, StatusError) // could not create file } func TestCheckForExpiredFiles(t *testing.T) { @@ -162,7 +163,7 @@ func TestCheckForExpiredFiles(t *testing.T) { file2Name := file2.Name() _ = file2.Close() - wrapper := NewExportWrapper(dname, 1, 1) + wrapper := NewWrapper(dname, 1, 1, 1) err = wrapper.CheckForExpiredFiles() if err != nil { t.Error(err) @@ -184,14 +185,154 @@ func TestCheckForExpiredFiles(t *testing.T) { // second test : check if expired exports are deleted goodDate := time.Now() - wrapper.Done = append(wrapper.Done, &WrapperItem{Data: ExportWrapperItemData{Date: time.Now().AddDate(0, 0, -2)}}) - wrapper.Done = append(wrapper.Done, &WrapperItem{Data: ExportWrapperItemData{Date: goodDate}}) - expression.AssertEqual(t, len(wrapper.Done), 2) + id1 := uuid.New() + id2 := uuid.New() + wrapper.Archive.Store(id1, WrapperItem{Date: time.Now().AddDate(0, 0, -2)}) + wrapper.Archive.Store(id2, WrapperItem{Date: goodDate}) + + _, found := wrapper.Archive.Load(id1) + 
expression.AssertEqual(t, found, true) + _, found = wrapper.Archive.Load(id2) + expression.AssertEqual(t, found, true) + err = wrapper.CheckForExpiredFiles() if err != nil { t.Error(err) t.FailNow() } - expression.AssertEqual(t, len(wrapper.Done), 1) - expression.AssertEqual(t, wrapper.Done[0].Data.Date, goodDate) + + _, found = wrapper.Archive.Load(id1) + expression.AssertEqual(t, found, false) + _, found = wrapper.Archive.Load(id2) + expression.AssertEqual(t, found, true) +} + +func TestWrapper_GetUserExports(t *testing.T) { + wrapper := NewWrapper("/tmp", 1, 1, 2) + user1 := users.User{Login: "bla"} + user2 := users.User{Login: "blabla"} + item1 := NewWrapperItem([]int64{1}, "test.txt", CSVParameters{}, user1) + item2 := NewWrapperItem([]int64{2}, "test.txt", CSVParameters{}, user1) + item3 := NewWrapperItem([]int64{3}, "test.txt", CSVParameters{}, user1) + item4 := NewWrapperItem([]int64{4}, "test.txt", CSVParameters{}, user2) + wrapper.Archive.Store(item1.Id, *item1) + wrapper.Archive.Store(item2.Id, *item2) + wrapper.Archive.Store(item3.Id, *item3) + wrapper.Archive.Store(item4.Id, *item4) + wrapper.AddToQueue([]int64{5}, "test.txt", CSVParameters{}, user1) + wrapper.AddToQueue([]int64{6}, "test.txt", CSVParameters{}, user2) + exports := wrapper.GetUserExports(user1) + expression.AssertEqual(t, len(exports), 4) + exports = wrapper.GetUserExports(user2) + expression.AssertEqual(t, len(exports), 2) +} + +func TestWrapper_DequeueWrapperItem(t *testing.T) { + wrapper := NewWrapper("/tmp", 1, 1, 2) + i, ok := wrapper.DequeueWrapperItem(&WrapperItem{}) + expression.AssertEqual(t, ok, false) + expression.AssertEqual(t, i, 0) + wrapper.AddToQueue([]int64{5}, "test.txt", CSVParameters{}, users.User{Login: "bla"}) + wrapper.AddToQueue([]int64{6}, "test.txt", CSVParameters{}, users.User{Login: "blabla"}) + + expression.AssertEqual(t, len(wrapper.QueueItems), 2) + item1 := wrapper.QueueItems[0] + item2 := wrapper.QueueItems[1] + + i, ok = 
wrapper.DequeueWrapperItem(item1) + expression.AssertEqual(t, ok, true) + expression.AssertEqual(t, i, 1) + + i, ok = wrapper.DequeueWrapperItem(item2) + expression.AssertEqual(t, ok, true) + expression.AssertEqual(t, i, 0) +} + +func TestWrapper_dispatchExportQueue(t *testing.T) { + // we don't want that the worker try to export data, therefore we will create a temporary directory with a temp file + // so that the worker will not be able to create the file and will return an error + dname, err := os.MkdirTemp("", "exportdispatcher") + if err != nil { + t.Error(err) + t.FailNow() + } + defer os.RemoveAll(dname) + + // create a file that is 2 days old + file, err := os.CreateTemp(dname, "exportdispatcher") + if err != nil { + t.Error(err) + t.FailNow() + } + fileName := filepath.Base(file.Name()) + _ = file.Close() + + wrapper := NewWrapper(dname, 1, 1, 2) + ctx, cancel := context.WithCancel(context.Background()) + wrapper.Init(ctx) + cancel() // stop dispatcher since we don't want him to interact with the workers or the queue + + // wait until dispatcher stops + time.Sleep(50 * time.Millisecond) + + expression.AssertEqual(t, len(wrapper.Workers), 1) + worker := wrapper.Workers[0] + + // no items in queue -> nothing should happen + expression.AssertEqual(t, worker.IsAvailable(), true) + wrapper.dispatchExportQueue(context.Background()) + expression.AssertEqual(t, worker.IsAvailable(), true, "worker should still be available, because no items in queue") + + // we add an item to the queue + wrapper.AddToQueue([]int64{1}, fileName, CSVParameters{}, users.User{Login: "test"}) + + // we test if dispatchExportQueue will not dispatch the item, no worker available + worker.SwapAvailable(false) + + wrapper.dispatchExportQueue(context.Background()) + + // the item should still be in the queue + wrapper.QueueItemsMutex.Lock() + expression.AssertEqual(t, len(wrapper.QueueItems), 1, "item should still be in the queue, since no worker is available") + 
wrapper.QueueItemsMutex.Unlock() + + // we test if dispatchExportQueue will dispatch the item, worker is now set to available + expression.AssertEqual(t, worker.SwapAvailable(true), false) + + wrapper.dispatchExportQueue(context.Background()) + + expression.AssertEqual(t, worker.IsAvailable(), false, "worker should not be available, because it is working on an item") + expression.AssertEqual(t, len(wrapper.QueueItems), 0) + + // wait until worker has finished + time.Sleep(1 * time.Second) + + worker.Mutex.Lock() + defer worker.Mutex.Unlock() + + expression.AssertEqual(t, worker.QueueItem.Status, StatusError, fmt.Sprintf("worker processed item should have StatusError(%d) because the file already exists", StatusError)) // could not create file +} + +func TestWrapper_FindArchive(t *testing.T) { + wrapper := NewWrapper("/tmp", 1, 1, 2) + item := NewWrapperItem([]int64{1}, "test.txt", CSVParameters{}, users.User{Login: "bla"}) + wrapper.Archive.Store(item.Id, *item) + + // testing with non-existing item in archive + _, ok := wrapper.FindArchive("test", users.User{Login: "bla"}) + expression.AssertEqual(t, ok, false) + + // testing with existing item but not good user in archive + _, ok = wrapper.FindArchive("test", users.User{Login: "blabla"}) + expression.AssertEqual(t, ok, false) + + // testing with existing item in archive + _, ok = wrapper.FindArchive(item.Id, users.User{Login: "bla"}) + expression.AssertEqual(t, ok, true) +} + +func TestWrapper_ContainsUser(t *testing.T) { + item := NewWrapperItem([]int64{1}, "test.txt", CSVParameters{}, users.User{Login: "bla"}) + expression.AssertEqual(t, item.ContainsUser(users.User{Login: "bla"}), true) + expression.AssertEqual(t, item.ContainsUser(users.User{Login: "blabla"}), false) } diff --git a/main.go b/main.go index 88823692..b9ecd32a 100644 --- a/main.go +++ b/main.go @@ -78,8 +78,8 @@ func main() { queueMaxSize := viper.GetInt("EXPORT_QUEUE_MAX_SIZE") exportWorkersCount := viper.GetInt("EXPORT_WORKERS_COUNT") - 
exportWrapper := export.NewExportWrapper(basePath, diskRetentionDays, queueMaxSize) - exportWrapper.Init(exportWorkersCount) + exportWrapper := export.NewWrapper(basePath, exportWorkersCount, diskRetentionDays, queueMaxSize) + exportWrapper.Init(context.Background()) routerServices := router.Services{ PluginCore: core, From 0af54fee3f6e5773130d4c5a6993bf78004b7ef1 Mon Sep 17 00:00:00 2001 From: Paul Meyer Date: Tue, 21 Nov 2023 17:07:06 +0100 Subject: [PATCH 08/35] fixed tests --- internals/export/utils_test.go | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/internals/export/utils_test.go b/internals/export/utils_test.go index 2c7f6f1a..18b8d143 100644 --- a/internals/export/utils_test.go +++ b/internals/export/utils_test.go @@ -8,8 +8,7 @@ import ( func TestEquals(t *testing.T) { p1 := CSVParameters{} p2 := CSVParameters{} - expression.AssertEqual(t, p1.Equals(p2), false) - expression.AssertEqual(t, p1.Equals(p1), true) + expression.AssertEqual(t, p1.Equals(p2), true) // make a full test with all variables in parameters filled params3 := CSVParameters{ From 6605888dfd6d11eb4960a9d503482fc49f713ca7 Mon Sep 17 00:00:00 2001 From: Paul Meyer Date: Tue, 21 Nov 2023 18:30:38 +0100 Subject: [PATCH 09/35] added endpoints + wip facts not factids in export structs --- internals/export/utils.go | 13 -- internals/export/worker.go | 6 +- internals/export/wrapper.go | 200 +++++++++++------- internals/export/wrapper_test.go | 80 +++---- internals/handlers/export_handlers.go | 130 ++++++++++-- internals/security/permissions/permission.go | 11 +- .../security/permissions/permission_test.go | 2 +- .../permissions/postgres_repository.go | 4 +- internals/security/users/user.go | 4 +- 9 files changed, 291 insertions(+), 159 deletions(-) diff --git a/internals/export/utils.go b/internals/export/utils.go index 16227976..3b683455 100644 --- a/internals/export/utils.go +++ b/internals/export/utils.go @@ -46,16 +46,3 @@ func (p CSVParameters) Equals(Params 
CSVParameters) bool { } return true } - -// Int64Equals compares two int64 slices -func Int64Equals(a, b []int64) bool { - if len(a) != len(b) { - return false - } - for i, v := range a { - if v != b[i] { - return false - } - } - return true -} diff --git a/internals/export/worker.go b/internals/export/worker.go index 4d8b8e74..cf03b351 100644 --- a/internals/export/worker.go +++ b/internals/export/worker.go @@ -64,8 +64,8 @@ func (e *ExportWorker) IsAvailable() bool { return e.Available } -// Finalise sets the worker availability to true and clears the queueItem -func (e *ExportWorker) Finalise() { +// finalise sets the worker availability to true and clears the queueItem +func (e *ExportWorker) finalise() { e.Mutex.Lock() // set status to error if error occurred @@ -86,7 +86,7 @@ func (e *ExportWorker) Finalise() { // Start starts the export task // It handles one queueItem at a time and when finished it stops the goroutine func (e *ExportWorker) Start(item WrapperItem, ctx context.Context) { - defer e.Finalise() + defer e.finalise() e.Mutex.Lock() e.QueueItem = item e.Mutex.Unlock() diff --git a/internals/export/wrapper.go b/internals/export/wrapper.go index e27a0baa..103580a2 100644 --- a/internals/export/wrapper.go +++ b/internals/export/wrapper.go @@ -4,10 +4,10 @@ import ( "context" "github.com/google/uuid" "github.com/myrteametrics/myrtea-engine-api/v5/internals/security/users" + "github.com/myrteametrics/myrtea-sdk/v4/engine" "go.uber.org/zap" "os" "path/filepath" - "sort" "sync" "time" ) @@ -29,6 +29,7 @@ const ( type WrapperItem struct { Id string `json:"id"` // unique id that represents an export demand FactIDs []int64 `json:"factIds"` + Facts []engine.Fact `json:"-"` Error error `json:"error"` Status int `json:"status"` FileName string `json:"fileName"` @@ -39,36 +40,33 @@ type WrapperItem struct { type Wrapper struct { // Queue handling - QueueItemsMutex sync.RWMutex - QueueItems []*WrapperItem // stores queue to handle duplicates, state - //Queue 
chan *WrapperItem + queueMutex sync.RWMutex + queue []*WrapperItem // stores queue to handle duplicates, state // contains also current handled items - // Workers is final, its only instanced once and thus does not change size (ExportWorker have there indexes in this slice stored) - Workers []*ExportWorker + // workers is final, its only instanced once and thus does not change size (ExportWorker have there indexes in this slice stored) + workers []*ExportWorker - // Success is passed to all workers, they write on this channel when they've finished with there export - Success chan int + // success is passed to all workers, they write on this channel when they've finished with there export + success chan int // Archived WrapperItem's - Archive sync.Map // map of all exports that have been done, key is the id of the export + archive sync.Map // map of all exports that have been done, key is the id of the export // Non-critical fields // Read-only parameters - DiskRetentionDays int - BasePath string - QueueMaxSize int - WorkerCount int + diskRetentionDays int + basePath string + queueMaxSize int + workerCount int } // NewWrapperItem creates a new export wrapper item -func NewWrapperItem(factIDs []int64, fileName string, params CSVParameters, user users.User) *WrapperItem { - // sort slices (for easy comparison) - sort.Slice(factIDs, func(i, j int) bool { return factIDs[i] < factIDs[j] }) +func NewWrapperItem(facts []engine.Fact, fileName string, params CSVParameters, user users.User) *WrapperItem { return &WrapperItem{ Users: append([]string{}, user.Login), Id: uuid.New().String(), - FactIDs: factIDs, + Facts: facts, Date: time.Now(), Status: StatusPending, Error: nil, @@ -80,14 +78,14 @@ func NewWrapperItem(factIDs []int64, fileName string, params CSVParameters, user // NewWrapper creates a new export wrapper func NewWrapper(basePath string, workersCount, diskRetentionDays, queueMaxSize int) *Wrapper { return &Wrapper{ - Workers: make([]*ExportWorker, 0), - 
QueueItems: make([]*WrapperItem, 0), - Success: make(chan int), - Archive: sync.Map{}, - QueueMaxSize: queueMaxSize, - BasePath: basePath, - DiskRetentionDays: diskRetentionDays, - WorkerCount: workersCount, + workers: make([]*ExportWorker, 0), + queue: make([]*WrapperItem, 0), + success: make(chan int), + archive: sync.Map{}, + queueMaxSize: queueMaxSize, + basePath: basePath, + diskRetentionDays: diskRetentionDays, + workerCount: workersCount, } } @@ -104,19 +102,36 @@ func (it *WrapperItem) ContainsFact(factID int64) bool { // Init initializes the export wrapper func (ew *Wrapper) Init(ctx context.Context) { // instantiate workers - for i := 0; i < ew.WorkerCount; i++ { - ew.Workers = append(ew.Workers, NewExportWorker(i, ew.BasePath, ew.Success)) + for i := 0; i < ew.workerCount; i++ { + ew.workers = append(ew.workers, NewExportWorker(i, ew.basePath, ew.success)) } - go ew.StartDispatcher(ctx) + go ew.startDispatcher(ctx) +} + +// factsEquals checks if two slices of facts are equal +func factsEquals(a, b []engine.Fact) bool { + for _, fact := range a { + found := false + for _, fact2 := range b { + if fact.ID == fact2.ID { + found = true + break + } + } + if !found { + return false + } + } + return false } // AddToQueue Adds a new export to the export worker queue -func (ew *Wrapper) AddToQueue(factIDs []int64, fileName string, params CSVParameters, user users.User) (*WrapperItem, int) { - ew.QueueItemsMutex.Lock() - defer ew.QueueItemsMutex.Unlock() +func (ew *Wrapper) AddToQueue(facts []engine.Fact, fileName string, params CSVParameters, user users.User) (*WrapperItem, int) { + ew.queueMutex.Lock() + defer ew.queueMutex.Unlock() - for _, queueItem := range ew.QueueItems { - if !Int64Equals(queueItem.FactIDs, factIDs) || !queueItem.Params.Equals(params) { + for _, queueItem := range ew.queue { + if !factsEquals(queueItem.Facts, facts) || !queueItem.Params.Equals(params) { continue } @@ -131,17 +146,17 @@ func (ew *Wrapper) AddToQueue(factIDs []int64, fileName 
string, params CSVParame return nil, CodeUserAdded } - if len(ew.QueueItems) >= ew.QueueMaxSize { + if len(ew.queue) >= ew.queueMaxSize { return nil, CodeQueueFull } - item := NewWrapperItem(factIDs, fileName, params, user) - ew.QueueItems = append(ew.QueueItems, item) + item := NewWrapperItem(facts, fileName, params, user) + ew.queue = append(ew.queue, item) return item, CodeAdded } -// StartDispatcher starts the export tasks dispatcher & the expired files checker -func (ew *Wrapper) StartDispatcher(context context.Context) { +// startDispatcher starts the export tasks dispatcher & the expired files checker +func (ew *Wrapper) startDispatcher(context context.Context) { zap.L().Info("Starting export tasks dispatcher") // every 5 seconds check if there is a new task to process in queue then check if there is an available worker // if yes, start the worker with the task @@ -153,22 +168,22 @@ func (ew *Wrapper) StartDispatcher(context context.Context) { for { select { - case w := <-ew.Success: - worker := ew.Workers[w] + case w := <-ew.success: + worker := ew.workers[w] // TODO: send notifications here // archive item when finished worker.Mutex.Lock() - ew.Workers[w].Available = true + ew.workers[w].Available = true item := worker.QueueItem worker.QueueItem = WrapperItem{} worker.Mutex.Unlock() // archive item - ew.Archive.Store(item.Id, item) + ew.archive.Store(item.Id, item) case <-ticker.C: ew.dispatchExportQueue(context) case <-expiredFileTicker.C: - err := ew.CheckForExpiredFiles() + err := ew.checkForExpiredFiles() if err != nil { zap.L().Error("Error during expired files check", zap.Error(err)) @@ -179,25 +194,25 @@ func (ew *Wrapper) StartDispatcher(context context.Context) { } } -// CheckForExpiredFiles checks for expired files in the export directory and deletes them +// checkForExpiredFiles checks for expired files in the export directory and deletes them // it also deletes the done tasks that are older than diskRetentionDays -func (ew *Wrapper) 
CheckForExpiredFiles() error { +func (ew *Wrapper) checkForExpiredFiles() error { // Get all files in directory and check the last edit date // if last edit date is older than diskRetentionDays, delete the file zap.L().Info("Checking for expired files") - files, err := os.ReadDir(ew.BasePath) + files, err := os.ReadDir(ew.basePath) if err != nil { return err } - // delete all done archives of ew.Archive that are older than diskRetentionDays - ew.Archive.Range(func(key, value any) bool { + // delete all done archives of ew.archive that are older than diskRetentionDays + ew.archive.Range(func(key, value any) bool { data, ok := value.(WrapperItem) if !ok { return true } - if time.Since(data.Date).Hours() > float64(ew.DiskRetentionDays*24) { - ew.Archive.Delete(key) + if time.Since(data.Date).Hours() > float64(ew.diskRetentionDays*24) { + ew.archive.Delete(key) } return true }) @@ -210,7 +225,7 @@ func (ew *Wrapper) CheckForExpiredFiles() error { continue } - filePath := filepath.Join(ew.BasePath, file.Name()) + filePath := filepath.Join(ew.basePath, file.Name()) fi, err := os.Stat(filePath) if err != nil { @@ -223,7 +238,7 @@ func (ew *Wrapper) CheckForExpiredFiles() error { // continue //} - if time.Since(fi.ModTime()).Hours() > float64(ew.DiskRetentionDays*24) { + if time.Since(fi.ModTime()).Hours() > float64(ew.diskRetentionDays*24) { err = os.Remove(filePath) if err != nil { zap.L().Error("Cannot delete file", zap.String("file", filePath), zap.Error(err)) @@ -241,7 +256,7 @@ func (ew *Wrapper) GetUserExports(user users.User) []WrapperItem { var result []WrapperItem // first, gather all exports that are in the workers if there are any - for _, worker := range ew.Workers { + for _, worker := range ew.workers { worker.Mutex.Lock() if worker.QueueItem.ContainsUser(user) { result = append(result, worker.QueueItem) @@ -250,7 +265,7 @@ func (ew *Wrapper) GetUserExports(user users.User) []WrapperItem { } // then, gather all exports that are archived - 
ew.Archive.Range(func(key, value any) bool { + ew.archive.Range(func(key, value any) bool { data, ok := value.(WrapperItem) if !ok { return true @@ -262,10 +277,10 @@ func (ew *Wrapper) GetUserExports(user users.User) []WrapperItem { }) // finally, gather all exports that are in the queue - ew.QueueItemsMutex.Lock() - defer ew.QueueItemsMutex.Unlock() + ew.queueMutex.Lock() + defer ew.queueMutex.Unlock() - for _, item := range ew.QueueItems { + for _, item := range ew.queue { if item.ContainsUser(user) { result = append(result, *item) } @@ -274,41 +289,41 @@ func (ew *Wrapper) GetUserExports(user users.User) []WrapperItem { return result } -// DequeueWrapperItem Dequeues an item, returns size of queue and true if item was found and dequeued -func (ew *Wrapper) DequeueWrapperItem(item *WrapperItem) (int, bool) { - ew.QueueItemsMutex.Lock() - defer ew.QueueItemsMutex.Unlock() +// dequeueWrapperItem Dequeues an item, returns size of queue and true if item was found and dequeued +func (ew *Wrapper) dequeueWrapperItem(item *WrapperItem) (int, bool) { + ew.queueMutex.Lock() + defer ew.queueMutex.Unlock() - for i, queueItem := range ew.QueueItems { + for i, queueItem := range ew.queue { // comparing pointer should work if queueItem != item { continue } - ew.QueueItems = append(ew.QueueItems[:i], ew.QueueItems[i+1:]...) - return len(ew.QueueItems), true + ew.queue = append(ew.queue[:i], ew.queue[i+1:]...) 
+ return len(ew.queue), true } - return len(ew.QueueItems), false + return len(ew.queue), false } // dispatchExportQueue dispatches the export queue to the available workers func (ew *Wrapper) dispatchExportQueue(ctx context.Context) { - for _, worker := range ew.Workers { + for _, worker := range ew.workers { worker.Mutex.Lock() if worker.Available { // check if there is an item in the queue - ew.QueueItemsMutex.Lock() + ew.queueMutex.Lock() - if len(ew.QueueItems) == 0 { - ew.QueueItemsMutex.Unlock() + if len(ew.queue) == 0 { + ew.queueMutex.Unlock() worker.Mutex.Unlock() return // Nothing in queue } - item := *ew.QueueItems[0] - ew.QueueItems = append(ew.QueueItems[:0], ew.QueueItems[1:]...) - ew.QueueItemsMutex.Unlock() + item := *ew.queue[0] + ew.queue = append(ew.queue[:0], ew.queue[1:]...) + ew.queueMutex.Unlock() worker.Available = false worker.Mutex.Unlock() @@ -319,8 +334,9 @@ func (ew *Wrapper) dispatchExportQueue(ctx context.Context) { } } +// FindArchive returns the archive item for the given id and user func (ew *Wrapper) FindArchive(id string, user users.User) (WrapperItem, bool) { - item, found := ew.Archive.Load(id) + item, found := ew.archive.Load(id) if found { if data, ok := item.(WrapperItem); ok && data.ContainsUser(user) { return data, true @@ -329,6 +345,42 @@ func (ew *Wrapper) FindArchive(id string, user users.User) (WrapperItem, bool) { return WrapperItem{}, false } +// GetUserExport returns the export item for the given id and user +// this function is similar to GetUserExports but it avoids iterating over all exports, thus it is faster +func (ew *Wrapper) GetUserExport(id string, user users.User) (item WrapperItem, ok bool) { + // start with archived items + if item, ok = ew.FindArchive(id, user); ok { + return item, ok + } + + // then check the workers + for _, worker := range ew.workers { + worker.Mutex.Lock() + if worker.QueueItem.Id == id && worker.QueueItem.ContainsUser(user) { + item = worker.QueueItem + ok = true + } + 
worker.Mutex.Unlock() + if ok { + return item, ok + } + } + + // finally check the queue + ew.queueMutex.Lock() + defer ew.queueMutex.Unlock() + + for _, it := range ew.queue { + ok = it.ContainsUser(user) + if ok { + item = *it + break + } + } + + return item, ok +} + // ContainsUser checks if user is in item func (it *WrapperItem) ContainsUser(user users.User) bool { for _, u := range it.Users { @@ -345,7 +397,7 @@ func (it *WrapperItem) ContainsUser(user users.User) bool { // // if yes, we remove the queueItem from the queue // // if no, we remove the user from the queueItem.users // -// for i, worker := range ew.Workers { +// for i, worker := range ew.workers { // // worker.Mutex.Lock() // if worker.QueueItem == nil || worker.QueueItem.Id != id { diff --git a/internals/export/wrapper_test.go b/internals/export/wrapper_test.go index 8413b15c..5a9941bc 100644 --- a/internals/export/wrapper_test.go +++ b/internals/export/wrapper_test.go @@ -14,10 +14,10 @@ import ( func TestNewWrapper(t *testing.T) { wrapper := NewWrapper("/tmp", 1, 1, 1) - expression.AssertEqual(t, wrapper.BasePath, "/tmp") - expression.AssertEqual(t, wrapper.QueueMaxSize, 1) - expression.AssertEqual(t, wrapper.DiskRetentionDays, 1) - expression.AssertEqual(t, wrapper.QueueMaxSize, 1) + expression.AssertEqual(t, wrapper.basePath, "/tmp") + expression.AssertEqual(t, wrapper.queueMaxSize, 1) + expression.AssertEqual(t, wrapper.diskRetentionDays, 1) + expression.AssertEqual(t, wrapper.queueMaxSize, 1) } func TestNewWrapperItem(t *testing.T) { @@ -44,8 +44,8 @@ func TestWrapper_Init(t *testing.T) { defer cancel() wrapper.Init(ctx) time.Sleep(500 * time.Millisecond) - expression.AssertEqual(t, len(wrapper.Workers), 1) - worker := wrapper.Workers[0] + expression.AssertEqual(t, len(wrapper.workers), 1) + worker := wrapper.workers[0] expression.AssertEqual(t, worker.Id, 0) worker.Mutex.Lock() defer worker.Mutex.Unlock() @@ -91,12 +91,12 @@ func TestStartDispatcher(t *testing.T) { wrapper := 
NewWrapper(dname, 1, 1, 1) wrapper.Init(ctx) - expression.AssertEqual(t, len(wrapper.Workers), 1) + expression.AssertEqual(t, len(wrapper.workers), 1) // sleep one second to let the goroutine start fmt.Println("Sleeping 1 second to let the goroutine start") time.Sleep(1 * time.Second) - worker := wrapper.Workers[0] + worker := wrapper.workers[0] // check if the worker is available worker.Mutex.Lock() @@ -107,18 +107,18 @@ func TestStartDispatcher(t *testing.T) { user := users.User{Login: "test"} _, result := wrapper.AddToQueue([]int64{1}, fileName, CSVParameters{}, user) expression.AssertEqual(t, result, CodeAdded, "AddToQueue should return CodeAdded") - wrapper.QueueItemsMutex.Lock() - expression.AssertEqual(t, len(wrapper.QueueItems), 1) - itemId := wrapper.QueueItems[0].Id - wrapper.QueueItemsMutex.Unlock() + wrapper.queueMutex.Lock() + expression.AssertEqual(t, len(wrapper.queue), 1) + itemId := wrapper.queue[0].Id + wrapper.queueMutex.Unlock() // sleep another 5 seconds to let the goroutine handle the task fmt.Println("Sleeping 5 seconds to let the goroutine handle the task") time.Sleep(5 * time.Second) - wrapper.QueueItemsMutex.Lock() - expression.AssertEqual(t, len(wrapper.QueueItems), 0) - wrapper.QueueItemsMutex.Unlock() + wrapper.queueMutex.Lock() + expression.AssertEqual(t, len(wrapper.queue), 0) + wrapper.queueMutex.Unlock() worker.Mutex.Lock() expression.AssertEqual(t, worker.Available, true) @@ -164,7 +164,7 @@ func TestCheckForExpiredFiles(t *testing.T) { _ = file2.Close() wrapper := NewWrapper(dname, 1, 1, 1) - err = wrapper.CheckForExpiredFiles() + err = wrapper.checkForExpiredFiles() if err != nil { t.Error(err) t.FailNow() @@ -187,23 +187,23 @@ func TestCheckForExpiredFiles(t *testing.T) { goodDate := time.Now() id1 := uuid.New() id2 := uuid.New() - wrapper.Archive.Store(id1, WrapperItem{Date: time.Now().AddDate(0, 0, -2)}) - wrapper.Archive.Store(id2, WrapperItem{Date: goodDate}) + wrapper.archive.Store(id1, WrapperItem{Date: 
time.Now().AddDate(0, 0, -2)}) + wrapper.archive.Store(id2, WrapperItem{Date: goodDate}) - _, found := wrapper.Archive.Load(id1) + _, found := wrapper.archive.Load(id1) expression.AssertEqual(t, found, true) - _, found = wrapper.Archive.Load(id2) + _, found = wrapper.archive.Load(id2) expression.AssertEqual(t, found, true) - err = wrapper.CheckForExpiredFiles() + err = wrapper.checkForExpiredFiles() if err != nil { t.Error(err) t.FailNow() } - _, found = wrapper.Archive.Load(id1) + _, found = wrapper.archive.Load(id1) expression.AssertEqual(t, found, false) - _, found = wrapper.Archive.Load(id2) + _, found = wrapper.archive.Load(id2) expression.AssertEqual(t, found, true) } @@ -215,10 +215,10 @@ func TestWrapper_GetUserExports(t *testing.T) { item2 := NewWrapperItem([]int64{2}, "test.txt", CSVParameters{}, user1) item3 := NewWrapperItem([]int64{3}, "test.txt", CSVParameters{}, user1) item4 := NewWrapperItem([]int64{4}, "test.txt", CSVParameters{}, user2) - wrapper.Archive.Store(item1.Id, *item1) - wrapper.Archive.Store(item2.Id, *item2) - wrapper.Archive.Store(item3.Id, *item3) - wrapper.Archive.Store(item4.Id, *item4) + wrapper.archive.Store(item1.Id, *item1) + wrapper.archive.Store(item2.Id, *item2) + wrapper.archive.Store(item3.Id, *item3) + wrapper.archive.Store(item4.Id, *item4) wrapper.AddToQueue([]int64{5}, "test.txt", CSVParameters{}, user1) wrapper.AddToQueue([]int64{6}, "test.txt", CSVParameters{}, user2) exports := wrapper.GetUserExports(user1) @@ -229,21 +229,21 @@ func TestWrapper_GetUserExports(t *testing.T) { func TestWrapper_DequeueWrapperItem(t *testing.T) { wrapper := NewWrapper("/tmp", 1, 1, 2) - i, ok := wrapper.DequeueWrapperItem(&WrapperItem{}) + i, ok := wrapper.dequeueWrapperItem(&WrapperItem{}) expression.AssertEqual(t, ok, false) expression.AssertEqual(t, i, 0) wrapper.AddToQueue([]int64{5}, "test.txt", CSVParameters{}, users.User{Login: "bla"}) wrapper.AddToQueue([]int64{6}, "test.txt", CSVParameters{}, users.User{Login: "blabla"}) - 
expression.AssertEqual(t, len(wrapper.QueueItems), 2) - item1 := wrapper.QueueItems[0] - item2 := wrapper.QueueItems[1] + expression.AssertEqual(t, len(wrapper.queue), 2) + item1 := wrapper.queue[0] + item2 := wrapper.queue[1] - i, ok = wrapper.DequeueWrapperItem(item1) + i, ok = wrapper.dequeueWrapperItem(item1) expression.AssertEqual(t, ok, true) expression.AssertEqual(t, i, 1) - i, ok = wrapper.DequeueWrapperItem(item2) + i, ok = wrapper.dequeueWrapperItem(item2) expression.AssertEqual(t, ok, true) expression.AssertEqual(t, i, 0) } @@ -275,8 +275,8 @@ func TestWrapper_dispatchExportQueue(t *testing.T) { // wait until dispatcher stops time.Sleep(50 * time.Millisecond) - expression.AssertEqual(t, len(wrapper.Workers), 1) - worker := wrapper.Workers[0] + expression.AssertEqual(t, len(wrapper.workers), 1) + worker := wrapper.workers[0] // no items in queue -> nothing should happen expression.AssertEqual(t, worker.IsAvailable(), true) @@ -292,9 +292,9 @@ func TestWrapper_dispatchExportQueue(t *testing.T) { wrapper.dispatchExportQueue(context.Background()) // the item should still be in the queue - wrapper.QueueItemsMutex.Lock() - expression.AssertEqual(t, len(wrapper.QueueItems), 1, "item should still be in the queue, since no worker is available") - wrapper.QueueItemsMutex.Unlock() + wrapper.queueMutex.Lock() + expression.AssertEqual(t, len(wrapper.queue), 1, "item should still be in the queue, since no worker is available") + wrapper.queueMutex.Unlock() // we test if dispatchExportQueue will dispatch the item, worker is now set to available expression.AssertEqual(t, worker.SwapAvailable(true), false) @@ -302,7 +302,7 @@ func TestWrapper_dispatchExportQueue(t *testing.T) { wrapper.dispatchExportQueue(context.Background()) expression.AssertEqual(t, worker.IsAvailable(), false, "worker should not be available, because it is working on an item") - expression.AssertEqual(t, len(wrapper.QueueItems), 0) + expression.AssertEqual(t, len(wrapper.queue), 0) // wait until 
worker has finished time.Sleep(1 * time.Second) @@ -316,7 +316,7 @@ func TestWrapper_dispatchExportQueue(t *testing.T) { func TestWrapper_FindArchive(t *testing.T) { wrapper := NewWrapper("/tmp", 1, 1, 2) item := NewWrapperItem([]int64{1}, "test.txt", CSVParameters{}, users.User{Login: "bla"}) - wrapper.Archive.Store(item.Id, *item) + wrapper.archive.Store(item.Id, *item) // testing with non-existing item in archive _, ok := wrapper.FindArchive("test", users.User{Login: "bla"}) diff --git a/internals/handlers/export_handlers.go b/internals/handlers/export_handlers.go index 6dc3ae83..c4e3b6ca 100644 --- a/internals/handlers/export_handlers.go +++ b/internals/handlers/export_handlers.go @@ -3,6 +3,7 @@ package handlers import ( "context" "errors" + "fmt" "github.com/myrteametrics/myrtea-sdk/v4/engine" "net/http" "strconv" @@ -55,21 +56,38 @@ func ExportFactStreamed(w http.ResponseWriter, r *http.Request) { return } + filename, params, combineFacts, done := handleExportArgs(w, r, err, idFact) + if done { + return + } + + err = HandleStreamedExport(r.Context(), w, combineFacts, filename, params) + if err != nil { + render.Error(w, r, render.ErrAPIProcessError, err) + } + return + +} + +// handleExportArgs handles the export arguments and returns the filename, the parameters and the facts to export +func handleExportArgs(w http.ResponseWriter, r *http.Request, err error, idFact int64) (string, export.CSVParameters, []engine.Fact, bool) { f, found, err := fact.R().Get(idFact) if err != nil { zap.L().Error("Cannot retrieve fact", zap.Int64("factID", idFact), zap.Error(err)) render.Error(w, r, render.ErrAPIDBSelectFailed, err) - return + return "", export.CSVParameters{}, nil, true } if !found { zap.L().Warn("fact does not exist", zap.Int64("factID", idFact)) render.Error(w, r, render.ErrAPIDBResourceNotFound, err) - return + return "", export.CSVParameters{}, nil, true } var filename = r.URL.Query().Get("fileName") if filename == "" { - filename = f.Name + "_export_" + 
time.Now().Format("02_01_2006_15-04") + ".csv" + filename = fmt.Sprintf("%s_export_%s.csv", f.Name, time.Now().Format("02_01_2006")) + } else { + filename = fmt.Sprintf("%s_%s.csv", time.Now().Format("02_01_2006"), filename) } // suppose that type is csv @@ -101,15 +119,10 @@ func ExportFactStreamed(w http.ResponseWriter, r *http.Request) { combineFacts = append(combineFacts, combineFact) } } - - err = HandleStreamedExport(r.Context(), w, combineFacts, filename, params) - if err != nil { - render.Error(w, r, render.ErrAPIProcessError, err) - } - return - + return filename, params, combineFacts, false } +// GetCSVParameters returns the parameters for the CSV export func GetCSVParameters(r *http.Request) export.CSVParameters { result := export.CSVParameters{Separator: ','} @@ -252,42 +265,84 @@ func HandleStreamedExport(requestContext context.Context, w http.ResponseWriter, // @Description Get in memory user exports // @Produce json // @Security Bearer -// @Success 200 {json} Returns data to be saved into a file +// @Success 200 {json} Returns a list of exports // @Failure 500 "internal server error" // @Router /engine/exports [get] func (e *ExportHandler) GetExports(w http.ResponseWriter, r *http.Request) { - + userCtx, _ := GetUserFromContext(r) + if !userCtx.HasPermission(permissions.New(permissions.TypeExport, permissions.All, permissions.ActionList)) { + render.Error(w, r, render.ErrAPISecurityNoPermissions, errors.New("missing permission")) + return + } + exports := e.exportWrapper.GetUserExports(userCtx.User) + render.JSON(w, r, exports) } -// GetFacts godoc -// @Summary Get all user exports -// @Description Get all user exports +// GetExport godoc +// @Summary Get single export from user +// @Description Get single export from user // @Tags Exports // @Produce json // @Security Bearer // @Success 200 "Status OK" // @Failure 500 "internal server error" -// @Router /service/exports/{id} [post] +// @Router /service/exports/{id} [get] func (e *ExportHandler) 
GetExport(w http.ResponseWriter, r *http.Request) { + id := chi.URLParam(r, "id") + if id == "" { + render.Error(w, r, render.ErrAPIMissingParam, errors.New("missing id")) + return + } + + userCtx, _ := GetUserFromContext(r) + if !userCtx.HasPermission(permissions.New(permissions.TypeExport, permissions.All, permissions.ActionGet)) { + render.Error(w, r, render.ErrAPISecurityNoPermissions, errors.New("missing permission")) + return + } + item, ok := e.exportWrapper.GetUserExport(id, userCtx.User) + if !ok { + render.Error(w, r, render.ErrAPIDBResourceNotFound, errors.New("export not found")) + return + } + + render.JSON(w, r, item) } -// GetFacts godoc -// @Summary Get all user exports -// @Description Get all user exports +// DeleteExport godoc +// @Summary Deletes a single export +// @Description Deletes a single export, when running it is canceled // @Tags Exports // @Produce json // @Security Bearer // @Success 200 "Status OK" // @Failure 500 "internal server error" -// @Router /service/exports/{id} [post] +// @Router /service/exports/{id} [delete] func (e *ExportHandler) DeleteExport(w http.ResponseWriter, r *http.Request) { + id := chi.URLParam(r, "id") + if id == "" { + render.Error(w, r, render.ErrAPIMissingParam, errors.New("missing id")) + return + } + + userCtx, _ := GetUserFromContext(r) + if !userCtx.HasPermission(permissions.New(permissions.TypeExport, permissions.All, permissions.ActionDelete)) { + render.Error(w, r, render.ErrAPISecurityNoPermissions, errors.New("missing permission")) + return + } + + item, ok := e.exportWrapper.GetUserExport(id, userCtx.User) + if !ok { + render.Error(w, r, render.ErrAPIDBResourceNotFound, errors.New("export not found")) + return + } + render.JSON(w, r, item) } -// GetFacts godoc -// @Summary Get all user exports -// @Description Get all user exports +// ExportFact godoc +// @Summary Creates a new export request for a fact (or multiple facts) +// @Description Creates a new export request for a fact (or multiple 
facts) // @Tags Exports // @Produce json // @Security Bearer @@ -295,5 +350,34 @@ func (e *ExportHandler) DeleteExport(w http.ResponseWriter, r *http.Request) { // @Failure 500 "internal server error" // @Router /service/exports/fact/{id} [post] func (e *ExportHandler) ExportFact(w http.ResponseWriter, r *http.Request) { + id := chi.URLParam(r, "id") + idFact, err := strconv.ParseInt(id, 10, 64) + + if err != nil { + zap.L().Warn("Error on parsing fact id", zap.String("idFact", id), zap.Error(err)) + render.Error(w, r, render.ErrAPIParsingInteger, err) + return + } + + userCtx, _ := GetUserFromContext(r) + if !userCtx.HasPermission(permissions.New(permissions.TypeExport, permissions.All, permissions.ActionCreate)) { + render.Error(w, r, render.ErrAPISecurityNoPermissions, errors.New("missing permission")) + return + } + + filename, params, combineFacts, done := handleExportArgs(w, r, err, idFact) + if done { + return + } + + item, status := e.exportWrapper.AddToQueue(combineFacts, filename, params, userCtx.User) + + switch status { + case export.CodeAdded: + case export.CodeUserAdded: + case export.CodeUserExists: + case export.CodeQueueFull: + } + render.JSON(w, r, item) } diff --git a/internals/security/permissions/permission.go b/internals/security/permissions/permission.go index c2b1b85f..06574794 100644 --- a/internals/security/permissions/permission.go +++ b/internals/security/permissions/permission.go @@ -26,6 +26,7 @@ const ( TypeCalendar = "calendar" TypeModel = "model" TypeFrontend = "frontend" + TypeExport = "export" ) type Permission struct { @@ -35,6 +36,7 @@ type Permission struct { Action string `json:"action"` } +// New returns a new Permission func New(resourceType string, resourceID string, action string) Permission { return Permission{ ResourceType: resourceType, @@ -43,6 +45,7 @@ func New(resourceType string, resourceID string, action string) Permission { } } +// ListMatchingPermissions returns a list of permissions matching the given permission 
func ListMatchingPermissions(permissions []Permission, match Permission) []Permission { lst := make([]Permission, 0) for _, permission := range permissions { @@ -60,7 +63,8 @@ func ListMatchingPermissions(permissions []Permission, match Permission) []Permi lst } -func GetRessourceIDs(permissions []Permission) []string { +// GetResourceIDs returns a list of resource IDs from a list of permissions +func GetResourceIDs(permissions []Permission) []string { resourceIDs := make([]string, 0) for _, permission := range permissions { resourceIDs = append(resourceIDs, permission.ResourceID) @@ -68,6 +72,7 @@ func GetRessourceIDs(permissions []Permission) []string { return resourceIDs } +// matchPermission checks if the permission matches the required permission func matchPermission(permission string, required string) bool { if permission == All { return true @@ -81,6 +86,7 @@ func matchPermission(permission string, required string) bool { return false } +// matchPermissionStrict strictly checks if the permission matches the required permission func matchPermissionStrict(permission string, required string) bool { if permission == All { return true @@ -91,6 +97,7 @@ func matchPermissionStrict(permission string, required string) bool { return false } +// HasPermission checks if the user has the required permission func HasPermission(permissions []Permission, required Permission) bool { for _, permission := range permissions { if !matchPermissionStrict(permission.ResourceType, required.ResourceType) { @@ -107,6 +114,7 @@ func HasPermission(permissions []Permission, required Permission) bool { return false } +// HasPermissionAtLeastOne checks if the user has at least one of the required permissions func HasPermissionAtLeastOne(permissions []Permission, requiredAtLeastOne []Permission) bool { for _, required := range requiredAtLeastOne { if HasPermission(permissions, required) { @@ -116,6 +124,7 @@ func HasPermissionAtLeastOne(permissions []Permission, requiredAtLeastOne []Perm return false } +// 
HasPermissionAll checks if the user has all the required permissions func HasPermissionAll(permissions []Permission, requiredAll []Permission) bool { for _, required := range requiredAll { if !HasPermission(permissions, required) { diff --git a/internals/security/permissions/permission_test.go b/internals/security/permissions/permission_test.go index 0f51998a..62bb048a 100644 --- a/internals/security/permissions/permission_test.go +++ b/internals/security/permissions/permission_test.go @@ -117,7 +117,7 @@ func TestGetResourceIDs(t *testing.T) { New("fact", "5", "*"), } - resourceIDs := GetRessourceIDs(ListMatchingPermissions(permissions, New("situation", "*", "create"))) + resourceIDs := GetResourceIDs(ListMatchingPermissions(permissions, New("situation", "*", "create"))) if len(resourceIDs) != 3 { t.Error("invalid resourceIDs") } diff --git a/internals/security/permissions/postgres_repository.go b/internals/security/permissions/postgres_repository.go index bb49b965..58cc42fa 100644 --- a/internals/security/permissions/postgres_repository.go +++ b/internals/security/permissions/postgres_repository.go @@ -5,7 +5,7 @@ import ( "errors" sq "github.com/Masterminds/squirrel" - uuid "github.com/google/uuid" + "github.com/google/uuid" "github.com/jmoiron/sqlx" "go.uber.org/zap" ) @@ -29,7 +29,7 @@ func NewPostgresRepository(dbClient *sqlx.DB) Repository { return ifm } -//Get search and returns an User Permission from the repository by its id +// Get search and returns an User Permission from the repository by its id func (r *PostgresRepository) Get(permissionUUID uuid.UUID) (Permission, bool, error) { rows, err := r.newStatement(). Select(fields...). 
diff --git a/internals/security/users/user.go b/internals/security/users/user.go index 2197da7d..2793ee61 100644 --- a/internals/security/users/user.go +++ b/internals/security/users/user.go @@ -102,12 +102,12 @@ func (u UserWithPermissions) ListMatchingPermissions(match permissions.Permissio } func (u UserWithPermissions) GetMatchingResourceIDs(match permissions.Permission) []string { - return permissions.GetRessourceIDs(permissions.ListMatchingPermissions(u.Permissions, match)) + return permissions.GetResourceIDs(permissions.ListMatchingPermissions(u.Permissions, match)) } func (u UserWithPermissions) GetMatchingResourceIDsInt64(match permissions.Permission) []int64 { ids := make([]int64, 0) - for _, resourceID := range permissions.GetRessourceIDs(permissions.ListMatchingPermissions(u.Permissions, match)) { + for _, resourceID := range permissions.GetResourceIDs(permissions.ListMatchingPermissions(u.Permissions, match)) { if resourceID == permissions.All { continue } From 051daa5f0ba6e1efbbad865076aeb6b2ebec3a5c Mon Sep 17 00:00:00 2001 From: SchawnnDev Date: Wed, 22 Nov 2023 17:33:57 +0100 Subject: [PATCH 10/35] Export endpoints + cancel tests --- internals/export/worker.go | 24 +++-- internals/export/worker_test.go | 7 ++ internals/export/wrapper.go | 126 +++++++++++++++++--------- internals/export/wrapper_test.go | 112 +++++++++++++++++++---- internals/handlers/export_handlers.go | 58 ++++++++---- internals/handlers/render/render.go | 5 +- 6 files changed, 247 insertions(+), 85 deletions(-) diff --git a/internals/export/worker.go b/internals/export/worker.go index cf03b351..69d3e806 100644 --- a/internals/export/worker.go +++ b/internals/export/worker.go @@ -5,7 +5,6 @@ import ( "context" "encoding/csv" "fmt" - "github.com/myrteametrics/myrtea-sdk/v4/engine" "go.uber.org/zap" "os" "path/filepath" @@ -28,7 +27,7 @@ func NewExportWorker(id int, basePath string, success chan<- int) *ExportWorker Id: id, Available: true, BasePath: basePath, - Cancel: make(chan 
bool), + Cancel: make(chan bool, 3), // buffered channel to avoid blocking Success: success, } } @@ -64,6 +63,17 @@ func (e *ExportWorker) IsAvailable() bool { return e.Available } +// DrainCancelChannel drains the cancel channel +func (e *ExportWorker) DrainCancelChannel() { + for { + select { + case <-e.Cancel: + default: + return + } + } +} + // finalise sets the worker availability to true and clears the queueItem func (e *ExportWorker) finalise() { e.Mutex.Lock() @@ -76,9 +86,11 @@ func (e *ExportWorker) finalise() { if e.QueueItem.Status != StatusError { e.QueueItem.Status = StatusDone } - e.Mutex.Unlock() + // clear Cancel channel, to avoid blocking + e.DrainCancelChannel() + // notify to the dispatcher that this worker is now available e.Success <- e.Id } @@ -89,6 +101,7 @@ func (e *ExportWorker) Start(item WrapperItem, ctx context.Context) { defer e.finalise() e.Mutex.Lock() e.QueueItem = item + e.QueueItem.Status = StatusRunning e.Mutex.Unlock() // create file @@ -134,9 +147,8 @@ func (e *ExportWorker) Start(item WrapperItem, ctx context.Context) { defer wg.Done() defer close(streamedExport.Data) - for _, f := range item.FactIDs { - _ = f // TODO: facts - writerErr = streamedExport.StreamedExportFactHitsFull(ctx, engine.Fact{}, item.Params.Limit) + for _, f := range item.Facts { + writerErr = streamedExport.StreamedExportFactHitsFull(ctx, f, item.Params.Limit) if writerErr != nil { break // break here when error occurs? 
} diff --git a/internals/export/worker_test.go b/internals/export/worker_test.go index 9023575d..349979ea 100644 --- a/internals/export/worker_test.go +++ b/internals/export/worker_test.go @@ -39,3 +39,10 @@ func TestExportWorker_IsAvailable(t *testing.T) { worker.SwapAvailable(false) expression.AssertEqual(t, worker.IsAvailable(), false) } + +func TestExportWorker_DrainCancelChannel(t *testing.T) { + worker := NewExportWorker(0, "/tmp", make(chan<- int)) + worker.Cancel <- true + worker.DrainCancelChannel() + expression.AssertEqual(t, len(worker.Cancel), 0) +} diff --git a/internals/export/wrapper.go b/internals/export/wrapper.go index 103580a2..2647274e 100644 --- a/internals/export/wrapper.go +++ b/internals/export/wrapper.go @@ -27,8 +27,8 @@ const ( ) type WrapperItem struct { - Id string `json:"id"` // unique id that represents an export demand - FactIDs []int64 `json:"factIds"` + Id string `json:"id"` // unique id that represents an export demand + FactIDs []int64 `json:"factIds"` // list of fact ids that are part of the export (for archive and json) Facts []engine.Fact `json:"-"` Error error `json:"error"` Status int `json:"status"` @@ -110,6 +110,9 @@ func (ew *Wrapper) Init(ctx context.Context) { // factsEquals checks if two slices of facts are equal func factsEquals(a, b []engine.Fact) bool { + if len(a) != len(b) { + return false + } for _, fact := range a { found := false for _, fact2 := range b { @@ -122,7 +125,7 @@ func factsEquals(a, b []engine.Fact) bool { return false } } - return false + return true } // AddToQueue Adds a new export to the export worker queue @@ -179,6 +182,7 @@ func (ew *Wrapper) startDispatcher(context context.Context) { worker.QueueItem = WrapperItem{} worker.Mutex.Unlock() // archive item + item.Facts = []engine.Fact{} // empty facts to avoid storing them in the archive ew.archive.Store(item.Id, item) case <-ticker.C: ew.dispatchExportQueue(context) @@ -346,7 +350,7 @@ func (ew *Wrapper) FindArchive(id string, user 
users.User) (WrapperItem, bool) { } // GetUserExport returns the export item for the given id and user -// this function is similar to GetUserExports but it avoids iterating over all exports, thus it is faster +// this function is similar to GetUserExports, but it avoids iterating over all exports, thus it is faster func (ew *Wrapper) GetUserExport(id string, user users.User) (item WrapperItem, ok bool) { // start with archived items if item, ok = ew.FindArchive(id, user); ok { @@ -381,6 +385,81 @@ func (ew *Wrapper) GetUserExport(id string, user users.User) (item WrapperItem, return item, ok } +// DeleteExport removes an export from the queue / archive, or cancels it if it is running +// returns true if the export was found and deleted, false otherwise +// this function is similar to GetUserExport, but it avoids iterating over all exports, thus it is faster +func (ew *Wrapper) DeleteExport(id string, user users.User) bool { + // start with archived items + if item, ok := ew.FindArchive(id, user); ok { + if len(item.Users) == 1 { + ew.archive.Delete(id) + return true + } + // remove user from item + for i, u := range item.Users { + if u == user.Login { + item.Users = append(item.Users[:i], item.Users[i+1:]...) + break + } + } + ew.archive.Store(id, item) + return true + } + + // then check the queue + ew.queueMutex.Lock() + for i, item := range ew.queue { + if item.Id == id && item.ContainsUser(user) { + // remove user from item + for j, u := range item.Users { + if u == user.Login { + item.Users = append(item.Users[:j], item.Users[j+1:]...) + break + } + } + if len(item.Users) == 0 { + ew.queue = append(ew.queue[:i], ew.queue[i+1:]...) 
+ } + ew.queueMutex.Unlock() + return true + } + } + ew.queueMutex.Unlock() + + // finally check the workers + for _, worker := range ew.workers { + worker.Mutex.Lock() + if worker.Available || worker.QueueItem.Id != id || !worker.QueueItem.ContainsUser(user) { + worker.Mutex.Unlock() + continue + } + + // remove user from item + if len(worker.QueueItem.Users) == 1 { + // cancel worker by sending a message on the cancel channel + // the worker will check this channel and stop if it receives a message + // it can happen that the worker is already stopped, in this case, the message will be ignored + select { // non-blocking send + case worker.Cancel <- true: + default: + } + worker.Mutex.Unlock() + return true + } + + for i, u := range worker.QueueItem.Users { + if u == user.Login { + worker.QueueItem.Users = append(worker.QueueItem.Users[:i], worker.QueueItem.Users[i+1:]...) + break + } + } + worker.Mutex.Unlock() + return true + } + + return false +} + // ContainsUser checks if user is in item func (it *WrapperItem) ContainsUser(user users.User) bool { for _, u := range it.Users { @@ -390,42 +469,3 @@ func (it *WrapperItem) ContainsUser(user users.User) bool { } return false } - -//func (ew *Wrapper) CancelExport(id string, user users.User) error { -// // first check if the export is in the queue -// // if it is, we check if the user is the only one in the queueItem.users -// // if yes, we remove the queueItem from the queue -// // if no, we remove the user from the queueItem.users -// -// for i, worker := range ew.workers { -// -// worker.Mutex.Lock() -// if worker.QueueItem == nil || worker.QueueItem.Id != id { -// worker.Mutex.Unlock() -// continue -// } -// worker.Mutex.Lock() -// -// if userIdx == -1 { -// worker.Mutex.Unlock() -// ew.QueueMutex.Unlock() -// return fmt.Errorf("user not found") -// } -// -// if len(worker.Users) == 1 { -// ew.Queue = append(ew.Queue[:userIdx], ew.Queue[userIdx+1:]...) 
-// worker.Mutex.Unlock() -// ew.QueueMutex.Unlock() -// return nil -// } -// -// worker.Users = append(worker.Users[:i], worker.Users[i+1:]...) -// worker.Mutex.Unlock() -// ew.QueueMutex.Unlock() -// return nil -// } -// -// ew.QueueMutex.Unlock() -// -// return nil -//} diff --git a/internals/export/wrapper_test.go b/internals/export/wrapper_test.go index 5a9941bc..8735d3f8 100644 --- a/internals/export/wrapper_test.go +++ b/internals/export/wrapper_test.go @@ -5,6 +5,7 @@ import ( "fmt" "github.com/google/uuid" "github.com/myrteametrics/myrtea-engine-api/v5/internals/security/users" + "github.com/myrteametrics/myrtea-sdk/v4/engine" "github.com/myrteametrics/myrtea-sdk/v4/expression" "os" "path/filepath" @@ -20,10 +21,19 @@ func TestNewWrapper(t *testing.T) { expression.AssertEqual(t, wrapper.queueMaxSize, 1) } +func TestFactsEquals(t *testing.T) { + expression.AssertEqual(t, factsEquals([]engine.Fact{{ID: 1}}, []engine.Fact{{ID: 1}}), true) + expression.AssertEqual(t, factsEquals([]engine.Fact{{ID: 1}}, []engine.Fact{{ID: 2}}), false) + expression.AssertEqual(t, factsEquals([]engine.Fact{{ID: 1}, {ID: 2}}, []engine.Fact{{ID: 2}, {ID: 1}}), true) + expression.AssertEqual(t, factsEquals([]engine.Fact{{ID: 1}, {ID: 2}}, []engine.Fact{{ID: 1}, {ID: 3}}), false) + expression.AssertEqual(t, factsEquals([]engine.Fact{{ID: 1}, {ID: 2}}, []engine.Fact{{ID: 1}, {ID: 2}, {ID: 3}}), false) + expression.AssertEqual(t, factsEquals([]engine.Fact{{ID: 2}, {ID: 1}, {ID: 3}}, []engine.Fact{{ID: 1}, {ID: 2}}), false) +} + func TestNewWrapperItem(t *testing.T) { - item := NewWrapperItem([]int64{1}, "test.txt", CSVParameters{}, users.User{Login: "test"}) + item := NewWrapperItem([]engine.Fact{{ID: 1}}, "test.txt", CSVParameters{}, users.User{Login: "test"}) expression.AssertNotEqual(t, item.Id, "") - expression.AssertEqual(t, Int64Equals(item.FactIDs, []int64{1}), true) + expression.AssertEqual(t, factsEquals(item.Facts, []engine.Fact{{ID: 1}}), true) expression.AssertEqual(t, 
item.Params.Equals(CSVParameters{}), true) expression.AssertEqual(t, item.Status, StatusPending) expression.AssertEqual(t, item.FileName, "test.txt") @@ -32,7 +42,7 @@ func TestNewWrapperItem(t *testing.T) { } func TestWrapperItem_ContainsFact(t *testing.T) { - item := NewWrapperItem([]int64{1, 22, 33}, "test.txt", CSVParameters{}, users.User{Login: "test"}) + item := NewWrapperItem([]engine.Fact{{ID: 1}, {ID: 22}, {ID: 33}}, "test.txt", CSVParameters{}, users.User{Login: "test"}) expression.AssertEqual(t, item.ContainsFact(1), true) expression.AssertEqual(t, item.ContainsFact(22), true) expression.AssertEqual(t, item.ContainsFact(3), false) @@ -57,13 +67,13 @@ func TestAddToQueue(t *testing.T) { user1 := users.User{Login: "bla"} user2 := users.User{Login: "blabla"} csvParams := CSVParameters{} - _, result := wrapper.AddToQueue([]int64{1}, "test.txt", csvParams, user1) + _, result := wrapper.AddToQueue([]engine.Fact{{ID: 1}}, "test.txt", csvParams, user1) expression.AssertEqual(t, result, CodeAdded, "AddToQueue should return CodeAdded") - _, result = wrapper.AddToQueue([]int64{1}, "test.txt", csvParams, user1) + _, result = wrapper.AddToQueue([]engine.Fact{{ID: 1}}, "test.txt", csvParams, user1) expression.AssertEqual(t, result, CodeUserExists, "AddToQueue should return CodeUserExists") - _, result = wrapper.AddToQueue([]int64{1}, "test.txt", csvParams, user2) + _, result = wrapper.AddToQueue([]engine.Fact{{ID: 1}}, "test.txt", csvParams, user2) expression.AssertEqual(t, result, CodeUserAdded, "AddToQueue should return CodeUserAdded") - _, result = wrapper.AddToQueue([]int64{2}, "test.txt", csvParams, user2) + _, result = wrapper.AddToQueue([]engine.Fact{{ID: 2}}, "test.txt", csvParams, user2) expression.AssertEqual(t, result, CodeQueueFull, "AddToQueue should return CodeQueueFull") } @@ -105,7 +115,7 @@ func TestStartDispatcher(t *testing.T) { // add a task to the queue and check if the task was added to queue user := users.User{Login: "test"} - _, result := 
wrapper.AddToQueue([]int64{1}, fileName, CSVParameters{}, user) + _, result := wrapper.AddToQueue([]engine.Fact{{ID: 1}}, fileName, CSVParameters{}, user) expression.AssertEqual(t, result, CodeAdded, "AddToQueue should return CodeAdded") wrapper.queueMutex.Lock() expression.AssertEqual(t, len(wrapper.queue), 1) @@ -211,16 +221,16 @@ func TestWrapper_GetUserExports(t *testing.T) { wrapper := NewWrapper("/tmp", 1, 1, 2) user1 := users.User{Login: "bla"} user2 := users.User{Login: "blabla"} - item1 := NewWrapperItem([]int64{1}, "test.txt", CSVParameters{}, user1) - item2 := NewWrapperItem([]int64{2}, "test.txt", CSVParameters{}, user1) - item3 := NewWrapperItem([]int64{3}, "test.txt", CSVParameters{}, user1) - item4 := NewWrapperItem([]int64{4}, "test.txt", CSVParameters{}, user2) + item1 := NewWrapperItem([]engine.Fact{{ID: 1}}, "test.txt", CSVParameters{}, user1) + item2 := NewWrapperItem([]engine.Fact{{ID: 2}}, "test.txt", CSVParameters{}, user1) + item3 := NewWrapperItem([]engine.Fact{{ID: 3}}, "test.txt", CSVParameters{}, user1) + item4 := NewWrapperItem([]engine.Fact{{ID: 4}}, "test.txt", CSVParameters{}, user2) wrapper.archive.Store(item1.Id, *item1) wrapper.archive.Store(item2.Id, *item2) wrapper.archive.Store(item3.Id, *item3) wrapper.archive.Store(item4.Id, *item4) - wrapper.AddToQueue([]int64{5}, "test.txt", CSVParameters{}, user1) - wrapper.AddToQueue([]int64{6}, "test.txt", CSVParameters{}, user2) + wrapper.AddToQueue([]engine.Fact{{ID: 5}}, "test.txt", CSVParameters{}, user1) + wrapper.AddToQueue([]engine.Fact{{ID: 6}}, "test.txt", CSVParameters{}, user2) exports := wrapper.GetUserExports(user1) expression.AssertEqual(t, len(exports), 4) exports = wrapper.GetUserExports(user2) @@ -232,8 +242,8 @@ func TestWrapper_DequeueWrapperItem(t *testing.T) { i, ok := wrapper.dequeueWrapperItem(&WrapperItem{}) expression.AssertEqual(t, ok, false) expression.AssertEqual(t, i, 0) - wrapper.AddToQueue([]int64{5}, "test.txt", CSVParameters{}, users.User{Login: "bla"}) - 
wrapper.AddToQueue([]int64{6}, "test.txt", CSVParameters{}, users.User{Login: "blabla"}) + wrapper.AddToQueue([]engine.Fact{{ID: 5}}, "test.txt", CSVParameters{}, users.User{Login: "bla"}) + wrapper.AddToQueue([]engine.Fact{{ID: 6}}, "test.txt", CSVParameters{}, users.User{Login: "blabla"}) expression.AssertEqual(t, len(wrapper.queue), 2) item1 := wrapper.queue[0] @@ -284,7 +294,7 @@ func TestWrapper_dispatchExportQueue(t *testing.T) { expression.AssertEqual(t, worker.IsAvailable(), true, "worker should still be available, because no items in queue") // we add an item to the queue - wrapper.AddToQueue([]int64{1}, fileName, CSVParameters{}, users.User{Login: "test"}) + wrapper.AddToQueue([]engine.Fact{{ID: 1}}, fileName, CSVParameters{}, users.User{Login: "test"}) // we test if dispatchExportQueue will not dispatch the item, no worker available worker.SwapAvailable(false) @@ -315,7 +325,7 @@ func TestWrapper_dispatchExportQueue(t *testing.T) { func TestWrapper_FindArchive(t *testing.T) { wrapper := NewWrapper("/tmp", 1, 1, 2) - item := NewWrapperItem([]int64{1}, "test.txt", CSVParameters{}, users.User{Login: "bla"}) + item := NewWrapperItem([]engine.Fact{{ID: 1}}, "test.txt", CSVParameters{}, users.User{Login: "bla"}) wrapper.archive.Store(item.Id, *item) // testing with non-existing item in archive @@ -332,7 +342,71 @@ func TestWrapper_FindArchive(t *testing.T) { } func TestWrapper_ContainsUser(t *testing.T) { - item := NewWrapperItem([]int64{1}, "test.txt", CSVParameters{}, users.User{Login: "bla"}) + item := NewWrapperItem([]engine.Fact{{ID: 1}}, "test.txt", CSVParameters{}, users.User{Login: "bla"}) expression.AssertEqual(t, item.ContainsUser(users.User{Login: "bla"}), true) expression.AssertEqual(t, item.ContainsUser(users.User{Login: "blabla"}), false) } + +func TestWrapper_DeleteExport(t *testing.T) { + wrapper := NewWrapper("/tmp", 1, 1, 2) + item := NewWrapperItem([]engine.Fact{{ID: 1}}, "test.txt", CSVParameters{}, users.User{Login: "bla"}) + + // test 
archive + wrapper.archive.Store(item.Id, *item) + expression.AssertEqual(t, wrapper.DeleteExport(item.Id, users.User{Login: "bla"}), true, "item should have been deleted") + _, ok := wrapper.archive.Load(item.Id) + expression.AssertEqual(t, ok, false, "item should not be in archive anymore") + + // test archive multi-user + item.Users = []string{"bla", "blabla"} + wrapper.archive.Store(item.Id, *item) + expression.AssertEqual(t, wrapper.DeleteExport(item.Id, users.User{Login: "bla"}), true, "user should have been deleted from existing export") + _, ok = wrapper.archive.Load(item.Id) + expression.AssertEqual(t, ok, true, "item should be in archive") + item.Users = []string{"bla"} + + // test queue + queueItem, code := wrapper.AddToQueue([]engine.Fact{{ID: 1}}, "test.txt", CSVParameters{}, users.User{Login: "bla"}) + expression.AssertEqual(t, code, CodeAdded, "item should have been added to queue") + wrapper.queueMutex.Lock() + expression.AssertEqual(t, len(wrapper.queue), 1, "item should be in queue") + wrapper.queueMutex.Unlock() + expression.AssertEqual(t, wrapper.DeleteExport(queueItem.Id, users.User{Login: "bla"}), true, "item should have been deleted") + wrapper.queueMutex.Lock() + expression.AssertEqual(t, len(wrapper.queue), 0, "item should not be in queue anymore") + wrapper.queueMutex.Unlock() + + // test queue multi-user + queueItem, code = wrapper.AddToQueue([]engine.Fact{{ID: 1}}, "test.txt", CSVParameters{}, users.User{Login: "bla"}) + expression.AssertEqual(t, code, CodeAdded, "item should have been added to queue") + _, code = wrapper.AddToQueue([]engine.Fact{{ID: 1}}, "test.txt", CSVParameters{}, users.User{Login: "blabla"}) + expression.AssertEqual(t, code, CodeUserAdded, "user should have been added to existing item in queue") + wrapper.queueMutex.Lock() + expression.AssertEqual(t, len(wrapper.queue), 1, "item should be in queue") + wrapper.queueMutex.Unlock() + expression.AssertEqual(t, wrapper.DeleteExport(queueItem.Id, users.User{Login: "bla"}), 
true, "user should have been deleted from existing export") + wrapper.queueMutex.Lock() + expression.AssertEqual(t, len(wrapper.queue), 1, "item should be in queue") + wrapper.queueMutex.Unlock() + + // test workers + worker := NewExportWorker(0, "/tmp", make(chan<- int)) + wrapper.workers = append(wrapper.workers, worker) + worker.Mutex.Lock() + worker.QueueItem = *item + worker.Available = true + worker.Mutex.Unlock() + expression.AssertEqual(t, wrapper.DeleteExport(item.Id, users.User{Login: "bla"}), false, "item should have not been deleted") + worker.SwapAvailable(false) + expression.AssertEqual(t, wrapper.DeleteExport(item.Id, users.User{Login: "bla"}), true, "item should have been deleted") + expression.AssertEqual(t, len(worker.Cancel), 1, "worker cancel channel should have been filled") + + // clean cancel channel (non-blocking) + worker.DrainCancelChannel() + worker.Mutex.Lock() + worker.QueueItem.Users = []string{"bla", "blabla"} + worker.Mutex.Unlock() + expression.AssertEqual(t, wrapper.DeleteExport(item.Id, users.User{Login: "bla"}), true, "user should have been deleted from existing export") + expression.AssertEqual(t, len(worker.Cancel), 0, "worker cancel channel should not have been filled") + +} diff --git a/internals/handlers/export_handlers.go b/internals/handlers/export_handlers.go index c4e3b6ca..cbb97aa5 100644 --- a/internals/handlers/export_handlers.go +++ b/internals/handlers/export_handlers.go @@ -50,8 +50,8 @@ func ExportFactStreamed(w http.ResponseWriter, r *http.Request) { return } - userCtx, _ := GetUserFromContext(r) // TODO: set the right permission - if !userCtx.HasPermission(permissions.New(permissions.TypeFact, strconv.FormatInt(idFact, 10), permissions.ActionGet)) { + userCtx, _ := GetUserFromContext(r) + if !userCtx.HasPermission(permissions.New(permissions.TypeExport, strconv.FormatInt(idFact, 10), permissions.ActionGet)) { render.Error(w, r, render.ErrAPISecurityNoPermissions, errors.New("missing permission")) return } @@ 
-70,7 +70,8 @@ func ExportFactStreamed(w http.ResponseWriter, r *http.Request) { } // handleExportArgs handles the export arguments and returns the filename, the parameters and the facts to export -func handleExportArgs(w http.ResponseWriter, r *http.Request, err error, idFact int64) (string, export.CSVParameters, []engine.Fact, bool) { +// done is true if an error occurred and the response has already been written +func handleExportArgs(w http.ResponseWriter, r *http.Request, err error, idFact int64) (filename string, params export.CSVParameters, combineFacts []engine.Fact, done bool) { f, found, err := fact.R().Get(idFact) if err != nil { zap.L().Error("Cannot retrieve fact", zap.Int64("factID", idFact), zap.Error(err)) @@ -83,7 +84,7 @@ func handleExportArgs(w http.ResponseWriter, r *http.Request, err error, idFact return "", export.CSVParameters{}, nil, true } - var filename = r.URL.Query().Get("fileName") + filename = r.URL.Query().Get("fileName") if filename == "" { filename = fmt.Sprintf("%s_export_%s.csv", f.Name, time.Now().Format("02_01_2006")) } else { @@ -91,9 +92,8 @@ func handleExportArgs(w http.ResponseWriter, r *http.Request, err error, idFact } // suppose that type is csv - params := GetCSVParameters(r) + params = GetCSVParameters(r) - var combineFacts []engine.Fact combineFacts = append(combineFacts, f) // export multiple facts into one file @@ -265,7 +265,8 @@ func HandleStreamedExport(requestContext context.Context, w http.ResponseWriter, // @Description Get in memory user exports // @Produce json // @Security Bearer -// @Success 200 {json} Returns a list of exports +// @Success 200 {array} export.WrapperItem Returns a list of exports +// @Failure 403 "Status Forbidden: missing permission" // @Failure 500 "internal server error" // @Router /engine/exports [get] func (e *ExportHandler) GetExports(w http.ResponseWriter, r *http.Request) { @@ -274,8 +275,7 @@ func (e *ExportHandler) GetExports(w http.ResponseWriter, r *http.Request) { 
render.Error(w, r, render.ErrAPISecurityNoPermissions, errors.New("missing permission")) return } - exports := e.exportWrapper.GetUserExports(userCtx.User) - render.JSON(w, r, exports) + render.JSON(w, r, e.exportWrapper.GetUserExports(userCtx.User)) } // GetExport godoc @@ -284,7 +284,10 @@ func (e *ExportHandler) GetExports(w http.ResponseWriter, r *http.Request) { // @Tags Exports // @Produce json // @Security Bearer -// @Success 200 "Status OK" +// @Success 200 {object} export.WrapperItem "Status OK" +// @Failure 400 "Bad Request: missing export id / id is not an integer" +// @Failure 403 "Status Forbidden: missing permission" +// @Failure 404 "Status Not Found: export not found" // @Failure 500 "internal server error" // @Router /service/exports/{id} [get] func (e *ExportHandler) GetExport(w http.ResponseWriter, r *http.Request) { @@ -315,7 +318,10 @@ func (e *ExportHandler) GetExport(w http.ResponseWriter, r *http.Request) { // @Tags Exports // @Produce json // @Security Bearer -// @Success 200 "Status OK" +// @Success 204 "Status OK" +// @Failure 400 "Bad Request: missing export id / id is not an integer" +// @Failure 403 "Status Forbidden: missing permission" +// @Failure 404 "Status Not Found: export not found" // @Failure 500 "internal server error" // @Router /service/exports/{id} [delete] func (e *ExportHandler) DeleteExport(w http.ResponseWriter, r *http.Request) { @@ -331,13 +337,13 @@ func (e *ExportHandler) DeleteExport(w http.ResponseWriter, r *http.Request) { return } - item, ok := e.exportWrapper.GetUserExport(id, userCtx.User) + ok := e.exportWrapper.DeleteExport(id, userCtx.User) if !ok { render.Error(w, r, render.ErrAPIDBResourceNotFound, errors.New("export not found")) return } - render.JSON(w, r, item) + w.WriteHeader(http.StatusNoContent) } // ExportFact godoc @@ -346,7 +352,19 @@ func (e *ExportHandler) DeleteExport(w http.ResponseWriter, r *http.Request) { // @Tags Exports // @Produce json // @Security Bearer -// @Success 200 "Status OK" 
+// @Param id path string true "Fact ID" +// @Param fileName query string false "File name" +// @Param limit query int false "Limit" +// @Param columns query string false "Columns" +// @Param columnsLabel query string false "Columns label" +// @Param formateColumns query string false "Formate columns" +// @Param separator query string false "Separator" +// @Success 200 {object} export.WrapperItem "Status OK: user was added to existing export in queue" +// @Success 201 {object} export.WrapperItem "Status Created: new export was added in queue" +// @Failure 400 "Bad Request: missing fact id / fact id is not an integer" +// @Failure 403 "Status Forbidden: missing permission" +// @Failure 409 {object} export.WrapperItem "Status Conflict: user already exists in export queue" +// @Failure 429 "Status Too Many Requests: export queue is full" // @Failure 500 "internal server error" // @Router /service/exports/fact/{id} [post] func (e *ExportHandler) ExportFact(w http.ResponseWriter, r *http.Request) { @@ -365,18 +383,26 @@ func (e *ExportHandler) ExportFact(w http.ResponseWriter, r *http.Request) { return } - filename, params, combineFacts, done := handleExportArgs(w, r, err, idFact) + filename, params, combinedFacts, done := handleExportArgs(w, r, err, idFact) if done { return } - item, status := e.exportWrapper.AddToQueue(combineFacts, filename, params, userCtx.User) + item, status := e.exportWrapper.AddToQueue(combinedFacts, filename, params, userCtx.User) switch status { case export.CodeAdded: + w.WriteHeader(http.StatusCreated) case export.CodeUserAdded: + w.WriteHeader(http.StatusOK) case export.CodeUserExists: + w.WriteHeader(http.StatusConflict) case export.CodeQueueFull: + render.Error(w, r, render.ErrAPIQueueFull, fmt.Errorf("export queue is full")) + return + default: + render.Error(w, r, render.ErrAPIProcessError, fmt.Errorf("unknown status code (%d)", status)) + return } render.JSON(w, r, item) diff --git a/internals/handlers/render/render.go 
b/internals/handlers/render/render.go index ea031fff..5b1e2074 100644 --- a/internals/handlers/render/render.go +++ b/internals/handlers/render/render.go @@ -10,7 +10,7 @@ import ( "go.uber.org/zap" ) -// APIError wraps all informations required to investiguate a backend error +// APIError wraps all information required to investigate a backend error // It is mainly used to returns information to the API caller when the status is not 2xx. type APIError struct { RequestID string `json:"requestID"` @@ -44,6 +44,9 @@ var ( // ErrAPIResourceDuplicate must be used in case a duplicate resource has been identified ErrAPIResourceDuplicate = APIError{Status: http.StatusBadRequest, ErrType: "RessourceError", Code: 2002, Message: `Provided resource definition can be parsed, but is already exists`} + // ErrAPIQueueFull must be used in case an internal processing queue is full + ErrAPIQueueFull = APIError{Status: http.StatusServiceUnavailable, ErrType: "RessourceError", Code: 2003, Message: `The queue is full, please retry later`} + // ErrAPIDBResourceNotFound must be used in case a resource is not found in the backend storage system ErrAPIDBResourceNotFound = APIError{Status: http.StatusNotFound, ErrType: "RessourceError", Code: 3000, Message: `Ressource not found`} // ErrAPIDBSelectFailed must be used when a select query returns an error from the backend storage system From 85eb19645bffe01ce7bd21033312a9e5af96f453 Mon Sep 17 00:00:00 2001 From: SchawnnDev Date: Wed, 22 Nov 2023 17:49:25 +0100 Subject: [PATCH 11/35] added missing test for getuserexport and fixed pipeline --- internals/export/wrapper_test.go | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/internals/export/wrapper_test.go b/internals/export/wrapper_test.go index 8735d3f8..016be0f8 100644 --- a/internals/export/wrapper_test.go +++ b/internals/export/wrapper_test.go @@ -408,5 +408,23 @@ func TestWrapper_DeleteExport(t *testing.T) { worker.Mutex.Unlock() expression.AssertEqual(t, 
wrapper.DeleteExport(item.Id, users.User{Login: "bla"}), true, "user should have been deleted from existing export") expression.AssertEqual(t, len(worker.Cancel), 0, "worker cancel channel should not have been filled") +} +func TestWrapper_GetUserExport(t *testing.T) { + wrapper := NewWrapper("/tmp", 1, 1, 2) + item := NewWrapperItem([]engine.Fact{{ID: 1}}, "test.txt", CSVParameters{}, users.User{Login: "bla"}) + wrapper.archive.Store(item.Id, *item) + export, ok := wrapper.GetUserExport(item.Id, users.User{Login: "bla"}) + expression.AssertEqual(t, ok, true) + expression.AssertEqual(t, export.Id, item.Id) + export, ok = wrapper.GetUserExport(item.Id, users.User{Login: "blabla"}) + expression.AssertEqual(t, ok, false) + // test queue + queueItem, code := wrapper.AddToQueue([]engine.Fact{{ID: 1}}, "test.txt", CSVParameters{}, users.User{Login: "bla"}) + expression.AssertEqual(t, code, CodeAdded, "item should have been added to queue") + export, ok = wrapper.GetUserExport(queueItem.Id, users.User{Login: "bla"}) + expression.AssertEqual(t, ok, true) + expression.AssertEqual(t, export.Id, queueItem.Id) + export, ok = wrapper.GetUserExport(queueItem.Id, users.User{Login: "blabla"}) + expression.AssertEqual(t, ok, false) } From 866595ae44def1a079b9772ab747e9a8e2562866 Mon Sep 17 00:00:00 2001 From: SchawnnDev Date: Wed, 22 Nov 2023 17:50:31 +0100 Subject: [PATCH 12/35] added missing test for getuserexport and fixed pipeline --- internals/export/wrapper.go | 1 + internals/handlers/export_handlers.go | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/internals/export/wrapper.go b/internals/export/wrapper.go index 2647274e..953176dd 100644 --- a/internals/export/wrapper.go +++ b/internals/export/wrapper.go @@ -26,6 +26,7 @@ const ( StatusCanceled = 4 ) +// WrapperItem represents an export demand type WrapperItem struct { Id string `json:"id"` // unique id that represents an export demand FactIDs []int64 `json:"factIds"` // list of fact ids that are part of 
the export (for archive and json) diff --git a/internals/handlers/export_handlers.go b/internals/handlers/export_handlers.go index cbb97aa5..742053d1 100644 --- a/internals/handlers/export_handlers.go +++ b/internals/handlers/export_handlers.go @@ -265,7 +265,7 @@ func HandleStreamedExport(requestContext context.Context, w http.ResponseWriter, // @Description Get in memory user exports // @Produce json // @Security Bearer -// @Success 200 {array} export.WrapperItem Returns a list of exports +// @Success 200 {array} export.WrapperItem "Returns a list of exports" // @Failure 403 "Status Forbidden: missing permission" // @Failure 500 "internal server error" // @Router /engine/exports [get] From 19537ef4ef2f4d7891f9ed0ab820cfe1b0ef4d43 Mon Sep 17 00:00:00 2001 From: SchawnnDev Date: Wed, 22 Nov 2023 17:54:03 +0100 Subject: [PATCH 13/35] its better to define Factids --- internals/export/wrapper.go | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/internals/export/wrapper.go b/internals/export/wrapper.go index 953176dd..01f5e8d2 100644 --- a/internals/export/wrapper.go +++ b/internals/export/wrapper.go @@ -64,10 +64,15 @@ type Wrapper struct { // NewWrapperItem creates a new export wrapper item func NewWrapperItem(facts []engine.Fact, fileName string, params CSVParameters, user users.User) *WrapperItem { + var factIDs []int64 + for _, fact := range facts { + factIDs = append(factIDs, fact.ID) + } return &WrapperItem{ Users: append([]string{}, user.Login), Id: uuid.New().String(), Facts: facts, + FactIDs: factIDs, Date: time.Now(), Status: StatusPending, Error: nil, From 50ecd99eab2825350d9b04e62c0580922b139f9c Mon Sep 17 00:00:00 2001 From: Paul Meyer Date: Thu, 23 Nov 2023 14:45:22 +0100 Subject: [PATCH 14/35] Fixed variablesconfig responding nil when no data was found --- internals/handlers/variablesconfig_handlers.go | 2 +- internals/modeler/postgres_repository.go | 4 ++-- internals/variablesconfig/postgres_repository.go | 3 +-- 3 files changed, 4 insertions(+), 
5 deletions(-) diff --git a/internals/handlers/variablesconfig_handlers.go b/internals/handlers/variablesconfig_handlers.go index 16805882..b5d6ecc2 100644 --- a/internals/handlers/variablesconfig_handlers.go +++ b/internals/handlers/variablesconfig_handlers.go @@ -12,7 +12,7 @@ import ( "go.uber.org/zap" ) -// GetVariablesConfigs godoc +// GetVariablesConfig godoc // @Summary Get all Variables Config definitions // @Description Get all VariableConfig definitions // @Tags VariablesConfig diff --git a/internals/modeler/postgres_repository.go b/internals/modeler/postgres_repository.go index ac8fe71c..7e7b6d65 100644 --- a/internals/modeler/postgres_repository.go +++ b/internals/modeler/postgres_repository.go @@ -16,7 +16,7 @@ type PostgresRepository struct { conn *sqlx.DB } -//NewPostgresRepository returns a new instance of PostgresRepository +// NewPostgresRepository returns a new instance of PostgresRepository func NewPostgresRepository(dbClient *sqlx.DB) Repository { r := PostgresRepository{ conn: dbClient, @@ -174,7 +174,7 @@ func (r *PostgresRepository) Delete(id int64) error { // GetAll returns all models in the repository func (r *PostgresRepository) GetAll() (map[int64]modeler.Model, error) { - models := make(map[int64]modeler.Model, 0) + models := make(map[int64]modeler.Model) query := `SELECT id, definition FROM model_v1` rows, err := r.conn.Query(query) diff --git a/internals/variablesconfig/postgres_repository.go b/internals/variablesconfig/postgres_repository.go index ec68e6f5..4e849c8f 100644 --- a/internals/variablesconfig/postgres_repository.go +++ b/internals/variablesconfig/postgres_repository.go @@ -147,8 +147,7 @@ func (r *PostgresRepository) Delete(id int64) error { // GetAll method used to get all Variables Config func (r *PostgresRepository) GetAll() ([]models.VariablesConfig, error) { - - var variablesConfig []models.VariablesConfig + variablesConfig := make([]models.VariablesConfig, 0) rows, err := r.newStatement(). 
Select("id", "key", "value"). From ea541adaf90d11208851a843c3f7a601b6747aaa Mon Sep 17 00:00:00 2001 From: SchawnnDev Date: Wed, 29 Nov 2023 17:24:47 +0100 Subject: [PATCH 15/35] Lot of changes for export: wrapped export request --- internals/export/csv.go | 26 ++-- internals/export/csv_test.go | 82 ++++++++-- internals/export/utils.go | 60 ++++---- internals/export/utils_test.go | 108 ++++++------- internals/export/worker.go | 4 +- internals/handlers/export_handlers.go | 196 +++++++----------------- internals/handlers/utils.go | 43 ++++-- internals/handlers/utils_test.go | 17 -- internals/router/routes.go | 5 +- internals/tasker/situation_reporting.go | 74 +++++---- internals/utils/utils.go | 15 +- internals/utils/utils_test.go | 51 ++++++ 12 files changed, 361 insertions(+), 320 deletions(-) create mode 100644 internals/utils/utils_test.go diff --git a/internals/export/csv.go b/internals/export/csv.go index dd6c93cb..3cf9b644 100644 --- a/internals/export/csv.go +++ b/internals/export/csv.go @@ -11,29 +11,30 @@ import ( "go.uber.org/zap" ) -func WriteConvertHitsToCSV(w *csv.Writer, hits []reader.Hit, columns []string, columnsLabel []string, formatColumnsData map[string]string, separator rune) error { - w.Comma = separator +// WriteConvertHitsToCSV writes hits to CSV +func WriteConvertHitsToCSV(w *csv.Writer, hits []reader.Hit, params CSVParameters, writeHeader bool) error { + w.Comma = params.Separator // avoid to print header when labels are empty - if len(columnsLabel) > 0 { - w.Write(columnsLabel) + if writeHeader && len(params.Columns) > 0 { + w.Write(params.GetColumnsLabel()) } for _, hit := range hits { record := make([]string, 0) - for _, column := range columns { - value, err := nestedMapLookup(hit.Fields, strings.Split(column, ".")...) + for _, column := range params.Columns { + value, err := nestedMapLookup(hit.Fields, strings.Split(column.Name, ".")...) 
if err != nil { value = "" - } else if format, ok := formatColumnsData[column]; ok { + } else if column.Format != "" { if date, ok := value.(time.Time); ok { - value = date.Format(format) + value = date.Format(column.Format) } else if dateStr, ok := value.(string); ok { date, err := parseDate(dateStr) if err != nil { zap.L().Error("Failed to parse date string:", zap.Any(":", dateStr), zap.Error(err)) } else { - value = date.Format(format) + value = date.Format(column.Format) } } } @@ -46,10 +47,11 @@ func WriteConvertHitsToCSV(w *csv.Writer, hits []reader.Hit, columns []string, c return w.Error() } -func ConvertHitsToCSV(hits []reader.Hit, columns []string, columnsLabel []string, formatColumnsData map[string]string, separator rune) ([]byte, error) { +// ConvertHitsToCSV converts hits to CSV +func ConvertHitsToCSV(hits []reader.Hit, params CSVParameters, writeHeader bool) ([]byte, error) { b := new(bytes.Buffer) w := csv.NewWriter(b) - err := WriteConvertHitsToCSV(w, hits, columns, columnsLabel, formatColumnsData, separator) + err := WriteConvertHitsToCSV(w, hits, params, writeHeader) if err != nil { return nil, err @@ -58,6 +60,7 @@ func ConvertHitsToCSV(hits []reader.Hit, columns []string, columnsLabel []string return b.Bytes(), nil } +// nestedMapLookup looks up a nested map item func nestedMapLookup(m map[string]interface{}, ks ...string) (rval interface{}, err error) { var ok bool if len(ks) == 0 { @@ -74,6 +77,7 @@ func nestedMapLookup(m map[string]interface{}, ks ...string) (rval interface{}, } } +// parseDate parses a date string func parseDate(dateStr string) (time.Time, error) { formats := []string{ "2006-01-02T15:04:05.999", diff --git a/internals/export/csv_test.go b/internals/export/csv_test.go index b71c2d6f..930125eb 100644 --- a/internals/export/csv_test.go +++ b/internals/export/csv_test.go @@ -15,12 +15,17 @@ func TestConvertHitsToCSV(t *testing.T) { {ID: "3", Fields: map[string]interface{}{"a": "hello", "b": 20, "c": 3.123456, "date": 
"2023-06-30T10:42:59.500"}}, {ID: "1", Fields: map[string]interface{}{"a": "hello", "b": 20, "c": 3.123456, "d": map[string]interface{}{"zzz": "nested"}, "date": "2023-06-30T10:42:59.500"}}, } - columns := []string{"a", "b", "c", "d.e", "date"} - columnsLabel := []string{"Label A", "Label B", "Label C", "Label D.E", "Date"} - formatColumnsData := map[string]string{ - "date": "02/01/2006", + params := CSVParameters{ + Columns: []Column{ + {Name: "a", Label: "Label A", Format: ""}, + {Name: "b", Label: "Label B", Format: ""}, + {Name: "c", Label: "Label C", Format: ""}, + {Name: "d.e", Label: "Label D.E", Format: ""}, + {Name: "date", Label: "Date", Format: "02/01/2006"}, + }, + Separator: ',', } - csv, err := ConvertHitsToCSV(hits, columns, columnsLabel, formatColumnsData, ',') + csv, err := ConvertHitsToCSV(hits, params, true) if err != nil { t.Log(err) t.FailNow() @@ -35,17 +40,74 @@ func TestWriteConvertHitsToCSV(t *testing.T) { {ID: "3", Fields: map[string]interface{}{"a": "hello", "b": 20, "c": 3.123456, "date": "2023-06-30T10:42:59.500"}}, {ID: "1", Fields: map[string]interface{}{"a": "hello", "b": 20, "c": 3.123456, "d": map[string]interface{}{"zzz": "nested"}, "date": "2023-06-30T10:42:59.500"}}, } - columns := []string{"a", "b", "c", "d.e", "date"} - columnsLabel := []string{"Label A", "Label B", "Label C", "Label D.E", "Date"} - formatColumnsData := map[string]string{ - "date": "02/01/2006", + params := CSVParameters{ + Columns: []Column{ + {Name: "a", Label: "Label A", Format: ""}, + {Name: "b", Label: "Label B", Format: ""}, + {Name: "c", Label: "Label C", Format: ""}, + {Name: "d.e", Label: "Label D.E", Format: ""}, + {Name: "date", Label: "Date", Format: "02/01/2006"}, + }, + Separator: ',', } b := new(bytes.Buffer) w := csv2.NewWriter(b) - err := WriteConvertHitsToCSV(w, hits, columns, columnsLabel, formatColumnsData, ',') + err := WriteConvertHitsToCSV(w, hits, params, true) if err != nil { t.Log(err) t.FailNow() } t.Log("\n" + string(b.Bytes())) } + 
+func TestNestedMapLookup_WithEmptyKeys(t *testing.T) { + _, err := nestedMapLookup(map[string]interface{}{}, "") + if err == nil { + t.FailNow() + } +} + +func TestNestedMapLookup_WithNonExistentKey(t *testing.T) { + _, err := nestedMapLookup(map[string]interface{}{"a": "hello"}, "b") + if err == nil { + t.FailNow() + } +} + +func TestNestedMapLookup_WithNestedNonExistentKey(t *testing.T) { + _, err := nestedMapLookup(map[string]interface{}{"a": map[string]interface{}{"b": "hello"}}, "a", "c") + if err == nil { + t.FailNow() + } +} + +func TestNestedMapLookup_WithNestedKey(t *testing.T) { + val, err := nestedMapLookup(map[string]interface{}{"a": map[string]interface{}{"b": "hello"}}, "a", "b") + if err != nil || val != "hello" { + t.Error(err) + t.FailNow() + } +} + +func TestParseDate_WithInvalidFormat(t *testing.T) { + _, err := parseDate("2023-06-30") + if err == nil { + t.FailNow() + } +} + +func TestParseDate_WithValidFormat(t *testing.T) { + _, err := parseDate("2023-06-30T10:42:59.500") + if err != nil { + t.Error(err) + t.FailNow() + } +} + +func TestConvertHitsToCSV_WithEmptyHits(t *testing.T) { + _, err := ConvertHitsToCSV([]reader.Hit{}, CSVParameters{}, true) + if err != nil { + t.Error(err) + t.FailNow() + } +} diff --git a/internals/export/utils.go b/internals/export/utils.go index 3b683455..d709cc1b 100644 --- a/internals/export/utils.go +++ b/internals/export/utils.go @@ -1,48 +1,52 @@ package export type CSVParameters struct { - Columns []string - ColumnsLabel []string - FormatColumnsData map[string]string - Separator rune - Limit int64 - ChunkSize int64 + Columns []Column `json:"columns"` + Separator rune `json:"separator" default:","` + Limit int64 `json:"limit"` } -// Equals compares two CSVParameters -func (p CSVParameters) Equals(Params CSVParameters) bool { - if p.Separator != Params.Separator { - return false - } - if p.Limit != Params.Limit { +type Column struct { + Name string `json:"name"` + Label string `json:"label"` + Format string 
`json:"format" default:""` +} + +// Equals compares two Column +func (p Column) Equals(column Column) bool { + if p.Name != column.Name { return false } - if p.ChunkSize != Params.ChunkSize { + if p.Label != column.Label { return false } - if len(p.Columns) != len(Params.Columns) { + if p.Format != column.Format { return false } - for i, column := range p.Columns { - if column != Params.Columns[i] { - return false - } - } - if len(p.ColumnsLabel) != len(Params.ColumnsLabel) { + return true +} + +// Equals compares two CSVParameters +func (p CSVParameters) Equals(params CSVParameters) bool { + if p.Separator != params.Separator { return false } - for i, columnLabel := range p.ColumnsLabel { - if columnLabel != Params.ColumnsLabel[i] { - return false - } - } - if len(p.FormatColumnsData) != len(Params.FormatColumnsData) { + if p.Limit != params.Limit { return false } - for key, value := range p.FormatColumnsData { - if value != Params.FormatColumnsData[key] { + for i, column := range p.Columns { + if !column.Equals(params.Columns[i]) { return false } } return true } + +// GetColumnsLabel returns the label of the columns +func (p CSVParameters) GetColumnsLabel() []string { + columns := make([]string, 0) + for _, column := range p.Columns { + columns = append(columns, column.Label) + } + return columns +} diff --git a/internals/export/utils_test.go b/internals/export/utils_test.go index 18b8d143..2892c45f 100644 --- a/internals/export/utils_test.go +++ b/internals/export/utils_test.go @@ -5,70 +5,64 @@ import ( "testing" ) -func TestEquals(t *testing.T) { - p1 := CSVParameters{} - p2 := CSVParameters{} - expression.AssertEqual(t, p1.Equals(p2), true) - - // make a full test with all variables in parameters filled - params3 := CSVParameters{ - Columns: []string{"col1", "col2"}, - ColumnsLabel: []string{"col1", "col2"}, - FormatColumnsData: map[string]string{"col1": "format1", "col2": "format2"}, - Separator: ';', - Limit: 10, - ChunkSize: 100, - } - 
expression.AssertEqual(t, params3.Equals(p2), false) - expression.AssertEqual(t, params3.Equals(params3), true) - - // test separator - p1 = CSVParameters{Separator: ';'} - p2 = CSVParameters{Separator: ','} - expression.AssertEqual(t, p1.Equals(p2), false) +func TestColumnEquals_WithDifferentName(t *testing.T) { + column1 := Column{Name: "name1", Label: "label", Format: "format"} + column2 := Column{Name: "name2", Label: "label", Format: "format"} + expression.AssertEqual(t, column1.Equals(column2), false) +} - // test limit - p1 = CSVParameters{Limit: 10} - p2 = CSVParameters{Limit: 101} - expression.AssertEqual(t, p1.Equals(p2), false) +func TestColumnEquals_WithDifferentLabel(t *testing.T) { + column1 := Column{Name: "name", Label: "label1", Format: "format"} + column2 := Column{Name: "name", Label: "label2", Format: "format"} + expression.AssertEqual(t, column1.Equals(column2), false) +} - // test chunk size - p1 = CSVParameters{ChunkSize: 100} - p2 = CSVParameters{ChunkSize: 10} - expression.AssertEqual(t, p1.Equals(p2), false) +func TestColumnEquals_WithDifferentFormat(t *testing.T) { + column1 := Column{Name: "name", Label: "label", Format: "format1"} + column2 := Column{Name: "name", Label: "label", Format: "format2"} + expression.AssertEqual(t, column1.Equals(column2), false) +} - // test columns size - p1 = CSVParameters{Columns: []string{"col1", "col2"}} - p2 = CSVParameters{Columns: []string{"col1", "col2", "col3"}} - expression.AssertEqual(t, p1.Equals(p2), false) +func TestColumnEquals_WithSameValues(t *testing.T) { + column1 := Column{Name: "name", Label: "label", Format: "format"} + column2 := Column{Name: "name", Label: "label", Format: "format"} + expression.AssertEqual(t, column1.Equals(column2), true) +} - // test columns values - p1 = CSVParameters{Columns: []string{"col1", "col2"}} - p2 = CSVParameters{Columns: []string{"col1", "col3"}} - expression.AssertEqual(t, p1.Equals(p2), false) +func TestCSVParametersEquals_WithDifferentSeparator(t 
*testing.T) { + params1 := CSVParameters{Separator: ',', Limit: 10, Columns: []Column{{Name: "name", Label: "label", Format: "format"}}} + params2 := CSVParameters{Separator: ';', Limit: 10, Columns: []Column{{Name: "name", Label: "label", Format: "format"}}} + expression.AssertEqual(t, params1.Equals(params2), false) +} - // test columnsLabel size - p1 = CSVParameters{ColumnsLabel: []string{"col1", "col2"}} - p2 = CSVParameters{ColumnsLabel: []string{"col1", "col2", "col3"}} - expression.AssertEqual(t, p1.Equals(p2), false) +func TestCSVParametersEquals_WithDifferentLimit(t *testing.T) { + params1 := CSVParameters{Separator: ',', Limit: 10, Columns: []Column{{Name: "name", Label: "label", Format: "format"}}} + params2 := CSVParameters{Separator: ',', Limit: 20, Columns: []Column{{Name: "name", Label: "label", Format: "format"}}} + expression.AssertEqual(t, params1.Equals(params2), false) +} - // test columnsLabel values - p1 = CSVParameters{ColumnsLabel: []string{"col1", "col2"}} - p2 = CSVParameters{ColumnsLabel: []string{"col1", "col3"}} - expression.AssertEqual(t, p1.Equals(p2), false) +func TestCSVParametersEquals_WithDifferentColumns(t *testing.T) { + params1 := CSVParameters{Separator: ',', Limit: 10, Columns: []Column{{Name: "name1", Label: "label", Format: "format"}}} + params2 := CSVParameters{Separator: ',', Limit: 10, Columns: []Column{{Name: "name2", Label: "label", Format: "format"}}} + expression.AssertEqual(t, params1.Equals(params2), false) +} - // test formatColumnsData size - p1 = CSVParameters{FormatColumnsData: map[string]string{"col1": "format1", "col2": "format2"}} - p2 = CSVParameters{FormatColumnsData: map[string]string{"col1": "format1", "col2": "format2", "col3": "format3"}} - expression.AssertEqual(t, p1.Equals(p2), false) +func TestCSVParametersEquals_WithSameValues(t *testing.T) { + params1 := CSVParameters{Separator: ',', Limit: 10, Columns: []Column{{Name: "name", Label: "label", Format: "format"}}} + params2 := 
CSVParameters{Separator: ',', Limit: 10, Columns: []Column{{Name: "name", Label: "label", Format: "format"}}} + expression.AssertEqual(t, params1.Equals(params2), true) +} - // test formatColumnsData values - p1 = CSVParameters{FormatColumnsData: map[string]string{"col1": "format1", "col2": "format2"}} - p2 = CSVParameters{FormatColumnsData: map[string]string{"col1": "format1", "col2": "format3"}} - expression.AssertEqual(t, p1.Equals(p2), false) +func TestGetColumnsLabel_WithNoColumns(t *testing.T) { + params := CSVParameters{Separator: ',', Limit: 10, Columns: []Column{}} + labels := params.GetColumnsLabel() + expression.AssertEqual(t, len(labels), 0) +} - // test formatColumnsData keys - p1 = CSVParameters{FormatColumnsData: map[string]string{"col1": "format1", "col2": "format2"}} - p2 = CSVParameters{FormatColumnsData: map[string]string{"col1": "format1", "col3": "format2"}} - expression.AssertEqual(t, p1.Equals(p2), false) +func TestGetColumnsLabel_WithColumns(t *testing.T) { + params := CSVParameters{Separator: ',', Limit: 10, Columns: []Column{{Name: "name1", Label: "label1", Format: "format1"}, {Name: "name2", Label: "label2", Format: "format2"}}} + labels := params.GetColumnsLabel() + expression.AssertEqual(t, len(labels), 2) + expression.AssertEqual(t, labels[0], "label1") + expression.AssertEqual(t, labels[1], "label2") } diff --git a/internals/export/worker.go b/internals/export/worker.go index 69d3e806..6314ed4f 100644 --- a/internals/export/worker.go +++ b/internals/export/worker.go @@ -157,7 +157,6 @@ func (e *ExportWorker) Start(item WrapperItem, ctx context.Context) { // Chunk handler first := true - labels := item.Params.ColumnsLabel loop: for { @@ -167,7 +166,7 @@ loop: break loop } - err = WriteConvertHitsToCSV(csvWriter, hits, item.Params.Columns, labels, item.Params.FormatColumnsData, item.Params.Separator) + err = WriteConvertHitsToCSV(csvWriter, hits, item.Params, first) if err != nil { zap.L().Error("WriteConvertHitsToCSV error during 
export", zap.Error(err)) @@ -180,7 +179,6 @@ loop: if first { first = false - labels = []string{} } case <-ctx.Done(): break loop diff --git a/internals/handlers/export_handlers.go b/internals/handlers/export_handlers.go index 742053d1..f38e0d18 100644 --- a/internals/handlers/export_handlers.go +++ b/internals/handlers/export_handlers.go @@ -2,66 +2,69 @@ package handlers import ( "context" + "encoding/json" "errors" "fmt" - "github.com/myrteametrics/myrtea-sdk/v4/engine" - "net/http" - "strconv" - "strings" - "sync" - "time" - "unicode/utf8" - "github.com/go-chi/chi/v5" "github.com/myrteametrics/myrtea-engine-api/v5/internals/export" - "github.com/myrteametrics/myrtea-engine-api/v5/internals/fact" "github.com/myrteametrics/myrtea-engine-api/v5/internals/handlers/render" "github.com/myrteametrics/myrtea-engine-api/v5/internals/security/permissions" "go.uber.org/zap" + "net/http" + "strconv" + "sync" ) type ExportHandler struct { exportWrapper *export.Wrapper } +// NewExportHandler returns a new ExportHandler func NewExportHandler(exportWrapper *export.Wrapper) *ExportHandler { return &ExportHandler{ exportWrapper: exportWrapper, } } +// ExportRequest represents a request for an export +type ExportRequest struct { + export.CSVParameters + FactIDs []int64 `json:"factIDs"` + FileName string `json:"fileName"` +} + // ExportFactStreamed godoc // @Summary CSV streamed export facts in chunks // @Description CSV Streamed export for facts in chunks // @Tags ExportFactStreamed // @Produce octet-stream // @Security Bearer +// @Param request body handlers.ExportRequest true "request (json)" // @Success 200 {file} Returns data to be saved into a file // @Failure 500 "internal server error" -// @Router /engine/export/facts/{id} [get] +// @Router /engine/facts/streamedexport [get] func ExportFactStreamed(w http.ResponseWriter, r *http.Request) { - id := chi.URLParam(r, "id") - - idFact, err := strconv.ParseInt(id, 10, 64) - - if err != nil { - zap.L().Warn("Error on parsing fact 
id", zap.String("idFact", id), zap.Error(err)) - render.Error(w, r, render.ErrAPIParsingInteger, err) + userCtx, _ := GetUserFromContext(r) + if !userCtx.HasPermission(permissions.New(permissions.TypeExport, permissions.All, permissions.ActionGet)) { + render.Error(w, r, render.ErrAPISecurityNoPermissions, errors.New("missing permission")) return } - userCtx, _ := GetUserFromContext(r) - if !userCtx.HasPermission(permissions.New(permissions.TypeExport, strconv.FormatInt(idFact, 10), permissions.ActionGet)) { - render.Error(w, r, render.ErrAPISecurityNoPermissions, errors.New("missing permission")) + var request ExportRequest + err := json.NewDecoder(r.Body).Decode(&request) + if err != nil { + zap.L().Warn("Decode export request json", zap.Error(err)) + render.Error(w, r, render.ErrAPIDecodeJSONBody, err) return } - filename, params, combineFacts, done := handleExportArgs(w, r, err, idFact) - if done { + if len(request.FactIDs) == 0 { + zap.L().Warn("Missing factIDs in export request") + render.Error(w, r, render.ErrAPIMissingParam, errors.New("missing factIDs")) return } - err = HandleStreamedExport(r.Context(), w, combineFacts, filename, params) + err = HandleStreamedExport(r.Context(), w, request) if err != nil { render.Error(w, r, render.ErrAPIProcessError, err) } @@ -69,103 +72,19 @@ func ExportFactStreamed(w http.ResponseWriter, r *http.Request) { } -// handleExportArgs handles the export arguments and returns the filename, the parameters and the facts to export -// done is true if an error occurred and the response has already been written -func handleExportArgs(w http.ResponseWriter, r *http.Request, err error, idFact int64) (filename string, params export.CSVParameters, combineFacts []engine.Fact, done bool) { - f, found, err := fact.R().Get(idFact) - if err != nil { - zap.L().Error("Cannot retrieve fact", zap.Int64("factID", idFact), zap.Error(err)) - render.Error(w, r, render.ErrAPIDBSelectFailed, err) - return "", export.CSVParameters{}, nil, true - } - 
if !found { - zap.L().Warn("fact does not exist", zap.Int64("factID", idFact)) - render.Error(w, r, render.ErrAPIDBResourceNotFound, err) - return "", export.CSVParameters{}, nil, true - } - - filename = r.URL.Query().Get("fileName") - if filename == "" { - filename = fmt.Sprintf("%s_export_%s.csv", f.Name, time.Now().Format("02_01_2006")) - } else { - filename = fmt.Sprintf("%s_%s.csv", time.Now().Format("02_01_2006"), filename) - } - - // suppose that type is csv - params = GetCSVParameters(r) - - combineFacts = append(combineFacts, f) - - // export multiple facts into one file - combineFactIds, err := QueryParamToOptionalInt64Array(r, "combineFactIds", ",", false, []int64{}) - if err != nil { - zap.L().Warn("Could not parse parameter combineFactIds", zap.Error(err)) - } else { - for _, factId := range combineFactIds { - // no duplicates - if factId == idFact { - continue - } - - combineFact, found, err := fact.R().Get(factId) - if err != nil { - zap.L().Error("Export combineFact cannot retrieve fact", zap.Int64("factID", factId), zap.Error(err)) - continue - } - if !found { - zap.L().Warn("Export combineFact fact does not exist", zap.Int64("factID", factId)) - continue - } - combineFacts = append(combineFacts, combineFact) - } - } - return filename, params, combineFacts, false -} - -// GetCSVParameters returns the parameters for the CSV export -func GetCSVParameters(r *http.Request) export.CSVParameters { - result := export.CSVParameters{Separator: ','} - - limit, err := QueryParamToOptionalInt64(r, "limit", -1) - if err != nil { - result.Limit = -1 - } else { - result.Limit = limit - } - - result.Columns = QueryParamToOptionalStringArray(r, "columns", ",", []string{}) - result.ColumnsLabel = QueryParamToOptionalStringArray(r, "columnsLabel", ",", []string{}) - - formatColumnsData := QueryParamToOptionalStringArray(r, "formateColumns", ",", []string{}) - result.FormatColumnsData = make(map[string]string) - for _, formatData := range formatColumnsData { - parts 
:= strings.Split(formatData, ";") - if len(parts) != 2 { - continue - } - key := strings.TrimSpace(parts[0]) - result.FormatColumnsData[key] = parts[1] - } - separator := r.URL.Query().Get("separator") - if separator != "" { - sep, size := utf8.DecodeRuneInString(separator) - if size != 1 { - result.Separator = ',' - } else { - result.Separator = sep - } - } - - return result -} - // HandleStreamedExport actually only handles CSV -func HandleStreamedExport(requestContext context.Context, w http.ResponseWriter, facts []engine.Fact, fileName string, params export.CSVParameters) error { +func HandleStreamedExport(requestContext context.Context, w http.ResponseWriter, request ExportRequest) error { w.Header().Set("Connection", "Keep-Alive") w.Header().Set("Transfer-Encoding", "chunked") w.Header().Set("X-Content-Type-Options", "nosniff") - w.Header().Set("Content-Disposition", "attachment; filename="+strconv.Quote(fileName)) + w.Header().Set("Content-Disposition", "attachment; filename="+strconv.Quote(request.FileName)) w.Header().Set("Content-Type", "application/octet-stream") + + facts := findCombineFacts(request.FactIDs) + if len(facts) == 0 { + return errors.New("no fact found") + } + streamedExport := export.NewStreamedExport() var wg sync.WaitGroup @@ -196,7 +115,7 @@ func HandleStreamedExport(requestContext context.Context, w http.ResponseWriter, defer close(streamedExport.Data) for _, f := range facts { - writerErr = streamedExport.StreamedExportFactHitsFull(ctx, f, params.Limit) + writerErr = streamedExport.StreamedExportFactHitsFull(ctx, f, request.Limit) if writerErr != nil { zap.L().Error("Error during export (StreamedExportFactHitsFullV8)", zap.Error(err)) break // break here when error occurs? 
@@ -209,7 +128,6 @@ func HandleStreamedExport(requestContext context.Context, w http.ResponseWriter, go func() { defer wg.Done() first := true - labels := params.ColumnsLabel for { select { @@ -218,7 +136,7 @@ func HandleStreamedExport(requestContext context.Context, w http.ResponseWriter, return } - data, err := export.ConvertHitsToCSV(hits, params.Columns, labels, params.FormatColumnsData, params.Separator) + data, err := export.ConvertHitsToCSV(hits, request.CSVParameters, first) if err != nil { zap.L().Error("ConvertHitsToCSV error during export (StreamedExportFactHitsFullV8)", zap.Error(err)) @@ -238,7 +156,6 @@ func HandleStreamedExport(requestContext context.Context, w http.ResponseWriter, if first { first = false - labels = []string{} } case <-requestContext.Done(): @@ -289,7 +206,7 @@ func (e *ExportHandler) GetExports(w http.ResponseWriter, r *http.Request) { // @Failure 403 "Status Forbidden: missing permission" // @Failure 404 "Status Not Found: export not found" // @Failure 500 "internal server error" -// @Router /service/exports/{id} [get] +// @Router /engine/exports/{id} [get] func (e *ExportHandler) GetExport(w http.ResponseWriter, r *http.Request) { id := chi.URLParam(r, "id") if id == "" { @@ -323,7 +240,7 @@ func (e *ExportHandler) GetExport(w http.ResponseWriter, r *http.Request) { // @Failure 403 "Status Forbidden: missing permission" // @Failure 404 "Status Not Found: export not found" // @Failure 500 "internal server error" -// @Router /service/exports/{id} [delete] +// @Router /engine/exports/{id} [delete] func (e *ExportHandler) DeleteExport(w http.ResponseWriter, r *http.Request) { id := chi.URLParam(r, "id") if id == "" { @@ -352,13 +269,7 @@ func (e *ExportHandler) DeleteExport(w http.ResponseWriter, r *http.Request) { // @Tags Exports // @Produce json // @Security Bearer -// @Param id path string true "Fact ID" -// @Param fileName query string false "File name" -// @Param limit query int false "Limit" -// @Param columns query string 
false "Columns" -// @Param columnsLabel query string false "Columns label" -// @Param formateColumns query string false "Formate columns" -// @Param separator query string false "Separator" +// @Param request body handlers.ExportRequest true "request (json)" // @Success 200 {object} export.WrapperItem "Status OK: user was added to existing export in queue" // @Success 201 {object} export.WrapperItem "Status Created: new export was added in queue" // @Failure 400 "Bad Request: missing fact id / fact id is not an integer" @@ -366,29 +277,36 @@ func (e *ExportHandler) DeleteExport(w http.ResponseWriter, r *http.Request) { // @Failure 409 {object} export.WrapperItem "Status Conflict: user already exists in export queue" // @Failure 429 "Status Too Many Requests: export queue is full" // @Failure 500 "internal server error" -// @Router /service/exports/fact/{id} [post] +// @Router /engine/exports/fact [post] func (e *ExportHandler) ExportFact(w http.ResponseWriter, r *http.Request) { - id := chi.URLParam(r, "id") - idFact, err := strconv.ParseInt(id, 10, 64) + userCtx, _ := GetUserFromContext(r) + if !userCtx.HasPermission(permissions.New(permissions.TypeExport, permissions.All, permissions.ActionCreate)) { + render.Error(w, r, render.ErrAPISecurityNoPermissions, errors.New("missing permission")) + return + } + var request ExportRequest + err := json.NewDecoder(r.Body).Decode(&request) if err != nil { - zap.L().Warn("Error on parsing fact id", zap.String("idFact", id), zap.Error(err)) - render.Error(w, r, render.ErrAPIParsingInteger, err) + zap.L().Warn("Decode export request json", zap.Error(err)) + render.Error(w, r, render.ErrAPIDecodeJSONBody, err) return } - userCtx, _ := GetUserFromContext(r) - if !userCtx.HasPermission(permissions.New(permissions.TypeExport, permissions.All, permissions.ActionCreate)) { - render.Error(w, r, render.ErrAPISecurityNoPermissions, errors.New("missing permission")) + if len(request.FactIDs) == 0 { + zap.L().Warn("Missing factIDs in 
export request") + render.Error(w, r, render.ErrAPIMissingParam, errors.New("missing factIDs")) return } - filename, params, combinedFacts, done := handleExportArgs(w, r, err, idFact) - if done { + facts := findCombineFacts(request.FactIDs) + if len(facts) == 0 { + zap.L().Warn("No fact was found in export request") + render.Error(w, r, render.ErrAPIDBResourceNotFound, errors.New("No fact was found in export request")) return } - item, status := e.exportWrapper.AddToQueue(combinedFacts, filename, params, userCtx.User) + item, status := e.exportWrapper.AddToQueue(facts, request.FileName, request.CSVParameters, userCtx.User) switch status { case export.CodeAdded: diff --git a/internals/handlers/utils.go b/internals/handlers/utils.go index a163b132..2d5fdad8 100644 --- a/internals/handlers/utils.go +++ b/internals/handlers/utils.go @@ -6,6 +6,9 @@ import ( "crypto/rand" "encoding/base64" "fmt" + "github.com/myrteametrics/myrtea-engine-api/v5/internals/fact" + "github.com/myrteametrics/myrtea-engine-api/v5/internals/utils" + "github.com/myrteametrics/myrtea-sdk/v4/engine" "io" "regexp" "strconv" @@ -31,6 +34,7 @@ const ( parseGlobalVariables = false ) +// QueryParamToOptionalInt parse a string from a string func QueryParamToOptionalInt(r *http.Request, name string, orDefault int) (int, error) { param := r.URL.Query().Get(name) if param != "" { @@ -39,6 +43,7 @@ func QueryParamToOptionalInt(r *http.Request, name string, orDefault int) (int, return orDefault, nil } +// QueryParamToOptionalInt64 parse an int64 from a string func QueryParamToOptionalInt64(r *http.Request, name string, orDefault int64) (int64, error) { param := r.URL.Query().Get(name) if param != "" { @@ -47,6 +52,7 @@ func QueryParamToOptionalInt64(r *http.Request, name string, orDefault int64) (i return orDefault, nil } +// QueryParamToOptionalInt64Array parse multiple int64 entries separated by a separator from a string func QueryParamToOptionalInt64Array(r *http.Request, name, separator string, 
allowDuplicates bool, orDefault []int64) ([]int64, error) { param := r.URL.Query().Get(name) if param == "" { @@ -64,7 +70,7 @@ func QueryParamToOptionalInt64Array(r *http.Request, name, separator string, all } if !allowDuplicates { - return removeDuplicate(result), nil + return utils.RemoveDuplicates(result), nil } return result, nil @@ -217,25 +223,13 @@ func GetUserFromContext(r *http.Request) (users.UserWithPermissions, bool) { return user, true } -func removeDuplicate[T string | int | int64](sliceList []T) []T { - allKeys := make(map[T]bool) - var list []T - for _, item := range sliceList { - if _, value := allKeys[item]; !value { - allKeys[item] = true - list = append(list, item) - } - } - return list -} - // handleError is a helper function that logs the error and sends a response. func handleError(w http.ResponseWriter, r *http.Request, message string, err error, apiError render.APIError) { zap.L().Error(message, zap.Error(err)) render.Error(w, r, apiError, err) } -// Generate a State use by OIDC authentification +// generateRandomState Generate a State used by OIDC authentication func generateRandomState() (string, error) { b := make([]byte, 32) _, err := rand.Read(b) @@ -244,6 +238,8 @@ func generateRandomState() (string, error) { } return base64.StdEncoding.EncodeToString(b), nil } + +// generateEncryptedState Generate a State used by OIDC authentication func generateEncryptedState(key []byte) (string, error) { // Generate random state plainState, err := generateRandomState() @@ -269,6 +265,8 @@ func generateEncryptedState(key []byte) (string, error) { b64State := base64.StdEncoding.EncodeToString(ciphertext) return b64State, nil } + +// verifyEncryptedState Verify the State used by OIDC authentication func verifyEncryptedState(state string, key []byte) (string, error) { // Decode from base64 decodedState, err := base64.StdEncoding.DecodeString(state) @@ -292,3 +290,20 @@ func verifyEncryptedState(state string, key []byte) (string, error) { return 
string(decodedState), nil } + +// findCombineFacts returns the combine facts +func findCombineFacts(combineFactIds []int64) (combineFacts []engine.Fact) { + for _, factId := range utils.RemoveDuplicates(combineFactIds) { + combineFact, found, err := fact.R().Get(factId) + if err != nil { + zap.L().Error("findCombineFacts cannot retrieve fact", zap.Int64("factID", factId), zap.Error(err)) + continue + } + if !found { + zap.L().Warn("findCombineFacts fact does not exist", zap.Int64("factID", factId)) + continue + } + combineFacts = append(combineFacts, combineFact) + } + return combineFacts +} diff --git a/internals/handlers/utils_test.go b/internals/handlers/utils_test.go index 401a5992..d7489f6d 100644 --- a/internals/handlers/utils_test.go +++ b/internals/handlers/utils_test.go @@ -139,23 +139,6 @@ func TestQueryParamToOptionalInt64Array(t *testing.T) { } -func TestRemoveDuplicate(t *testing.T) { - sample := []int64{1, 1, 1, 2, 2, 3, 4} - expectedResult := []int64{1, 2, 3, 4} - result := removeDuplicate(sample) - - if len(result) != len(expectedResult) { - t.FailNow() - } - - for i := 0; i < len(expectedResult); i++ { - if expectedResult[i] != result[i] { - t.FailNow() - } - } - -} - func TestHandleError(t *testing.T) { // response writer and request w := httptest.NewRecorder() diff --git a/internals/router/routes.go b/internals/router/routes.go index 1c694759..865bce4a 100644 --- a/internals/router/routes.go +++ b/internals/router/routes.go @@ -71,6 +71,7 @@ func engineRouter(services Services) http.Handler { r.Post("/facts/execute", handlers.ExecuteFactFromSource) // ?time=2019-05-10T12:00:00.000 debug= r.Get("/facts/{id}/hits", handlers.GetFactHits) // ?time=2019-05-10T12:00:00.000 debug= r.Get("/facts/{id}/es", handlers.FactToESQuery) + r.Get("/facts/streamedexport", handlers.ExportFactStreamed) r.Get("/situations", handlers.GetSituations) r.Get("/situations/{id}", handlers.GetSituation) @@ -173,13 +174,11 @@ func engineRouter(services Services) http.Handler { 
r.Get("/connector/{id}/executions/last", handlers.GetlastConnectorExecutionDateTime) - r.Get("/facts/{id}/streamedexport", handlers.ExportFactStreamed) - // exports r.Get("/exports", services.ExportHandler.GetExports) r.Get("/exports/{id}", services.ExportHandler.GetExport) r.Delete("/exports/{id}", services.ExportHandler.DeleteExport) - r.Post("/exports/fact/{id}", services.ExportHandler.ExportFact) + r.Post("/exports/fact", services.ExportHandler.ExportFact) r.Get("/variablesconfig", handlers.GetVariablesConfig) r.Get("/variablesconfig/{id}", handlers.GetVariableConfig) diff --git a/internals/tasker/situation_reporting.go b/internals/tasker/situation_reporting.go index 113291a3..d5a24bd1 100644 --- a/internals/tasker/situation_reporting.go +++ b/internals/tasker/situation_reporting.go @@ -30,18 +30,16 @@ func verifyCache(key string, timeout time.Duration) bool { // SituationReportingTask struct for close issues created in the current day from the BRMS type SituationReportingTask struct { - ID string `json:"id"` - IssueID string `json:"issueId"` - Subject string `json:"subject"` - BodyTemplate string `json:"bodyTemplate"` - To []string `json:"to"` - AttachmentFileNames []string `json:"attachmentFileNames"` - AttachmentFactIDs []int64 `json:"attachmentFactIds"` - Columns []string `json:"columns"` - FormatColumnsData map[string]string `json:"formateColumns"` - ColumnsLabel []string `json:"columnsLabel"` - Separator rune `json:"separator"` - Timeout string `json:"timeout"` + ID string `json:"id"` + IssueID string `json:"issueId"` + Subject string `json:"subject"` + BodyTemplate string `json:"bodyTemplate"` + To []string `json:"to"` + AttachmentFileNames []string `json:"attachmentFileNames"` + AttachmentFactIDs []int64 `json:"attachmentFactIds"` + Columns []export.Column `json:"columns"` + Separator rune `json:"separator"` + Timeout string `json:"timeout"` } func buildSituationReportingTask(parameters map[string]interface{}) (SituationReportingTask, error) { @@ 
-100,25 +98,43 @@ func buildSituationReportingTask(parameters map[string]interface{}) (SituationRe } if val, ok := parameters["columns"].(string); ok && val != "" { - task.Columns = strings.Split(val, ",") - } + columns := strings.Split(val, ",") + var columnsLabel []string + + if val, ok = parameters["columnsLabel"].(string); ok && val != "" { + columnsLabel = strings.Split(val, ",") + } - if val, ok := parameters["formateColumns"].(string); ok && val != "" { - formatColumnsData := strings.Split(val, ",") - task.FormatColumnsData = make(map[string]string) - for _, formatData := range formatColumnsData { - parts := strings.Split(formatData, ";") - if len(parts) != 2 { - continue + if len(columns) != len(columnsLabel) { + return task, errors.New("parameters 'columns' and 'columns label' have different length") + } + + formatColumnsDataMap := make(map[string]string) + + if val, ok = parameters["formateColumns"].(string); ok && val != "" { + formatColumnsData := strings.Split(val, ",") + for _, formatData := range formatColumnsData { + parts := strings.Split(formatData, ";") + if len(parts) != 2 { + continue + } + key := strings.TrimSpace(parts[0]) + formatColumnsDataMap[key] = parts[1] } - key := strings.TrimSpace(parts[0]) - task.FormatColumnsData[key] = parts[1] } - } + for i, column := range columns { + exportColumn := export.Column{ + Name: column, + Label: columnsLabel[i], + } + + if format, ok := formatColumnsDataMap[column]; ok { + exportColumn.Format = format + } - if val, ok := parameters["columnsLabel"].(string); ok && val != "" { - task.ColumnsLabel = strings.Split(val, ",") + task.Columns = append(task.Columns, exportColumn) + } } if val, ok := parameters["separator"].(string); ok && val != "" { @@ -127,10 +143,6 @@ func buildSituationReportingTask(parameters map[string]interface{}) (SituationRe task.Separator = ',' } - if len(task.Columns) != len(task.ColumnsLabel) { - return task, errors.New("parameters 'columns' and 'columns label' have different 
length") - } - if val, ok := parameters["timeout"].(string); ok && val != "" { task.Timeout = val } else { @@ -202,7 +214,7 @@ func (task SituationReportingTask) Perform(key string, context ContextData) erro return err } - csvAttachment, err := export.ConvertHitsToCSV(fullHits, task.Columns, task.ColumnsLabel, task.FormatColumnsData, task.Separator) + csvAttachment, err := export.ConvertHitsToCSV(fullHits, export.CSVParameters{Columns: task.Columns, Separator: task.Separator}, true) if err != nil { return err } diff --git a/internals/utils/utils.go b/internals/utils/utils.go index dae701c3..4cbfd750 100644 --- a/internals/utils/utils.go +++ b/internals/utils/utils.go @@ -1,12 +1,13 @@ package utils -func RemoveDuplicates(stringSlice []string) []string { - keys := make(map[string]bool) - list := []string{} - for _, entry := range stringSlice { - if _, value := keys[entry]; !value { - keys[entry] = true - list = append(list, entry) +// RemoveDuplicates remove duplicate values from a slice +func RemoveDuplicates[T string | int | int64](sliceList []T) []T { + allKeys := make(map[T]bool) + var list []T + for _, item := range sliceList { + if _, value := allKeys[item]; !value { + allKeys[item] = true + list = append(list, item) } } return list diff --git a/internals/utils/utils_test.go b/internals/utils/utils_test.go new file mode 100644 index 00000000..ab230f88 --- /dev/null +++ b/internals/utils/utils_test.go @@ -0,0 +1,51 @@ +package utils + +import "testing" + +func TestRemoveDuplicates_Int64(t *testing.T) { + sample := []int64{1, 1, 1, 2, 2, 3, 4} + expectedResult := []int64{1, 2, 3, 4} + result := RemoveDuplicates(sample) + + if len(result) != len(expectedResult) { + t.FailNow() + } + + for i := 0; i < len(expectedResult); i++ { + if expectedResult[i] != result[i] { + t.FailNow() + } + } +} + +func TestRemoveDuplicates_Int(t *testing.T) { + sample := []int{1, 1, 1, 2, 2, 3, 4} + expectedResult := []int{1, 2, 3, 4} + result := RemoveDuplicates(sample) + + if 
len(result) != len(expectedResult) { + t.FailNow() + } + + for i := 0; i < len(expectedResult); i++ { + if expectedResult[i] != result[i] { + t.FailNow() + } + } +} + +func TestRemoveDuplicates_String(t *testing.T) { + sample := []string{"a", "a", "a", "b", "b", "c", "d"} + expectedResult := []string{"a", "b", "c", "d"} + result := RemoveDuplicates(sample) + + if len(result) != len(expectedResult) { + t.FailNow() + } + + for i := 0; i < len(expectedResult); i++ { + if expectedResult[i] != result[i] { + t.FailNow() + } + } +} From f21e01f5bec4e0b603aebbfa7827133e96c4a22c Mon Sep 17 00:00:00 2001 From: Paul Meyer Date: Thu, 30 Nov 2023 14:00:12 +0100 Subject: [PATCH 16/35] changed export method to post --- internals/handlers/export_handlers.go | 4 ++-- internals/handlers/utils.go | 2 -- internals/router/routes.go | 2 +- 3 files changed, 3 insertions(+), 5 deletions(-) diff --git a/internals/handlers/export_handlers.go b/internals/handlers/export_handlers.go index f38e0d18..2769143c 100644 --- a/internals/handlers/export_handlers.go +++ b/internals/handlers/export_handlers.go @@ -42,7 +42,7 @@ type ExportRequest struct { // @Param request body handlers.ExportRequest true "request (json)" // @Success 200 {file} Returns data to be saved into a file // @Failure 500 "internal server error" -// @Router /engine/facts/streamedexport [get] +// @Router /engine/facts/streamedexport [post] func ExportFactStreamed(w http.ResponseWriter, r *http.Request) { userCtx, _ := GetUserFromContext(r) if !userCtx.HasPermission(permissions.New(permissions.TypeExport, permissions.All, permissions.ActionGet)) { @@ -302,7 +302,7 @@ func (e *ExportHandler) ExportFact(w http.ResponseWriter, r *http.Request) { facts := findCombineFacts(request.FactIDs) if len(facts) == 0 { zap.L().Warn("No fact was found in export request") - render.Error(w, r, render.ErrAPIDBResourceNotFound, errors.New("No fact was found in export request")) + render.Error(w, r, render.ErrAPIDBResourceNotFound, errors.New("no 
fact was found in export request")) return } diff --git a/internals/handlers/utils.go b/internals/handlers/utils.go index 2d5fdad8..27c6e9d8 100644 --- a/internals/handlers/utils.go +++ b/internals/handlers/utils.go @@ -296,11 +296,9 @@ func findCombineFacts(combineFactIds []int64) (combineFacts []engine.Fact) { for _, factId := range utils.RemoveDuplicates(combineFactIds) { combineFact, found, err := fact.R().Get(factId) if err != nil { - zap.L().Error("findCombineFacts cannot retrieve fact", zap.Int64("factID", factId), zap.Error(err)) continue } if !found { - zap.L().Warn("findCombineFacts fact does not exist", zap.Int64("factID", factId)) continue } combineFacts = append(combineFacts, combineFact) diff --git a/internals/router/routes.go b/internals/router/routes.go index 865bce4a..d9cc3753 100644 --- a/internals/router/routes.go +++ b/internals/router/routes.go @@ -71,7 +71,7 @@ func engineRouter(services Services) http.Handler { r.Post("/facts/execute", handlers.ExecuteFactFromSource) // ?time=2019-05-10T12:00:00.000 debug= r.Get("/facts/{id}/hits", handlers.GetFactHits) // ?time=2019-05-10T12:00:00.000 debug= r.Get("/facts/{id}/es", handlers.FactToESQuery) - r.Get("/facts/streamedexport", handlers.ExportFactStreamed) + r.Post("/facts/streamedexport", handlers.ExportFactStreamed) r.Get("/situations", handlers.GetSituations) r.Get("/situations/{id}", handlers.GetSituation) From f638da614074c7e7796cf6464e352570a207437a Mon Sep 17 00:00:00 2001 From: Paul Meyer Date: Thu, 30 Nov 2023 16:56:42 +0100 Subject: [PATCH 17/35] wip: reworking notification system --- go.mod | 1 - go.sum | 19 +---- internals/app/services.go | 1 + internals/handlers/notifier_handlers.go | 33 +++++++++ internals/notifier/manager.go | 2 +- internals/notifier/notification/handler.go | 51 +++++++++++++ .../notifier/notification/handler_test.go | 23 ++++++ .../notifier/notification/notification.go | 25 ++++++- .../notification/notification_mock.go | 1 + .../notification/postgres_repository.go | 
72 +++++++++---------- internals/notifier/notification/repository.go | 4 +- internals/notifier/notification/utils.go | 11 +++ 12 files changed, 183 insertions(+), 60 deletions(-) create mode 100644 internals/notifier/notification/handler.go create mode 100644 internals/notifier/notification/handler_test.go create mode 100644 internals/notifier/notification/utils.go diff --git a/go.mod b/go.mod index 0307e7b7..60ad25f2 100644 --- a/go.mod +++ b/go.mod @@ -4,7 +4,6 @@ go 1.20 require ( github.com/Masterminds/squirrel v1.5.3 - github.com/alexmullins/zip v0.0.0-20180717182244-4affb64b04d0 github.com/coreos/go-oidc/v3 v3.6.0 github.com/crewjam/saml v0.4.6 github.com/dgrijalva/jwt-go v3.2.0+incompatible diff --git a/go.sum b/go.sum index 36856edf..19569d75 100644 --- a/go.sum +++ b/go.sum @@ -43,15 +43,11 @@ github.com/PaesslerAG/gval v1.2.2 h1:Y7iBzhgE09IGTt5QgGQ2IdaYYYOU134YGHBThD+wm9E github.com/PaesslerAG/gval v1.2.2/go.mod h1:XRFLwvmkTEdYziLdaCeCa5ImcGVrfQbeNUbVR+C6xac= github.com/PaesslerAG/jsonpath v0.1.0 h1:gADYeifvlqK3R3i2cR5B4DGgxLXIPb3TRTH1mGi0jPI= github.com/PaesslerAG/jsonpath v0.1.0/go.mod h1:4BzmtoM/PI8fPO4aQGIusjGxGir2BzcV0grWtFzq1Y8= -github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= -github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE= github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= 
-github.com/alexmullins/zip v0.0.0-20180717182244-4affb64b04d0 h1:BVts5dexXf4i+JX8tXlKT0aKoi38JwTXSe+3WUneX0k= -github.com/alexmullins/zip v0.0.0-20180717182244-4affb64b04d0/go.mod h1:FDIQmoMNJJl5/k7upZEnGvgWVZfFeE6qHeN7iCMbCsA= github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8= github.com/beevik/etree v1.1.0 h1:T0xke/WvNtMoCqgzPhkX2r4rjY3GDZFi+FjpRZY2Jbs= @@ -62,6 +58,7 @@ github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24 github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= +github.com/bufbuild/protocompile v0.4.0 h1:LbFKd2XowZvQ/kajzguUp2DC9UEIQhIq77fZZlaQsNA= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= @@ -224,11 +221,8 @@ github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51 github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc= github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= -github.com/hashicorp/go-hclog v0.0.0-20180709165350-ff2cf002a8dd/go.mod h1:9bjs9uLqI8l75knNv3lV1kA55veR+WUPSiKIWcQHudI= github.com/hashicorp/go-hclog v1.3.1 h1:vDwF1DFNZhntP4DAjuTpOw3uEgMUpXh1pB5fW9DqHpo= github.com/hashicorp/go-hclog v1.3.1/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M= -github.com/hashicorp/go-plugin v1.3.0 
h1:4d/wJojzvHV1I4i/rrjVaeuyxWrLzDE1mDCyDy8fXS8= -github.com/hashicorp/go-plugin v1.3.0/go.mod h1:F9eH4LrE/ZsRdbwhfjs9k9HoDUwAHnYtXdgmf1AVNs0= github.com/hashicorp/go-plugin v1.5.2 h1:aWv8eimFqWlsEiMrYZdPYl+FdHaBJSN4AWwGWfT1G2Y= github.com/hashicorp/go-plugin v1.5.2/go.mod h1:w1sAEES3g3PuV/RzUrgow20W2uErMly84hhD3um1WL4= github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= @@ -238,8 +232,7 @@ github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T github.com/hashicorp/yamux v0.0.0-20180604194846-3520598351bb h1:b5rjCoWHc7eqmAS4/qyk21ZsHyb6Mxv/jykxvNTkU4M= github.com/hashicorp/yamux v0.0.0-20180604194846-3520598351bb/go.mod h1:+NfK9FKeTrX5uv1uIXGdwYDTeHna2qgaIlx54MXqjAM= github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= -github.com/jhump/protoreflect v1.6.0 h1:h5jfMVslIg6l29nsMs0D8Wj17RDVdNYti0vDN/PZZoE= -github.com/jhump/protoreflect v1.6.0/go.mod h1:eaTn3RZAmMBcV0fifFvlm6VHNz3wSkYyXYWUh7ymB74= +github.com/jhump/protoreflect v1.15.1 h1:HUMERORf3I3ZdX05WaQ6MIpd/NJ434hTp5YiKgfCL6c= github.com/jmoiron/sqlx v1.2.0 h1:41Ip0zITnmWNR/vHV+S4m+VoUivnWY5E4OJfLZjCJMA= github.com/jmoiron/sqlx v1.2.0/go.mod h1:1FEQNm3xlJgrMD+FBdI9+xvCksHtbpVBBw5dYhBSsks= github.com/jonboulle/clockwork v0.2.2 h1:UOGuzwb1PwsrDAObMuhUnj0p5ULPj8V/xJ7Kx9qUBdQ= @@ -314,7 +307,6 @@ github.com/mattn/go-sqlite3 v1.9.0 h1:pDRiWfl+++eC2FEFRy6jXmQlvp4Yh3z1MJKg4UeYM/ github.com/mattn/go-sqlite3 v1.9.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc= github.com/matttproud/golang_protobuf_extensions v1.0.1 h1:4hp9jkHxhMHkqkrB3Ix0jegS5sx/RkqARlsWZ6pIwiU= github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= -github.com/mitchellh/go-testing-interface v0.0.0-20171004221916-a61a99592b77/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= github.com/mitchellh/go-testing-interface v1.0.0 
h1:fzU/JVNcaqHQEcVFAKeR41fkiLdIPrefOvVG1VZ96U0= github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= @@ -329,8 +321,6 @@ github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9G github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= -github.com/myrteametrics/myrtea-sdk/v4 v4.4.5 h1:8dbIWpNLzvOq9/fQTiJUpZd56NRVAEoAmcRQoC+uD5c= -github.com/myrteametrics/myrtea-sdk/v4 v4.4.5/go.mod h1:wa9nwNcFGpGbZeqXXqhTLp7sXERbCrRhhcASGY6H0QA= github.com/myrteametrics/myrtea-sdk/v4 v4.4.7 h1:cIn6+hCgzGAaWGjtAm0rFPdXX/cl6z4wWQuWI+KG9eQ= github.com/myrteametrics/myrtea-sdk/v4 v4.4.7/go.mod h1:wa9nwNcFGpGbZeqXXqhTLp7sXERbCrRhhcASGY6H0QA= github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= @@ -496,7 +486,6 @@ golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.12.0 h1:rmsUpXtvNzj340zd98LZ4KntptpfRHwpFOHG188oHXc= -golang.org/x/net v0.0.0-20180530234432-1e491301e022/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -694,7 +683,6 @@ 
google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCID google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/appengine v1.6.7 h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c= google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/genproto v0.0.0-20170818010345-ee236bd376b0/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= @@ -727,7 +715,6 @@ google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6D google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210917145530-b395a37504d4 h1:ysnBoUyeL/H6RCvNRhWHjKoDEmguI+mPU+qHgK8qv/w= google.golang.org/genproto v0.0.0-20210917145530-b395a37504d4/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= -google.golang.org/grpc v1.8.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= @@ -757,8 +744,6 @@ google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlba google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= 
-google.golang.org/protobuf v1.28.1 h1:d0NfwRgPtno5B1Wa6L2DAG+KivqkdutMf1UhdNx175w= -google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= google.golang.org/protobuf v1.28.2-0.20230222093303-bc1253ad3743 h1:yqElulDvOF26oZ2O+2/aoX7mQ8DY/6+p39neytrycd8= google.golang.org/protobuf v1.28.2-0.20230222093303-bc1253ad3743/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= diff --git a/internals/app/services.go b/internals/app/services.go index c319b757..23ca2662 100644 --- a/internals/app/services.go +++ b/internals/app/services.go @@ -78,6 +78,7 @@ func stopServices() { } func initNotifier() { + notification.ReplaceHandlerGlobals(notification.NewHandler()) notifier.ReplaceGlobals(notifier.NewNotifier()) } diff --git a/internals/handlers/notifier_handlers.go b/internals/handlers/notifier_handlers.go index 7936954e..27b48a1d 100644 --- a/internals/handlers/notifier_handlers.go +++ b/internals/handlers/notifier_handlers.go @@ -1,7 +1,12 @@ package handlers import ( + "encoding/json" + "github.com/google/uuid" + "github.com/myrteametrics/myrtea-engine-api/v5/internals/export" + "github.com/myrteametrics/myrtea-engine-api/v5/internals/notifier/notification" "net/http" + "time" "github.com/myrteametrics/myrtea-engine-api/v5/internals/handlers/render" "github.com/myrteametrics/myrtea-engine-api/v5/internals/models" @@ -42,11 +47,39 @@ func NotificationsWSRegister(w http.ResponseWriter, r *http.Request) { zap.L().Error("Add new WS Client to manager", zap.Error(err)) return } + go func(client *notifier.WebsocketClient) { + zap.L().Info("starting notifier") + ticker := time.NewTicker(1 * time.Second) + after := time.After(30 * time.Second) + for { + select { + case <-ticker.C: + notifier.C().SendToUsers(ExportNotification{Status: export.StatusPending, Export: export.WrapperItem{Id: uuid.New().String(), FileName: "test.bla"}}, []uuid.UUID{user.ID}) + 
zap.L().Info("send notification") + case <-after: + return + } + } + }(client) go client.Write() // go client.Read() // Disabled until proper usage } +type ExportNotification struct { + notification.Notification + Export export.WrapperItem `json:"export"` + Status int `json:"status"` +} + +func (e ExportNotification) ToBytes() ([]byte, error) { + b, err := json.Marshal(e) + if err != nil { + return nil, err + } + return b, nil +} + // NotificationsSSERegister godoc // @Summary Register a new client to the notifications system using SSE // @Description Register a new client to the notifications system using SSE diff --git a/internals/notifier/manager.go b/internals/notifier/manager.go index 498c163d..aee6f0f1 100644 --- a/internals/notifier/manager.go +++ b/internals/notifier/manager.go @@ -12,7 +12,7 @@ type ClientManager struct { Clients map[Client]bool } -// NewClientManager renders a new manager responsible of every connection +// NewClientManager renders a new manager responsible for every connection func NewClientManager() *ClientManager { return &ClientManager{ Clients: make(map[Client]bool), diff --git a/internals/notifier/notification/handler.go b/internals/notifier/notification/handler.go new file mode 100644 index 00000000..f495ba30 --- /dev/null +++ b/internals/notifier/notification/handler.go @@ -0,0 +1,51 @@ +package notification + +import "sync" + +var ( + _globalHandlerMu sync.RWMutex + _globalHandler *Handler +) + +// H is used to access the global notification handler singleton +func H() *Handler { + _globalHandlerMu.RLock() + defer _globalHandlerMu.RUnlock() + return _globalHandler +} + +// ReplaceHandlerGlobals affects a new repository to the global notification handler singleton +func ReplaceHandlerGlobals(handler *Handler) func() { + _globalHandlerMu.Lock() + defer _globalHandlerMu.Unlock() + + prev := _globalHandler + _globalHandler = handler + return func() { ReplaceHandlerGlobals(prev) } +} + +type Handler struct { + notificationTypes 
map[string]Notification +} + +func NewHandler() *Handler { + handler := &Handler{ + notificationTypes: make(map[string]Notification), + } + handler.RegisterNotificationTypes() + return handler +} + +// RegisterNotificationType register a new notification type +func (h *Handler) RegisterNotificationType(notification Notification) { + h.notificationTypes[getType(notification)] = notification +} + +// UnregisterNotificationType unregister a notification type +func (h *Handler) UnregisterNotificationType(notification Notification) { + delete(h.notificationTypes, getType(notification)) +} + +func (h *Handler) RegisterNotificationTypes() { + h.RegisterNotificationType(BaseNotification{}) +} diff --git a/internals/notifier/notification/handler_test.go b/internals/notifier/notification/handler_test.go new file mode 100644 index 00000000..dfdcdd27 --- /dev/null +++ b/internals/notifier/notification/handler_test.go @@ -0,0 +1,23 @@ +package notification + +import ( + "github.com/myrteametrics/myrtea-sdk/v4/expression" + "testing" +) + +func TestNewHandler(t *testing.T) { + handler := NewHandler() + expression.AssertNotEqual(t, handler, nil, "NewHandler() should not return nil") + expression.AssertNotEqual(t, len(handler.notificationTypes), 0, "NewHandler() should not return an empty notificationTypes") +} + +func TestHandler_RegisterNotificationType(t *testing.T) { +} + +func TestHandler_RegisterNotificationTypes(t *testing.T) { + +} + +func TestHandler_UnregisterNotificationType(t *testing.T) { + +} diff --git a/internals/notifier/notification/notification.go b/internals/notifier/notification/notification.go index 46544ab6..7ccca8a5 100644 --- a/internals/notifier/notification/notification.go +++ b/internals/notifier/notification/notification.go @@ -1,12 +1,33 @@ package notification -//FrontNotification data structure represente the notification and her current state -type FrontNotification struct { +import "encoding/json" + +// BaseNotification data structure represents a 
basic notification and her current state +type BaseNotification struct { Notification + Id int64 + Type string IsRead bool } // Notification is a general interface for all notifications types type Notification interface { ToBytes() ([]byte, error) + NewInstance(id int64, data []byte, isRead bool) (Notification, error) +} + +func (n BaseNotification) ToBytes() ([]byte, error) { + //TODO: + return nil, nil +} + +func (n BaseNotification) NewInstance(id int64, data []byte, isRead bool) (Notification, error) { + var notification BaseNotification + err := json.Unmarshal(data, ¬ification) + if err != nil { + return nil, err + } + notification.Id = id + notification.IsRead = isRead + return ¬ification, nil } diff --git a/internals/notifier/notification/notification_mock.go b/internals/notifier/notification/notification_mock.go index 4a8ca546..9e79ff93 100644 --- a/internals/notifier/notification/notification_mock.go +++ b/internals/notifier/notification/notification_mock.go @@ -7,6 +7,7 @@ import ( // MockNotification is an implementation of a notification main type type MockNotification struct { + BaseNotification ID int64 `json:"id"` Type string `json:"type"` CreationDate time.Time `json:"creationDate"` diff --git a/internals/notifier/notification/postgres_repository.go b/internals/notifier/notification/postgres_repository.go index 94496668..b5628a7f 100644 --- a/internals/notifier/notification/postgres_repository.go +++ b/internals/notifier/notification/postgres_repository.go @@ -7,7 +7,6 @@ import ( "github.com/jmoiron/sqlx" "github.com/myrteametrics/myrtea-engine-api/v5/internals/dbutils" - "go.uber.org/zap" ) // PostgresRepository is a repository containing the Fact definition based on a PSQL database and @@ -27,16 +26,16 @@ func NewPostgresRepository(dbClient *sqlx.DB) Repository { // Create creates a new Notification definition in the repository func (r *PostgresRepository) Create(notif Notification) (int64, error) { - data, err := json.Marshal(notif) if err != 
nil { return -1, err } ts := time.Now().Truncate(1 * time.Millisecond).UTC() - query := `INSERT INTO notifications_history_v1 (id, data, created_at) VALUES (DEFAULT, :data, :created_at) RETURNING id` + query := `INSERT INTO notifications_history_v1 (id, data, type, created_at) VALUES (DEFAULT, :data, :type :created_at) RETURNING id` params := map[string]interface{}{ "data": data, + "type": getType(notif), "created_at": ts, } @@ -55,19 +54,18 @@ func (r *PostgresRepository) Create(notif Notification) (int64, error) { return id, nil } -// Get returns a notification by it's ID -func (r *PostgresRepository) Get(id int64) *FrontNotification { +// Get returns a notification by its ID +func (r *PostgresRepository) Get(id int64) (Notification, error) { // TODO: "ORDER BY" should be an option in dbutils.DBQueryOptionnal - query := `SELECT id, data, isread FROM notifications_history_v1 WHERE id = :id` + query := `SELECT id, data, isread, type FROM notifications_history_v1 WHERE id = :id` params := map[string]interface{}{ "id": id, } rows, err := r.conn.NamedQuery(query, params) if err != nil { - zap.L().Error("", zap.Error(err)) - return nil + return nil, errors.New("couldn't retrieve any notification with this id. The query is equal to: " + err.Error()) } defer rows.Close() @@ -75,35 +73,35 @@ func (r *PostgresRepository) Get(id int64) *FrontNotification { var id int64 var data string var isRead bool + var notifType string - err := rows.Scan(&id, &data, &isRead) + err := rows.Scan(&id, &data, &isRead, ¬ifType) if err != nil { - zap.L().Error("", zap.Error(err)) - return nil + return nil, errors.New("couldn't retrieve any notification. 
The query is equal to: " + err.Error()) } - var notif MockNotification - err = json.Unmarshal([]byte(data), ¬if) - if err != nil { - zap.L().Error("", zap.Error(err)) - return nil + t, ok := H().notificationTypes[notifType] + + if !ok { + return nil, errors.New("notification type does not exist") } - notif.ID = id + instance, err := t.NewInstance(id, []byte(data), isRead) - return &FrontNotification{ - Notification: notif, - IsRead: isRead, + if err != nil { + return nil, errors.New("notification couldn't be instanced") } + + return instance, nil } - return nil + return nil, errors.New("no notification found with this id") } // GetByRoles returns all notifications related to a certain list of roles -func (r *PostgresRepository) GetAll(queryOptionnal dbutils.DBQueryOptionnal) ([]FrontNotification, error) { +func (r *PostgresRepository) GetAll(queryOptionnal dbutils.DBQueryOptionnal) ([]Notification, error) { // TODO: "ORDER BY" should be an option in dbutils.DBQueryOptionnal - query := `SELECT id, data, isread FROM notifications_history_v1` + query := `SELECT id, data, isread, type FROM notifications_history_v1` params := map[string]interface{}{} if queryOptionnal.MaxAge > 0 { query += ` WHERE created_at > :created_at` @@ -125,32 +123,32 @@ func (r *PostgresRepository) GetAll(queryOptionnal dbutils.DBQueryOptionnal) ([] } defer rows.Close() - notifications := make([]FrontNotification, 0) + notifications := make([]Notification, 0) for rows.Next() { var id int64 var data string - var notif MockNotification - var isRead bool + var notifType string - err := rows.Scan(&id, &data, &isRead) + err := rows.Scan(&id, &data, &isRead, ¬ifType) if err != nil { return nil, errors.New("couldn't scan the notification data:" + err.Error()) } - // Retrieve data json data - err = json.Unmarshal([]byte(data), ¬if) - if err != nil { - return nil, errors.New("couldn't convert data content:" + err.Error()) + t, ok := H().notificationTypes[notifType] + + if !ok { + return nil, 
errors.New("notification type does not exist") } - notif.ID = id + instance, err := t.NewInstance(id, []byte(data), isRead) + + if err != nil { + return nil, errors.New("notification couldn't be instanced") + } - notifications = append(notifications, FrontNotification{ - Notification: notif, - IsRead: isRead, - }) + notifications = append(notifications, instance) } if err != nil { return nil, errors.New("deformed Data " + err.Error()) @@ -178,7 +176,7 @@ func (r *PostgresRepository) Delete(id int64) error { return nil } -//UpdateRead updates a notification status by changing the isRead state to true once it has been read +// UpdateRead updates a notification status by changing the isRead state to true once it has been read func (r *PostgresRepository) UpdateRead(id int64, status bool) error { query := `UPDATE notifications_history_v1 SET isread = :status WHERE id = :id` diff --git a/internals/notifier/notification/repository.go b/internals/notifier/notification/repository.go index a789ffd2..5e91ed2a 100644 --- a/internals/notifier/notification/repository.go +++ b/internals/notifier/notification/repository.go @@ -11,8 +11,8 @@ import ( // It allows standard CRUD operation on facts type Repository interface { Create(notif Notification) (int64, error) - Get(id int64) *FrontNotification - GetAll(queryOptionnal dbutils.DBQueryOptionnal) ([]FrontNotification, error) + Get(id int64) (Notification, error) + GetAll(queryOptionnal dbutils.DBQueryOptionnal) ([]Notification, error) Delete(id int64) error UpdateRead(id int64, state bool) error } diff --git a/internals/notifier/notification/utils.go b/internals/notifier/notification/utils.go new file mode 100644 index 00000000..e8cc152b --- /dev/null +++ b/internals/notifier/notification/utils.go @@ -0,0 +1,11 @@ +package notification + +import "reflect" + +func getType(myvar interface{}) string { + if t := reflect.TypeOf(myvar); t.Kind() == reflect.Ptr { + return "*" + t.Elem().Name() + } else { + return t.Name() + } +} From 
0ec576ee087491b95249dbebc3169e2a7b49e5e4 Mon Sep 17 00:00:00 2001 From: SchawnnDev Date: Mon, 4 Dec 2023 20:47:24 +0100 Subject: [PATCH 18/35] changed notification way of work --- internals/handlers/notifier_handlers.go | 16 ---- internals/notifier/notification/handler.go | 1 + .../notifier/notification/handler_test.go | 38 +++++++- .../notifier/notification/notification.go | 31 ++++--- .../notification/notification_export.go | 44 +++++++++ .../notification/notification_test.go | 92 +++++++++++++++++++ .../notification/postgres_repository.go | 9 +- internals/notifier/notifier.go | 6 +- 8 files changed, 196 insertions(+), 41 deletions(-) create mode 100644 internals/notifier/notification/notification_export.go create mode 100644 internals/notifier/notification/notification_test.go diff --git a/internals/handlers/notifier_handlers.go b/internals/handlers/notifier_handlers.go index 27b48a1d..34c5274c 100644 --- a/internals/handlers/notifier_handlers.go +++ b/internals/handlers/notifier_handlers.go @@ -1,10 +1,8 @@ package handlers import ( - "encoding/json" "github.com/google/uuid" "github.com/myrteametrics/myrtea-engine-api/v5/internals/export" - "github.com/myrteametrics/myrtea-engine-api/v5/internals/notifier/notification" "net/http" "time" @@ -66,20 +64,6 @@ func NotificationsWSRegister(w http.ResponseWriter, r *http.Request) { // go client.Read() // Disabled until proper usage } -type ExportNotification struct { - notification.Notification - Export export.WrapperItem `json:"export"` - Status int `json:"status"` -} - -func (e ExportNotification) ToBytes() ([]byte, error) { - b, err := json.Marshal(e) - if err != nil { - return nil, err - } - return b, nil -} - // NotificationsSSERegister godoc // @Summary Register a new client to the notifications system using SSE // @Description Register a new client to the notifications system using SSE diff --git a/internals/notifier/notification/handler.go b/internals/notifier/notification/handler.go index f495ba30..f65dff28 
100644 --- a/internals/notifier/notification/handler.go +++ b/internals/notifier/notification/handler.go @@ -48,4 +48,5 @@ func (h *Handler) UnregisterNotificationType(notification Notification) { func (h *Handler) RegisterNotificationTypes() { h.RegisterNotificationType(BaseNotification{}) + h.RegisterNotificationType(ExportNotification{}) } diff --git a/internals/notifier/notification/handler_test.go b/internals/notifier/notification/handler_test.go index dfdcdd27..198e48e9 100644 --- a/internals/notifier/notification/handler_test.go +++ b/internals/notifier/notification/handler_test.go @@ -11,13 +11,45 @@ func TestNewHandler(t *testing.T) { expression.AssertNotEqual(t, len(handler.notificationTypes), 0, "NewHandler() should not return an empty notificationTypes") } -func TestHandler_RegisterNotificationType(t *testing.T) { +func TestHandler_RegisterNotificationType_AddsNewType(t *testing.T) { + handler := NewHandler() + notification := BaseNotification{} + handler.RegisterNotificationType(notification) + _, exists := handler.notificationTypes[getType(notification)] + expression.AssertEqual(t, exists, true, "RegisterNotificationType() should add new type") } -func TestHandler_RegisterNotificationTypes(t *testing.T) { +func TestHandler_RegisterNotificationType_OverwritesExistingType(t *testing.T) { + handler := NewHandler() + notification := BaseNotification{} + handler.RegisterNotificationType(notification) + notification2 := BaseNotification{} // Assuming this has the same type as the first one + handler.RegisterNotificationType(notification2) + expression.AssertEqual(t, handler.notificationTypes[getType(notification)], notification2, "RegisterNotificationType() should overwrite existing type") +} +func TestHandler_UnregisterNotificationType_RemovesExistingType(t *testing.T) { + handler := NewHandler() + notification := BaseNotification{} + handler.RegisterNotificationType(notification) + handler.UnregisterNotificationType(notification) + _, exists := 
handler.notificationTypes[getType(notification)] + expression.AssertEqual(t, exists, false, "UnregisterNotificationType() should remove existing type") } -func TestHandler_UnregisterNotificationType(t *testing.T) { +func TestHandler_UnregisterNotificationType_DoesNothingForNonExistingType(t *testing.T) { + handler := NewHandler() + notification := BaseNotification{} + handler.UnregisterNotificationType(notification) + _, exists := handler.notificationTypes[getType(notification)] + expression.AssertEqual(t, exists, false, "UnregisterNotificationType() should do nothing for non-existing type") +} +func TestReplaceHandlerGlobals_ReplacesGlobalHandler(t *testing.T) { + handler := NewHandler() + prevHandler := H() + undo := ReplaceHandlerGlobals(handler) + expression.AssertEqual(t, H(), handler, "ReplaceHandlerGlobals() should replace global handler") + undo() + expression.AssertEqual(t, H(), prevHandler, "Undo function should restore previous global handler") } diff --git a/internals/notifier/notification/notification.go b/internals/notifier/notification/notification.go index 7ccca8a5..02354d4f 100644 --- a/internals/notifier/notification/notification.go +++ b/internals/notifier/notification/notification.go @@ -1,14 +1,8 @@ package notification -import "encoding/json" - -// BaseNotification data structure represents a basic notification and her current state -type BaseNotification struct { - Notification - Id int64 - Type string - IsRead bool -} +import ( + "encoding/json" +) // Notification is a general interface for all notifications types type Notification interface { @@ -16,11 +10,15 @@ type Notification interface { NewInstance(id int64, data []byte, isRead bool) (Notification, error) } -func (n BaseNotification) ToBytes() ([]byte, error) { - //TODO: - return nil, nil +// BaseNotification data structure represents a basic notification and her current state +type BaseNotification struct { + Notification `json:"-"` + Id int64 `json:"id"` + IsRead bool `json:"isRead"` 
+ Type string `json:"type"` } +// NewInstance returns a new instance of a BaseNotification func (n BaseNotification) NewInstance(id int64, data []byte, isRead bool) (Notification, error) { var notification BaseNotification err := json.Unmarshal(data, ¬ification) @@ -29,5 +27,14 @@ func (n BaseNotification) NewInstance(id int64, data []byte, isRead bool) (Notif } notification.Id = id notification.IsRead = isRead + notification.Notification = notification return ¬ification, nil } + +func (n BaseNotification) ToBytes() ([]byte, error) { + b, err := json.Marshal(n) + if err != nil { + return nil, err + } + return b, nil +} diff --git a/internals/notifier/notification/notification_export.go b/internals/notifier/notification/notification_export.go new file mode 100644 index 00000000..a451fa54 --- /dev/null +++ b/internals/notifier/notification/notification_export.go @@ -0,0 +1,44 @@ +package notification + +import ( + "encoding/json" + "github.com/myrteametrics/myrtea-engine-api/v5/internals/export" +) + +type ExportNotification struct { + BaseNotification + Export export.WrapperItem `json:"export"` + Status int `json:"status"` +} + +func NewExportNotification(id int64, export export.WrapperItem, status int) *ExportNotification { + return &ExportNotification{ + BaseNotification: BaseNotification{ + Id: id, + Type: "ExportNotification", + }, + Export: export, + Status: status, + } +} + +func (e ExportNotification) ToBytes() ([]byte, error) { + b, err := json.Marshal(e) + if err != nil { + return nil, err + } + return b, nil +} + +// NewInstance returns a new instance of a ExportNotification +func (e ExportNotification) NewInstance(id int64, data []byte, isRead bool) (Notification, error) { + var notification ExportNotification + err := json.Unmarshal(data, ¬ification) + if err != nil { + return nil, err + } + notification.Id = id + notification.IsRead = isRead + notification.Notification = notification + return ¬ification, nil +} diff --git 
a/internals/notifier/notification/notification_test.go b/internals/notifier/notification/notification_test.go new file mode 100644 index 00000000..e2ae90be --- /dev/null +++ b/internals/notifier/notification/notification_test.go @@ -0,0 +1,92 @@ +package notification + +import ( + "github.com/google/uuid" + "github.com/myrteametrics/myrtea-engine-api/v5/internals/export" + "github.com/myrteametrics/myrtea-sdk/v4/expression" + "reflect" + "testing" +) + +func TestBaseNotificationToBytes(t *testing.T) { + notification := BaseNotification{ + Id: 1, + Type: "Test", + IsRead: true, + } + + bytes, err := notification.ToBytes() + if err != nil { + t.Errorf("Unexpected error: %v", err) + } + + if bytes == nil { + t.Errorf("Expected bytes, got nil") + } +} + +func TestBaseNotificationNewInstance(t *testing.T) { + data := []byte(`{"Notification":null,"Id":1,"Type":"Test","IsRead":true}`) + notification, err := BaseNotification{}.NewInstance(1, data, true) + if err != nil { + t.Errorf("Unexpected error: %v", err) + } + + expected := &BaseNotification{ + Id: 1, + Type: "Test", + IsRead: true, + } + + expression.AssertEqual(t, reflect.DeepEqual(notification, expected), true) +} + +func TestBaseNotificationNewInstanceWithInvalidData(t *testing.T) { + data := []byte(`{"Notification":null,"Id":1,"Type":"Test","IsRead":"invalid"}`) + _, err := BaseNotification{}.NewInstance(1, data, true) + if err == nil { + t.Errorf("Expected error, got nil") + } +} + +func TextExportNotification(t *testing.T) { + // init handler + ReplaceHandlerGlobals(NewHandler()) + + notification := ExportNotification{ + Export: export.WrapperItem{ + Id: uuid.New().String(), + }, + Status: 1, + } + notification.Id = 1 + notification.IsRead = false + + bytes, err := notification.ToBytes() + if err != nil { + t.Errorf("Unexpected error: %v", err) + } + + if bytes == nil { + t.Errorf("Expected bytes, got nil") + } + + t.Log(string(bytes)) + + // find type and create new instance + notifType, ok := 
H().notificationTypes["ExportNotification"] + if !ok { + t.Errorf("Notification type does not exist") + t.FailNow() + } + + instance, err := notifType.NewInstance(1, bytes, false) + if err != nil { + t.Errorf("Notification couldn't be instanced") + t.FailNow() + } + bt, _ := instance.ToBytes() + t.Log(string(bt)) + + expression.AssertEqual(t, string(bytes), string(bt)) +} diff --git a/internals/notifier/notification/postgres_repository.go b/internals/notifier/notification/postgres_repository.go index b5628a7f..aa25a2de 100644 --- a/internals/notifier/notification/postgres_repository.go +++ b/internals/notifier/notification/postgres_repository.go @@ -1,7 +1,6 @@ package notification import ( - "encoding/json" "errors" "time" @@ -26,7 +25,7 @@ func NewPostgresRepository(dbClient *sqlx.DB) Repository { // Create creates a new Notification definition in the repository func (r *PostgresRepository) Create(notif Notification) (int64, error) { - data, err := json.Marshal(notif) + data, err := notif.ToBytes() if err != nil { return -1, err } @@ -81,13 +80,11 @@ func (r *PostgresRepository) Get(id int64) (Notification, error) { } t, ok := H().notificationTypes[notifType] - if !ok { return nil, errors.New("notification type does not exist") } instance, err := t.NewInstance(id, []byte(data), isRead) - if err != nil { return nil, errors.New("notification couldn't be instanced") } @@ -97,7 +94,7 @@ func (r *PostgresRepository) Get(id int64) (Notification, error) { return nil, errors.New("no notification found with this id") } -// GetByRoles returns all notifications related to a certain list of roles +// GetAll returns all notifications from the repository func (r *PostgresRepository) GetAll(queryOptionnal dbutils.DBQueryOptionnal) ([]Notification, error) { // TODO: "ORDER BY" should be an option in dbutils.DBQueryOptionnal @@ -137,13 +134,11 @@ func (r *PostgresRepository) GetAll(queryOptionnal dbutils.DBQueryOptionnal) ([] } t, ok := H().notificationTypes[notifType] - if !ok { 
return nil, errors.New("notification type does not exist") } instance, err := t.NewInstance(id, []byte(data), isRead) - if err != nil { return nil, errors.New("notification couldn't be instanced") } diff --git a/internals/notifier/notifier.go b/internals/notifier/notifier.go index 2846f74c..19162063 100644 --- a/internals/notifier/notifier.go +++ b/internals/notifier/notifier.go @@ -45,7 +45,7 @@ func NewNotifier() *Notifier { cm := NewClientManager() return &Notifier{ clientManager: cm, - cache: make(map[string]time.Time, 0), + cache: make(map[string]time.Time), } } @@ -85,7 +85,7 @@ func (notifier *Notifier) SendToRoles(cacheKey string, timeout time.Duration, no return } - notifFull := notification.R().Get(id) + notifFull, err := notification.R().Get(id) if notifFull == nil { zap.L().Error("Notification not found after creation", zap.Int64("id", id)) } @@ -137,7 +137,7 @@ func (notifier *Notifier) SendToUsers(notif notification.Notification, users []u } } -// Send send a byte slices to a specific websocket client +// Send a byte slices to a specific websocket client func (notifier *Notifier) Send(message []byte, client Client) { if client != nil { client.GetSendChannel() <- message From b80260b96307858590f2483a8632049c3d835628 Mon Sep 17 00:00:00 2001 From: Paul Meyer Date: Tue, 5 Dec 2023 11:31:32 +0100 Subject: [PATCH 19/35] Changed notifications & user.login as main unique id for users --- internals/handlers/notifier_handlers.go | 5 +- .../notifier/notification/notification.go | 22 +- .../notification/notification_export.go | 21 +- .../notification/notification_mock.go | 81 +++++- .../notification/notification_test.go | 232 +++++++++++++++++- internals/notifier/notifier.go | 11 +- internals/router/oidc/oidc_middleware.go | 3 +- internals/router/saml_middleware.go | 3 +- internals/security/users/user.go | 2 +- 9 files changed, 350 insertions(+), 30 deletions(-) diff --git a/internals/handlers/notifier_handlers.go b/internals/handlers/notifier_handlers.go index 
34c5274c..e95f45fe 100644 --- a/internals/handlers/notifier_handlers.go +++ b/internals/handlers/notifier_handlers.go @@ -3,6 +3,7 @@ package handlers import ( "github.com/google/uuid" "github.com/myrteametrics/myrtea-engine-api/v5/internals/export" + "github.com/myrteametrics/myrtea-engine-api/v5/internals/notifier/notification" "net/http" "time" @@ -45,14 +46,14 @@ func NotificationsWSRegister(w http.ResponseWriter, r *http.Request) { zap.L().Error("Add new WS Client to manager", zap.Error(err)) return } - go func(client *notifier.WebsocketClient) { + go func(client *notifier.WebsocketClient) { // temporary for tests zap.L().Info("starting notifier") ticker := time.NewTicker(1 * time.Second) after := time.After(30 * time.Second) for { select { case <-ticker.C: - notifier.C().SendToUsers(ExportNotification{Status: export.StatusPending, Export: export.WrapperItem{Id: uuid.New().String(), FileName: "test.bla"}}, []uuid.UUID{user.ID}) + notifier.C().SendToUsers(notification.ExportNotification{Status: export.StatusPending, Export: export.WrapperItem{Id: uuid.New().String(), FileName: "test.bla"}}, []users.UserWithPermissions{user}) zap.L().Info("send notification") case <-after: return diff --git a/internals/notifier/notification/notification.go b/internals/notifier/notification/notification.go index 02354d4f..20c79ab9 100644 --- a/internals/notifier/notification/notification.go +++ b/internals/notifier/notification/notification.go @@ -8,6 +8,7 @@ import ( type Notification interface { ToBytes() ([]byte, error) NewInstance(id int64, data []byte, isRead bool) (Notification, error) + Equals(notification Notification) bool } // BaseNotification data structure represents a basic notification and her current state @@ -28,9 +29,10 @@ func (n BaseNotification) NewInstance(id int64, data []byte, isRead bool) (Notif notification.Id = id notification.IsRead = isRead notification.Notification = notification - return ¬ification, nil + return notification, nil } +// ToBytes 
convert a notification in a json byte slice to be sent though any required channel func (n BaseNotification) ToBytes() ([]byte, error) { b, err := json.Marshal(n) if err != nil { @@ -38,3 +40,21 @@ func (n BaseNotification) ToBytes() ([]byte, error) { } return b, nil } + +// Equals returns true if the two notifications are equals +func (n BaseNotification) Equals(notification Notification) bool { + notif, ok := notification.(BaseNotification) + if !ok { + return ok + } + if n.Id != notif.Id { + return false + } + if n.IsRead != notif.IsRead { + return false + } + if n.Type != notif.Type { + return false + } + return true +} diff --git a/internals/notifier/notification/notification_export.go b/internals/notifier/notification/notification_export.go index a451fa54..66c3ed5b 100644 --- a/internals/notifier/notification/notification_export.go +++ b/internals/notifier/notification/notification_export.go @@ -3,6 +3,7 @@ package notification import ( "encoding/json" "github.com/myrteametrics/myrtea-engine-api/v5/internals/export" + "reflect" ) type ExportNotification struct { @@ -40,5 +41,23 @@ func (e ExportNotification) NewInstance(id int64, data []byte, isRead bool) (Not notification.Id = id notification.IsRead = isRead notification.Notification = notification - return ¬ification, nil + return notification, nil +} + +// Equals returns true if the two notifications are equals +func (e ExportNotification) Equals(notification Notification) bool { + notif, ok := notification.(ExportNotification) + if !ok { + return ok + } + if !notif.BaseNotification.Equals(e.BaseNotification) { + return false + } + if !reflect.DeepEqual(notif.Export, e.Export) { + return false + } + if notif.Status != e.Status { + return false + } + return true } diff --git a/internals/notifier/notification/notification_mock.go b/internals/notifier/notification/notification_mock.go index 9e79ff93..3fa7dd25 100644 --- a/internals/notifier/notification/notification_mock.go +++ 
b/internals/notifier/notification/notification_mock.go @@ -8,8 +8,6 @@ import ( // MockNotification is an implementation of a notification main type type MockNotification struct { BaseNotification - ID int64 `json:"id"` - Type string `json:"type"` CreationDate time.Time `json:"creationDate"` Groups []int64 `json:"groups"` Level string `json:"level"` @@ -20,18 +18,21 @@ type MockNotification struct { } // NewMockNotification renders a new MockNotification instance -func NewMockNotification(level string, title string, subTitle string, description string, creationDate time.Time, +func NewMockNotification(id int64, level string, title string, subTitle string, description string, creationDate time.Time, groups []int64, context map[string]interface{}) *MockNotification { return &MockNotification{ - Type: "mock", + BaseNotification: BaseNotification{ + Id: id, + Type: "MockNotification", + }, CreationDate: creationDate, - // Groups: groups, - Level: level, - Title: title, - SubTitle: subTitle, - Description: description, - Context: context, + Groups: groups, + Level: level, + Title: title, + SubTitle: subTitle, + Description: description, + Context: context, } } @@ -43,3 +44,63 @@ func (n MockNotification) ToBytes() ([]byte, error) { } return b, nil } + +// NewInstance returns a new instance of a MockNotification +func (n MockNotification) NewInstance(id int64, data []byte, isRead bool) (Notification, error) { + var notification MockNotification + err := json.Unmarshal(data, ¬ification) + if err != nil { + return nil, err + } + notification.Id = id + notification.IsRead = isRead + notification.Notification = notification + return notification, nil +} + +// Equals returns true if the two notifications are equals +func (n MockNotification) Equals(notification Notification) bool { + notif, ok := notification.(MockNotification) + if !ok { + return ok + } + if !notif.BaseNotification.Equals(n.BaseNotification) { + return false + } + if notif.CreationDate != n.CreationDate { + 
return false + } + if notif.Level != n.Level { + return false + } + if notif.Title != n.Title { + return false + } + if notif.SubTitle != n.SubTitle { + return false + } + if notif.Description != n.Description { + return false + } + if notif.Context != nil && n.Context != nil { + if len(notif.Context) != len(n.Context) { + return false + } + for k, v := range notif.Context { + if n.Context[k] != v { + return false + } + } + } else if notif.Context != nil || n.Context != nil { + return false + } + if len(notif.Groups) != len(n.Groups) { + return false + } + for i, v := range notif.Groups { + if n.Groups[i] != v { + return false + } + } + return true +} diff --git a/internals/notifier/notification/notification_test.go b/internals/notifier/notification/notification_test.go index e2ae90be..78d6640a 100644 --- a/internals/notifier/notification/notification_test.go +++ b/internals/notifier/notification/notification_test.go @@ -4,8 +4,8 @@ import ( "github.com/google/uuid" "github.com/myrteametrics/myrtea-engine-api/v5/internals/export" "github.com/myrteametrics/myrtea-sdk/v4/expression" - "reflect" "testing" + "time" ) func TestBaseNotificationToBytes(t *testing.T) { @@ -26,30 +26,40 @@ func TestBaseNotificationToBytes(t *testing.T) { } func TestBaseNotificationNewInstance(t *testing.T) { - data := []byte(`{"Notification":null,"Id":1,"Type":"Test","IsRead":true}`) + s := BaseNotification{ + Id: 1, + Type: "Test", + IsRead: true, + } + se, e := s.ToBytes() + if e == nil { + t.Log(string(se)) + } + + data := []byte(`{"id":1,"type":"Test","isRead":true}`) notification, err := BaseNotification{}.NewInstance(1, data, true) if err != nil { t.Errorf("Unexpected error: %v", err) } - expected := &BaseNotification{ + expected := BaseNotification{ Id: 1, Type: "Test", IsRead: true, } - expression.AssertEqual(t, reflect.DeepEqual(notification, expected), true) + expression.AssertEqual(t, expected.Equals(notification), true) } func TestBaseNotificationNewInstanceWithInvalidData(t 
*testing.T) { - data := []byte(`{"Notification":null,"Id":1,"Type":"Test","IsRead":"invalid"}`) + data := []byte(`{"id":1,"type":"Test","isRead":"invalid"}`) _, err := BaseNotification{}.NewInstance(1, data, true) if err == nil { t.Errorf("Expected error, got nil") } } -func TextExportNotification(t *testing.T) { +func TestExportNotification(t *testing.T) { // init handler ReplaceHandlerGlobals(NewHandler()) @@ -90,3 +100,213 @@ func TextExportNotification(t *testing.T) { expression.AssertEqual(t, string(bytes), string(bt)) } + +func TestBaseNotification_Equals(t *testing.T) { + notif := BaseNotification{ + Id: 1, + Type: "Test", + IsRead: true, + } + + expression.AssertEqual(t, notif.Equals(BaseNotification{ + Id: 1, + Type: "Test", + IsRead: true, + }), true) + + expression.AssertEqual(t, notif.Equals(BaseNotification{ + Id: 2, + Type: "Test", + IsRead: true, + }), false) + + expression.AssertEqual(t, notif.Equals(BaseNotification{ + Id: 1, + Type: "Test2", + IsRead: true, + }), false) + + expression.AssertEqual(t, notif.Equals(BaseNotification{ + Id: 1, + Type: "Test", + IsRead: false, + }), false) +} + +func TestMockNotification_Equals(t *testing.T) { + baseNotification := BaseNotification{ + Id: 1, + Type: "Test", + IsRead: true, + } + now := time.Now() + notif := MockNotification{ + BaseNotification: baseNotification, + CreationDate: now, + Level: "info", + Title: "title", + SubTitle: "subTitle", + Description: "description", + Context: map[string]interface{}{"test": "test"}, + Groups: []int64{1, 2}, + } + + expression.AssertEqual(t, notif.Equals(MockNotification{ + BaseNotification: baseNotification, + CreationDate: now, + Level: "info", + Title: "title", + SubTitle: "subTitle", + Description: "description", + Context: map[string]interface{}{"test": "test"}, + Groups: []int64{1, 2}, + }), true) + + expression.AssertEqual(t, notif.Equals(MockNotification{ + BaseNotification: BaseNotification{ + Id: 2, + Type: "Test", + IsRead: true, + }, + CreationDate: now, 
+ Level: "info", + Title: "title", + SubTitle: "subTitle", + Description: "description", + Context: map[string]interface{}{"test": "test"}, + Groups: []int64{1, 2}, + }), false) + + expression.AssertEqual(t, notif.Equals(MockNotification{ + BaseNotification: baseNotification, + CreationDate: time.Now().AddDate(1, 0, 0), + Level: "info", + Title: "title", + SubTitle: "subTitle", + Description: "description", + Context: map[string]interface{}{"test": "test"}, + Groups: []int64{1, 2}, + }), false) + + expression.AssertEqual(t, notif.Equals(MockNotification{ + BaseNotification: baseNotification, + CreationDate: now, + Level: "infos", + Title: "title", + SubTitle: "subTitle", + Description: "description", + Context: map[string]interface{}{"test": "test"}, + Groups: []int64{1, 2}, + }), false) + + expression.AssertEqual(t, notif.Equals(MockNotification{ + BaseNotification: baseNotification, + CreationDate: now, + Level: "info", + Title: "titles", + SubTitle: "subTitle", + Description: "description", + Context: map[string]interface{}{"test": "test"}, + Groups: []int64{1, 2}, + }), false) + + expression.AssertEqual(t, notif.Equals(MockNotification{ + BaseNotification: baseNotification, + CreationDate: now, + Level: "info", + Title: "title", + SubTitle: "subTitles", + Description: "description", + Context: map[string]interface{}{"test": "test"}, + Groups: []int64{1, 2}, + }), false) + expression.AssertEqual(t, notif.Equals(MockNotification{ + BaseNotification: baseNotification, + CreationDate: now, + Level: "info", + Title: "title", + SubTitle: "subTitle", + Description: "descriptions", + Context: map[string]interface{}{"test": "test"}, + Groups: []int64{1, 2}, + }), false) + + expression.AssertEqual(t, notif.Equals(MockNotification{ + BaseNotification: baseNotification, + CreationDate: now, + Level: "info", + Title: "title", + SubTitle: "subTitle", + Description: "description", + Context: map[string]interface{}{"tests": "test"}, + Groups: []int64{1, 2}, + }), false) + + 
expression.AssertEqual(t, notif.Equals(MockNotification{ + BaseNotification: baseNotification, + CreationDate: now, + Level: "info", + Title: "title", + SubTitle: "subTitle", + Description: "description", + Context: map[string]interface{}{"test": "test"}, + Groups: []int64{1, 2, 3}, + }), false) + +} + +func TestExportNotification_Equals(t *testing.T) { + id := uuid.New().String() + exportNotification := ExportNotification{ + BaseNotification: BaseNotification{ + Id: 1, + Type: "Test", + IsRead: true, + }, + Export: export.WrapperItem{ + Id: id, + }, + Status: 1, + } + + expression.AssertEqual(t, exportNotification.Equals(ExportNotification{ + BaseNotification: BaseNotification{ + Id: 1, + Type: "Test", + IsRead: true, + }, + Status: 1, + Export: export.WrapperItem{Id: id}, + }), true) + + expression.AssertEqual(t, exportNotification.Equals(ExportNotification{ + BaseNotification: BaseNotification{ + Id: 2, + Type: "Test", + IsRead: true, + }, + Status: 1, + Export: export.WrapperItem{Id: id}, + }), false) + + expression.AssertEqual(t, exportNotification.Equals(ExportNotification{ + BaseNotification: BaseNotification{ + Id: 1, + Type: "Test", + IsRead: true, + }, + Status: 2, + Export: export.WrapperItem{Id: id}, + }), false) + + expression.AssertEqual(t, exportNotification.Equals(ExportNotification{ + BaseNotification: BaseNotification{ + Id: 1, + Type: "Test", + IsRead: true, + }, + Status: 1, + Export: export.WrapperItem{Id: uuid.New().String()}, + }), false) + +} diff --git a/internals/notifier/notifier.go b/internals/notifier/notifier.go index 19162063..00ca6f54 100644 --- a/internals/notifier/notifier.go +++ b/internals/notifier/notifier.go @@ -1,6 +1,7 @@ package notifier import ( + "github.com/myrteametrics/myrtea-engine-api/v5/internals/security/users" "sync" "time" @@ -126,10 +127,10 @@ func (notifier *Notifier) Broadcast(notif notification.Notification) { } // SendToUsers send a notification to users corresponding the input ids -func (notifier *Notifier) 
SendToUsers(notif notification.Notification, users []uuid.UUID) { +func (notifier *Notifier) SendToUsers(notif notification.Notification, users []users.UserWithPermissions) { if users != nil && len(users) > 0 { - for _, userID := range users { - clients := notifier.findClientsByUserID(userID) + for _, user := range users { + clients := notifier.findClientsByUserLogin(user.Login) for _, client := range clients { notifier.sendToClient(notif, client) } @@ -144,10 +145,10 @@ func (notifier *Notifier) Send(message []byte, client Client) { } } -func (notifier *Notifier) findClientsByUserID(id uuid.UUID) []Client { +func (notifier *Notifier) findClientsByUserLogin(login string) []Client { clients := make([]Client, 0) for _, client := range notifier.clientManager.GetClients() { - if client.GetUser() != nil && client.GetUser().ID == id { + if client.GetUser() != nil && client.GetUser().Login == login { clients = append(clients, client) } } diff --git a/internals/router/oidc/oidc_middleware.go b/internals/router/oidc/oidc_middleware.go index d54b193b..3fbc5627 100644 --- a/internals/router/oidc/oidc_middleware.go +++ b/internals/router/oidc/oidc_middleware.go @@ -3,7 +3,6 @@ package oidcAuth import ( "context" "errors" - "fmt" "net/http" "strings" "time" @@ -129,7 +128,7 @@ func ContextMiddleware(next http.Handler) http.Handler { loggerR := r.Context().Value(models.ContextKeyLoggerR) if loggerR != nil { - gorillacontext.Set(loggerR.(*http.Request), models.UserLogin, fmt.Sprintf("%s(%d)", up.User.Login, up.User.ID)) + gorillacontext.Set(loggerR.(*http.Request), models.UserLogin, up.User.Login) } ctx := context.WithValue(r.Context(), models.ContextKeyUser, up) diff --git a/internals/router/saml_middleware.go b/internals/router/saml_middleware.go index 5b02a707..ec94d776 100644 --- a/internals/router/saml_middleware.go +++ b/internals/router/saml_middleware.go @@ -6,7 +6,6 @@ import ( "crypto/tls" "crypto/x509" "errors" - "fmt" "net/http" "net/url" @@ -217,7 +216,7 @@ func (m 
*SamlSPMiddleware) ContextMiddleware(next http.Handler) http.Handler { loggerR := r.Context().Value(models.ContextKeyLoggerR) if loggerR != nil { - gorillacontext.Set(loggerR.(*http.Request), models.UserLogin, fmt.Sprintf("%s(%d)", up.User.Login, up.User.ID)) + gorillacontext.Set(loggerR.(*http.Request), models.UserLogin, up.User.Login) } ctx := context.WithValue(r.Context(), models.ContextKeyUser, up) diff --git a/internals/security/users/user.go b/internals/security/users/user.go index 2793ee61..542dca8b 100644 --- a/internals/security/users/user.go +++ b/internals/security/users/user.go @@ -13,7 +13,7 @@ import ( // User is used as the main user struct type User struct { ID uuid.UUID `json:"id"` - Login string `json:"login"` + Login string `json:"login"` // is the unique identifier of the user, through the different connection modes Created time.Time `json:"created"` LastName string `json:"lastName"` FirstName string `json:"firstName"` From d8950d500438fd733c2ce58a51aabacb48188e72 Mon Sep 17 00:00:00 2001 From: Paul Meyer Date: Tue, 5 Dec 2023 16:33:59 +0100 Subject: [PATCH 20/35] improved notification system --- config/engine-api.toml | 5 +- internals/app/services.go | 3 +- internals/handlers/notification_handlers.go | 20 ++-- internals/notifier/notification/handler.go | 42 +++++++- .../notifier/notification/handler_test.go | 12 +-- .../notifier/notification/notification.go | 9 +- .../notification/notification_export.go | 7 ++ .../notification/notification_mock.go | 6 ++ .../notification/notification_test.go | 40 ++++++- .../notification/postgres_repository.go | 100 +++++++++++------- internals/notifier/notification/repository.go | 17 ++- internals/notifier/notifier.go | 96 +++++++++++------ internals/notifier/websocket_client_test.go | 53 ++++++++++ 13 files changed, 304 insertions(+), 106 deletions(-) diff --git a/config/engine-api.toml b/config/engine-api.toml index 29b5ccef..666195d8 100644 --- a/config/engine-api.toml +++ b/config/engine-api.toml @@ -231,4 
+231,7 @@ AUTHENTICATION_OIDC_FRONT_END_URL = "http://127.0.0.1:4200" # Note: The key length is critical for the AES encryption algorithm used for state encryption/decryption. # It must be exactly 16, 24 or 32 characters long. # Default value: "thisis24characterslongs." (24 characters) -AUTHENTICATION_OIDC_ENCRYPTION_KEY = "thisis24characterslongs." \ No newline at end of file +AUTHENTICATION_OIDC_ENCRYPTION_KEY = "thisis24characterslongs." + +# NOTIFICATION_LIFETIME: The lifetime of a notification in the database. +NOTIFICATION_LIFETIME = "1w" \ No newline at end of file diff --git a/internals/app/services.go b/internals/app/services.go index 23ca2662..245831a5 100644 --- a/internals/app/services.go +++ b/internals/app/services.go @@ -78,7 +78,8 @@ func stopServices() { } func initNotifier() { - notification.ReplaceHandlerGlobals(notification.NewHandler()) + notificationLifetime := viper.GetDuration("NOTIFICATION_LIFETIME") + notification.ReplaceHandlerGlobals(notification.NewHandler(notificationLifetime)) notifier.ReplaceGlobals(notifier.NewNotifier()) } diff --git a/internals/handlers/notification_handlers.go b/internals/handlers/notification_handlers.go index 6e9ab55c..4abd79c1 100644 --- a/internals/handlers/notification_handlers.go +++ b/internals/handlers/notification_handlers.go @@ -5,7 +5,6 @@ import ( "strconv" "github.com/go-chi/chi/v5" - "github.com/google/uuid" "github.com/myrteametrics/myrtea-engine-api/v5/internals/dbutils" "github.com/myrteametrics/myrtea-engine-api/v5/internals/handlers/render" "github.com/myrteametrics/myrtea-engine-api/v5/internals/models" @@ -23,7 +22,6 @@ import ( // @Param nhit query int false "Hit per page" // @Param offset query int false "Offset number for pagination" // @Security Bearer -// @Success 200 {array} notification.FrontNotification "list of notifications" // @Failure 500 "internal server error" // @Router /engine/notifications [get] func GetNotifications(w http.ResponseWriter, r *http.Request) { @@ -57,17 +55,12 
@@ func GetNotifications(w http.ResponseWriter, r *http.Request) { } user := _user.(users.UserWithPermissions) - roleIDs := make([]uuid.UUID, 0) - for _, role := range user.Roles { - roleIDs = append(roleIDs, role.ID) - } - - queryOptionnal := dbutils.DBQueryOptionnal{ + queryOptional := dbutils.DBQueryOptionnal{ Limit: nhit, Offset: offset, MaxAge: maxAge, } - notifications, err := notification.R().GetAll(queryOptionnal) + notifications, err := notification.R().GetAll(queryOptional, user.Login) if err != nil { zap.L().Error("Error getting notifications", zap.Error(err)) render.Error(w, r, render.ErrAPIDBSelectFailed, err) @@ -100,11 +93,18 @@ func UpdateRead(w http.ResponseWriter, r *http.Request) { _status := r.URL.Query().Get("status") status := false + _user := r.Context().Value(models.ContextKeyUser) + if _user == nil { + zap.L().Warn("No context user provided") + return + } + user := _user.(users.UserWithPermissions) + if _status == "true" { status = true } - err = notification.R().UpdateRead(idNotif, status) + err = notification.R().UpdateRead(idNotif, status, user.Login) if err != nil { zap.L().Error("Error while updating notifications", zap.Error(err)) render.Error(w, r, render.ErrAPIDBUpdateFailed, err) diff --git a/internals/notifier/notification/handler.go b/internals/notifier/notification/handler.go index f65dff28..6bb35484 100644 --- a/internals/notifier/notification/handler.go +++ b/internals/notifier/notification/handler.go @@ -1,6 +1,11 @@ package notification -import "sync" +import ( + "context" + "go.uber.org/zap" + "sync" + "time" +) var ( _globalHandlerMu sync.RWMutex @@ -25,14 +30,23 @@ func ReplaceHandlerGlobals(handler *Handler) func() { } type Handler struct { - notificationTypes map[string]Notification + notificationTypes map[string]Notification + notificationLifetime time.Duration } -func NewHandler() *Handler { +// NewHandler returns a pointer to a new instance of Handler +func NewHandler(notificationLifetime time.Duration) *Handler { 
handler := &Handler{ - notificationTypes: make(map[string]Notification), + notificationTypes: make(map[string]Notification), + notificationLifetime: notificationLifetime, } handler.RegisterNotificationTypes() + + // useless to start cleaner if lifetime is less than 0 + if notificationLifetime > 0 { + go handler.startCleaner(context.Background()) + } + return handler } @@ -46,7 +60,27 @@ func (h *Handler) UnregisterNotificationType(notification Notification) { delete(h.notificationTypes, getType(notification)) } +// RegisterNotificationTypes register all notification types func (h *Handler) RegisterNotificationTypes() { h.RegisterNotificationType(BaseNotification{}) h.RegisterNotificationType(ExportNotification{}) } + +// startCleaner start a ticker to clean expired notifications in database every 24 hours +func (h *Handler) startCleaner(context context.Context) { + ticker := time.NewTicker(time.Hour * 24) + defer ticker.Stop() + for { + select { + case <-context.Done(): + return + case <-ticker.C: + affectedRows, err := R().CleanExpired(h.notificationLifetime) + if err != nil { + zap.L().Error("Error while cleaning expired notifications", zap.Error(err)) + } else { + zap.L().Debug("Cleaned expired notifications", zap.Int64("affectedRows", affectedRows)) + } + } + } +} diff --git a/internals/notifier/notification/handler_test.go b/internals/notifier/notification/handler_test.go index 198e48e9..1ae6f0b8 100644 --- a/internals/notifier/notification/handler_test.go +++ b/internals/notifier/notification/handler_test.go @@ -6,13 +6,13 @@ import ( ) func TestNewHandler(t *testing.T) { - handler := NewHandler() + handler := NewHandler(0) expression.AssertNotEqual(t, handler, nil, "NewHandler() should not return nil") expression.AssertNotEqual(t, len(handler.notificationTypes), 0, "NewHandler() should not return an empty notificationTypes") } func TestHandler_RegisterNotificationType_AddsNewType(t *testing.T) { - handler := NewHandler() + handler := NewHandler(0) 
notification := BaseNotification{} handler.RegisterNotificationType(notification) _, exists := handler.notificationTypes[getType(notification)] @@ -20,7 +20,7 @@ func TestHandler_RegisterNotificationType_AddsNewType(t *testing.T) { } func TestHandler_RegisterNotificationType_OverwritesExistingType(t *testing.T) { - handler := NewHandler() + handler := NewHandler(0) notification := BaseNotification{} handler.RegisterNotificationType(notification) notification2 := BaseNotification{} // Assuming this has the same type as the first one @@ -29,7 +29,7 @@ func TestHandler_RegisterNotificationType_OverwritesExistingType(t *testing.T) { } func TestHandler_UnregisterNotificationType_RemovesExistingType(t *testing.T) { - handler := NewHandler() + handler := NewHandler(0) notification := BaseNotification{} handler.RegisterNotificationType(notification) handler.UnregisterNotificationType(notification) @@ -38,7 +38,7 @@ func TestHandler_UnregisterNotificationType_RemovesExistingType(t *testing.T) { } func TestHandler_UnregisterNotificationType_DoesNothingForNonExistingType(t *testing.T) { - handler := NewHandler() + handler := NewHandler(0) notification := BaseNotification{} handler.UnregisterNotificationType(notification) _, exists := handler.notificationTypes[getType(notification)] @@ -46,7 +46,7 @@ func TestHandler_UnregisterNotificationType_DoesNothingForNonExistingType(t *tes } func TestReplaceHandlerGlobals_ReplacesGlobalHandler(t *testing.T) { - handler := NewHandler() + handler := NewHandler(0) prevHandler := H() undo := ReplaceHandlerGlobals(handler) expression.AssertEqual(t, H(), handler, "ReplaceHandlerGlobals() should replace global handler") diff --git a/internals/notifier/notification/notification.go b/internals/notifier/notification/notification.go index 20c79ab9..44b334bc 100644 --- a/internals/notifier/notification/notification.go +++ b/internals/notifier/notification/notification.go @@ -9,6 +9,7 @@ type Notification interface { ToBytes() ([]byte, error) 
NewInstance(id int64, data []byte, isRead bool) (Notification, error) Equals(notification Notification) bool + SetId(id int64) Notification } // BaseNotification data structure represents a basic notification and her current state @@ -32,7 +33,7 @@ func (n BaseNotification) NewInstance(id int64, data []byte, isRead bool) (Notif return notification, nil } -// ToBytes convert a notification in a json byte slice to be sent though any required channel +// ToBytes convert a notification in a json byte slice to be sent through any required channel func (n BaseNotification) ToBytes() ([]byte, error) { b, err := json.Marshal(n) if err != nil { @@ -58,3 +59,9 @@ func (n BaseNotification) Equals(notification Notification) bool { } return true } + +// SetId set the notification ID +func (n BaseNotification) SetId(id int64) Notification { + n.Id = id + return n +} diff --git a/internals/notifier/notification/notification_export.go b/internals/notifier/notification/notification_export.go index 66c3ed5b..98e37780 100644 --- a/internals/notifier/notification/notification_export.go +++ b/internals/notifier/notification/notification_export.go @@ -23,6 +23,7 @@ func NewExportNotification(id int64, export export.WrapperItem, status int) *Exp } } +// ToBytes convert a notification in a json byte slice to be sent through any required channel func (e ExportNotification) ToBytes() ([]byte, error) { b, err := json.Marshal(e) if err != nil { @@ -61,3 +62,9 @@ func (e ExportNotification) Equals(notification Notification) bool { } return true } + +// SetId set the notification ID +func (e ExportNotification) SetId(id int64) Notification { + e.Id = id + return e +} diff --git a/internals/notifier/notification/notification_mock.go b/internals/notifier/notification/notification_mock.go index 3fa7dd25..f147dc13 100644 --- a/internals/notifier/notification/notification_mock.go +++ b/internals/notifier/notification/notification_mock.go @@ -104,3 +104,9 @@ func (n MockNotification) 
Equals(notification Notification) bool { } return true } + +// SetId set the notification ID +func (n MockNotification) SetId(id int64) Notification { + n.Id = id + return n +} diff --git a/internals/notifier/notification/notification_test.go b/internals/notifier/notification/notification_test.go index 78d6640a..ed3467d7 100644 --- a/internals/notifier/notification/notification_test.go +++ b/internals/notifier/notification/notification_test.go @@ -61,7 +61,7 @@ func TestBaseNotificationNewInstanceWithInvalidData(t *testing.T) { func TestExportNotification(t *testing.T) { // init handler - ReplaceHandlerGlobals(NewHandler()) + ReplaceHandlerGlobals(NewHandler(0)) notification := ExportNotification{ Export: export.WrapperItem{ @@ -87,13 +87,11 @@ func TestExportNotification(t *testing.T) { notifType, ok := H().notificationTypes["ExportNotification"] if !ok { t.Errorf("Notification type does not exist") - t.FailNow() } instance, err := notifType.NewInstance(1, bytes, false) if err != nil { t.Errorf("Notification couldn't be instanced") - t.FailNow() } bt, _ := instance.ToBytes() t.Log(string(bt)) @@ -310,3 +308,39 @@ func TestExportNotification_Equals(t *testing.T) { }), false) } + +func TestBaseNotification_SetId(t *testing.T) { + notif, err := BaseNotification{}.NewInstance(1, []byte(`{}`), true) + if err != nil { + t.Errorf("Error: %v", err) + } + + notif = notif.SetId(2) + baseNotification, ok := notif.(BaseNotification) + expression.AssertEqual(t, ok, true) + expression.AssertEqual(t, baseNotification.Id, int64(2)) +} + +func TestExportNotification_SetId(t *testing.T) { + notif, err := ExportNotification{}.NewInstance(1, []byte(`{}`), true) + if err != nil { + t.Errorf("Error: %v", err) + } + + notif = notif.SetId(2) + exportNotification, ok := notif.(ExportNotification) + expression.AssertEqual(t, ok, true) + expression.AssertEqual(t, exportNotification.Id, int64(2)) +} + +func TestMockNotification_SetId(t *testing.T) { + notif, err := 
MockNotification{}.NewInstance(1, []byte(`{}`), true) + if err != nil { + t.Errorf("Error: %v", err) + } + + notif = notif.SetId(2) + mockNotification, ok := notif.(MockNotification) + expression.AssertEqual(t, ok, true) + expression.AssertEqual(t, mockNotification.Id, int64(2)) +} diff --git a/internals/notifier/notification/postgres_repository.go b/internals/notifier/notification/postgres_repository.go index aa25a2de..c4761d17 100644 --- a/internals/notifier/notification/postgres_repository.go +++ b/internals/notifier/notification/postgres_repository.go @@ -4,6 +4,7 @@ import ( "errors" "time" + sq "github.com/Masterminds/squirrel" "github.com/jmoiron/sqlx" "github.com/myrteametrics/myrtea-engine-api/v5/internals/dbutils" ) @@ -24,21 +25,21 @@ func NewPostgresRepository(dbClient *sqlx.DB) Repository { } // Create creates a new Notification definition in the repository -func (r *PostgresRepository) Create(notif Notification) (int64, error) { +func (r *PostgresRepository) Create(notif Notification, userLogin string) (int64, error) { data, err := notif.ToBytes() if err != nil { return -1, err } ts := time.Now().Truncate(1 * time.Millisecond).UTC() - query := `INSERT INTO notifications_history_v1 (id, data, type, created_at) VALUES (DEFAULT, :data, :type :created_at) RETURNING id` - params := map[string]interface{}{ - "data": data, - "type": getType(notif), - "created_at": ts, - } - rows, err := r.conn.NamedQuery(query, params) + insertStatement := newStatement(). + Insert("notifications_history_v1"). + Columns("id", "data", "type", "user_login", "created_at"). + Values(sq.Expr("DEFAULT"), data, getType(notif), userLogin, ts). 
+ Suffix("RETURNING id") + + rows, err := insertStatement.RunWith(r.conn.DB).Query() if err != nil { return -1, err } @@ -54,15 +55,13 @@ func (r *PostgresRepository) Create(notif Notification) (int64, error) { } // Get returns a notification by its ID -func (r *PostgresRepository) Get(id int64) (Notification, error) { - - // TODO: "ORDER BY" should be an option in dbutils.DBQueryOptionnal - query := `SELECT id, data, isread, type FROM notifications_history_v1 WHERE id = :id` - params := map[string]interface{}{ - "id": id, - } +func (r *PostgresRepository) Get(id int64, userLogin string) (Notification, error) { + getStatement := newStatement(). + Select("id", "data", "isread", "type"). + Where(sq.And{sq.Eq{"id": id}, sq.Eq{"user_login": userLogin}}). + From("notifications_history_v1") - rows, err := r.conn.NamedQuery(query, params) + rows, err := getStatement.RunWith(r.conn.DB).Query() if err != nil { return nil, errors.New("couldn't retrieve any notification with this id. The query is equal to: " + err.Error()) } @@ -74,7 +73,7 @@ func (r *PostgresRepository) Get(id int64) (Notification, error) { var isRead bool var notifType string - err := rows.Scan(&id, &data, &isRead, ¬ifType) + err = rows.Scan(&id, &data, &isRead, ¬ifType) if err != nil { return nil, errors.New("couldn't retrieve any notification. The query is equal to: " + err.Error()) } @@ -95,26 +94,28 @@ func (r *PostgresRepository) Get(id int64) (Notification, error) { } // GetAll returns all notifications from the repository -func (r *PostgresRepository) GetAll(queryOptionnal dbutils.DBQueryOptionnal) ([]Notification, error) { +func (r *PostgresRepository) GetAll(queryOptionnal dbutils.DBQueryOptionnal, userLogin string) ([]Notification, error) { + getStatement := newStatement(). + Select("id", "data", "isread", "type"). + Where(sq.Eq{"user_login": userLogin}). 
+ From("notifications_history_v1") - // TODO: "ORDER BY" should be an option in dbutils.DBQueryOptionnal - query := `SELECT id, data, isread, type FROM notifications_history_v1` - params := map[string]interface{}{} if queryOptionnal.MaxAge > 0 { - query += ` WHERE created_at > :created_at` - params["created_at"] = time.Now().UTC().Add(-1 * queryOptionnal.MaxAge) + getStatement = getStatement.Where(sq.Gt{"created_at": time.Now().UTC().Add(-1 * queryOptionnal.MaxAge)}) } - query += ` ORDER BY created_at DESC` + if queryOptionnal.Limit > 0 { - query += ` LIMIT :limit` - params["limit"] = queryOptionnal.Limit + getStatement = getStatement.Limit(uint64(queryOptionnal.Limit)) } + if queryOptionnal.Offset > 0 { - query += ` OFFSET :offset` - params["offset"] = queryOptionnal.Offset + getStatement = getStatement.Offset(uint64(queryOptionnal.Offset)) } - rows, err := r.conn.NamedQuery(query, params) + // TODO: "ORDER BY" should be an option in dbutils.DBQueryOptionnal + getStatement = getStatement.OrderBy("created_at DESC") + + rows, err := getStatement.RunWith(r.conn.DB).Query() if err != nil { return nil, errors.New("couldn't retrieve any notification with these roles. The query is equal to: " + err.Error()) } @@ -128,7 +129,7 @@ func (r *PostgresRepository) GetAll(queryOptionnal dbutils.DBQueryOptionnal) ([] var isRead bool var notifType string - err := rows.Scan(&id, &data, &isRead, ¬ifType) + err = rows.Scan(&id, &data, &isRead, ¬ifType) if err != nil { return nil, errors.New("couldn't scan the notification data:" + err.Error()) } @@ -152,12 +153,12 @@ func (r *PostgresRepository) GetAll(queryOptionnal dbutils.DBQueryOptionnal) ([] } // Delete deletes a notification from the repository by its id -func (r *PostgresRepository) Delete(id int64) error { - query := `DELETE FROM notifications_history_v1 WHERE id = :id` +func (r *PostgresRepository) Delete(id int64, userLogin string) error { + deleteStatement := newStatement(). + Delete("notifications_history_v1"). 
+ Where(sq.And{sq.Eq{"id": id}, sq.Eq{"user_login": userLogin}}) - res, err := r.conn.NamedExec(query, map[string]interface{}{ - "id": id, - }) + res, err := deleteStatement.RunWith(r.conn.DB).Exec() if err != nil { return err } @@ -172,13 +173,13 @@ func (r *PostgresRepository) Delete(id int64) error { } // UpdateRead updates a notification status by changing the isRead state to true once it has been read -func (r *PostgresRepository) UpdateRead(id int64, status bool) error { - query := `UPDATE notifications_history_v1 SET isread = :status WHERE id = :id` +func (r *PostgresRepository) UpdateRead(id int64, status bool, userLogin string) error { + update := newStatement(). + Update("notifications_history_v1"). + Set("isread", status). + Where(sq.And{sq.Eq{"id": id}, sq.Eq{"user_login": userLogin}}) - res, err := r.conn.NamedExec(query, map[string]interface{}{ - "status": status, - "id": id, - }) + res, err := update.RunWith(r.conn.DB).Exec() if err != nil { return err } @@ -191,3 +192,20 @@ func (r *PostgresRepository) UpdateRead(id int64, status bool) error { } return nil } + +// CleanExpired deletes all notifications older than the given lifetime +func (r *PostgresRepository) CleanExpired(lifetime time.Duration) (int64, error) { + deleteStatement := newStatement(). + Delete("notifications_history_v1"). 
+ Where(sq.Lt{"created_at": time.Now().UTC().Add(-1 * lifetime)}) + + res, err := deleteStatement.RunWith(r.conn.DB).Exec() + if err != nil { + return 0, err + } + i, err := res.RowsAffected() + if err != nil { + return 0, err + } + return i, nil +} diff --git a/internals/notifier/notification/repository.go b/internals/notifier/notification/repository.go index 5e91ed2a..89d95d44 100644 --- a/internals/notifier/notification/repository.go +++ b/internals/notifier/notification/repository.go @@ -1,7 +1,9 @@ package notification import ( + sq "github.com/Masterminds/squirrel" "sync" + "time" "github.com/myrteametrics/myrtea-engine-api/v5/internals/dbutils" ) @@ -10,11 +12,12 @@ import ( // (in-memory map, sql database, in-memory cache, file system, ...) // It allows standard CRUD operation on facts type Repository interface { - Create(notif Notification) (int64, error) - Get(id int64) (Notification, error) - GetAll(queryOptionnal dbutils.DBQueryOptionnal) ([]Notification, error) - Delete(id int64) error - UpdateRead(id int64, state bool) error + Create(notif Notification, userLogin string) (int64, error) + Get(id int64, userLogin string) (Notification, error) + GetAll(queryOptionnal dbutils.DBQueryOptionnal, userLogin string) ([]Notification, error) + Delete(id int64, userLogin string) error + UpdateRead(id int64, state bool, userLogin string) error + CleanExpired(lifetime time.Duration) (int64, error) } var ( @@ -40,3 +43,7 @@ func ReplaceGlobals(repository Repository) func() { _globalRepository = repository return func() { ReplaceGlobals(prev) } } + +func newStatement() sq.StatementBuilderType { + return sq.StatementBuilder.PlaceholderFormat(sq.Dollar) +} diff --git a/internals/notifier/notifier.go b/internals/notifier/notifier.go index 00ca6f54..6a1d3933 100644 --- a/internals/notifier/notifier.go +++ b/internals/notifier/notifier.go @@ -62,6 +62,7 @@ func (notifier *Notifier) Unregister(client Client) error { return notifier.clientManager.Unregister(client) } +// 
verifyCache check if a notification has already been sent func (notifier *Notifier) verifyCache(key string, timeout time.Duration) bool { if val, ok := notifier.cache[key]; ok && time.Now().UTC().Before(val) { return false @@ -70,43 +71,52 @@ func (notifier *Notifier) verifyCache(key string, timeout time.Duration) bool { return true } -// SendToRoles send a notification to every user related to the input list of roles -func (notifier *Notifier) SendToRoles(cacheKey string, timeout time.Duration, notif notification.Notification, roles []uuid.UUID) { - - zap.L().Debug("notifier.SendToRoles", zap.Any("roles", roles), zap.Any("notification", notif)) - - if cacheKey != "" && !notifier.verifyCache(cacheKey, timeout) { - zap.L().Debug("Notification send skipped") - return - } - - id, err := notification.R().Create(notif) - if err != nil { - zap.L().Error("Add notification to history", zap.Error(err)) - return - } - - notifFull, err := notification.R().Get(id) - if notifFull == nil { - zap.L().Error("Notification not found after creation", zap.Int64("id", id)) +func (notifier *Notifier) CleanCache() { + for key, val := range notifier.cache { + if time.Now().UTC().After(val) { + delete(notifier.cache, key) + } } - - // FIXME: This should be fully reworking after security refactoring and removal of groups - - // if roles != nil && len(roles) > 0 { - // clients := make(map[Client]bool, 0) - // for _, roleID := range roles { - // roleClients := notifier.findClientsByRoleID(roleID) - // for _, client := range roleClients { - // clients[client] = true - // } - // } - // for client := range clients { - // notifier.sendToClient(notifFull, client) - // } - // } } +// TODO: renew this +//// SendToRoles send a notification to every user related to the input list of roles +//func (notifier *Notifier) SendToRoles(cacheKey string, timeout time.Duration, notif notification.Notification, roles []uuid.UUID) { +// +// zap.L().Debug("notifier.SendToRoles", zap.Any("roles", roles), 
zap.Any("notification", notif)) +// +// if cacheKey != "" && !notifier.verifyCache(cacheKey, timeout) { +// zap.L().Debug("Notification send skipped") +// return +// } +// +// id, err := notification.R().Create(notif, "") +// if err != nil { +// zap.L().Error("Add notification to history", zap.Error(err)) +// return +// } +// +// notifFull, err := notification.R().Get(id) +// if notifFull == nil { +// zap.L().Error("Notification not found after creation", zap.Int64("id", id)) +// } +// +// // FIXME: This should be fully reworking after security refactoring and removal of groups +// +// // if roles != nil && len(roles) > 0 { +// // clients := make(map[Client]bool, 0) +// // for _, roleID := range roles { +// // roleClients := notifier.findClientsByRoleID(roleID) +// // for _, client := range roleClients { +// // clients[client] = true +// // } +// // } +// // for client := range clients { +// // notifier.sendToClient(notifFull, client) +// // } +// // } +//} + // sendToClient convert and send a notification to a specific client // Every multiplexing function must call this function in the end to send message func (notifier *Notifier) sendToClient(notif notification.Notification, client Client) { @@ -138,6 +148,22 @@ func (notifier *Notifier) SendToUsers(notif notification.Notification, users []u } } +// SendToUser send a notification to a specific user +func (notifier *Notifier) SendToUser(notif notification.Notification, user users.UserWithPermissions) error { + id, err := notification.R().Create(notif, user.Login) + if err != nil { + zap.L().Error("Add notification to history", zap.Error(err)) + return err + } + + notif = notif.SetId(id) + clients := notifier.findClientsByUserLogin(user.Login) + for _, client := range clients { + notifier.sendToClient(notif, client) + } + return nil +} + // Send a byte slices to a specific websocket client func (notifier *Notifier) Send(message []byte, client Client) { if client != nil { @@ -145,6 +171,7 @@ func (notifier 
*Notifier) Send(message []byte, client Client) { } } +// findClientsByUserLogin returns a list of clients corresponding to the input login func (notifier *Notifier) findClientsByUserLogin(login string) []Client { clients := make([]Client, 0) for _, client := range notifier.clientManager.GetClients() { @@ -155,6 +182,7 @@ func (notifier *Notifier) findClientsByUserLogin(login string) []Client { return clients } +// findClientsByRoleID returns a list of clients corresponding to the input role id func (notifier *Notifier) findClientsByRoleID(id uuid.UUID) []Client { clients := make([]Client, 0) for _, client := range notifier.clientManager.GetClients() { diff --git a/internals/notifier/websocket_client_test.go b/internals/notifier/websocket_client_test.go index c417d7b2..f9c917bc 100644 --- a/internals/notifier/websocket_client_test.go +++ b/internals/notifier/websocket_client_test.go @@ -4,7 +4,9 @@ import ( "net/http" "net/http/httptest" "strings" + "sync" "testing" + "time" "github.com/gorilla/websocket" ) @@ -14,7 +16,10 @@ func TestNewWSClient(t *testing.T) { // Server-side initialisation var client *WebsocketClient + wg := sync.WaitGroup{} + wg.Add(1) s := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + defer wg.Done() var err error client, err = BuildWebsocketClient(w, r, nil) if err != nil { @@ -30,6 +35,20 @@ func TestNewWSClient(t *testing.T) { } defer ws.Close() + c := make(chan struct{}) + + // wait for the client to be ready + go func() { + wg.Wait() + c <- struct{}{} + }() + + select { + case <-c: + case <-time.After(time.Second): + t.Fatalf("Timed out waiting for wait group\n") + } + // Tests if client == nil { t.Fatal("Client not built") @@ -41,7 +60,10 @@ func TestWSClientRead(t *testing.T) { // Server-side initialisation var client *WebsocketClient + wg := sync.WaitGroup{} + wg.Add(1) s := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + defer wg.Done() var err error client, err 
= BuildWebsocketClient(w, r, nil) if err != nil { @@ -58,6 +80,20 @@ func TestWSClientRead(t *testing.T) { } defer ws.Close() + c := make(chan struct{}) + + // wait for the client to be ready + go func() { + wg.Wait() + c <- struct{}{} + }() + + select { + case <-c: + case <-time.After(time.Second): + t.Fatalf("Timed out waiting for wait group\n") + } + // Tests for i := 0; i < 10; i++ { if err := ws.WriteMessage(websocket.TextMessage, []byte("hello")); err != nil { @@ -79,8 +115,11 @@ func TestWSClientWrite(t *testing.T) { ReplaceGlobals(NewNotifier()) // Server-side initialisation + wg := sync.WaitGroup{} + wg.Add(1) var client *WebsocketClient s := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + defer wg.Done() var err error client, err = BuildWebsocketClient(w, r, nil) if err != nil { @@ -97,6 +136,20 @@ func TestWSClientWrite(t *testing.T) { } defer ws.Close() + c := make(chan struct{}) + + // wait for the client to be ready + go func() { + wg.Wait() + c <- struct{}{} + }() + + select { + case <-c: + case <-time.After(time.Second): + t.Fatalf("Timed out waiting for wait group\n") + } + // Tests for i := 0; i < 10; i++ { // Send message directly on the client Send channel From 9eaabbbcd848dc61ee955772a1a8f37785fd449d Mon Sep 17 00:00:00 2001 From: Paul Meyer Date: Wed, 6 Dec 2023 12:32:41 +0100 Subject: [PATCH 21/35] notification system improvements --- config/engine-api.toml | 2 +- internals/app/services.go | 2 +- internals/handlers/notification_handlers.go | 5 +++ internals/handlers/notifier_handlers.go | 35 ++++++++----------- internals/handlers/processor_handlers.go | 12 ++----- internals/ingester/aggregate.go | 28 +++++++++------ internals/notifier/notification/handler.go | 6 +++- .../notifier/notification/notification.go | 9 +++++ .../notification/notification_mock.go | 1 + 9 files changed, 58 insertions(+), 42 deletions(-) diff --git a/config/engine-api.toml b/config/engine-api.toml index 666195d8..c2a5f9bd 100644 --- 
a/config/engine-api.toml +++ b/config/engine-api.toml @@ -234,4 +234,4 @@ AUTHENTICATION_OIDC_FRONT_END_URL = "http://127.0.0.1:4200" AUTHENTICATION_OIDC_ENCRYPTION_KEY = "thisis24characterslongs." # NOTIFICATION_LIFETIME: The lifetime of a notification in the database. -NOTIFICATION_LIFETIME = "1w" \ No newline at end of file +NOTIFICATION_LIFETIME = "168h" # 168h = 7 days, available units are "ns", "us" (or "µs"), "ms", "s", "m", "h" \ No newline at end of file diff --git a/internals/app/services.go b/internals/app/services.go index 245831a5..29ca6433 100644 --- a/internals/app/services.go +++ b/internals/app/services.go @@ -116,7 +116,7 @@ func initCoordinator() { instanceName := viper.GetString("INSTANCE_NAME") if err = coordinator.InitInstance(instanceName, models); err != nil { - zap.L().Fatal("Intialisation of coordinator master", zap.Error(err)) + zap.L().Fatal("Initialization of coordinator master", zap.Error(err)) } if viper.GetBool("ENABLE_CRONS_ON_START") { for _, li := range coordinator.GetInstance().LogicalIndices { diff --git a/internals/handlers/notification_handlers.go b/internals/handlers/notification_handlers.go index 4abd79c1..3edcbe11 100644 --- a/internals/handlers/notification_handlers.go +++ b/internals/handlers/notification_handlers.go @@ -1,8 +1,10 @@ package handlers import ( + "github.com/myrteametrics/myrtea-engine-api/v5/internals/export" "net/http" "strconv" + "time" "github.com/go-chi/chi/v5" "github.com/myrteametrics/myrtea-engine-api/v5/internals/dbutils" @@ -67,6 +69,9 @@ func GetNotifications(w http.ResponseWriter, r *http.Request) { return } + notifications = append(notifications, notification.NewMockNotification(1, "level", "title", "subTitle", "description", time.Now(), []int64{1}, map[string]interface{}{"issueId": 1})) + notifications = append(notifications, notification.NewExportNotification(2, export.WrapperItem{Id: "test"}, 1)) + notifications = append(notifications, notification.NewBaseNotification(3, false)) 
render.JSON(w, r, notifications) } diff --git a/internals/handlers/notifier_handlers.go b/internals/handlers/notifier_handlers.go index e95f45fe..cecf1be2 100644 --- a/internals/handlers/notifier_handlers.go +++ b/internals/handlers/notifier_handlers.go @@ -1,17 +1,12 @@ package handlers import ( - "github.com/google/uuid" - "github.com/myrteametrics/myrtea-engine-api/v5/internals/export" - "github.com/myrteametrics/myrtea-engine-api/v5/internals/notifier/notification" - "net/http" - "time" - "github.com/myrteametrics/myrtea-engine-api/v5/internals/handlers/render" "github.com/myrteametrics/myrtea-engine-api/v5/internals/models" "github.com/myrteametrics/myrtea-engine-api/v5/internals/notifier" "github.com/myrteametrics/myrtea-engine-api/v5/internals/security/users" "go.uber.org/zap" + "net/http" ) // NotificationsWSRegister godoc @@ -46,20 +41,20 @@ func NotificationsWSRegister(w http.ResponseWriter, r *http.Request) { zap.L().Error("Add new WS Client to manager", zap.Error(err)) return } - go func(client *notifier.WebsocketClient) { // temporary for tests - zap.L().Info("starting notifier") - ticker := time.NewTicker(1 * time.Second) - after := time.After(30 * time.Second) - for { - select { - case <-ticker.C: - notifier.C().SendToUsers(notification.ExportNotification{Status: export.StatusPending, Export: export.WrapperItem{Id: uuid.New().String(), FileName: "test.bla"}}, []users.UserWithPermissions{user}) - zap.L().Info("send notification") - case <-after: - return - } - } - }(client) + //go func(client *notifier.WebsocketClient) { // temporary for tests + // zap.L().Info("starting notifier") + // ticker := time.NewTicker(1 * time.Second) + // after := time.After(30 * time.Second) + // for { + // select { + // case <-ticker.C: + // notifier.C().SendToUsers(notification.ExportNotification{Status: export.StatusPending, Export: export.WrapperItem{Id: uuid.New().String(), FileName: "test.bla"}}, []users.UserWithPermissions{user}) + // zap.L().Info("send 
notification") + // case <-after: + // return + // } + // } + //}(client) go client.Write() // go client.Read() // Disabled until proper usage diff --git a/internals/handlers/processor_handlers.go b/internals/handlers/processor_handlers.go index cafa02c4..72726de9 100644 --- a/internals/handlers/processor_handlers.go +++ b/internals/handlers/processor_handlers.go @@ -3,15 +3,13 @@ package handlers import ( "encoding/json" "errors" - "github.com/myrteametrics/myrtea-engine-api/v5/internals/ingester" - "net/http" - "time" - "github.com/myrteametrics/myrtea-engine-api/v5/internals/handlers/render" + "github.com/myrteametrics/myrtea-engine-api/v5/internals/ingester" "github.com/myrteametrics/myrtea-engine-api/v5/internals/processor" "github.com/myrteametrics/myrtea-engine-api/v5/internals/scheduler" "github.com/myrteametrics/myrtea-sdk/v4/models" "go.uber.org/zap" + "net/http" ) // ProcessorHandler is a basic struct allowing to set up a single aggregateIngester instance for all handlers @@ -21,12 +19,8 @@ type ProcessorHandler struct { // NewProcessorHandler returns a pointer to an ProcessorHandler instance func NewProcessorHandler() *ProcessorHandler { - var aggregateIngester = ingester.NewAggregateIngester() - go aggregateIngester.Run() // Start ingester - time.Sleep(10 * time.Millisecond) // goroutine warm-up - return &ProcessorHandler{ - aggregateIngester: aggregateIngester, + aggregateIngester: ingester.NewAggregateIngester(), } } diff --git a/internals/ingester/aggregate.go b/internals/ingester/aggregate.go index 94831449..b8c4924a 100644 --- a/internals/ingester/aggregate.go +++ b/internals/ingester/aggregate.go @@ -13,8 +13,9 @@ import ( // AggregateIngester is a component which process scheduler.ExternalAggregate type AggregateIngester struct { - Data chan []scheduler.ExternalAggregate + data chan []scheduler.ExternalAggregate metricQueueGauge *stdprometheus.Gauge + running bool } var ( @@ -39,16 +40,17 @@ func _newRegisteredGauge() *stdprometheus.Gauge { // 
NewAggregateIngester returns a pointer to a new AggregateIngester instance func NewAggregateIngester() *AggregateIngester { return &AggregateIngester{ - Data: make(chan []scheduler.ExternalAggregate, viper.GetInt("AGGREGATEINGESTER_QUEUE_BUFFER_SIZE")), + data: make(chan []scheduler.ExternalAggregate, viper.GetInt("AGGREGATEINGESTER_QUEUE_BUFFER_SIZE")), metricQueueGauge: _aggregateIngesterGauge, + running: false, } } // Run is the main routine of a TypeIngester instance -func (ingester *AggregateIngester) Run() { +func (ai *AggregateIngester) Run() { zap.L().Info("Starting AggregateIngester") - for ir := range ingester.Data { + for ir := range ai.data { zap.L().Debug("Received ExternalAggregate", zap.Int("ExternalAggregate items count", len(ir))) err := HandleAggregates(ir) @@ -57,25 +59,31 @@ func (ingester *AggregateIngester) Run() { } // Update queue gauge - (*ingester.metricQueueGauge).Set(float64(len(ingester.Data))) + (*ai.metricQueueGauge).Set(float64(len(ai.data))) } } // Ingest process an array of scheduler.ExternalAggregate -func (ingester *AggregateIngester) Ingest(aggregates []scheduler.ExternalAggregate) error { - dataLen := len(ingester.Data) +func (ai *AggregateIngester) Ingest(aggregates []scheduler.ExternalAggregate) error { + dataLen := len(ai.data) + + // Start ingester if not running + if !ai.running { + go ai.Run() + ai.running = true + } zap.L().Debug("Ingesting data", zap.Any("aggregates", aggregates)) // Check for channel overloading - if dataLen+1 >= cap(ingester.Data) { + if dataLen+1 >= cap(ai.data) { zap.L().Debug("Buffered channel would be overloaded with incoming bulkIngestRequest") - (*ingester.metricQueueGauge).Set(float64(dataLen)) + (*ai.metricQueueGauge).Set(float64(dataLen)) return errors.New("channel overload") } - ingester.Data <- aggregates + ai.data <- aggregates return nil } diff --git a/internals/notifier/notification/handler.go b/internals/notifier/notification/handler.go index 6bb35484..c82be210 100644 --- 
a/internals/notifier/notification/handler.go +++ b/internals/notifier/notification/handler.go @@ -45,6 +45,8 @@ func NewHandler(notificationLifetime time.Duration) *Handler { // useless to start cleaner if lifetime is less than 0 if notificationLifetime > 0 { go handler.startCleaner(context.Background()) + } else { + zap.L().Info("Notification cleaner will not be started", zap.Duration("notificationLifetime", notificationLifetime)) } return handler @@ -68,7 +70,9 @@ func (h *Handler) RegisterNotificationTypes() { // startCleaner start a ticker to clean expired notifications in database every 24 hours func (h *Handler) startCleaner(context context.Context) { - ticker := time.NewTicker(time.Hour * 24) + cleanRate := time.Hour * 24 + zap.L().Info("Starting notification cleaner", zap.Duration("cleanRate", cleanRate), zap.Duration("notificationLifetime", h.notificationLifetime)) + ticker := time.NewTicker(cleanRate) defer ticker.Stop() for { select { diff --git a/internals/notifier/notification/notification.go b/internals/notifier/notification/notification.go index 44b334bc..9c0369f1 100644 --- a/internals/notifier/notification/notification.go +++ b/internals/notifier/notification/notification.go @@ -20,6 +20,15 @@ type BaseNotification struct { Type string `json:"type"` } +// NewBaseNotification returns a new instance of a BaseNotification +func NewBaseNotification(id int64, isRead bool) BaseNotification { + return BaseNotification{ + Id: id, + IsRead: isRead, + Type: "BaseNotification", + } +} + // NewInstance returns a new instance of a BaseNotification func (n BaseNotification) NewInstance(id int64, data []byte, isRead bool) (Notification, error) { var notification BaseNotification diff --git a/internals/notifier/notification/notification_mock.go b/internals/notifier/notification/notification_mock.go index f147dc13..930ba1cd 100644 --- a/internals/notifier/notification/notification_mock.go +++ b/internals/notifier/notification/notification_mock.go @@ -14,6 +14,7 @@ 
type MockNotification struct { Title string `json:"title"` SubTitle string `json:"subtitle"` Description string `json:"description"` + Target string `json:"target"` Context map[string]interface{} `json:"context,omitempty"` } From c4558c9e9837a30c33d3d7e4c9e7084ff700318a Mon Sep 17 00:00:00 2001 From: SchawnnDev Date: Wed, 6 Dec 2023 20:05:01 +0100 Subject: [PATCH 22/35] fixed returning handler returning null --- internals/export/wrapper.go | 2 +- internals/handlers/notification_handlers.go | 7 ++++++- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/internals/export/wrapper.go b/internals/export/wrapper.go index 01f5e8d2..5e474ed3 100644 --- a/internals/export/wrapper.go +++ b/internals/export/wrapper.go @@ -263,7 +263,7 @@ func (ew *Wrapper) checkForExpiredFiles() error { } func (ew *Wrapper) GetUserExports(user users.User) []WrapperItem { - var result []WrapperItem + result := make([]WrapperItem, 0) // first, gather all exports that are in the workers if there are any for _, worker := range ew.workers { diff --git a/internals/handlers/notification_handlers.go b/internals/handlers/notification_handlers.go index 3edcbe11..89730b01 100644 --- a/internals/handlers/notification_handlers.go +++ b/internals/handlers/notification_handlers.go @@ -69,7 +69,12 @@ func GetNotifications(w http.ResponseWriter, r *http.Request) { return } - notifications = append(notifications, notification.NewMockNotification(1, "level", "title", "subTitle", "description", time.Now(), []int64{1}, map[string]interface{}{"issueId": 1})) + // generate rando mock notifications for testing 1 to 15 + for i := 2; i < 17; i++ { + notifications = append(notifications, notification.NewMockNotification(int64(i), "OK", "MockNotification", "Toodododo", "You must do something lol", time.Now().AddDate(0, 0, -i), []int64{1}, map[string]interface{}{"issueId": 1})) + } + + notifications = append(notifications, notification.NewMockNotification(1, "OK", "MockNotification", "Toodododo", "You must do 
something lol", time.Now(), []int64{1}, map[string]interface{}{"issueId": 1})) notifications = append(notifications, notification.NewExportNotification(2, export.WrapperItem{Id: "test"}, 1)) notifications = append(notifications, notification.NewBaseNotification(3, false)) render.JSON(w, r, notifications) From 200b48960510aa4ae0fa2ce922f5ede44daba203 Mon Sep 17 00:00:00 2001 From: Paul Meyer Date: Thu, 7 Dec 2023 18:19:47 +0100 Subject: [PATCH 23/35] changed error to string since its not marshalling --- internals/export/worker.go | 4 +-- internals/export/worker_test.go | 2 +- internals/export/wrapper.go | 54 +++++++++++++++++++++++++++++++-- 3 files changed, 54 insertions(+), 6 deletions(-) diff --git a/internals/export/worker.go b/internals/export/worker.go index 6314ed4f..9b25c5b3 100644 --- a/internals/export/worker.go +++ b/internals/export/worker.go @@ -37,7 +37,7 @@ func (e *ExportWorker) SetError(error error) { e.Mutex.Lock() defer e.Mutex.Unlock() e.QueueItem.Status = StatusError - e.QueueItem.Error = error + e.QueueItem.Error = error.Error() } // SetStatus sets the status of the worker @@ -79,7 +79,7 @@ func (e *ExportWorker) finalise() { e.Mutex.Lock() // set status to error if error occurred - if e.QueueItem.Error != nil { + if e.QueueItem.Error != "" { e.QueueItem.Status = StatusError } // set status to done if no error occurred diff --git a/internals/export/worker_test.go b/internals/export/worker_test.go index 349979ea..fd1b50b4 100644 --- a/internals/export/worker_test.go +++ b/internals/export/worker_test.go @@ -16,7 +16,7 @@ func TestExportWorker_SetError(t *testing.T) { worker := NewExportWorker(0, "/tmp", make(chan<- int)) worker.SetError(nil) expression.AssertEqual(t, worker.QueueItem.Status, StatusError) - expression.AssertEqual(t, worker.QueueItem.Error, nil) + expression.AssertEqual(t, worker.QueueItem.Error, "") } func TestExportWorker_SetStatus(t *testing.T) { diff --git a/internals/export/wrapper.go b/internals/export/wrapper.go index 
5e474ed3..808ef25b 100644 --- a/internals/export/wrapper.go +++ b/internals/export/wrapper.go @@ -31,7 +31,7 @@ type WrapperItem struct { Id string `json:"id"` // unique id that represents an export demand FactIDs []int64 `json:"factIds"` // list of fact ids that are part of the export (for archive and json) Facts []engine.Fact `json:"-"` - Error error `json:"error"` + Error string `json:"error"` Status int `json:"status"` FileName string `json:"fileName"` Date time.Time `json:"date"` @@ -75,7 +75,7 @@ func NewWrapperItem(facts []engine.Fact, fileName string, params CSVParameters, FactIDs: factIDs, Date: time.Now(), Status: StatusPending, - Error: nil, + Error: "", FileName: fileName, Params: params, } @@ -83,7 +83,7 @@ func NewWrapperItem(facts []engine.Fact, fileName string, params CSVParameters, // NewWrapper creates a new export wrapper func NewWrapper(basePath string, workersCount, diskRetentionDays, queueMaxSize int) *Wrapper { - return &Wrapper{ + wrapper := &Wrapper{ workers: make([]*ExportWorker, 0), queue: make([]*WrapperItem, 0), success: make(chan int), @@ -93,6 +93,54 @@ func NewWrapper(basePath string, workersCount, diskRetentionDays, queueMaxSize i diskRetentionDays: diskRetentionDays, workerCount: workersCount, } + + wrapper.archive.Store("c7f0044b-29f7-4c26-ab56-04e109683637", WrapperItem{ + Users: []string{"admin"}, + Date: time.Now(), + Status: StatusPending, + Id: "c7f0044b-29f7-4c26-ab56-04e109683637", + FactIDs: []int64{1, 2, 3}, + FileName: "export.csv.tar.gz", + }) + + wrapper.archive.Store("736ba596-7399-422e-b241-1407581cf454", WrapperItem{ + Users: []string{"admin"}, + Date: time.Now(), + Status: StatusRunning, + Id: "736ba596-7399-422e-b241-1407581cf454", + FactIDs: []int64{3, 6}, + FileName: "export-23.csv.tar.gz", + }) + + wrapper.archive.Store("5ea87155-7ea5-4152-aec5-386871dbfe1c", WrapperItem{ + Users: []string{"admin"}, + Date: time.Now().AddDate(0, 0, -3), + Status: StatusDone, + Id: "5ea87155-7ea5-4152-aec5-386871dbfe1c", + 
FactIDs: []int64{2, 3, 6}, + FileName: "exportee-236.csv.tar.gz", + }) + + wrapper.archive.Store("9fb91d1f-5b9c-4856-8be4-436831d2596e", WrapperItem{ + Users: []string{"admin"}, + Date: time.Now().AddDate(0, 0, -1), + Status: StatusError, + Id: "9fb91d1f-5b9c-4856-8be4-436831d2596e", + FactIDs: []int64{22, 23, 6}, + FileName: "exporteeqsdqsd-236.csv.tar.gz", + Error: "error while exporting", + }) + + wrapper.archive.Store("d0502ac6-8d99-4532-a278-e3e7bd1c887b", WrapperItem{ + Users: []string{"admin"}, + Date: time.Now(), + Status: StatusDone, + Id: "d0502ac6-8d99-4532-a278-e3e7bd1c887b", + FactIDs: []int64{1, 23, 26}, + FileName: "finisedexport-236.csv.tar.gz", + }) + + return wrapper } // ContainsFact checks if fact is part of the WrapperItem data From 7254974bf55676c9fecc42735d30b231efb21af1 Mon Sep 17 00:00:00 2001 From: Paul Meyer Date: Fri, 8 Dec 2023 17:56:37 +0100 Subject: [PATCH 24/35] Some fixes & file moves --- internals/app/app.go | 2 +- internals/app/services.go | 10 +- internals/export/notification.go | 88 +++++++++++++ internals/export/notification_test.go | 116 ++++++++++++++++++ internals/export/wrapper.go | 73 ++++++++--- internals/handlers/notification_handlers.go | 4 +- internals/notifier/notification/handler.go | 9 +- .../notifier/notification/notification.go | 23 +++- .../notification/notification_export.go | 70 ----------- .../notification/notification_mock.go | 16 ++- .../notification/notification_test.go | 110 ----------------- internals/notifier/notifier.go | 41 +++++-- 12 files changed, 336 insertions(+), 226 deletions(-) create mode 100644 internals/export/notification.go create mode 100644 internals/export/notification_test.go delete mode 100644 internals/notifier/notification/notification_export.go diff --git a/internals/app/app.go b/internals/app/app.go index 70ce6d1f..b5e25c21 100644 --- a/internals/app/app.go +++ b/internals/app/app.go @@ -5,7 +5,7 @@ import ( "github.com/spf13/viper" ) -// Init initialiaze all the app configuration 
and components +// Init initialize all the app configuration and components func Init() { docs.SwaggerInfo.Host = viper.GetString("SWAGGER_HOST") diff --git a/internals/app/services.go b/internals/app/services.go index 29ca6433..ebf19d20 100644 --- a/internals/app/services.go +++ b/internals/app/services.go @@ -2,6 +2,7 @@ package app import ( "errors" + "github.com/myrteametrics/myrtea-engine-api/v5/internals/export" "strings" "github.com/myrteametrics/myrtea-engine-api/v5/internals/calendar" @@ -79,7 +80,10 @@ func stopServices() { func initNotifier() { notificationLifetime := viper.GetDuration("NOTIFICATION_LIFETIME") - notification.ReplaceHandlerGlobals(notification.NewHandler(notificationLifetime)) + handler := notification.NewHandler(notificationLifetime) + handler.RegisterNotificationType(notification.MockNotification{}) + handler.RegisterNotificationType(export.ExportNotification{}) + notification.ReplaceHandlerGlobals(handler) notifier.ReplaceGlobals(notifier.NewNotifier()) } @@ -92,7 +96,6 @@ func initScheduler() { if viper.GetBool("ENABLE_CRONS_ON_START") { scheduler.S().C.Start() } - } } func initTasker() { @@ -102,7 +105,6 @@ func initTasker() { func initCalendars() { calendar.Init() - } func initCoordinator() { @@ -129,13 +131,11 @@ func initCoordinator() { } func initEmailSender() { - username := viper.GetString("SMTP_USERNAME") password := viper.GetString("SMTP_PASSWORD") host := viper.GetString("SMTP_HOST") port := viper.GetString("SMTP_PORT") email.InitSender(username, password, host, port) - } func initOidcAuthentication() { diff --git a/internals/export/notification.go b/internals/export/notification.go new file mode 100644 index 00000000..99e3695d --- /dev/null +++ b/internals/export/notification.go @@ -0,0 +1,88 @@ +package export + +import ( + "encoding/json" + "github.com/myrteametrics/myrtea-engine-api/v5/internals/notifier/notification" + "reflect" +) + +const ( + ExportNotificationStarted = 0 + ExportNotificationArchived = 1 // happens 
when + ExportNotificationDeleted = 2 // happens when the export is deleted from archive +) + +type ExportNotification struct { + notification.BaseNotification + Export WrapperItem `json:"export"` + Status int `json:"status"` +} + +func NewExportNotification(id int64, export WrapperItem, status int) *ExportNotification { + return &ExportNotification{ + BaseNotification: notification.BaseNotification{ + Id: id, + Type: "ExportNotification", + Persistent: false, + }, + Export: export, + Status: status, + } +} + +// ToBytes convert a notification in a json byte slice to be sent through any required channel +func (e ExportNotification) ToBytes() ([]byte, error) { + b, err := json.Marshal(e) + if err != nil { + return nil, err + } + return b, nil +} + +// NewInstance returns a new instance of a ExportNotification +func (e ExportNotification) NewInstance(id int64, data []byte, isRead bool) (notification.Notification, error) { + var notif ExportNotification + err := json.Unmarshal(data, ¬if) + if err != nil { + return nil, err + } + notif.Id = id + notif.IsRead = isRead + notif.Notification = notif + return notif, nil +} + +// Equals returns true if the two notifications are equals +func (e ExportNotification) Equals(notification notification.Notification) bool { + notif, ok := notification.(ExportNotification) + if !ok { + return ok + } + if !notif.BaseNotification.Equals(e.BaseNotification) { + return false + } + if !reflect.DeepEqual(notif.Export, e.Export) { + return false + } + if notif.Status != e.Status { + return false + } + return true +} + +// SetId set the notification ID +func (e ExportNotification) SetId(id int64) notification.Notification { + e.Id = id + return e +} + +// SetPersistent sets whether the notification is persistent (saved to a database) +func (e ExportNotification) SetPersistent(persistent bool) notification.Notification { + e.Persistent = persistent + return e +} + +// IsPersistent returns whether the notification is persistent (saved to a 
database) +func (e ExportNotification) IsPersistent() bool { + return e.Persistent +} diff --git a/internals/export/notification_test.go b/internals/export/notification_test.go new file mode 100644 index 00000000..30b4862d --- /dev/null +++ b/internals/export/notification_test.go @@ -0,0 +1,116 @@ +package export + +import ( + "github.com/google/uuid" + "github.com/myrteametrics/myrtea-engine-api/v5/internals/notifier/notification" + "github.com/myrteametrics/myrtea-sdk/v4/expression" + "testing" +) + +func TestExportNotification(t *testing.T) { + // init handler + notification.ReplaceHandlerGlobals(notification.NewHandler(0)) + + notif := ExportNotification{ + Export: WrapperItem{ + Id: uuid.New().String(), + }, + Status: 1, + } + notif.Id = 1 + notif.IsRead = false + + bytes, err := notif.ToBytes() + if err != nil { + t.Errorf("Unexpected error: %v", err) + } + + if bytes == nil { + t.Errorf("Expected bytes, got nil") + } + + t.Log(string(bytes)) + + // find type and create new instance + notifType, ok := notification.H().GetNotificationByType("ExportNotification") + if !ok { + t.Errorf("ExportNotification type does not exist") + } + + instance, err := notifType.NewInstance(1, bytes, false) + if err != nil { + t.Errorf("ExportNotification couldn't be instanced") + } + bt, _ := instance.ToBytes() + t.Log(string(bt)) + + expression.AssertEqual(t, string(bytes), string(bt)) +} + +func TestExportNotification_Equals(t *testing.T) { + id := uuid.New().String() + exportNotification := ExportNotification{ + BaseNotification: notification.BaseNotification{ + Id: 1, + Type: "Test", + IsRead: true, + }, + Export: WrapperItem{ + Id: id, + }, + Status: 1, + } + + expression.AssertEqual(t, exportNotification.Equals(ExportNotification{ + BaseNotification: notification.BaseNotification{ + Id: 1, + Type: "Test", + IsRead: true, + }, + Status: 1, + Export: WrapperItem{Id: id}, + }), true) + + expression.AssertEqual(t, exportNotification.Equals(ExportNotification{ + 
BaseNotification: notification.BaseNotification{ + Id: 2, + Type: "Test", + IsRead: true, + }, + Status: 1, + Export: WrapperItem{Id: id}, + }), false) + + expression.AssertEqual(t, exportNotification.Equals(ExportNotification{ + BaseNotification: notification.BaseNotification{ + Id: 1, + Type: "Test", + IsRead: true, + }, + Status: 2, + Export: WrapperItem{Id: id}, + }), false) + + expression.AssertEqual(t, exportNotification.Equals(ExportNotification{ + BaseNotification: notification.BaseNotification{ + Id: 1, + Type: "Test", + IsRead: true, + }, + Status: 1, + Export: WrapperItem{Id: uuid.New().String()}, + }), false) + +} + +func TestExportNotification_SetId(t *testing.T) { + notif, err := ExportNotification{}.NewInstance(1, []byte(`{}`), true) + if err != nil { + t.Errorf("Error: %v", err) + } + + notif = notif.SetId(2) + exportNotification, ok := notif.(ExportNotification) + expression.AssertEqual(t, ok, true) + expression.AssertEqual(t, exportNotification.Id, int64(2)) +} diff --git a/internals/export/wrapper.go b/internals/export/wrapper.go index 808ef25b..200ae45a 100644 --- a/internals/export/wrapper.go +++ b/internals/export/wrapper.go @@ -3,6 +3,8 @@ package export import ( "context" "github.com/google/uuid" + "github.com/myrteametrics/myrtea-engine-api/v5/internals/notifier" + "github.com/myrteametrics/myrtea-engine-api/v5/internals/notifier/notification" "github.com/myrteametrics/myrtea-engine-api/v5/internals/security/users" "github.com/myrteametrics/myrtea-sdk/v4/engine" "go.uber.org/zap" @@ -227,7 +229,6 @@ func (ew *Wrapper) startDispatcher(context context.Context) { select { case w := <-ew.success: worker := ew.workers[w] - // TODO: send notifications here // archive item when finished worker.Mutex.Lock() @@ -235,9 +236,17 @@ func (ew *Wrapper) startDispatcher(context context.Context) { item := worker.QueueItem worker.QueueItem = WrapperItem{} worker.Mutex.Unlock() + // archive item item.Facts = []engine.Fact{} // empty facts to avoid storing 
them in the archive ew.archive.Store(item.Id, item) + + // send notification to user (non-blocking) + go func(wrapperItem WrapperItem) { + _ = notifier.C().SendToUserLogins( + ew.createExportNotification(ExportNotificationArchived, &wrapperItem), + wrapperItem.Users) + }(item) case <-ticker.C: ew.dispatchExportQueue(context) case <-expiredFileTicker.C: @@ -252,6 +261,20 @@ func (ew *Wrapper) startDispatcher(context context.Context) { } } +// createExportNotification creates an export notification using given parameters +func (ew *Wrapper) createExportNotification(status int, item *WrapperItem) ExportNotification { + return ExportNotification{ + BaseNotification: notification.BaseNotification{ + Id: 0, + IsRead: false, + Type: "ExportNotification", + Persistent: false, + }, + Export: *item, + Status: status, + } +} + // checkForExpiredFiles checks for expired files in the export directory and deletes them // it also deletes the done tasks that are older than diskRetentionDays func (ew *Wrapper) checkForExpiredFiles() error { @@ -271,6 +294,14 @@ func (ew *Wrapper) checkForExpiredFiles() error { } if time.Since(data.Date).Hours() > float64(ew.diskRetentionDays*24) { ew.archive.Delete(key) + + // send notification to user (non-blocking) + go func(wrapperItem WrapperItem) { + _ = notifier.C().SendToUserLogins( + ew.createExportNotification(ExportNotificationDeleted, &wrapperItem), + wrapperItem.Users) + }(data) + } return true }) @@ -369,26 +400,34 @@ func (ew *Wrapper) dequeueWrapperItem(item *WrapperItem) (int, bool) { func (ew *Wrapper) dispatchExportQueue(ctx context.Context) { for _, worker := range ew.workers { worker.Mutex.Lock() - if worker.Available { - // check if there is an item in the queue - ew.queueMutex.Lock() - - if len(ew.queue) == 0 { - ew.queueMutex.Unlock() - worker.Mutex.Unlock() - return // Nothing in queue - } + if !worker.Available { + worker.Mutex.Unlock() + continue + } + // check if there is an item in the queue + ew.queueMutex.Lock() - item 
:= *ew.queue[0] - ew.queue = append(ew.queue[:0], ew.queue[1:]...) + if len(ew.queue) == 0 { ew.queueMutex.Unlock() - - worker.Available = false - worker.Mutex.Unlock() - go worker.Start(item, ctx) - } else { worker.Mutex.Unlock() + return // Nothing in queue } + + item := *ew.queue[0] + ew.queue = append(ew.queue[:0], ew.queue[1:]...) + ew.queueMutex.Unlock() + + worker.Available = false + worker.Mutex.Unlock() + go worker.Start(item, ctx) + + // send notification to user (non-blocking) + go func(wrapperItem WrapperItem) { + _ = notifier.C().SendToUserLogins( + ew.createExportNotification(ExportNotificationStarted, &wrapperItem), + wrapperItem.Users) + }(item) + } } diff --git a/internals/handlers/notification_handlers.go b/internals/handlers/notification_handlers.go index 89730b01..d619446f 100644 --- a/internals/handlers/notification_handlers.go +++ b/internals/handlers/notification_handlers.go @@ -75,8 +75,8 @@ func GetNotifications(w http.ResponseWriter, r *http.Request) { } notifications = append(notifications, notification.NewMockNotification(1, "OK", "MockNotification", "Toodododo", "You must do something lol", time.Now(), []int64{1}, map[string]interface{}{"issueId": 1})) - notifications = append(notifications, notification.NewExportNotification(2, export.WrapperItem{Id: "test"}, 1)) - notifications = append(notifications, notification.NewBaseNotification(3, false)) + notifications = append(notifications, export.NewExportNotification(2, export.WrapperItem{Id: "test"}, 1)) + notifications = append(notifications, notification.NewBaseNotification(3, false, true)) render.JSON(w, r, notifications) } diff --git a/internals/notifier/notification/handler.go b/internals/notifier/notification/handler.go index c82be210..55758710 100644 --- a/internals/notifier/notification/handler.go +++ b/internals/notifier/notification/handler.go @@ -40,7 +40,6 @@ func NewHandler(notificationLifetime time.Duration) *Handler { notificationTypes: make(map[string]Notification), 
notificationLifetime: notificationLifetime, } - handler.RegisterNotificationTypes() // useless to start cleaner if lifetime is less than 0 if notificationLifetime > 0 { @@ -62,10 +61,10 @@ func (h *Handler) UnregisterNotificationType(notification Notification) { delete(h.notificationTypes, getType(notification)) } -// RegisterNotificationTypes register all notification types -func (h *Handler) RegisterNotificationTypes() { - h.RegisterNotificationType(BaseNotification{}) - h.RegisterNotificationType(ExportNotification{}) +// GetNotificationByType gets notification interface by its type +func (h *Handler) GetNotificationByType(notificationType string) (notif Notification, ok bool) { + notif, ok = h.notificationTypes[notificationType] + return notif, ok } // startCleaner start a ticker to clean expired notifications in database every 24 hours diff --git a/internals/notifier/notification/notification.go b/internals/notifier/notification/notification.go index 9c0369f1..c6515c56 100644 --- a/internals/notifier/notification/notification.go +++ b/internals/notifier/notification/notification.go @@ -10,6 +10,8 @@ type Notification interface { NewInstance(id int64, data []byte, isRead bool) (Notification, error) Equals(notification Notification) bool SetId(id int64) Notification + SetPersistent(persistent bool) Notification + IsPersistent() bool } // BaseNotification data structure represents a basic notification and her current state @@ -18,14 +20,16 @@ type BaseNotification struct { Id int64 `json:"id"` IsRead bool `json:"isRead"` Type string `json:"type"` + Persistent bool `json:"persistent"` // is notification saved in db or not ? 
} // NewBaseNotification returns a new instance of a BaseNotification -func NewBaseNotification(id int64, isRead bool) BaseNotification { +func NewBaseNotification(id int64, isRead bool, persistent bool) BaseNotification { return BaseNotification{ - Id: id, - IsRead: isRead, - Type: "BaseNotification", + Id: id, + IsRead: isRead, + Persistent: persistent, + Type: "BaseNotification", } } @@ -74,3 +78,14 @@ func (n BaseNotification) SetId(id int64) Notification { n.Id = id return n } + +// SetPersistent sets whether the notification is persistent (saved to a database) +func (n BaseNotification) SetPersistent(persistent bool) Notification { + n.Persistent = persistent + return n +} + +// IsPersistent returns whether the notification is persistent (saved to a database) +func (n BaseNotification) IsPersistent() bool { + return n.Persistent +} diff --git a/internals/notifier/notification/notification_export.go b/internals/notifier/notification/notification_export.go deleted file mode 100644 index 98e37780..00000000 --- a/internals/notifier/notification/notification_export.go +++ /dev/null @@ -1,70 +0,0 @@ -package notification - -import ( - "encoding/json" - "github.com/myrteametrics/myrtea-engine-api/v5/internals/export" - "reflect" -) - -type ExportNotification struct { - BaseNotification - Export export.WrapperItem `json:"export"` - Status int `json:"status"` -} - -func NewExportNotification(id int64, export export.WrapperItem, status int) *ExportNotification { - return &ExportNotification{ - BaseNotification: BaseNotification{ - Id: id, - Type: "ExportNotification", - }, - Export: export, - Status: status, - } -} - -// ToBytes convert a notification in a json byte slice to be sent through any required channel -func (e ExportNotification) ToBytes() ([]byte, error) { - b, err := json.Marshal(e) - if err != nil { - return nil, err - } - return b, nil -} - -// NewInstance returns a new instance of a ExportNotification -func (e ExportNotification) NewInstance(id int64, 
data []byte, isRead bool) (Notification, error) { - var notification ExportNotification - err := json.Unmarshal(data, ¬ification) - if err != nil { - return nil, err - } - notification.Id = id - notification.IsRead = isRead - notification.Notification = notification - return notification, nil -} - -// Equals returns true if the two notifications are equals -func (e ExportNotification) Equals(notification Notification) bool { - notif, ok := notification.(ExportNotification) - if !ok { - return ok - } - if !notif.BaseNotification.Equals(e.BaseNotification) { - return false - } - if !reflect.DeepEqual(notif.Export, e.Export) { - return false - } - if notif.Status != e.Status { - return false - } - return true -} - -// SetId set the notification ID -func (e ExportNotification) SetId(id int64) Notification { - e.Id = id - return e -} diff --git a/internals/notifier/notification/notification_mock.go b/internals/notifier/notification/notification_mock.go index 930ba1cd..4d68eca4 100644 --- a/internals/notifier/notification/notification_mock.go +++ b/internals/notifier/notification/notification_mock.go @@ -24,8 +24,9 @@ func NewMockNotification(id int64, level string, title string, subTitle string, return &MockNotification{ BaseNotification: BaseNotification{ - Id: id, - Type: "MockNotification", + Id: id, + Type: "MockNotification", + Persistent: true, }, CreationDate: creationDate, Groups: groups, @@ -111,3 +112,14 @@ func (n MockNotification) SetId(id int64) Notification { n.Id = id return n } + +// SetPersistent sets whether the notification is persistent (saved to a database) +func (n MockNotification) SetPersistent(persistent bool) Notification { + n.Persistent = persistent + return n +} + +// IsPersistent returns whether the notification is persistent (saved to a database) +func (n MockNotification) IsPersistent() bool { + return n.Persistent +} diff --git a/internals/notifier/notification/notification_test.go b/internals/notifier/notification/notification_test.go 
index ed3467d7..6d10d316 100644 --- a/internals/notifier/notification/notification_test.go +++ b/internals/notifier/notification/notification_test.go @@ -1,8 +1,6 @@ package notification import ( - "github.com/google/uuid" - "github.com/myrteametrics/myrtea-engine-api/v5/internals/export" "github.com/myrteametrics/myrtea-sdk/v4/expression" "testing" "time" @@ -59,46 +57,6 @@ func TestBaseNotificationNewInstanceWithInvalidData(t *testing.T) { } } -func TestExportNotification(t *testing.T) { - // init handler - ReplaceHandlerGlobals(NewHandler(0)) - - notification := ExportNotification{ - Export: export.WrapperItem{ - Id: uuid.New().String(), - }, - Status: 1, - } - notification.Id = 1 - notification.IsRead = false - - bytes, err := notification.ToBytes() - if err != nil { - t.Errorf("Unexpected error: %v", err) - } - - if bytes == nil { - t.Errorf("Expected bytes, got nil") - } - - t.Log(string(bytes)) - - // find type and create new instance - notifType, ok := H().notificationTypes["ExportNotification"] - if !ok { - t.Errorf("Notification type does not exist") - } - - instance, err := notifType.NewInstance(1, bytes, false) - if err != nil { - t.Errorf("Notification couldn't be instanced") - } - bt, _ := instance.ToBytes() - t.Log(string(bt)) - - expression.AssertEqual(t, string(bytes), string(bt)) -} - func TestBaseNotification_Equals(t *testing.T) { notif := BaseNotification{ Id: 1, @@ -253,62 +211,6 @@ func TestMockNotification_Equals(t *testing.T) { } -func TestExportNotification_Equals(t *testing.T) { - id := uuid.New().String() - exportNotification := ExportNotification{ - BaseNotification: BaseNotification{ - Id: 1, - Type: "Test", - IsRead: true, - }, - Export: export.WrapperItem{ - Id: id, - }, - Status: 1, - } - - expression.AssertEqual(t, exportNotification.Equals(ExportNotification{ - BaseNotification: BaseNotification{ - Id: 1, - Type: "Test", - IsRead: true, - }, - Status: 1, - Export: export.WrapperItem{Id: id}, - }), true) - - 
expression.AssertEqual(t, exportNotification.Equals(ExportNotification{ - BaseNotification: BaseNotification{ - Id: 2, - Type: "Test", - IsRead: true, - }, - Status: 1, - Export: export.WrapperItem{Id: id}, - }), false) - - expression.AssertEqual(t, exportNotification.Equals(ExportNotification{ - BaseNotification: BaseNotification{ - Id: 1, - Type: "Test", - IsRead: true, - }, - Status: 2, - Export: export.WrapperItem{Id: id}, - }), false) - - expression.AssertEqual(t, exportNotification.Equals(ExportNotification{ - BaseNotification: BaseNotification{ - Id: 1, - Type: "Test", - IsRead: true, - }, - Status: 1, - Export: export.WrapperItem{Id: uuid.New().String()}, - }), false) - -} - func TestBaseNotification_SetId(t *testing.T) { notif, err := BaseNotification{}.NewInstance(1, []byte(`{}`), true) if err != nil { @@ -321,18 +223,6 @@ func TestBaseNotification_SetId(t *testing.T) { expression.AssertEqual(t, baseNotification.Id, int64(2)) } -func TestExportNotification_SetId(t *testing.T) { - notif, err := ExportNotification{}.NewInstance(1, []byte(`{}`), true) - if err != nil { - t.Errorf("Error: %v", err) - } - - notif = notif.SetId(2) - exportNotification, ok := notif.(ExportNotification) - expression.AssertEqual(t, ok, true) - expression.AssertEqual(t, exportNotification.Id, int64(2)) -} - func TestMockNotification_SetId(t *testing.T) { notif, err := MockNotification{}.NewInstance(1, []byte(`{}`), true) if err != nil { diff --git a/internals/notifier/notifier.go b/internals/notifier/notifier.go index 6a1d3933..eefd5d3c 100644 --- a/internals/notifier/notifier.go +++ b/internals/notifier/notifier.go @@ -137,27 +137,48 @@ func (notifier *Notifier) Broadcast(notif notification.Notification) { } // SendToUsers send a notification to users corresponding the input ids -func (notifier *Notifier) SendToUsers(notif notification.Notification, users []users.UserWithPermissions) { +func (notifier *Notifier) SendToUsers(notif notification.Notification, users 
[]users.UserWithPermissions) error { if users != nil && len(users) > 0 { for _, user := range users { - clients := notifier.findClientsByUserLogin(user.Login) - for _, client := range clients { - notifier.sendToClient(notif, client) + err := notifier.SendToUser(notif, user) + if err != nil { + return err } } } + return nil +} + +// SendToUserLogins send a notification to user logins corresponding the input ids +func (notifier *Notifier) SendToUserLogins(notif notification.Notification, users []string) error { + if users != nil && len(users) > 0 { + for _, user := range users { + err := notifier.SendToUserLogin(notif, user) + if err != nil { + return err + } + } + } + return nil } // SendToUser send a notification to a specific user func (notifier *Notifier) SendToUser(notif notification.Notification, user users.UserWithPermissions) error { - id, err := notification.R().Create(notif, user.Login) - if err != nil { - zap.L().Error("Add notification to history", zap.Error(err)) - return err + return notifier.SendToUserLogin(notif, user.Login) +} + +// SendToUserLogin send a notification to a specific user using his login +func (notifier *Notifier) SendToUserLogin(notif notification.Notification, login string) error { + if notif.IsPersistent() { // Not all notifications needs notifications to be saved to database + id, err := notification.R().Create(notif, login) + if err != nil { + zap.L().Error("Add notification to history", zap.Error(err)) + return err + } + notif = notif.SetId(id) } - notif = notif.SetId(id) - clients := notifier.findClientsByUserLogin(user.Login) + clients := notifier.findClientsByUserLogin(login) for _, client := range clients { notifier.sendToClient(notif, client) } From 23600b17b0db8c46191d57ae5ca644260e6976f5 Mon Sep 17 00:00:00 2001 From: Paul Meyer Date: Wed, 20 Dec 2023 18:27:05 +0100 Subject: [PATCH 25/35] a lot of fixes --- .gitignore | 7 +- config/engine-api.toml | 18 +++- internals/export/csv.go | 10 +- internals/export/csv_test.go | 4 
+- internals/export/utils.go | 2 +- internals/export/utils_test.go | 20 ++-- internals/export/worker.go | 21 +++- internals/export/wrapper.go | 131 ++++++++++++++---------- internals/export/wrapper_test.go | 3 +- internals/tasker/situation_reporting.go | 2 +- 10 files changed, 146 insertions(+), 72 deletions(-) diff --git a/.gitignore b/.gitignore index 94a47d5b..dd62ffc3 100644 --- a/.gitignore +++ b/.gitignore @@ -35,8 +35,11 @@ Gopkg.toml # Version file (used in CI) version -/certs - +# Myrtea Plugins plugin/*.plugin +# Exports directory (for local testing) +exports/ + +# GoLang Linter executable golangci-lint.exe \ No newline at end of file diff --git a/config/engine-api.toml b/config/engine-api.toml index c2a5f9bd..3d48e814 100644 --- a/config/engine-api.toml +++ b/config/engine-api.toml @@ -234,4 +234,20 @@ AUTHENTICATION_OIDC_FRONT_END_URL = "http://127.0.0.1:4200" AUTHENTICATION_OIDC_ENCRYPTION_KEY = "thisis24characterslongs." # NOTIFICATION_LIFETIME: The lifetime of a notification in the database. -NOTIFICATION_LIFETIME = "168h" # 168h = 7 days, available units are "ns", "us" (or "µs"), "ms", "s", "m", "h" \ No newline at end of file +NOTIFICATION_LIFETIME = "168h" # 168h = 7 days, available units are "ns", "us" (or "µs"), "ms", "s", "m", "h" + +# Path to directory where the resulting export files will be stored. +# Default: exports/ +EXPORT_BASE_PATH = "exports/" + +# Number of days before one export file will be auto deleted +# Default: 4 +EXPORT_DISK_RETENTION_DAYS = 4 + +# Export queue max size, any export request that is made when queue is full will be refused. 
+# Default value: 30 +EXPORT_QUEUE_MAX_SIZE = 30 + +# Number of concurrent export workers +# Default value: 4 +EXPORT_WORKERS_COUNT = 4 \ No newline at end of file diff --git a/internals/export/csv.go b/internals/export/csv.go index 3cf9b644..584cec86 100644 --- a/internals/export/csv.go +++ b/internals/export/csv.go @@ -6,6 +6,7 @@ import ( "fmt" "strings" "time" + "unicode/utf8" "github.com/myrteametrics/myrtea-engine-api/v5/internals/reader" "go.uber.org/zap" @@ -13,7 +14,14 @@ import ( // WriteConvertHitsToCSV writes hits to CSV func WriteConvertHitsToCSV(w *csv.Writer, hits []reader.Hit, params CSVParameters, writeHeader bool) error { - w.Comma = params.Separator + if len(params.Separator) == 1 { + w.Comma, _ = utf8.DecodeRune([]byte(params.Separator)) + if w.Comma == utf8.RuneError { + w.Comma = ',' + } + } else { + w.Comma = ',' + } // avoid to print header when labels are empty if writeHeader && len(params.Columns) > 0 { diff --git a/internals/export/csv_test.go b/internals/export/csv_test.go index 930125eb..e1bcc644 100644 --- a/internals/export/csv_test.go +++ b/internals/export/csv_test.go @@ -23,7 +23,7 @@ func TestConvertHitsToCSV(t *testing.T) { {Name: "d.e", Label: "Label D.E", Format: ""}, {Name: "date", Label: "Date", Format: "02/01/2006"}, }, - Separator: ',', + Separator: ",", } csv, err := ConvertHitsToCSV(hits, params, true) if err != nil { @@ -48,7 +48,7 @@ func TestWriteConvertHitsToCSV(t *testing.T) { {Name: "d.e", Label: "Label D.E", Format: ""}, {Name: "date", Label: "Date", Format: "02/01/2006"}, }, - Separator: ',', + Separator: ",", } b := new(bytes.Buffer) w := csv2.NewWriter(b) diff --git a/internals/export/utils.go b/internals/export/utils.go index d709cc1b..bc67bf26 100644 --- a/internals/export/utils.go +++ b/internals/export/utils.go @@ -2,7 +2,7 @@ package export type CSVParameters struct { Columns []Column `json:"columns"` - Separator rune `json:"separator" default:","` + Separator string `json:"separator"` Limit int64 
`json:"limit"` } diff --git a/internals/export/utils_test.go b/internals/export/utils_test.go index 2892c45f..8be6a793 100644 --- a/internals/export/utils_test.go +++ b/internals/export/utils_test.go @@ -30,37 +30,37 @@ func TestColumnEquals_WithSameValues(t *testing.T) { } func TestCSVParametersEquals_WithDifferentSeparator(t *testing.T) { - params1 := CSVParameters{Separator: ',', Limit: 10, Columns: []Column{{Name: "name", Label: "label", Format: "format"}}} - params2 := CSVParameters{Separator: ';', Limit: 10, Columns: []Column{{Name: "name", Label: "label", Format: "format"}}} + params1 := CSVParameters{Separator: ",", Limit: 10, Columns: []Column{{Name: "name", Label: "label", Format: "format"}}} + params2 := CSVParameters{Separator: ";", Limit: 10, Columns: []Column{{Name: "name", Label: "label", Format: "format"}}} expression.AssertEqual(t, params1.Equals(params2), false) } func TestCSVParametersEquals_WithDifferentLimit(t *testing.T) { - params1 := CSVParameters{Separator: ',', Limit: 10, Columns: []Column{{Name: "name", Label: "label", Format: "format"}}} - params2 := CSVParameters{Separator: ',', Limit: 20, Columns: []Column{{Name: "name", Label: "label", Format: "format"}}} + params1 := CSVParameters{Separator: ",", Limit: 10, Columns: []Column{{Name: "name", Label: "label", Format: "format"}}} + params2 := CSVParameters{Separator: ",", Limit: 20, Columns: []Column{{Name: "name", Label: "label", Format: "format"}}} expression.AssertEqual(t, params1.Equals(params2), false) } func TestCSVParametersEquals_WithDifferentColumns(t *testing.T) { - params1 := CSVParameters{Separator: ',', Limit: 10, Columns: []Column{{Name: "name1", Label: "label", Format: "format"}}} - params2 := CSVParameters{Separator: ',', Limit: 10, Columns: []Column{{Name: "name2", Label: "label", Format: "format"}}} + params1 := CSVParameters{Separator: ",", Limit: 10, Columns: []Column{{Name: "name1", Label: "label", Format: "format"}}} + params2 := CSVParameters{Separator: ",", Limit: 
10, Columns: []Column{{Name: "name2", Label: "label", Format: "format"}}} expression.AssertEqual(t, params1.Equals(params2), false) } func TestCSVParametersEquals_WithSameValues(t *testing.T) { - params1 := CSVParameters{Separator: ',', Limit: 10, Columns: []Column{{Name: "name", Label: "label", Format: "format"}}} - params2 := CSVParameters{Separator: ',', Limit: 10, Columns: []Column{{Name: "name", Label: "label", Format: "format"}}} + params1 := CSVParameters{Separator: ",", Limit: 10, Columns: []Column{{Name: "name", Label: "label", Format: "format"}}} + params2 := CSVParameters{Separator: ",", Limit: 10, Columns: []Column{{Name: "name", Label: "label", Format: "format"}}} expression.AssertEqual(t, params1.Equals(params2), true) } func TestGetColumnsLabel_WithNoColumns(t *testing.T) { - params := CSVParameters{Separator: ',', Limit: 10, Columns: []Column{}} + params := CSVParameters{Separator: ",", Limit: 10, Columns: []Column{}} labels := params.GetColumnsLabel() expression.AssertEqual(t, len(labels), 0) } func TestGetColumnsLabel_WithColumns(t *testing.T) { - params := CSVParameters{Separator: ',', Limit: 10, Columns: []Column{{Name: "name1", Label: "label1", Format: "format1"}, {Name: "name2", Label: "label2", Format: "format2"}}} + params := CSVParameters{Separator: ",", Limit: 10, Columns: []Column{{Name: "name1", Label: "label1", Format: "format1"}, {Name: "name2", Label: "label2", Format: "format2"}}} labels := params.GetColumnsLabel() expression.AssertEqual(t, len(labels), 2) expression.AssertEqual(t, labels[0], "label1") diff --git a/internals/export/worker.go b/internals/export/worker.go index 9b25c5b3..3d29c59c 100644 --- a/internals/export/worker.go +++ b/internals/export/worker.go @@ -5,6 +5,8 @@ import ( "context" "encoding/csv" "fmt" + "github.com/myrteametrics/myrtea-engine-api/v5/internals/notifier" + "github.com/myrteametrics/myrtea-engine-api/v5/internals/notifier/notification" "go.uber.org/zap" "os" "path/filepath" @@ -99,11 +101,28 @@ func 
(e *ExportWorker) finalise() { // It handles one queueItem at a time and when finished it stops the goroutine func (e *ExportWorker) Start(item WrapperItem, ctx context.Context) { defer e.finalise() + item.Status = StatusRunning + e.Mutex.Lock() e.QueueItem = item - e.QueueItem.Status = StatusRunning e.Mutex.Unlock() + // send notification to user (non-blocking) + go func(wrapperItem WrapperItem) { + _ = notifier.C().SendToUserLogins( + ExportNotification{ + BaseNotification: notification.BaseNotification{ + Id: 0, + IsRead: false, + Type: "ExportNotification", + Persistent: false, + }, + Export: wrapperItem, + Status: ExportNotificationStarted, + }, + wrapperItem.Users) + }(item) + // create file path := filepath.Join(e.BasePath, item.FileName) // check if file not already exists diff --git a/internals/export/wrapper.go b/internals/export/wrapper.go index 200ae45a..8ec51c20 100644 --- a/internals/export/wrapper.go +++ b/internals/export/wrapper.go @@ -5,11 +5,13 @@ import ( "github.com/google/uuid" "github.com/myrteametrics/myrtea-engine-api/v5/internals/notifier" "github.com/myrteametrics/myrtea-engine-api/v5/internals/notifier/notification" + "github.com/myrteametrics/myrtea-engine-api/v5/internals/security" "github.com/myrteametrics/myrtea-engine-api/v5/internals/security/users" "github.com/myrteametrics/myrtea-sdk/v4/engine" "go.uber.org/zap" "os" "path/filepath" + "strings" "sync" "time" ) @@ -26,6 +28,8 @@ const ( StatusDone = 2 StatusError = 3 StatusCanceled = 4 + + randCharSet = "abcdefghijklmnopqrstuvwxyz0123456789" ) // WrapperItem represents an export demand @@ -70,6 +74,15 @@ func NewWrapperItem(facts []engine.Fact, fileName string, params CSVParameters, for _, fact := range facts { factIDs = append(factIDs, fact.ID) } + + // file extension should be gz + if !strings.HasSuffix(fileName, ".gz") { + fileName += ".gz" + } + + // add random string to avoid multiple files with same name + fileName = security.RandStringWithCharset(5, randCharSet) + "_" + 
fileName + return &WrapperItem{ Users: append([]string{}, user.Login), Id: uuid.New().String(), @@ -96,51 +109,51 @@ func NewWrapper(basePath string, workersCount, diskRetentionDays, queueMaxSize i workerCount: workersCount, } - wrapper.archive.Store("c7f0044b-29f7-4c26-ab56-04e109683637", WrapperItem{ - Users: []string{"admin"}, - Date: time.Now(), - Status: StatusPending, - Id: "c7f0044b-29f7-4c26-ab56-04e109683637", - FactIDs: []int64{1, 2, 3}, - FileName: "export.csv.tar.gz", - }) - - wrapper.archive.Store("736ba596-7399-422e-b241-1407581cf454", WrapperItem{ - Users: []string{"admin"}, - Date: time.Now(), - Status: StatusRunning, - Id: "736ba596-7399-422e-b241-1407581cf454", - FactIDs: []int64{3, 6}, - FileName: "export-23.csv.tar.gz", - }) - - wrapper.archive.Store("5ea87155-7ea5-4152-aec5-386871dbfe1c", WrapperItem{ - Users: []string{"admin"}, - Date: time.Now().AddDate(0, 0, -3), - Status: StatusDone, - Id: "5ea87155-7ea5-4152-aec5-386871dbfe1c", - FactIDs: []int64{2, 3, 6}, - FileName: "exportee-236.csv.tar.gz", - }) - - wrapper.archive.Store("9fb91d1f-5b9c-4856-8be4-436831d2596e", WrapperItem{ - Users: []string{"admin"}, - Date: time.Now().AddDate(0, 0, -1), - Status: StatusError, - Id: "9fb91d1f-5b9c-4856-8be4-436831d2596e", - FactIDs: []int64{22, 23, 6}, - FileName: "exporteeqsdqsd-236.csv.tar.gz", - Error: "error while exporting", - }) - - wrapper.archive.Store("d0502ac6-8d99-4532-a278-e3e7bd1c887b", WrapperItem{ - Users: []string{"admin"}, - Date: time.Now(), - Status: StatusDone, - Id: "d0502ac6-8d99-4532-a278-e3e7bd1c887b", - FactIDs: []int64{1, 23, 26}, - FileName: "finisedexport-236.csv.tar.gz", - }) + //wrapper.archive.Store("c7f0044b-29f7-4c26-ab56-04e109683637", WrapperItem{ + // Users: []string{"admin"}, + // Date: time.Now(), + // Status: StatusPending, + // Id: "c7f0044b-29f7-4c26-ab56-04e109683637", + // FactIDs: []int64{1, 2, 3}, + // FileName: "export.csv.gz", + //}) + // + //wrapper.archive.Store("736ba596-7399-422e-b241-1407581cf454", 
WrapperItem{ + // Users: []string{"admin"}, + // Date: time.Now(), + // Status: StatusRunning, + // Id: "736ba596-7399-422e-b241-1407581cf454", + // FactIDs: []int64{3, 6}, + // FileName: "export-23.csv.gz", + //}) + // + //wrapper.archive.Store("5ea87155-7ea5-4152-aec5-386871dbfe1c", WrapperItem{ + // Users: []string{"admin"}, + // Date: time.Now().AddDate(0, 0, -3), + // Status: StatusDone, + // Id: "5ea87155-7ea5-4152-aec5-386871dbfe1c", + // FactIDs: []int64{2, 3, 6}, + // FileName: "exportee-236.csv.gz", + //}) + // + //wrapper.archive.Store("9fb91d1f-5b9c-4856-8be4-436831d2596e", WrapperItem{ + // Users: []string{"admin"}, + // Date: time.Now().AddDate(0, 0, -1), + // Status: StatusError, + // Id: "9fb91d1f-5b9c-4856-8be4-436831d2596e", + // FactIDs: []int64{22, 23, 6}, + // FileName: "exporteeqsdqsd-236.csv.gz", + // Error: "error while exporting", + //}) + // + //wrapper.archive.Store("d0502ac6-8d99-4532-a278-e3e7bd1c887b", WrapperItem{ + // Users: []string{"admin"}, + // Date: time.Now(), + // Status: StatusDone, + // Id: "d0502ac6-8d99-4532-a278-e3e7bd1c887b", + // FactIDs: []int64{1, 23, 26}, + // FileName: "finisedexport-236.csv.gz", + //}) return wrapper } @@ -161,6 +174,26 @@ func (ew *Wrapper) Init(ctx context.Context) { for i := 0; i < ew.workerCount; i++ { ew.workers = append(ew.workers, NewExportWorker(i, ew.basePath, ew.success)) } + + // check if destination folder exists + _, err := os.Stat(ew.basePath) + if err != nil { + + if os.IsNotExist(err) { + zap.L().Info("The export directory not exists, trying to create...", zap.String("EXPORT_BASE_PATH", ew.basePath)) + + if err := os.MkdirAll(ew.basePath, os.ModePerm); err != nil { + zap.L().Fatal("Couldn't create export directory", zap.String("EXPORT_BASE_PATH", ew.basePath), zap.Error(err)) + } else { + zap.L().Info("The export directory has been successfully created.") + } + + } else { + zap.L().Fatal("Couldn't access to export directory", zap.String("EXPORT_BASE_PATH", ew.basePath), 
zap.Error(err)) + } + + } + go ew.startDispatcher(ctx) } @@ -419,14 +452,8 @@ func (ew *Wrapper) dispatchExportQueue(ctx context.Context) { worker.Available = false worker.Mutex.Unlock() - go worker.Start(item, ctx) - // send notification to user (non-blocking) - go func(wrapperItem WrapperItem) { - _ = notifier.C().SendToUserLogins( - ew.createExportNotification(ExportNotificationStarted, &wrapperItem), - wrapperItem.Users) - }(item) + go worker.Start(item, ctx) } } diff --git a/internals/export/wrapper_test.go b/internals/export/wrapper_test.go index 016be0f8..102ba8c0 100644 --- a/internals/export/wrapper_test.go +++ b/internals/export/wrapper_test.go @@ -9,6 +9,7 @@ import ( "github.com/myrteametrics/myrtea-sdk/v4/expression" "os" "path/filepath" + "strings" "testing" "time" ) @@ -36,7 +37,7 @@ func TestNewWrapperItem(t *testing.T) { expression.AssertEqual(t, factsEquals(item.Facts, []engine.Fact{{ID: 1}}), true) expression.AssertEqual(t, item.Params.Equals(CSVParameters{}), true) expression.AssertEqual(t, item.Status, StatusPending) - expression.AssertEqual(t, item.FileName, "test.txt") + expression.AssertEqual(t, strings.HasSuffix(item.FileName, "test.txt.gz"), true, "test.txt.gz") expression.AssertNotEqual(t, len(item.Users), 0) expression.AssertEqual(t, item.Users[0], "test") } diff --git a/internals/tasker/situation_reporting.go b/internals/tasker/situation_reporting.go index d5a24bd1..e0f7ebb0 100644 --- a/internals/tasker/situation_reporting.go +++ b/internals/tasker/situation_reporting.go @@ -214,7 +214,7 @@ func (task SituationReportingTask) Perform(key string, context ContextData) erro return err } - csvAttachment, err := export.ConvertHitsToCSV(fullHits, export.CSVParameters{Columns: task.Columns, Separator: task.Separator}, true) + csvAttachment, err := export.ConvertHitsToCSV(fullHits, export.CSVParameters{Columns: task.Columns, Separator: string(task.Separator)}, true) if err != nil { return err } From 5fbcc3b47e6d6e57408c056dd00cce173edb222e Mon 
Sep 17 00:00:00 2001 From: Paul Meyer Date: Thu, 21 Dec 2023 18:04:33 +0100 Subject: [PATCH 26/35] some changes & added title field --- internals/export/utils.go | 16 ++++++++ internals/export/worker.go | 14 +------ internals/export/wrapper.go | 44 ++++++--------------- internals/handlers/export_handlers.go | 8 ++-- internals/handlers/notification_handlers.go | 19 ++++----- internals/handlers/notifier_handlers.go | 2 +- 6 files changed, 44 insertions(+), 59 deletions(-) diff --git a/internals/export/utils.go b/internals/export/utils.go index bc67bf26..741c4817 100644 --- a/internals/export/utils.go +++ b/internals/export/utils.go @@ -1,5 +1,7 @@ package export +import "github.com/myrteametrics/myrtea-engine-api/v5/internals/notifier/notification" + type CSVParameters struct { Columns []Column `json:"columns"` Separator string `json:"separator"` @@ -50,3 +52,17 @@ func (p CSVParameters) GetColumnsLabel() []string { } return columns } + +// createExportNotification creates an export notification using given parameters +func createExportNotification(status int, item *WrapperItem) ExportNotification { + return ExportNotification{ + BaseNotification: notification.BaseNotification{ + Id: 0, + IsRead: false, + Type: "ExportNotification", + Persistent: false, + }, + Export: *item, + Status: status, + } +} diff --git a/internals/export/worker.go b/internals/export/worker.go index 3d29c59c..2bca4cea 100644 --- a/internals/export/worker.go +++ b/internals/export/worker.go @@ -6,7 +6,6 @@ import ( "encoding/csv" "fmt" "github.com/myrteametrics/myrtea-engine-api/v5/internals/notifier" - "github.com/myrteametrics/myrtea-engine-api/v5/internals/notifier/notification" "go.uber.org/zap" "os" "path/filepath" @@ -85,7 +84,7 @@ func (e *ExportWorker) finalise() { e.QueueItem.Status = StatusError } // set status to done if no error occurred - if e.QueueItem.Status != StatusError { + if e.QueueItem.Status != StatusError && e.QueueItem.Status != StatusCanceled { e.QueueItem.Status = 
StatusDone } e.Mutex.Unlock() @@ -110,16 +109,7 @@ func (e *ExportWorker) Start(item WrapperItem, ctx context.Context) { // send notification to user (non-blocking) go func(wrapperItem WrapperItem) { _ = notifier.C().SendToUserLogins( - ExportNotification{ - BaseNotification: notification.BaseNotification{ - Id: 0, - IsRead: false, - Type: "ExportNotification", - Persistent: false, - }, - Export: wrapperItem, - Status: ExportNotificationStarted, - }, + createExportNotification(ExportNotificationStarted, &item), wrapperItem.Users) }(item) diff --git a/internals/export/wrapper.go b/internals/export/wrapper.go index 8ec51c20..cc09425f 100644 --- a/internals/export/wrapper.go +++ b/internals/export/wrapper.go @@ -4,14 +4,12 @@ import ( "context" "github.com/google/uuid" "github.com/myrteametrics/myrtea-engine-api/v5/internals/notifier" - "github.com/myrteametrics/myrtea-engine-api/v5/internals/notifier/notification" "github.com/myrteametrics/myrtea-engine-api/v5/internals/security" "github.com/myrteametrics/myrtea-engine-api/v5/internals/security/users" "github.com/myrteametrics/myrtea-sdk/v4/engine" "go.uber.org/zap" "os" "path/filepath" - "strings" "sync" "time" ) @@ -40,6 +38,7 @@ type WrapperItem struct { Error string `json:"error"` Status int `json:"status"` FileName string `json:"fileName"` + Title string `json:"title"` Date time.Time `json:"date"` Users []string `json:"-"` Params CSVParameters `json:"-"` @@ -69,19 +68,15 @@ type Wrapper struct { } // NewWrapperItem creates a new export wrapper item -func NewWrapperItem(facts []engine.Fact, fileName string, params CSVParameters, user users.User) *WrapperItem { +func NewWrapperItem(facts []engine.Fact, title string, params CSVParameters, user users.User) *WrapperItem { var factIDs []int64 for _, fact := range facts { factIDs = append(factIDs, fact.ID) } // file extension should be gz - if !strings.HasSuffix(fileName, ".gz") { - fileName += ".gz" - } - // add random string to avoid multiple files with same name - 
fileName = security.RandStringWithCharset(5, randCharSet) + "_" + fileName + fileName := security.RandStringWithCharset(5, randCharSet) + "_" + title + ".csv.gz" return &WrapperItem{ Users: append([]string{}, user.Login), @@ -92,6 +87,7 @@ func NewWrapperItem(facts []engine.Fact, fileName string, params CSVParameters, Status: StatusPending, Error: "", FileName: fileName, + Title: title, Params: params, } } @@ -115,7 +111,7 @@ func NewWrapper(basePath string, workersCount, diskRetentionDays, queueMaxSize i // Status: StatusPending, // Id: "c7f0044b-29f7-4c26-ab56-04e109683637", // FactIDs: []int64{1, 2, 3}, - // FileName: "export.csv.gz", + // Title: "export.csv.gz", //}) // //wrapper.archive.Store("736ba596-7399-422e-b241-1407581cf454", WrapperItem{ @@ -124,7 +120,7 @@ func NewWrapper(basePath string, workersCount, diskRetentionDays, queueMaxSize i // Status: StatusRunning, // Id: "736ba596-7399-422e-b241-1407581cf454", // FactIDs: []int64{3, 6}, - // FileName: "export-23.csv.gz", + // Title: "export-23.csv.gz", //}) // //wrapper.archive.Store("5ea87155-7ea5-4152-aec5-386871dbfe1c", WrapperItem{ @@ -133,7 +129,7 @@ func NewWrapper(basePath string, workersCount, diskRetentionDays, queueMaxSize i // Status: StatusDone, // Id: "5ea87155-7ea5-4152-aec5-386871dbfe1c", // FactIDs: []int64{2, 3, 6}, - // FileName: "exportee-236.csv.gz", + // Title: "exportee-236.csv.gz", //}) // //wrapper.archive.Store("9fb91d1f-5b9c-4856-8be4-436831d2596e", WrapperItem{ @@ -142,7 +138,7 @@ func NewWrapper(basePath string, workersCount, diskRetentionDays, queueMaxSize i // Status: StatusError, // Id: "9fb91d1f-5b9c-4856-8be4-436831d2596e", // FactIDs: []int64{22, 23, 6}, - // FileName: "exporteeqsdqsd-236.csv.gz", + // Title: "exporteeqsdqsd-236.csv.gz", // Error: "error while exporting", //}) // @@ -152,7 +148,7 @@ func NewWrapper(basePath string, workersCount, diskRetentionDays, queueMaxSize i // Status: StatusDone, // Id: "d0502ac6-8d99-4532-a278-e3e7bd1c887b", // FactIDs: []int64{1, 
23, 26}, - // FileName: "finisedexport-236.csv.gz", + // Title: "finisedexport-236.csv.gz", //}) return wrapper @@ -218,7 +214,7 @@ func factsEquals(a, b []engine.Fact) bool { } // AddToQueue Adds a new export to the export worker queue -func (ew *Wrapper) AddToQueue(facts []engine.Fact, fileName string, params CSVParameters, user users.User) (*WrapperItem, int) { +func (ew *Wrapper) AddToQueue(facts []engine.Fact, title string, params CSVParameters, user users.User) (*WrapperItem, int) { ew.queueMutex.Lock() defer ew.queueMutex.Unlock() @@ -242,7 +238,7 @@ func (ew *Wrapper) AddToQueue(facts []engine.Fact, fileName string, params CSVPa return nil, CodeQueueFull } - item := NewWrapperItem(facts, fileName, params, user) + item := NewWrapperItem(facts, title, params, user) ew.queue = append(ew.queue, item) return item, CodeAdded } @@ -277,7 +273,7 @@ func (ew *Wrapper) startDispatcher(context context.Context) { // send notification to user (non-blocking) go func(wrapperItem WrapperItem) { _ = notifier.C().SendToUserLogins( - ew.createExportNotification(ExportNotificationArchived, &wrapperItem), + createExportNotification(ExportNotificationArchived, &wrapperItem), wrapperItem.Users) }(item) case <-ticker.C: @@ -294,20 +290,6 @@ func (ew *Wrapper) startDispatcher(context context.Context) { } } -// createExportNotification creates an export notification using given parameters -func (ew *Wrapper) createExportNotification(status int, item *WrapperItem) ExportNotification { - return ExportNotification{ - BaseNotification: notification.BaseNotification{ - Id: 0, - IsRead: false, - Type: "ExportNotification", - Persistent: false, - }, - Export: *item, - Status: status, - } -} - // checkForExpiredFiles checks for expired files in the export directory and deletes them // it also deletes the done tasks that are older than diskRetentionDays func (ew *Wrapper) checkForExpiredFiles() error { @@ -331,7 +313,7 @@ func (ew *Wrapper) checkForExpiredFiles() error { // send notification 
to user (non-blocking) go func(wrapperItem WrapperItem) { _ = notifier.C().SendToUserLogins( - ew.createExportNotification(ExportNotificationDeleted, &wrapperItem), + createExportNotification(ExportNotificationDeleted, &wrapperItem), wrapperItem.Users) }(data) diff --git a/internals/handlers/export_handlers.go b/internals/handlers/export_handlers.go index 2769143c..9590d360 100644 --- a/internals/handlers/export_handlers.go +++ b/internals/handlers/export_handlers.go @@ -29,8 +29,8 @@ func NewExportHandler(exportWrapper *export.Wrapper) *ExportHandler { // ExportRequest represents a request for an export type ExportRequest struct { export.CSVParameters - FactIDs []int64 `json:"factIDs"` - FileName string `json:"fileName"` + FactIDs []int64 `json:"factIDs"` + Title string `json:"title"` } // ExportFactStreamed godoc @@ -77,7 +77,7 @@ func HandleStreamedExport(requestContext context.Context, w http.ResponseWriter, w.Header().Set("Connection", "Keep-Alive") w.Header().Set("Transfer-Encoding", "chunked") w.Header().Set("X-Content-Type-Options", "nosniff") - w.Header().Set("Content-Disposition", "attachment; filename="+strconv.Quote(request.FileName)) + w.Header().Set("Content-Disposition", "attachment; filename="+strconv.Quote(request.Title+".csv")) w.Header().Set("Content-Type", "application/octet-stream") facts := findCombineFacts(request.FactIDs) @@ -306,7 +306,7 @@ func (e *ExportHandler) ExportFact(w http.ResponseWriter, r *http.Request) { return } - item, status := e.exportWrapper.AddToQueue(facts, request.FileName, request.CSVParameters, userCtx.User) + item, status := e.exportWrapper.AddToQueue(facts, request.Title, request.CSVParameters, userCtx.User) switch status { case export.CodeAdded: diff --git a/internals/handlers/notification_handlers.go b/internals/handlers/notification_handlers.go index d619446f..f097e091 100644 --- a/internals/handlers/notification_handlers.go +++ b/internals/handlers/notification_handlers.go @@ -1,11 +1,6 @@ package handlers import 
( - "github.com/myrteametrics/myrtea-engine-api/v5/internals/export" - "net/http" - "strconv" - "time" - "github.com/go-chi/chi/v5" "github.com/myrteametrics/myrtea-engine-api/v5/internals/dbutils" "github.com/myrteametrics/myrtea-engine-api/v5/internals/handlers/render" @@ -13,6 +8,8 @@ import ( "github.com/myrteametrics/myrtea-engine-api/v5/internals/notifier/notification" "github.com/myrteametrics/myrtea-engine-api/v5/internals/security/users" "go.uber.org/zap" + "net/http" + "strconv" ) // GetNotifications godoc @@ -70,13 +67,13 @@ func GetNotifications(w http.ResponseWriter, r *http.Request) { } // generate rando mock notifications for testing 1 to 15 - for i := 2; i < 17; i++ { - notifications = append(notifications, notification.NewMockNotification(int64(i), "OK", "MockNotification", "Toodododo", "You must do something lol", time.Now().AddDate(0, 0, -i), []int64{1}, map[string]interface{}{"issueId": 1})) - } + //for i := 2; i < 17; i++ { + // notifications = append(notifications, notification.NewMockNotification(int64(i), "OK", "MockNotification", "Toodododo", "You must do something lol", time.Now().AddDate(0, 0, -i), []int64{1}, map[string]interface{}{"issueId": 1})) + //} - notifications = append(notifications, notification.NewMockNotification(1, "OK", "MockNotification", "Toodododo", "You must do something lol", time.Now(), []int64{1}, map[string]interface{}{"issueId": 1})) - notifications = append(notifications, export.NewExportNotification(2, export.WrapperItem{Id: "test"}, 1)) - notifications = append(notifications, notification.NewBaseNotification(3, false, true)) + //notifications = append(notifications, notification.NewMockNotification(1, "OK", "MockNotification", "Toodododo", "You must do something lol", time.Now(), []int64{1}, map[string]interface{}{"issueId": 1})) + //notifications = append(notifications, export.NewExportNotification(2, export.WrapperItem{Id: "test"}, 1)) + //notifications = append(notifications, 
notification.NewBaseNotification(3, false, true)) render.JSON(w, r, notifications) } diff --git a/internals/handlers/notifier_handlers.go b/internals/handlers/notifier_handlers.go index cecf1be2..bf24c300 100644 --- a/internals/handlers/notifier_handlers.go +++ b/internals/handlers/notifier_handlers.go @@ -48,7 +48,7 @@ func NotificationsWSRegister(w http.ResponseWriter, r *http.Request) { // for { // select { // case <-ticker.C: - // notifier.C().SendToUsers(notification.ExportNotification{Status: export.StatusPending, Export: export.WrapperItem{Id: uuid.New().String(), FileName: "test.bla"}}, []users.UserWithPermissions{user}) + // notifier.C().SendToUsers(notification.ExportNotification{Status: export.StatusPending, Export: export.WrapperItem{Id: uuid.New().String(), Title: "test.bla"}}, []users.UserWithPermissions{user}) // zap.L().Info("send notification") // case <-after: // return From ac4220904443a143dc47afa342ff952e8e359f95 Mon Sep 17 00:00:00 2001 From: Paul Meyer Date: Thu, 21 Dec 2023 19:02:40 +0100 Subject: [PATCH 27/35] added new status export canceling --- internals/export/wrapper.go | 25 ++++++++++++++++++------- internals/handlers/export_handlers.go | 6 ++++++ 2 files changed, 24 insertions(+), 7 deletions(-) diff --git a/internals/export/wrapper.go b/internals/export/wrapper.go index cc09425f..19056393 100644 --- a/internals/export/wrapper.go +++ b/internals/export/wrapper.go @@ -10,6 +10,7 @@ import ( "go.uber.org/zap" "os" "path/filepath" + "strings" "sync" "time" ) @@ -21,11 +22,12 @@ const ( CodeQueueFull = -2 // WrapperItem statuses - StatusPending = 0 - StatusRunning = 1 - StatusDone = 2 - StatusError = 3 - StatusCanceled = 4 + StatusPending = 0 + StatusRunning = 1 + StatusDone = 2 + StatusError = 3 + StatusCanceled = 4 + StatusCanceling = 5 randCharSet = "abcdefghijklmnopqrstuvwxyz0123456789" ) @@ -76,7 +78,8 @@ func NewWrapperItem(facts []engine.Fact, title string, params CSVParameters, use // file extension should be gz // add random 
string to avoid multiple files with same name - fileName := security.RandStringWithCharset(5, randCharSet) + "_" + title + ".csv.gz" + fileName := security.RandStringWithCharset(5, randCharSet) + "_" + + strings.ReplaceAll(title, " ", "_") + ".csv.gz" return &WrapperItem{ Users: append([]string{}, user.Login), @@ -219,7 +222,7 @@ func (ew *Wrapper) AddToQueue(facts []engine.Fact, title string, params CSVParam defer ew.queueMutex.Unlock() for _, queueItem := range ew.queue { - if !factsEquals(queueItem.Facts, facts) || !queueItem.Params.Equals(params) { + if !factsEquals(queueItem.Facts, facts) || !queueItem.Params.Equals(params) || queueItem.Title != title { continue } @@ -536,6 +539,12 @@ func (ew *Wrapper) DeleteExport(id string, user users.User) bool { continue } + // worker found but already canceling + if worker.QueueItem.Status == StatusCanceling { + worker.Mutex.Unlock() + return false + } + // remove user from item if len(worker.QueueItem.Users) == 1 { // cancel worker by sending a message on the cancel channel @@ -545,6 +554,7 @@ func (ew *Wrapper) DeleteExport(id string, user users.User) bool { case worker.Cancel <- true: default: } + worker.QueueItem.Status = StatusCanceling worker.Mutex.Unlock() return true } @@ -552,6 +562,7 @@ func (ew *Wrapper) DeleteExport(id string, user users.User) bool { for i, u := range worker.QueueItem.Users { if u == user.Login { worker.QueueItem.Users = append(worker.QueueItem.Users[:i], worker.QueueItem.Users[i+1:]...) + // TODO: send message? or change return to say that user was deleted. 
break } } diff --git a/internals/handlers/export_handlers.go b/internals/handlers/export_handlers.go index 9590d360..81f84652 100644 --- a/internals/handlers/export_handlers.go +++ b/internals/handlers/export_handlers.go @@ -299,6 +299,12 @@ func (e *ExportHandler) ExportFact(w http.ResponseWriter, r *http.Request) { return } + if len(request.Title) == 0 { + zap.L().Warn("Missing title (len is 0) in export request") + render.Error(w, r, render.ErrAPIMissingParam, errors.New("missing title (len is 0)")) + return + } + facts := findCombineFacts(request.FactIDs) if len(facts) == 0 { zap.L().Warn("No fact was found in export request") From f81ceaf16653682d8e9941f5337a21b57d698c0f Mon Sep 17 00:00:00 2001 From: Paul Meyer Date: Fri, 22 Dec 2023 15:40:07 +0100 Subject: [PATCH 28/35] some fixes --- internals/export/wrapper.go | 81 ++++++--------------- internals/handlers/export_handlers.go | 17 +++-- internals/handlers/notification_handlers.go | 8 -- 3 files changed, 36 insertions(+), 70 deletions(-) diff --git a/internals/export/wrapper.go b/internals/export/wrapper.go index 19056393..bd7054a5 100644 --- a/internals/export/wrapper.go +++ b/internals/export/wrapper.go @@ -29,6 +29,12 @@ const ( StatusCanceled = 4 StatusCanceling = 5 + // Delete return codes + DeleteExportNotFound = 0 + DeleteExportDeleted = 1 + DeleteExportUserDeleted = 2 + DeleteExportCanceled = 3 + randCharSet = "abcdefghijklmnopqrstuvwxyz0123456789" ) @@ -108,52 +114,6 @@ func NewWrapper(basePath string, workersCount, diskRetentionDays, queueMaxSize i workerCount: workersCount, } - //wrapper.archive.Store("c7f0044b-29f7-4c26-ab56-04e109683637", WrapperItem{ - // Users: []string{"admin"}, - // Date: time.Now(), - // Status: StatusPending, - // Id: "c7f0044b-29f7-4c26-ab56-04e109683637", - // FactIDs: []int64{1, 2, 3}, - // Title: "export.csv.gz", - //}) - // - //wrapper.archive.Store("736ba596-7399-422e-b241-1407581cf454", WrapperItem{ - // Users: []string{"admin"}, - // Date: time.Now(), - // Status: 
StatusRunning, - // Id: "736ba596-7399-422e-b241-1407581cf454", - // FactIDs: []int64{3, 6}, - // Title: "export-23.csv.gz", - //}) - // - //wrapper.archive.Store("5ea87155-7ea5-4152-aec5-386871dbfe1c", WrapperItem{ - // Users: []string{"admin"}, - // Date: time.Now().AddDate(0, 0, -3), - // Status: StatusDone, - // Id: "5ea87155-7ea5-4152-aec5-386871dbfe1c", - // FactIDs: []int64{2, 3, 6}, - // Title: "exportee-236.csv.gz", - //}) - // - //wrapper.archive.Store("9fb91d1f-5b9c-4856-8be4-436831d2596e", WrapperItem{ - // Users: []string{"admin"}, - // Date: time.Now().AddDate(0, 0, -1), - // Status: StatusError, - // Id: "9fb91d1f-5b9c-4856-8be4-436831d2596e", - // FactIDs: []int64{22, 23, 6}, - // Title: "exporteeqsdqsd-236.csv.gz", - // Error: "error while exporting", - //}) - // - //wrapper.archive.Store("d0502ac6-8d99-4532-a278-e3e7bd1c887b", WrapperItem{ - // Users: []string{"admin"}, - // Date: time.Now(), - // Status: StatusDone, - // Id: "d0502ac6-8d99-4532-a278-e3e7bd1c887b", - // FactIDs: []int64{1, 23, 26}, - // Title: "finisedexport-236.csv.gz", - //}) - return wrapper } @@ -491,14 +451,18 @@ func (ew *Wrapper) GetUserExport(id string, user users.User) (item WrapperItem, } // DeleteExport removes an export from the queue / archive, or cancels it if it is running -// returns true if the export was found and deleted, false otherwise +// returns : +// DeleteExportNotFound (0): if the export was not found +// DeleteExportDeleted (1): if the export was found and deleted +// DeleteExportUserDeleted (2): if the export was found and the user was removed +// DeleteExportCanceled (3): if the export was found and the cancellation request was made // this function is similar to GetUserExport, but it avoids iterating over all exports, thus it is faster -func (ew *Wrapper) DeleteExport(id string, user users.User) bool { +func (ew *Wrapper) DeleteExport(id string, user users.User) int { // start with archived items if item, ok := ew.FindArchive(id, user); ok { if 
len(item.Users) == 1 { ew.archive.Delete(id) - return true + return DeleteExportDeleted } // remove user from item for i, u := range item.Users { @@ -508,7 +472,7 @@ func (ew *Wrapper) DeleteExport(id string, user users.User) bool { } } ew.archive.Store(id, item) - return true + return DeleteExportUserDeleted } // then check the queue @@ -524,9 +488,12 @@ func (ew *Wrapper) DeleteExport(id string, user users.User) bool { } if len(item.Users) == 0 { ew.queue = append(ew.queue[:i], ew.queue[i+1:]...) + ew.queueMutex.Unlock() + return DeleteExportDeleted } + ew.queueMutex.Unlock() - return true + return DeleteExportUserDeleted } } ew.queueMutex.Unlock() @@ -542,7 +509,7 @@ func (ew *Wrapper) DeleteExport(id string, user users.User) bool { // worker found but already canceling if worker.QueueItem.Status == StatusCanceling { worker.Mutex.Unlock() - return false + return DeleteExportNotFound } // remove user from item @@ -556,21 +523,21 @@ func (ew *Wrapper) DeleteExport(id string, user users.User) bool { } worker.QueueItem.Status = StatusCanceling worker.Mutex.Unlock() - return true + return DeleteExportCanceled } for i, u := range worker.QueueItem.Users { if u == user.Login { worker.QueueItem.Users = append(worker.QueueItem.Users[:i], worker.QueueItem.Users[i+1:]...) - // TODO: send message? or change return to say that user was deleted. 
- break + worker.Mutex.Unlock() + return DeleteExportUserDeleted } } worker.Mutex.Unlock() - return true + return DeleteExportNotFound } - return false + return DeleteExportNotFound } // ContainsUser checks if user is in item diff --git a/internals/handlers/export_handlers.go b/internals/handlers/export_handlers.go index 81f84652..8d577036 100644 --- a/internals/handlers/export_handlers.go +++ b/internals/handlers/export_handlers.go @@ -235,7 +235,8 @@ func (e *ExportHandler) GetExport(w http.ResponseWriter, r *http.Request) { // @Tags Exports // @Produce json // @Security Bearer -// @Success 204 "Status OK" +// @Success 202 "Status Accepted: export found & cancellation request has been taken into account & will be processed" +// @Success 204 "Status OK: export was found and deleted" // @Failure 400 "Bad Request: missing export id / id is not an integer" // @Failure 403 "Status Forbidden: missing permission" // @Failure 404 "Status Not Found: export not found" @@ -254,13 +255,19 @@ func (e *ExportHandler) DeleteExport(w http.ResponseWriter, r *http.Request) { return } - ok := e.exportWrapper.DeleteExport(id, userCtx.User) - if !ok { + status := e.exportWrapper.DeleteExport(id, userCtx.User) + + switch status { + case export.DeleteExportDeleted: + fallthrough + case export.DeleteExportUserDeleted: + w.WriteHeader(http.StatusNoContent) + case export.DeleteExportCanceled: + w.WriteHeader(http.StatusAccepted) + default: render.Error(w, r, render.ErrAPIDBResourceNotFound, errors.New("export not found")) - return } - w.WriteHeader(http.StatusNoContent) } // ExportFact godoc diff --git a/internals/handlers/notification_handlers.go b/internals/handlers/notification_handlers.go index f097e091..d7b4290e 100644 --- a/internals/handlers/notification_handlers.go +++ b/internals/handlers/notification_handlers.go @@ -66,14 +66,6 @@ func GetNotifications(w http.ResponseWriter, r *http.Request) { return } - // generate rando mock notifications for testing 1 to 15 - //for i := 2; i 
< 17; i++ { - // notifications = append(notifications, notification.NewMockNotification(int64(i), "OK", "MockNotification", "Toodododo", "You must do something lol", time.Now().AddDate(0, 0, -i), []int64{1}, map[string]interface{}{"issueId": 1})) - //} - - //notifications = append(notifications, notification.NewMockNotification(1, "OK", "MockNotification", "Toodododo", "You must do something lol", time.Now(), []int64{1}, map[string]interface{}{"issueId": 1})) - //notifications = append(notifications, export.NewExportNotification(2, export.WrapperItem{Id: "test"}, 1)) - //notifications = append(notifications, notification.NewBaseNotification(3, false, true)) render.JSON(w, r, notifications) } From 3c1835c86e90e6be99a218a985a3282f32216135 Mon Sep 17 00:00:00 2001 From: Paul Meyer Date: Tue, 26 Dec 2023 16:03:34 +0100 Subject: [PATCH 29/35] added download export --- config/engine-api.toml | 16 +++++-- internals/export/wrapper.go | 20 ++++---- internals/export/wrapper_test.go | 2 +- internals/handlers/export_handlers.go | 67 +++++++++++++++++++++++---- internals/handlers/oidc_handlers.go | 4 +- internals/handlers/render/render.go | 49 +++++++++++++++++--- internals/router/routes.go | 1 + main.go | 21 +++++---- 8 files changed, 140 insertions(+), 40 deletions(-) diff --git a/config/engine-api.toml b/config/engine-api.toml index 3d48e814..1f35904d 100644 --- a/config/engine-api.toml +++ b/config/engine-api.toml @@ -234,14 +234,15 @@ AUTHENTICATION_OIDC_FRONT_END_URL = "http://127.0.0.1:4200" AUTHENTICATION_OIDC_ENCRYPTION_KEY = "thisis24characterslongs." # NOTIFICATION_LIFETIME: The lifetime of a notification in the database. +# Default value: "168h" NOTIFICATION_LIFETIME = "168h" # 168h = 7 days, available units are "ns", "us" (or "µs"), "ms", "s", "m", "h" # Path to directory where the resulting export files will be stored. 
-# Default: exports/ +# Default value: "exports/" EXPORT_BASE_PATH = "exports/" # Number of days before one export file will be auto deleted -# Default: 4 +# Default value: 4 EXPORT_DISK_RETENTION_DAYS = 4 # Export queue max size, any export request that is made when queue is full will be refused. @@ -250,4 +251,13 @@ EXPORT_QUEUE_MAX_SIZE = 30 # Number of concurrent export workers # Default value: 4 -EXPORT_WORKERS_COUNT = 4 \ No newline at end of file +EXPORT_WORKERS_COUNT = 4 + +# Whether download must be directly streamed through http or handled by an external web server +# Default value: true +EXPORT_DIRECT_DOWNLOAD = true + +# Reverse proxy like nginx, apache gives direct access to the exports directory at a specific path +# Full URL to the exports directory +# Default value: "" +EXPORT_INDIRECT_DOWNLOAD_URL = "" \ No newline at end of file diff --git a/internals/export/wrapper.go b/internals/export/wrapper.go index bd7054a5..6666587d 100644 --- a/internals/export/wrapper.go +++ b/internals/export/wrapper.go @@ -70,7 +70,7 @@ type Wrapper struct { // Non-critical fields // Read-only parameters diskRetentionDays int - basePath string + BasePath string // public for export_handlers queueMaxSize int workerCount int } @@ -109,7 +109,7 @@ func NewWrapper(basePath string, workersCount, diskRetentionDays, queueMaxSize i success: make(chan int), archive: sync.Map{}, queueMaxSize: queueMaxSize, - basePath: basePath, + BasePath: basePath, diskRetentionDays: diskRetentionDays, workerCount: workersCount, } @@ -131,24 +131,24 @@ func (it *WrapperItem) ContainsFact(factID int64) bool { func (ew *Wrapper) Init(ctx context.Context) { // instantiate workers for i := 0; i < ew.workerCount; i++ { - ew.workers = append(ew.workers, NewExportWorker(i, ew.basePath, ew.success)) + ew.workers = append(ew.workers, NewExportWorker(i, ew.BasePath, ew.success)) } // check if destination folder exists - _, err := os.Stat(ew.basePath) + _, err := os.Stat(ew.BasePath) if err != nil { if 
os.IsNotExist(err) { - zap.L().Info("The export directory not exists, trying to create...", zap.String("EXPORT_BASE_PATH", ew.basePath)) + zap.L().Info("The export directory not exists, trying to create...", zap.String("EXPORT_BASE_PATH", ew.BasePath)) - if err := os.MkdirAll(ew.basePath, os.ModePerm); err != nil { - zap.L().Fatal("Couldn't create export directory", zap.String("EXPORT_BASE_PATH", ew.basePath), zap.Error(err)) + if err := os.MkdirAll(ew.BasePath, os.ModePerm); err != nil { + zap.L().Fatal("Couldn't create export directory", zap.String("EXPORT_BASE_PATH", ew.BasePath), zap.Error(err)) } else { zap.L().Info("The export directory has been successfully created.") } } else { - zap.L().Fatal("Couldn't access to export directory", zap.String("EXPORT_BASE_PATH", ew.basePath), zap.Error(err)) + zap.L().Fatal("Couldn't access to export directory", zap.String("EXPORT_BASE_PATH", ew.BasePath), zap.Error(err)) } } @@ -259,7 +259,7 @@ func (ew *Wrapper) checkForExpiredFiles() error { // Get all files in directory and check the last edit date // if last edit date is older than diskRetentionDays, delete the file zap.L().Info("Checking for expired files") - files, err := os.ReadDir(ew.basePath) + files, err := os.ReadDir(ew.BasePath) if err != nil { return err } @@ -292,7 +292,7 @@ func (ew *Wrapper) checkForExpiredFiles() error { continue } - filePath := filepath.Join(ew.basePath, file.Name()) + filePath := filepath.Join(ew.BasePath, file.Name()) fi, err := os.Stat(filePath) if err != nil { diff --git a/internals/export/wrapper_test.go b/internals/export/wrapper_test.go index 102ba8c0..706b4a73 100644 --- a/internals/export/wrapper_test.go +++ b/internals/export/wrapper_test.go @@ -16,7 +16,7 @@ import ( func TestNewWrapper(t *testing.T) { wrapper := NewWrapper("/tmp", 1, 1, 1) - expression.AssertEqual(t, wrapper.basePath, "/tmp") + expression.AssertEqual(t, wrapper.BasePath, "/tmp") expression.AssertEqual(t, wrapper.queueMaxSize, 1) expression.AssertEqual(t, 
wrapper.diskRetentionDays, 1) expression.AssertEqual(t, wrapper.queueMaxSize, 1) diff --git a/internals/handlers/export_handlers.go b/internals/handlers/export_handlers.go index 8d577036..0bbbe60d 100644 --- a/internals/handlers/export_handlers.go +++ b/internals/handlers/export_handlers.go @@ -11,18 +11,24 @@ import ( "github.com/myrteametrics/myrtea-engine-api/v5/internals/security/permissions" "go.uber.org/zap" "net/http" + "net/url" + "path/filepath" "strconv" "sync" ) type ExportHandler struct { - exportWrapper *export.Wrapper + exportWrapper *export.Wrapper + directDownload bool + indirectDownloadUrl string } // NewExportHandler returns a new ExportHandler -func NewExportHandler(exportWrapper *export.Wrapper) *ExportHandler { +func NewExportHandler(exportWrapper *export.Wrapper, directDownload bool, indirectDownloadUrl string) *ExportHandler { return &ExportHandler{ - exportWrapper: exportWrapper, + exportWrapper: exportWrapper, + directDownload: directDownload, + indirectDownloadUrl: indirectDownloadUrl, } } @@ -64,7 +70,7 @@ func ExportFactStreamed(w http.ResponseWriter, r *http.Request) { return } - err = HandleStreamedExport(r.Context(), w, request) + err = handleStreamedExport(r.Context(), w, request) if err != nil { render.Error(w, r, render.ErrAPIProcessError, err) } @@ -72,8 +78,8 @@ func ExportFactStreamed(w http.ResponseWriter, r *http.Request) { } -// HandleStreamedExport actually only handles CSV -func HandleStreamedExport(requestContext context.Context, w http.ResponseWriter, request ExportRequest) error { +// handleStreamedExport actually only handles CSV +func handleStreamedExport(requestContext context.Context, w http.ResponseWriter, request ExportRequest) error { w.Header().Set("Connection", "Keep-Alive") w.Header().Set("Transfer-Encoding", "chunked") w.Header().Set("X-Content-Type-Options", "nosniff") @@ -202,7 +208,7 @@ func (e *ExportHandler) GetExports(w http.ResponseWriter, r *http.Request) { // @Produce json // @Security Bearer // 
@Success 200 {object} export.WrapperItem "Status OK" -// @Failure 400 "Bad Request: missing export id / id is not an integer" +// @Failure 400 "Bad Request: missing export id" // @Failure 403 "Status Forbidden: missing permission" // @Failure 404 "Status Not Found: export not found" // @Failure 500 "internal server error" @@ -237,7 +243,7 @@ func (e *ExportHandler) GetExport(w http.ResponseWriter, r *http.Request) { // @Security Bearer // @Success 202 "Status Accepted: export found & cancellation request has been taken into account & will be processed" // @Success 204 "Status OK: export was found and deleted" -// @Failure 400 "Bad Request: missing export id / id is not an integer" +// @Failure 400 "Bad Request: missing export id" // @Failure 403 "Status Forbidden: missing permission" // @Failure 404 "Status Not Found: export not found" // @Failure 500 "internal server error" @@ -338,3 +344,48 @@ func (e *ExportHandler) ExportFact(w http.ResponseWriter, r *http.Request) { render.JSON(w, r, item) } + +// DownloadExport godoc +// @Summary Download export +// @Description Download export +// @Tags Exports +// @Produce json +// @Security Bearer +// @Failure 400 "Bad Request: missing export id" +// @Failure 403 "Status Forbidden: missing permission" +// @Failure 404 "Status Not Found: export not found" +// @Failure 500 "internal server error" +// @Router /engine/exports/{id}/download [get] +func (e *ExportHandler) DownloadExport(w http.ResponseWriter, r *http.Request) { + id := chi.URLParam(r, "id") + if id == "" { + render.Error(w, r, render.ErrAPIMissingParam, errors.New("missing id")) + return + } + + userCtx, _ := GetUserFromContext(r) + if !userCtx.HasPermission(permissions.New(permissions.TypeExport, permissions.All, permissions.ActionGet)) { + render.Error(w, r, render.ErrAPISecurityNoPermissions, errors.New("missing permission")) + return + } + + item, ok := e.exportWrapper.GetUserExport(id, userCtx.User) + if !ok { + render.Error(w, r, 
render.ErrAPIDBResourceNotFound, errors.New("export not found")) + return + } + + if e.directDownload { + path := filepath.Join(e.exportWrapper.BasePath, item.FileName) + render.StreamFile(path, item.FileName, w, r) + return + } + + path, err := url.JoinPath(e.indirectDownloadUrl, item.FileName) + if err != nil { + render.Error(w, r, render.ErrAPIProcessError, err) + return + } + + http.Redirect(w, r, path, http.StatusOK) +} diff --git a/internals/handlers/oidc_handlers.go b/internals/handlers/oidc_handlers.go index d0066d81..8ce0c6bd 100644 --- a/internals/handlers/oidc_handlers.go +++ b/internals/handlers/oidc_handlers.go @@ -24,7 +24,7 @@ func HandleOIDCRedirect(w http.ResponseWriter, r *http.Request) { handleError(w, r, "", err, render.ErrAPIProcessError) return } - render.Redirect(w, r, instanceOidc.OidcConfig.AuthCodeURL(expectedState), http.StatusFound) + http.Redirect(w, r, instanceOidc.OidcConfig.AuthCodeURL(expectedState), http.StatusFound) } func HandleOIDCCallback(w http.ResponseWriter, r *http.Request) { @@ -64,5 +64,5 @@ func HandleOIDCCallback(w http.ResponseWriter, r *http.Request) { baseURL := viper.GetString("AUTHENTICATION_OIDC_FRONT_END_URL") redirectURL := fmt.Sprintf("%s/auth/oidc/callback?token=%s", baseURL, url.QueryEscape(rawIDToken)) - render.Redirect(w, r, redirectURL, http.StatusFound) + http.Redirect(w, r, redirectURL, http.StatusFound) } diff --git a/internals/handlers/render/render.go b/internals/handlers/render/render.go index 5b1e2074..f2c6f670 100644 --- a/internals/handlers/render/render.go +++ b/internals/handlers/render/render.go @@ -2,7 +2,10 @@ package render import ( "encoding/json" + "fmt" + "io" "net/http" + "os" "strconv" "github.com/go-chi/chi/v5/middleware" @@ -162,11 +165,43 @@ func File(w http.ResponseWriter, filename string, data []byte) { } } -// Redirect is a helper function to redirect the user to a specified location -// -// func Redirect(w http.ResponseWriter, r *http.Request, location string, code int) { -// 
http.Redirect(w, r, location, code) -// } -func Redirect(w http.ResponseWriter, r *http.Request, location string, code int) { - http.Redirect(w, r, location, code) +// StreamFile handle files streamed response with allows the download of a file in chunks +func StreamFile(filePath, fileName string, w http.ResponseWriter, r *http.Request) { + file, err := os.Open(filePath) + if err != nil { + Error(w, r, ErrAPIDBResourceNotFound, fmt.Errorf("error opening file: %s", err)) + return + } + defer file.Close() + + // Set all necessary headers + w.Header().Set("Connection", "Keep-Alive") + w.Header().Set("Transfer-Encoding", "chunked") + w.Header().Set("X-Content-Type-Options", "nosniff") + w.Header().Set("Content-Disposition", "attachment; filename="+strconv.Quote(fileName)) + w.Header().Set("Content-Type", "application/octet-stream") + + const bufferSize = 4096 + buffer := make([]byte, bufferSize) + + for { + // Read a chunk of the file + bytesRead, err := file.Read(buffer) + if err == io.EOF { + break + } else if err != nil { + Error(w, r, ErrAPIProcessError, fmt.Errorf("error reading file: %s", err)) + return + } + + // Write the chunk to the response writer + _, err = w.Write(buffer[:bytesRead]) + if err != nil { + // If writing to the response writer fails, log the error and stop streaming + Error(w, r, ErrAPIProcessError, fmt.Errorf("error writing to response writer: %s", err)) + break + } + + w.(http.Flusher).Flush() + } } diff --git a/internals/router/routes.go b/internals/router/routes.go index d9cc3753..61c0916f 100644 --- a/internals/router/routes.go +++ b/internals/router/routes.go @@ -177,6 +177,7 @@ func engineRouter(services Services) http.Handler { // exports r.Get("/exports", services.ExportHandler.GetExports) r.Get("/exports/{id}", services.ExportHandler.GetExport) + r.Get("/exports/{id}/download", services.ExportHandler.DownloadExport) r.Delete("/exports/{id}", services.ExportHandler.DeleteExport) r.Post("/exports/fact", 
services.ExportHandler.ExportFact) diff --git a/main.go b/main.go index b9ecd32a..240cf2c3 100644 --- a/main.go +++ b/main.go @@ -2,6 +2,7 @@ package main import ( "context" + "errors" "github.com/myrteametrics/myrtea-engine-api/v5/internals/export" "github.com/myrteametrics/myrtea-engine-api/v5/internals/handlers" "github.com/myrteametrics/myrtea-engine-api/v5/internals/metrics" @@ -41,7 +42,6 @@ var ( // @name Authorization func main() { - hostname, _ := os.Hostname() metrics.InitMetricLabels(hostname) @@ -72,19 +72,22 @@ func main() { LogLevel: zapConfig.Level, } - // basePath string, diskRetentionDays int, queueMaxSize int - basePath := viper.GetString("EXPORT_BASE_PATH") - diskRetentionDays := viper.GetInt("EXPORT_DISK_RETENTION_DAYS") - queueMaxSize := viper.GetInt("EXPORT_QUEUE_MAX_SIZE") - exportWorkersCount := viper.GetInt("EXPORT_WORKERS_COUNT") + // Exports + directDownload := viper.GetBool("EXPORT_DIRECT_DOWNLOAD") + indirectDownloadUrl := viper.GetString("EXPORT_INDIRECT_DOWNLOAD_URL") - exportWrapper := export.NewWrapper(basePath, exportWorkersCount, diskRetentionDays, queueMaxSize) + exportWrapper := export.NewWrapper( + viper.GetString("EXPORT_BASE_PATH"), // basePath + viper.GetInt("EXPORT_WORKERS_COUNT"), // workersCount + viper.GetInt("EXPORT_DISK_RETENTION_DAYS"), // diskRetentionDays + viper.GetInt("EXPORT_QUEUE_MAX_SIZE"), // queueMaxSize + ) exportWrapper.Init(context.Background()) routerServices := router.Services{ PluginCore: core, ProcessorHandler: handlers.NewProcessorHandler(), - ExportHandler: handlers.NewExportHandler(exportWrapper), + ExportHandler: handlers.NewExportHandler(exportWrapper, directDownload, indirectDownloadUrl), } router := router.New(routerConfig, routerServices) @@ -105,7 +108,7 @@ func main() { } else { err = srv.ListenAndServe() } - if err != nil && err != http.ErrServerClosed { + if err != nil && !errors.Is(err, http.ErrServerClosed) { zap.L().Fatal("Server listen", zap.Error(err)) } }() From 
bdda359ab87dc5d78459bcff68f350a83eae4cef Mon Sep 17 00:00:00 2001 From: Paul Meyer Date: Tue, 26 Dec 2023 18:44:40 +0100 Subject: [PATCH 30/35] changed swagger doc --- internals/handlers/export_handlers.go | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/internals/handlers/export_handlers.go b/internals/handlers/export_handlers.go index 0bbbe60d..be9b44b0 100644 --- a/internals/handlers/export_handlers.go +++ b/internals/handlers/export_handlers.go @@ -351,6 +351,8 @@ func (e *ExportHandler) ExportFact(w http.ResponseWriter, r *http.Request) { // @Tags Exports // @Produce json // @Security Bearer +// @Success 200 {file} Returns data to be saved into a file +// @Success 308 Redirects to the export file location // @Failure 400 "Bad Request: missing export id" // @Failure 403 "Status Forbidden: missing permission" // @Failure 404 "Status Not Found: export not found" @@ -387,5 +389,5 @@ func (e *ExportHandler) DownloadExport(w http.ResponseWriter, r *http.Request) { return } - http.Redirect(w, r, path, http.StatusOK) + http.Redirect(w, r, path, http.StatusPermanentRedirect) } From 3e885095155fd55d1ea4bec21340d00283606409 Mon Sep 17 00:00:00 2001 From: Paul Meyer Date: Wed, 27 Dec 2023 12:59:22 +0100 Subject: [PATCH 31/35] fixed test --- internals/notifier/notification/handler_test.go | 1 - 1 file changed, 1 deletion(-) diff --git a/internals/notifier/notification/handler_test.go b/internals/notifier/notification/handler_test.go index 1ae6f0b8..da2b1289 100644 --- a/internals/notifier/notification/handler_test.go +++ b/internals/notifier/notification/handler_test.go @@ -8,7 +8,6 @@ import ( func TestNewHandler(t *testing.T) { handler := NewHandler(0) expression.AssertNotEqual(t, handler, nil, "NewHandler() should not return nil") - expression.AssertNotEqual(t, len(handler.notificationTypes), 0, "NewHandler() should not return an empty notificationTypes") } func TestHandler_RegisterNotificationType_AddsNewType(t *testing.T) { From 
046c22401dca425471af6a3144503dd5c8f42694 Mon Sep 17 00:00:00 2001 From: Paul Meyer Date: Wed, 27 Dec 2023 13:08:12 +0100 Subject: [PATCH 32/35] fixed another tests --- internals/export/notification_test.go | 15 +++++++++------ .../notifier/notification/notification_test.go | 12 ++++++------ 2 files changed, 15 insertions(+), 12 deletions(-) diff --git a/internals/export/notification_test.go b/internals/export/notification_test.go index 30b4862d..ca390224 100644 --- a/internals/export/notification_test.go +++ b/internals/export/notification_test.go @@ -9,7 +9,10 @@ import ( func TestExportNotification(t *testing.T) { // init handler - notification.ReplaceHandlerGlobals(notification.NewHandler(0)) + handler := notification.NewHandler(0) + handler.RegisterNotificationType(notification.MockNotification{}) + handler.RegisterNotificationType(ExportNotification{}) + notification.ReplaceHandlerGlobals(handler) notif := ExportNotification{ Export: WrapperItem{ @@ -22,11 +25,11 @@ func TestExportNotification(t *testing.T) { bytes, err := notif.ToBytes() if err != nil { - t.Errorf("Unexpected error: %v", err) + t.Fatalf("Unexpected error: %v", err) } if bytes == nil { - t.Errorf("Expected bytes, got nil") + t.Fatalf("Expected bytes, got nil") } t.Log(string(bytes)) @@ -34,12 +37,12 @@ func TestExportNotification(t *testing.T) { // find type and create new instance notifType, ok := notification.H().GetNotificationByType("ExportNotification") if !ok { - t.Errorf("ExportNotification type does not exist") + t.Fatalf("ExportNotification type does not exist") } instance, err := notifType.NewInstance(1, bytes, false) if err != nil { - t.Errorf("ExportNotification couldn't be instanced") + t.Fatalf("ExportNotification couldn't be instanced") } bt, _ := instance.ToBytes() t.Log(string(bt)) @@ -106,7 +109,7 @@ func TestExportNotification_Equals(t *testing.T) { func TestExportNotification_SetId(t *testing.T) { notif, err := ExportNotification{}.NewInstance(1, []byte(`{}`), true) if err 
!= nil { - t.Errorf("Error: %v", err) + t.Fatalf("Error: %v", err) } notif = notif.SetId(2) diff --git a/internals/notifier/notification/notification_test.go b/internals/notifier/notification/notification_test.go index 6d10d316..d8c3d3d4 100644 --- a/internals/notifier/notification/notification_test.go +++ b/internals/notifier/notification/notification_test.go @@ -15,11 +15,11 @@ func TestBaseNotificationToBytes(t *testing.T) { bytes, err := notification.ToBytes() if err != nil { - t.Errorf("Unexpected error: %v", err) + t.Fatalf("Unexpected error: %v", err) } if bytes == nil { - t.Errorf("Expected bytes, got nil") + t.Fatalf("Expected bytes, got nil") } } @@ -37,7 +37,7 @@ func TestBaseNotificationNewInstance(t *testing.T) { data := []byte(`{"id":1,"type":"Test","isRead":true}`) notification, err := BaseNotification{}.NewInstance(1, data, true) if err != nil { - t.Errorf("Unexpected error: %v", err) + t.Fatalf("Unexpected error: %v", err) } expected := BaseNotification{ @@ -53,7 +53,7 @@ func TestBaseNotificationNewInstanceWithInvalidData(t *testing.T) { data := []byte(`{"id":1,"type":"Test","isRead":"invalid"}`) _, err := BaseNotification{}.NewInstance(1, data, true) if err == nil { - t.Errorf("Expected error, got nil") + t.Fatalf("Expected error, got nil") } } @@ -214,7 +214,7 @@ func TestMockNotification_Equals(t *testing.T) { func TestBaseNotification_SetId(t *testing.T) { notif, err := BaseNotification{}.NewInstance(1, []byte(`{}`), true) if err != nil { - t.Errorf("Error: %v", err) + t.Fatalf("Error: %v", err) } notif = notif.SetId(2) @@ -226,7 +226,7 @@ func TestBaseNotification_SetId(t *testing.T) { func TestMockNotification_SetId(t *testing.T) { notif, err := MockNotification{}.NewInstance(1, []byte(`{}`), true) if err != nil { - t.Errorf("Error: %v", err) + t.Fatalf("Error: %v", err) } notif = notif.SetId(2) From ed0800c7439edcf1b196496b26692962d2adb4a0 Mon Sep 17 00:00:00 2001 From: Paul Meyer Date: Wed, 27 Dec 2023 13:48:25 +0100 Subject: [PATCH 33/35] 
fixed tests --- internals/export/worker.go | 6 +++++- internals/export/wrapper_test.go | 21 ++++++++++++--------- 2 files changed, 17 insertions(+), 10 deletions(-) diff --git a/internals/export/worker.go b/internals/export/worker.go index 2bca4cea..eb051d2d 100644 --- a/internals/export/worker.go +++ b/internals/export/worker.go @@ -38,7 +38,11 @@ func (e *ExportWorker) SetError(error error) { e.Mutex.Lock() defer e.Mutex.Unlock() e.QueueItem.Status = StatusError - e.QueueItem.Error = error.Error() + if error == nil { + e.QueueItem.Error = "" + } else { + e.QueueItem.Error = error.Error() + } } // SetStatus sets the status of the worker diff --git a/internals/export/wrapper_test.go b/internals/export/wrapper_test.go index 706b4a73..1acdc92b 100644 --- a/internals/export/wrapper_test.go +++ b/internals/export/wrapper_test.go @@ -4,6 +4,7 @@ import ( "context" "fmt" "github.com/google/uuid" + "github.com/myrteametrics/myrtea-engine-api/v5/internals/notifier" "github.com/myrteametrics/myrtea-engine-api/v5/internals/security/users" "github.com/myrteametrics/myrtea-sdk/v4/engine" "github.com/myrteametrics/myrtea-sdk/v4/expression" @@ -32,12 +33,12 @@ func TestFactsEquals(t *testing.T) { } func TestNewWrapperItem(t *testing.T) { - item := NewWrapperItem([]engine.Fact{{ID: 1}}, "test.txt", CSVParameters{}, users.User{Login: "test"}) + item := NewWrapperItem([]engine.Fact{{ID: 1}}, "test", CSVParameters{}, users.User{Login: "test"}) expression.AssertNotEqual(t, item.Id, "") expression.AssertEqual(t, factsEquals(item.Facts, []engine.Fact{{ID: 1}}), true) expression.AssertEqual(t, item.Params.Equals(CSVParameters{}), true) expression.AssertEqual(t, item.Status, StatusPending) - expression.AssertEqual(t, strings.HasSuffix(item.FileName, "test.txt.gz"), true, "test.txt.gz") + expression.AssertEqual(t, strings.HasSuffix(item.FileName, "test.csv.gz"), true, "test.txt.gz") expression.AssertNotEqual(t, len(item.Users), 0) expression.AssertEqual(t, item.Users[0], "test") } @@ 
-100,6 +101,7 @@ func TestStartDispatcher(t *testing.T) { ctx, cancel := context.WithCancel(context.Background()) defer cancel() + notifier.ReplaceGlobals(notifier.NewNotifier()) // for notifications wrapper := NewWrapper(dname, 1, 1, 1) wrapper.Init(ctx) expression.AssertEqual(t, len(wrapper.workers), 1) @@ -278,6 +280,7 @@ func TestWrapper_dispatchExportQueue(t *testing.T) { fileName := filepath.Base(file.Name()) _ = file.Close() + notifier.ReplaceGlobals(notifier.NewNotifier()) // for notifications wrapper := NewWrapper(dname, 1, 1, 2) ctx, cancel := context.WithCancel(context.Background()) wrapper.Init(ctx) @@ -354,14 +357,14 @@ func TestWrapper_DeleteExport(t *testing.T) { // test archive wrapper.archive.Store(item.Id, *item) - expression.AssertEqual(t, wrapper.DeleteExport(item.Id, users.User{Login: "bla"}), true, "item should have been deleted") + expression.AssertEqual(t, wrapper.DeleteExport(item.Id, users.User{Login: "bla"}), DeleteExportDeleted, "item should have been deleted") _, ok := wrapper.archive.Load(item.Id) expression.AssertEqual(t, ok, false, "item should not be in archive anymore") // test archive multi-user item.Users = []string{"bla", "blabla"} wrapper.archive.Store(item.Id, *item) - expression.AssertEqual(t, wrapper.DeleteExport(item.Id, users.User{Login: "bla"}), true, "user should have been deleted from existing export") + expression.AssertEqual(t, wrapper.DeleteExport(item.Id, users.User{Login: "bla"}), DeleteExportUserDeleted, "user should have been deleted from existing export") _, ok = wrapper.archive.Load(item.Id) expression.AssertEqual(t, ok, true, "item should be in archive") item.Users = []string{"bla"} @@ -372,7 +375,7 @@ func TestWrapper_DeleteExport(t *testing.T) { wrapper.queueMutex.Lock() expression.AssertEqual(t, len(wrapper.queue), 1, "item should be in queue") wrapper.queueMutex.Unlock() - expression.AssertEqual(t, wrapper.DeleteExport(queueItem.Id, users.User{Login: "bla"}), true, "item should have been deleted") + 
expression.AssertEqual(t, wrapper.DeleteExport(queueItem.Id, users.User{Login: "bla"}), DeleteExportDeleted, "item should have been deleted") wrapper.queueMutex.Lock() expression.AssertEqual(t, len(wrapper.queue), 0, "item should not be in queue anymore") wrapper.queueMutex.Unlock() @@ -385,7 +388,7 @@ func TestWrapper_DeleteExport(t *testing.T) { wrapper.queueMutex.Lock() expression.AssertEqual(t, len(wrapper.queue), 1, "item should be in queue") wrapper.queueMutex.Unlock() - expression.AssertEqual(t, wrapper.DeleteExport(queueItem.Id, users.User{Login: "bla"}), true, "user should have been deleted from existing export") + expression.AssertEqual(t, wrapper.DeleteExport(queueItem.Id, users.User{Login: "bla"}), DeleteExportUserDeleted, "user should have been deleted from existing export") wrapper.queueMutex.Lock() expression.AssertEqual(t, len(wrapper.queue), 1, "item should be in queue") wrapper.queueMutex.Unlock() @@ -397,9 +400,9 @@ func TestWrapper_DeleteExport(t *testing.T) { worker.QueueItem = *item worker.Available = true worker.Mutex.Unlock() - expression.AssertEqual(t, wrapper.DeleteExport(item.Id, users.User{Login: "bla"}), false, "item should have not been deleted") + expression.AssertEqual(t, wrapper.DeleteExport(item.Id, users.User{Login: "bla"}), DeleteExportNotFound, "item should have not been deleted") worker.SwapAvailable(false) - expression.AssertEqual(t, wrapper.DeleteExport(item.Id, users.User{Login: "bla"}), true, "item should have been deleted") + expression.AssertEqual(t, wrapper.DeleteExport(item.Id, users.User{Login: "bla"}), DeleteExportCanceled, "item should have been deleted") expression.AssertEqual(t, len(worker.Cancel), 1, "worker cancel channel should have been filled") // clean cancel channel (non-blocking) @@ -407,7 +410,7 @@ func TestWrapper_DeleteExport(t *testing.T) { worker.Mutex.Lock() worker.QueueItem.Users = []string{"bla", "blabla"} worker.Mutex.Unlock() - expression.AssertEqual(t, wrapper.DeleteExport(item.Id, 
users.User{Login: "bla"}), true, "user should have been deleted from existing export") + expression.AssertEqual(t, wrapper.DeleteExport(item.Id, users.User{Login: "bla"}), DeleteExportNotFound, "user should have been deleted from existing export") expression.AssertEqual(t, len(worker.Cancel), 0, "worker cancel channel should not have been filled") } From da2bb975c1298d44e56d090338d719ada56b746e Mon Sep 17 00:00:00 2001 From: Paul Date: Wed, 27 Dec 2023 16:12:59 +0100 Subject: [PATCH 34/35] Update go.yml rm branch condition --- .github/workflows/go.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/go.yml b/.github/workflows/go.yml index 69816c05..ff89a08a 100644 --- a/.github/workflows/go.yml +++ b/.github/workflows/go.yml @@ -34,7 +34,7 @@ jobs: - name: Coverage uses: gwatts/go-coverage-action@v1 - if: github.ref == 'refs/heads/master' + # if: github.ref == 'refs/heads/master' # continue-on-error: true with: add-comment: true From 3d77752948510cc36d0daf18bef26672f420d670 Mon Sep 17 00:00:00 2001 From: Paul Meyer Date: Wed, 27 Dec 2023 16:17:26 +0100 Subject: [PATCH 35/35] added some tests --- internals/export/wrapper.go | 3 +-- internals/export/wrapper_test.go | 26 ++++++++++++++++++++++++-- 2 files changed, 25 insertions(+), 4 deletions(-) diff --git a/internals/export/wrapper.go b/internals/export/wrapper.go index 6666587d..d5d26dcd 100644 --- a/internals/export/wrapper.go +++ b/internals/export/wrapper.go @@ -362,8 +362,7 @@ func (ew *Wrapper) dequeueWrapperItem(item *WrapperItem) (int, bool) { defer ew.queueMutex.Unlock() for i, queueItem := range ew.queue { - // comparing pointer should work - if queueItem != item { + if queueItem.Id != item.Id { continue } diff --git a/internals/export/wrapper_test.go b/internals/export/wrapper_test.go index 1acdc92b..2b0f595b 100644 --- a/internals/export/wrapper_test.go +++ b/internals/export/wrapper_test.go @@ -394,6 +394,7 @@ func TestWrapper_DeleteExport(t *testing.T) { 
wrapper.queueMutex.Unlock() // test workers + item.Users = []string{"bla", "blibli"} worker := NewExportWorker(0, "/tmp", make(chan<- int)) wrapper.workers = append(wrapper.workers, worker) worker.Mutex.Lock() @@ -402,6 +403,8 @@ worker.Mutex.Unlock() expression.AssertEqual(t, wrapper.DeleteExport(item.Id, users.User{Login: "bla"}), DeleteExportNotFound, "item should have not been deleted") worker.SwapAvailable(false) + expression.AssertEqual(t, wrapper.DeleteExport(item.Id, users.User{Login: "blibli"}), DeleteExportUserDeleted, "user should have been deleted from export") + expression.AssertEqual(t, len(worker.Cancel), 0, "worker cancel channel should not have been filled") expression.AssertEqual(t, wrapper.DeleteExport(item.Id, users.User{Login: "bla"}), DeleteExportCanceled, "item should have been deleted") expression.AssertEqual(t, len(worker.Cancel), 1, "worker cancel channel should have been filled") @@ -417,18 +420,37 @@ func TestWrapper_GetUserExport(t *testing.T) { wrapper := NewWrapper("/tmp", 1, 1, 2) item := NewWrapperItem([]engine.Fact{{ID: 1}}, "test.txt", CSVParameters{}, users.User{Login: "bla"}) + + // test item in archive wrapper.archive.Store(item.Id, *item) export, ok := wrapper.GetUserExport(item.Id, users.User{Login: "bla"}) expression.AssertEqual(t, ok, true) expression.AssertEqual(t, export.Id, item.Id) export, ok = wrapper.GetUserExport(item.Id, users.User{Login: "blabla"}) expression.AssertEqual(t, ok, false) - // test queue + wrapper.archive.Delete(item.Id) + + // test item in queue queueItem, code := wrapper.AddToQueue([]engine.Fact{{ID: 1}}, "test.txt", CSVParameters{}, users.User{Login: "bla"}) expression.AssertEqual(t, code, CodeAdded, "item should have been added to queue") export, ok = wrapper.GetUserExport(queueItem.Id, users.User{Login: "bla"}) expression.AssertEqual(t, ok, true) expression.AssertEqual(t, export.Id, queueItem.Id) -
export, ok = wrapper.GetUserExport(queueItem.Id, users.User{Login: "blabla"}) + _, ok = wrapper.GetUserExport(queueItem.Id, users.User{Login: "blabla"}) + expression.AssertEqual(t, ok, false) + _, ok = wrapper.dequeueWrapperItem(&export) + expression.AssertEqual(t, ok, true) + + // test worker + worker := NewExportWorker(0, "/tmp", make(chan<- int)) + wrapper.workers = append(wrapper.workers, worker) + worker.Mutex.Lock() + worker.QueueItem = *item + worker.Available = false + worker.Mutex.Unlock() + + _, ok = wrapper.GetUserExport(item.Id, users.User{Login: "blabla"}) expression.AssertEqual(t, ok, false) + _, ok = wrapper.GetUserExport(item.Id, users.User{Login: "bla"}) + expression.AssertEqual(t, ok, true) }