Skip to content

Commit

Permalink
enh: some logging refactoring; remove aws deprecations; fix tests (#30)
Browse files Browse the repository at this point in the history
* enh: some logging refactoring; remove aws deprecations; fix tests

* fix: lint

* fix: lint
  • Loading branch information
notdodo authored Jun 7, 2024
1 parent fb15554 commit d5f1611
Show file tree
Hide file tree
Showing 10 changed files with 104 additions and 104 deletions.
2 changes: 0 additions & 2 deletions .github/workflows/golangci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -53,8 +53,6 @@ jobs:
uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
# v4.1.6
- name: Run Gosec Security Scanner
# master to fetch latest checks
# kics-scan ignore-line
uses: securego/gosec@6fbd381238e97e1d1f3358f0d6d65de78dcf9245
# V.2.20.0
with:
Expand Down
5 changes: 3 additions & 2 deletions assets/tests/Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ OUT_DIR=dist
OUT_PREFIX=nuvola

all:
docker-compose up -d
docker compose up -d
pipenv install
pipenv run tflocal apply -auto-approve
pipenv run ../nuvola dump --outputdir ./ --endpoint-url http://localhost:4566
pipenv run ../../nuvola dump --output-dir ./ --aws-endpoint-url http://localhost:4566
143 changes: 73 additions & 70 deletions cmd/assess.go
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@ import (
"bytes"
"fmt"
"io"
"log"
"strings"

"github.com/primait/nuvola/pkg/connector"
Expand All @@ -19,41 +18,33 @@ import (
var assessCmd = &cobra.Command{
Use: "assess",
Short: "Execute assessment queries against data loaded in Neo4J",
Run: func(cmd *cobra.Command, args []string) {
if cmd.Flags().Changed(flagVerbose) {
logger.SetVerboseLevel()
}
if cmd.Flags().Changed(flagDebug) {
logger.SetDebugLevel()
}
Run: runAssessCmd,
}

connector.SetActions()
storageConnector := connector.NewStorageConnector()
if importFile != "" {
logger.Info("Flushing database")
logger.Info(fmt.Sprintf("Importing %s", importFile))
importZipFile(storageConnector, importFile)
}
func runAssessCmd(cmd *cobra.Command, args []string) {
if cmd.Flags().Changed(flagVerbose) {
logger.SetVerboseLevel()
}
if cmd.Flags().Changed(flagDebug) {
logger.SetDebugLevel()
}

assess(storageConnector, "./assets/rules/")
},
storageConnector := connector.NewStorageConnector()
if importFile != "" {
logger.Debug(fmt.Sprintf("Importing %s", importFile))
importZipFile(storageConnector, importFile)
logger.Debug(fmt.Sprintf("Imported %s", importFile))
}

assess(storageConnector, "./assets/rules/")
}

func importZipFile(connector *connector.StorageConnector, zipfile string) {
connector.FlushAll()
var ordering = []string{
"Groups",
"Users",
"Roles",
"Buckets",
"EC2s",
"VPCs",
"Lambdas",
"RDS",
"DynamoDBs",
"RedshiftDBs",
ordering := []string{
"Groups", "Users", "Roles", "Buckets", "EC2s", "VPCs", "Lambdas", "RDS", "DynamoDBs", "RedshiftDBs",
}
var orderedFiles = make([]*zip.File, len(ordering))
orderedFiles := make([]*zip.File, len(ordering))

r := unzip.UnzipInMemory(zipfile)
defer r.Close()
Expand All @@ -62,61 +53,73 @@ func importZipFile(connector *connector.StorageConnector, zipfile string) {
for ord := range ordering {
if strings.HasPrefix(f.Name, ordering[ord]) {
orderedFiles[ord] = f
break
}
}
}

for _, f := range orderedFiles {
rc, err := f.Open()
if err != nil {
logging.HandleError(err, "Assess", "Opening content of ZIP")
if f == nil {
continue
}
defer func() {
if err := rc.Close(); err != nil {
log.Printf("error closing resource: %s", err)
}
}()

buf := new(bytes.Buffer)
_, err = io.Copy(buf, rc) // #nosecG110
if err != nil {
logging.HandleError(err, "Assess", "Copying buffer from ZIP")
if err := processZipFile(connector, f); err != nil {
logging.HandleError(err, "Assess", "Processing ZIP file")
}
connector.ImportResults(f.Name, buf.Bytes())
}
}

// processZipFile reads the full decompressed content of a single entry of
// the ZIP archive and hands it to the storage connector, which imports it
// into the database under the entry's file name.
//
// Returns a wrapped error when the entry cannot be opened or read.
func processZipFile(connector *connector.StorageConnector, f *zip.File) error {
	rc, err := f.Open()
	if err != nil {
		return fmt.Errorf("opening content of ZIP: %w", err)
	}
	defer rc.Close()

	buf := new(bytes.Buffer)
	// Decompression-bomb risk is knowingly accepted (the dumps are
	// self-produced). Note: the directive must be spelled "#nosec G110"
	// (with a space) for gosec to match the specific rule.
	_, err = io.Copy(buf, rc) // #nosec G110
	if err != nil {
		return fmt.Errorf("copying buffer from ZIP: %w", err)
	}

	connector.ImportResults(f.Name, buf.Bytes())
	return nil
}

func assess(connector *connector.StorageConnector, rulesPath string) {
// perform checks based on pre-defined static rules
for _, rule := range files.GetFiles(rulesPath, ".ya?ml") {
var c = yamler.GetConf(rule)
if c.Enabled {
query, args := yamler.PrepareQuery(c)
results := connector.Query(query, args)

logging.PrintRed("Running rule: " + rule)
logging.PrintGreen("Name: " + c.Name)
logging.PrintGreen("Arguments:")
logging.PrintDarkGreen(yamler.ArgsToQueryNeo4jBrowser(args))
logging.PrintGreen("Query:")
logging.PrintDarkGreen(query)
logging.PrintGreen("Description: " + c.Description)

for _, resultMap := range results {
for key, value := range resultMap {
for _, retValue := range c.Return {
if string(retValue[len(retValue)-1]) == "*" {
// Return value contains a *: return all matching keys
retValue = retValue[0 : len(retValue)-1]
retValue = strings.TrimRight(retValue, "_")
}
if strings.HasPrefix(key, retValue) {
fmt.Printf("%s: %v\n", key, value)
}
}
}
c := yamler.GetConf(rule)
if !c.Enabled {
continue
}

query, args := yamler.PrepareQuery(c)
results := connector.Query(query, args)

logging.PrintRed("Running rule: " + rule)
logging.PrintGreen("Name: " + c.Name)
logging.PrintGreen("Arguments:")
logging.PrintDarkGreen(yamler.ArgsToQueryNeo4jBrowser(args))
logging.PrintGreen("Query:")
logging.PrintDarkGreen(query)
logging.PrintGreen("Description: " + c.Description)

for _, resultMap := range results {
for key, value := range resultMap {
printResults(c.Return, key, value)
}
fmt.Print("\n")
}
fmt.Print("\n")
}
}

// printResults prints the (key, value) pair for every result key that
// matches one of the rule's declared return keys. A return key ending in
// "*" acts as a prefix wildcard; trailing underscores preceding the "*"
// are stripped before the prefix comparison.
func printResults(returnKeys []string, key string, value interface{}) {
	for _, want := range returnKeys {
		if strings.HasSuffix(want, "*") {
			// Drop the wildcard marker and any separator underscores.
			want = strings.TrimRight(strings.TrimSuffix(want, "*"), "_")
		}
		if strings.HasPrefix(key, want) {
			fmt.Printf("%s: %v\n", key, value)
		}
	}
}
Expand Down
7 changes: 4 additions & 3 deletions cmd/root.go
Original file line number Diff line number Diff line change
Expand Up @@ -36,11 +36,12 @@ func init() {
logger = logging.GetLogManager()
rootCmd.PersistentFlags().BoolP(flagVerbose, "v", false, "Verbose output")
rootCmd.PersistentFlags().BoolP(flagDebug, "d", false, "Debug output")
dumpCmd.Flags().StringVarP(&awsProfile, flagAWSProfile, "p", "default", "AWS Profile to use")
dumpCmd.Flags().StringVarP(&awsProfile, flagAWSProfile, "p", "", "AWS Profile to use")
dumpCmd.Flags().BoolVarP(&dumpOnly, flagDumpOnly, "", false, "Flag to prevent loading data into Neo4j (default: \"false\")")
dumpCmd.Flags().StringVarP(&awsEndpointUrl, flagAWSEndpointUrl, "e", "", "AWS Endpoint to use (e.g. for Localstack)")
dumpCmd.Flags().StringVarP(&outputDirectory, flagOutputDirectory, "o", "", "Output folder where the files will be saved (default: \".\")")
dumpCmd.Flags().StringVarP(&outputFormat, flagOutputFormat, "f", "zip", "Output format: ZIP or json files")
dumpCmd.Flags().BoolVarP(&dumpOnly, flagDumpOnly, "", false, "Flag to prevent loading data into Neo4j (default: \"false\")")
_ = dumpCmd.MarkFlagRequired(flagAWSProfile)
// _ = dumpCmd.MarkFlagRequired(flagAWSProfile)

assessCmd.Flags().StringVarP(&importFile, flagImportFile, "i", "", "Input ZIP file to load")
assessCmd.Flags().BoolVarP(&noImport, flagNoImport, "", false, "Use stored data from Neo4j without import (default)")
Expand Down
2 changes: 2 additions & 0 deletions pkg/connector/cloud_connector.go
Original file line number Diff line number Diff line change
Expand Up @@ -5,11 +5,13 @@ import (
"strings"

awsconfig "github.com/primait/nuvola/pkg/connector/services/aws"
"github.com/primait/nuvola/pkg/io/logging"
)

func NewCloudConnector(profile string, endpointUrl string) (*CloudConnector, error) {
cc := &CloudConnector{
AWSConfig: awsconfig.InitAWSConfiguration(profile, endpointUrl),
logger: logging.GetLogManager(),
}
if !cc.testConnection("aws") {
return nil, errors.New("invalid credentials or expired session")
Expand Down
3 changes: 3 additions & 0 deletions pkg/connector/connector_structs.go
Original file line number Diff line number Diff line change
Expand Up @@ -3,12 +3,15 @@ package connector
import (
awsconfig "github.com/primait/nuvola/pkg/connector/services/aws"
neo4jconnector "github.com/primait/nuvola/pkg/connector/services/neo4j"
"github.com/primait/nuvola/pkg/io/logging"
)

type StorageConnector struct {
Client neo4jconnector.Neo4jClient
logger logging.LogManager
}

type CloudConnector struct {
AWSConfig awsconfig.AWSConfig
logger logging.LogManager
}
20 changes: 4 additions & 16 deletions pkg/connector/services/aws/aws.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@ package awsconnector

import (
"context"
"os"

"github.com/primait/nuvola/pkg/connector/services/aws/database"
"github.com/primait/nuvola/pkg/connector/services/aws/ec2"
Expand All @@ -25,30 +24,19 @@ var (
)

func InitAWSConfiguration(profile string, awsEndpoint string) (awsc AWSConfig) {
customResolver := aws.EndpointResolverWithOptionsFunc(func(service, region string, options ...interface{}) (aws.Endpoint, error) {
if awsEndpoint != "" {
return aws.Endpoint{
PartitionID: "aws",
URL: awsEndpoint,
SigningRegion: os.Getenv("AWS_DEFAULT_REGION"),
}, nil
}

// returning EndpointNotFoundError will allow the service to fall back to its default resolution
return aws.Endpoint{}, &aws.EndpointNotFoundError{}
})

// Load the Shared AWS Configuration (~/.aws/config)
cfg, _ := config.LoadDefaultConfig(context.TODO(), config.WithSharedConfigProfile(profile),
config.WithRetryer(func() aws.Retryer {
return retry.AddWithMaxAttempts(retry.NewStandard(), countRetries)
}),
config.WithEndpointResolverWithOptions(customResolver),
)
cfg.RetryMode = aws.RetryModeStandard
if awsEndpoint != "" {
cfg.BaseEndpoint = aws.String(awsEndpoint)
}
awsc = AWSConfig{Profile: profile, Config: cfg}
SetActions()
// Get the AWS regions dynamically
// Get the available AWS regions dynamically
ec2.ListAndSaveRegions(cfg)
iam.ActionsList = unique(ActionsList)
iam.ActionsMap = ActionsMap
Expand Down
4 changes: 3 additions & 1 deletion pkg/connector/services/aws/s3/s3.go
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,9 @@ import (
)

func ListBuckets(cfg aws.Config) (buckets []*Bucket) {
s3Client := S3Client{Config: cfg, client: s3.NewFromConfig(cfg, func(o *s3.Options) { o.UsePathStyle = true })}
s3Client := S3Client{Config: cfg, client: s3.NewFromConfig(cfg, func(o *s3.Options) {
o.UsePathStyle = true
})}

output, err := s3Client.client.ListBuckets(context.TODO(), &s3.ListBucketsInput{})
if errors.As(err, &re) {
Expand Down
20 changes: 11 additions & 9 deletions pkg/connector/storage_connector.go
Original file line number Diff line number Diff line change
Expand Up @@ -6,31 +6,33 @@ import (
"os"
"regexp"

"github.com/primait/nuvola/pkg/io/logging"

"github.com/primait/nuvola/pkg/connector/services/aws/database"
"github.com/primait/nuvola/pkg/connector/services/aws/ec2"
"github.com/primait/nuvola/pkg/connector/services/aws/iam"
"github.com/primait/nuvola/pkg/connector/services/aws/lambda"
"github.com/primait/nuvola/pkg/connector/services/aws/s3"
neo4jconnector "github.com/primait/nuvola/pkg/connector/services/neo4j"
neo4j "github.com/primait/nuvola/pkg/connector/services/neo4j"
"github.com/primait/nuvola/pkg/io/logging"
)

func NewStorageConnector() *StorageConnector {
neo4jURL := os.Getenv("NEO4J_URL")
neo4jUsername := "neo4j"
neo4jPassword := os.Getenv("PASSWORD")
client, err := neo4jconnector.Connect(neo4jURL, neo4jUsername, neo4jPassword)
logger := logging.GetLogManager()
client, err := neo4j.Connect(neo4jURL, neo4jUsername, neo4jPassword)
if err != nil {
logging.HandleError(err, "NewStorageConnector", "Error connecting to database")
logger.Error("Error connecting to database", "err", err)
}
connector := &StorageConnector{
Client: *client,
logger: logger,
}
return connector
}

func (sc *StorageConnector) FlushAll() *StorageConnector {
sc.logger.Info("Flushing the database")
sc.Client.DeleteAll()
return sc
}
Expand All @@ -49,7 +51,7 @@ func (sc *StorageConnector) ImportResults(what string, content []byte) {
var dynamodbs = regexp.MustCompile(`^DynamoDBs`)
var redshiftdbs = regexp.MustCompile(`^RedshiftDBs`)

logging.GetLogManager().Info(fmt.Sprintf("Importing: %s", what))
sc.logger.Debug(fmt.Sprintf("Importing: %s", what))
switch {
case whoami.MatchString(what):
case credentialReport.MatchString(what):
Expand Down Expand Up @@ -95,16 +97,16 @@ func (sc *StorageConnector) ImportResults(what string, content []byte) {
_ = json.Unmarshal(content, &contentStruct)
sc.Client.AddRedshift(&contentStruct)
default:
logging.HandleError(nil, "ImportResults", "Error importing data")
sc.logger.Error("Error importing data", "data", what)
}
logging.GetLogManager().Info(fmt.Sprintf("Imported: %s", what))
sc.logger.Info(fmt.Sprintf("Imported: %s", what))
}

func (sc *StorageConnector) ImportBulkResults(content map[string]interface{}) {
for k, v := range content {
value, err := json.Marshal(v)
if err != nil {
logging.HandleError(err, "ImportBulkResults", "Error on marshalling data")
sc.logger.Error("Error on marshalling data", "err", err)
}
sc.ImportResults(k, value)
}
Expand Down
2 changes: 1 addition & 1 deletion pkg/io/logging/logging.go
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ func GetLogManager() LogManager {
logger = &logManager{
logger: log.NewWithOptions(os.Stdout, log.Options{
CallerOffset: 1,
Level: log.InfoLevel,
Level: log.WarnLevel,
ReportCaller: true,
ReportTimestamp: true,
TimeFormat: time.RFC1123,
Expand Down

0 comments on commit d5f1611

Please sign in to comment.