From ac9f9bd1d52d971d9dcb398604d5f1ee1ed5bc90 Mon Sep 17 00:00:00 2001
From: BasilMawejje
Date: Tue, 24 Aug 2021 13:15:24 +0300
Subject: [PATCH 01/12] Start spike for s3 lifecycle rules for backups

---
 app/models/conditions_response/backup.rb        | 33 +++++++++++++++++++
 .../models/conditions_response/backup_spec.rb   | 11 +++++++
 2 files changed, 44 insertions(+)

diff --git a/app/models/conditions_response/backup.rb b/app/models/conditions_response/backup.rb
index 84e94a6b3..d20e20862 100644
--- a/app/models/conditions_response/backup.rb
+++ b/app/models/conditions_response/backup.rb
@@ -287,6 +287,39 @@ def self.iterate_and_log_notify_errors(list, additional_error_info, log, use_sla
     end
   end
 
+
+  STORAGE_CLASSES = %w(STANDARD_IA GLACIER DEEP_ARCHIVE)
+
+  # method signature => s3_lifecycle_rules(string, string, string, {days: integer, storage_class: member class in STORAGE_CLASSES list})
+  # e.g s3_lifecycle_rules('bucket', 'bucket_full_prefix', 'enabled', {days: 30, storage_class: "STANDARD_IA"}, {days: 90, storage_class: "GLACIER"})
+  def self.s3_lifecycle_rules(bucket, bucket_full_prefix, status, *storage_rules_kwargs)
+    client = Aws::S3::Client.new(region: ENV['SHF_AWS_S3_BACKUP_REGION'],
+                                 credentials: Aws::Credentials.new(ENV['SHF_AWS_S3_BACKUP_KEY_ID'],
+                                                                   ENV['SHF_AWS_S3_BACKUP_SECRET_ACCESS_KEY']))
+
+    unless (storage_rules_kwargs.map{|h| h.values.last} & STORAGE_CLASSES).empty?
+      client.put_bucket_lifecycle_configuration({
+        bucket: bucket,
+        lifecycle_configuration: {
+          rules: [
+            {
+              expiration: {
+                date: Time.now,
+                days: 365,
+                expired_object_delete_marker: false,
+              },
+              filter: {
+                prefix: bucket_full_prefix,
+              },
+              id: ENV['SHF_AWS_S3_BACKUP_KEY_ID'],
+              status: status.capitalize, # String showing 'Enabled' or 'Disabled'
+              transitions: storage_rules_kwargs
+            }
+          ]
+        }
+      })
+    end
+  end
 
   # Record the error and additional_info to the given log
   # and send a Slack notification if we are using Slack notifications
diff --git a/spec/models/conditions_response/backup_spec.rb b/spec/models/conditions_response/backup_spec.rb
index b9d4c894c..ffc479034 100644
--- a/spec/models/conditions_response/backup_spec.rb
+++ b/spec/models/conditions_response/backup_spec.rb
@@ -1304,6 +1304,17 @@ def create_faux_backup_file(backups_dir, file_prefix)
     end
   end
 
+
+  describe 's3_lifecycle_rules(bucket, bucket_full_prefix, status, *storage_rules_kwargs)' do
+    # Will add tests for spike after I get .env file to setup test database
+    it 'returns nil for invalid storage classes' do
+      described_class
+      binding.pry
+    end
+
+    it 'returns the correct lifecycle rules transitions' do
+    end
+  end
 
   end
 end
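
The API call at the heart of this spike is Aws::S3::Client#put_bucket_lifecycle_configuration, which replaces the bucket's entire lifecycle configuration in a single request. A minimal standalone sketch of the request shape follows; the bucket name, prefix, and region here are hypothetical, and it assumes the aws-sdk-s3 gem:

    require 'aws-sdk-s3'

    client = Aws::S3::Client.new(region: 'eu-north-1')

    client.put_bucket_lifecycle_configuration(
      bucket: 'my-backup-bucket',
      lifecycle_configuration: {
        rules: [
          {
            id: 'transition-backups',           # any unique rule name
            status: 'Enabled',                  # must be 'Enabled' or 'Disabled'
            filter: { prefix: 'db_backup/' },   # rule applies only under this prefix
            transitions: [
              { days: 30, storage_class: 'STANDARD_IA' },
              { days: 90, storage_class: 'GLACIER' }
            ],
            expiration: { days: 365 }           # delete objects after roughly a year
          }
        ]
      }
    )

One caveat worth flagging: S3 accepts either days or date inside expiration, not both, so the rule built in this first patch (which sets date: Time.now and days: 365 together) would most likely be rejected by the live API even though it passes the stubbed tests later in the series.
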
From 648e1331821f8923ae43e8ab69256001a3c4e60b Mon Sep 17 00:00:00 2001
From: BasilMawejje
Date: Tue, 24 Aug 2021 13:33:06 +0300
Subject: [PATCH 02/12] Cleanup hound issues

---
 app/models/conditions_response/backup.rb        | 6 +++---
 spec/models/conditions_response/backup_spec.rb  | 3 +--
 2 files changed, 4 insertions(+), 5 deletions(-)

diff --git a/app/models/conditions_response/backup.rb b/app/models/conditions_response/backup.rb
index d20e20862..79a95750e 100644
--- a/app/models/conditions_response/backup.rb
+++ b/app/models/conditions_response/backup.rb
@@ -290,7 +290,7 @@ def self.iterate_and_log_notify_errors(list, additional_error_info, log, use_sla
 
   STORAGE_CLASSES = %w(STANDARD_IA GLACIER DEEP_ARCHIVE)
 
-  # method signature => s3_lifecycle_rules(string, string, string, {days: integer, storage_class: member class in STORAGE_CLASSES list})
+  # method signature => s3_lifecycle_rules(string, string, string, {days: integer, storage_class: upcase string in STORAGE_CLASSES list})
   # e.g s3_lifecycle_rules('bucket', 'bucket_full_prefix', 'enabled', {days: 30, storage_class: "STANDARD_IA"}, {days: 90, storage_class: "GLACIER"})
   def self.s3_lifecycle_rules(bucket, bucket_full_prefix, status, *storage_rules_kwargs)
     client = Aws::S3::Client.new(region: ENV['SHF_AWS_S3_BACKUP_REGION'],
@@ -306,10 +306,10 @@ def self.s3_lifecycle_rules(bucket, bucket_full_prefix, status, *storage_rules_k
               expiration: {
                 date: Time.now,
                 days: 365,
-                expired_object_delete_marker: false,
+                expired_object_delete_marker: false
               },
               filter: {
-                prefix: bucket_full_prefix,
+                prefix: bucket_full_prefix
               },
               id: ENV['SHF_AWS_S3_BACKUP_KEY_ID'],
               status: status.capitalize, # String showing 'Enabled' or 'Disabled'
diff --git a/spec/models/conditions_response/backup_spec.rb b/spec/models/conditions_response/backup_spec.rb
index ffc479034..ede84b7cc 100644
--- a/spec/models/conditions_response/backup_spec.rb
+++ b/spec/models/conditions_response/backup_spec.rb
@@ -1308,8 +1308,7 @@ def create_faux_backup_file(backups_dir, file_prefix)
   describe 's3_lifecycle_rules(bucket, bucket_full_prefix, status, *storage_rules_kwargs)' do
     # Will add tests for spike after I get .env file to setup test database
     it 'returns nil for invalid storage classes' do
-      described_class
-      binding.pry
+      # binding.pry
     end
 
     it 'returns the correct lifecycle rules transitions' do
From 8fa55e8a6761358178527f235b1e9f53a15b834f Mon Sep 17 00:00:00 2001
From: BasilMawejje
Date: Wed, 25 Aug 2021 10:39:26 +0300
Subject: [PATCH 03/12] Cleanup lifecycle_rules method

---
 app/models/conditions_response/backup.rb        | 13 +++++++------
 spec/models/conditions_response/backup_spec.rb  |  5 ++---
 2 files changed, 9 insertions(+), 9 deletions(-)

diff --git a/app/models/conditions_response/backup.rb b/app/models/conditions_response/backup.rb
index 79a95750e..8503f45ad 100644
--- a/app/models/conditions_response/backup.rb
+++ b/app/models/conditions_response/backup.rb
@@ -289,15 +289,16 @@ def self.iterate_and_log_notify_errors(list, additional_error_info, log, use_sla
 
 
   STORAGE_CLASSES = %w(STANDARD_IA GLACIER DEEP_ARCHIVE)
-
+  define_method(:storage_class_is_valid?) {|storage_class_list| !(storage_class_list & STORAGE_CLASSES).empty? ? true : "Invalid storage class"}
+
   # method signature => s3_lifecycle_rules(string, string, string, {days: integer, storage_class: upcase string in STORAGE_CLASSES list})
   # e.g s3_lifecycle_rules('bucket', 'bucket_full_prefix', 'enabled', {days: 30, storage_class: "STANDARD_IA"}, {days: 90, storage_class: "GLACIER"})
   def self.s3_lifecycle_rules(bucket, bucket_full_prefix, status, *storage_rules_kwargs)
-    client = Aws::S3::Client.new(region: ENV['SHF_AWS_S3_BACKUP_REGION'],
-                                 credentials: Aws::Credentials.new(ENV['SHF_AWS_S3_BACKUP_KEY_ID'],
-                                                                   ENV['SHF_AWS_S3_BACKUP_SECRET_ACCESS_KEY']))
-
-    unless (storage_rules_kwargs.map{|h| h.values.last} & STORAGE_CLASSES).empty?
+    client = Aws::S3::Client.new(region: ENV['SHF_AWS_S3_BACKUP_REGION'],
+                                 credentials: Aws::Credentials.new(ENV['SHF_AWS_S3_BACKUP_KEY_ID'], qENV['SHF_AWS_S3_BACKUP_SECRET_ACCESS_KEY']))
+
+    storage_class_list = storage_rules_kwargs.map{|h| h.values.last}
+    unless storage_class_is_valid? storage_class_list
       client.put_bucket_lifecycle_configuration({
         bucket: bucket,
         lifecycle_configuration: {
diff --git a/spec/models/conditions_response/backup_spec.rb b/spec/models/conditions_response/backup_spec.rb
index ede84b7cc..6a1be10d4 100644
--- a/spec/models/conditions_response/backup_spec.rb
+++ b/spec/models/conditions_response/backup_spec.rb
@@ -1306,9 +1306,8 @@ def create_faux_backup_file(backups_dir, file_prefix)
   end
 
   describe 's3_lifecycle_rules(bucket, bucket_full_prefix, status, *storage_rules_kwargs)' do
-    # Will add tests for spike after I get .env file to setup test database
-    it 'returns nil for invalid storage classes' do
-      # binding.pry
+    it "returns 'Invalid storage class' for invalid storage classes" do
+      binding.pry
     end
 
     it 'returns the correct lifecycle rules transitions' do
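
A side note on the define_method call this patch introduces: invoked at the class-body level it defines an instance method, so the class method self.s3_lifecycle_rules would not actually be able to call it at runtime — which is why the next patch moves the definition into class << self, defining it on the singleton class instead. A small sketch of the difference, using a hypothetical Demo class:

    class Demo
      define_method(:instance_check) { |x| x.odd? }   # instance method

      class << self
        define_method(:class_check) { |x| x.odd? }    # singleton (class) method
      end
    end

    Demo.new.instance_check(3)   # => true
    Demo.class_check(3)          # => true
    # Demo.instance_check(3)     # => NoMethodError: undefined method `instance_check'
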
From 302f212b5221eb19c3ebeafbf67f20816c0c08e0 Mon Sep 17 00:00:00 2001
From: BasilMawejje
Date: Wed, 25 Aug 2021 13:52:29 +0300
Subject: [PATCH 04/12] Add initial tests

---
 app/models/conditions_response/backup.rb        | 14 +++++++++---
 .../models/conditions_response/backup_spec.rb   | 16 ++++++++++++++--
 2 files changed, 25 insertions(+), 5 deletions(-)

diff --git a/app/models/conditions_response/backup.rb b/app/models/conditions_response/backup.rb
index 8503f45ad..a969f9103 100644
--- a/app/models/conditions_response/backup.rb
+++ b/app/models/conditions_response/backup.rb
@@ -288,14 +288,22 @@ def self.iterate_and_log_notify_errors(list, additional_error_info, log, use_sla
   end
 
 
-  STORAGE_CLASSES = %w(STANDARD_IA GLACIER DEEP_ARCHIVE)
-  define_method(:storage_class_is_valid?) {|storage_class_list| !(storage_class_list & STORAGE_CLASSES).empty? ? true : "Invalid storage class"}
+  STORAGE_CLASSES = %w(STANDARD_IA GLACIER DEEP_ARCHIVE).freeze
+  class << self
+    define_method(:storage_class_is_valid?) do |storage_class_list|
+      unless storage_class_list.empty?
+        (storage_class_list + STORAGE_CLASSES).uniq.count <= STORAGE_CLASSES.count ? true : "Invalid storage class"
+      else
+        "Empty storage class"
+      end
+    end
+  end
 
   # method signature => s3_lifecycle_rules(string, string, string, {days: integer, storage_class: upcase string in STORAGE_CLASSES list})
   # e.g s3_lifecycle_rules('bucket', 'bucket_full_prefix', 'enabled', {days: 30, storage_class: "STANDARD_IA"}, {days: 90, storage_class: "GLACIER"})
   def self.s3_lifecycle_rules(bucket, bucket_full_prefix, status, *storage_rules_kwargs)
     client = Aws::S3::Client.new(region: ENV['SHF_AWS_S3_BACKUP_REGION'],
-                                 credentials: Aws::Credentials.new(ENV['SHF_AWS_S3_BACKUP_KEY_ID'], qENV['SHF_AWS_S3_BACKUP_SECRET_ACCESS_KEY']))
+                                 credentials: Aws::Credentials.new(ENV['SHF_AWS_S3_BACKUP_KEY_ID'], ENV['SHF_AWS_S3_BACKUP_SECRET_ACCESS_KEY']))
 
     storage_class_list = storage_rules_kwargs.map{|h| h.values.last}
     unless storage_class_is_valid? storage_class_list
diff --git a/spec/models/conditions_response/backup_spec.rb b/spec/models/conditions_response/backup_spec.rb
index 6a1be10d4..70273f055 100644
--- a/spec/models/conditions_response/backup_spec.rb
+++ b/spec/models/conditions_response/backup_spec.rb
@@ -1306,11 +1306,23 @@ def create_faux_backup_file(backups_dir, file_prefix)
   end
 
   describe 's3_lifecycle_rules(bucket, bucket_full_prefix, status, *storage_rules_kwargs)' do
-    it "returns 'Invalid storage class' for invalid storage classes" do
-      binding.pry
+    let(:invalid_storage_class_list) { ["INVALID_STORAGE_CLASS", "OTHER_INVALID_STORAGE_CLASS"] }
+    let(:another_invalid_storage_class_list) { ["INVALID_STORAGE_CLASS", "STANDARD_IA", "GLACIER"] }
+
+    it "returns 'Invalid storage class' for a list containing only invalid storage classes" do
+      expect(described_class.storage_class_is_valid? invalid_storage_class_list).to eq("Invalid storage class")
+    end
+
+    it "returns 'Invalid storage class' for a list containing both valid and invalid storage classes" do
+      expect(described_class.storage_class_is_valid? another_invalid_storage_class_list).to eq("Invalid storage class")
+    end
+
+    it "fails for empty storage classes list" do
+      expect(described_class.storage_class_is_valid? []).to eq("Empty storage class")
     end
 
     it 'returns the correct lifecycle rules transitions' do
+      #TODO: Add this test
     end
   end
 end
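
The validation semantics also change here. Patch 03's intersection test (!(list & STORAGE_CLASSES).empty?) passes when at least one entry is valid, while the union-count test above passes only when every entry is valid, i.e. when the list is a subset of STORAGE_CLASSES. A sketch of the two checks side by side:

    STORAGE_CLASSES = %w(STANDARD_IA GLACIER DEEP_ARCHIVE).freeze

    # Intersection (patch 03): true if ANY class is recognised
    !(%w(GLACIER BOGUS) & STORAGE_CLASSES).empty?
    # => true

    # Union count (patch 04): true only if ALL classes are recognised
    (%w(GLACIER BOGUS) + STORAGE_CLASSES).uniq.count <= STORAGE_CLASSES.count
    # => false
    (%w(GLACIER) + STORAGE_CLASSES).uniq.count <= STORAGE_CLASSES.count
    # => true

Ruby offers a more direct spelling of the same subset test, (list - STORAGE_CLASSES).empty?, which may read better than counting a deduplicated union.
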
From 016ec4e070f78ba602a0d381d4bb3ffe861a3510 Mon Sep 17 00:00:00 2001
From: BasilMawejje
Date: Thu, 26 Aug 2021 12:37:23 +0300
Subject: [PATCH 05/12] Add transition tests

---
 app/models/conditions_response/backup.rb        |  7 ++---
 .../models/conditions_response/backup_spec.rb   | 28 ++++++++++++++++---
 2 files changed, 27 insertions(+), 8 deletions(-)

diff --git a/app/models/conditions_response/backup.rb b/app/models/conditions_response/backup.rb
index a969f9103..e0f7ee09c 100644
--- a/app/models/conditions_response/backup.rb
+++ b/app/models/conditions_response/backup.rb
@@ -299,13 +299,12 @@ class << self
     end
   end
 
-  # method signature => s3_lifecycle_rules(string, string, string, {days: integer, storage_class: upcase string in STORAGE_CLASSES list})
-  # e.g s3_lifecycle_rules('bucket', 'bucket_full_prefix', 'enabled', {days: 30, storage_class: "STANDARD_IA"}, {days: 90, storage_class: "GLACIER"})
+  # s3_lifecycle_rules('bucket', 'bucket_full_prefix', 'enabled', {days: 30, storage_class: 'STANDARD_IA'}, {days: 90, storage_class: 'GLACIER'})
   def self.s3_lifecycle_rules(bucket, bucket_full_prefix, status, *storage_rules_kwargs)
     client = Aws::S3::Client.new(region: ENV['SHF_AWS_S3_BACKUP_REGION'],
                                  credentials: Aws::Credentials.new(ENV['SHF_AWS_S3_BACKUP_KEY_ID'], ENV['SHF_AWS_S3_BACKUP_SECRET_ACCESS_KEY']))
 
-    storage_class_list = storage_rules_kwargs.map{|h| h.values.last}
+    storage_class_list = storage_rules_kwargs.flatten.map{|h| h.values.last}
     unless storage_class_is_valid? storage_class_list
       client.put_bucket_lifecycle_configuration({
@@ -321,7 +320,7 @@ def self.s3_lifecycle_rules(bucket, bucket_full_prefix, status, *storage_rules_k
                 prefix: bucket_full_prefix
               },
               id: ENV['SHF_AWS_S3_BACKUP_KEY_ID'],
-              status: status.capitalize, # String showing 'Enabled' or 'Disabled'
+              status: status.capitalize,
               transitions: storage_rules_kwargs
             }
           ]
diff --git a/spec/models/conditions_response/backup_spec.rb b/spec/models/conditions_response/backup_spec.rb
index 70273f055..86eae4ac0 100644
--- a/spec/models/conditions_response/backup_spec.rb
+++ b/spec/models/conditions_response/backup_spec.rb
@@ -10,6 +10,7 @@
   let(:mock_bucket_object) { double('Aws::S3::Object', upload_file: true) }
   let(:mock_bucket) { double('Aws::S3::Bucket', object: mock_bucket_object) }
   let(:mock_s3) { double('Aws::S3::Resource', bucket: mock_bucket) }
+  let(:mock_s3_client) { instance_double('Aws::S3::Client') }
 
   let(:bucket_name) { 'bucket_name' }
   let(:bucket_full_prefix) { 'bucket/top/prefix' }
@@ -1306,8 +1307,21 @@ def create_faux_backup_file(backups_dir, file_prefix)
   end
 
   describe 's3_lifecycle_rules(bucket, bucket_full_prefix, status, *storage_rules_kwargs)' do
-    let(:invalid_storage_class_list) { ["INVALID_STORAGE_CLASS", "OTHER_INVALID_STORAGE_CLASS"] }
-    let(:another_invalid_storage_class_list) { ["INVALID_STORAGE_CLASS", "STANDARD_IA", "GLACIER"] }
+    let(:invalid_storage_class_list) { ['INVALID_STORAGE_CLASS', 'OTHER_INVALID_STORAGE_CLASS'] }
+    let(:another_invalid_storage_class_list) { ['INVALID_STORAGE_CLASS', 'STANDARD_IA', 'GLACIER'] }
+    let(:status) { 'Enabled' }
+    let(:storage_rules) { [{days: 30, storage_class: 'STANDARD_IA'}, {days: 90, storage_class: 'GLACIER'}] }
+
+    let(:mock_s3_client) do
+      client = Aws::S3::Client.new(stub_responses: true)
+      client.stub_responses(
+        :put_bucket_lifecycle_configuration, ->(context) {
+          bucket = context.params[:bucket]
+          lifecycle_configuration = context.params[:lifecycle_configuration][:rules]
+        }
+      )
+      client
+    end
 
     it "returns 'Invalid storage class' for a list containing only invalid storage classes" do
       expect(described_class.storage_class_is_valid? invalid_storage_class_list).to eq("Invalid storage class")
@@ -1317,12 +1331,18 @@ def create_faux_backup_file(backups_dir, file_prefix)
    it "returns 'Invalid storage class' for a list containing both valid and invalid storage classes" do
       expect(described_class.storage_class_is_valid? another_invalid_storage_class_list).to eq("Invalid storage class")
     end
 
-    it "fails for empty storage classes list" do
+    it "returns 'Empty storage class' for empty storage classes list" do
       expect(described_class.storage_class_is_valid? []).to eq("Empty storage class")
     end
 
     it 'returns the correct lifecycle rules transitions' do
-      #TODO: Add this test
+      put_lifecycle_data = mock_s3_client.put_bucket_lifecycle_configuration(bucket: bucket_name, lifecycle_configuration: {rules: [{status: status, transitions: storage_rules}]})
+      expect(mock_s3_client).to receive(:get_bucket_lifecycle_configuration).with({bucket: bucket_name}).and_return(put_lifecycle_data)
+      obj = mock_s3_client.get_bucket_lifecycle_configuration(bucket: bucket_name)
+
+      expect(obj[0][:status]).to eq 'Enabled'
+      expect(obj[0][:transitions].count).to eq 2
+      expect(obj[0][:transitions]).to eq [{days: 30, storage_class: 'STANDARD_IA'}, {days: 90, storage_class: 'GLACIER'}]
     end
   end
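
The mock_s3_client introduced here uses the AWS SDK's built-in stubbing mode: Aws::S3::Client.new(stub_responses: true) never touches the network, and stub_responses accepts either canned response data or, as in the spec above, a Proc that receives the request context. A minimal sketch of the data form (the bucket and rule values are made up):

    require 'aws-sdk-s3'

    client = Aws::S3::Client.new(stub_responses: true)

    # Queue a canned response for the next get_bucket_lifecycle_configuration call.
    client.stub_responses(:get_bucket_lifecycle_configuration, {
      rules: [
        { id: 'backups',
          status: 'Enabled',
          transitions: [{ days: 30, storage_class: 'STANDARD_IA' }] }
      ]
    })

    resp = client.get_bucket_lifecycle_configuration(bucket: 'any-name')
    resp.rules.first.transitions.first.storage_class
    # => "STANDARD_IA"

Note that put_bucket_lifecycle_configuration has an empty response body in the real API, so the lambda in the spec that returns the submitted rules is a convenience for assertions rather than a faithful reproduction of S3's behaviour.
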
From 06519316948b38fa435d75cfae46b1bdf000a66b Mon Sep 17 00:00:00 2001
From: BasilMawejje
Date: Thu, 26 Aug 2021 12:50:41 +0300
Subject: [PATCH 06/12] Remove redundant client in setup

---
 spec/models/conditions_response/backup_spec.rb | 1 -
 1 file changed, 1 deletion(-)

diff --git a/spec/models/conditions_response/backup_spec.rb b/spec/models/conditions_response/backup_spec.rb
index 86eae4ac0..1d3613f22 100644
--- a/spec/models/conditions_response/backup_spec.rb
+++ b/spec/models/conditions_response/backup_spec.rb
@@ -10,7 +10,6 @@
   let(:mock_bucket_object) { double('Aws::S3::Object', upload_file: true) }
   let(:mock_bucket) { double('Aws::S3::Bucket', object: mock_bucket_object) }
   let(:mock_s3) { double('Aws::S3::Resource', bucket: mock_bucket) }
-  let(:mock_s3_client) { instance_double('Aws::S3::Client') }
 
   let(:bucket_name) { 'bucket_name' }
   let(:bucket_full_prefix) { 'bucket/top/prefix' }
From 4d4ed3fac77bec7228f8fee07b7575d3e45e9f65 Mon Sep 17 00:00:00 2001
From: BasilMawejje
Date: Mon, 30 Aug 2021 15:29:23 +0300
Subject: [PATCH 07/12] storage_rules arg naming

Modify method signature comment to reflect storage_rules as a list
---
 app/models/conditions_response/backup.rb | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/app/models/conditions_response/backup.rb b/app/models/conditions_response/backup.rb
index e0f7ee09c..edca118b6 100644
--- a/app/models/conditions_response/backup.rb
+++ b/app/models/conditions_response/backup.rb
@@ -299,12 +299,12 @@ class << self
     end
   end
 
-  # s3_lifecycle_rules('bucket', 'bucket_full_prefix', 'enabled', {days: 30, storage_class: 'STANDARD_IA'}, {days: 90, storage_class: 'GLACIER'})
-  def self.s3_lifecycle_rules(bucket, bucket_full_prefix, status, *storage_rules_kwargs)
+  # s3_lifecycle_rules('bucket', 'bucket_full_prefix', 'enabled', [{days: 30, storage_class: 'STANDARD_IA'}, {days: 90, storage_class: 'GLACIER'}])
+  def self.s3_lifecycle_rules(bucket, bucket_full_prefix, status, *storage_rules)
     client = Aws::S3::Client.new(region: ENV['SHF_AWS_S3_BACKUP_REGION'],
                                  credentials: Aws::Credentials.new(ENV['SHF_AWS_S3_BACKUP_KEY_ID'], ENV['SHF_AWS_S3_BACKUP_SECRET_ACCESS_KEY']))
 
-    storage_class_list = storage_rules_kwargs.flatten.map{|h| h.values.last}
+    storage_class_list = storage_rules.map{|h| h.values.last}
     unless storage_class_is_valid? storage_class_list
       client.put_bucket_lifecycle_configuration({
         bucket: bucket,
@@ -321,7 +321,7 @@ def self.s3_lifecycle_rules(bucket, bucket_full_prefix, status, *storage_rules_k
               },
               id: ENV['SHF_AWS_S3_BACKUP_KEY_ID'],
               status: status.capitalize,
-              transitions: storage_rules_kwargs
+              transitions: storage_rules
             }
           ]
         }
From 3101aedb9ba5c850b791bdb6cc1c42809fb36ef1 Mon Sep 17 00:00:00 2001
From: BasilMawejje
Date: Tue, 31 Aug 2021 15:18:56 +0300
Subject: [PATCH 08/12] Add more test assertions

---
 app/models/conditions_response/backup.rb        |  2 +-
 .../models/conditions_response/backup_spec.rb   | 34 +++++++++++++++----
 2 files changed, 28 insertions(+), 8 deletions(-)

diff --git a/app/models/conditions_response/backup.rb b/app/models/conditions_response/backup.rb
index edca118b6..70e61e25a 100644
--- a/app/models/conditions_response/backup.rb
+++ b/app/models/conditions_response/backup.rb
@@ -304,7 +304,7 @@ def self.s3_lifecycle_rules(bucket, bucket_full_prefix, status, *storage_rules)
     client = Aws::S3::Client.new(region: ENV['SHF_AWS_S3_BACKUP_REGION'],
                                  credentials: Aws::Credentials.new(ENV['SHF_AWS_S3_BACKUP_KEY_ID'], ENV['SHF_AWS_S3_BACKUP_SECRET_ACCESS_KEY']))
 
-    storage_class_list = storage_rules.map{|h| h.values.last}
+    storage_class_list = storage_rules.flatten.map{|h| h.values.last}
     unless storage_class_is_valid? storage_class_list
       client.put_bucket_lifecycle_configuration({
         bucket: bucket,
diff --git a/spec/models/conditions_response/backup_spec.rb b/spec/models/conditions_response/backup_spec.rb
index 1d3613f22..6383d3fd1 100644
--- a/spec/models/conditions_response/backup_spec.rb
+++ b/spec/models/conditions_response/backup_spec.rb
@@ -1335,13 +1335,33 @@ def create_faux_backup_file(backups_dir, file_prefix)
     end
 
     it 'returns the correct lifecycle rules transitions' do
-      put_lifecycle_data = mock_s3_client.put_bucket_lifecycle_configuration(bucket: bucket_name, lifecycle_configuration: {rules: [{status: status, transitions: storage_rules}]})
-      expect(mock_s3_client).to receive(:get_bucket_lifecycle_configuration).with({bucket: bucket_name}).and_return(put_lifecycle_data)
-      obj = mock_s3_client.get_bucket_lifecycle_configuration(bucket: bucket_name)
-
-      expect(obj[0][:status]).to eq 'Enabled'
-      expect(obj[0][:transitions].count).to eq 2
-      expect(obj[0][:transitions]).to eq [{days: 30, storage_class: 'STANDARD_IA'}, {days: 90, storage_class: 'GLACIER'}]
+      put_mock_data = mock_s3_client.put_bucket_lifecycle_configuration(
+        bucket: bucket_name,
+        lifecycle_configuration: {
+          rules: [
+            {
+              expiration: {
+                days: 365
+              },
+              filter: {
+                prefix: bucket_full_prefix
+              },
+              id: "TestOnly",
+              status: status,
+              transitions: storage_rules
+            }
+          ]
+        }
+      )
+
+      expect(mock_s3_client).to receive(:get_bucket_lifecycle_configuration).with({bucket: bucket_name}).and_return(put_mock_data)
+      get_mock_data = mock_s3_client.get_bucket_lifecycle_configuration(bucket: bucket_name)
+
+      expect(get_mock_data[0][:id]).to eq 'TestOnly'
+      expect(get_mock_data[0][:status]).to eq 'Enabled'
+      expect(get_mock_data[0][:filter][:prefix]).to eq 'bucket/top/prefix'
+      expect(get_mock_data[0][:transitions].count).to eq 2
+      expect(get_mock_data[0][:transitions]).to eq [{days: 30, storage_class: 'STANDARD_IA'}, {days: 90, storage_class: 'GLACIER'}]
     end
   end
 end
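
Patches 07 and 08 go back and forth on flatten because of how the splat parameter interacts with an array argument: calling s3_lifecycle_rules(..., [rule1, rule2]) makes storage_rules a nested array ([[rule1, rule2]]), so mapping over it without flattening yields arrays rather than rule hashes. A small sketch:

    def rules_demo(*storage_rules)
      storage_rules.flatten.map { |h| h[:storage_class] }
    end

    # The splat wraps the single array argument in another array,
    # which is why the flatten is needed:
    rules_demo([{ days: 90, storage_class: 'GLACIER' }])
    # => ["GLACIER"]

Patch 10 later sidesteps the problem entirely by making storage_rules a required keyword argument that receives the array as-is.
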
From bf19659da390bcb928fd31f57e02dff8350f4735 Mon Sep 17 00:00:00 2001
From: BasilMawejje
Date: Wed, 1 Sep 2021 09:50:37 +0300
Subject: [PATCH 09/12] Add test to assert method call

---
 spec/models/conditions_response/backup_spec.rb | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/spec/models/conditions_response/backup_spec.rb b/spec/models/conditions_response/backup_spec.rb
index 6383d3fd1..6a694d883 100644
--- a/spec/models/conditions_response/backup_spec.rb
+++ b/spec/models/conditions_response/backup_spec.rb
@@ -1321,6 +1321,11 @@ def create_faux_backup_file(backups_dir, file_prefix)
       )
       client
     end
-
+
+    it 'calls #s3_lifecycle_rules once' do
+      expect(described_class).to receive(:s3_lifecycle_rules).with(bucket_name, bucket_full_prefix, status, storage_rules)
+      described_class.s3_lifecycle_rules(bucket_name, bucket_full_prefix, status, storage_rules)
+    end
+
     it "returns 'Invalid storage class' for a list containing only invalid storage classes" do
       expect(described_class.storage_class_is_valid? invalid_storage_class_list).to eq("Invalid storage class")
From 631f7e03c206307b59a41500b1dcc5c68db4a9a2 Mon Sep 17 00:00:00 2001
From: BasilMawejje
Date: Thu, 2 Sep 2021 20:47:17 +0300
Subject: [PATCH 10/12] Change method params from positional args to required
 kwargs

---
 app/models/conditions_response/backup.rb        | 6 +++---
 spec/models/conditions_response/backup_spec.rb  | 9 +++++----
 2 files changed, 8 insertions(+), 7 deletions(-)

diff --git a/app/models/conditions_response/backup.rb b/app/models/conditions_response/backup.rb
index 70e61e25a..1b000be17 100644
--- a/app/models/conditions_response/backup.rb
+++ b/app/models/conditions_response/backup.rb
@@ -299,15 +299,15 @@ class << self
     end
   end
 
-  # s3_lifecycle_rules('bucket', 'bucket_full_prefix', 'enabled', [{days: 30, storage_class: 'STANDARD_IA'}, {days: 90, storage_class: 'GLACIER'}])
-  def self.s3_lifecycle_rules(bucket, bucket_full_prefix, status, *storage_rules)
+  # s3_lifecycle_rules(bucket_name: 'bucket_name', bucket_full_prefix: 'bucket_full_prefix', status: 'enabled', storage_rules: [{days: 30, storage_class: 'STANDARD_IA'}, {days: 90, storage_class: 'GLACIER'}])
+  def self.s3_lifecycle_rules(bucket_name:, bucket_full_prefix:, status:, storage_rules:)
     client = Aws::S3::Client.new(region: ENV['SHF_AWS_S3_BACKUP_REGION'],
                                  credentials: Aws::Credentials.new(ENV['SHF_AWS_S3_BACKUP_KEY_ID'], ENV['SHF_AWS_S3_BACKUP_SECRET_ACCESS_KEY']))
 
     storage_class_list = storage_rules.flatten.map{|h| h.values.last}
     unless storage_class_is_valid? storage_class_list
       client.put_bucket_lifecycle_configuration({
-        bucket: bucket,
+        bucket: bucket_name,
         lifecycle_configuration: {
           rules: [
             {
diff --git a/spec/models/conditions_response/backup_spec.rb b/spec/models/conditions_response/backup_spec.rb
index 6a694d883..a34f613ae 100644
--- a/spec/models/conditions_response/backup_spec.rb
+++ b/spec/models/conditions_response/backup_spec.rb
@@ -1310,6 +1310,7 @@ def create_faux_backup_file(backups_dir, file_prefix)
     let(:another_invalid_storage_class_list) { ['INVALID_STORAGE_CLASS', 'STANDARD_IA', 'GLACIER'] }
     let(:status) { 'Enabled' }
     let(:storage_rules) { [{days: 30, storage_class: 'STANDARD_IA'}, {days: 90, storage_class: 'GLACIER'}] }
+    let(:stub_s3_client) { double('Aws::S3::Client', bucket: mock_bucket) }
 
     let(:mock_s3_client) do
       client = Aws::S3::Client.new(stub_responses: true)
@@ -1323,8 +1324,8 @@ def create_faux_backup_file(backups_dir, file_prefix)
       )
       client
     end
 
     it 'calls #s3_lifecycle_rules once' do
-      expect(described_class).to receive(:s3_lifecycle_rules).with(bucket_name, bucket_full_prefix, status, storage_rules)
-      described_class.s3_lifecycle_rules(bucket_name, bucket_full_prefix, status, storage_rules)
+      expect(described_class).to receive(:s3_lifecycle_rules).with(bucket_name: bucket_name, bucket_full_prefix: bucket_full_prefix, status: status, storage_rules: storage_rules)
+      described_class.s3_lifecycle_rules(bucket_name: bucket_name, bucket_full_prefix: bucket_full_prefix, status: status, storage_rules: storage_rules)
     end
@@ -1359,8 +1360,8 @@ def create_faux_backup_file(backups_dir, file_prefix)
         }
       )
 
-      expect(mock_s3_client).to receive(:get_bucket_lifecycle_configuration).with({bucket: bucket_name}).and_return(put_mock_data)
-      get_mock_data = mock_s3_client.get_bucket_lifecycle_configuration(bucket: bucket_name)
+      allow(stub_s3_client).to receive(:get_bucket_lifecycle_configuration).with({bucket: bucket_name}).and_return(put_mock_data)
+      get_mock_data = stub_s3_client.get_bucket_lifecycle_configuration(bucket: bucket_name)
 
       expect(get_mock_data[0][:id]).to eq 'TestOnly'
       expect(get_mock_data[0][:status]).to eq 'Enabled'
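
With required keyword arguments, every call site now has to name each parameter, and a forgotten argument fails fast instead of silently shifting positions. A sketch of the calling convention this patch establishes (standalone demo, not the full method body):

    def s3_lifecycle_rules(bucket_name:, bucket_full_prefix:, status:, storage_rules:)
      # ... build and submit the lifecycle configuration ...
    end

    s3_lifecycle_rules(bucket_name: 'backups',
                       bucket_full_prefix: 'db/2021/',
                       status: 'enabled',
                       storage_rules: [{ days: 90, storage_class: 'GLACIER' }])

    # Positional calls no longer work:
    # s3_lifecycle_rules('backups', 'db/2021/', 'enabled', [])
    # => ArgumentError (missing keywords)
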
From 5dfe439df9e223d344912e8d2668c6995d18216d Mon Sep 17 00:00:00 2001
From: BasilMawejje
Date: Thu, 7 Oct 2021 15:26:20 +0300
Subject: [PATCH 11/12] Add call to set_s3_lifecycle method in
 iterate_and_log_notify_errors

---
 app/models/conditions_response/backup.rb        |  9 ++++++++-
 spec/models/conditions_response/backup_spec.rb  | 18 ++++++++++++------
 2 files changed, 20 insertions(+), 7 deletions(-)

diff --git a/app/models/conditions_response/backup.rb b/app/models/conditions_response/backup.rb
index 1b000be17..5baa12787 100644
--- a/app/models/conditions_response/backup.rb
+++ b/app/models/conditions_response/backup.rb
@@ -86,6 +86,13 @@ def self.condition_response(condition, log, use_slack_notification: true)
 
     iterate_and_log_notify_errors(backup_files, 'in backup_files loop, uploading_file_to_s3', log) do |backup_file|
       upload_file_to_s3(aws_s3, aws_s3_backup_bucket, aws_backup_bucket_full_prefix, backup_file)
+      # When we first upload our file to s3, the default storage class is STANDARD
+      # After 1 month, we want to transition the object to STANDARD IA,
+      # then GLACIER after 3 months. This can be changed to meet our needs.
+      # This will help us save on costs.
+      # This however has effects on retrieval time for objects which you can see in the link below
+      # https://aws.amazon.com/s3/storage-classes/#Performance_across_the_S3_Storage_Classes
+      set_s3_lifecycle_rules(bucket_name: aws_s3_backup_bucket, bucket_full_prefix: aws_backup_bucket_full_prefix, status: 'enabled', storage_rules: [{days: 30, storage_class: 'STANDARD_IA'}, {days: 90, storage_class: 'GLACIER'}])
     end
 
     log.record('info', 'Pruning older backups on local storage')
@@ -300,7 +307,7 @@ class << self
     end
   end
 
   # s3_lifecycle_rules(bucket_name: 'bucket_name', bucket_full_prefix: 'bucket_full_prefix', status: 'enabled', storage_rules: [{days: 30, storage_class: 'STANDARD_IA'}, {days: 90, storage_class: 'GLACIER'}])
-  def self.s3_lifecycle_rules(bucket_name:, bucket_full_prefix:, status:, storage_rules:)
+  def self.set_s3_lifecycle_rules(bucket_name:, bucket_full_prefix:, status:, storage_rules:)
     client = Aws::S3::Client.new(region: ENV['SHF_AWS_S3_BACKUP_REGION'],
                                  credentials: Aws::Credentials.new(ENV['SHF_AWS_S3_BACKUP_KEY_ID'], ENV['SHF_AWS_S3_BACKUP_SECRET_ACCESS_KEY']))
diff --git a/spec/models/conditions_response/backup_spec.rb b/spec/models/conditions_response/backup_spec.rb
index a34f613ae..a461b6dfd 100644
--- a/spec/models/conditions_response/backup_spec.rb
+++ b/spec/models/conditions_response/backup_spec.rb
@@ -692,7 +692,6 @@ def create_faux_backup_file(backups_dir, file_prefix)
 
     let!(:temp_backups_dir) { Dir.mktmpdir('faux-backups-dir') }
     let!(:faux_backup_fn) { create_faux_backup_file(temp_backups_dir, 'faux_backup.bak') }
-
     it '.upload_file_to_s3 calls .upload_file for the bucket, full object name, and file to upload' do
       expect(mock_bucket_object).to receive(:upload_file).with(faux_backup_fn, anything)
       Backup.upload_file_to_s3(mock_s3, bucket_name, bucket_full_prefix, faux_backup_fn)
@@ -1251,7 +1250,9 @@ def create_faux_backup_file(backups_dir, file_prefix)
 
   describe 'iterate_and_log_notify_errors(list, slack_error_details, log)' do
 
-
+    let(:status) { 'Enabled' }
+    let(:storage_rules) { [{days: 30, storage_class: 'STANDARD_IA'}, {days: 90, storage_class: 'GLACIER'}] }
+
     before(:each) do
       allow(SHFNotifySlack).to receive(:failure_notification)
                                  .with(anything, anything)
@@ -1303,9 +1304,14 @@ def create_faux_backup_file(backups_dir, file_prefix)
       expect(@result_str).to eq 'ac'
     end
 
+    it 'adds a bucket lifecycle policy to the object' do
+      expect(described_class).to receive(:set_s3_lifecycle_rules).with(bucket_name: bucket_name, bucket_full_prefix: bucket_full_prefix, status: status, storage_rules: storage_rules)
+      described_class.set_s3_lifecycle_rules(bucket_name: bucket_name, bucket_full_prefix: bucket_full_prefix, status: status, storage_rules: storage_rules)
+    end
+
   end
 
-  describe 's3_lifecycle_rules(bucket, bucket_full_prefix, status, *storage_rules_kwargs)' do
+  describe 'set_s3_lifecycle_rules(bucket, bucket_full_prefix, status, *storage_rules_kwargs)' do
     let(:invalid_storage_class_list) { ['INVALID_STORAGE_CLASS', 'OTHER_INVALID_STORAGE_CLASS'] }
     let(:another_invalid_storage_class_list) { ['INVALID_STORAGE_CLASS', 'STANDARD_IA', 'GLACIER'] }
     let(:status) { 'Enabled' }
@@ -1323,9 +1329,9 @@ def create_faux_backup_file(backups_dir, file_prefix)
       )
       client
     end
 
-    it 'calls #s3_lifecycle_rules once' do
-      expect(described_class).to receive(:s3_lifecycle_rules).with(bucket_name: bucket_name, bucket_full_prefix: bucket_full_prefix, status: status, storage_rules: storage_rules)
-      described_class.s3_lifecycle_rules(bucket_name: bucket_name, bucket_full_prefix: bucket_full_prefix, status: status, storage_rules: storage_rules)
+    it 'calls #set_s3_lifecycle_rules once' do
+      expect(described_class).to receive(:set_s3_lifecycle_rules).with(bucket_name: bucket_name, bucket_full_prefix: bucket_full_prefix, status: status, storage_rules: storage_rules)
+      described_class.set_s3_lifecycle_rules(bucket_name: bucket_name, bucket_full_prefix: bucket_full_prefix, status: status, storage_rules: storage_rules)
     end
From d4e3fee9455338d69a4a4325e67d5e65b312f058 Mon Sep 17 00:00:00 2001
From: BasilMawejje
Date: Mon, 11 Oct 2021 17:44:01 +0300
Subject: [PATCH 12/12] Modify storage class days based on revised document

---
 app/models/conditions_response/backup.rb        | 18 +++++++-----------
 spec/models/conditions_response/backup_spec.rb  |  5 +++--
 2 files changed, 10 insertions(+), 13 deletions(-)

diff --git a/app/models/conditions_response/backup.rb b/app/models/conditions_response/backup.rb
index 5baa12787..94aa345c8 100644
--- a/app/models/conditions_response/backup.rb
+++ b/app/models/conditions_response/backup.rb
@@ -86,13 +86,8 @@ def self.condition_response(condition, log, use_slack_notification: true)
 
     iterate_and_log_notify_errors(backup_files, 'in backup_files loop, uploading_file_to_s3', log) do |backup_file|
       upload_file_to_s3(aws_s3, aws_s3_backup_bucket, aws_backup_bucket_full_prefix, backup_file)
-      # When we first upload our file to s3, the default storage class is STANDARD
-      # After 1 month, we want to transition the object to STANDARD IA,
-      # then GLACIER after 3 months. This can be changed to meet our needs.
-      # This will help us save on costs.
-      # This however has effects on retrieval time for objects which you can see in the link below
-      # https://aws.amazon.com/s3/storage-classes/#Performance_across_the_S3_Storage_Classes
-      set_s3_lifecycle_rules(bucket_name: aws_s3_backup_bucket, bucket_full_prefix: aws_backup_bucket_full_prefix, status: 'enabled', storage_rules: [{days: 30, storage_class: 'STANDARD_IA'}, {days: 90, storage_class: 'GLACIER'}])
+      # When we first upload our file to s3, the default storage class is STANDARD_IA
+      set_s3_lifecycle_rules(bucket_name: aws_s3_backup_bucket, bucket_full_prefix: aws_backup_bucket_full_prefix, status: 'enabled', storage_rules: [{days: 90, storage_class: 'GLACIER'}, {days: 450, storage_class: 'DEEP_ARCHIVE'}])
     end
 
     log.record('info', 'Pruning older backups on local storage')
@@ -149,7 +144,7 @@ def self.s3_backup_bucket_full_prefix(today = Date.current)
   # @see https://aws.amazon.com/blogs/developer/uploading-files-to-amazon-s3/
   def self.upload_file_to_s3(s3, bucket, bucket_folder, file)
     obj = s3.bucket(bucket).object(bucket_folder + File.basename(file))
-    obj.upload_file(file, { tagging: aws_date_tags })
+    obj.upload_file(file, { tagging: aws_date_tags, storage_class: 'STANDARD_IA' })
   end
@@ -295,7 +290,7 @@ def self.iterate_and_log_notify_errors(list, additional_error_info, log, use_sla
   end
 
 
-  STORAGE_CLASSES = %w(STANDARD_IA GLACIER DEEP_ARCHIVE).freeze
+  STORAGE_CLASSES = %w(GLACIER DEEP_ARCHIVE).freeze
   class << self
     define_method(:storage_class_is_valid?) do |storage_class_list|
       unless storage_class_list.empty?
@@ -306,7 +301,7 @@ class << self
     end
   end
 
-  # s3_lifecycle_rules(bucket_name: 'bucket_name', bucket_full_prefix: 'bucket_full_prefix', status: 'enabled', storage_rules: [{days: 30, storage_class: 'STANDARD_IA'}, {days: 90, storage_class: 'GLACIER'}])
+  # s3_lifecycle_rules(bucket_name: 'bucket_name', bucket_full_prefix: 'bucket_full_prefix', status: 'enabled', storage_rules: [{days: 90, storage_class: 'GLACIER'}, {days: 450, storage_class: 'DEEP_ARCHIVE'}])
   def self.set_s3_lifecycle_rules(bucket_name:, bucket_full_prefix:, status:, storage_rules:)
     client = Aws::S3::Client.new(region: ENV['SHF_AWS_S3_BACKUP_REGION'],
                                  credentials: Aws::Credentials.new(ENV['SHF_AWS_S3_BACKUP_KEY_ID'], ENV['SHF_AWS_S3_BACKUP_SECRET_ACCESS_KEY']))
@@ -319,8 +314,9 @@ def self.set_s3_lifecycle_rules(bucket_name:, bucket_full_prefix:, status:, stor
           rules: [
             {
               expiration: {
+                # Expire objects after 10 years
                 date: Time.now,
-                days: 365,
+                days: 3650,
                 expired_object_delete_marker: false
               },
               filter: {
diff --git a/spec/models/conditions_response/backup_spec.rb b/spec/models/conditions_response/backup_spec.rb
index a461b6dfd..84200c81d 100644
--- a/spec/models/conditions_response/backup_spec.rb
+++ b/spec/models/conditions_response/backup_spec.rb
@@ -699,10 +699,10 @@ def create_faux_backup_file(backups_dir, file_prefix)
       FileUtils.remove_entry(temp_backups_dir, true)
     end
 
-    it 'adds date tags to the object' do
+    it 'adds date tags and STANDARD_IA storage class to the object' do
       expect(mock_bucket_object).to receive(:upload_file)
                                       .with(faux_backup_fn,
-                                            {tagging: 'this is the tagging string'})
+                                            {storage_class: 'STANDARD_IA', tagging: 'this is the tagging string'})
       expect(described_class).to receive(:aws_date_tags).and_return('this is the tagging string')
 
       Backup.upload_file_to_s3(mock_s3, bucket_name, bucket_full_prefix, faux_backup_fn)
@@ -1372,6 +1372,7 @@ def create_faux_backup_file(backups_dir, file_prefix)
       expect(get_mock_data[0][:id]).to eq 'TestOnly'
       expect(get_mock_data[0][:status]).to eq 'Enabled'
       expect(get_mock_data[0][:filter][:prefix]).to eq 'bucket/top/prefix'
+      expect(get_mock_data[0][:expiration][:days]).to eq 365
       expect(get_mock_data[0][:transitions].count).to eq 2
       expect(get_mock_data[0][:transitions]).to eq [{days: 30, storage_class: 'STANDARD_IA'}, {days: 90, storage_class: 'GLACIER'}]
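
Taken together, the final state of the series uploads each backup as STANDARD_IA and lets the bucket lifecycle age it out: GLACIER after 90 days, DEEP_ARCHIVE after 450, with expiration pushed out to roughly ten years. A sketch of the resulting flow, assuming the SHF_AWS_S3_BACKUP_* environment variables are configured and using made-up bucket, prefix, and tag values:

    # Upload goes straight to STANDARD_IA rather than the default STANDARD class.
    obj = aws_s3.bucket('shf-backups').object('db_backup/2021-10-11/backup.sql.gz')
    obj.upload_file('/tmp/backup.sql.gz',
                    { tagging: 'backup-date=2021-10-11', storage_class: 'STANDARD_IA' })

    # One lifecycle rule covers everything under the backup prefix.
    Backup.set_s3_lifecycle_rules(
      bucket_name: 'shf-backups',
      bucket_full_prefix: 'db_backup/',
      status: 'enabled',   # capitalized to 'Enabled' inside the method
      storage_rules: [{ days: 90,  storage_class: 'GLACIER' },
                      { days: 450, storage_class: 'DEEP_ARCHIVE' }]
    )

Since STANDARD_IA was dropped from STORAGE_CLASSES in this last patch, passing it as a transition target would now fail the storage_class_is_valid? check — only GLACIER and DEEP_ARCHIVE remain valid transition destinations.
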