Modify storage class days based on revised document
BasilMawejje committed Oct 11, 2021
1 parent bd4fca2 commit d4e3fee
Showing 2 changed files with 10 additions and 13 deletions.
18 changes: 7 additions & 11 deletions app/models/conditions_response/backup.rb
@@ -86,13 +86,8 @@ def self.condition_response(condition, log, use_slack_notification: true)

    iterate_and_log_notify_errors(backup_files, 'in backup_files loop, uploading_file_to_s3', log) do |backup_file|
      upload_file_to_s3(aws_s3, aws_s3_backup_bucket, aws_backup_bucket_full_prefix, backup_file)
-     # When we first upload our file to s3, the default storage class is STANDARD
-     # After 1 month, we want to to transition the object to STANDARD IA,
-     # then GLACIER after 3 months. This can be changed to meet our needs.
-     # This will help us save on costs.
-     # This however has effects on retrieval time for objects which you can see in the link below
-     # https://aws.amazon.com/s3/storage-classes/#Performance_across_the_S3_Storage_Classes
-     set_s3_lifecycle_rules(bucket_name: aws_s3_backup_bucket, bucket_full_prefix: aws_backup_bucket_full_prefix, status: 'enabled', storage_rules: [{days: 30, storage_class: 'STANDARD_IA'}, {days: 90, storage_class: 'GLACIER'}])
+     # When we first upload our file to s3, the default storage class is STANDARD_IA
+     set_s3_lifecycle_rules(bucket_name: aws_s3_backup_bucket, bucket_full_prefix: aws_backup_bucket_full_prefix, status: 'enabled', storage_rules: [{days: 90, storage_class: 'GLACIER'}, {days: 450, storage_class: 'DEEP_ARCHIVE'}])
    end

    log.record('info', 'Pruning older backups on local storage')
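
The storage_rules argument above drives S3 lifecycle transitions, with days counted from object creation. A minimal sketch of the schedule this change puts in place (the comments are explanatory and not part of the diff):

# Files are uploaded as STANDARD_IA (see upload_file_to_s3 below), then moved
# down the storage tiers by the bucket lifecycle rules.
storage_rules = [
  { days: 90,  storage_class: 'GLACIER' },       # archive after ~3 months
  { days: 450, storage_class: 'DEEP_ARCHIVE' }   # deep-archive after ~15 months
]
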
@@ -149,7 +144,7 @@ def self.s3_backup_bucket_full_prefix(today = Date.current)
  # @see https://aws.amazon.com/blogs/developer/uploading-files-to-amazon-s3/
  def self.upload_file_to_s3(s3, bucket, bucket_folder, file)
    obj = s3.bucket(bucket).object(bucket_folder + File.basename(file))
-   obj.upload_file(file, { tagging: aws_date_tags })
+   obj.upload_file(file, { tagging: aws_date_tags, storage_class: 'STANDARD_IA' })
  end
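
For reference, Aws::S3::Object#upload_file forwards options such as tagging and storage_class to the underlying PutObject (or multipart upload) request, so here the storage class is fixed at upload time rather than by a later lifecycle transition. A minimal, self-contained usage sketch; the bucket name, key, region, and local path are placeholders:

require 'aws-sdk-s3'

s3  = Aws::S3::Resource.new(region: 'eu-west-1')                      # placeholder region
obj = s3.bucket('my-backup-bucket').object('backups/db_backup.sql.gz')
# tagging is a URL-encoded "key=value" string; storage_class is passed straight through
obj.upload_file('/tmp/db_backup.sql.gz',
                tagging: 'backup-date=2021-10-11',
                storage_class: 'STANDARD_IA')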


@@ -295,7 +290,7 @@ def self.iterate_and_log_notify_errors(list, additional_error_info, log, use_sla
  end


- STORAGE_CLASSES = %w(STANDARD_IA GLACIER DEEP_ARCHIVE).freeze
+ STORAGE_CLASSES = %w(GLACIER DEEP_ARCHIVE).freeze
  class << self
    define_method(:storage_class_is_valid?) do |storage_class_list|
      unless storage_class_list.empty?
@@ -306,7 +301,7 @@ class << self
      end
    end

- # s3_lifecycle_rules(bucket_name: 'bucket_name', bucket_full_prefix: 'bucket_full_prefix', status: 'enabled', storage_rules: [{days: 30, storage_class: 'STANDARD_IA'}, {days: 90, storage_class: 'GLACIER'}])
+ # s3_lifecycle_rules(bucket_name: 'bucket_name', bucket_full_prefix: 'bucket_full_prefix', status: 'enabled', storage_rules: [{days: 90, storage_class: 'GLACIER'}, {days: 450, storage_class: 'DEEP_ARCHIVE'}])
  def self.set_s3_lifecycle_rules(bucket_name:, bucket_full_prefix:, status:, storage_rules:)
    client = Aws::S3::Client.new(region: ENV['SHF_AWS_S3_BACKUP_REGION'],
                                 credentials: Aws::Credentials.new(ENV['SHF_AWS_S3_BACKUP_KEY_ID'], ENV['SHF_AWS_S3_BACKUP_SECRET_ACCESS_KEY']))
@@ -319,8 +314,9 @@ def self.set_s3_lifecycle_rules(bucket_name:, bucket_full_prefix:, status:, stor
      rules: [
        {
          expiration: {
+           # Expire objects after 10 years
            date: Time.now,
-           days: 365,
+           days: 3650,
            expired_object_delete_marker: false
          },
          filter: {
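The body of set_s3_lifecycle_rules is only partially shown above, but the diff implies it turns the storage_rules argument into a bucket lifecycle configuration. A hedged sketch of the underlying aws-sdk-s3 call with the new values; the rule id, bucket name, and prefix are illustrative, the ENV variable names come from the diff, and the expiration is simplified to the ten-year figure in the new comment:

require 'aws-sdk-s3'

client = Aws::S3::Client.new(
  region: ENV['SHF_AWS_S3_BACKUP_REGION'],
  credentials: Aws::Credentials.new(ENV['SHF_AWS_S3_BACKUP_KEY_ID'],
                                    ENV['SHF_AWS_S3_BACKUP_SECRET_ACCESS_KEY']))

client.put_bucket_lifecycle_configuration(
  bucket: 'my-backup-bucket',                            # illustrative bucket name
  lifecycle_configuration: {
    rules: [{
      id:     'backup-tiering',                          # illustrative rule id
      status: 'Enabled',
      filter: { prefix: 'backups/' },                    # illustrative prefix
      transitions: [
        { days: 90,  storage_class: 'GLACIER' },
        { days: 450, storage_class: 'DEEP_ARCHIVE' }
      ],
      expiration: { days: 3650 }                         # expire objects after 10 years
    }]
  })
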
5 changes: 3 additions & 2 deletions spec/models/conditions_response/backup_spec.rb
@@ -699,10 +699,10 @@ def create_faux_backup_file(backups_dir, file_prefix)
    FileUtils.remove_entry(temp_backups_dir, true)
  end

- it 'adds date tags to the object' do
+ it 'adds date tags and STANDARD_IA storage class to the object' do
    expect(mock_bucket_object).to receive(:upload_file)
                                    .with(faux_backup_fn,
-                                         {tagging: 'this is the tagging string'})
+                                         {storage_class: 'STANDARD_IA', tagging: 'this is the tagging string'})

    expect(described_class).to receive(:aws_date_tags).and_return('this is the tagging string')
    Backup.upload_file_to_s3(mock_s3, bucket_name, bucket_full_prefix, faux_backup_fn)
@@ -1372,6 +1372,7 @@ def create_faux_backup_file(backups_dir, file_prefix)
    expect(get_mock_data[0][:id]).to eq 'TestOnly'
    expect(get_mock_data[0][:status]).to eq 'Enabled'
    expect(get_mock_data[0][:filter][:prefix]).to eq 'bucket/top/prefix'
+   expect(get_mock_data[0][:expiration][:days]).to eq 365
    expect(get_mock_data[0][:transitions].count).to eq 2
    expect(get_mock_data[0][:transitions]).to eq [{days: 30, storage_class: 'STANDARD_IA'}, {days: 90, storage_class: 'GLACIER'}]
  end
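
The get_mock_data assertions above read back whatever rules were applied to a stubbed client; one common way to capture those parameters is the SDK's built-in stubbing, sketched here with the mock values from the spec (this is an assumption about the approach, not necessarily how the spec's get_mock_data helper is implemented):

require 'aws-sdk-s3'

# stub_responses: true prevents real AWS calls and records every request made
client = Aws::S3::Client.new(stub_responses: true, region: 'eu-west-1')
client.put_bucket_lifecycle_configuration(
  bucket: 'bucket_name',
  lifecycle_configuration: {
    rules: [{ id: 'TestOnly', status: 'Enabled',
              filter: { prefix: 'bucket/top/prefix' },
              transitions: [{ days: 30, storage_class: 'STANDARD_IA' },
                            { days: 90, storage_class: 'GLACIER' }],
              expiration: { days: 365 } }]
  })

# The recorded parameters can then be inspected in the spec:
rules = client.api_requests.last[:params][:lifecycle_configuration][:rules]
rules[0][:transitions].count    # => 2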
