Skip to content

Commit 95a9c21

Browse files
committed
backup/restore script
1 parent 75cb168 commit 95a9c21

File tree

2 files changed: 13 additions (+13) and 28 deletions (-28)

lib/tasks/backup_db_rds.rake

Lines changed: 5 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,5 @@
1+
require 'aws-sdk-s3'
2+
13
desc "Update the development db to what is being used in prod"
24
task :backup_db_rds => :environment do
35
logger = Logger.new(STDOUT)
@@ -6,18 +8,12 @@ task :backup_db_rds => :environment do
68
current_time = Time.current.strftime("%Y%m%d%H%M%S")
79

810
logger.info("Copying the database...")
9-
backup_filename = "#{current_time}.rds.dump"
11+
backup_filename = "#{Rails.env}-#{current_time}.rds.dump"
1012
system("PGPASSWORD='#{ENV["DIAPER_DB_PASSWORD"]}' pg_dump -Fc -v --host=#{ENV["DIAPER_DB_HOST"]} --username=#{ENV["DIAPER_DB_USERNAME"]} --dbname=#{ENV["DIAPER_DB_DATABASE"]} -f #{backup_filename}")
1113

12-
account_name = ENV["AZURE_STORAGE_ACCOUNT_NAME"]
13-
account_key = ENV["AZURE_STORAGE_ACCESS_KEY"]
14-
15-
blob_client = Azure::Storage::Blob::BlobService.create(
16-
storage_account_name: account_name,
17-
storage_access_key: account_key
18-
)
14+
client = Aws::S3::Client.new
1915

2016
logger.info("Uploading #{backup_filename}")
21-
blob_client.create_block_blob("backups", backup_filename, File.read(backup_filename))
17+
client.put_object(bucket: "human-essentials-backups", key: "backups/#{backup_filename}", body: File.read(backup_filename))
2218
File.delete(backup_filename)
2319
end

lib/tasks/fetch_latest_db.rake

Lines changed: 8 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,7 @@
1+
require 'aws-sdk-s3'
2+
13
desc "Update the development db to what is being used in prod"
2-
BACKUP_CONTAINER_NAME = 'backups'
4+
BUCKET_NAME = "human-essentials-backups"
35
PASSWORD_REPLACEMENT = 'password'
46

57
task :fetch_latest_db do
@@ -51,44 +53,31 @@ end
5153
private
5254

5355
def fetch_latest_backups
54-
backups = blob_client.list_blobs(BACKUP_CONTAINER_NAME)
56+
backups = blob_client.list_objects_v2(bucket: BUCKET_NAME)
5557

5658
#
5759
# Retrieve the most up to date version of the DB dump
5860
#
5961
backup = backups.select { |b| b.name.match?(".rds.dump") }.sort do |a,b|
60-
Time.parse(a.properties[:last_modified]) <=> Time.parse(b.properties[:last_modified])
62+
Time.parse(a.last_modified) <=> Time.parse(b.last_modified)
6163
end.reverse.first
6264

6365
#
6466
# Download each of the backups onto the local disk in tmp
6567
#
6668
filepath = fetch_file_path(backup)
6769
puts "\nDownloading blob #{backup.name} to #{filepath}"
68-
blob, content = blob_client.get_blob(BACKUP_CONTAINER_NAME, backup.name)
69-
File.open(filepath, "wb") { |f| f.write(content) }
70+
blob_client.get_object(bucket: BUCKET_NAME, key: backup.name, response_target: filepath)
7071

7172
#
7273
# At this point, the dumps should be stored on the local
7374
# machine of the user under tmp.
7475
#
75-
return backup
76+
backup
7677
end
7778

7879
def blob_client
79-
return @blob_client if @blob_client
80-
81-
account_name = ENV["AZURE_STORAGE_ACCOUNT_NAME"]
82-
account_key = ENV["AZURE_STORAGE_ACCESS_KEY"]
83-
84-
if account_name.blank? || account_key.blank?
85-
raise "You must have the correct azure credentials in your ENV"
86-
end
87-
88-
@blob_client = Azure::Storage::Blob::BlobService.create(
89-
storage_account_name: account_name,
90-
storage_access_key: account_key
91-
)
80+
Aws::S3::Client.new
9281
end
9382

9483
def fetch_file_path(backup)

0 commit comments