This repository was archived by the owner on Dec 12, 2025. It is now read-only.
1 change: 1 addition & 0 deletions .tool-versions
@@ -0,0 +1 @@
ruby 2.7.1
9 changes: 9 additions & 0 deletions Dockerfile
@@ -0,0 +1,9 @@
FROM ruby:2.7.1

RUN apt-get update -qq && apt-get install -y gpg

ADD Gemfile Gemfile
ADD Gemfile.lock Gemfile.lock
ADD dockerized_backup.rb dockerized_backup.rb

RUN bundle
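
The image defines no CMD, so the script to run is supplied at container start. A usage sketch, assuming connection details and AWS settings come in via the environment (the image tag and all values are placeholders; note the ruby:2.7.1 base may not ship pg_dump, in which case postgresql-client would need adding to the apt-get line):

docker build -t pg-backup .
docker run --rm \
  -e DATABASE_URL=postgres://user:pass@db.example.com:5432/myapp \
  -e BACKUP_BUCKET_NAME=my-backups \
  -e PROJECT_NAME=myapp \
  -e AWS_ACCESS_KEY_ID=... \
  -e AWS_SECRET_ACCESS_KEY=... \
  -e AWS_REGION=us-east-1 \
  pg-backup ruby dockerized_backup.rb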
2 changes: 1 addition & 1 deletion Gemfile
@@ -1,3 +1,3 @@
source "https://rubygems.org"

gem 'aws-sdk'
gem 'aws-sdk-s3'
31 changes: 21 additions & 10 deletions Gemfile.lock
@@ -1,18 +1,29 @@
GEM
remote: https://rubygems.org/
specs:
aws-sdk (1.61.0)
aws-sdk-v1 (= 1.61.0)
aws-sdk-v1 (1.61.0)
json (~> 1.4)
nokogiri (>= 1.4.4)
json (1.8.2)
mini_portile (0.6.2)
nokogiri (1.6.6.1)
mini_portile (~> 0.6.0)
aws-eventstream (1.1.0)
aws-partitions (1.356.0)
aws-sdk-core (3.104.3)
aws-eventstream (~> 1, >= 1.0.2)
aws-partitions (~> 1, >= 1.239.0)
aws-sigv4 (~> 1.1)
jmespath (~> 1.0)
aws-sdk-kms (1.36.0)
aws-sdk-core (~> 3, >= 3.99.0)
aws-sigv4 (~> 1.1)
aws-sdk-s3 (1.78.0)
aws-sdk-core (~> 3, >= 3.104.3)
aws-sdk-kms (~> 1)
aws-sigv4 (~> 1.1)
aws-sigv4 (1.2.2)
aws-eventstream (~> 1, >= 1.0.2)
jmespath (1.4.0)

PLATFORMS
ruby

DEPENDENCIES
aws-sdk
aws-sdk-s3

BUNDLED WITH
2.1.4
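
Note that the gem swap also changes the calling convention: the v1 AWS.s3 interface used by these scripts does not exist in aws-sdk-s3 v3. A rough mapping sketch (bucket and key names are placeholders):

require 'aws-sdk-s3'

s3 = Aws::S3::Resource.new                          # v1: AWS.s3
object = s3.bucket("my-bucket").object("proj/key")  # v1: s3.buckets["my-bucket"].objects["proj/key"]
object.upload_file("local.dump", acl: "private")    # v1: object.write(Pathname.new("local.dump"), :acl => :private)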
20 changes: 16 additions & 4 deletions backup.rb
@@ -3,10 +3,10 @@
require 'aws-sdk'
require 'fileutils'

# .pgpass file required, it is in the following format
# .pgpass file required if not connecting via peer/ident; it has the following format
# hostname:port:database:username:password
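# e.g. a single entry, placeholder values:
#   db.example.com:5432:myapp:backup_user:s3cret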
pg_user = ENV["POSTGRES_USERNAME"] || "postgres"
pg_host = ENV["POSTGRES_HOST"] || "localhost"
pg_user = ENV["POSTGRES_USERNAME"]
pg_host = ENV["POSTGRES_HOST"]
pg_port = ENV["POSTGRES_PORT"] || "5432"
pg_database = ENV["POSTGRES_DATABASE"]

@@ -18,13 +18,25 @@
time = Time.now.strftime("%Y-%m-%d")
filename = "backup.#{Time.now.to_i}.#{time}.sql.dump"

`pg_dump -Fc --username=#{pg_user} --no-password --host #{pg_host} --port #{pg_port} #{pg_database} > #{filename}`
if pg_user && pg_host
  `pg_dump -Fc --username=#{pg_user} --no-password --host #{pg_host} --port #{pg_port} #{pg_database} > #{filename}`
elsif pg_host
  `pg_dump -Fc --host #{pg_host} --port #{pg_port} #{pg_database} > #{filename}`
else
  `pg_dump -Fc #{pg_database} > #{filename}`
end

# verify file exists and file size is > 0 bytes
unless File.exist?(filename) && File.size(filename) > 0
  raise "Database was not backed up"
end

if ENV["ENCRYPT_TO"]
Contributor:

Can you pipe data into gpg? We could skip the unencrypted file if it can.

Member Author:

Probably. In the case I wrote this for, we are doing all of this within an encrypted partition, so it didn't really matter that there was an in-flight unencrypted file. Seems like a reasonable change if gpg can do it.

Contributor:

I found a few things that show data being piped into gpg --encrypt -r identity-key. I don't have gpg keys set up to play around with it, so I don't know for sure.
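
For reference, the piped form under discussion might look roughly like this (an untested sketch; the recipient ID is a placeholder). dockerized_backup.rb below ends up taking this approach:

pg_dump -Fc mydb | gpg --encrypt -r backups@example.com -o backup.sql.dump.gpg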

  `gpg -r #{ENV["ENCRYPT_TO"]} -e #{filename}`
  FileUtils.rm(filename)
  filename = "#{filename}.gpg"
end

s3 = AWS.s3
bucket = s3.buckets[bucket_name]
object = bucket.objects["#{project_name}/#{filename}"]
…
56 changes: 56 additions & 0 deletions dockerized_backup.rb
@@ -0,0 +1,56 @@
#!/usr/bin/env ruby
require 'time'
require 'aws-sdk-s3'
require 'fileutils'

pg_url = ENV["DATABASE_URL"]

bucket_name = ENV["BACKUP_BUCKET_NAME"]
project_name = ENV["PROJECT_NAME"]

# backup pg

time = Time.now.strftime("%Y-%m-%d")
filename = "backup.#{Time.now.to_i}.#{time}.sql.dump"

if encrypt_to = ENV["ENCRYPT_TO"]
  keyring = ENV["KEYRING_PATH"]
  filename = "#{filename}.gpg"
  puts "Backing up to #{filename}"
  `pg_dump -Fc #{pg_url} | gpg --no-default-keyring --keyring #{keyring} -r #{encrypt_to} --trusted-key #{encrypt_to} -e -o #{filename}`
  puts "Backup to #{filename} complete"
else
  puts "Backing up to #{filename}"
  `pg_dump -Fc #{pg_url} > #{filename}`
  puts "Backup to #{filename} complete"
end
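
# Note: KEYRING_PATH must point at a keyring that already contains the
# recipient's public key. One way to prepare such a keyring (a sketch;
# paths are placeholders):
#   gpg --no-default-keyring --keyring /keys/backup.kbx --import /keys/recipient.pub.asc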

unless File.exist?(filename) && File.size(filename) > 0
  raise "Database backup failed, file not found, or file empty"
end

if bucket_name
  # aws-sdk-s3 v3: the v1 AWS.s3 / buckets[] / objects[] interface is not
  # provided by this gem, so use Aws::S3::Resource instead.
  s3 = Aws::S3::Resource.new
  bucket = s3.bucket(bucket_name)
  object = bucket.object("#{project_name}/#{filename}")
  object.upload_file(filename, acl: "private")

  if object.exists?
    FileUtils.rm(filename)
  end

  if ENV["CLEAN"]
    DAYS_30 = 30 * 24 * 60 * 60

    # keys look like "<project>/backup.<epoch>.<date>.sql.dump[.gpg]";
    # select everything older than 30 days by the embedded epoch
    objects = bucket.objects.select do |object|
      time = Time.at(object.key.split("/").last.split(".")[1].to_i)
      time < Time.now - DAYS_30
    end

    objects.each do |object|
      object.delete
    end
  end
end
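
The CLEAN pruning depends on the Unix epoch embedded in each key by the filename template above. A worked example of the parsing (hypothetical key):

Time.at("myapp/backup.1600000000.2020-09-13.sql.dump".split("/").last.split(".")[1].to_i)
# => 2020-09-13 12:26:40 UTC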
end