From 5c9ea3a88549e56b949d77797a9d60626be3d060 Mon Sep 17 00:00:00 2001 From: Dan Ivovich Date: Mon, 16 Mar 2015 20:10:35 -0400 Subject: [PATCH 1/2] use the running users creds for pg_dump and enable gpg encrypt --- backup.rb | 20 ++++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/backup.rb b/backup.rb index 78be565..5d7da0a 100755 --- a/backup.rb +++ b/backup.rb @@ -3,10 +3,10 @@ require 'aws-sdk' require 'fileutils' -# .pgpass file required, it is in the following format +# .pgpass file required if not connecting via peer/ident, it is in the following format # hostname:port:database:username:password -pg_user = ENV["POSTGRES_USERNAME"] || "postgres" -pg_host = ENV["POSTGRES_HOST"] || "localhost" +pg_user = ENV["POSTGRES_USERNAME"] +pg_host = ENV["POSTGRES_HOST"] pg_port = ENV["POSTGRES_PORT"] || "5432" pg_database = ENV["POSTGRES_DATABASE"] @@ -18,13 +18,25 @@ time = Time.now.strftime("%Y-%m-%d") filename = "backup.#{Time.now.to_i}.#{time}.sql.dump" -`pg_dump -Fc --username=#{pg_user} --no-password --host #{pg_host} --port #{pg_port} #{pg_database} > #{filename}` +if pg_user && pg_host + `pg_dump -Fc --username=#{pg_user} --no-password --host #{pg_host} --port #{pg_port} #{pg_database} > #{filename}` +elsif pg_host + `pg_dump -Fc --host #{pg_host} --port #{pg_port} #{pg_database} > #{filename}` +else + `pg_dump -Fc #{pg_database} > #{filename}` +end # verify file exists and file size is > 0 bytes unless File.exists?(filename) && File.new(filename).size > 0 raise "Database was not backed up" end +if ENV["ENCRYPT_TO"] + `gpg -r #{ENV["ENCRYPT_TO"]} -e #{filename}` + FileUtils.rm(filename) + filename = "#{filename}.gpg" +end + s3 = AWS.s3 bucket = s3.buckets[bucket_name] object = bucket.objects["#{project_name}/#{filename}"] From 87c3b9425aff8be9e722426cac2977753089e42e Mon Sep 17 00:00:00 2001 From: Dan Ivovich Date: Fri, 12 Dec 2025 14:13:06 -0500 Subject: [PATCH 2/2] docker version --- .tool-versions | 1 + Dockerfile | 9 +++++++ 
Gemfile | 2 +- Gemfile.lock | 31 ++++++++++++++++-------- dockerized_backup.rb | 56 ++++++++++++++++++++++++++++++++++++++++++++ 5 files changed, 88 insertions(+), 11 deletions(-) create mode 100644 .tool-versions create mode 100644 Dockerfile create mode 100755 dockerized_backup.rb diff --git a/.tool-versions b/.tool-versions new file mode 100644 index 0000000..a9e31a4 --- /dev/null +++ b/.tool-versions @@ -0,0 +1 @@ +ruby 2.7.1 diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..b361abb --- /dev/null +++ b/Dockerfile @@ -0,0 +1,9 @@ +FROM ruby:2.7.1 + +RUN apt-get update -qq && apt-get install -y gpg + +ADD Gemfile Gemfile +ADD Gemfile.lock Gemfile.lock +ADD dockerized_backup.rb dockerized_backup.rb + +RUN bundle diff --git a/Gemfile b/Gemfile index 4298a26..1f99dfd 100644 --- a/Gemfile +++ b/Gemfile @@ -1,3 +1,3 @@ source "https://rubygems.org" -gem 'aws-sdk' +gem 'aws-sdk-s3' diff --git a/Gemfile.lock b/Gemfile.lock index f658676..51d242e 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -1,18 +1,29 @@ GEM remote: https://rubygems.org/ specs: - aws-sdk (1.61.0) - aws-sdk-v1 (= 1.61.0) - aws-sdk-v1 (1.61.0) - json (~> 1.4) - nokogiri (>= 1.4.4) - json (1.8.2) - mini_portile (0.6.2) - nokogiri (1.6.6.1) - mini_portile (~> 0.6.0) + aws-eventstream (1.1.0) + aws-partitions (1.356.0) + aws-sdk-core (3.104.3) + aws-eventstream (~> 1, >= 1.0.2) + aws-partitions (~> 1, >= 1.239.0) + aws-sigv4 (~> 1.1) + jmespath (~> 1.0) + aws-sdk-kms (1.36.0) + aws-sdk-core (~> 3, >= 3.99.0) + aws-sigv4 (~> 1.1) + aws-sdk-s3 (1.78.0) + aws-sdk-core (~> 3, >= 3.104.3) + aws-sdk-kms (~> 1) + aws-sigv4 (~> 1.1) + aws-sigv4 (1.2.2) + aws-eventstream (~> 1, >= 1.0.2) + jmespath (1.4.0) PLATFORMS ruby DEPENDENCIES - aws-sdk + aws-sdk-s3 + +BUNDLED WITH + 2.1.4 diff --git a/dockerized_backup.rb b/dockerized_backup.rb new file mode 100755 index 0000000..7ac04a2 --- /dev/null +++ b/dockerized_backup.rb @@ -0,0 +1,56 @@ +#!/usr/bin/env ruby +require 'time' +require 
'aws-sdk-s3' +require 'fileutils' + +pg_url = ENV["DATABASE_URL"] + +bucket_name = ENV["BACKUP_BUCKET_NAME"] +project_name = ENV["PROJECT_NAME"] + +# backup pg + +time = Time.now.strftime("%Y-%m-%d") +filename = "backup.#{Time.now.to_i}.#{time}.sql.dump" + +if encrypt_to = ENV["ENCRYPT_TO"] + keyring = ENV["KEYRING_PATH"] + filename = "#{filename}.gpg" + puts "Backing up to #{filename}" + `pg_dump -Fc #{pg_url} | gpg --no-default-keyring --keyring #{keyring} -r #{encrypt_to} --trusted-key #{encrypt_to} -o #{filename}` + puts "Back up to #{filename} complete" +else + puts "Backing up to #{filename}" + `pg_dump -Fc #{pg_url} > #{filename}` + puts "Back up to #{filename} complete" +end + +unless File.exists?(filename) && File.new(filename).size > 0 + raise "Database backup failed, file not found, or file empty" +end + +if bucket_name + s3 = AWS.s3 + bucket = s3.buckets[bucket_name] + object = bucket.objects["#{project_name}/#{filename}"] + object.write(Pathname.new(filename), { + :acl => :private, + }) + + if object.exists? + FileUtils.rm(filename) + end + + if ENV["CLEAN"] + DAYS_30 = 30 * 24 * 60 * 60 + + objects = bucket.objects.select do |object| + time = Time.at(object.key.split("/").last.split(".")[1].to_i) + time < Time.now - DAYS_30 + end + + objects.each do |object| + object.delete + end + end +end