Scratch that idea, then.
So I found this gem called db2s3, which backs up the database to S3 with a simple rake command. I'm having trouble modifying it to back up my public/system directory instead. Here's what I have so far:
require 'activesupport'
require 'aws/s3'
require 'tempfile'

class BLUECAT
  class Config
  end

  def initialize
  end

  def full_backup
    file_name = "system-#{Time.now.utc.strftime("%Y%m%d%H%M")}.gz"
    store.store(file_name, open(backup_directory.path))
    store.store(most_recent_dump_file_name, file_name)
  end

  private

  def backup_directory
    backup_file = Tempfile.new("dump")
    # This is the part I can't get right: trying to copy public/system
    # and gzip it into the tempfile in one go.
    cmd = "cp -r #{RAILS_ROOT}/public/system/ bucket"
    cmd += " | gzip > #{backup_file.path}"
    run(cmd)
    backup_file
  end

  def store
    @store ||= S3Store.new
  end

  def most_recent_dump_file_name
    "most-recent-dump.txt"
  end

  def run(command)
    result = system(command)
    raise("error, process exited with status #{$?.exitstatus}") unless result
  end

  def db_credentials
    ActiveRecord::Base.connection.instance_eval { @config } # Dodgy!
  end

  class S3Store
    def initialize
      @connected = false
    end

    def ensure_connected
      return if @connected
      AWS::S3::Base.establish_connection!(
        BLUECAT::Config::S3.slice(:access_key_id, :secret_access_key).merge(:use_ssl => true)
      )
      AWS::S3::Bucket.create(bucket)
      @connected = true
    end

    def store(file_name, file)
      ensure_connected
      AWS::S3::S3Object.store(file_name, file, bucket)
    end

    def fetch(file_name)
      ensure_connected
      AWS::S3::S3Object.find(file_name, bucket)
      file = Tempfile.new("dump")
      open(file.path, 'w') do |f|
        AWS::S3::S3Object.stream(file_name, bucket) do |chunk|
          f.write chunk
        end
      end
      file
    end

    def list
      ensure_connected
      AWS::S3::Bucket.find(bucket).objects.collect { |x| x.path }
    end

    def delete(file_name)
      if object = AWS::S3::S3Object.find(file_name, bucket)
        object.delete
      end
    end

    private

    def bucket
      BLUECAT::Config::S3[:bucket]
    end
  end
end
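The empty Config class at the top is just a namespace for the S3 settings that S3Store reads (it slices out :access_key_id and :secret_access_key and looks up :bucket). I'm assuming something along these lines, with placeholder values, since the real keys live elsewhere:

class BLUECAT
  class Config
    # Placeholder values only -- S3Store expects these three keys.
    S3 = {
      :access_key_id     => 'MY_ACCESS_KEY_ID',
      :secret_access_key => 'MY_SECRET_ACCESS_KEY',
      :bucket            => 'my-backup-bucket'
    }
  end
end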
In the backup_directory method, I can't figure out how to get it to copy the system directory into my S3 bucket.
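As far as I can tell, cp can't write straight into S3 anyway; the actual upload happens later in full_backup via store.store, so backup_directory only needs to produce a single local archive. The closest I've come is swapping the cp for tar, roughly like this (untested sketch, and I'm not sure it's the right approach):

def backup_directory
  backup_file = Tempfile.new("dump")
  # Archive public/system into one gzipped tarball; full_backup then
  # uploads that file to the bucket through S3Store#store.
  cmd = "tar -czf #{backup_file.path} -C #{RAILS_ROOT}/public system"
  run(cmd)
  backup_file
end

If that's the way to go, the file_name in full_backup should probably end in .tar.gz rather than just .gz.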