module BackupRestore

  class Backuper

    attr_reader :success

    def initialize(user_id, opts={})
      @user_id = user_id
      @client_id = opts[:client_id]
      @publish_to_message_bus = opts[:publish_to_message_bus] || false
      @with_uploads = opts[:with_uploads].nil? ? true : opts[:with_uploads]

      ensure_no_operation_is_running
      ensure_we_have_a_user

      initialize_state
    end
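
    # Overall flow: mark the backup as running, dump the public schema while the
    # site is in read-only mode, then archive the dump (and, optionally, the
    # uploads) into a .tar.gz file in the archive directory.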
    def run
      log "[STARTED]"
      log "'#{@user.username}' has started the backup!"

      mark_backup_as_running

      listen_for_shutdown_signal

      ensure_directory_exists(@tmp_directory)
      ensure_directory_exists(@archive_directory)

      write_metadata

      ### READ-ONLY / START ###
      enable_readonly_mode

      pause_sidekiq
      wait_for_sidekiq

      dump_public_schema

      disable_readonly_mode
      ### READ-ONLY / END ###

      log "Finalizing backup..."

      update_dump

      create_archive

      after_create_hook
    rescue SystemExit
      log "Backup process was cancelled!"
    rescue Exception => ex
      log "EXCEPTION: " + ex.message
      log ex.backtrace.join("\n")
    else
      @success = true
      "#{@archive_basename}.tar.gz"
    ensure
      notify_user rescue nil
      remove_old rescue nil
      clean_up
      @success ? log("[SUCCESS]") : log("[FAILED]")
    end

    protected

    def ensure_no_operation_is_running
      raise BackupRestore::OperationRunningError if BackupRestore.is_operation_running?
    end

    def ensure_we_have_a_user
      @user = User.find_by(id: @user_id)
      raise Discourse::InvalidParameters.new(:user_id) unless @user
    end

    def initialize_state
      @success = false
      @current_db = RailsMultisite::ConnectionManagement.current_db
      @timestamp = Time.now.strftime("%Y-%m-%d-%H%M%S")
      @tmp_directory = File.join(Rails.root, "tmp", "backups", @current_db, @timestamp)
      @dump_filename = File.join(@tmp_directory, BackupRestore::DUMP_FILE)
      @meta_filename = File.join(@tmp_directory, BackupRestore::METADATA_FILE)
      @archive_directory = File.join(Rails.root, "public", "backups", @current_db)
      @archive_basename = File.join(@archive_directory, "#{SiteSetting.title.parameterize}-#{@timestamp}")
      @logs = []
      @readonly_mode_was_enabled = Discourse.readonly_mode?
    end
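
    # Background watchdog: polls every 100ms while the operation is marked as
    # running and exits the process when a shutdown has been requested.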
    def listen_for_shutdown_signal
      Thread.new do
        while BackupRestore.is_operation_running?
          exit if BackupRestore.should_shutdown?
          sleep 0.1
        end
      end
    end

    def mark_backup_as_running
      log "Marking backup as running..."
      BackupRestore.mark_as_running!
    end

    def enable_readonly_mode
      return if @readonly_mode_was_enabled
      log "Enabling readonly mode..."
      Discourse.enable_readonly_mode
    end

    def pause_sidekiq
      log "Pausing sidekiq..."
      Sidekiq.pause!
    end
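
    # Gives Sidekiq up to ~30 seconds (6 iterations x 5 seconds) to finish its
    # running jobs before aborting the backup.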
    def wait_for_sidekiq
      log "Waiting for sidekiq to finish running jobs..."
      iterations = 1
      while sidekiq_has_running_jobs?
        log "Waiting for sidekiq to finish running jobs... ##{iterations}"
        sleep 5
        iterations += 1
        raise "Sidekiq did not finish running all the jobs in the allowed time!" if iterations > 6
      end
    end
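
    # A job counts as "running" when it either targets all sites or targets the
    # database this backup is being taken from.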
    def sidekiq_has_running_jobs?
      Sidekiq::Workers.new.each do |_, _, worker|
        payload = worker.try(:payload)
        return true if payload.try(:all_sites)
        return true if payload.try(:current_site_id) == @current_db
      end

      false
    end

    def write_metadata
      log "Writing metadata to '#{@meta_filename}'..."
      metadata = {
        source: "discourse",
        version: BackupRestore.current_version
      }
      File.write(@meta_filename, metadata.to_json)
    end
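
    # Runs pg_dump in a subprocess and streams its output into the logs through
    # a queue consumed by a background thread.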
    def dump_public_schema
      log "Dumping the public schema of the database..."

      logs = Queue.new
      pg_dump_running = true

      Thread.new do
        RailsMultisite::ConnectionManagement::establish_connection(db: @current_db)
        while pg_dump_running
          message = logs.pop.strip
          log(message) unless message.blank?
        end
      end

      IO.popen("#{pg_dump_command} 2>&1") do |pipe|
        begin
          while line = pipe.readline
            logs << line
          end
        rescue EOFError
          # finished reading...
        ensure
          pg_dump_running = false
          logs << ""
        end
      end

      raise "pg_dump failed" unless $?.success?
    end

    def pg_dump_command
      db_conf = BackupRestore.database_configuration

      password_argument = "PGPASSWORD='#{db_conf.password}'" if db_conf.password.present?
      host_argument = "--host=#{db_conf.host}" if db_conf.host.present?
      port_argument = "--port=#{db_conf.port}" if db_conf.port.present?
      username_argument = "--username=#{db_conf.username}" if db_conf.username.present?

      [ password_argument,            # pass the password to pg_dump (if any)
        "pg_dump",                    # the pg_dump command
        "--schema=public",            # only public schema
        "--file='#{@dump_filename}'", # output to the dump.sql file
        "--no-owner",                 # do not output commands to set ownership of objects
        "--no-privileges",            # prevent dumping of access privileges
        "--verbose",                  # specifies verbose mode
        host_argument,                # the hostname to connect to (if any)
        port_argument,                # the port to connect to (if any)
        username_argument,            # the username to connect as (if any)
        db_conf.database              # the name of the database to dump
      ].join(" ")
    end

    def update_dump
      log "Updating dump for more awesomeness..."

      `#{sed_command}`
    end

    def sed_command
      # in order to limit the downtime when restoring as much as possible
      # we force the restoration to happen in the "restore" schema

      # during the restoration, this makes sure we
      # - drop the "restore" schema if it exists
      # - create the "restore" schema
      # - prepend the "restore" schema to the search_path

      regexp = "SET search_path = public, pg_catalog;"

      replacement = [ "DROP SCHEMA IF EXISTS restore CASCADE;",
                      "CREATE SCHEMA restore;",
                      "SET search_path = restore, public, pg_catalog;",
                    ].join(" ")

      # we only want to replace the VERY first occurrence of the search_path command
      expression = "1,/^#{regexp}$/s/#{regexp}/#{replacement}/"

      # I tried to use the --in-place argument but it was SLOOOWWWWwwwwww
      # so I output the result into another file and rename it back afterwards
      [ "sed -e '#{expression}' < #{@dump_filename} > #{@dump_filename}.tmp",
        "&&",
        "mv #{@dump_filename}.tmp #{@dump_filename}",
      ].join(" ")
    end
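
    # Builds the .tar archive incrementally (metadata, database dump, then the
    # uploads when requested) and gzips it at the end.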
    def create_archive
      log "Creating archive: #{File.basename(@archive_basename)}.tar.gz"

      tar_filename = "#{@archive_basename}.tar"

      log "Making sure archive does not already exist..."
      `rm -f #{tar_filename}`
      `rm -f #{tar_filename}.gz`

      log "Creating empty archive..."
      `tar --create --file #{tar_filename} --files-from /dev/null`

      log "Archiving metadata..."
      FileUtils.cd(File.dirname(@meta_filename)) do
        `tar --append --dereference --file #{tar_filename} #{File.basename(@meta_filename)}`
      end

      log "Archiving data dump..."
      FileUtils.cd(File.dirname(@dump_filename)) do
        `tar --append --dereference --file #{tar_filename} #{File.basename(@dump_filename)}`
      end

      if @with_uploads
        upload_directory = "uploads/" + @current_db

        log "Archiving uploads..."
        FileUtils.cd(File.join(Rails.root, "public")) do
          `tar --append --dereference --file #{tar_filename} #{upload_directory}`
        end
      end

      log "Gzipping archive..."
      `gzip -5 #{tar_filename}`
    end

    def after_create_hook
      log "Executing the after_create_hook for the backup"
      backup = Backup.create_from_filename("#{File.basename(@archive_basename)}.tar.gz")
      backup.after_create_hook
    end

    def remove_old
      log "Removing old backups..."
      Backup.remove_old
    end

    def notify_user
      log "Notifying '#{@user.username}' of the end of the backup..."
      if @success
        SystemMessage.create_from_system_user(@user, :backup_succeeded)
      else
        SystemMessage.create_from_system_user(@user, :backup_failed, logs: @logs.join("\n"))
      end
    end

    def clean_up
      log "Cleaning stuff up..."
      remove_tar_leftovers
      remove_tmp_directory
      unpause_sidekiq
      disable_readonly_mode if Discourse.readonly_mode?
      mark_backup_as_not_running
      log "Finished!"
    end

    def remove_tar_leftovers
      log "Removing '.tar' leftovers..."
      `rm -f #{@archive_directory}/*.tar`
    end

    def remove_tmp_directory
      log "Removing tmp '#{@tmp_directory}' directory..."
      FileUtils.rm_rf(@tmp_directory) if Dir[@tmp_directory].present?
    rescue
      log "Something went wrong while removing the following tmp directory: #{@tmp_directory}"
    end

    def unpause_sidekiq
      log "Unpausing sidekiq..."
      Sidekiq.unpause!
    rescue
      log "Something went wrong while unpausing Sidekiq."
    end

    def disable_readonly_mode
      return if @readonly_mode_was_enabled
      log "Disabling readonly mode..."
      Discourse.disable_readonly_mode
    end

    def mark_backup_as_not_running
      log "Marking backup as finished..."
      BackupRestore.mark_as_not_running!
    end

    def ensure_directory_exists(directory)
      log "Making sure '#{directory}' exists..."
      FileUtils.mkdir_p(directory)
    end
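
    # Every log line goes to stdout, to the message bus (when publishing is
    # enabled), and to the in-memory @logs buffer used in the failure notification.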
    def log(message)
      timestamp = Time.now.strftime("%Y-%m-%d %H:%M:%S")
      puts(message) rescue nil
      publish_log(message, timestamp) rescue nil
      save_log(message, timestamp)
    end

    def publish_log(message, timestamp)
      return unless @publish_to_message_bus
      data = { timestamp: timestamp, operation: "backup", message: message }
      MessageBus.publish(BackupRestore::LOGS_CHANNEL, data, user_ids: [@user_id], client_ids: [@client_id])
    end

    def save_log(message, timestamp)
      @logs << "[#{timestamp}] #{message}"
    end

  end

end