# frozen_string_literal: true

require_dependency "db_helper"

module BackupRestore

  class RestoreDisabledError < RuntimeError; end
  class FilenameMissingError < RuntimeError; end
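
  # Restores a Discourse backup (a gzipped tar archive or a bare .sql.gz dump)
  # into the current site: database first, then uploads. A minimal usage sketch,
  # with an example filename that must refer to a file in the backup store:
  #
  #   restorer = BackupRestore::Restorer.new(user.id, filename: "backup.tar.gz")
  #   restorer.run
  #   restorer.success # => true or false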
  class Restorer

    attr_reader :success
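
    # pg_dump from these PostgreSQL releases emits dumps that cannot be
    # redirected into another schema by sed_command, so they have to be
    # restored in place (see #run).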
    def self.pg_produces_portable_dump?(version)
      version = Gem::Version.new(version)

      %w{
        10.3
        9.6.8
        9.5.12
        9.4.17
        9.3.22
      }.each do |unportable_version|
        return false if Gem::Dependency.new("", "~> #{unportable_version}").match?("", version)
      end

      true
    end

    def initialize(user_id, opts = {})
      @user_id = user_id
      @client_id = opts[:client_id]
      @filename = opts[:filename]
      @publish_to_message_bus = opts[:publish_to_message_bus] || false
      @disable_emails = opts.fetch(:disable_emails, true)

      ensure_restore_is_enabled
      ensure_no_operation_is_running
      ensure_we_have_a_user
      ensure_we_have_a_filename

      initialize_state
    end
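
    # Runs the full restore. When the dump can be redirected into the "restore"
    # schema, the site stays online until the final schema switch; otherwise the
    # live tables are moved aside first and the dump is restored in place while
    # the site is in readonly mode.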
    def run
      log "[STARTED]"
      log "'#{@user_info[:username]}' has started the restore!"

      mark_restore_as_running

      listen_for_shutdown_signal

      ensure_directory_exists(@tmp_directory)

      copy_archive_to_tmp_directory
      unzip_archive

      extract_metadata
      validate_metadata

      extract_dump

      if !can_restore_into_different_schema?
        log "Cannot restore into different schema, restoring in-place"
        enable_readonly_mode
        pause_sidekiq
        wait_for_sidekiq
        BackupRestore.move_tables_between_schemas("public", "backup")
        @db_was_changed = true
        restore_dump
      else
        log "Restoring into 'backup' schema"
        restore_dump
        enable_readonly_mode
        pause_sidekiq
        wait_for_sidekiq
        switch_schema!
      end

      migrate_database
      reconnect_database
      reload_site_settings
      clear_emoji_cache
      disable_readonly_mode
      clear_theme_cache

      extract_uploads

      after_restore_hook
    rescue SystemExit
      log "Restore process was cancelled!"
      rollback
    rescue => ex
      log "EXCEPTION: " + ex.message
      log ex.backtrace.join("\n")
      rollback
    else
      @success = true
    ensure
      clean_up
      notify_user
      log "Finished!"

      @success ? log("[SUCCESS]") : log("[FAILED]")
    end

    protected

    def ensure_restore_is_enabled
      raise BackupRestore::RestoreDisabledError unless Rails.env.development? || SiteSetting.allow_restore?
    end

    def ensure_no_operation_is_running
      raise BackupRestore::OperationRunningError if BackupRestore.is_operation_running?
    end

    def ensure_we_have_a_user
      user = User.find_by(id: @user_id)
      raise Discourse::InvalidParameters.new(:user_id) unless user

      # keep some user data around to check them against the newly restored database
      @user_info = { id: user.id, username: user.username, email: user.email }
    end

    def ensure_we_have_a_filename
      raise BackupRestore::FilenameMissingError if @filename.nil?
    end

    def initialize_state
      @success = false
      @store = BackupRestore::BackupStore.create
      @db_was_changed = false
      @current_db = RailsMultisite::ConnectionManagement.current_db
      @current_version = BackupRestore.current_version
      @timestamp = Time.now.strftime("%Y-%m-%d-%H%M%S")
      @tmp_directory = File.join(Rails.root, "tmp", "restores", @current_db, @timestamp)
      @archive_filename = File.join(@tmp_directory, @filename)
      @tar_filename = @archive_filename[0...-3] # drop the trailing ".gz"
      @meta_filename = File.join(@tmp_directory, BackupRestore::METADATA_FILE)
      @is_archive = !(@filename =~ /\.sql\.gz$/)

      @logs = []
      @readonly_mode_was_enabled = Discourse.readonly_mode?
    end

    def listen_for_shutdown_signal
      Thread.new do
        while BackupRestore.is_operation_running?
          exit if BackupRestore.should_shutdown?
          sleep 0.1
        end
      end
    end

    def mark_restore_as_running
      log "Marking restore as running..."
      BackupRestore.mark_as_running!
    end

    def enable_readonly_mode
      return if @readonly_mode_was_enabled
      log "Enabling readonly mode..."
      Discourse.enable_readonly_mode
    end

    def pause_sidekiq
      log "Pausing sidekiq..."
      Sidekiq.pause!
    end
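
    # Gives Sidekiq up to ~30 seconds (6 checks, 5 seconds apart) to finish the
    # jobs that belong to this site before giving up.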
    def wait_for_sidekiq
      log "Waiting for sidekiq to finish running jobs..."

      iterations = 1
      while sidekiq_has_running_jobs?
        log "Waiting for sidekiq to finish running jobs... ##{iterations}"
        sleep 5
        iterations += 1
        raise "Sidekiq did not finish running all the jobs in the allowed time!" if iterations > 6
      end
    end

    def sidekiq_has_running_jobs?
      Sidekiq::Workers.new.each do |_, _, worker|
        payload = worker.try(:payload)
        return true if payload.try(:all_sites)
        return true if payload.try(:current_site_id) == @current_db
      end

      false
    end

    def copy_archive_to_tmp_directory
      if @store.remote?
        log "Downloading archive to tmp directory..."
        failure_message = "Failed to download archive to tmp directory."
      else
        log "Copying archive to tmp directory..."
        failure_message = "Failed to copy archive to tmp directory."
      end

      @store.download_file(@filename, @archive_filename, failure_message)
    end

    def unzip_archive
      return unless @is_archive

      log "Unzipping archive, this may take a while..."

      FileUtils.cd(@tmp_directory) do
        Discourse::Utils.execute_command('gzip', '--decompress', @archive_filename, failure_message: "Failed to unzip archive.")
      end
    end
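
    # Reads the metadata file from the archive when it is present; older backups
    # without one fall back to the version encoded in the filename.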
    def extract_metadata
      @metadata =
        if system('tar', '--list', '--file', @tar_filename, BackupRestore::METADATA_FILE)
          log "Extracting metadata file..."

          FileUtils.cd(@tmp_directory) do
            Discourse::Utils.execute_command(
              'tar', '--extract', '--file', @tar_filename, BackupRestore::METADATA_FILE,
              failure_message: "Failed to extract metadata file."
            )
          end

          data = Oj.load_file(@meta_filename)
          raise "Failed to load metadata file." if !data
          data
        else
          log "No metadata file to extract."

          if @filename =~ /-#{BackupRestore::VERSION_PREFIX}(\d{14})/
            { "version" => Regexp.last_match[1].to_i }
          else
            raise "Migration version is missing from the filename."
          end
        end
    end

    def validate_metadata
      log "Validating metadata..."
      log " Current version: #{@current_version}"

      raise "Metadata has not been extracted correctly." if !@metadata

      log " Restored version: #{@metadata["version"]}"

      error = "You're trying to restore a more recent version of the schema. You should migrate first!"
      raise error if @metadata["version"] > @current_version
    end

    def extract_dump
      @dump_filename =
        if @is_archive
          # For backwards compatibility
          if system('tar', '--list', '--file', @tar_filename, BackupRestore::OLD_DUMP_FILE)
            File.join(@tmp_directory, BackupRestore::OLD_DUMP_FILE)
          else
            File.join(@tmp_directory, BackupRestore::DUMP_FILE)
          end
        else
          File.join(@tmp_directory, @filename)
        end

      return unless @is_archive

      log "Extracting dump file..."

      FileUtils.cd(@tmp_directory) do
        Discourse::Utils.execute_command(
          'tar', '--extract', '--file', @tar_filename, File.basename(@dump_filename),
          failure_message: "Failed to extract dump file."
        )
      end
    end
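
    # Extracts the "Dumped by pg_dump version" header from the dump so we can
    # decide whether it is portable enough to restore into a separate schema.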
    def get_dumped_by_version
      output = Discourse::Utils.execute_command(
        File.extname(@dump_filename) == '.gz' ? 'zgrep' : 'grep',
        '-m1', @dump_filename, '-e', "-- Dumped by pg_dump version",
        failure_message: "Failed to check version of pg_dump used to generate the dump file"
      )

      output.match(/version (\d+(\.\d+)+)/)[1]
    end

    def can_restore_into_different_schema?
      self.class.pg_produces_portable_dump?(get_dumped_by_version)
    end
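
    # Builds the shell pipeline that feeds the dump into psql. Gzipped dumps are
    # rewritten on the fly by sed_command so they load into the "restore" schema.
    # The resulting command looks roughly like this (values depend on the
    # database configuration):
    #
    #   gzip -d < dump.sql.gz | sed -e '<expression>' | PGPASSWORD='...' psql --dbname='...' --single-transaction 2>&1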
    def restore_dump_command
      if File.extname(@dump_filename) == '.gz'
        "gzip -d < #{@dump_filename} | #{sed_command} | #{psql_command} 2>&1"
      else
        "#{psql_command} 2>&1 < #{@dump_filename}"
      end
    end

    def restore_dump
      log "Restoring dump file... (can be quite long)"

      logs = Queue.new
      psql_running = true
      has_error = false

      Thread.new do
        RailsMultisite::ConnectionManagement::establish_connection(db: @current_db)
        while psql_running
          message = logs.pop.strip
          has_error ||= (message =~ /ERROR:/)
          log(message) unless message.blank?
        end
      end

      IO.popen(restore_dump_command) do |pipe|
        begin
          while line = pipe.readline
            logs << line
          end
        rescue EOFError
          # finished reading...
        ensure
          psql_running = false
          logs << ""
        end
      end

      # psql does not return a valid exit code when an error happens
      raise "psql failed" if has_error
    end

    def psql_command
      db_conf = BackupRestore.database_configuration

      password_argument = "PGPASSWORD='#{db_conf.password}'" if db_conf.password.present?
      host_argument = "--host=#{db_conf.host}" if db_conf.host.present?
      port_argument = "--port=#{db_conf.port}" if db_conf.port.present?
      username_argument = "--username=#{db_conf.username}" if db_conf.username.present?

      [ password_argument,                # pass the password to psql (if any)
        "psql",                           # the psql command
        "--dbname='#{db_conf.database}'", # connect to database *dbname*
        "--single-transaction",           # all or nothing (also runs COPY commands faster)
        host_argument,                    # the hostname to connect to (if any)
        port_argument,                    # the port to connect to (if any)
        username_argument                 # the username to connect as (if any)
      ].join(" ")
    end

    def sed_command
      # in order to limit the downtime when restoring as much as possible
      # we force the restoration to happen in the "restore" schema

      # during the restoration, this makes sure we
      #  - drop the "restore" schema if it exists
      #  - create the "restore" schema
      #  - prepend the "restore" schema into the search_path

      regexp = "SET search_path = public, pg_catalog;"

      replacement = [ "DROP SCHEMA IF EXISTS restore CASCADE;",
                      "CREATE SCHEMA restore;",
                      "SET search_path = restore, public, pg_catalog;",
      ].join(" ")

      # we only want to replace the VERY first occurrence of the search_path command
      expression = "1,/^#{regexp}$/s/#{regexp}/#{replacement}/"

      "sed -e '#{expression}'"
    end
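
    # Swaps the freshly restored tables in: within a single transaction the live
    # "public" tables are moved to the "backup" schema and the "restore" schema
    # becomes the new "public".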
    def switch_schema!
      log "Switching schemas... try reloading the site in 5 minutes, if successful, then reboot and restore is complete."

      sql = [
        "BEGIN;",
        BackupRestore.move_tables_between_schemas_sql("public", "backup"),
        BackupRestore.move_tables_between_schemas_sql("restore", "public"),
        "COMMIT;"
      ].join("\n")

      @db_was_changed = true

      DB.exec(sql)
    end
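
    # Runs all pending migrations (including post-deployment ones) against the
    # freshly restored schema.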
    def migrate_database
      log "Migrating the database..."

      if Discourse.skip_post_deployment_migrations?
        ENV["SKIP_POST_DEPLOYMENT_MIGRATIONS"] = "0"
        Rails.application.config.paths['db/migrate'] << Rails.root.join(
          Discourse::DB_POST_MIGRATE_PATH
        ).to_s
      end

      Discourse::Application.load_tasks
      ENV["VERSION"] = @current_version.to_s
      DB.exec("SET search_path = public, pg_catalog;")
      Rake::Task["db:migrate"].invoke
    end

    def reconnect_database
      log "Reconnecting to the database..."
      RailsMultisite::ConnectionManagement::establish_connection(db: @current_db)
    end

    def reload_site_settings
      log "Reloading site settings..."
      SiteSetting.refresh!

      if @disable_emails && SiteSetting.disable_emails == 'no'
        log "Disabling outgoing emails for non-staff users..."
        user = User.find_by_email(@user_info[:email]) || Discourse.system_user
        SiteSetting.set_and_log(:disable_emails, 'non-staff', user)
      end
    end

    def clear_emoji_cache
      log "Clearing emoji cache..."
      Emoji.clear_cache
    end
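
    # Restores the uploads directory from the archive (when it contains one),
    # remaps paths if the backup came from a different multisite database, and
    # optionally moves the files to S3.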
    def extract_uploads
      if system('tar', '--exclude=*/*', '--list', '--file', @tar_filename, 'uploads')
        log "Extracting uploads..."

        FileUtils.cd(@tmp_directory) do
          Discourse::Utils.execute_command(
            'tar', '--extract', '--keep-newer-files', '--file', @tar_filename, 'uploads/',
            failure_message: "Failed to extract uploads."
          )
        end

        public_uploads_path = File.join(Rails.root, "public")

        FileUtils.cd(public_uploads_path) do
          FileUtils.mkdir_p("uploads")

          tmp_uploads_path = Dir.glob(File.join(@tmp_directory, "uploads", "*")).first
          previous_db_name = File.basename(tmp_uploads_path)
          current_db_name = RailsMultisite::ConnectionManagement.current_db
          optimized_images_exist = File.exist?(File.join(tmp_uploads_path, 'optimized'))

          Discourse::Utils.execute_command(
            'rsync', '-avp', '--safe-links', "#{tmp_uploads_path}/", "uploads/#{current_db_name}/",
            failure_message: "Failed to restore uploads."
          )

          if previous_db_name != current_db_name
            log "Remapping uploads..."
            DbHelper.remap("uploads/#{previous_db_name}", "uploads/#{current_db_name}")
          end

          if SiteSetting.Upload.enable_s3_uploads
            migrate_to_s3
            remove_local_uploads(File.join(public_uploads_path, "uploads/#{current_db_name}"))
          end

          generate_optimized_images unless optimized_images_exist
        end
      end
    end

    def migrate_to_s3
      log "Migrating uploads to S3..."
      ENV["SKIP_FAILED"] = "1"
      ENV["MIGRATE_TO_MULTISITE"] = "1" if Rails.configuration.multisite
      Rake::Task["uploads:migrate_to_s3"].invoke
    end

    def remove_local_uploads(directory)
      log "Removing local uploads directory..."
      FileUtils.rm_rf(directory) if Dir[directory].present?
    rescue => ex
      log "Something went wrong while removing the following uploads directory: #{directory}", ex
    end
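
    # Backups that do not include optimized images need their site icons and
    # avatar thumbnails regenerated; posts with uploads are marked for rebaking
    # so the rest is recreated by Sidekiq.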
    def generate_optimized_images
      log 'Optimizing site icons...'
      DB.exec("TRUNCATE TABLE optimized_images")
      SiteIconManager.ensure_optimized!

      log 'Posts will be rebaked by a background job in sidekiq. You will see missing images until that has completed.'
      log 'You can expedite the process by manually running "rake posts:rebake_uncooked_posts"'

      DB.exec(<<~SQL)
        UPDATE posts
        SET baked_version = NULL
        WHERE id IN (SELECT post_id FROM post_uploads)
      SQL

      User.where("uploaded_avatar_id IS NOT NULL").find_each do |user|
        Jobs.enqueue(:create_avatar_thumbnails, upload_id: user.uploaded_avatar_id)
      end
    end
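
    # Moves the original tables back from the "backup" schema, but only if the
    # database was actually modified during this restore.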
    def rollback
      log "Trying to rollback..."

      if @db_was_changed && BackupRestore.can_rollback?
        log "Rolling back..."
        BackupRestore.move_tables_between_schemas("backup", "public")
      else
        log "There was no need to rollback"
      end
    end

    def notify_user
      if user = User.find_by_email(@user_info[:email])
        log "Notifying '#{user.username}' of the end of the restore..."
        status = @success ? :restore_succeeded : :restore_failed

        SystemMessage.create_from_system_user(user, status,
          logs: Discourse::Utils.pretty_logs(@logs)
        )
      else
        log "Could not send notification to '#{@user_info[:username]}' (#{@user_info[:email]}), because the user does not exist..."
      end
    rescue => ex
      log "Something went wrong while notifying user.", ex
    end

    def clean_up
      log "Cleaning stuff up..."
      remove_tmp_directory
      unpause_sidekiq
      disable_readonly_mode if Discourse.readonly_mode?
      mark_restore_as_not_running
    end

    def remove_tmp_directory
      log "Removing tmp '#{@tmp_directory}' directory..."
      FileUtils.rm_rf(@tmp_directory) if Dir[@tmp_directory].present?
    rescue => ex
      log "Something went wrong while removing the following tmp directory: #{@tmp_directory}", ex
    end

    def unpause_sidekiq
      log "Unpausing sidekiq..."
      Sidekiq.unpause!
    rescue => ex
      log "Something went wrong while unpausing Sidekiq.", ex
    end

    def clear_theme_cache
      log "Clear theme cache"
      ThemeField.force_recompilation!
      Theme.expire_site_cache!
      Stylesheet::Manager.cache.clear
    end

    def disable_readonly_mode
      return if @readonly_mode_was_enabled
      log "Disabling readonly mode..."
      Discourse.disable_readonly_mode
    rescue => ex
      log "Something went wrong while disabling readonly mode.", ex
    end

    def mark_restore_as_not_running
      log "Marking restore as finished..."
      BackupRestore.mark_as_not_running!
    rescue => ex
      log "Something went wrong while marking restore as finished.", ex
    end

    def ensure_directory_exists(directory)
      log "Making sure #{directory} exists..."
      FileUtils.mkdir_p(directory)
    end

    def after_restore_hook
      log "Executing the after_restore_hook..."
      DiscourseEvent.trigger(:restore_complete)
    end
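
    # Log messages go to stdout, to the client via the message bus (when
    # requested), and into @logs so they can be attached to the notification
    # sent to the user.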
    def log(message, ex = nil)
      timestamp = Time.now.strftime("%Y-%m-%d %H:%M:%S")
      puts(message)
      publish_log(message, timestamp)
      save_log(message, timestamp)
      Rails.logger.error("#{ex}\n" + ex.backtrace.join("\n")) if ex
    end

    def publish_log(message, timestamp)
      return unless @publish_to_message_bus
      data = { timestamp: timestamp, operation: "restore", message: message }
      MessageBus.publish(BackupRestore::LOGS_CHANNEL, data, user_ids: [@user_id], client_ids: [@client_id])
    end

    def save_log(message, timestamp)
      @logs << "[#{timestamp}] #{message}"
    end

  end

end