# frozen_string_literal: true

module Jobs
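
  # Merges the previously uploaded chunks of a backup into the final backup
  # file, then publishes the refreshed list of backups to staff clients.
  # Illustrative enqueue call (argument names match what #execute reads):
  #   Jobs.enqueue(:backup_chunks_merger, filename: "backup.tar.gz", identifier: "abc", chunks: 3)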
  class BackupChunksMerger < ::Jobs::Base
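    # run in the critical queue; a failed merge is not retried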
    sidekiq_options queue: 'critical', retry: false

    def execute(args)
      filename = args[:filename]
      identifier = args[:identifier]
      chunks = args[:chunks].to_i
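
      # validate arguments before touching the filesystem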
      raise Discourse::InvalidParameters.new(:filename) if filename.blank?
      raise Discourse::InvalidParameters.new(:identifier) if identifier.blank?
      raise Discourse::InvalidParameters.new(:chunks) if chunks <= 0
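
      # resolve the paths used for the merge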
      backup_path = "#{BackupRestore::LocalBackupStore.base_directory}/#{filename}"
      tmp_backup_path = "#{backup_path}.tmp"
      # tmp directory that holds the uploaded chunks
      tmp_directory = File.dirname(BackupRestore::LocalBackupStore.chunk_path(identifier, filename, 0))

      # merge all chunks into the final backup file
      HandleChunkUpload.merge_chunks(
        chunks,
        upload_path: backup_path,
        tmp_upload_path: tmp_backup_path,
        identifier: identifier,
        filename: filename,
        tmp_directory: tmp_directory
      )

      # push an updated list of backups to the staff clients
      store = BackupRestore::BackupStore.create
      data = ActiveModel::ArraySerializer.new(store.files, each_serializer: BackupFileSerializer).as_json
      MessageBus.publish("/admin/backups", data, group_ids: [Group::AUTO_GROUPS[:staff]])
    end
  end
end