Mirror of https://github.com/discourse/discourse.git, synced 2024-12-15 08:33:49 +08:00
30990006a9
This reduces the chances of errors where consumers of strings mutate their inputs, and reduces the app's memory usage. The test suite passes now, but there may be some issues left, so we will run a few sites on a branch prior to merging.
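For context, a brief sketch (not part of the commit) of what the `# frozen_string_literal: true` magic comment changes: string literals are frozen, so an accidental in-place mutation raises instead of silently altering a shared string, and identical literals can be deduplicated rather than reallocated.

    # frozen_string_literal: true

    greeting = "hello"
    greeting += " world"   # fine: builds a new String instead of mutating
    greeting << "!"        # raises FrozenError: can't modify frozen String

    # Without the magic comment, every evaluation of "hello" allocates a
    # fresh, mutable String object.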
44 lines · 1.4 KiB · Ruby
# frozen_string_literal: true

require_dependency "backup_restore/local_backup_store"
require_dependency "backup_restore/backup_store"

module Jobs

  class BackupChunksMerger < Jobs::Base
    sidekiq_options queue: 'critical', retry: false

    def execute(args)
      filename   = args[:filename]
      identifier = args[:identifier]
      chunks     = args[:chunks].to_i

      raise Discourse::InvalidParameters.new(:filename)   if filename.blank?
      raise Discourse::InvalidParameters.new(:identifier) if identifier.blank?
      raise Discourse::InvalidParameters.new(:chunks)     if chunks <= 0

      backup_path = "#{BackupRestore::LocalBackupStore.base_directory}/#{filename}"
      tmp_backup_path = "#{backup_path}.tmp"
      # path to tmp directory
      tmp_directory = File.dirname(BackupRestore::LocalBackupStore.chunk_path(identifier, filename, 0))

      # merge all chunks
      HandleChunkUpload.merge_chunks(
        chunks,
        upload_path: backup_path,
        tmp_upload_path: tmp_backup_path,
        identifier: identifier,
        filename: filename,
        tmp_directory: tmp_directory
      )

      # push an updated list to the clients
      store = BackupRestore::BackupStore.create
      data = ActiveModel::ArraySerializer.new(store.files, each_serializer: BackupFileSerializer).as_json
      MessageBus.publish("/admin/backups", data, user_ids: User.staff.pluck(:id))
    end

  end

end
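For reference, a hedged sketch of how this job might be enqueued; the call site and the surrounding variable names (upload_identifier, total_chunk_count, the example filename) are assumptions for illustration, but the argument keys mirror what #execute validates above and Jobs.enqueue is Discourse's standard way to schedule a Jobs::Base subclass.

    # Hypothetical enqueue call, e.g. once the final chunk of a backup
    # upload has been received.
    Jobs.enqueue(
      :backup_chunks_merger,
      filename: "backup-2024-12-15.tar.gz",  # illustrative backup filename
      identifier: upload_identifier,         # assumed unique id of the chunked upload
      chunks: total_chunk_count              # assumed total number of uploaded chunks
    )

Because the job runs on the 'critical' queue with retry disabled, a failed merge is not retried automatically; the client would have to restart the chunked upload.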