mirror of
https://github.com/discourse/discourse.git
synced 2024-11-29 03:03:44 +08:00
08e625c446
Uppy and Resumable slice up their chunks differently, which causes a difference in this algorithm. Let's take a 131.6MB file (137951695 bytes) with a 5MB (5242880 bytes) chunk size. For resumable, there are 26 chunks, and uppy there are 27. This is controlled by forceChunkSize in resumable which is false by default. The final chunk size is 6879695 (chunk size + remainder) whereas in uppy it is 1636815 (just remainder). This means that the current condition of uploaded_file_size + current_chunk_size >= total_size is hit twice by uppy, because it uses a more correct number of chunks. This can be solved for both uppy and resumable by checking the _previous_ chunk number * chunk_size as the uploaded_file_size. An example of what is happening before that change, using the current chunk number to calculate uploaded_file_size. chunk 26: resumable: uploaded_file_size (26 * 5242880) + current_chunk_size (6879695) = 143194575 >= total_size (137951695) ? YES chunk 26: uppy: uploaded_file_size (26 * 5242880) + current_chunk_size (5242880) = 141557760 >= total_size (137951695) ? YES chunk 27: uppy: uploaded_file_size (27 * 5242880) + current_chunk_size (1636815) = 143194575 >= total_size (137951695) ? YES An example of what this looks like after the change, using the previous chunk number to calculate uploaded_file_size: chunk 26: resumable: uploaded_file_size (25 * 5242880) + current_chunk_size (6879695) = 137951695 >= total_size (137951695) ? YES chunk 26: uppy: uploaded_file_size (25 * 5242880) + current_chunk_size (5242880) = 136314880 >= total_size (137951695) ? NO chunk 27: uppy: uploaded_file_size (26 * 5242880) + current_chunk_size (1636815) = 137951695 >= total_size (137951695) ? YES
238 lines
7.1 KiB
Ruby
# frozen_string_literal: true
|
|
|
|
require "backup_restore"
|
|
require "backup_restore/backup_store"
|
|
|
|
class Admin::BackupsController < Admin::AdminController
|
|
before_action :ensure_backups_enabled
|
|
skip_before_action :check_xhr, only: [:index, :show, :logs, :check_backup_chunk, :upload_backup_chunk]
|
|
|
|
# Backup listing endpoint.
#
# HTML: preloads operation status + logs and renders the Ember app shell.
# JSON: serializes the files in the configured backup store; storage
# failures are returned as a JSON error payload.
def index
  respond_to do |format|
    format.html do
      { "operations_status" => BackupRestore.operations_status,
        "logs" => BackupRestore.logs }.each do |key, payload|
        store_preloaded(key, MultiJson.dump(payload))
      end
      render "default/empty"
    end

    format.json do
      store = BackupRestore::BackupStore.create
      begin
        render_serialized(store.files, BackupFileSerializer)
      rescue BackupRestore::BackupStore::StorageError => err
        render_json_error(err)
      end
    end
  end
end
|
|
|
|
# Returns the current backup/restore operation status as a JSON dump.
def status
  render_json_dump(BackupRestore.operations_status)
end
|
|
|
|
# Starts a backup in the background for the current admin.
# Responds with an error payload when another backup/restore
# operation is already running.
def create
  backup_opts = {
    publish_to_message_bus: true,
    with_uploads: params.fetch(:with_uploads) == "true",
    client_id: params[:client_id],
  }
  BackupRestore.backup!(current_user.id, backup_opts)
  StaffActionLogger.new(current_user).log_backup_create
  render json: success_json
rescue BackupRestore::OperationRunningError
  render_error("backup.operation_already_running")
end
|
|
|
|
# Cancels the currently running backup/restore operation, if any.
def cancel
  BackupRestore.cancel!
  render json: success_json
rescue BackupRestore::OperationRunningError
  render_error("backup.operation_already_running")
end
|
|
|
|
# Enqueues an email to the current admin containing a backup
# download link; 404s when the requested backup does not exist.
def email
  store = BackupRestore::BackupStore.create
  return render body: nil, status: 404 if store.file(params.fetch(:id)).blank?

  Jobs.enqueue(
    :download_backup_email,
    user_id: current_user.id,
    backup_file_path: url_for(controller: 'backups', action: 'show')
  )

  render body: nil
end
|
|
|
|
# Serves a backup download. Requires a valid one-time email token;
# remote stores redirect to the download source, local stores stream
# the file. The token is consumed and the download is audit-logged.
def show
  unless EmailBackupToken.compare(current_user.id, params.fetch(:token))
    @error = I18n.t('download_backup_mailer.no_token')
    return render layout: 'no_ember', status: 422, formats: [:html]
  end

  store = BackupRestore::BackupStore.create
  backup = store.file(params.fetch(:id), include_download_source: true)
  return render body: nil, status: 404 unless backup

  # one-time token: invalidate before handing out the file
  EmailBackupToken.del(current_user.id)
  StaffActionLogger.new(current_user).log_backup_download(backup)

  if store.remote?
    redirect_to backup.source
  else
    headers['Content-Length'] = File.size(backup.source).to_s
    send_file backup.source
  end
end
|
|
|
|
# Deletes the named backup from the store (audit-logged); 404s when
# no backup with that id exists.
def destroy
  store = BackupRestore::BackupStore.create
  backup = store.file(params.fetch(:id))
  return render body: nil, status: 404 unless backup

  StaffActionLogger.new(current_user).log_backup_destroy(backup)
  store.delete_file(backup.filename)
  render body: nil
end
|
|
|
|
# Preloads operation status and logs for the Ember log viewer.
def logs
  { "operations_status" => BackupRestore.operations_status,
    "logs" => BackupRestore.logs }.each do |key, payload|
    store_preloaded(key, MultiJson.dump(payload))
  end
  render "default/empty"
end
|
|
|
|
# Starts restoring the named backup in the background. Responds with
# an error payload when another operation is already running.
def restore
  restore_opts = {
    filename: params.fetch(:id),
    client_id: params.fetch(:client_id),
    publish_to_message_bus: true,
  }
  BackupRestore.restore!(current_user.id, restore_opts)
  render json: success_json
rescue BackupRestore::OperationRunningError
  render_error("backup.operation_already_running")
end
|
|
|
|
# Rolls back to the pre-restore state of the database.
def rollback
  BackupRestore.rollback!
  render json: success_json
rescue BackupRestore::OperationRunningError
  render_error("backup.operation_already_running")
end
|
|
|
|
# Toggles user-facing readonly mode on or off (audit-logged).
def readonly
  enable = params.fetch(:enable).to_s == "true"
  key = Discourse::USER_READONLY_MODE_KEY

  if enable
    Discourse.enable_readonly_mode(key)
  else
    Discourse.disable_readonly_mode(key)
  end

  StaffActionLogger.new(current_user).log_change_readonly_mode(enable)

  render body: nil
end
|
|
|
|
# Reports whether a chunk of a resumable backup upload has already been
# received, so the client can skip re-uploading it. Renders an empty body
# with the status returned by HandleChunkUpload.check_chunk.
def check_backup_chunk
  identifier = params.fetch(:resumableIdentifier)
  filename = params.fetch(:resumableFilename)
  chunk_number = params.fetch(:resumableChunkNumber)
  current_chunk_size = params.fetch(:resumableCurrentChunkSize).to_i

  # Both identifier and filename end up in a filesystem path (chunk_path
  # below), so validate both — upload_backup_chunk already validates the
  # filename; previously this endpoint only validated the identifier,
  # leaving a path-traversal hole.
  raise Discourse::InvalidParameters.new(:resumableIdentifier) unless valid_filename?(identifier)
  raise Discourse::InvalidParameters.new(:resumableFilename) unless valid_filename?(filename)

  # path to chunk file
  chunk = BackupRestore::LocalBackupStore.chunk_path(identifier, filename, chunk_number)
  # check chunk upload status
  status = HandleChunkUpload.check_chunk(chunk, current_chunk_size: current_chunk_size)

  render body: nil, status: status
end
|
|
|
|
# Receives one chunk of a resumable/uppy backup upload, writes it to the
# local chunk path, and enqueues the chunk-merge job once the final chunk
# has arrived. Renders 415 with a translated message for invalid
# extension/filename or insufficient disk space.
def upload_backup_chunk
  filename = params.fetch(:resumableFilename)
  total_size = params.fetch(:resumableTotalSize).to_i
  identifier = params.fetch(:resumableIdentifier)

  raise Discourse::InvalidParameters.new(:resumableIdentifier) unless valid_filename?(identifier)
  return render status: 415, plain: I18n.t("backup.backup_file_should_be_tar_gz") unless valid_extension?(filename)
  return render status: 415, plain: I18n.t("backup.not_enough_space_on_disk") unless has_enough_space_on_disk?(total_size)
  return render status: 415, plain: I18n.t("backup.invalid_filename") unless valid_filename?(filename)

  file = params.fetch(:file)
  chunk_number = params.fetch(:resumableChunkNumber).to_i
  chunk_size = params.fetch(:resumableChunkSize).to_i
  current_chunk_size = params.fetch(:resumableCurrentChunkSize).to_i
  # Resumable and uppy slice chunks differently: resumable folds the
  # remainder into the final chunk, while uppy emits it as one extra chunk.
  # Computing uploaded_file_size from the PREVIOUS chunk number makes the
  # completion check below fire exactly once for both clients (with the
  # current chunk number, uppy would satisfy it on two consecutive chunks
  # and enqueue the merge job twice).
  previous_chunk_number = chunk_number - 1

  # path to chunk file
  chunk = BackupRestore::LocalBackupStore.chunk_path(identifier, filename, chunk_number)
  # upload chunk
  HandleChunkUpload.upload_chunk(chunk, file: file)

  # bytes known to be on disk from all fully-received prior chunks
  uploaded_file_size = previous_chunk_number * chunk_size
  # when all chunks are uploaded
  if uploaded_file_size + current_chunk_size >= total_size
    # merge all the chunks in a background thread
    Jobs.enqueue_in(5.seconds, :backup_chunks_merger, filename: filename, identifier: identifier, chunks: chunk_number)
  end

  render body: nil
end
|
|
|
|
# Generates a presigned upload URL for a direct-to-store backup upload.
# Rejects invalid extensions/filenames, existing files, and surfaces
# storage errors as JSON error payloads.
def create_upload_url
  params.require(:filename)
  filename = params.fetch(:filename)

  return render_json_error(I18n.t("backup.backup_file_should_be_tar_gz")) unless valid_extension?(filename)
  return render_json_error(I18n.t("backup.invalid_filename")) unless valid_filename?(filename)

  store = BackupRestore::BackupStore.create

  begin
    url = store.generate_upload_url(filename)
  rescue BackupRestore::BackupStore::BackupFileExists
    return render_json_error(I18n.t("backup.file_exists"))
  rescue BackupRestore::BackupStore::StorageError => err
    return render_json_error(err)
  end

  render json: success_json.merge(url: url)
end
|
|
|
|
private
|
|
|
|
# True when the local backups directory has more than +size+ bytes free.
def has_enough_space_on_disk?(size)
  backups_dir = Rails.root.join("public", "backups").to_s
  DiskSpace.free(backups_dir) > size
end
|
|
|
|
# before_action guard: every endpoint in this controller is inaccessible
# unless the `enable_backups` site setting is on.
def ensure_backups_enabled
  raise Discourse::InvalidAccess.new unless SiteSetting.enable_backups?
end
|
|
|
|
def valid_extension?(filename)
|
|
/\.(tar\.gz|t?gz)$/i =~ filename
|
|
end
|
|
|
|
def valid_filename?(filename)
|
|
!!(/^[a-zA-Z0-9\._\-]+$/ =~ filename)
|
|
end
|
|
|
|
# Renders the standard failure JSON payload with a translated message.
def render_error(message_key)
  message = I18n.t(message_key)
  render json: failed_json.merge(message: message)
end
|
|
end
|