# frozen_string_literal: true

require 'rails_helper'

RSpec.describe Admin::BackupsController do
  fab!(:admin) { Fabricate(:admin) }

  let(:backup_filename) { "2014-02-10-065935.tar.gz" }
  let(:backup_filename2) { "2014-02-11-065935.tar.gz" }
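
  # Writes a small dummy backup ("test backup", 11 bytes) for each filename into
  # the local backup directory and records the paths in @paths so the after hook
  # can clean them up.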
  def create_backup_files(*filenames)
    @paths = filenames.map do |filename|
      path = backup_path(filename)
      File.open(path, "w") { |f| f.write("test backup") }
      path
    end
  end

  def backup_path(filename)
    File.join(BackupRestore::LocalBackupStore.base_directory, filename)
  end
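
  # Parses the JSON blobs the controller preloads into the page so specs can
  # assert on them as plain Ruby hashes.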
  def map_preloaded
    controller.instance_variable_get("@preloaded").map do |key, value|
      [key, JSON.parse(value)]
    end.to_h
  end

  it "is a subclass of AdminController" do
    expect(Admin::BackupsController < Admin::AdminController).to eq(true)
  end

  before do
    sign_in(admin)
    SiteSetting.backup_location = BackupLocationSiteSetting::LOCAL
  end
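
  # Clean up Redis state and any backup files written during an example.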
  after do
    Discourse.redis.flushdb

    @paths&.each { |path| File.delete(path) if File.exists?(path) }
    @paths = nil
  end

  describe "#index" do
    it "raises an error when backups are disabled" do
      SiteSetting.enable_backups = false

      get "/admin/backups.json"

      expect(response.status).to eq(403)
    end

    context "html format" do
      it "preloads important data" do
        get "/admin/backups.html"
        expect(response.status).to eq(200)

        preloaded = map_preloaded

        expect(preloaded["operations_status"].symbolize_keys).to eq(BackupRestore.operations_status)
        expect(preloaded["logs"].size).to eq(BackupRestore.logs.size)
      end
    end

    context "json format" do
      it "returns a list of all the backups" do
        begin
          create_backup_files(backup_filename, backup_filename2)

          get "/admin/backups.json"
          expect(response.status).to eq(200)

          filenames = response.parsed_body.map { |backup| backup["filename"] }
          expect(filenames).to include(backup_filename)
          expect(filenames).to include(backup_filename2)
        end
      end
    end
  end

  describe '#status' do
    it "returns the current backups status" do
      get "/admin/backups/status.json"

      expect(response.body).to eq(BackupRestore.operations_status.to_json)
      expect(response.status).to eq(200)
    end
  end

  describe '#create' do
    it "starts a backup" do
      BackupRestore.expects(:backup!).with(admin.id, publish_to_message_bus: true, with_uploads: false, client_id: "foo")

      post "/admin/backups.json", params: {
        with_uploads: false, client_id: "foo"
      }

      expect(response.status).to eq(200)
    end
  end

  describe '#show' do
    it "uses send_file to transmit the backup" do
      begin
        token = EmailBackupToken.set(admin.id)
        create_backup_files(backup_filename)

        expect do
          get "/admin/backups/#{backup_filename}.json", params: { token: token }
        end.to change { UserHistory.where(action: UserHistory.actions[:backup_download]).count }.by(1)
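
        # create_backup_files writes the literal "test backup" (11 bytes).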
        expect(response.headers['Content-Length']).to eq("11")
        expect(response.headers['Content-Disposition']).to match(/attachment; filename/)
      end
    end

    it "returns 422 when token is bad" do
      begin
        get "/admin/backups/#{backup_filename}.json", params: { token: "bad_value" }

        expect(response.status).to eq(422)
        expect(response.headers['Content-Disposition']).not_to match(/attachment; filename/)
      end
    end

    it "returns 404 when the backup does not exist" do
      token = EmailBackupToken.set(admin.id)

      get "/admin/backups/#{backup_filename}.json", params: { token: token }

      expect(response.status).to eq(404)
    end
  end

  describe '#destroy' do
    it "removes the backup if found" do
      begin
        path = backup_path(backup_filename)
        create_backup_files(backup_filename)
        expect(File.exists?(path)).to eq(true)

        expect do
          delete "/admin/backups/#{backup_filename}.json"
        end.to change { UserHistory.where(action: UserHistory.actions[:backup_destroy]).count }.by(1)

        expect(response.status).to eq(200)
        expect(File.exists?(path)).to eq(false)
      end
    end

    it "doesn't remove the backup if not found" do
      delete "/admin/backups/#{backup_filename}.json"

      expect(response.status).to eq(404)
    end
  end

  describe '#logs' do
    it "preloads important data" do
      get "/admin/backups/logs.html"
      expect(response.status).to eq(200)

      preloaded = map_preloaded

      expect(preloaded["operations_status"].symbolize_keys).to eq(BackupRestore.operations_status)
      expect(preloaded["logs"].size).to eq(BackupRestore.logs.size)
    end
  end

  describe '#restore' do
    it "starts a restore" do
      BackupRestore.expects(:restore!).with(admin.id, filename: backup_filename, publish_to_message_bus: true, client_id: "foo")

      post "/admin/backups/#{backup_filename}/restore.json", params: { client_id: "foo" }

      expect(response.status).to eq(200)
    end
  end
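
  # Toggling read-only mode is recorded in the staff action log (UserHistory),
  # with the new value serialized as "t"/"f".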
  describe '#readonly' do
    it "enables readonly mode" do
      expect(Discourse.readonly_mode?).to eq(false)

      expect { put "/admin/backups/readonly.json", params: { enable: true } }
        .to change { UserHistory.where(action: UserHistory.actions[:change_readonly_mode], new_value: "t").count }.by(1)

      expect(Discourse.readonly_mode?).to eq(true)
      expect(response.status).to eq(200)
    end

    it "disables readonly mode" do
      Discourse.enable_readonly_mode(Discourse::USER_READONLY_MODE_KEY)
      expect(Discourse.readonly_mode?).to eq(true)

      expect { put "/admin/backups/readonly.json", params: { enable: false } }
        .to change { UserHistory.where(action: UserHistory.actions[:change_readonly_mode], new_value: "f").count }.by(1)

      expect(response.status).to eq(200)
      expect(Discourse.readonly_mode?).to eq(false)
    end
  end
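
  # Backup uploads arrive in chunks; the params follow the resumable.js naming
  # convention (resumableFilename, resumableChunkNumber, and so on).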
describe "#upload_backup_chunk" do
|
|
|
|
describe "when filename contains invalid characters" do
|
|
|
|
it "should raise an error" do
|
|
|
|
['灰色.tar.gz', '; echo \'haha\'.tar.gz'].each do |invalid_filename|
|
|
|
|
described_class.any_instance.expects(:has_enough_space_on_disk?).returns(true)
|
|
|
|
|
|
|
|
post "/admin/backups/upload", params: {
|
2019-07-19 22:33:08 +08:00
|
|
|
resumableFilename: invalid_filename,
|
|
|
|
resumableTotalSize: 1,
|
|
|
|
resumableIdentifier: 'test'
|
2018-06-11 13:26:24 +08:00
|
|
|
}
|
|
|
|
|
|
|
|
expect(response.status).to eq(415)
|
|
|
|
expect(response.body).to eq(I18n.t('backup.invalid_filename'))
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
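
    # A path-traversal identifier such as '../test' must be rejected before any
    # chunk is written to disk.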
describe "when resumableIdentifier is invalid" do
|
|
|
|
it "should raise an error" do
|
|
|
|
filename = 'test_site-0123456789.tar.gz'
|
|
|
|
@paths = [backup_path(File.join('tmp', 'test', "#{filename}.part1"))]
|
|
|
|
|
|
|
|
post "/admin/backups/upload.json", params: {
|
|
|
|
resumableFilename: filename,
|
|
|
|
resumableTotalSize: 1,
|
|
|
|
resumableIdentifier: '../test',
|
|
|
|
resumableChunkNumber: '1',
|
|
|
|
resumableChunkSize: '1',
|
|
|
|
resumableCurrentChunkSize: '1',
|
|
|
|
file: fixture_file_upload(Tempfile.new)
|
|
|
|
}
|
|
|
|
|
|
|
|
expect(response.status).to eq(400)
|
|
|
|
end
|
|
|
|
end

    describe "when filename is valid" do
      it "should upload the file successfully" do
        described_class.any_instance.expects(:has_enough_space_on_disk?).returns(true)

        filename = 'test_Site-0123456789.tar.gz'
        @paths = [backup_path(File.join('tmp', 'test', "#{filename}.part1"))]

        post "/admin/backups/upload.json", params: {
          resumableFilename: filename,
          resumableTotalSize: 1,
          resumableIdentifier: 'test',
          resumableChunkNumber: '1',
          resumableChunkSize: '1',
          resumableCurrentChunkSize: '1',
          file: fixture_file_upload(Tempfile.new)
        }

        expect(response.status).to eq(200)
        expect(response.body).to eq("")
      end
    end
  end

  describe "#check_backup_chunk" do
    describe "when resumableIdentifier is invalid" do
      it "should raise an error" do
        get "/admin/backups/upload", params: {
          resumableIdentifier: "../some_file",
          resumableFilename: "test_site-0123456789.tar.gz",
          resumableChunkNumber: '1',
          resumableCurrentChunkSize: '1'
        }

        expect(response.status).to eq(400)
      end
    end
  end

  describe '#rollback' do
    it 'should rollback the restore' do
      BackupRestore.expects(:rollback!)

      post "/admin/backups/rollback.json"

      expect(response.status).to eq(200)
    end

    it 'should not allow rollback via a GET request' do
      get "/admin/backups/rollback.json"

      expect(response.status).to eq(404)
    end
  end

  describe '#cancel' do
    it "should cancel a backup" do
      BackupRestore.expects(:cancel!)

      delete "/admin/backups/cancel.json"

      expect(response.status).to eq(200)
    end

    it 'should not allow cancel via a GET request' do
      get "/admin/backups/cancel.json"

      expect(response.status).to eq(404)
    end
  end

  describe "#email" do
    it "enqueues email job" do
      # might as well test this here if we really want www.example.com
      SiteSetting.force_hostname = "www.example.com"

      create_backup_files(backup_filename)

      expect {
        put "/admin/backups/#{backup_filename}.json"
      }.to change { Jobs::DownloadBackupEmail.jobs.size }.by(1)

      job_args = Jobs::DownloadBackupEmail.jobs.last["args"].first
      expect(job_args["user_id"]).to eq(admin.id)
      expect(job_args["backup_file_path"]).to eq("http://www.example.com/admin/backups/#{backup_filename}")

      expect(response.status).to eq(200)
    end

    it "returns 404 when the backup does not exist" do
      put "/admin/backups/#{backup_filename}.json"

      expect(response).to be_not_found
    end
  end
end