discourse/lib/file_store/local_store.rb
Martin Brennan e4350bb966
FEATURE: Direct S3 multipart uploads for backups (#14736)
This PR introduces a new `enable_experimental_backup_uploads` site setting (default false and hidden), which, when enabled alongside `enable_direct_s3_uploads`, allows direct S3 multipart uploads of backup .tar.gz files.
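
For context, the gating is simply the conjunction of the two settings; a minimal sketch (the method name `direct_s3_backup_uploads_enabled?` is hypothetical, not Discourse's actual API):

```ruby
# Hypothetical helper illustrating the gating described above: backups can
# only be uploaded directly to S3 when both settings are enabled.
def direct_s3_backup_uploads_enabled?
  SiteSetting.enable_experimental_backup_uploads &&
    SiteSetting.enable_direct_s3_uploads
end
```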

To make multipart external uploads work with both the S3BackupStore and the S3Store, I've had to move several methods out of S3Store and into S3Helper (see the sketch after this list), including:

* presigned_url
* create_multipart
* abort_multipart
* complete_multipart
* presign_multipart_part
* list_multipart_parts
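
Here is a rough sketch of what the shared multipart interface on S3Helper can look like, written against the standard aws-sdk-s3 client API; the actual method names and signatures in Discourse may differ:

```ruby
require "aws-sdk-s3"

class S3Helper
  def initialize(bucket_name, client: Aws::S3::Client.new)
    @bucket_name = bucket_name
    @s3_client = client
    @presigner = Aws::S3::Presigner.new(client: @s3_client)
  end

  # Generic presigned URL for a single-shot operation such as :put_object.
  def presigned_url(key, method: :put_object, expires_in: 600)
    @presigner.presigned_url(method, bucket: @bucket_name, key: key, expires_in: expires_in)
  end

  # Starts a multipart upload and returns the upload_id required by all
  # subsequent part, abort, and complete calls.
  def create_multipart(key, content_type)
    @s3_client.create_multipart_upload(
      bucket: @bucket_name, key: key, content_type: content_type
    ).upload_id
  end

  def abort_multipart(key:, upload_id:)
    @s3_client.abort_multipart_upload(bucket: @bucket_name, key: key, upload_id: upload_id)
  end

  # parts is an array of { part_number:, etag: } hashes gathered from the
  # client as each part finishes uploading.
  def complete_multipart(key:, upload_id:, parts:)
    @s3_client.complete_multipart_upload(
      bucket: @bucket_name,
      key: key,
      upload_id: upload_id,
      multipart_upload: { parts: parts }
    )
  end

  # Presigns a PUT URL for a single part so the browser can upload it
  # directly to S3 without the bytes passing through the app server.
  def presign_multipart_part(key:, upload_id:, part_number:)
    @presigner.presigned_url(
      :upload_part,
      bucket: @bucket_name, key: key,
      upload_id: upload_id, part_number: part_number,
      expires_in: 600
    )
  end

  def list_multipart_parts(key:, upload_id:)
    @s3_client.list_parts(bucket: @bucket_name, key: key, upload_id: upload_id)
  end
end
```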

S3Store and S3BackupStore now either delegate directly to S3Helper or wrap these S3Helper methods with their own store-specific versions. FileStore.temporary_upload_path no longer depends on upload_path, so it can be used interchangeably between the stores. A similar change was made on the frontend: the multipart-related JS code moved out of ComposerUppyUpload and into a mixin of its own, so it can also be used by UppyUploadMixin.
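
To illustrate the `temporary_upload_path` change: the shared base implementation can accept the folder prefix as a parameter rather than reading the store's `upload_path`, roughly like this (a sketch, not the exact Discourse code):

```ruby
require "securerandom"

module FileStore
  class BaseStore
    TEMPORARY_UPLOAD_PREFIX = "temp"

    # The caller supplies folder_prefix (the local store passes its relative
    # base URL, the S3 stores pass theirs), so this shared helper no longer
    # depends on upload_path and works interchangeably between stores.
    def self.temporary_upload_path(file_name, folder_prefix: "")
      File.join(folder_prefix, TEMPORARY_UPLOAD_PREFIX, SecureRandom.hex, file_name)
    end
  end
end
```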

Some changes to ExternalUploadManager had to be made here as well. Backup direct uploads do not need an Upload record created for them in the database, so they can be moved to their final S3 destination as soon as the multipart upload completes.
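
A hedged sketch of that branch in ExternalUploadManager; all of the names here (`transform_to_final_destination!`, `for_backup?`, `move_to_final_destination`, `promote_to_upload!`) are illustrative, not the real API:

```ruby
# Illustrative only: backups skip Upload record creation entirely and are
# copied straight to their final S3 key once the multipart upload completes.
def transform_to_final_destination!
  if for_backup?
    move_to_final_destination  # server-side S3 copy, no Upload row created
  else
    promote_to_upload!         # creates the Upload record as before
  end
end
```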

This changeset is not perfect; it introduces some special cases in UploadController to handle backups, handling that previously lived in BackupController, because UploadController is where the multipart routes are located. A subsequent pull request will extract these routes into a module or some other sharing pattern, along with hooks, so the backup controller, the upload controller, and any future controllers that need them can include these routes in a nicer way.
2021-11-11 08:25:31 +10:00

# frozen_string_literal: true

require_dependency 'file_store/base_store'

module FileStore

  class LocalStore < BaseStore

    def store_file(file, path)
      copy_file(file, "#{public_dir}#{path}")
      "#{Discourse.base_path}#{path}"
    end

    def remove_file(url, _)
      return unless is_relative?(url)
      source = "#{public_dir}#{url}"
      return unless File.exist?(source)

      # files are moved to the tombstone directory rather than deleted
      # outright, so they can be recovered until the grace period expires
      destination = "#{public_dir}#{url.sub("/uploads/", "/uploads/tombstone/")}"
      dir = Pathname.new(destination).dirname
      FileUtils.mkdir_p(dir) unless Dir.exist?(dir)
      FileUtils.remove(destination) if File.exist?(destination)
      FileUtils.move(source, destination, force: true)
      # reset the mtime so the tombstone grace period starts from now
      FileUtils.touch(destination)
    end

    def has_been_uploaded?(url)
      is_relative?(url) || is_local?(url)
    end

    def absolute_base_url
      "#{Discourse.base_url_no_prefix}#{relative_base_url}"
    end

    def absolute_base_cdn_url
      "#{Discourse.asset_host}#{relative_base_url}"
    end

    def relative_base_url
      File.join(Discourse.base_path, upload_path)
    end

    def temporary_upload_path(filename)
      # the folder prefix is passed in explicitly, so the shared base
      # implementation no longer depends on this store's upload_path
      FileStore::BaseStore.temporary_upload_path(filename, folder_prefix: relative_base_url)
    end

    def external?
      false
    end

    def download_url(upload)
      return unless upload
      File.join(relative_base_url, upload.sha1)
    end

    def cdn_url(url)
      UrlHelper.local_cdn_url(url)
    end

    def path_for(upload)
      url = upload.try(:url)
      "#{public_dir}#{url}" if url && url[0] == "/" && url[1] != "/"
    end

    def purge_tombstone(grace_period)
      # delete tombstoned files whose mtime is older than the grace period (days)
      if Dir.exist?(tombstone_dir)
        Discourse::Utils.execute_command(
          'find', tombstone_dir, '-mtime', "+#{grace_period}", '-type', 'f', '-delete'
        )
      end
    end

    def get_path_for(type, upload_id, sha, extension)
      prefix_path(super(type, upload_id, sha, extension))
    end

    def copy_file(file, path)
      dir = Pathname.new(path).dirname
      FileUtils.mkdir_p(dir) unless Dir.exist?(dir)
      # write the contents instead of using mv, because permissions
      # are not preserved properly on move
      File.open(path, "wb") { |f| f.write(file.read) }
    end

    def is_relative?(url)
      url.present? && url.start_with?(relative_base_url)
    end

    def is_local?(url)
      return false if url.blank?
      absolute_url = url.start_with?("//") ? SiteSetting.scheme + ":" + url : url
      absolute_url.start_with?(absolute_base_url) || absolute_url.start_with?(absolute_base_cdn_url)
    end

    def public_dir
      File.join(Rails.root, "public")
    end

    def tombstone_dir
      "#{public_dir}#{relative_base_url.sub("/uploads/", "/uploads/tombstone/")}"
    end

    def list_missing_uploads(skip_optimized: false)
      list_missing(Upload)
      list_missing(OptimizedImage) unless skip_optimized
    end

    def copy_from(source_path)
      FileUtils.mkdir_p(File.join(public_dir, upload_path))

      Discourse::Utils.execute_command(
        'rsync', '-a', '--safe-links', "#{source_path}/", "#{upload_path}/",
        failure_message: "Failed to copy uploads.",
        chdir: public_dir
      )
    end

    private

    def list_missing(model)
      count = 0
      model.find_each do |upload|
        # could be a remote image
        next unless upload.url =~ /^\/[^\/]/

        path = "#{public_dir}#{upload.url}"
        bad = true
        begin
          bad = false if File.size(path) != 0
        rescue
          # something is messed up
        end
        if bad
          count += 1
          puts path
        end
      end
      puts "#{count} of #{model.count} #{model.name.underscore.pluralize} are missing" if count > 0
    end

    def prefix_path(path)
      File.join("/", upload_path, path)
    end
  end
end