# frozen_string_literal: true

require "aws-sdk-s3"

# Thin wrapper around the AWS S3 SDK used for all of the app's bucket
# operations: uploads, deletes, tombstoning, CORS and lifecycle management.
#
# A bucket name may be given as "bucket" or "bucket/folder/path"; the folder
# part is transparently prefixed onto every object key.
class S3Helper

  # Raised when S3 uploads are enabled but a required site setting is blank.
  class SettingMissing < StandardError; end

  attr_reader :s3_bucket_name, :s3_bucket_folder_path

  # `||=` (not `=`) so the constant survives code reloading without warnings.
  DOWNLOAD_URL_EXPIRES_AFTER_SECONDS ||= 15

  # @param s3_bucket_name [String] "bucket" or "bucket/folder" (case-insensitive)
  # @param tombstone_prefix [String] key prefix for soft-deleted objects
  # @param options [Hash] extra Aws::S3::Client options; `:client` injects a
  #   pre-built client (used by tests)
  def initialize(s3_bucket_name, tombstone_prefix = '', options = {})
    @s3_client = options.delete(:client)
    @s3_options = default_s3_options.merge(options)

    @s3_bucket_name, @s3_bucket_folder_path = begin
      raise Discourse::InvalidParameters.new("s3_bucket_name") if s3_bucket_name.blank?
      self.class.get_bucket_and_folder_path(s3_bucket_name)
    end

    # Tombstoned objects live under the bucket folder (when present) so the
    # tombstone lifecycle rule can match them by prefix.
    @tombstone_prefix =
      if @s3_bucket_folder_path
        File.join(@s3_bucket_folder_path, tombstone_prefix)
      else
        tombstone_prefix
      end
  end

  # Splits "Bucket/Folder/Path" into ["bucket", "folder/path"]; returns a
  # one-element array when no folder is present.
  def self.get_bucket_and_folder_path(s3_bucket_name)
    s3_bucket_name.downcase.split("/", 2)
  end

  # Uploads `file` to `path` (folder prefix applied automatically), using
  # multipart upload for files of 15MB or more.
  #
  # @return [Array(String, String)] the final key and the ETag (quotes stripped)
  def upload(file, path, options = {})
    path = get_path_for_s3_upload(path)
    obj = s3_bucket.object(path)

    etag =
      if File.size(file.path) >= Aws::S3::FileUploader::FIFTEEN_MEGABYTES
        options[:multipart_threshold] = Aws::S3::FileUploader::FIFTEEN_MEGABYTES
        obj.upload_file(file, options)
        obj.load # multipart upload does not populate the ETag; reload the object
        obj.etag
      else
        options[:body] = file
        obj.put(options).etag
      end

    [path, etag.gsub('"', '')]
  end

  # Deletes `s3_filename`, optionally copying it into the tombstone prefix
  # first. Missing keys are silently ignored.
  def remove(s3_filename, copy_to_tombstone = false)
    s3_filename = s3_filename.dup # we prepend below; don't mutate the caller's string

    # copy the file in tombstone
    if copy_to_tombstone && @tombstone_prefix.present?
      self.copy(
        get_path_for_s3_upload(s3_filename),
        File.join(@tombstone_prefix, s3_filename)
      )
    end

    # delete the file
    s3_filename.prepend(multisite_upload_path) if Rails.configuration.multisite
    delete_object(get_path_for_s3_upload(s3_filename))
  rescue Aws::S3::Errors::NoSuchKey
  end

  # Deletes the object at `key`; a missing key is not an error.
  def delete_object(key)
    s3_bucket.object(key).delete
  rescue Aws::S3::Errors::NoSuchKey
  end

  # Server-side copies `source` to `destination` within the bucket, taking
  # care of the per-site path prefix in multisite installs.
  def copy(source, destination, options: {})
    # Work on a copy so the caller's options hash is not mutated.
    options = options.dup

    if !Rails.configuration.multisite
      options[:copy_source] = File.join(@s3_bucket_name, source)
    else
      if source.include?(multisite_upload_path) || source.include?(@tombstone_prefix)
        # source is already fully qualified (or tombstoned) — use it as-is
        options[:copy_source] = File.join(@s3_bucket_name, source)
      elsif @s3_bucket_folder_path
        # insert the multisite path between the bucket folder and the filename
        folder, filename = source.split("/", 2)
        options[:copy_source] = File.join(@s3_bucket_name, folder, multisite_upload_path, filename)
      else
        options[:copy_source] = File.join(@s3_bucket_name, multisite_upload_path, source)
      end
    end

    s3_bucket
      .object(destination)
      .copy_from(options)
  end

  # make sure we have a cors config for assets
  # otherwise we will have no fonts
  def ensure_cors!(rules = nil)
    return unless SiteSetting.s3_install_cors_rule

    rule = nil

    begin
      rule = s3_resource.client.get_bucket_cors(
        bucket: @s3_bucket_name
      ).cors_rules&.first
    rescue Aws::S3::Errors::NoSuchCORSConfiguration
      # no rule — we will install the default one below
    end

    unless rule
      rules = [{
        allowed_headers: ["Authorization"],
        allowed_methods: ["GET", "HEAD"],
        allowed_origins: ["*"],
        max_age_seconds: 3000
      }] if rules.nil?

      s3_resource.client.put_bucket_cors(
        bucket: @s3_bucket_name,
        cors_configuration: {
          cors_rules: rules
        }
      )
    end
  end

  # Installs (or replaces) the expiration lifecycle rule `id`, expiring
  # matching objects after `days`. Objects can be matched by key `prefix:`
  # or by `tag:`; existing rules with the same id are replaced.
  def update_lifecycle(id, days, prefix: nil, tag: nil)
    filter = {}

    if prefix
      filter[:prefix] = prefix
    elsif tag
      filter[:tag] = tag
    end

    # cf. http://docs.aws.amazon.com/AmazonS3/latest/dev/object-lifecycle-mgmt.html
    rule = {
      id: id,
      status: "Enabled",
      expiration: { days: days },
      filter: filter
    }

    rules = []

    begin
      rules = s3_resource.client.get_bucket_lifecycle_configuration(bucket: @s3_bucket_name).rules
    rescue Aws::S3::Errors::NoSuchLifecycleConfiguration
      # skip trying to merge
    end

    # in the past we had a rule that was called purge-tombstone vs purge_tombstone
    # just go ahead and normalize for our bucket
    rules.delete_if do |r|
      r.id.gsub('_', '-') == id.gsub('_', '-')
    end

    rules << rule

    # normalize filter in rules, due to AWS library bug
    rules = rules.map do |r|
      r = r.to_h
      prefix = r.delete(:prefix)
      if prefix
        r[:filter] = { prefix: prefix }
      end
      r
    end

    s3_resource.client.put_bucket_lifecycle_configuration(
      bucket: @s3_bucket_name,
      lifecycle_configuration: {
        rules: rules
      })
  end

  # Expires tombstoned uploads after `grace_period` days (no-op unless the
  # tombstone policy setting is enabled and a tombstone prefix exists).
  def update_tombstone_lifecycle(grace_period)
    return if !SiteSetting.s3_configure_tombstone_policy
    return if @tombstone_prefix.blank?
    update_lifecycle("purge_tombstone", grace_period, prefix: @tombstone_prefix)
  end

  # Lists objects under `prefix` (folder prefix applied), optionally starting
  # after `marker` for pagination.
  def list(prefix = "", marker = nil)
    options = { prefix: get_path_for_s3_upload(prefix) }
    options[:marker] = marker if marker.present?
    s3_bucket.objects(options)
  end

  # Replaces the tag set of the object at `key` with `tags` (a Hash; keys and
  # values are stringified).
  def tag_file(key, tags)
    tag_array = tags.map { |k, v| { key: k.to_s, value: v.to_s } }

    s3_resource.client.put_object_tagging(
      bucket: @s3_bucket_name,
      key: key,
      tagging: {
        tag_set: tag_array
      }
    )
  end

  # Returns the Aws::S3::Object for `path` (folder prefix applied).
  def object(path)
    s3_bucket.object(get_path_for_s3_upload(path))
  end

  # Builds Aws::S3::Client options from a settings object (SiteSetting or
  # GlobalSetting) that responds to the s3_* accessors.
  def self.s3_options(obj)
    opts = {
      region: obj.s3_region
    }

    opts[:endpoint] = SiteSetting.s3_endpoint if SiteSetting.s3_endpoint.present?
    opts[:http_continue_timeout] = SiteSetting.s3_http_continue_timeout

    unless obj.s3_use_iam_profile
      opts[:access_key_id] = obj.s3_access_key_id
      opts[:secret_access_key] = obj.s3_secret_access_key
    end

    opts
  end

  # Downloads `filename` to `destination_path`, raising `failure_message`
  # (or a generic message) when the SDK reports failure.
  def download_file(filename, destination_path, failure_message = nil)
    unless object(filename).download_file(destination_path)
      raise failure_message&.to_s || "Failed to download file"
    end
  end

  # Memoized S3 client; an injected `:client` option takes precedence.
  def s3_client
    @s3_client ||= Aws::S3::Client.new(@s3_options)
  end

  # Key prefix under which S3 inventory files are stored.
  def s3_inventory_path(path = 'inventory')
    get_path_for_s3_upload(path)
  end

  private

  # Client options derived from site/global settings; validates required
  # credentials when S3 uploads are enabled via site settings.
  def default_s3_options
    if SiteSetting.enable_s3_uploads?
      options = self.class.s3_options(SiteSetting)
      check_missing_site_options
      options
    elsif GlobalSetting.use_s3?
      self.class.s3_options(GlobalSetting)
    else
      {}
    end
  end

  # Prepends the bucket folder path unless `path` already carries it.
  # The folder path is Regexp.escape'd so metacharacters (e.g. ".") in the
  # folder name cannot cause false prefix matches.
  def get_path_for_s3_upload(path)
    if @s3_bucket_folder_path && !path.match?(%r{\A#{Regexp.escape(@s3_bucket_folder_path)}/})
      path = File.join(@s3_bucket_folder_path, path)
    end
    path
  end

  # Per-site key prefix ("uploads/<db>/"), with an extra per-process segment
  # in the test environment so parallel test runs don't collide.
  def multisite_upload_path
    path = File.join("uploads", RailsMultisite::ConnectionManagement.current_db, "/")
    return path if !Rails.env.test?
    File.join(path, "test_#{ENV['TEST_ENV_NUMBER'].presence || '0'}", "/")
  end

  def s3_resource
    Aws::S3::Resource.new(client: s3_client)
  end

  # Memoized bucket handle; creates the bucket on first use if it is missing.
  def s3_bucket
    @s3_bucket ||= begin
      bucket = s3_resource.bucket(@s3_bucket_name)
      bucket.create unless bucket.exists?
      bucket
    end
  end

  # Raises SettingMissing when credentials are required but blank.
  def check_missing_site_options
    unless SiteSetting.s3_use_iam_profile
      raise SettingMissing.new("access_key_id") if SiteSetting.s3_access_key_id.blank?
      raise SettingMissing.new("secret_access_key") if SiteSetting.s3_secret_access_key.blank?
    end
  end
end
|