FEATURE: rake tasks for uploading assets to S3

This opens the door to serving application.js and so on from s3.

Also updates s3 gem for some tagging support
This commit is contained in:
Sam 2017-10-03 18:00:42 +11:00
parent f1d8ed6aaf
commit ac01885b60
4 changed files with 195 additions and 19 deletions

View File

@ -55,7 +55,7 @@ gem 'fast_xor'
# Forked until https://github.com/sdsykes/fastimage/pull/93 is merged
gem 'discourse_fastimage', require: 'fastimage'
gem 'aws-sdk', require: false
gem 'aws-sdk-s3', require: false
gem 'excon', require: false
gem 'unf', require: false

View File

@ -44,12 +44,19 @@ GEM
ansi (1.5.0)
arel (8.0.0)
ast (2.3.0)
aws-sdk (2.5.3)
aws-sdk-resources (= 2.5.3)
aws-sdk-core (2.5.3)
aws-partitions (1.24.0)
aws-sdk-core (3.6.0)
aws-partitions (~> 1.0)
aws-sigv4 (~> 1.0)
jmespath (~> 1.0)
aws-sdk-resources (2.5.3)
aws-sdk-core (= 2.5.3)
aws-sdk-kms (1.2.0)
aws-sdk-core (~> 3)
aws-sigv4 (~> 1.0)
aws-sdk-s3 (1.4.0)
aws-sdk-core (~> 3)
aws-sdk-kms (~> 1)
aws-sigv4 (~> 1.0)
aws-sigv4 (1.0.2)
barber (0.11.2)
ember-source (>= 1.0, < 3)
execjs (>= 1.2, < 3)
@ -393,7 +400,7 @@ DEPENDENCIES
activerecord (~> 5.1)
activesupport (~> 5.1)
annotate
aws-sdk
aws-sdk-s3
barber
better_errors
binding_of_caller

View File

@ -1,4 +1,4 @@
require "aws-sdk"
require "aws-sdk-s3"
class S3Helper
@ -46,21 +46,57 @@ class S3Helper
rescue Aws::S3::Errors::NoSuchKey
end
def update_tombstone_lifecycle(grace_period)
return if @tombstone_prefix.blank?
def update_lifecycle(id, days, prefix: nil)
# cf. http://docs.aws.amazon.com/AmazonS3/latest/dev/object-lifecycle-mgmt.html
rule = {
id: id,
status: "Enabled",
expiration: { days: days }
}
if prefix
rule[:prefix] = prefix
end
rules = s3_resource.client.get_bucket_lifecycle_configuration(bucket: @s3_bucket_name).rules
rules.delete_if do |r|
r.id == id
end
rules.map! { |r| r.to_h }
rules << rule
s3_resource.client.put_bucket_lifecycle(bucket: @s3_bucket_name,
lifecycle_configuration: {
rules: [
{
id: "purge-tombstone",
status: "Enabled",
expiration: { days: grace_period },
prefix: @tombstone_prefix
}
]
})
rules: rules
})
end
# Install (or refresh) the lifecycle rule that purges tombstoned uploads
# once +grace_period+ days have elapsed. No-op when no tombstone prefix
# is configured for this helper.
def update_tombstone_lifecycle(grace_period)
if @tombstone_prefix.present?
update_lifecycle("purge_tombstone", grace_period, prefix: @tombstone_prefix)
end
end
# Enumerate the S3 objects that live under this helper's folder prefix.
def list
folder = @s3_bucket_folder_path
s3_bucket.objects(prefix: folder)
end
# Replace the full tag set on the object at +key+ with +tags+
# (a Hash; keys and values are stringified for the S3 API).
def tag_file(key, tags)
tag_set = tags.map { |name, value| { key: name.to_s, value: value.to_s } }
s3_resource.client.put_object_tagging(
bucket: @s3_bucket_name,
key: key,
tagging: { tag_set: tag_set }
)
end
private

133
lib/tasks/s3.rake Normal file
View File

@ -0,0 +1,133 @@
require_dependency "s3_helper"
# S3 key for the brotli-compressed variant of +path+:
# "assets/app.js" -> "assets/app.br.js".
# The original slice `path[0..-ext.length]` broke for extensionless
# files (it kept only the first character); handle that case explicitly.
def brotli_s3_path(path)
ext = File.extname(path)
base = ext.empty? ? path : path[0...-ext.length]
"#{base}.br#{ext}"
end
# S3 key for the gzip-compressed variant of +path+:
# "assets/app.js" -> "assets/app.gz.js".
# The original slice `path[0..-ext.length]` broke for extensionless
# files (it kept only the first character); handle that case explicitly.
def gzip_s3_path(path)
ext = File.extname(path)
base = ext.empty? ? path : path[0...-ext.length]
"#{base}.gz#{ext}"
end
# Skip uploading +path+ when it is already present in the bucket.
# FORCE_S3_UPLOADS overrides the check and re-uploads everything —
# the original returned true here, which inverted the flag and made
# it skip every asset instead of forcing them.
def should_skip?(path)
return false if ENV['FORCE_S3_UPLOADS']
# Cache the bucket listing; one S3 round-trip for the whole rake run.
@existing_assets ||= Set.new(helper.list.map(&:key))
@existing_assets.include?('assets/' + path)
end
# Upload one compiled asset to S3 and, when +recurse+ is true, its
# pre-compressed (.br/.gz) and source-map siblings as well.
#
# helper            - S3Helper performing the upload
# path              - key under "assets/" in the bucket
# recurse:          - also upload .br/.gz/.map variants when true
# content_type:     - explicit MIME type; inferred from filename when nil
# fullpath:         - local file path; derived from public/assets when nil
# content_encoding: - Content-Encoding header value (e.g. 'br', 'gzip')
def upload_asset(helper, path, recurse: true, content_type: nil, fullpath: nil, content_encoding: nil)
fullpath ||= (Rails.root + "public/assets/#{path}").to_s
# lookup_by_filename returns nil for unknown extensions; the previous
# unconditional .content_type raised NoMethodError in that case.
content_type ||= MiniMime.lookup_by_filename(path)&.content_type
options = {
cache_control: 'max-age=31556952, public, immutable',
acl: 'public-read',
tagging: ''
}
# Only send headers we actually know; nil values would be meaningless.
options[:content_type] = content_type if content_type
options[:content_encoding] = content_encoding if content_encoding
if should_skip?(path)
puts "Skipping: #{path}"
else
puts "Uploading: #{path}"
helper.upload(fullpath, path, options)
end
if recurse
if File.exist?(fullpath + ".br")
# Compressed variants keep the original MIME type and declare
# their encoding so browsers can decompress transparently.
upload_asset(helper, brotli_s3_path(path),
fullpath: fullpath + ".br",
recurse: false,
content_type: content_type,
content_encoding: 'br'
)
end
if File.exist?(fullpath + ".gz")
upload_asset(helper, gzip_s3_path(path),
fullpath: fullpath + ".gz",
recurse: false,
content_type: content_type,
content_encoding: 'gzip'
)
end
if File.exist?(fullpath + ".map")
upload_asset(helper, path + ".map", recurse: false, content_type: 'application/json')
end
end
end
# Fingerprinted assets from the Sprockets manifest.
# Raises Discourse::SiteSettingMissing unless an upload bucket is set.
def assets
sprockets_env = Rails.application.assets.cached
manifest_file = Rails.application.config.assets.manifest
manifest = Sprockets::Manifest.new(sprockets_env, Rails.root + 'public/assets', manifest_file)
raise Discourse::SiteSettingMissing.new("s3_upload_bucket") if SiteSetting.s3_upload_bucket.blank?
manifest.assets
end
# Memoized S3Helper scoped to the "assets" folder of the upload bucket.
def helper
@helper ||= begin
bucket = SiteSetting.s3_upload_bucket.downcase
S3Helper.new("#{bucket}/assets")
end
end
# Set of every S3 key the current manifest accounts for: each asset
# plus whichever .br/.gz/.map variants exist on disk.
def in_manifest
keys = Set.new
assets.each do |_, path|
local = (Rails.root + "public/assets/#{path}").to_s
key = "assets/#{path}"
keys << key
keys << brotli_s3_path(key) if File.exist?(local + '.br')
keys << gzip_s3_path(key) if File.exist?(local + '.gz')
keys << (key + '.map') if File.exist?(local + '.map')
end
keys
end
# Upload every fingerprinted asset from the Sprockets manifest to S3.
# Compressed and source-map variants are handled inside upload_asset.
task 's3:upload_assets' => :environment do
# manifest yields (logical name, fingerprinted path); only the path is needed
assets.each do |name, fingerprint|
upload_asset(helper, fingerprint)
end
end
# Tag bucket objects that are no longer referenced by the manifest with
# old=true so a lifecycle rule can purge them, and clear tags on the
# objects we still need. The original condition was inverted: it tagged
# the *kept* assets for deletion and cleared the missing ones.
task 's3:expire_missing_assets' => :environment do
keep = in_manifest
count = 0
puts "Ensuring AWS assets are tagged correctly for removal"
helper.list.each do |f|
if keep.include?(f.key)
# ensure we do not delete this by mistake
helper.tag_file(f.key, {})
else
helper.tag_file(f.key, old: true)
count += 1
end
end
puts "#{count} assets were flagged for removal in 10 days"
puts "Ensuring AWS rule exists for purging old assets"
#helper.update_lifecycle("delete_old_assets", 10, prefix: 'old=true')
puts "Waiting on https://github.com/aws/aws-sdk-ruby/issues/1623"
end