DEV: Use a Logger for s3:upload_assets (#30218)

Now that we run the `upload` method in different threads, we need to
synchronize writes to `STDOUT`, which we can do by using a `Logger`.

Follow-up to 49e8353959
This commit is contained in:
Alan Guo Xiang Tan 2024-12-11 11:48:06 +08:00 committed by GitHub
parent 49e8353959
commit 9a2e31b9af
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

View File

@ -24,7 +24,7 @@ def should_skip?(path)
existing_assets.include?(prefix_s3_path(path))
end
def upload(path, remote_path, content_type, content_encoding = nil)
def upload(path, remote_path, content_type, content_encoding = nil, logger:)
options = {
cache_control: "max-age=31556952, public, immutable",
content_type: content_type,
@ -34,9 +34,9 @@ def upload(path, remote_path, content_type, content_encoding = nil)
options[:content_encoding] = content_encoding if content_encoding
if should_skip?(remote_path)
puts "Skipping: #{remote_path}"
logger << "Skipping: #{remote_path}"
else
puts "Uploading: #{remote_path}"
logger << "Uploading: #{remote_path}"
File.open(path) { |file| helper.upload(file, remote_path, options) }
end
@ -200,7 +200,8 @@ task "s3:upload_assets" => [:environment, "s3:ensure_cors_rules"] do
ENV["DISCOURSE_S3_UPLOAD_ASSETS_RAKE_THREAD_POOL_SIZE"] || Concurrent.processor_count,
)
assets.each { |asset| pool.post { upload(*asset) } }
logger = Logger.new(STDOUT)
assets.each { |asset| pool.post { upload(*asset, logger:) } }
pool.shutdown
pool.wait_for_termination