Mirror of https://github.com/discourse/discourse.git
FEATURE: generate (avatar) thumbnails in a background task

FIX: keep the "uploading..." indicator until the server replies via the MessageBus
FIX: text was disappearing when uploading an avatar
PERF: always use a region for S3 (defaults to 'us-east-1')
FEATURE: ApplyCDN middleware when using S3
FIX: use the same pattern to store files on S3 and locally
PERF: keep a local cache of uploads when generating thumbnails
FEATURE: migrate_to_s3 rake task
parent 675e2c6e13
commit bb0c2813ac
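The gist of the new upload flow, as a hedged sketch (class and method names are taken from the diff below, but the bodies are condensed and illustrative rather than verbatim Discourse code): the controller enqueues thumbnail generation in a background job and notifies the client over the MessageBus once the upload has actually been processed, which is why the client-side mixin can keep its "uploading..." indicator until that message arrives.

# Illustrative sketch only -- condensed from the diff below, not verbatim Discourse code.
class UploadsController < ApplicationController
  def create
    type = params.require(:type)
    file = params[:file]
    upload = Upload.create_for(current_user.id, file.tempfile, file.original_filename, file.tempfile.size)

    # thumbnails are now generated out of band, in a background job
    if upload.errors.empty? && FileHelper.is_image?(file.original_filename)
      Jobs.enqueue(:create_thumbnails, upload_id: upload.id, type: type)
    end

    # the client-side UploadMixin subscribes to this channel and only clears
    # its "uploading..." state once this message arrives
    data = upload.errors.empty? ? upload : { errors: upload.errors.values.flatten }
    MessageBus.publish("/uploads/#{type}", data.as_json, user_ids: [current_user.id])

    render json: success_json
  end
end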
@@ -10,23 +10,24 @@ export default Em.Component.extend(UploadMixin, {
  }.property("uploading"),

  uploadDone(upload) {
    // display a warning whenever the image is not a square
    this.set("imageIsNotASquare", upload.width !== upload.height);

    // in order to be as much responsive as possible, we're cheating a bit here
    // indeed, the server gives us back the url to the file we've just uploaded
    // often, this file is not a square, so we need to crop it properly
    // this will also capture the first frame of animated avatars when they're not allowed
    Discourse.Utilities.cropAvatar(upload.url).then(avatarTemplate => {
      // display a warning whenever the image is not a square
      this.set("imageIsNotASquare", upload.width !== upload.height);

      // set the avatar template to update the image on the client
      this.set("uploadedAvatarTemplate", avatarTemplate);

      // indicates the users is using an uploaded avatar (must happen after cropping, otherwise
      // we will attempt to load an invalid avatar and cache a redirect to old one, uploadedAvatarTemplate
      // trumps over custom_avatar_upload_id)
      this.set("custom_avatar_upload_id", upload.id);
    });

    // the upload is now done
    this.sendAction("done");

      // the upload is now done
      this.sendAction("done");
    });
  }
});
@@ -13,7 +13,8 @@ export default Em.Mixin.create({
  _initialize: function() {
    const $upload = this.$(),
          csrf = Discourse.Session.currentProp("csrfToken"),
          uploadUrl = this.getWithDefault("uploadUrl", "/uploads");
          uploadUrl = this.getWithDefault("uploadUrl", "/uploads"),
          reset = () => this.setProperties({ uploading: false, uploadProgress: 0});

    this.messageBus.subscribe("/uploads/" + this.get("type"), upload => {
      if (upload && upload.url) {

@@ -21,6 +22,7 @@ export default Em.Mixin.create({
      } else {
        Discourse.Utilities.displayErrorForUpload(upload);
      }
      reset();
    });

    $upload.fileupload({

@@ -55,10 +57,7 @@ export default Em.Mixin.create({
    $upload.on("fileuploadfail", (e, data) => {
      Discourse.Utilities.displayErrorForUpload(data);
    });

    $upload.on("fileuploadalways", () => {
      this.setProperties({ uploading: false, uploadProgress: 0});
      reset();
    });
  }.on("didInsertElement"),
@@ -17,8 +17,9 @@
  {{#if uploadedAvatarTemplate}}
    {{bound-avatar-template uploadedAvatarTemplate "large"}}
  {{else}}
    {{bound-avatar controller "large" custom_avatar_upload_id}} {{i18n 'user.change_avatar.uploaded_avatar'}}
    {{bound-avatar controller "large" custom_avatar_upload_id}}
  {{/if}}
  {{i18n 'user.change_avatar.uploaded_avatar'}}
{{else}}
  {{i18n 'user.change_avatar.uploaded_avatar_empty'}}
{{/if}}
@@ -321,6 +321,8 @@ const ComposerView = Discourse.View.extend(Ember.Evented, {
        Discourse.Utilities.displayErrorForUpload(upload);
      }
    }
    // reset upload state
    reset();
  });

  $uploadTarget.fileupload({

@@ -352,7 +354,7 @@ const ComposerView = Discourse.View.extend(Ember.Evented, {
      cancelledByTheUser = true;
      // might trigger a "fileuploadfail" event with status = 0
      jqHXR.abort();
      // doesn't trigger the "fileuploadalways" event
      // make sure we always reset the uploading status
      reset();
    }
    // unbind

@@ -369,13 +371,12 @@ const ComposerView = Discourse.View.extend(Ember.Evented, {
  });

  $uploadTarget.on("fileuploadfail", (e, data) => {
    reset();
    if (!cancelledByTheUser) {
      Discourse.Utilities.displayErrorForUpload(data);
    }
  });

  $uploadTarget.on("fileuploadalways", reset);

  // contenteditable div hack for getting image paste to upload working in
  // Firefox. This is pretty dangerous because it can potentially break
  // Ctrl+v to paste so we should be conservative about what browsers this runs
@@ -25,6 +25,10 @@ class UploadsController < ApplicationController
      upload.update_columns(retain_hours: retain_hours) if retain_hours > 0
    end

    if upload.errors.empty? && FileHelper.is_image?(filename)
      Jobs.enqueue(:create_thumbnails, upload_id: upload.id, type: type)
    end

    data = upload.errors.empty? ? upload : { errors: upload.errors.values.flatten }

    MessageBus.publish("/uploads/#{type}", data.as_json, user_ids: [current_user.id])
@@ -49,13 +49,12 @@ class UserAvatarsController < ApplicationController
  protected

  def show_in_site(hostname)
    size = params[:size].to_i
    return render_dot unless Discourse.avatar_sizes.include?(size)

    username = params[:username].to_s
    return render_dot unless user = User.find_by(username_lower: username.downcase)

    size = params[:size].to_i
    return render_dot if size > 1000 || size < 1

    image = nil
    version = params[:version].to_i
    return render_dot unless version > 0 && user_avatar = user.user_avatar

@@ -67,14 +66,11 @@ class UserAvatarsController < ApplicationController
    elsif upload
      original = Discourse.store.path_for(upload)
      if Discourse.store.external? || File.exists?(original)
        optimized = get_optimized_image(upload, size)

        if optimized
        if optimized = get_optimized_image(upload, size)
          if Discourse.store.external?
            expires_in 1.day, public: true
            return redirect_to optimized.url
          end

          image = Discourse.store.path_for(optimized)
        end
      end
app/jobs/regular/create_thumbnails.rb (new file, 33 lines)
@@ -0,0 +1,33 @@
module Jobs

  class CreateThumbnails < Jobs::Base

    def execute(args)
      upload_id = args[:upload_id]
      type = args[:type]

      raise Discourse::InvalidParameters.new(:upload_id) if upload_id.blank?
      raise Discourse::InvalidParameters.new(:type) if type.blank?

      # only need to generate thumbnails for avatars
      return if type != "avatar"

      upload = Upload.find(upload_id)

      self.send("create_thumbnails_for_#{type}", upload)
    end

    PIXELS ||= [1, 2]

    def create_thumbnails_for_avatar(upload)
      PIXELS.each do |pixel|
        Discourse.avatar_sizes.each do |size|
          size *= pixel
          upload.create_thumbnail!(size, size)
        end
      end
    end

  end

end
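For scale: with the default avatar_sizes setting introduced further down in this commit ('20|25|32|45|60|120') and PIXELS = [1, 2], the job requests one square thumbnail per size/pixel-ratio pair. A rough, illustrative calculation assuming those defaults (not part of the commit itself):

avatar_sizes = "20|25|32|45|60|120".split("|").map(&:to_i)
pixel_ratios = [1, 2]

# one size x size thumbnail per combination (duplicates collapse, e.g. 120 appears for both ratios)
requested = pixel_ratios.flat_map { |ratio| avatar_sizes.map { |size| size * ratio } }.uniq.sort
# => [20, 25, 32, 40, 45, 50, 60, 64, 90, 120, 240]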
@@ -20,8 +20,8 @@ class OptimizedImage < ActiveRecord::Base
    return thumbnail unless thumbnail.nil?

    # create the thumbnail otherwise
    external_copy = Discourse.store.download(upload) if Discourse.store.external?
    original_path = if Discourse.store.external?
      external_copy = Discourse.store.download(upload)
      external_copy.try(:path)
    else
      Discourse.store.path_for(upload)
@@ -6,12 +6,11 @@ class S3RegionSiteSetting < EnumSiteSetting
  end

  def self.values
    @values ||= valid_values.sort.map {|x| {name: x, value: x} }
    @values ||= valid_values.sort.map { |x| { name: x, value: x } }
  end

  def self.valid_values
    [ '',
      'us-east-1',
    [ 'us-east-1',
      'us-west-1',
      'us-west-2',
      'us-gov-west-1',
@@ -93,6 +93,10 @@ class SiteSetting < ActiveRecord::Base
    use_https? ? "https" : "http"
  end

  def self.max_file_size_kb
    [SiteSetting.max_image_size_kb, SiteSetting.max_attachment_size_kb].max.kilobytes
  end

  def self.has_enough_topics_to_redirect_to_top
    TopTopic.periods.each do |period|
      topics_per_period = TopTopic.where("#{period}_score > 0")
@@ -135,6 +135,9 @@ module Discourse
    # supports etags (post 1.7)
    config.middleware.delete Rack::ETag

    require 'middleware/apply_cdn'
    config.middleware.use Middleware::ApplyCDN

    # route all exceptions via our router
    config.exceptions_app = self.routes
@@ -949,6 +949,8 @@ en:
    s3_secret_access_key: "The Amazon S3 secret access key that will be used to upload images."
    s3_region: "The Amazon S3 region name that will be used to upload images."

    avatar_sizes: "List of automatically generated avatar sizes."

    enable_flash_video_onebox: "Enable embedding of swf and flv (Adobe Flash) links in oneboxes. WARNING: may introduce security risks."

    default_invitee_trust_level: "Default trust level (0-4) for invited users."
@@ -532,7 +532,7 @@ files:
  s3_access_key_id: ''
  s3_secret_access_key: ''
  s3_region:
    default: ''
    default: 'us-east-1'
    enum: 'S3RegionSiteSetting'
  s3_upload_bucket:
    default: ''

@@ -552,6 +552,9 @@ files:
    default: ''
    type: url_list
    client: true
  avatar_sizes:
    default: '20|25|32|45|60|120'
    type: list

trust:
  default_trust_level:
db/migrate/20150525151759_set_default_s3_region.rb (new file, 13 lines)
@@ -0,0 +1,13 @@
class SetDefaultS3Region < ActiveRecord::Migration
  def up
    execute <<-SQL
      UPDATE site_settings
         SET value = 'us-east-1'
       WHERE name = 's3_region'
         AND LENGTH(COALESCE(value, '')) = 0
    SQL
  end

  def down
  end
end
@@ -78,6 +78,10 @@ module Discourse
    @anonymous_top_menu_items ||= Discourse.anonymous_filters + [:category, :categories, :top]
  end

  def self.avatar_sizes
    @avatar_size ||= Set.new(SiteSetting.avatar_sizes.split("|").map(&:to_i))
  end

  def self.activate_plugins!
    all_plugins = Plugin::Instance.find_all("#{Rails.root}/plugins")
@@ -1,4 +1,4 @@
require 'file_store/base_store'
require "file_store/base_store"
require_dependency "s3_helper"
require_dependency "file_helper"
@@ -6,13 +6,15 @@ module FileStore

  class S3Store < BaseStore

    def initialize(s3_helper = nil)
      @s3_helper = s3_helper || S3Helper.new(s3_bucket, tombstone_prefix)
    TOMBSTONE_PREFIX ||= "tombstone/"

    def initialize(s3_helper=nil)
      @s3_helper = s3_helper || S3Helper.new(s3_bucket, TOMBSTONE_PREFIX)
    end

    def store_upload(file, upload, content_type = nil)
    def store_upload(file, upload, content_type=nil)
      path = get_path_for_upload(file, upload)
      store_file(file, path, upload.original_filename, content_type)
      store_file(file, path, filename: upload.original_filename, content_type: content_type, cache_locally: true)
    end

    def store_optimized_image(file, optimized_image)
@@ -33,7 +35,7 @@ module FileStore
    end

    def absolute_base_url
      "//#{s3_bucket}.s3.amazonaws.com"
      @absolute_base_url ||= "//#{s3_bucket}.s3-#{s3_region}.amazonaws.com"
    end

    def external?
@@ -46,14 +48,19 @@ module FileStore
    def download(upload)
      return unless has_been_uploaded?(upload.url)
      url = SiteSetting.scheme + ":" + upload.url
      max_file_size = [SiteSetting.max_image_size_kb, SiteSetting.max_attachment_size_kb].max.kilobytes
      FileHelper.download(url, max_file_size, "discourse-s3", true)
    end

    def avatar_template(avatar)
      template = relative_avatar_template(avatar)
      "#{absolute_base_url}/#{template}"
      DistributedMutex.synchronize("s3_download_#{upload.sha1}") do
        filename = "#{upload.sha1}#{File.extname(upload.original_filename)}"
        file = get_from_cache(filename)

        if !file
          url = SiteSetting.scheme + ":" + upload.url
          file = FileHelper.download(url, SiteSetting.max_file_size_kb, "discourse-s3", true)
          cache_file(file, filename)
        end

        file
      end
    end

    def purge_tombstone(grace_period)
@@ -63,27 +70,32 @@ module FileStore
    private

    def get_path_for_upload(file, upload)
      "#{upload.id}#{upload.sha1}#{upload.extension}"
      get_path_for("original".freeze, upload.sha1, upload.extension)
    end

    def get_path_for_optimized_image(file, optimized_image)
      "#{optimized_image.id}#{optimized_image.sha1}_#{optimized_image.width}x#{optimized_image.height}#{optimized_image.extension}"
      extension = "_#{optimized_image.width}x#{optimized_image.height}#{optimized_image.extension}"
      get_path_for("optimized".freeze, optimized_image.sha1, extension)
    end

    def get_path_for_avatar(file, avatar, size)
      relative_avatar_template(avatar).gsub("{size}", size.to_s)
    def get_path_for(type, sha, extension)
      "#{type}/#{sha[0]}/#{sha[1]}/#{sha}#{extension}"
    end

    def relative_avatar_template(avatar)
      "avatars/#{avatar.sha1}/{size}#{avatar.extension}"
    end

    def store_file(file, path, filename=nil, content_type=nil)
    # options
    #   - filename
    #   - content_type
    #   - cache_locally
    def store_file(file, path, opts={})
      filename = opts[:filename].presence
      content_type = opts[:content_type].presence
      # cache file locally when needed
      cache_file(file, File.basename(path)) if opts[:cache_locally]
      # stored uploaded are public by default
      options = { acl: 'public-read' }
      options = { acl: "public-read" }
      # add a "content disposition" header for "attachments"
      options[:content_disposition] = "attachment; filename=\"#{filename}\"" if filename && !FileHelper.is_image?(filename)
      # add a "content type" header when provided (ie. for "attachments")
      # add a "content type" header when provided
      options[:content_type] = content_type if content_type
      # if this fails, it will throw an exception
      @s3_helper.upload(file, path, options)
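The new get_path_for helper shards objects by the first two characters of their SHA1, so uploads and optimized images now share one layout on S3 and locally. A worked example, using the SHA1 that appears in the updated specs near the end of this diff (illustrative only):

sha1 = "e9d71f5ee7c92d6dc9e92ffdad17b8bd49418f98"

# before this commit: "#{upload.id}#{upload.sha1}#{upload.extension}"
#   => "42e9d71f5ee7c92d6dc9e92ffdad17b8bd49418f98.png"
# after this commit:  get_path_for("original", sha1, ".png")
"original/#{sha1[0]}/#{sha1[1]}/#{sha1}.png"
# => "original/e/9/e9d71f5ee7c92d6dc9e92ffdad17b8bd49418f98.png"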
@@ -98,14 +110,35 @@ module FileStore
      @s3_helper.remove(filename, true)
    end

    CACHE_DIR ||= "#{Rails.root}/tmp/s3_cache/"
    CACHE_MAXIMUM_SIZE ||= 500

    def get_cache_path_for(filename)
      "#{CACHE_DIR}#{filename}"
    end

    def get_from_cache(filename)
      path = get_cache_path_for(filename)
      File.open(path) if File.exists?(path)
    end

    def cache_file(file, filename)
      path = get_cache_path_for(filename)
      dir = File.dirname(path)
      FileUtils.mkdir_p(dir) unless Dir[dir].present?
      FileUtils.cp(file.path, path)
      # keep up to 500 files
      `ls -tr #{CACHE_DIR} | head -n +#{CACHE_MAXIMUM_SIZE} | xargs rm -f`
    end

    def s3_bucket
      return @s3_bucket if @s3_bucket
      raise Discourse::SiteSettingMissing.new("s3_upload_bucket") if SiteSetting.s3_upload_bucket.blank?
      @s3_bucket = SiteSetting.s3_upload_bucket.downcase
    end

    def tombstone_prefix
      "tombstone/"
    def s3_region
      SiteSetting.s3_region
    end

  end
lib/middleware/apply_cdn.rb (new file, 24 lines)
@@ -0,0 +1,24 @@
module Middleware

  class ApplyCDN

    def initialize(app, settings={})
      @app = app
    end

    def call(env)
      status, headers, response = @app.call(env)

      if Discourse.asset_host.present? &&
         Discourse.store.external? &&
         (headers["Content-Type"].start_with?("text/") ||
          headers["Content-Type"].start_with?("application/json"))
        response.body = response.body.gsub(Discourse.store.absolute_base_url, Discourse.asset_host)
      end

      [status, headers, response]
    end

  end

end
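In effect, any text or JSON response that references the S3 base URL is rewritten to point at the CDN. A small illustrative example (both hostnames are made up here, not taken from the commit):

# hypothetical values -- not from the commit
absolute_base_url = "//discourse-uploads.s3-us-east-1.amazonaws.com"
asset_host        = "https://cdn.example.com"

body = "<img src='//discourse-uploads.s3-us-east-1.amazonaws.com/original/e/9/e9d71f.png'>"
body.gsub(absolute_base_url, asset_host)
# => "<img src='https://cdn.example.com/original/e/9/e9d71f.png'>"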
@@ -18,7 +18,6 @@ class S3Helper
  def remove(unique_filename, copy_to_tombstone=false)
    bucket = s3_bucket

    # copy the file in tombstone
    if copy_to_tombstone && @tombstone_prefix.present?
      bucket.object(@tombstone_prefix + unique_filename).copy_from(copy_source: "#{@s3_bucket}/#{unique_filename}")
@@ -29,19 +28,17 @@ class S3Helper
  end

  def update_tombstone_lifecycle(grace_period)
    return if @tombstone_prefix.blank?
    # cf. http://docs.aws.amazon.com/AmazonS3/latest/dev/object-lifecycle-mgmt.html
    s3_resource.client.put_bucket_lifecycle({
      bucket: @s3_bucket,
      lifecycle_configuration: {
        rules: [
          {
            id: 'purge-tombstone',
            status: 'Enabled',
            expiration: {
              days: grace_period
            },
            id: "purge-tombstone",
            status: "Enabled",
            expiration: { days: grace_period },
            prefix: @tombstone_prefix
          }
        ]
@@ -70,7 +67,6 @@ class S3Helper
    bucket
  end

  def check_missing_site_settings
    unless SiteSetting.s3_use_iam_profile
      raise Discourse::SiteSettingMissing.new("s3_access_key_id") if SiteSetting.s3_access_key_id.blank?
@@ -1,5 +1,9 @@
require "digest/sha1"

################################################################################
# backfill_shas #
################################################################################

task "uploads:backfill_shas" => :environment do
  RailsMultisite::ConnectionManagement.each_connection do |db|
    puts "Backfilling #{db}"
@@ -19,12 +23,15 @@ task "uploads:backfill_shas" => :environment do
  puts "done"
end

################################################################################
# migrate_from_s3 #
################################################################################

task "uploads:migrate_from_s3" => :environment do
  require 'file_store/local_store'
  require 'file_helper'
  require "file_store/local_store"
  require "file_helper"

  local_store = FileStore::LocalStore.new
  max_file_size = [SiteSetting.max_image_size_kb, SiteSetting.max_attachment_size_kb].max.kilobytes

  puts "Deleting all optimized images..."
  puts
@@ -44,7 +51,7 @@ task "uploads:migrate_from_s3" => :environment do

    # no need to download an upload twice
    if local_store.has_been_uploaded?(upload.url)
      putc '.'
      putc "."
      next
    end
@@ -55,7 +62,7 @@ task "uploads:migrate_from_s3" => :environment do
      # fix the name of pasted images
      upload.original_filename = "blob.png" if upload.original_filename == "blob"
      # download the file (in a temp file)
      temp_file = FileHelper.download("http:" + previous_url, max_file_size, "from_s3")
      temp_file = FileHelper.download("http:" + previous_url, SiteSetting.max_file_size_kb, "from_s3")
      # store the file locally
      upload.url = local_store.store_upload(temp_file, upload)
      # save the new url
@@ -66,15 +73,15 @@ task "uploads:migrate_from_s3" => :environment do
          post.save
        end

        putc '#'
        putc "#"
      else
        putc 'X'
        putc "X"
      end

      # close the temp_file
      temp_file.close! if temp_file.respond_to? :close!
    rescue
      putc 'X'
      putc "X"
    end

  end
@@ -83,6 +90,77 @@ task "uploads:migrate_from_s3" => :environment do

end

################################################################################
# migrate_to_s3 #
################################################################################

task "uploads:migrate_to_s3" => :environment do
  require "file_store/s3_store"
  require "file_store/local_store"

  ENV["RAILS_DB"] ? migrate_to_s3 : migrate_to_s3_all_sites
end

def migrate_to_s3_all_sites
  RailsMultisite::ConnectionManagement.each_connection { migrate_to_s3 }
end

def migrate_to_s3
  # make sure s3 is enabled
  if !SiteSetting.enable_s3_uploads
    puts "You must enable s3 uploads before running that task"
    return
  end

  db = RailsMultisite::ConnectionManagement.current_db

  puts "Migrating uploads to S3 (#{SiteSetting.s3_upload_bucket}) for '#{db}'..."

  # will throw an exception if the bucket is missing
  s3 = FileStore::S3Store.new
  local = FileStore::LocalStore.new

  # Migrate all uploads
  Upload.where.not(sha1: nil)
        .where("url NOT LIKE '#{s3.absolute_base_url}%'")
        .find_each do |upload|
    # remove invalid uploads
    if upload.url.blank?
      upload.destroy!
      next
    end
    # store the old url
    from = upload.url
    # retrieve the path to the local file
    path = local.path_for(upload)
    # make sure the file exists locally
    if !File.exists?(path)
      putc "X"
      next
    end

    begin
      file = File.open(path)
      content_type = `file --mime-type -b #{path}`.strip
      to = s3.store_upload(file, upload, content_type)
    rescue
      putc "X"
      next
    ensure
      file.try(:close!) rescue nil
    end

    # remap the URL
    remap(from, to)

    putc "."
  end
end

################################################################################
# clean_up #
################################################################################

task "uploads:clean_up" => :environment do

  RailsMultisite::ConnectionManagement.each_connection do |db|
@@ -158,6 +236,9 @@ task "uploads:clean_up" => :environment do

end

################################################################################
# missing #
################################################################################

# list all missing uploads and optimized images
task "uploads:missing" => :environment do
@@ -207,6 +288,10 @@ task "uploads:missing" => :environment do

end

################################################################################
# regenerate_missing_optimized #
################################################################################

# regenerate missing optimized images
task "uploads:regenerate_missing_optimized" => :environment do
  ENV["RAILS_DB"] ? regenerate_missing_optimized : regenerate_missing_optimized_all_sites
@@ -278,6 +363,10 @@ def regenerate_missing_optimized
  end
end

################################################################################
# migrate_to_new_pattern #
################################################################################

task "uploads:migrate_to_new_pattern" => :environment do
  ENV["RAILS_DB"] ? migrate_to_new_pattern : migrate_to_new_pattern_all_sites
end
@@ -26,7 +26,7 @@ describe FileStore::S3Store do
      upload.stubs(:id).returns(42)
      upload.stubs(:extension).returns(".png")
      s3_helper.expects(:upload)
      expect(store.store_upload(uploaded_file, upload)).to eq("//s3_upload_bucket.s3.amazonaws.com/42e9d71f5ee7c92d6dc9e92ffdad17b8bd49418f98.png")
      expect(store.store_upload(uploaded_file, upload)).to eq("//s3_upload_bucket.s3-us-east-1.amazonaws.com/original/e/9/e9d71f5ee7c92d6dc9e92ffdad17b8bd49418f98.png")
    end

  end

@@ -36,7 +36,7 @@ describe FileStore::S3Store do
    it "returns an absolute schemaless url" do
      optimized_image.stubs(:id).returns(42)
      s3_helper.expects(:upload)
      expect(store.store_optimized_image(optimized_image_file, optimized_image)).to eq("//s3_upload_bucket.s3.amazonaws.com/4286f7e437faa5a7fce15d1ddcb9eaeaea377667b8_100x200.png")
      expect(store.store_optimized_image(optimized_image_file, optimized_image)).to eq("//s3_upload_bucket.s3-us-east-1.amazonaws.com/optimized/8/6/86f7e437faa5a7fce15d1ddcb9eaeaea377667b8_100x200.png")
    end

  end

@@ -62,10 +62,11 @@ describe FileStore::S3Store do
  describe ".has_been_uploaded?" do

    it "identifies S3 uploads" do
      expect(store.has_been_uploaded?("//s3_upload_bucket.s3.amazonaws.com/1337.png")).to eq(true)
      expect(store.has_been_uploaded?("//s3_upload_bucket.s3-us-east-1.amazonaws.com/1337.png")).to eq(true)
    end

    it "does not match other s3 urls" do
      expect(store.has_been_uploaded?("//s3_upload_bucket.s3.amazonaws.com/1337.png")).to eq(false)
      expect(store.has_been_uploaded?("//s3.amazonaws.com/s3_upload_bucket/1337.png")).to eq(false)
      expect(store.has_been_uploaded?("//s4_upload_bucket.s3.amazonaws.com/1337.png")).to eq(false)
    end

@@ -75,7 +76,7 @@ describe FileStore::S3Store do
  describe ".absolute_base_url" do

    it "returns a lowercase schemaless absolute url" do
      expect(store.absolute_base_url).to eq("//s3_upload_bucket.s3.amazonaws.com")
      expect(store.absolute_base_url).to eq("//s3_upload_bucket.s3-us-east-1.amazonaws.com")
    end

  end

@@ -93,13 +94,6 @@ describe FileStore::S3Store do
      store.download(upload)
    end

    it "works" do
      upload.stubs(:url).returns("//s3_upload_bucket.s3.amazonaws.com/1337.png")
      max_file_size = [SiteSetting.max_image_size_kb, SiteSetting.max_attachment_size_kb].max.kilobytes
      FileHelper.expects(:download).with("http://s3_upload_bucket.s3.amazonaws.com/1337.png", max_file_size, "discourse-s3", true)
      store.download(upload)
    end

  end

  describe ".purge_tombstone" do
@@ -27,27 +27,35 @@ describe UploadsController do
    end

    it 'is successful with an image' do
      Jobs.expects(:enqueue).with(:create_thumbnails, anything)

      message = MessageBus.track_publish do
        xhr :post, :create, file: logo, type: "composer"
        xhr :post, :create, file: logo, type: "avatar"
      end.first

      expect(response.status).to eq 200
      expect(message.channel).to eq("/uploads/composer")
      expect(message.data).to be
    end

    it 'is successful with an attachment' do
      message = MessageBus.track_publish do
        xhr :post, :create, file: text_file, type: "avatar"
      end.first

      expect(response.status).to eq 200
      expect(message.channel).to eq("/uploads/avatar")
      expect(message.data).to be
    end

    it 'is successful with an attachment' do
      SiteSetting.stubs(:authorized_extensions).returns("*")

      Jobs.expects(:enqueue).never

      message = MessageBus.track_publish do
        xhr :post, :create, file: text_file, type: "composer"
      end.first

      expect(response.status).to eq 200
      expect(message.channel).to eq("/uploads/composer")
      expect(message.data).to be
    end

    it 'correctly sets retain_hours for admins' do
      Jobs.expects(:enqueue).with(:create_thumbnails, anything)

      log_in :admin

      message = MessageBus.track_publish do

@@ -61,6 +69,8 @@ describe UploadsController do
    it 'properly returns errors' do
      SiteSetting.stubs(:max_attachment_size_kb).returns(1)

      Jobs.expects(:enqueue).never

      message = MessageBus.track_publish do
        xhr :post, :create, file: text_file, type: "avatar"
      end.first
@@ -13,8 +13,8 @@ describe S3RegionSiteSetting do
  end

  describe 'values' do
    it 'returns all the S3 regions and blank' do
      expect(S3RegionSiteSetting.values.map {|x| x[:value]}.sort).to eq(['', 'us-east-1', 'us-west-1', 'us-west-2', 'us-gov-west-1', 'eu-west-1', 'eu-central-1', 'ap-southeast-1', 'ap-southeast-2', 'ap-northeast-1', 'sa-east-1'].sort)
    it 'returns all the S3 regions' do
      expect(S3RegionSiteSetting.values.map {|x| x[:value]}.sort).to eq(['us-east-1', 'us-west-1', 'us-west-2', 'us-gov-west-1', 'eu-west-1', 'eu-central-1', 'ap-southeast-1', 'ap-southeast-2', 'ap-northeast-1', 'sa-east-1'].sort)
    end
  end