Merge pull request #1038 from ZogStriP/keep-uploads-reverse-index-up-to-date

Keep uploads reverse index up to date
Sam 2013-06-18 00:43:31 -07:00
commit 6989851cd4
10 changed files with 62 additions and 16 deletions

.gitignore
View File

@@ -64,8 +64,6 @@ config/fog_credentials.yml
/public/uploads
/public/stylesheet-cache/*
/public/downloads
/public/images
# Scripts used for downloading/refreshing db
script/download_db

View File

@@ -38,9 +38,9 @@ class Upload < ActiveRecord::Base
def self.create_for(user_id, file)
# compute the sha
- sha = Digest::SHA1.file(file.tempfile).hexdigest
+ sha1 = Digest::SHA1.file(file.tempfile).hexdigest
# check if the file has already been uploaded
- upload = Upload.where(sha: sha).first
+ upload = Upload.where(sha1: sha1).first
# otherwise, create it
if upload.blank?
@@ -53,7 +53,7 @@ class Upload < ActiveRecord::Base
user_id: user_id,
original_filename: file.original_filename,
filesize: File.size(file.tempfile),
- sha: sha,
+ sha1: sha1,
width: width,
height: height,
url: ""
@@ -61,7 +61,7 @@ class Upload < ActiveRecord::Base
# make sure we're at the beginning of the file (FastImage is moving the pointer)
file.rewind
# store the file and update its url
- upload.url = Upload.store_file(file, sha, image_info, upload.id)
+ upload.url = Upload.store_file(file, sha1, image_info, upload.id)
# save the url
upload.save
end
@@ -69,9 +69,9 @@ class Upload < ActiveRecord::Base
upload
end
- def self.store_file(file, sha, image_info, upload_id)
- return S3.store_file(file, sha, image_info, upload_id) if SiteSetting.enable_s3_uploads?
- return LocalStore.store_file(file, sha, image_info, upload_id)
+ def self.store_file(file, sha1, image_info, upload_id)
+ return S3.store_file(file, sha1, image_info, upload_id) if SiteSetting.enable_s3_uploads?
+ return LocalStore.store_file(file, sha1, image_info, upload_id)
end
def self.uploaded_regex
@@ -105,11 +105,11 @@ end
# url :string(255) not null
# created_at :datetime not null
# updated_at :datetime not null
- # sha :string(255)
+ # sha1 :string(40)
#
# Indexes
#
- # index_uploads_on_sha (sha) UNIQUE
+ # index_uploads_on_sha1 (sha1) UNIQUE
# index_uploads_on_user_id (user_id)
#
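For context, a rough sketch of how the deduplication path behaves after the rename (the call site, user id and fixture path below are illustrative, not part of this diff): because sha1 carries a unique index, calling create_for twice with the same bytes should return the existing record instead of inserting a duplicate.

# Hypothetical usage; assumes an ActionDispatch::Http::UploadedFile-like object.
file = ActionDispatch::Http::UploadedFile.new(
  tempfile: File.open("spec/fixtures/images/logo.png"),
  filename: "logo.png"
)
first  = Upload.create_for(1, file)  # computes the sha1, stores the file, saves the row
second = Upload.create_for(1, file)  # Upload.where(sha1: sha1) finds the existing row
second.id == first.id                # => true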

View File

@@ -0,0 +1,15 @@
+ class RenameShaColumn < ActiveRecord::Migration
+ def up
+ remove_index :uploads, :sha
+ rename_column :uploads, :sha, :sha1
+ change_column :uploads, :sha1, :string, limit: 40
+ add_index :uploads, :sha1, unique: true
+ end
+ def down
+ remove_index :uploads, :sha1
+ change_column :uploads, :sha1, :string, limit: 255
+ rename_column :uploads, :sha1, :sha
+ add_index :uploads, :sha, unique: true
+ end
+ end
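The narrowed limit: 40 matches the length of a hex-encoded SHA-1 digest, which is exactly what create_for stores in the column. A quick check in plain Ruby (not part of the diff):

require 'digest/sha1'
# SHA-1 produces a 20-byte digest, i.e. 40 hex characters, so :string(40)
# is exactly wide enough and the old :string(255) was oversized.
Digest::SHA1.hexdigest("any content").length # => 40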

View File

@@ -36,6 +36,8 @@ class CookedPostProcessor
# retrieve the associated upload, if any
upload = get_upload_from_url(img['src'])
if upload.present?
+ # update reverse index
+ associate_to_post upload
# create a thumbnail
upload.create_thumbnail!
# optimize image
@@ -87,6 +89,13 @@ class CookedPostProcessor
end
end
+ def associate_to_post(upload)
+ return if PostUpload.where(post_id: @post.id, upload_id: upload.id).count > 0
+ PostUpload.create({ post_id: @post.id, upload_id: upload.id })
+ rescue ActiveRecord::RecordNotUnique
+ # do not care if it's already associated
+ end
def optimize_image(img)
return img["src"]
# 1) optimize using image_optim
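Both associate_to_post and the spec's @post.uploads call assume a PostUpload join model that is not part of this diff. A minimal sketch of the shape it presumably has (assumed, not shown in this change set):

# Assumed reverse index (not included in this diff): a join table between
# posts and uploads. A unique (post_id, upload_id) constraint would explain
# why associate_to_post rescues ActiveRecord::RecordNotUnique.
class PostUpload < ActiveRecord::Base
  belongs_to :post
  belongs_to :upload
end

class Post < ActiveRecord::Base
  has_many :post_uploads
  has_many :uploads, through: :post_uploads
end

With an association along those lines in place, @post.uploads.reload in the spec picks up the row created by associate_to_post.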

View File

@@ -1,6 +1,6 @@
module LocalStore
- def self.store_file(file, sha, image_info, upload_id)
+ def self.store_file(file, sha1, image_info, upload_id)
clean_name = Digest::SHA1.hexdigest("#{Time.now.to_s}#{file.original_filename}")[0,16] + ".#{image_info.type}"
url_root = "/uploads/#{RailsMultisite::ConnectionManagement.current_db}/#{upload_id}"
path = "#{Rails.root}/public#{url_root}"

View File

@@ -1,13 +1,13 @@
module S3
- def self.store_file(file, sha, image_info, upload_id)
+ def self.store_file(file, sha1, image_info, upload_id)
raise Discourse::SiteSettingMissing.new("s3_upload_bucket") if SiteSetting.s3_upload_bucket.blank?
raise Discourse::SiteSettingMissing.new("s3_access_key_id") if SiteSetting.s3_access_key_id.blank?
raise Discourse::SiteSettingMissing.new("s3_secret_access_key") if SiteSetting.s3_secret_access_key.blank?
@fog_loaded = require 'fog' unless @fog_loaded
- remote_filename = "#{upload_id}#{sha}.#{image_info.type}"
+ remote_filename = "#{upload_id}#{sha1}.#{image_info.type}"
options = S3.generate_options
directory = S3.get_or_create_directory(SiteSetting.s3_upload_bucket, options)
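For illustration (values made up, not taken from the diff), the remote filename is simply the upload id and the sha1 concatenated, followed by the extension FastImage reported:

# Illustrative values only.
upload_id = 42
sha1      = "0" * 40            # stand-in for a 40-character hex digest
type      = "png"               # image_info.type from FastImage
"#{upload_id}#{sha1}.#{type}"   # => "42" followed by the 40-char digest and ".png"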

View File

@@ -57,6 +57,23 @@ describe CookedPostProcessor do
end
+ context 'with uploaded images in the post' do
+ before do
+ @topic = Fabricate(:topic)
+ @post = Fabricate(:post_with_uploads, topic: @topic, user: @topic.user)
+ @cpp = CookedPostProcessor.new(@post)
+ @cpp.expects(:get_upload_from_url).returns(Fabricate(:upload))
+ @cpp.expects(:get_size).returns([100,200])
+ end
+ it "keeps reverse index up to date" do
+ @cpp.post_process_images
+ @post.uploads.reload
+ @post.uploads.count.should == 1
+ end
+ end
context 'with unsized images in the post' do
let(:user) { Fabricate(:user) }
let(:topic) { Fabricate(:topic, user: user) }

View File

@@ -40,6 +40,13 @@ Fabricator(:post_with_s3_image_url, from: :post) do
"
end
+ Fabricator(:post_with_uploads, from: :post) do
+ cooked "
+ <img src='/uploads/default/1/1234567890123456.jpg' height='100' width='100'>
+ "
+ end
Fabricator(:basic_reply, from: :post) do
user(:coding_horror)
reply_to_post_number 1

View File

@@ -4,5 +4,5 @@ Fabricator(:upload) do
filesize 1234
width 100
height 200
url "/uploads/default/123456789.jpg"
url "/uploads/default/1/1234567890123456.jpg"
end

View File

@@ -34,7 +34,7 @@ describe Upload do
upload.user_id.should == user_id
upload.original_filename.should == logo.original_filename
upload.filesize.should == File.size(logo.tempfile)
- upload.sha.should == Digest::SHA1.file(logo.tempfile).hexdigest
+ upload.sha1.should == Digest::SHA1.file(logo.tempfile).hexdigest
upload.width.should == 244
upload.height.should == 66
upload.url.should == url