require_dependency 'url_helper'
module Jobs

  # Background job that finds hotlinked (remote) images in a post,
  # downloads them, stores them as local uploads, and rewrites the
  # post's raw markup to reference the local copies.
  class PullHotlinkedImages < Jobs::Base

    include UrlHelper

    def initialize
      # maximum allowed size of a downloaded image, in bytes
      @max_size = SiteSetting.max_image_size_kb.kilobytes
    end

    # Entry point called by the job runner.
    #
    # args:
    #   :post_id     - (required) id of the post to process
    #   :bypass_bump - when true, revising the post will not bump the topic
    #
    # Raises Discourse::InvalidParameters when :post_id is missing.
    def execute(args)
      return unless SiteSetting.download_remote_images_to_local?

      post_id = args[:post_id]
      raise Discourse::InvalidParameters.new(:post_id) unless post_id.present?

      post = Post.find_by(id: post_id)
      return unless post.present?

      raw = post.raw.dup
      # cache of remote src => local upload url, so each remote image is
      # downloaded at most once even when referenced several times
      downloaded_urls = {}

      extract_images_from(post.cooked).each do |image|
        src = image['src']
        # protocol-relative urls ("//example.com/a.png") need a scheme
        src = "http:" + src if src.start_with?("//")

        if is_valid_image_url(src)
          hotlinked = nil
          begin
            # have we already downloaded that file?
            unless downloaded_urls.include?(src)
              hotlinked = download(src)
              # BUGFIX: the original `hotlinked.try(:size) <= @max_size`
              # raised NoMethodError (nil <= Integer) whenever the
              # download failed; distinguish "failed" from "too big"
              if hotlinked.nil?
                Rails.logger.error("Failed to pull hotlinked image: #{src} - Download failed")
              elsif hotlinked.size <= @max_size
                filename = File.basename(URI.parse(src).path)
                file = ActionDispatch::Http::UploadedFile.new(tempfile: hotlinked, filename: filename)
                upload = Upload.create_for(post.user_id, file, hotlinked.size, src)
                downloaded_urls[src] = upload.url
              else
                Rails.logger.error("Failed to pull hotlinked image: #{src} - Image is bigger than #{@max_size}")
              end
            end

            # have we successfully downloaded that file?
            if downloaded_urls[src].present?
              url = downloaded_urls[src]
              # BUGFIX: escape *all* regexp metacharacters — the original
              # only handled "?", "." and "+", so urls containing "(",
              # ")", "*", "[", etc. produced broken patterns
              escaped_src = Regexp.escape(src)
              # there are 6 ways to insert an image in a post
              # HTML tag - <img src="http://...">
              raw.gsub!(/src=["']#{escaped_src}["']/i, "src='#{url}'")
              # BBCode tag - [img]http://...[/img]
              raw.gsub!(/\[img\]#{escaped_src}\[\/img\]/i, "[img]#{url}[/img]")
              # Markdown linked image - [![alt](http://...)](http://...)
              raw.gsub!(/\[!\[([^\]]*)\]\(#{escaped_src}\)\]/) { "[<img src='#{url}' alt='#{$1}'>]" }
              # Markdown inline - ![alt](http://...)
              raw.gsub!(/!\[([^\]]*)\]\(#{escaped_src}\)/) { "![#{$1}](#{url})" }
              # Markdown reference - [x]: http://
              raw.gsub!(/\[(\d+)\]: #{escaped_src}/) { "[#{$1}]: #{url}" }
              # Direct link
              raw.gsub!(src, "<img src='#{url}'>")
            end
          rescue => e
            Rails.logger.error("Failed to pull hotlinked image: #{src}\n" + e.message + "\n" + e.backtrace.join("\n"))
          ensure
            # close & delete the temp file returned by download (a Tempfile)
            hotlinked && hotlinked.close!
          end
        end
      end

      # TODO: make sure the post hasn't changed while we were downloading remote images
      if raw != post.raw
        options = { edit_reason: I18n.t("upload.edit_reason") }
        options[:bypass_bump] = true if args[:bypass_bump] == true
        post.revise(Discourse.system_user, raw, options)
      end
    end

    # Returns the <img> nodes from the cooked HTML, excluding onebox
    # previews and avatars (those are never pulled locally).
    def extract_images_from(html)
      doc = Nokogiri::HTML::fragment(html)
      doc.css("img") - doc.css(".onebox-result img") - doc.css("img.avatar")
    end

    # A url is worth pulling when it is present and not already one of
    # our own uploads.
    def is_valid_image_url(src)
      src.present? && !Discourse.store.has_been_uploaded?(src)
    end

    # Downloads `url` into a fresh Tempfile (keeping the original file
    # extension) and returns it, or nil when downloads are disabled or
    # the url is not a genuine http(s) url. The caller owns (and must
    # close!) the returned Tempfile.
    def download(url)
      return if @max_size <= 0

      uri = URI.parse(url)
      # SECURITY: Kernel#open spawns a subprocess when its argument
      # starts with "|"; `url` comes from user post content, so only
      # allow real http/https urls (URI::HTTPS < URI::HTTP)
      return unless uri.is_a?(URI::HTTP)

      extension = File.extname(uri.path)
      tmp = Tempfile.new(["discourse-hotlinked", extension])

      File.open(tmp.path, "wb") do |f|
        hotlinked = open(url, "rb", read_timeout: 5)
        # stop copying once the destination exceeds the size limit; the
        # caller re-checks the final size before uploading
        while f.size <= @max_size && data = hotlinked.read(@max_size)
          f.write(data)
        end
        # BUGFIX: open-uri returns a StringIO (which has no #close!) for
        # small responses, so only unlink when the object supports it
        hotlinked.close
        hotlinked.unlink if hotlinked.respond_to?(:unlink)
      end

      tmp
    end

  end

end
|