# frozen_string_literal: true
module Jobs

  # Background job that finds hotlinked (externally hosted) images in a
  # post's cooked HTML, downloads them, re-uploads them locally, and
  # rewrites the post's raw markup to reference the local uploads.
  #
  # Results are tracked in post custom fields:
  #   - Post::LARGE_IMAGES      images too big to mirror locally
  #   - Post::BROKEN_IMAGES     images that failed to download
  #   - Post::DOWNLOADED_IMAGES map of normalized src -> upload id
  class PullHotlinkedImages < ::Jobs::Base

    sidekiq_options queue: 'low'

    def initialize
      # Maximum image size we are willing to mirror, in bytes.
      @max_size = SiteSetting.max_image_size_kb.kilobytes
    end

    # Downloads +src+ to a temp file via FileHelper, retrying up to 2 more
    # times (with a 1s pause) outside the test environment.
    #
    # Returns the downloaded Tempfile, or nil when every attempt failed.
    def download(src)
      downloaded = nil

      begin
        retries ||= 3

        downloaded = FileHelper.download(
          src,
          max_file_size: @max_size,
          # keep the partial file so the caller can detect oversized images
          retain_on_max_file_size_exceeded: true,
          tmp_file_name: "discourse-hotlinked",
          follow_redirect: true
        )
      rescue
        if (retries -= 1) > 0 && !Rails.env.test?
          sleep 1
          retry
        end
      end

      downloaded
    end

    # Job entry point.
    #
    # args - hash with :post_id (required).
    #
    # Raises Discourse::InvalidParameters when :post_id is blank; silently
    # returns when the post no longer exists.
    def execute(args)
      post_id = args[:post_id]
      raise Discourse::InvalidParameters.new(:post_id) unless post_id.present?

      post = Post.find_by(id: post_id)
      return unless post.present?

      raw = post.raw.dup
      # remember the original raw so we only revise if nobody edited meanwhile
      start_raw = raw.dup

      downloaded_urls = {}

      large_images = JSON.parse(post.custom_fields[Post::LARGE_IMAGES].presence || "[]")
      broken_images = JSON.parse(post.custom_fields[Post::BROKEN_IMAGES].presence || "[]")
      downloaded_images = JSON.parse(post.custom_fields[Post::DOWNLOADED_IMAGES].presence || "{}")

      has_new_large_image  = false
      has_new_broken_image = false
      has_downloaded_image = false

      extract_images_from(post.cooked).each do |node|
        src = original_src = node['src'] || node['href']
        # protocol-relative URLs get a scheme matching the site's setting
        src = "#{SiteSetting.force_https ? "https" : "http"}:#{src}" if src.start_with?("//")

        if should_download_image?(src, post)
          begin
            # have we already downloaded that file?
            schemeless_src = normalize_src(original_src)

            unless downloaded_images.include?(schemeless_src) || large_images.include?(schemeless_src) || broken_images.include?(schemeless_src)
              # secure-media-uploads endpoint prevents anonymous downloads, so we
              # need the presigned S3 URL here
              src = Upload.signed_url_from_secure_media_url(src) if Upload.secure_media_url?(src)

              if hotlinked = download(src)
                if File.size(hotlinked.path) <= @max_size
                  filename = File.basename(URI.parse(src).path)
                  filename << File.extname(hotlinked.path) unless filename["."]
                  upload = UploadCreator.new(hotlinked, filename, origin: src).create_for(post.user_id)

                  if upload.persisted?
                    downloaded_urls[src] = upload.url
                    downloaded_images[normalize_src(src)] = upload.id
                    has_downloaded_image = true
                  else
                    log(:info, "Failed to pull hotlinked image for post: #{post_id}: #{src} - #{upload.errors.full_messages.join("\n")}")
                  end
                else
                  # download succeeded but the file exceeds @max_size
                  large_images << normalize_src(original_src)
                  has_new_large_image = true
                end
              else
                broken_images << normalize_src(original_src)
                has_new_broken_image = true
              end
            end

            # have we successfully downloaded that file?
            if downloaded_urls[src].present?
              escaped_src = Regexp.escape(original_src)

              # Callback used by the InlineUploads matchers below: when the
              # matched src corresponds to the image we just mirrored,
              # substitute the upload placeholder and rewrite `raw`.
              replace_raw = ->(match, match_src, replacement, _index) {
                if normalize_src(src) == normalize_src(match_src)
                  replacement =
                    if replacement.include?(InlineUploads::PLACEHOLDER)
                      replacement.sub(InlineUploads::PLACEHOLDER, upload.short_url)
                    elsif replacement.include?(InlineUploads::PATH_PLACEHOLDER)
                      replacement.sub(InlineUploads::PATH_PLACEHOLDER, upload.short_path)
                    end

                  raw = raw.gsub(
                    match,
                    replacement
                  )
                end
              }

              # there are 6 ways to insert an image in a post
              # HTML tag - <img src="http://...">
              InlineUploads.match_img(raw, external_src: true, &replace_raw)

              # BBCode tag - [img]http://...[/img]
              InlineUploads.match_bbcode_img(raw, external_src: true, &replace_raw)

              # Markdown linked image - [![alt](http://...)](http://...)
              # Markdown inline - ![alt](http://...)
              # Markdown inline - ![](http://... "image title")
              # Markdown inline - ![alt](http://... "image title")
              InlineUploads.match_md_inline_img(raw, external_src: true, &replace_raw)

              # Direct link
              raw.gsub!(/^#{escaped_src}(\s?)$/) { "![](#{upload.short_url})#{$1}" }
            end
          rescue => e
            if Rails.env.test?
              raise e
            else
              log(:error, "Failed to pull hotlinked image (#{src}) post: #{post_id}\n" + e.message + "\n" + e.backtrace.join("\n"))
            end
          end
        end
      end

      large_images.uniq!
      broken_images.uniq!

      post.custom_fields[Post::LARGE_IMAGES] = large_images.to_json if large_images.present?
      post.custom_fields[Post::BROKEN_IMAGES] = broken_images.to_json if broken_images.present?
      post.custom_fields[Post::DOWNLOADED_IMAGES] = downloaded_images.to_json if downloaded_images.present?

      # only save custom fields if there are any
      post.save_custom_fields if large_images.present? || broken_images.present? || downloaded_images.present?

      post.reload

      if start_raw == post.raw && raw != post.raw
        # nobody edited the post while we worked; persist the rewritten raw
        changes = { raw: raw, edit_reason: I18n.t("upload.edit_reason") }
        post.revise(Discourse.system_user, changes, bypass_bump: true, skip_staff_log: true)
      elsif has_downloaded_image || has_new_large_image || has_new_broken_image
        post.trigger_post_process(
          bypass_bump: true,
          skip_pull_hotlinked_images: true # Avoid an infinite loop of job scheduling
        )
      end
    end

    # Returns the candidate image nodes from the cooked +html+:
    # <img src> and lightbox anchors, minus avatars and images already
    # wrapped in a lightbox.
    def extract_images_from(html)
      doc = Nokogiri::HTML5::fragment(html)

      doc.css("img[src], a.lightbox[href]") -
        doc.css("img.avatar") -
        doc.css(".lightbox img[src]")
    end

    # Decides whether +src+ should be downloaded for +post+.
    #
    # Returns false for blank/unparseable URLs, emoji, already-known local
    # uploads, and remote images when the site setting is disabled;
    # otherwise defers to SiteSetting.should_download_images?.
    def should_download_image?(src, post = nil)
      # make sure we actually have a url
      return false unless src.present?

      # If file is on the forum or CDN domain or already has the
      # secure media url
      if UrlHelper.is_local(src) || Upload.secure_media_url?(src)
        return false if src =~ /\/images\/emoji\//

        # Someone could hotlink a file from a different site on the same CDN,
        # so check whether we have it in this database
        #
        # if the upload already exists and is attached to a different post,
        # or the original_sha1 is missing meaning it was created before secure
        # media was enabled, then we definitely want to redownload again otherwise
        # we end up reusing existing uploads which may be linked to many posts
        # already.
        upload = Upload.consider_for_reuse(Upload.get_from_url(src), post)

        return !upload.present?
      end

      # Don't download non-local images unless site setting enabled
      return false unless SiteSetting.download_remote_images_to_local?

      # parse the src
      begin
        uri = URI.parse(src)
      rescue URI::Error
        return false
      end

      hostname = uri.hostname
      return false unless hostname

      # check the domains blacklist
      SiteSetting.should_download_images?(src)
    end

    # Logs +message+ at +log_level+, prefixed with the current multisite db
    # so entries can be attributed to the right site.
    def log(log_level, message)
      Rails.logger.public_send(
        log_level,
        "#{RailsMultisite::ConnectionManagement.current_db}: #{message}"
      )
    end

    private

    # Normalizes +src+ for deduplication: parses it leniently, normalizes
    # the URI, and strips the scheme so http/https variants compare equal.
    # Falls back to the raw string when parsing fails.
    def normalize_src(src)
      uri = Addressable::URI.heuristic_parse(src)
      uri.normalize!
      uri.scheme = nil
      uri.to_s
    rescue URI::Error, Addressable::URI::InvalidURIError
      src
    end

  end

end
|