# frozen_string_literal: true

require "file_store/local_store"

desc "Update each post with latest markdown"
task "posts:rebake" => :environment do
  ENV["RAILS_DB"] ? rebake_posts : rebake_posts_all_sites
end
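
# Example invocation (a sketch, assuming rake is run from the app root; set
# RAILS_DB to limit a multisite install to a single site):
#   rake posts:rebake
#   RAILS_DB=example rake posts:rebake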

task "posts:rebake_uncooked_posts" => :environment do
  # rebaking uncooked posts can very quickly saturate sidekiq
  # this provides an insurance policy so you can safely run and stop
  # this rake task without worrying about your sidekiq imploding
  Jobs.run_immediately!

  # don't lock per machine, we want to be able to run this from multiple consoles
  OptimizedImage.lock_per_machine = false

  ENV["RAILS_DB"] ? rebake_uncooked_posts : rebake_uncooked_posts_all_sites
end
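
# Example invocation (a sketch; Jobs.run_immediately! above makes the work run
# inline, so it is safe to start and stop this from several consoles at once):
#   rake posts:rebake_uncooked_posts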

def rebake_uncooked_posts_all_sites
  RailsMultisite::ConnectionManagement.each_connection { |db| rebake_uncooked_posts }
end

def rebake_uncooked_posts
  puts "Rebaking uncooked posts on #{RailsMultisite::ConnectionManagement.current_db}"
  uncooked = Post.where("baked_version <> ? or baked_version IS NULL", Post::BAKED_VERSION)

  rebaked = 0
  total = uncooked.count

  ids = uncooked.pluck(:id)
  # work randomly so you can run this job from lots of consoles if needed
  ids.shuffle!

  ids.each do |id|
    # may have been cooked in the interim
    post = uncooked.where(id: id).first

    rebake_post(post) if post
    print_status(rebaked += 1, total)
  end

  puts "", "#{rebaked} posts done!", ""
end

desc "Update each post with latest markdown and refresh oneboxes"
task "posts:refresh_oneboxes" => :environment do
  if ENV["RAILS_DB"]
    rebake_posts(invalidate_oneboxes: true)
  else
    rebake_posts_all_sites(invalidate_oneboxes: true)
  end
end

desc "Rebake all posts with a quote using a letter_avatar"
task "posts:fix_letter_avatars" => :environment do
  next unless SiteSetting.external_system_avatars_enabled

  search =
    Post.where("user_id <> -1").where(
      "raw LIKE '%/letter\_avatar/%' OR cooked LIKE '%/letter\_avatar/%'",
    )

  rebaked = 0
  total = search.count

  search.find_each do |post|
    rebake_post(post)
    print_status(rebaked += 1, total)
  end

  puts "", "#{rebaked} posts done!", ""
end

desc "Rebake all posts matching string/regex and optionally delay the loop"
task "posts:rebake_match", %i[pattern type delay] => [:environment] do |_, args|
  args.with_defaults(type: "string")
  pattern = args[:pattern]
  type = args[:type]&.downcase
  delay = args[:delay]&.to_i

  if !pattern
    puts "ERROR: Expecting rake posts:rebake_match[pattern,type,delay]"
    exit 1
  elsif delay && delay < 1
    puts "ERROR: delay parameter should be an integer greater than 0"
    exit 1
  elsif type != "string" && type != "regex"
    puts "ERROR: Expecting rake posts:rebake_match[pattern,type] where type is string or regex"
    exit 1
  end

  search = Post.raw_match(pattern, type)

  rebaked = 0
  total = search.count

  search.find_each do |post|
    rebake_post(post)
    print_status(rebaked += 1, total)
    sleep(delay) if delay
  end

  puts "", "#{rebaked} posts done!", ""
end
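
# Example invocation (a sketch; quoting keeps the brackets away from the shell,
# and the pattern here is only an illustration):
#   rake "posts:rebake_match[discourse.org,string,1]"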

def rebake_posts_all_sites(opts = {})
  RailsMultisite::ConnectionManagement.each_connection { |db| rebake_posts(opts) }
end

def rebake_posts(opts = {})
  puts "Rebaking post markdown for '#{RailsMultisite::ConnectionManagement.current_db}'"

  begin
    disable_system_edit_notifications = SiteSetting.disable_system_edit_notifications
    SiteSetting.disable_system_edit_notifications = true

    total = Post.count
    rebaked = 0
    batch = 1000
    Post.update_all("baked_version = NULL")

    (0..(total - 1).abs).step(batch) do |i|
      Post
        .order(id: :desc)
        .offset(i)
        .limit(batch)
        .each do |post|
          rebake_post(post, opts)
          print_status(rebaked += 1, total)
        end
    end
  ensure
    SiteSetting.disable_system_edit_notifications = disable_system_edit_notifications
  end

  puts "", "#{rebaked} posts done!", "-" * 50
end

def rebake_post(post, opts = {})
  opts[:priority] = :ultra_low if !opts[:priority]
  post.rebake!(**opts)
rescue => e
  puts "",
       "Failed to rebake (topic_id: #{post.topic_id}, post_id: #{post.id})",
       e,
       e.backtrace.join("\n")
end

def print_status(current, max)
  print "\r%9d / %d (%5.1f%%)" % [current, max, ((current.to_f / max.to_f) * 100).round(1)]
end

desc "Normalize all markdown so that <pre><code> blocks use backticks instead"
task "posts:normalize_code" => :environment do
  lang = ENV["CODE_LANG"] || ""
  require "import/normalize"

  puts "Normalizing"
  i = 0
  Post
    .where("raw like '%<pre>%<code>%'")
    .each do |p|
      normalized = Import::Normalize.normalize_code_blocks(p.raw, lang)
      if normalized != p.raw
        p.revise(Discourse.system_user, raw: normalized)
        putc "."
        i += 1
      end
    end

  puts
  puts "#{i} posts normalized!"
end
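
# Example invocation (a sketch; CODE_LANG is read by the task above and passed
# to Import::Normalize.normalize_code_blocks as the language for the new fences):
#   CODE_LANG=ruby rake posts:normalize_code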

def remap_posts(find, type, ignore_case, replace = "")
  ignore_case = ignore_case == "true"
  i = 0

  Post
    .raw_match(find, type)
    .find_each do |p|
      regex =
        case type
        when "string"
          Regexp.new(Regexp.escape(find), ignore_case)
        when "regex"
          Regexp.new(find, ignore_case)
        end

      new_raw = p.raw.gsub(regex, replace)

      if new_raw != p.raw
        begin
          p.revise(Discourse.system_user, { raw: new_raw }, bypass_bump: true, skip_revision: true)
          putc "."
          i += 1
        rescue StandardError
          puts "\nFailed to remap post (topic_id: #{p.topic_id}, post_id: #{p.id})\n"
        end
      end
    end

  i
end

desc "monitor rebaking progress for the current unbaked post count; Ctrl-C to exit"
task "posts:monitor_rebaking_progress", [:csv] => [:environment] do |_, args|
  if args[:csv]
    puts "utc_time_now,remaining_to_bake,baked_in_last_period,etc_in_days,sidekiq_enqueued,sidekiq_scheduled"
  end

  # remember last ID right now so the goal post isn't constantly moved by new posts being created
  last_id_as_of_now = Post.where(baked_version: nil).order("id desc").first&.id
  if last_id_as_of_now.nil?
    warn "no posts to bake; all done"
    exit
  end

  report_time_in_mins = 10
  window_size_in_hs = 6

  deltas = []
  last = nil

  while true
    now = Post.where("id <= ? and baked_version is null", last_id_as_of_now).count

    if last
      delta_now = last - now
      deltas.unshift delta_now

      deltas = deltas.take((window_size_in_hs * 60) / report_time_in_mins)
      average = deltas.reduce(:+).to_f / deltas.length.to_f / report_time_in_mins.to_f
      etc_days = sprintf("%.2f", (now.to_f / average) / 60.0 / 24.0)
    else
      last = now
      etc_days = 999 # fake initial value so that the column is 100% valid floats
    end

    s = Sidekiq::Stats.new

    if args[:csv]
      puts [Time.now.utc.iso8601, now, last - now, etc_days, s.enqueued, s.scheduled_size].join(",")
    else
      puts [
        Time.now.utc.iso8601,
        "unbaked old posts remaining: #{now}",
        "baked in last period: #{last - now}",
        "ETC based on #{window_size_in_hs}h avg: #{etc_days} days",
        "SK enqueued: #{s.enqueued}",
        "SK scheduled: #{s.scheduled_size}",
        "waiting #{report_time_in_mins}min",
      ].join(" - ")
    end

    last = now
    sleep report_time_in_mins * 60
  end
end
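
# Example invocation (a sketch; any csv argument switches to CSV output, which
# you could redirect to a file for later analysis):
#   rake "posts:monitor_rebaking_progress[csv]" > progress.csv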

desc "Remap all posts matching a specific string"
task "posts:remap", %i[find replace type ignore_case] => [:environment] do |_, args|
  require "highline/import"

  args.with_defaults(type: "string", ignore_case: "false")
  find = args[:find]
  replace = args[:replace]
  type = args[:type]&.downcase
  ignore_case = args[:ignore_case]&.downcase

  if !find
    puts "ERROR: Expecting rake posts:remap['find','replace']"
    exit 1
  elsif !replace
    puts "ERROR: Expecting rake posts:remap['find','replace']. Want to delete a word/string instead? Try rake posts:delete_word['word-to-delete']"
    exit 1
  elsif type != "string" && type != "regex"
    puts "ERROR: Expecting rake posts:remap['find','replace',type] where type is string or regex"
    exit 1
  elsif ignore_case != "true" && ignore_case != "false"
    puts "ERROR: Expecting rake posts:remap['find','replace',type,ignore_case] where ignore_case is true or false"
    exit 1
  else
    confirm_replace =
      ask(
        "Are you sure you want to replace all #{type} occurrences of '#{find}' with '#{replace}'? (Y/n)",
      )
    exit 1 unless (confirm_replace == "" || confirm_replace.downcase == "y")
  end

  puts "Remapping"
  total = remap_posts(find, type, ignore_case, replace)
  puts "", "#{total} posts remapped!", ""
end
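
# Example invocation (a sketch; the values are placeholders, and the task still
# asks for confirmation before touching anything):
#   rake "posts:remap[old-domain.com,new-domain.com]"
#   rake "posts:remap[foo,bar,regex,true]"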

desc "Delete all occurrences of a word/string"
task "posts:delete_word", %i[find type ignore_case] => [:environment] do |_, args|
  require "highline/import"

  args.with_defaults(type: "string", ignore_case: "false")
  find = args[:find]
  type = args[:type]&.downcase
  ignore_case = args[:ignore_case]&.downcase

  if !find
    puts "ERROR: Expecting rake posts:delete_word['word-to-delete']"
    exit 1
  elsif type != "string" && type != "regex"
    puts "ERROR: Expecting rake posts:delete_word[pattern,type] where type is string or regex"
    exit 1
  elsif ignore_case != "true" && ignore_case != "false"
    puts "ERROR: Expecting rake posts:delete_word[pattern,type,ignore_case] where ignore_case is true or false"
    exit 1
  else
    confirm_delete =
      ask("Are you sure you want to remove all #{type} occurrences of '#{find}'? (Y/n)")
    exit 1 unless (confirm_delete == "" || confirm_delete.downcase == "y")
  end

  puts "Processing"
  total = remap_posts(find, type, ignore_case)
  puts "", "#{total} posts updated!", ""
end
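
# Example invocation (a sketch; the word is a placeholder):
#   rake "posts:delete_word[some-word]"
#   rake "posts:delete_word[some-word,string,true]"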

desc "Delete all likes"
task "posts:delete_all_likes" => :environment do
  post_actions = PostAction.where(post_action_type_id: PostActionType.types[:like])

  likes_deleted = 0
  total = post_actions.count

  post_actions.each do |post_action|
    begin
      post_action.remove_act!(Discourse.system_user)
      print_status(likes_deleted += 1, total)
    rescue StandardError
      # skip
    end
  end

  UserStat.update_all(likes_given: 0, likes_received: 0) # clear user likes stats
  DirectoryItem.update_all(likes_given: 0, likes_received: 0) # clear user directory likes stats
  puts "", "#{likes_deleted} likes deleted!", ""
end

desc "Refreshes each post that was received via email"
task "posts:refresh_emails", [:topic_id] => [:environment] do |_, args|
  posts = Post.where.not(raw_email: nil).where(via_email: true)
  posts = posts.where(topic_id: args[:topic_id]) if args[:topic_id]

  updated = 0
  total = posts.count

  posts.find_each do |post|
    begin
      receiver = Email::Receiver.new(post.raw_email)

      body, elided = receiver.select_body
      body = receiver.add_attachments(body || "", post.user)
      body << Email::Receiver.elided_html(elided) if elided.present?

      post.revise(
        Discourse.system_user,
        { raw: body, cook_method: Post.cook_methods[:regular] },
        skip_revision: true,
        skip_validations: true,
        bypass_bump: true,
      )
    rescue StandardError
      puts "Failed to refresh post (topic_id: #{post.topic_id}, post_id: #{post.id})"
    end

    updated += 1
    print_status(updated, total)
  end

  puts "", "Done. #{updated} posts updated.", ""
end
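
# Example invocation (a sketch; the topic id is a placeholder, and omitting it
# refreshes every post that arrived via email):
#   rake "posts:refresh_emails[123]"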

desc "Reorders all posts based on their creation date"
task "posts:reorder_posts", [:topic_id] => [:environment] do |_, args|
  Post.transaction do
    builder = DB.build <<~SQL
      WITH ordered_posts AS (
        SELECT
          id,
          ROW_NUMBER() OVER (
            PARTITION BY
              topic_id
            ORDER BY
              created_at,
              post_number
          ) AS new_post_number
        FROM
          posts
        /*where*/
      )
      UPDATE
        posts AS p
      SET
        sort_order = o.new_post_number,
        post_number = p.post_number * -1
      FROM
        ordered_posts AS o
      WHERE
        p.id = o.id AND
        p.post_number <> o.new_post_number
    SQL

    builder.where("topic_id = ?", args[:topic_id]) if args[:topic_id]
    builder.exec

    [
      %w[notifications post_number],
      %w[post_timings post_number],
      %w[posts reply_to_post_number],
      %w[topic_users last_read_post_number],
      %w[topic_users last_emailed_post_number],
    ].each do |table, column|
      builder = DB.build <<~SQL
        UPDATE
          #{table} AS x
        SET
          #{column} = p.sort_order * -1
        FROM
          posts AS p
        /*where*/
      SQL

      builder.where("p.topic_id = ?", args[:topic_id]) if args[:topic_id]
      builder.where("p.post_number < 0")
      builder.where("x.topic_id = p.topic_id")
      builder.where("x.#{column} = ABS(p.post_number)")
      builder.exec

      DB.exec <<~SQL
        UPDATE
          #{table}
        SET
          #{column} = #{column} * -1
        WHERE
          #{column} < 0
      SQL
    end

    builder = DB.build <<~SQL
      UPDATE
        posts
      SET
        post_number = sort_order
      /*where*/
    SQL

    builder.where("topic_id = ?", args[:topic_id]) if args[:topic_id]
    builder.where("post_number < 0")
    builder.exec
  end

  puts "", "Done.", ""
end
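
# Example invocation (a sketch; the topic id is a placeholder, and omitting it
# reorders posts in every topic):
#   rake "posts:reorder_posts[123]"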

def missing_uploads
  puts "Looking for missing uploads on: #{RailsMultisite::ConnectionManagement.current_db}"

  old_scheme_upload_count = 0

  count_missing = 0

  missing =
    Post.find_missing_uploads(include_local_upload: true) do |post, src, path, sha1|
      next if sha1.present?
      puts "Fixing missing uploads: " if count_missing == 0
      count_missing += 1

      upload_id = nil

      # recovering old scheme upload.
      local_store = FileStore::LocalStore.new
      public_path = "#{local_store.public_dir}#{path}"
      file_path = nil

      if File.file?(public_path)
        file_path = public_path
      else
        tombstone_path = public_path.sub("/uploads/", "/uploads/tombstone/")
        file_path = tombstone_path if File.file?(tombstone_path)
      end

      if file_path.present?
        if (
             upload =
               UploadCreator.new(File.open(file_path), File.basename(path)).create_for(
                 Discourse.system_user.id,
               )
           ).persisted?
          upload_id = upload.id

          post.reload
          new_raw = post.raw.dup
          new_raw = new_raw.gsub(path, upload.url)

          PostRevisor.new(post, Topic.with_deleted.find_by(id: post.topic_id)).revise!(
            Discourse.system_user,
            { raw: new_raw },
            skip_validations: true,
            force_new_version: true,
            bypass_bump: true,
          )

          print "🆗"
        else
          print "❌"
        end
      else
        print "🚫"
        old_scheme_upload_count += 1
      end

      upload_id
    end

  puts "", "#{missing[:count]} post uploads are missing.", ""

  if missing[:count] > 0
    puts "#{missing[:uploads].count} uploads are missing."
    if old_scheme_upload_count > 0
      puts "#{old_scheme_upload_count} of #{missing[:uploads].count} are old scheme uploads."
    end
    puts "#{missing[:post_uploads].count} of #{Post.count} posts are affected.", ""

    if ENV["GIVE_UP"] == "1"
      missing[:post_uploads].each do |id, uploads|
        post = Post.with_deleted.find_by(id: id)
        if post
          puts "#{post.full_url} giving up on #{uploads.length} upload(s)"
          PostCustomField.create!(post_id: post.id, name: Post::MISSING_UPLOADS_IGNORED, value: "t")
        else
          puts "could not find post #{id}"
        end
      end
    end

    if ENV["VERBOSE"] == "1"
      puts "missing uploads!"
      missing[:uploads].each { |path| puts "#{path}" }

      if missing[:post_uploads].count > 0
        puts
        puts "Posts with missing uploads"
        missing[:post_uploads].each do |id, uploads|
          post = Post.with_deleted.find_by(id: id)
          if post
            puts "#{post.full_url} missing #{uploads.join(", ")}"
          else
            puts "could not find post #{id}"
          end
        end
      end
    end
  end

  missing[:count] == 0
end

desc "Finds missing post upload records from cooked HTML content"
task "posts:missing_uploads" => :environment do |_, args|
  if ENV["RAILS_DB"]
    missing_uploads
  else
    RailsMultisite::ConnectionManagement.each_connection { missing_uploads }
  end
end
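
# Example invocation (a sketch; both variables are read by missing_uploads above,
# VERBOSE=1 lists every affected post and GIVE_UP=1 flags the posts as ignored):
#   VERBOSE=1 rake posts:missing_uploads
#   GIVE_UP=1 rake posts:missing_uploads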

def recover_uploads_from_index(path)
  lookup = []

  db = RailsMultisite::ConnectionManagement.current_db
  cdn_path = SiteSetting.cdn_path("/uploads/#{db}").sub(/https?:/, "")
  Post
    .where("cooked LIKE ?", "%#{cdn_path}%")
    .each do |post|
      regex = Regexp.new("((https?:)?#{Regexp.escape(cdn_path)}[^,;\\]\\>\\t\\n\\s)\"\']+)")
      uploads = []
      post.raw.scan(regex).each { |match| uploads << match[0] }

      if uploads.length > 0
        lookup << [post.id, uploads]
      else
        print "."
        post.rebake!
      end
    end

  PostCustomField
    .where(name: Post::MISSING_UPLOADS)
    .pluck(:post_id, :value)
    .each do |post_id, uploads|
      uploads = JSON.parse(uploads)
      raw = Post.where(id: post_id).pick(:raw)
      uploads.map! do |upload|
        orig = upload
        if raw.scan(upload).length == 0
          upload = upload.sub(SiteSetting.Upload.s3_cdn_url, SiteSetting.Upload.s3_base_url)
        end
        if raw.scan(upload).length == 0
          upload = upload.sub(SiteSetting.Upload.s3_base_url, Discourse.base_url)
        end
        upload = upload.sub(Discourse.base_url + "/", "/") if raw.scan(upload).length == 0
        if raw.scan(upload).length == 0
          # last resort, try the sha
          sha = upload.split("/")[-1]
          sha = sha.split(".")[0]

          if sha.length == 40 && raw.scan(sha).length == 1
            raw.match(Regexp.new("([^\"'<\\s\\n]+#{sha}[^\"'>\\s\\n]+)"))
            upload = $1
          end
        end
        if raw.scan(upload).length == 0
          puts "cannot find #{orig} in\n\n#{raw}"
          upload = nil
        end
        upload
      end
      uploads.compact!
      lookup << [post_id, uploads] if uploads.length > 0
    end

  lookup.each do |post_id, uploads|
    post = Post.find(post_id)
    changed = false

    uploads.each do |url|
      if (n = post.raw.scan(url).length) != 1
        puts "Skipping #{url} in #{post.full_url} because it appears #{n} times"
        next
      end

      name = File.basename(url).split("_")[0].split(".")[0]
      puts "Searching for #{url} (#{name}) in index"
      if name.length != 40
        puts "Skipping #{url} in #{post.full_url} because it appears to have a short file name"
        next
      end
      found =
        begin
          `cat #{path} | grep #{name} | grep original`.split("\n")[0]
        rescue StandardError
          nil
        end
      if found.blank?
        puts "Skipping #{url} in #{post.full_url} because it is missing from the index"
        next
      end

      found = File.expand_path(File.join(File.dirname(path), found))
      if !File.exist?(found)
        puts "Skipping #{url} in #{post.full_url} because it is missing from disk"
        next
      end

      File.open(found) do |f|
        begin
          upload = UploadCreator.new(f, "upload").create_for(post.user_id)
          if upload && upload.url
            post.raw = post.raw.sub(url, upload.url)
            changed = true
          else
            puts "Skipping #{url} in #{post.full_url}, unable to create upload (unknown error)"
            next
          end
        rescue Discourse::InvalidAccess
          puts "Skipping #{url} in #{post.full_url}, unable to create upload (bad format)"
          next
        end
      end
    end
    if changed
      puts "Recovered uploads on #{post.full_url}"
      post.save!(validate: false)
      post.rebake!
    end
  end
end

desc "Attempts to recover missing uploads from an index file"
task "posts:recover_uploads_from_index" => :environment do |_, args|
  path = File.expand_path(Rails.root + "public/uploads/all_the_files")
  if File.exist?(path)
    puts "Found existing index file at #{path}"
  else
    puts "Cannot find index #{path}; generating index, this could take a while..."
    `cd #{File.dirname(path)} && find -type f > #{path}`
  end
  if RailsMultisite::ConnectionManagement.current_db != "default"
    recover_uploads_from_index(path)
  else
    RailsMultisite::ConnectionManagement.each_connection { recover_uploads_from_index(path) }
  end
end

desc "Invalidate broken images"
task "posts:invalidate_broken_images" => :environment do
  puts "Invalidating broken images..."

  posts = Post.where("raw like '%<img%'")

  rebaked = 0
  total = posts.count

  posts.find_each do |p|
    rebake_post(p, invalidate_broken_images: true)
    print_status(rebaked += 1, total)
  end

  puts
  puts "", "#{rebaked} posts rebaked!"
end

desc "Converts full upload URLs in `Post#raw` to short upload URLs"
task "posts:inline_uploads" => :environment do |_, args|
  if ENV["RAILS_DB"]
    correct_inline_uploads
  else
    RailsMultisite::ConnectionManagement.each_connection do |db|
      puts "Correcting #{db}..."
      puts
      correct_inline_uploads
    end
  end
end
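
# Example invocation (a sketch; correct_inline_uploads below defaults to a dry
# run, DRY_RUN=false actually revises posts, and VERBOSE prints a cooked diff):
#   rake posts:inline_uploads
#   DRY_RUN=false rake posts:inline_uploads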

def correct_inline_uploads
  dry_run = (ENV["DRY_RUN"].nil? ? true : ENV["DRY_RUN"] != "false")
  verbose = ENV["VERBOSE"]

  scope =
    Post
      .joins(:upload_references)
      .distinct("posts.id")
      .where(
        "raw LIKE ?",
        "%/uploads/#{RailsMultisite::ConnectionManagement.current_db}/original/%",
      )

  affected_posts_count = scope.count
  fixed_count = 0
  not_corrected_post_ids = []
  failed_to_correct_post_ids = []

  scope.find_each do |post|
    if post.raw !~ Upload::URL_REGEX
      affected_posts_count -= 1
      next
    end

    begin
      new_raw = InlineUploads.process(post.raw)

      if post.raw != new_raw
        if !dry_run
          PostRevisor.new(post, Topic.with_deleted.find_by(id: post.topic_id)).revise!(
            Discourse.system_user,
            { raw: new_raw },
            skip_validations: true,
            force_new_version: true,
            bypass_bump: true,
          )
        end

        if verbose
          require "diffy"
          Diffy::Diff.default_format = :color
          puts "Cooked diff for Post #{post.id}"
          puts Diffy::Diff.new(PrettyText.cook(post.raw), PrettyText.cook(new_raw), context: 1)
          puts
        elsif dry_run
          putc "#"
        else
          putc "."
        end

        fixed_count += 1
      else
        putc "X"
        not_corrected_post_ids << post.id
      end
    rescue StandardError
      putc "!"
      failed_to_correct_post_ids << post.id
    end
  end

  puts
  puts "#{fixed_count} out of #{affected_posts_count} affected posts corrected"

  if not_corrected_post_ids.present?
    puts "Ids of posts that were not corrected: #{not_corrected_post_ids}"
  end

  if failed_to_correct_post_ids.present?
    puts "Ids of posts that encountered failures: #{failed_to_correct_post_ids}"
  end

  puts "Task was run in dry-run mode. Set `DRY_RUN=false` to revise affected posts" if dry_run
end