2019-05-03 06:17:27 +08:00
|
|
|
# frozen_string_literal: true
|
|
|
|
|
2019-04-25 03:46:18 +08:00
|
|
|
require 'file_store/local_store'
|
|
|
|
|
desc 'Update each post with latest markdown'
task 'posts:rebake' => :environment do
  # RAILS_DB selects a single site; otherwise rebake every site in the cluster.
  if ENV['RAILS_DB']
    rebake_posts
  else
    rebake_posts_all_sites
  end
end
task 'posts:rebake_uncooked_posts' => :environment do
  # RAILS_DB selects a single site; otherwise process every site in the cluster.
  if ENV['RAILS_DB']
    rebake_uncooked_posts
  else
    rebake_uncooked_posts_all_sites
  end
end
# Runs rebake_uncooked_posts once per multisite connection.
def rebake_uncooked_posts_all_sites
  RailsMultisite::ConnectionManagement.each_connection do |_db|
    rebake_uncooked_posts
  end
end
# Rebakes every post whose baked_version is stale (or never set) on the
# current site, printing progress as it goes.
def rebake_uncooked_posts
  puts "Rebaking uncooked posts on #{RailsMultisite::ConnectionManagement.current_db}"

  stale = Post.where('baked_version <> ? or baked_version IS NULL', Post::BAKED_VERSION)

  done = 0
  total = stale.count

  stale.find_each do |post|
    rebake_post(post)
    print_status(done += 1, total)
  end

  puts "", "#{done} posts done!", ""
end
desc 'Update each post with latest markdown and refresh oneboxes'
task 'posts:refresh_oneboxes' => :environment do
  # Same as posts:rebake but forces onebox previews to be re-fetched.
  if ENV['RAILS_DB']
    rebake_posts(invalidate_oneboxes: true)
  else
    rebake_posts_all_sites(invalidate_oneboxes: true)
  end
end
desc 'Rebake all posts with a quote using a letter_avatar'
task 'posts:fix_letter_avatars' => :environment do
  # BUGFIX: `return` inside a rake task block raises LocalJumpError at
  # runtime (the block is a proc, not a method body); `next` is the correct
  # way to bail out of the task early.
  next unless SiteSetting.external_system_avatars_enabled

  # Posts by real users that reference a letter_avatar path in raw or cooked.
  search = Post.where("user_id <> -1")
    .where("raw LIKE '%/letter\_avatar/%' OR cooked LIKE '%/letter\_avatar/%'")

  rebaked = 0
  total = search.count

  search.find_each do |post|
    rebake_post(post)
    print_status(rebaked += 1, total)
  end

  puts "", "#{rebaked} posts done!", ""
end
desc 'Rebake all posts matching string/regex and optionally delay the loop'
task 'posts:rebake_match', [:pattern, :type, :delay] => [:environment] do |_, args|
  args.with_defaults(type: 'string')
  pattern = args[:pattern]
  type = args[:type]&.downcase
  delay = args[:delay]&.to_i

  # Validate arguments before touching any posts.
  if !pattern
    puts "ERROR: Expecting rake posts:rebake_match[pattern,type,delay]"
    exit 1
  elsif delay && delay < 1
    puts "ERROR: delay parameter should be an integer and greater than 0"
    exit 1
  elsif type != 'string' && type != 'regex'
    puts "ERROR: Expecting rake posts:rebake_match[pattern,type] where type is string or regex"
    exit 1
  end

  matches = Post.raw_match(pattern, type)

  rebaked = 0
  total = matches.count

  matches.find_each do |post|
    rebake_post(post)
    print_status(rebaked += 1, total)
    # Optional throttle between posts to limit load on a live site.
    sleep(delay) if delay
  end

  puts "", "#{rebaked} posts done!", ""
end
# Runs rebake_posts (with the given options) once per multisite connection.
def rebake_posts_all_sites(opts = {})
  RailsMultisite::ConnectionManagement.each_connection do |_db|
    rebake_posts(opts)
  end
end
# Rebakes every post on the current site in descending-id batches.
# Edit notifications are suppressed for the duration and restored afterwards
# (even on failure) via ensure.
def rebake_posts(opts = {})
  puts "Rebaking post markdown for '#{RailsMultisite::ConnectionManagement.current_db}'"

  begin
    previous_setting = SiteSetting.disable_edit_notifications
    SiteSetting.disable_edit_notifications = true

    total = Post.count
    rebaked = 0
    batch_size = 1000
    # Mark everything stale so rebake! treats every post as uncooked.
    Post.update_all('baked_version = NULL')

    (0..(total - 1).abs).step(batch_size) do |offset|
      Post.order(id: :desc).offset(offset).limit(batch_size).each do |post|
        rebake_post(post, opts)
        print_status(rebaked += 1, total)
      end
    end
  ensure
    SiteSetting.disable_edit_notifications = previous_setting
  end

  puts "", "#{rebaked} posts done!", "-" * 50
end
# Rebakes a single post at :ultra_low priority unless the caller specified
# one. Failures are reported but never abort the calling loop.
def rebake_post(post, opts = {})
  opts[:priority] ||= :ultra_low
  post.rebake!(opts)
rescue => e
  puts "", "Failed to rebake (topic_id: #{post.topic_id}, post_id: #{post.id})", e, e.backtrace.join("\n")
end
# Renders an in-place progress line, e.g. "\r       42 / 100 ( 42.0%)".
def print_status(current, max)
  percent = ((current.to_f / max.to_f) * 100).round(1)
  print format("\r%9d / %d (%5.1f%%)", current, max, percent)
end
desc 'normalize all markdown so <pre><code> is not used and instead backticks'
task 'posts:normalize_code' => :environment do
  # Optional default language for fenced blocks, e.g. CODE_LANG=ruby.
  lang = ENV['CODE_LANG'] || ''
  require 'import/normalize'

  puts "Normalizing"
  normalized_count = 0
  Post.where("raw like '%<pre>%<code>%'").each do |post|
    normalized = Import::Normalize.normalize_code_blocks(post.raw, lang)
    next if normalized == post.raw

    post.revise(Discourse.system_user, raw: normalized)
    putc "."
    normalized_count += 1
  end

  puts
  puts "#{normalized_count} posts normalized!"
end
# Rewrites every occurrence of `find` in matching posts' raw content.
#
# find        - the literal string or regex source to look for
# type        - 'string' (escaped literal match) or 'regex'
# ignore_case - 'true' for a case-insensitive match, anything else is exact
# replace     - replacement text (the default "" deletes the match)
#
# Returns the number of posts successfully revised.
def remap_posts(find, type, ignore_case, replace = "")
  ignore_case = ignore_case == 'true'
  i = 0

  # PERF: the pattern depends only on the arguments, so build it once
  # instead of rebuilding it inside the loop for every matching post.
  regex =
    case type
    when 'string' then
      Regexp.new(Regexp.escape(find), ignore_case)
    when 'regex' then
      Regexp.new(find, ignore_case)
    end

  Post.raw_match(find, type).find_each do |p|
    new_raw = p.raw.gsub(regex, replace)

    if new_raw != p.raw
      begin
        # Silent revision: no topic bump, no extra revision entry.
        p.revise(Discourse.system_user, { raw: new_raw }, bypass_bump: true, skip_revision: true)
        putc "."
        i += 1
      rescue
        puts "\nFailed to remap post (topic_id: #{p.topic_id}, post_id: #{p.id})\n"
      end
    end
  end

  i
end
desc 'Remap all posts matching specific string'
task 'posts:remap', [:find, :replace, :type, :ignore_case] => [:environment] do |_, args|
  require 'highline/import'

  args.with_defaults(type: 'string', ignore_case: 'false')
  find = args[:find]
  replace = args[:replace]
  type = args[:type]&.downcase
  ignore_case = args[:ignore_case]&.downcase

  # Validate arguments, then ask for interactive confirmation before
  # mutating any posts.
  if !find
    puts "ERROR: Expecting rake posts:remap['find','replace']"
    exit 1
  elsif !replace
    puts "ERROR: Expecting rake posts:remap['find','replace']. Want to delete a word/string instead? Try rake posts:delete_word['word-to-delete']"
    exit 1
  elsif type != 'string' && type != 'regex'
    puts "ERROR: Expecting rake posts:remap['find','replace',type] where type is string or regex"
    exit 1
  elsif ignore_case != 'true' && ignore_case != 'false'
    puts "ERROR: Expecting rake posts:remap['find','replace',type,ignore_case] where ignore_case is true or false"
    exit 1
  else
    confirm_replace = ask("Are you sure you want to replace all #{type} occurrences of '#{find}' with '#{replace}'? (Y/n)")
    exit 1 unless (confirm_replace == "" || confirm_replace.downcase == 'y')
  end

  puts "Remapping"
  total = remap_posts(find, type, ignore_case, replace)
  puts "", "#{total} posts remapped!", ""
end
desc 'Delete occurrence of a word/string'
task 'posts:delete_word', [:find, :type, :ignore_case] => [:environment] do |_, args|
  require 'highline/import'

  args.with_defaults(type: 'string', ignore_case: 'false')
  find = args[:find]
  type = args[:type]&.downcase
  ignore_case = args[:ignore_case]&.downcase

  # Validate arguments, then ask for interactive confirmation before
  # mutating any posts.
  if !find
    puts "ERROR: Expecting rake posts:delete_word['word-to-delete']"
    exit 1
  elsif type != 'string' && type != 'regex'
    puts "ERROR: Expecting rake posts:delete_word[pattern, type] where type is string or regex"
    exit 1
  elsif ignore_case != 'true' && ignore_case != 'false'
    puts "ERROR: Expecting rake posts:delete_word[pattern, type,ignore_case] where ignore_case is true or false"
    exit 1
  else
    confirm_delete = ask("Are you sure you want to remove all #{type} occurrences of '#{find}'? (Y/n)")
    exit 1 unless (confirm_delete == "" || confirm_delete.downcase == 'y')
  end

  puts "Processing"
  # Deleting a word is just a remap with the default empty replacement.
  total = remap_posts(find, type, ignore_case)
  puts "", "#{total} posts updated!", ""
end
desc 'Delete all likes'
task 'posts:delete_all_likes' => :environment do
  post_actions = PostAction.where(post_action_type_id: PostActionType.types[:like])

  likes_deleted = 0
  total = post_actions.count

  post_actions.each do |post_action|
    begin
      post_action.remove_act!(Discourse.system_user)
      print_status(likes_deleted += 1, total)
    rescue
      # skip
    end
  end

  # Reset cached like counters that the per-action removal does not touch.
  UserStat.update_all(likes_given: 0, likes_received: 0) # clear user likes stats
  DirectoryItem.update_all(likes_given: 0, likes_received: 0) # clear user directory likes stats
  puts "", "#{likes_deleted} likes deleted!", ""
end
desc 'Defer all flags'
task 'posts:defer_all_flags' => :environment do
  active_flags = FlagQuery.flagged_post_actions('active')

  flags_deferred = 0
  total = active_flags.count

  active_flags.each do |post_action|
    begin
      PostAction.defer_flags!(Post.find(post_action.post_id), Discourse.system_user)
      print_status(flags_deferred += 1, total)
    rescue
      # skip
    end
  end

  puts "", "#{flags_deferred} flags deferred!", ""
end
desc 'Refreshes each post that was received via email'
task 'posts:refresh_emails', [:topic_id] => [:environment] do |_, args|
  # Only posts that came in via email and still have the raw email stored.
  posts = Post.where.not(raw_email: nil).where(via_email: true)
  posts = posts.where(topic_id: args[:topic_id]) if args[:topic_id]

  updated = 0
  total = posts.count

  posts.find_each do |post|
    begin
      # Re-run the email pipeline against the stored raw email.
      receiver = Email::Receiver.new(post.raw_email)

      body, elided = receiver.select_body
      body = receiver.add_attachments(body || '', post.user)
      body << Email::Receiver.elided_html(elided) if elided.present?

      post.revise(Discourse.system_user, { raw: body, cook_method: Post.cook_methods[:regular] },
        skip_revision: true, skip_validations: true, bypass_bump: true)
    rescue
      puts "Failed to refresh post (topic_id: #{post.topic_id}, post_id: #{post.id})"
    end

    # NOTE: the counter deliberately advances on failures too, so the
    # progress display always reaches `total`.
    updated += 1
    print_status(updated, total)
  end

  puts "", "Done. #{updated} posts updated.", ""
end
desc 'Reorders all posts based on their creation_date'
task 'posts:reorder_posts', [:topic_id] => [:environment] do |_, args|
  Post.transaction do
    # update sort_order and flip post_number to prevent
    # unique constraint violations when updating post_number
    builder = DB.build(<<~SQL)
      WITH ordered_posts AS (
        SELECT
          id,
          ROW_NUMBER()
          OVER (
            PARTITION BY topic_id
            ORDER BY created_at, post_number ) AS new_post_number
        FROM posts
        /*where*/
      )
      UPDATE posts AS p
      SET sort_order = o.new_post_number,
        post_number = p.post_number * -1
      FROM ordered_posts AS o
      WHERE p.id = o.id AND
        p.post_number <> o.new_post_number
    SQL
    builder.where("topic_id = :topic_id") if args[:topic_id]
    builder.exec(topic_id: args[:topic_id])

    # Re-point notifications at the new post numbers (negated rows mark
    # the posts that moved).
    DB.exec(<<~SQL)
      UPDATE notifications AS x
      SET post_number = p.sort_order
      FROM posts AS p
      WHERE x.topic_id = p.topic_id AND
        x.post_number = ABS(p.post_number) AND
        p.post_number < 0
    SQL

    # post_timings has a unique constraint too, so flip first, then assign.
    DB.exec(<<~SQL)
      UPDATE post_timings AS x
      SET post_number = x.post_number * -1
      FROM posts AS p
      WHERE x.topic_id = p.topic_id AND
        x.post_number = ABS(p.post_number) AND
        p.post_number < 0;

      UPDATE post_timings AS t
      SET post_number = p.sort_order
      FROM posts AS p
      WHERE t.topic_id = p.topic_id AND
        t.post_number = p.post_number AND
        p.post_number < 0;
    SQL

    # Fix reply references between posts.
    DB.exec(<<~SQL)
      UPDATE posts AS x
      SET reply_to_post_number = p.sort_order
      FROM posts AS p
      WHERE x.topic_id = p.topic_id AND
        x.reply_to_post_number = ABS(p.post_number) AND
        p.post_number < 0;
    SQL

    # Fix per-user read/seen/emailed markers.
    DB.exec(<<~SQL)
      UPDATE topic_users AS x
      SET last_read_post_number = p.sort_order
      FROM posts AS p
      WHERE x.topic_id = p.topic_id AND
        x.last_read_post_number = ABS(p.post_number) AND
        p.post_number < 0;

      UPDATE topic_users AS x
      SET highest_seen_post_number = p.sort_order
      FROM posts AS p
      WHERE x.topic_id = p.topic_id AND
        x.highest_seen_post_number = ABS(p.post_number) AND
        p.post_number < 0;

      UPDATE topic_users AS x
      SET last_emailed_post_number = p.sort_order
      FROM posts AS p
      WHERE x.topic_id = p.topic_id AND
        x.last_emailed_post_number = ABS(p.post_number) AND
        p.post_number < 0;
    SQL

    # finally update the post_number
    DB.exec(<<~SQL)
      UPDATE posts
      SET post_number = sort_order
      WHERE post_number < 0
    SQL
  end

  puts "", "Done.", ""
end
# Scans the current site for posts that reference uploads that no longer
# exist, attempting to recover old-scheme local uploads from the public
# directory or the tombstone. Prints a per-upload status glyph and a
# summary; honours GIVE_UP=1 (mark affected posts as ignored) and
# VERBOSE=1 (list every missing upload and affected post).
#
# Returns true when no uploads are missing, false otherwise.
#
# BUGFIX: File.exists? is deprecated and removed in Ruby 3.2 — replaced
# with File.exist?.
def missing_uploads
  puts "Looking for missing uploads on: #{RailsMultisite::ConnectionManagement.current_db}"

  old_scheme_upload_count = 0
  count_missing = 0

  missing = Post.find_missing_uploads(include_local_upload: true) do |post, src, path, sha1|
    # A present sha1 means the upload record is recoverable elsewhere.
    next if sha1.present?

    puts "Fixing missing uploads: " if count_missing == 0
    count_missing += 1

    upload_id = nil

    # recovering old scheme upload.
    local_store = FileStore::LocalStore.new
    public_path = "#{local_store.public_dir}#{path}"
    file_path = nil

    if File.exist?(public_path)
      file_path = public_path
    else
      # Fall back to the tombstone copy of a deleted upload.
      tombstone_path = public_path.sub("/uploads/", "/uploads/tombstone/")
      file_path = tombstone_path if File.exist?(tombstone_path)
    end

    if file_path.present?
      if (upload = UploadCreator.new(File.open(file_path), File.basename(path)).create_for(Discourse.system_user.id)).persisted?
        upload_id = upload.id

        # Rewrite the post to point at the freshly created upload.
        post.reload
        new_raw = post.raw.dup
        new_raw = new_raw.gsub(path, upload.url)

        PostRevisor.new(post, Topic.with_deleted.find_by(id: post.topic_id)).revise!(
          Discourse.system_user,
          {
            raw: new_raw
          },
          skip_validations: true,
          force_new_version: true,
          bypass_bump: true
        )

        print "🆗"
      else
        print "❌"
      end
    else
      # Nothing on disk to recover from.
      print "🚫"
      old_scheme_upload_count += 1
    end

    upload_id
  end

  puts "", "#{missing[:count]} post uploads are missing.", ""

  if missing[:count] > 0
    puts "#{missing[:uploads].count} uploads are missing."
    puts "#{old_scheme_upload_count} of #{missing[:uploads].count} are old scheme uploads." if old_scheme_upload_count > 0
    puts "#{missing[:post_uploads].count} of #{Post.count} posts are affected.", ""

    if ENV['GIVE_UP'] == "1"
      # Flag each affected post so future scans skip it.
      missing[:post_uploads].each do |id, uploads|
        post = Post.with_deleted.find_by(id: id)
        if post
          puts "#{post.full_url} giving up on #{uploads.length} upload(s)"
          PostCustomField.create!(post_id: post.id, name: Post::MISSING_UPLOADS_IGNORED, value: "t")
        else
          puts "could not find post #{id}"
        end
      end
    end

    if ENV['VERBOSE'] == "1"
      puts "missing uploads!"
      missing[:uploads].each do |path|
        puts "#{path}"
      end

      if missing[:post_uploads].count > 0
        puts
        puts "Posts with missing uploads"
        missing[:post_uploads].each do |id, uploads|
          post = Post.with_deleted.find_by(id: id)
          if post
            puts "#{post.full_url} missing #{uploads.join(", ")}"
          else
            puts "could not find post #{id}"
          end
        end
      end
    end
  end

  missing[:count] == 0
end
desc 'Finds missing post upload records from cooked HTML content'
task 'posts:missing_uploads', [:single_site] => :environment do |_, args|
  # Pass "single_site" to restrict the scan to the current connection.
  if args[:single_site].to_s.downcase == "single_site"
    missing_uploads
  else
    RailsMultisite::ConnectionManagement.each_connection do
      missing_uploads
    end
  end
end
# Recovers missing uploads by matching upload URLs found in posts against a
# pre-built index file (one file path per line, relative to the index's
# directory). Posts whose raw no longer references the CDN path are simply
# rebaked; recovered uploads are substituted into raw and the post rebaked.
def recover_uploads_from_index(path)
  lookup = []

  db = RailsMultisite::ConnectionManagement.current_db
  cdn_path = SiteSetting.cdn_path("/uploads/#{db}").sub(/https?:/, "")
  # Collect upload URLs still present in raw; posts with none get a rebake.
  Post.where("cooked LIKE '%#{cdn_path}%'").each do |post|
    regex = Regexp.new("((https?:)?#{Regexp.escape(cdn_path)}[^,;\\]\\>\\t\\n\\s)\"\']+)")
    uploads = []
    post.raw.scan(regex).each do |match|
      uploads << match[0]
    end

    if uploads.length > 0
      lookup << [post.id, uploads]
    else
      print "."
      post.rebake!
    end
  end

  # Also consider posts previously flagged as having missing uploads,
  # normalizing each stored URL until it can be located in the post's raw.
  PostCustomField.where(name: Post::MISSING_UPLOADS).pluck(:post_id, :value).each do |post_id, uploads|
    uploads = JSON.parse(uploads)
    raw = Post.where(id: post_id).pluck(:raw).first
    uploads.map! do |upload|
      orig = upload
      if raw.scan(upload).length == 0
        upload = upload.sub(SiteSetting.Upload.s3_cdn_url, SiteSetting.Upload.s3_base_url)
      end
      if raw.scan(upload).length == 0
        upload = upload.sub(SiteSetting.Upload.s3_base_url, Discourse.base_url)
      end
      if raw.scan(upload).length == 0
        upload = upload.sub(Discourse.base_url + "/", "/")
      end
      if raw.scan(upload).length == 0
        # last resort, try for sha
        sha = upload.split("/")[-1]
        sha = sha.split(".")[0]

        if sha.length == 40 && raw.scan(sha).length == 1
          raw.match(Regexp.new("([^\"'<\\s\\n]+#{sha}[^\"'>\\s\\n]+)"))
          upload = $1
        end
      end
      if raw.scan(upload).length == 0
        puts "can not find #{orig} in\n\n#{raw}"
        upload = nil
      end
      upload
    end
    uploads.compact!
    if uploads.length > 0
      lookup << [post_id, uploads]
    end
  end

  lookup.each do |post_id, uploads|
    post = Post.find(post_id)
    changed = false

    uploads.each do |url|
      # Only substitute URLs that appear exactly once, to avoid ambiguity.
      if (n = post.raw.scan(url).length) != 1
        puts "Skipping #{url} in #{post.full_url} cause it appears #{n} times"
        next
      end

      name = File.basename(url).split("_")[0].split(".")[0]
      puts "Searching for #{url} (#{name}) in index"
      # Only 40-char sha1 file names are searchable in the index.
      if name.length != 40
        puts "Skipping #{url} in #{post.full_url} cause it appears to have a short file name"
        next
      end
      found = `cat #{path} | grep #{name} | grep original`.split("\n")[0] rescue nil
      if found.blank?
        puts "Skipping #{url} in #{post.full_url} cause it missing from index"
        next
      end

      # Index entries are relative to the index file's directory.
      found = File.expand_path(File.join(File.dirname(path), found))
      if !File.exist?(found)
        puts "Skipping #{url} in #{post.full_url} cause it missing from disk"
        next
      end

      File.open(found) do |file|
        begin
          upload = UploadCreator.new(file, "upload").create_for(post.user_id)
          if upload && upload.url
            post.raw = post.raw.sub(url, upload.url)
            changed = true
          else
            puts "Skipping #{url} in #{post.full_url} unable to create upload (unknown error)"
            next
          end
        rescue Discourse::InvalidAccess
          puts "Skipping #{url} in #{post.full_url} unable to create upload (bad format)"
          next
        end
      end
    end
    if changed
      puts "Recovered uploads on #{post.full_url}"
      post.save!(validate: false)
      post.rebake!
    end
  end
end
desc 'Attempts to recover missing uploads from an index file'
task 'posts:recover_uploads_from_index' => :environment do |_, args|
  path = File.expand_path(Rails.root + "public/uploads/all_the_files")
  if File.exist?(path)
    puts "Found existing index file at #{path}"
  else
    # Build the index once; subsequent runs reuse it.
    puts "Can not find index #{path} generating index this could take a while..."
    `cd #{File.dirname(path)} && find -type f > #{path}`
  end
  if RailsMultisite::ConnectionManagement.current_db != "default"
    recover_uploads_from_index(path)
  else
    RailsMultisite::ConnectionManagement.each_connection do
      recover_uploads_from_index(path)
    end
  end
end
desc 'invalidate broken images'
task 'posts:invalidate_broken_images' => :environment do
  puts "Invalidating broken images.."

  posts = Post.where("raw like '%<img%'")

  rebaked = 0
  total = posts.count

  posts.find_each do |p|
    rebake_post(p, invalidate_broken_images: true)
    print_status(rebaked += 1, total)
  end

  puts
  puts "", "#{rebaked} posts rebaked!"
end
# BUGFIX: user-visible `desc` typo ("Coverts" -> "Converts").
desc "Converts full upload URLs in `Post#raw` to short upload url"
task 'posts:inline_uploads' => :environment do |_, args|
  # RAILS_DB selects a single site; otherwise process every site in the cluster.
  if ENV['RAILS_DB']
    correct_inline_uploads
  else
    RailsMultisite::ConnectionManagement.each_connection do |db|
      puts "Correcting #{db}..."
      puts
      correct_inline_uploads
    end
  end
end
# Rewrites full upload URLs in Post#raw to short upload URLs via
# InlineUploads.process. Runs in dry-run mode by default (DRY_RUN=false to
# actually revise posts); VERBOSE=1 prints a cooked diff per post.
# Progress glyphs: "#" dry-run change, "." revised, "X" no correction
# produced, "!" exception raised.
def correct_inline_uploads
  dry_run = (ENV["DRY_RUN"].nil? ? true : ENV["DRY_RUN"] != "false")
  verbose = ENV["VERBOSE"]

  scope = Post.joins(:post_uploads).distinct("posts.id")
    .where(<<~SQL)
      raw LIKE '%/uploads/#{RailsMultisite::ConnectionManagement.current_db}/original/%'
    SQL

  affected_posts_count = scope.count
  fixed_count = 0
  not_corrected_post_ids = []
  failed_to_correct_post_ids = []

  scope.find_each do |post|
    # Exclude posts that merely matched the LIKE but contain no real
    # upload URL.
    if post.raw !~ Upload::URL_REGEX
      affected_posts_count -= 1
      next
    end

    begin
      new_raw = InlineUploads.process(post.raw)

      if post.raw != new_raw
        if !dry_run
          PostRevisor.new(post, Topic.with_deleted.find_by(id: post.topic_id))
            .revise!(
              Discourse.system_user,
              {
                raw: new_raw
              },
              skip_validations: true,
              force_new_version: true,
              bypass_bump: true
            )
        end

        if verbose
          require 'diffy'
          Diffy::Diff.default_format = :color
          puts "Cooked diff for Post #{post.id}"
          puts Diffy::Diff.new(PrettyText.cook(post.raw), PrettyText.cook(new_raw), context: 1)
          puts
        elsif dry_run
          putc "#"
        else
          putc "."
        end

        fixed_count += 1
      else
        putc "X"
        not_corrected_post_ids << post.id
      end
    rescue
      putc "!"
      failed_to_correct_post_ids << post.id
    end
  end

  puts
  puts "#{fixed_count} out of #{affected_posts_count} affected posts corrected"

  if not_corrected_post_ids.present?
    puts "Ids of posts that were not corrected: #{not_corrected_post_ids}"
  end

  if failed_to_correct_post_ids.present?
    puts "Ids of posts that encountered failures: #{failed_to_correct_post_ids}"
  end

  if dry_run
    puts "Task was ran in dry run mode. Set `DRY_RUN=false` to revise affected posts"
  end
end