2019-05-03 06:17:27 +08:00
|
|
|
# frozen_string_literal: true
|
|
|
|
|
2014-07-03 04:50:44 +08:00
|
|
|
# Optionally load the bbcode-to-md converter. Only required when the importer
# is launched with the "bbcode-to-md" command-line flag (see use_bbcode_to_md?).
if ARGV.include?("bbcode-to-md")
  # Replace (most) bbcode with markdown before creating posts.
  # This will dramatically clean up the final posts in Discourse.
  #
  # In a temp dir:
  #
  # git clone https://github.com/nlalonde/ruby-bbcode-to-md.git
  # cd ruby-bbcode-to-md
  # gem build ruby-bbcode-to-md.gemspec
  # gem install ruby-bbcode-to-md-*.gem
  require "ruby-bbcode-to-md"
end
|
|
|
|
|
2015-01-31 22:42:39 +08:00
|
|
|
require_relative "../../config/environment"
|
2015-05-05 05:09:58 +08:00
|
|
|
require_relative "base/lookup_container"
|
|
|
|
require_relative "base/uploader"
|
2015-01-24 01:19:46 +08:00
|
|
|
|
2014-05-31 03:09:58 +08:00
|
|
|
# Namespace for all import scripts. Declared empty here so the constant exists
# before ImportScripts::Base is opened below.
module ImportScripts
end
|
|
|
|
|
|
|
|
class ImportScripts::Base
|
|
|
|
def initialize
  # Warm translation caches before any import work begins.
  preload_i18n

  # Shared helpers: imported-id lookups and file uploads.
  @lookup = ImportScripts::LookupContainer.new
  @uploader = ImportScripts::Uploader.new

  # Convert bbcode to markdown in create_post when the CLI flag was given.
  @bbcode_to_md = true if use_bbcode_to_md?

  # Site settings changed for the duration of the import, and their
  # pre-import values (restored by reset_site_settings).
  @site_settings_during_import = {}
  @old_site_settings = {}

  # Wall-clock anchors used for progress/elapsed reporting.
  @start_times = { import: Time.now }

  # When true, perform skips the post-import update_* maintenance passes.
  @skip_updates = false

  # Per-parent-category rotation cursor for category_color.
  @next_category_color_index = {}
end
# Trigger a throwaway translation and transliteration so both backends load
# their data eagerly, instead of lazily in the middle of the import.
def preload_i18n
  I18n.t("test")
  ActiveSupport::Inflector.transliterate("test")
end
# Run the full import: relax site settings, call the subclass's execute,
# then (unless @skip_updates) run the post-import maintenance passes.
# Site settings are always restored, even when execute raises.
def perform
  Rails.logger.level = 3 # :error, so that we don't create log files that are many GB

  change_site_settings
  execute

  puts ""

  unless @skip_updates
    # Post-import fixups: denormalized counters and timestamps.
    update_topic_status
    update_bumped_at
    update_last_posted_at
    update_last_seen_at
    update_user_stats
    update_topic_users
    update_post_timings
    update_feature_topic_users
    update_category_featured_topics
    reset_topic_counters
  end

  elapsed = Time.now - @start_times[:import]
  puts "", "", "Done (%02dh %02dmin %02dsec)" % [elapsed / 3600, elapsed / 60 % 60, elapsed % 60]
ensure
  # Roll back the import-time site settings and re-enable rate limiting.
  reset_site_settings
end
# Site-setting overrides applied for the duration of the import.
# Subclasses may override to add/change entries. Every key listed here is
# saved and restored by change_site_settings / reset_site_settings.
def get_site_settings_for_import
  {
    # Accept any email domain and minimal content lengths so legacy data
    # is not rejected by Discourse's validations.
    blocked_email_domains: "",
    min_topic_title_length: 1,
    min_post_length: 1,
    min_first_post_length: 1,
    min_personal_message_post_length: 1,
    min_personal_message_title_length: 1,
    allow_duplicate_topic_titles: true,
    allow_duplicate_topic_titles_category: false,
    # Never email real users while importing.
    disable_emails: "yes",
    # Generous upload limits (100 MB) and no extension filtering.
    max_attachment_size_kb: 102_400,
    max_image_size_kb: 102_400,
    authorized_extensions: "*",
    # Disable background cleanup jobs that could delete freshly imported
    # users/uploads before the import finishes.
    clean_up_inactive_users_after_days: 0,
    clean_up_unused_staged_users_after_days: 0,
    clean_up_uploads: false,
    clean_orphan_uploads_grace_period_hours: 1800,
  }
end
# Apply import-friendly site settings, remembering prior values so they can
# be restored afterwards, and disable rate limiting.
def change_site_settings
  # Leave bootstrap mode first: reset its side-effect defaults only if they
  # still hold their bootstrap values.
  if SiteSetting.bootstrap_mode_enabled
    SiteSetting.default_trust_level = TrustLevel[0] if SiteSetting.default_trust_level ==
      TrustLevel[1]
    SiteSetting.default_email_digest_frequency =
      10_080 if SiteSetting.default_email_digest_frequency == 1440
    SiteSetting.bootstrap_mode_enabled = false
  end

  @site_settings_during_import = get_site_settings_for_import

  # Snapshot each current value before overriding it.
  @site_settings_during_import.each do |key, value|
    @old_site_settings[key] = SiteSetting.get(key)
    SiteSetting.set(key, value)
  end

  # Some changes that should not be rolled back after the script is done
  if SiteSetting.purge_unactivated_users_grace_period_days > 0
    SiteSetting.purge_unactivated_users_grace_period_days = 60
  end
  SiteSetting.purge_deleted_uploads_grace_period_days = 90

  RateLimiter.disable
end
# Restore the site settings snapshotted by change_site_settings and
# re-enable rate limiting. A setting is only rolled back when its current
# value still equals what the import set it to — anything changed manually
# while the import was running is left alone.
def reset_site_settings
  @old_site_settings.each do |key, original_value|
    unchanged_since_import = SiteSetting.get(key) == @site_settings_during_import[key]
    SiteSetting.set(key, original_value) if unchanged_since_import
  end

  RateLimiter.enable
end
# True when the importer was launched with the "bbcode-to-md" CLI flag,
# which enables the bbcode-to-markdown conversion in create_post.
def use_bbcode_to_md?
  ARGV.any? { |arg| arg == "bbcode-to-md" }
end
# Implementation will do most of its work in its execute method.
# It will need to call create_users, create_categories, and create_posts.
def execute
  raise NotImplementedError
end
# All imported-id <-> Discourse-record lookup helpers live on @lookup
# (ImportScripts::LookupContainer); delegate them so subclasses can call
# them directly on the importer.
%i[
  add_category
  add_group
  add_post
  add_topic
  add_user
  category_id_from_imported_category_id
  find_group_by_import_id
  find_user_by_import_id
  group_id_from_imported_group_id
  post_already_imported?
  post_id_from_imported_post_id
  topic_lookup_from_imported_post_id
  user_already_imported?
  user_id_from_imported_user_id
].each { |method_name| delegate method_name, to: :@lookup }
# Create an admin account with a random password, granted TL4 and with its
# email pre-confirmed. opts may override :email and :username.
# Returns the persisted User. Note: save! must precede grant_admin! and the
# trust-level change, which operate on the persisted record.
def create_admin(opts = {})
  admin = User.new
  admin.email = opts[:email] || "sam.saffron@gmail.com"
  admin.username = opts[:username] || "sam"
  admin.password = SecureRandom.uuid
  admin.save!
  admin.grant_admin!
  admin.change_trust_level!(TrustLevel[4])
  admin.email_tokens.update_all(confirmed: true)
  admin
end
# Hook invoked by create_groups after each group is built.
# No-op by default; subclasses can override to post-process the group.
def created_group(group)
  # override if needed
end
# Iterate through a list of groups to be imported.
# Takes a collection and yields to the block for each element.
# Block should return a hash with the attributes for each element.
# Required fields are :id and :name, where :id is the id of the
# group in the original datasource. The given id will not be used
# to create the Discourse group record.
#
# Returns [created_count, skipped_count]. Failed groups are counted
# separately and reported on stdout but not returned.
def create_groups(results, opts = {})
  created = 0
  skipped = 0
  failed = 0
  total = opts[:total] || results.count

  results.each do |result|
    g = yield(result)

    # nil from the block, or an already-imported id, skips the group.
    if g.nil? || group_id_from_imported_group_id(g[:id])
      skipped += 1
    else
      new_group = create_group(g, g[:id])
      created_group(new_group)

      if new_group.valid?
        # Register the mapping from source id to the new Discourse group.
        add_group(g[:id].to_s, new_group)
        created += 1
      else
        failed += 1
        puts "Failed to create group id #{g[:id]} #{new_group.name}: #{new_group.errors.full_messages}"
      end
    end

    # opts[:offset] lets callers resume partway through and keep the
    # progress display accurate.
    print_status(
      created + skipped + failed + (opts[:offset] || 0),
      total,
      get_start_time("groups"),
    )
  end

  [created, skipped]
end
# Create a single Discourse group from imported attributes.
# The group name is suggested from :name (or :full_name); if a group with
# that name already exists and carries the same import_id, it is reused.
# Otherwise the existing record (if any) is updated, or a new one is built,
# tagged with import_id/import_name custom fields, and saved.
# Returns the group (check .valid? — save errors are left on the record).
def create_group(opts, import_id)
  attrs = opts.dup
  attrs.delete(:id)

  source_name = attrs[:name].presence || attrs[:full_name]
  attrs[:name] = UserNameSuggester.suggest(source_name)

  existing = Group.find_by(name: attrs[:name])
  return existing if existing && existing.custom_fields["import_id"].to_s == import_id.to_s

  group = existing || Group.new(attrs)
  group.custom_fields["import_id"] = import_id
  group.custom_fields["import_name"] = source_name

  group.save
  group
end
# Fast bulk check: have ALL of the given source ids for `type` (e.g. :post,
# :user) already been imported? Works by loading the ids into a TEMP table
# and joining it against the "<Type>CustomField" table's import_id rows.
# Returns true when every id is present; otherwise falsy.
def all_records_exist?(type, import_ids)
  return false if import_ids.empty?

  ActiveRecord::Base.transaction do
    begin
      # Raw PG connection: the temp table + join is much faster than an
      # ActiveRecord IN-list for large id sets.
      connection = ActiveRecord::Base.connection.raw_connection
      connection.exec("CREATE TEMP TABLE import_ids(val text PRIMARY KEY)")

      # Escape each id; ids are stored as text in custom fields.
      import_id_clause =
        import_ids.map { |id| "('#{PG::Connection.escape_string(id.to_s)}')" }.join(",")

      connection.exec("INSERT INTO import_ids VALUES #{import_id_clause}")

      # e.g. :post -> PostCustomField
      existing = "#{type.to_s.classify}CustomField".constantize
      existing = existing.where(name: "import_id").joins("JOIN import_ids ON val = value").count

      if existing == import_ids.length
        puts "Skipping #{import_ids.length} already imported #{type}"
        true
      end
    ensure
      # Drop the temp table even on failure so retries within the same
      # connection don't collide.
      connection.exec("DROP TABLE import_ids") unless connection.nil?
    end
  end
end
# Hook invoked by create_users after each user is built.
# No-op by default; subclasses can override to post-process the user.
def created_user(user)
  # override if needed
end
# Iterate through a list of user records to be imported.
# Takes a collection, and yields to the block for each element.
# Block should return a hash with the attributes for the User model.
# Required fields are :id and :email, where :id is the id of the
# user in the original datasource. The given id will not be used to
# create the Discourse user record.
#
# Returns [created_count, skipped_count]; failures are printed and
# counted separately.
def create_users(results, opts = {})
  created = 0
  skipped = 0
  failed = 0
  total = opts[:total] || results.count

  results.each do |result|
    u = yield(result)

    # block returns nil to skip a user
    if u.nil?
      skipped += 1
    else
      import_id = u[:id]

      # Already mapped from a previous run? Skip.
      if user_id_from_imported_user_id(import_id)
        skipped += 1
      else
        new_user = create_user(u, import_id)
        created_user(new_user)

        # Both the user and its profile must be valid before registering
        # the id mapping.
        if new_user && new_user.valid? && new_user.user_profile && new_user.user_profile.valid?
          add_user(import_id.to_s, new_user)
          created += 1
        else
          failed += 1
          puts "Failed to create user id: #{import_id}, username: #{new_user.try(:username)}, email: #{new_user.try(:email)}"
          if new_user.try(:errors)
            puts "user errors: #{new_user.errors.full_messages}"
            if new_user.try(:user_profile).try(:errors)
              puts "user_profile errors: #{new_user.user_profile.errors.full_messages}"
            end
          end
        end
      end
    end

    print_status(
      created + skipped + failed + (opts[:offset] || 0),
      total,
      get_start_time("users"),
    )
  end

  [created, skipped]
end
# Create (or merge into) a single Discourse user from imported attributes.
# Sanitizes username/email, fills defaults, tags the record with import_*
# custom fields, and suspends users whose email had to be faked.
# Returns the User; on validation failure the unsaved record is returned
# with its errors populated (callers check .valid?).
def create_user(opts, import_id)
  # Keep an untouched copy for error reporting; opts is mutated below.
  original_opts = opts.dup
  opts.delete(:id)
  merge = opts.delete(:merge)
  post_create_action = opts.delete(:post_create_action)

  # Reuse an existing account when merging is requested or when it was
  # created by this same import (matching import_id).
  existing = find_existing_user(opts[:email], opts[:username])
  if existing && (merge || existing.custom_fields["import_id"].to_s == import_id.to_s)
    return existing
  end

  # Profile fields are saved separately on user_profile after the user.
  bio_raw = opts.delete(:bio_raw)
  website = opts.delete(:website)
  location = opts.delete(:location)
  avatar_url = opts.delete(:avatar_url)

  original_username = opts[:username]
  original_name = opts[:name]
  original_email = opts[:email] = opts[:email].downcase

  # Replace invalid or taken usernames with a suggested one.
  if !UsernameValidator.new(opts[:username]).valid_format? ||
       !User.username_available?(opts[:username])
    opts[:username] = UserNameSuggester.suggest(
      opts[:username].presence || opts[:name].presence || opts[:email],
    )
  end

  # Invalid emails get a placeholder; such users are suspended below.
  if !EmailAddressValidator.valid_value?(opts[:email])
    opts[:email] = fake_email
    puts "Invalid email '#{original_email}' for '#{opts[:username]}'. Using '#{opts[:email]}'"
  end

  # If the username had to change and no display name exists, keep the
  # original username visible as the name.
  opts[:name] = original_username if original_name.blank? && opts[:username] != original_username

  opts[:trust_level] = TrustLevel[1] unless opts[:trust_level]
  opts[:active] = opts.fetch(:active, true)
  opts[:import_mode] = true
  opts[:last_emailed_at] = opts.fetch(:last_emailed_at, Time.now)

  # Normalize birth year to 1904 (a leap year) — only month/day are kept.
  if (date_of_birth = opts[:date_of_birth]).is_a?(Date) && date_of_birth.year != 1904
    opts[:date_of_birth] = Date.new(1904, date_of_birth.month, date_of_birth.day)
  end

  u = User.new(opts)
  (opts[:custom_fields] || {}).each { |k, v| u.custom_fields[k] = v }
  u.custom_fields["import_id"] = import_id
  u.custom_fields["import_username"] = original_username if original_username.present? &&
    original_username != opts[:username]
  u.custom_fields["import_avatar_url"] = avatar_url if avatar_url.present?
  u.custom_fields["import_pass"] = opts[:password] if opts[:password].present?
  u.custom_fields["import_email"] = original_email if original_email != opts[:email]

  begin
    User.transaction do
      u.save!
      if bio_raw.present? || website.present? || location.present?
        # Drop the website rather than fail the whole profile.
        if website.present?
          u.user_profile.website = website
          u.user_profile.website = nil unless u.user_profile.valid?
        end

        u.user_profile.bio_raw = bio_raw[0..2999] if bio_raw.present?
        u.user_profile.location = location if location.present?
        u.user_profile.save!
      end
    end

    u.activate if opts[:active] && opts[:password].present?
  rescue => e
    # try based on email
    if e.try(:record).try(:errors).try(:messages).try(:[], :primary_email).present?
      # Duplicate email: adopt the existing account instead.
      if existing = User.find_by_email(opts[:email].downcase)
        existing.created_at = opts[:created_at] if opts[:created_at]
        existing.custom_fields["import_id"] = import_id
        existing.save!
        u = existing
      end
    else
      puts "Error on record: #{original_opts.inspect}"
      raise e
    end
  end

  # Users whose email was replaced get suspended and have all email
  # notifications disabled, so the placeholder address is never mailed.
  if u.custom_fields["import_email"]
    u.suspended_at = Time.zone.at(Time.now)
    u.suspended_till = 200.years.from_now
    u.save!

    user_option = u.user_option
    user_option.email_digests = false
    user_option.email_level = UserOption.email_level_types[:never]
    user_option.email_messages_level = UserOption.email_level_types[:never]
    user_option.save!
    if u.save
      StaffActionLogger.new(Discourse.system_user).log_user_suspend(
        u,
        "Invalid email address on import",
      )
    else
      Rails.logger.error(
        "Failed to suspend user #{u.username}. #{u.errors.try(:full_messages).try(:inspect)}",
      )
    end
  end

  post_create_action.try(:call, u) if u.persisted?

  u # If there was an error creating the user, u.errors has the messages
end
# Find an already-existing user, matching by email first and falling back
# to an exact username match.
def find_existing_user(email, username)
  # Force the use of the index on the 'user_emails' table
  matched_by_email = UserEmail.where("lower(email) = ?", email.downcase).first&.user
  matched_by_email || User.where(username: username).first
end
# Hook invoked by create_categories after each category is built.
# No-op by default; subclasses can override to post-process the category.
def created_category(category)
  # override if needed
end
# Iterates through a collection to create categories.
# The block should return a hash with attributes for the new category.
# Required fields are :id and :name, where :id is the id of the
# category in the original datasource. The given id will not be used to
# create the Discourse category record.
# Optional attributes are position, description, and parent_category_id.
#
# Returns [created_count, skipped_count].
def create_categories(results)
  created = 0
  skipped = 0
  total = results.count

  results.each do |c|
    params = yield(c)

    # block returns nil to skip
    if params.nil? || category_id_from_imported_category_id(params[:id])
      skipped += 1
    else
      # Basic massaging on the category name
      params[:name] = "Blank" if params[:name].blank?
      params[:name].strip!
      params[:name] = params[:name][0..49]

      # make sure categories don't go more than 2 levels deep
      # (walks up the ancestor chain until nesting fits the site limit)
      if params[:parent_category_id]
        top = Category.find_by_id(params[:parent_category_id])
        top = top.parent_category while (top&.height_of_ancestors || -1) + 1 >=
          SiteSetting.max_category_nesting
        params[:parent_category_id] = top.id if top
      end

      new_category = create_category(params, params[:id])
      created_category(new_category)

      created += 1
    end

    print_status(created + skipped, total, get_start_time("categories"))
  end

  [created, skipped]
end
# Create a single category from imported attributes, or reuse an existing
# category with the same (case-insensitive) name under the same parent.
# Existing categories are tagged with the import_id; new categories get a
# rotated color, and :description becomes a revision of the "About" post.
# Returns the category.
def create_category(opts, import_id)
  existing =
    Category
      .where(parent_category_id: opts[:parent_category_id])
      .where("LOWER(name) = ?", opts[:name].downcase.strip)
      .first

  if existing
    # Adopt the existing category and register the id mapping.
    if import_id && existing.custom_fields["import_id"] != import_id
      existing.custom_fields["import_id"] = import_id
      existing.save!

      add_category(import_id, existing)
    end

    return existing
  end

  post_create_action = opts.delete(:post_create_action)

  new_category =
    Category.new(
      name: opts[:name],
      user_id: opts[:user_id] || opts[:user].try(:id) || Discourse::SYSTEM_USER_ID,
      position: opts[:position],
      parent_category_id: opts[:parent_category_id],
      color: opts[:color] || category_color(opts[:parent_category_id]),
      text_color: opts[:text_color] || "FFF",
      read_restricted: opts[:read_restricted] || false,
    )

  new_category.custom_fields["import_id"] = import_id if import_id
  new_category.save!

  # Use the imported description as the category's About-topic first post.
  # NOTE: `opts` is deliberately rebound here to the revise options; the
  # original imported attributes are no longer needed past this point.
  if opts[:description].present?
    changes = { raw: opts[:description] }
    opts = { skip_revision: true, skip_validations: true, bypass_bump: true }
    new_category.topic.first_post.revise(Discourse.system_user, changes, opts)
  end

  add_category(import_id, new_category)

  post_create_action.try(:call, new_category)

  new_category
end
# Return the next color from SiteSetting.category_colors for a category
# under the given parent, cycling through the palette per parent so sibling
# categories get distinct colors.
def category_color(parent_category_id)
  @category_colors ||= SiteSetting.category_colors.split("|")

  current = @next_category_color_index[parent_category_id].presence || 0

  # Advance the per-parent cursor, wrapping back to the first color.
  successor = current + 1
  successor = 0 if successor >= @category_colors.count
  @next_category_color_index[parent_category_id] = successor

  @category_colors[current]
end
# Hook invoked by create_posts after each post is built.
# No-op by default; subclasses can override to post-process the post.
def created_post(post)
  # override if needed
end
# Iterates through a collection of posts to be imported.
# It can create topics and replies.
# Attributes will be passed to the PostCreator.
# Topics should give attributes title and category.
# Replies should provide topic_id. Use topic_lookup_from_imported_post_id to find the topic.
#
# Returns [created_count, skipped_count]; creation errors are printed and
# counted as skipped.
def create_posts(results, opts = {})
  skipped = 0
  created = 0
  total = opts[:total] || results.count
  start_time = get_start_time("posts-#{total}") # the post count should be unique enough to differentiate between posts and PMs

  results.each do |r|
    params = yield(r)

    # block returns nil to skip a post
    if params.nil?
      skipped += 1
    else
      import_id = params.delete(:id).to_s

      if post_id_from_imported_post_id(import_id)
        skipped += 1
      else
        begin
          new_post = create_post(params, import_id)
          # create_post returns error messages (not a Post) on failure.
          if new_post.is_a?(Post)
            add_post(import_id, new_post)
            add_topic(new_post)
            created_post(new_post)
            created += 1
          else
            skipped += 1
            puts "Error creating post #{import_id}. Skipping."
            p new_post
          end
        rescue Discourse::InvalidAccess => e
          skipped += 1
          puts "InvalidAccess creating post #{import_id}. Topic is closed? #{e.message}"
        rescue => e
          skipped += 1
          puts "Exception while creating post #{import_id}. Skipping."
          puts e.message
          puts e.backtrace.join("\n")
        end
      end
    end

    print_status(created + skipped + (opts[:offset] || 0), total, start_time)
  end

  [created, skipped]
end
2016-07-28 00:38:23 +08:00
|
|
|
# Shared Guardian acting as the system user; passed to PostCreator so
# imported posts are not subject to per-user permission checks.
STAFF_GUARDIAN ||= Guardian.new(Discourse.system_user)
# Create a single post (topic starter or reply) via PostCreator.
# Tags the post with the import_id custom field; for new topics, records
# closed/archived/source-topic-id flags as meta_data for the later
# update_topic_status pass. Returns the Post on success, otherwise the
# PostCreator error messages array.
def create_post(opts, import_id)
  user = User.find(opts[:user_id])
  post_create_action = opts.delete(:post_create_action)
  opts = opts.merge(skip_validations: true)
  opts[:import_mode] = true
  opts[:custom_fields] ||= {}
  opts[:custom_fields]["import_id"] = import_id

  # No :topic_id means this post starts a new topic — stash topic-level
  # flags so post-import SQL can apply them (see update_topic_status).
  unless opts[:topic_id]
    opts[:meta_data] = meta_data = {}
    meta_data["import_closed"] = true if opts[:closed]
    meta_data["import_archived"] = true if opts[:archived]
    meta_data["import_topic_id"] = opts[:import_topic_id] if opts[:import_topic_id]
  end

  opts[:guardian] = STAFF_GUARDIAN
  # Optional bbcode conversion; falls back to the raw text on any error.
  if @bbcode_to_md
    opts[:raw] = begin
      opts[:raw].bbcode_to_md(false, {}, :disable, :quote)
    rescue StandardError
      opts[:raw]
    end
  end

  post_creator = PostCreator.new(user, opts)
  post = post_creator.create
  post_create_action.try(:call, post) if post
  post && post_creator.errors.empty? ? post : post_creator.errors.full_messages
end
# Delegate upload creation to the shared ImportScripts::Uploader instance.
def create_upload(user_id, path, source_filename)
  @uploader.create_upload(user_id, path, source_filename)
end
# Iterate through a list of bookmark records to be imported.
# Takes a collection, and yields to the block for each element.
# Block should return a hash with the attributes for the bookmark.
# Required fields are :user_id and :post_id, where both ids are
# the values in the original datasource.
#
# Returns [created_count, skipped_count].
def create_bookmarks(results, opts = {})
  created = 0
  skipped = 0
  total = opts[:total] || results.count

  # Reused shells: BookmarkManager only needs the ids, so a single unsaved
  # User/Post pair is recycled across iterations.
  user = User.new
  post = Post.new

  results.each do |result|
    params = yield(result)

    # only the IDs are needed, so this should be enough
    if params.nil?
      skipped += 1
    else
      user.id = user_id_from_imported_user_id(params[:user_id])
      post.id = post_id_from_imported_post_id(params[:post_id])

      if user.id.nil? || post.id.nil?
        skipped += 1
        puts "Skipping bookmark for user id #{params[:user_id]} and post id #{params[:post_id]}"
      else
        begin
          manager = BookmarkManager.new(user)
          # (return value intentionally ignored; outcome is read from
          # manager.errors below)
          manager.create_for(bookmarkable_id: post.id, bookmarkable_type: "Post")

          created += 1 if manager.errors.none?
          skipped += 1 if manager.errors.any?
        rescue StandardError
          # best-effort: a failed bookmark never aborts the import
          skipped += 1
        end
      end
    end

    print_status(created + skipped + (opts[:offset] || 0), total, get_start_time("bookmarks"))
  end

  [created, skipped]
end
# Iterate through a list of like records to be imported.
# Block should return a hash with :user_id and :post_id (source ids) and
# optionally :created_at. Returns [created_count, skipped_count].
def create_likes(results, opts = {})
  created = 0
  skipped = 0
  total = opts[:total] || results.count

  results.each do |result|
    params = yield(result)

    if params.nil?
      skipped += 1
    else
      # Resolve the source ids to real Discourse records.
      created_by = User.find_by(id: user_id_from_imported_user_id(params[:user_id]))
      post = Post.find_by(id: post_id_from_imported_post_id(params[:post_id]))

      if created_by && post
        PostActionCreator.create(created_by, post, :like, created_at: params[:created_at])
        created += 1
      else
        skipped += 1
        puts "Skipping like for user id #{params[:user_id]} and post id #{params[:post_id]}"
      end
    end

    print_status(created + skipped + (opts[:offset] || 0), total, get_start_time("likes"))
  end

  [created, skipped]
end
def close_inactive_topics(opts = {})
|
|
|
|
num_days = opts[:days] || 30
|
2015-01-24 05:44:00 +08:00
|
|
|
puts "", "Closing topics that have been inactive for more than #{num_days} days."
|
2014-06-04 22:37:43 +08:00
|
|
|
|
|
|
|
query = Topic.where("last_posted_at < ?", num_days.days.ago).where(closed: false)
|
|
|
|
total_count = query.count
|
|
|
|
closed_count = 0
|
|
|
|
|
|
|
|
query.find_each do |topic|
|
|
|
|
topic.update_status("closed", true, Discourse.system_user)
|
|
|
|
closed_count += 1
|
2018-05-28 17:02:19 +08:00
|
|
|
print_status(closed_count, total_count, get_start_time("close_inactive_topics"))
|
2014-06-04 22:37:43 +08:00
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2018-03-17 05:31:33 +08:00
|
|
|
  # Applies closed/archived state that importers recorded via the temporary
  # 'import_closed' / 'import_archived' topic custom fields, then deletes
  # those marker fields since they are only needed during import.
  def update_topic_status
    puts "", "Updating topic status"

    # Close every topic carrying the 'import_closed' marker.
    DB.exec <<~SQL
      UPDATE topics AS t
      SET closed = TRUE
      WHERE EXISTS(
          SELECT 1
          FROM topic_custom_fields AS f
          WHERE f.topic_id = t.id AND f.name = 'import_closed' AND f.value = 't'
      )
    SQL

    # Archive every topic carrying the 'import_archived' marker.
    DB.exec <<~SQL
      UPDATE topics AS t
      SET archived = TRUE
      WHERE EXISTS(
          SELECT 1
          FROM topic_custom_fields AS f
          WHERE f.topic_id = t.id AND f.name = 'import_archived' AND f.value = 't'
      )
    SQL

    # Clean up the markers now that they have been applied.
    DB.exec <<~SQL
      DELETE FROM topic_custom_fields
      WHERE name IN ('import_closed', 'import_archived')
    SQL
  end
|
|
|
|
|
2014-06-05 06:21:45 +08:00
|
|
|
  # Recomputes topics.bumped_at as the creation time of each topic's newest
  # regular post (Post.types[:regular] is interpolated into the SQL).
  # Topics with no regular posts keep their current bumped_at via COALESCE.
  def update_bumped_at
    puts "", "Updating bumped_at on topics"
    DB.exec <<~SQL
      UPDATE topics t
      SET bumped_at = COALESCE((SELECT MAX(created_at) FROM posts WHERE topic_id = t.id AND post_type = #{Post.types[:regular]}), bumped_at)
    SQL
  end
|
|
|
|
|
2014-09-09 01:36:55 +08:00
|
|
|
  # Sets users.last_posted_at to the creation time of each user's newest post.
  # The IS DISTINCT FROM guard (NULL-safe inequality) skips rows that are
  # already correct, avoiding unnecessary writes.
  def update_last_posted_at
    puts "", "Updating last posted at on users"

    DB.exec <<~SQL
      WITH lpa AS (
        SELECT user_id, MAX(posts.created_at) AS last_posted_at
        FROM posts
        GROUP BY user_id
      )
      UPDATE users
      SET last_posted_at = lpa.last_posted_at
      FROM users u1
      JOIN lpa ON lpa.user_id = u1.id
      WHERE u1.id = users.id
        AND users.last_posted_at IS DISTINCT FROM lpa.last_posted_at
    SQL
  end
|
|
|
|
|
2016-05-18 06:38:51 +08:00
|
|
|
  # Recomputes several user_stats columns directly from the imported posts and
  # topics: first_post_created_at, post_count, topic_count and
  # digest_attempted_at. Each UPDATE only touches rows whose value actually
  # changed (<> / IS DISTINCT FROM guards).
  def update_user_stats
    puts "", "Updating first_post_created_at..."

    # Earliest post per user becomes first_post_created_at.
    DB.exec <<~SQL
      WITH sub AS (
        SELECT user_id, MIN(posts.created_at) AS first_post_created_at
        FROM posts
        GROUP BY user_id
      )
      UPDATE user_stats
      SET first_post_created_at = sub.first_post_created_at
      FROM user_stats u1
      JOIN sub ON sub.user_id = u1.user_id
      WHERE u1.user_id = user_stats.user_id
        AND user_stats.first_post_created_at IS DISTINCT FROM sub.first_post_created_at
    SQL

    puts "", "Updating user post_count..."

    # Total posts per user.
    DB.exec <<~SQL
      WITH sub AS (
        SELECT user_id, COUNT(*) AS post_count
        FROM posts
        GROUP BY user_id
      )
      UPDATE user_stats
      SET post_count = sub.post_count
      FROM user_stats u1
      JOIN sub ON sub.user_id = u1.user_id
      WHERE u1.user_id = user_stats.user_id
        AND user_stats.post_count <> sub.post_count
    SQL

    puts "", "Updating user topic_count..."

    # Total topics per user.
    DB.exec <<~SQL
      WITH sub AS (
        SELECT user_id, COUNT(*) AS topic_count
        FROM topics
        GROUP BY user_id
      )
      UPDATE user_stats
      SET topic_count = sub.topic_count
      FROM user_stats u1
      JOIN sub ON sub.user_id = u1.user_id
      WHERE u1.user_id = user_stats.user_id
        AND user_stats.topic_count <> sub.topic_count
    SQL

    puts "", "Updating user digest_attempted_at..."

    # Backdates digest_attempted_at to a random moment in the past week where
    # it is unset — presumably to stagger digest emails for imported users
    # rather than attempting them all at once (NOTE(review): confirm intent).
    DB.exec(
      "UPDATE user_stats SET digest_attempted_at = now() - random() * interval '1 week' WHERE digest_attempted_at IS NULL",
    )
  end
|
|
|
|
|
2015-02-13 01:24:53 +08:00
|
|
|
  # scripts that are able to import last_seen_at from the source data should override this method
  def update_last_seen_at
    puts "", "Updating last seen at on users"

    # Fall back to the signup date when we know nothing else...
    DB.exec("UPDATE users SET last_seen_at = created_at WHERE last_seen_at IS NULL")
    # ...then prefer the last posting time for anyone who has posted. Note this
    # second statement overwrites last_seen_at even where it was already set —
    # deliberate here, since imports carry no real visit data (see comment above).
    DB.exec("UPDATE users SET last_seen_at = last_posted_at WHERE last_posted_at IS NOT NULL")
  end
|
|
|
|
|
2019-01-04 22:30:17 +08:00
|
|
|
  # Seeds topic_users rows for every real user (user_id > 0) who posted in a
  # topic, marking it as posted. Read progress is approximated from the user's
  # own posts: last_read_post_number is their highest post number and
  # total_msecs_viewed is 5 seconds per post. Existing rows are preserved
  # (ON CONFLICT DO NOTHING).
  def update_topic_users
    puts "", "Updating topic users"

    DB.exec <<~SQL
      INSERT INTO topic_users (user_id, topic_id, posted, last_read_post_number, first_visited_at, last_visited_at, total_msecs_viewed)
      SELECT user_id, topic_id, 't' , MAX(post_number), MIN(created_at), MAX(created_at), COUNT(id) * 5000
      FROM posts
      WHERE user_id > 0
      GROUP BY user_id, topic_id
      ON CONFLICT DO NOTHING
    SQL
  end
|
|
|
|
|
|
|
|
  # Seeds post_timings for every post written by a real user (user_id > 0),
  # crediting a flat 5 seconds of reading time per post. Existing timing rows
  # are preserved (ON CONFLICT DO NOTHING).
  def update_post_timings
    puts "", "Updating post timings"

    DB.exec <<~SQL
      INSERT INTO post_timings (topic_id, post_number, user_id, msecs)
      SELECT topic_id, post_number, user_id, 5000
      FROM posts
      WHERE user_id > 0
      ON CONFLICT DO NOTHING
    SQL
  end
|
|
|
|
|
2014-06-06 03:30:29 +08:00
|
|
|
  # Rebuilds the featured (frequent-poster) users shown on topic lists by
  # delegating to TopicFeaturedUsers.ensure_consistency!.
  def update_feature_topic_users
    puts "", "Updating featured topic users"
    TopicFeaturedUsers.ensure_consistency!
  end
|
|
|
|
|
2014-09-05 01:08:57 +08:00
|
|
|
  # Recalculates per-topic counters by delegating to Topic.reset_all_highest!
  # (needed after bulk-inserting posts outside the normal posting pipeline).
  def reset_topic_counters
    puts "", "Resetting topic counters"
    Topic.reset_all_highest!
  end
|
|
|
|
|
2014-07-04 02:43:24 +08:00
|
|
|
def update_category_featured_topics
|
2018-05-28 17:02:19 +08:00
|
|
|
puts "", "Updating featured topics in categories"
|
2014-08-22 16:11:12 +08:00
|
|
|
|
2018-05-28 17:02:19 +08:00
|
|
|
count = 0
|
|
|
|
total = Category.count
|
2014-08-22 16:11:12 +08:00
|
|
|
|
2014-07-04 02:43:24 +08:00
|
|
|
Category.find_each do |category|
|
|
|
|
CategoryFeaturedTopic.feature_topics_for(category)
|
2018-05-28 17:02:19 +08:00
|
|
|
print_status(count += 1, total, get_start_time("category_featured_topics"))
|
2014-07-04 02:43:24 +08:00
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2015-02-07 05:03:41 +08:00
|
|
|
def update_tl0
|
2018-05-28 17:02:19 +08:00
|
|
|
puts "", "Setting users with no posts to trust level 0"
|
2015-03-21 05:05:13 +08:00
|
|
|
|
2018-05-28 17:02:19 +08:00
|
|
|
count = 0
|
|
|
|
total = User.count
|
2015-03-21 05:05:13 +08:00
|
|
|
|
2016-07-06 22:58:43 +08:00
|
|
|
User
|
|
|
|
.includes(:user_stat)
|
|
|
|
.find_each do |user|
|
2015-04-17 23:34:20 +08:00
|
|
|
begin
|
2016-07-06 22:58:43 +08:00
|
|
|
user.update_columns(trust_level: 0) if user.trust_level > 0 && user.post_count == 0
|
2015-04-17 23:34:20 +08:00
|
|
|
rescue Discourse::InvalidAccess
|
2023-01-07 19:53:14 +08:00
|
|
|
end
|
2018-05-28 17:02:19 +08:00
|
|
|
print_status(count += 1, total, get_start_time("update_tl0"))
|
2015-04-17 23:34:20 +08:00
|
|
|
end
|
2015-02-07 05:03:41 +08:00
|
|
|
end
|
|
|
|
|
2015-12-03 23:12:06 +08:00
|
|
|
def update_user_signup_date_based_on_first_post
|
2018-05-28 17:02:19 +08:00
|
|
|
puts "", "Setting users' signup date based on the date of their first post"
|
2015-12-03 23:12:06 +08:00
|
|
|
|
2018-05-28 17:02:19 +08:00
|
|
|
count = 0
|
|
|
|
total = User.count
|
2015-12-03 23:12:06 +08:00
|
|
|
|
|
|
|
User.find_each do |user|
|
2018-05-28 17:02:19 +08:00
|
|
|
if first = user.posts.order("created_at ASC").first
|
2015-12-03 23:12:06 +08:00
|
|
|
user.created_at = first.created_at
|
|
|
|
user.save!
|
|
|
|
end
|
2018-05-28 17:02:19 +08:00
|
|
|
print_status(count += 1, total, get_start_time("user_signup"))
|
2015-12-03 23:12:06 +08:00
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2015-01-24 01:19:46 +08:00
|
|
|
  # Returns the markup for embedding an upload in a post; delegates to
  # ImportScripts::Uploader (see base/uploader.rb).
  def html_for_upload(upload, display_filename)
    @uploader.html_for_upload(upload, display_filename)
  end
|
|
|
|
|
|
|
|
  # Returns the markup for an inline image upload; delegates to
  # ImportScripts::Uploader.
  def embedded_image_html(upload)
    @uploader.embedded_image_html(upload)
  end
|
|
|
|
|
|
|
|
  # Returns the markup for a file attachment; delegates to
  # ImportScripts::Uploader.
  def attachment_html(upload, display_filename)
    @uploader.attachment_html(upload, display_filename)
  end
|
|
|
|
|
2015-05-05 05:09:58 +08:00
|
|
|
def print_status(current, max, start_time = nil)
|
|
|
|
if start_time.present?
|
|
|
|
elapsed_seconds = Time.now - start_time
|
|
|
|
elements_per_minute = "[%.0f items/min] " % [current / elapsed_seconds.to_f * 60]
|
|
|
|
else
|
|
|
|
elements_per_minute = ""
|
|
|
|
end
|
|
|
|
|
|
|
|
print "\r%9d / %d (%5.1f%%) %s" % [current, max, current / max.to_f * 100, elements_per_minute]
|
2014-05-31 03:09:58 +08:00
|
|
|
end
|
|
|
|
|
2015-03-31 00:29:48 +08:00
|
|
|
def print_spinner
|
|
|
|
@spinner_chars ||= %w[| / - \\]
|
|
|
|
@spinner_chars.push @spinner_chars.shift
|
|
|
|
print "\b#{@spinner_chars[0]}"
|
|
|
|
end
|
|
|
|
|
2015-05-05 05:09:58 +08:00
|
|
|
def get_start_time(key)
|
|
|
|
@start_times.fetch(key) { |k| @start_times[k] = Time.now }
|
|
|
|
end
|
|
|
|
|
2018-08-28 22:21:39 +08:00
|
|
|
def batches(batch_size = 1000)
|
2014-05-31 03:09:58 +08:00
|
|
|
offset = 0
|
|
|
|
loop do
|
|
|
|
yield offset
|
|
|
|
offset += batch_size
|
|
|
|
end
|
|
|
|
end
|
2018-10-22 17:12:40 +08:00
|
|
|
|
|
|
|
def fake_email
|
2019-05-31 04:02:10 +08:00
|
|
|
SecureRandom.hex << "@email.invalid"
|
2018-10-22 17:12:40 +08:00
|
|
|
end
|
2014-05-31 03:09:58 +08:00
|
|
|
end
|