2019-05-03 06:17:27 +08:00
|
|
|
# frozen_string_literal: true
|
|
|
|
|
2014-12-22 20:22:16 +08:00
|
|
|
require "mysql2"
|
2015-03-19 03:30:42 +08:00
|
|
|
require File.expand_path(File.dirname(__FILE__) + "/base.rb")
|
2015-01-19 22:00:55 +08:00
|
|
|
require "htmlentities"
|
2017-01-31 13:01:18 +08:00
|
|
|
# php-serialize is required to decode vBulletin's PHP-serialized columns
# (e.g. pmtext.touserarray in import_private_messages). It is not part of
# the default bundle, so fail fast with install instructions when missing.
begin
  require "php_serialize" # https://github.com/jqr/php-serialize
rescue LoadError
  puts
  puts "php_serialize not found."
  puts "Add to Gemfile, like this: "
  puts
  puts "echo gem \\'php-serialize\\' >> Gemfile"
  puts "bundle install"
  exit
end
|
|
|
|
|
|
|
|
# See https://meta.discourse.org/t/importing-from-vbulletin-4/54881
|
|
|
|
# Please update there if substantive changes are made!
|
2014-08-18 19:04:08 +08:00
|
|
|
|
|
|
|
class ImportScripts::VBulletin < ImportScripts::Base
|
2015-01-19 22:00:55 +08:00
|
|
|
# Number of rows fetched per SQL batch in the import loops below.
BATCH_SIZE = 1000

# CHANGE THESE BEFORE RUNNING THE IMPORTER
# All settings can also be supplied via environment variables; the
# literal here is only the fallback default.
DB_HOST ||= ENV["DB_HOST"] || "localhost"
DB_NAME ||= ENV["DB_NAME"] || "vbulletin"
DB_PW ||= ENV["DB_PW"] || ""
DB_USER ||= ENV["DB_USER"] || "root"
# Timezone used to interpret vBulletin's epoch timestamps (see parse_timestamp).
TIMEZONE ||= ENV["TIMEZONE"] || "America/Los_Angeles"
# Prefix of the vBulletin tables, e.g. "vb_thread".
TABLE_PREFIX ||= ENV["TABLE_PREFIX"] || "vb_"
# Local directory containing the vBulletin attachment files (used when
# attachments are stored on disk rather than in the filedata table).
ATTACHMENT_DIR ||= ENV["ATTACHMENT_DIR"] || "/path/to/your/attachment/folder"

# Echo the effective connection settings so a misconfiguration is visible
# immediately when the script starts.
puts "#{DB_USER}:#{DB_PW}@#{DB_HOST} wants #{DB_NAME}"
|
2014-08-18 19:04:08 +08:00
|
|
|
|
2014-12-22 20:22:16 +08:00
|
|
|
# Sets up importer state and opens the MySQL connection.
# On any connection/setup failure, prints diagnostics plus configuration
# help and exits the process.
def initialize
  # Convert vBulletin BBCode to Markdown during import (handled by Base).
  @bbcode_to_md = true

  super

  # old username -> new Discourse username; populated at the end of import_users.
  @usernames = {}

  @tz = TZInfo::Timezone.get(TIMEZONE)

  @htmlentities = HTMLEntities.new

  @client =
    Mysql2::Client.new(host: DB_HOST, username: DB_USER, password: DB_PW, database: DB_NAME)
rescue StandardError => e
  # NOTE: was `rescue Exception`, which also swallowed SystemExit and
  # signals; StandardError still covers Mysql2::Error and TZInfo errors.
  puts "=" * 50
  puts e.message
  puts <<~TEXT
    Cannot connect to database.

    Hostname: #{DB_HOST}
    Username: #{DB_USER}
    Password: #{DB_PW}
    database: #{DB_NAME}

    Edit the script or set these environment variables:

    export DB_HOST="localhost"
    export DB_NAME="vbulletin"
    export DB_PW=""
    export DB_USER="root"
    export TABLE_PREFIX="vb_"
    export ATTACHMENT_DIR="/path/to/your/attachment/folder"

    Exiting.
  TEXT
  exit
end
|
|
|
|
|
|
|
|
# Runs the full import pipeline. The steps are order-dependent:
# users/groups must exist before topics, topics before posts, and
# post-processing runs last.
def execute
  # Speeds up the firstpostid joins used by the topic import; the index
  # may already exist, in which case the failure is ignored.
  begin
    mysql_query("CREATE INDEX firstpostid_index ON #{TABLE_PREFIX}thread (firstpostid)")
  rescue StandardError
    nil
  end

  %i[
    import_groups
    import_users
    create_groups_membership
    import_categories
    import_topics
    import_posts
    import_private_messages
    import_attachments
    close_topics
    post_process_posts
    create_permalink_file
    suspend_users
  ].each { |step| send(step) }
end
|
|
|
|
|
2014-12-22 20:22:16 +08:00
|
|
|
# Imports vBulletin usergroups as Discourse groups, keyed by usergroupid.
def import_groups
  puts "", "importing groups..."

  rows = mysql_query(<<-SQL)
    SELECT usergroupid, title
    FROM #{TABLE_PREFIX}usergroup
    ORDER BY usergroupid
  SQL

  create_groups(rows) do |row|
    {
      id: row["usergroupid"],
      # titles may contain HTML entities; decode before use
      name: @htmlentities.decode(row["title"]).strip,
    }
  end
end
|
2014-08-18 19:04:08 +08:00
|
|
|
|
2018-06-13 02:41:21 +08:00
|
|
|
# Resolves an imported (vBulletin) username to its current Discourse
# username using the @usernames map built in import_users; returns the
# old name unchanged when no mapping exists.
def get_username_for_old_username(old_username)
  @usernames.fetch(old_username, old_username)
end
|
|
|
|
|
2014-12-22 20:22:16 +08:00
|
|
|
# Imports users in id-ordered batches. Stores the vBulletin password hash
# and salt ("hash:salt") for later credential migration, imports avatars
# and profile backgrounds via post_create_action, and finally builds the
# old-username -> new-username map used by get_username_for_old_username.
def import_users
  puts "", "importing users"

  user_count = mysql_query("SELECT COUNT(userid) count FROM #{TABLE_PREFIX}user").first["count"]

  last_user_id = -1

  batches(BATCH_SIZE) do |offset|
    users = mysql_query(<<-SQL).to_a
        SELECT userid
             , username
             , homepage
             , usertitle
             , usergroupid
             , joindate
             , email
             , password
             , salt
             , lastvisit
          FROM #{TABLE_PREFIX}user
         WHERE userid > #{last_user_id}
      ORDER BY userid
         LIMIT #{BATCH_SIZE}
    SQL

    break if users.empty?

    last_user_id = users[-1]["userid"]
    users.reject! { |u| @lookup.user_already_imported?(u["userid"]) }

    create_users(users, total: user_count, offset: offset) do |user|
      # Fall back to a fake address when the email is missing or invalid.
      email = user["email"].presence || fake_email
      email = fake_email if !EmailAddressValidator.valid_value?(email)

      # "hash:salt" (or just the hash when there is no salt).
      password = [user["password"].presence, user["salt"].presence].compact.join(":")

      username = @htmlentities.decode(user["username"]).strip

      {
        id: user["userid"],
        name: username,
        username: username,
        password: password,
        email: email,
        merge: true,
        website: user["homepage"].strip,
        title: @htmlentities.decode(user["usertitle"]).strip,
        primary_group_id: group_id_from_imported_group_id(user["usergroupid"].to_i),
        created_at: parse_timestamp(user["joindate"]),
        # FIX: lastvisit was read here but missing from the SELECT above,
        # so last_seen_at was always nil; the column is now selected.
        last_seen_at: parse_timestamp(user["lastvisit"]),
        post_create_action:
          proc do |u|
            import_profile_picture(user, u)
            import_profile_background(user, u)
          end,
      }
    end
  end

  # old username -> current Discourse username, for later raw rewriting.
  @usernames =
    UserCustomField
      .joins(:user)
      .where(name: "import_username")
      .pluck("user_custom_fields.value", "users.username")
      .to_h
end
|
2014-08-18 19:04:08 +08:00
|
|
|
|
2016-12-05 20:11:59 +08:00
|
|
|
# Backfills group_users rows for imported groups: every user whose
# primary_group_id points at a (non-automatic, still empty) group is
# inserted as a member via one bulk INSERT per group.
def create_groups_membership
  puts "", "Creating groups membership..."

  Group.find_each do |group|
    begin
      next if group.automatic

      puts "\t#{group.name}"

      # Skip groups that already have members (supports resumed imports).
      next if GroupUser.where(group_id: group.id).count > 0

      user_ids_in_group = User.where(primary_group_id: group.id).pluck(:id).to_a
      next if user_ids_in_group.size == 0

      values =
        user_ids_in_group
          .map { |user_id| "(#{group.id}, #{user_id}, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)" }
          .join(",")

      DB.exec <<~SQL
        INSERT INTO group_users (group_id, user_id, created_at, updated_at) VALUES #{values}
      SQL
    rescue StandardError => e
      # NOTE: was `rescue Exception`, which also trapped SystemExit/Interrupt
      # and made the importer impossible to abort cleanly here.
      puts e.message
      puts e.backtrace.join("\n")
    end
  end

  Group.reset_all_counters!
end
|
|
|
|
|
2015-01-19 22:00:55 +08:00
|
|
|
# Imports a user's custom avatar from the vBulletin customavatar table
# (most recent row wins) and attaches it as the Discourse uploaded avatar.
def import_profile_picture(old_user, imported_user)
  query = mysql_query <<-SQL
      SELECT filedata, filename
        FROM #{TABLE_PREFIX}customavatar
       WHERE userid = #{old_user["userid"]}
    ORDER BY dateline DESC
       LIMIT 1
  SQL

  picture = query.first

  return if picture.nil?
  return if picture["filedata"].nil?

  file = Tempfile.new("profile-picture")
  # filedata is a binary blob; round-trip through ASCII-8BIT so the bytes
  # are written verbatim.
  file.write(picture["filedata"].encode("ASCII-8BIT").force_encoding("UTF-8"))
  file.rewind

  upload = UploadCreator.new(file, picture["filename"]).create_for(imported_user.id)

  return if !upload.persisted?

  imported_user.create_user_avatar
  imported_user.user_avatar.update(custom_upload_id: upload.id)
  imported_user.update(uploaded_avatar_id: upload.id)
ensure
  # file is nil when we returned before Tempfile.new; the rescues absorb that.
  begin
    file.close
  rescue StandardError
    nil
  end
  begin
    # FIX: was `file.unlind` (NoMethodError silently rescued), which leaked
    # one tempfile per avatar for the whole import run.
    file.unlink
  rescue StandardError
    nil
  end
end
|
|
|
|
|
|
|
|
# Imports a user's custom profile picture from the vBulletin
# customprofilepic table (most recent row wins) and stores it as the
# Discourse profile background.
def import_profile_background(old_user, imported_user)
  row = mysql_query(<<-SQL).first
      SELECT filedata, filename
        FROM #{TABLE_PREFIX}customprofilepic
       WHERE userid = #{old_user["userid"]}
    ORDER BY dateline DESC
       LIMIT 1
  SQL

  return if row.nil?
  return if row["filedata"].nil?

  # filedata is a binary blob; write the raw bytes out to a tempfile.
  file = Tempfile.new("profile-background")
  file.write(row["filedata"].encode("ASCII-8BIT").force_encoding("UTF-8"))
  file.rewind

  upload = UploadCreator.new(file, row["filename"]).create_for(imported_user.id)

  return unless upload.persisted?

  imported_user.user_profile.upload_profile_background(upload)
ensure
  # file is nil when we bailed out before Tempfile.new; the rescues absorb that.
  begin
    file.close
  rescue StandardError
    nil
  end
  begin
    file.unlink
  rescue StandardError
    nil
  end
end
|
|
|
|
|
2014-12-22 20:22:16 +08:00
|
|
|
# Imports the vBulletin forum tree as Discourse categories. Discourse only
# supports two category levels, so deeper forums are re-parented upward
# until their parent is a top-level forum.
def import_categories
  puts "", "importing top level categories..."

  categories =
    mysql_query(
      "SELECT forumid, title, description, displayorder, parentid FROM #{TABLE_PREFIX}forum ORDER BY forumid",
    ).to_a

  # vBulletin marks root forums with parentid = -1.
  top_level_categories = categories.select { |c| c["parentid"] == -1 }

  create_categories(top_level_categories) do |category|
    {
      id: category["forumid"],
      name: @htmlentities.decode(category["title"]).strip,
      position: category["displayorder"],
      description: @htmlentities.decode(category["description"]).strip,
    }
  end

  puts "", "importing children categories..."

  children_categories = categories.select { |c| c["parentid"] != -1 }
  top_level_category_ids = Set.new(top_level_categories.map { |c| c["forumid"] })

  # cut down the tree to only 2 levels of categories
  # NOTE(review): walks up via parentid until a root forum is reached;
  # assumes every parentid chain is well-formed — a dangling parentid
  # would make `detect` return nil and raise here.
  children_categories.each do |cc|
    while !top_level_category_ids.include?(cc["parentid"])
      cc["parentid"] = categories.detect { |c| c["forumid"] == cc["parentid"] }["parentid"]
    end
  end

  create_categories(children_categories) do |category|
    {
      id: category["forumid"],
      name: @htmlentities.decode(category["title"]).strip,
      position: category["displayorder"],
      description: @htmlentities.decode(category["description"]).strip,
      parent_category_id: category_id_from_imported_category_id(category["parentid"]),
    }
  end
end
|
2014-08-18 19:04:08 +08:00
|
|
|
|
2014-12-22 20:22:16 +08:00
|
|
|
# Imports threads as topics in id-ordered batches. Each topic's first post
# comes from the thread's firstpostid; imported topic ids are prefixed
# with "thread-" to keep them distinct from post ids. Also creates a
# "thread/<id>" permalink per imported topic.
def import_topics
  puts "", "importing topics..."

  topic_count =
    mysql_query("SELECT COUNT(threadid) count FROM #{TABLE_PREFIX}thread").first["count"]

  last_topic_id = -1

  batches(BATCH_SIZE) do |offset|
    topics = mysql_query(<<-SQL).to_a
        SELECT t.threadid threadid, t.title title, forumid, open, postuserid, t.dateline dateline, views, t.visible visible, sticky,
               p.pagetext raw
          FROM #{TABLE_PREFIX}thread t
          JOIN #{TABLE_PREFIX}post p ON p.postid = t.firstpostid
         WHERE t.threadid > #{last_topic_id}
      ORDER BY t.threadid
         LIMIT #{BATCH_SIZE}
    SQL

    break if topics.empty?

    # Keyset pagination: remember the highest id, then drop rows that a
    # previous (resumed) run already imported.
    last_topic_id = topics[-1]["threadid"]
    topics.reject! { |t| @lookup.post_already_imported?("thread-#{t["threadid"]}") }

    create_posts(topics, total: topic_count, offset: offset) do |topic|
      # Skip topics whose raw can't be preprocessed or comes out empty.
      raw =
        begin
          preprocess_post_raw(topic["raw"])
        rescue StandardError
          nil
        end
      next if raw.blank?
      topic_id = "thread-#{topic["threadid"]}"
      t = {
        id: topic_id,
        user_id: user_id_from_imported_user_id(topic["postuserid"]) || Discourse::SYSTEM_USER_ID,
        # Discourse titles are capped at 255 characters.
        title: @htmlentities.decode(topic["title"]).strip[0...255],
        category: category_id_from_imported_category_id(topic["forumid"]),
        raw: raw,
        created_at: parse_timestamp(topic["dateline"]),
        visible: topic["visible"].to_i == 1,
        views: topic["views"],
      }
      # Sticky threads become pinned topics (pinned since creation).
      t[:pinned_at] = t[:created_at] if topic["sticky"].to_i == 1
      t
    end

    # Add the following to permalink_normalizations for this to work:
    # /forum\/.*?\/(\d*)\-.*/thread/\1

    topics.each do |thread|
      topic_id = "thread-#{thread["threadid"]}"
      topic = topic_lookup_from_imported_post_id(topic_id)
      if topic.present?
        url_slug = "thread/#{thread["threadid"]}" if thread["title"].present?
        if url_slug.present? && topic[:topic_id].present?
          Permalink.create(url: url_slug, topic_id: topic[:topic_id].to_i)
        end
      end
    end
  end
end
|
2014-08-18 19:04:08 +08:00
|
|
|
|
2014-12-22 20:22:16 +08:00
|
|
|
# Imports all non-first posts (the first post of each thread was imported
# with the topic) in id-ordered batches, attaching each to its imported
# topic and wiring up reply-to relationships via parentid.
def import_posts
  puts "", "importing posts..."

  post_count = mysql_query(<<-SQL).first["count"]
    SELECT COUNT(postid) count
      FROM #{TABLE_PREFIX}post p
      JOIN #{TABLE_PREFIX}thread t ON t.threadid = p.threadid
     WHERE t.firstpostid <> p.postid
  SQL

  last_post_id = -1

  batches(BATCH_SIZE) do |offset|
    posts = mysql_query(<<-SQL).to_a
      SELECT p.postid, p.userid, p.threadid, p.pagetext raw, p.dateline, p.visible, p.parentid
        FROM #{TABLE_PREFIX}post p
        JOIN #{TABLE_PREFIX}thread t ON t.threadid = p.threadid
       WHERE t.firstpostid <> p.postid
         AND p.postid > #{last_post_id}
    ORDER BY p.postid
       LIMIT #{BATCH_SIZE}
    SQL

    break if posts.empty?

    # Keyset pagination + skip rows already imported by a previous run.
    last_post_id = posts[-1]["postid"]
    posts.reject! { |p| @lookup.post_already_imported?(p["postid"].to_i) }

    create_posts(posts, total: post_count, offset: offset) do |post|
      # Skip posts whose raw can't be preprocessed or comes out empty.
      raw =
        begin
          preprocess_post_raw(post["raw"])
        rescue StandardError
          nil
        end
      next if raw.blank?
      # Skip posts whose topic was not imported.
      next unless topic = topic_lookup_from_imported_post_id("thread-#{post["threadid"]}")
      p = {
        id: post["postid"],
        user_id: user_id_from_imported_user_id(post["userid"]) || Discourse::SYSTEM_USER_ID,
        topic_id: topic[:topic_id],
        raw: raw,
        created_at: parse_timestamp(post["dateline"]),
        # visible != 1 covers both deleted and moderated posts.
        hidden: post["visible"].to_i != 1,
      }
      if parent = topic_lookup_from_imported_post_id(post["parentid"])
        p[:reply_to_post_number] = parent[:post_number]
      end
      p
    end
  end
end
|
2014-08-18 19:04:08 +08:00
|
|
|
|
2015-05-15 19:26:53 +08:00
|
|
|
# find the uploaded file information from the db
#
# Locates the attachment record for attachment_id, reads the file either
# from ATTACHMENT_DIR or (when stored in the database) from the filedata
# blob, and creates a Discourse upload. Returns [upload, real_filename]
# or nil on any failure.
def find_upload(post, attachment_id)
  sql =
    "SELECT a.attachmentid attachment_id, a.userid user_id, a.filedataid file_id, a.filename filename,
            LENGTH(fd.filedata) AS dbsize, filedata, a.caption caption
       FROM #{TABLE_PREFIX}attachment a
       LEFT JOIN #{TABLE_PREFIX}filedata fd ON fd.filedataid = a.filedataid
      WHERE a.attachmentid = #{attachment_id}"
  results = mysql_query(sql)

  unless row = results.first
    puts "Couldn't find attachment record for post.id = #{post.id}, import_id = #{post.custom_fields["import_id"]}"
    return
  end

  # vBulletin stores disk attachments under nested per-digit directories
  # of the owner's userid, e.g. userid 123 -> 1/2/3/<filedataid>.attach
  filename =
    File.join(ATTACHMENT_DIR, row["user_id"].to_s.split("").join("/"), "#{row["file_id"]}.attach")
  real_filename = row["filename"]
  real_filename.prepend SecureRandom.hex if real_filename[0] == "."

  unless File.exist?(filename)
    if row["dbsize"].to_i == 0
      # FIX: the SELECT aliases a.filedataid as file_id, so the old
      # row["filedataid"] lookups here were always nil.
      puts "Attachment file #{row["file_id"]} doesn't exist"
      return nil
    end

    # The file lives in the database; materialize it to a unique temp path
    # (previously row["filedataid"] was nil, so every attachment shared
    # the same "/tmp/attach_" file).
    tmpfile = "attach_" + row["file_id"].to_s
    filename = File.join("/tmp/", tmpfile)
    File.open(filename, "wb") { |f| f.write(row["filedata"]) }
  end

  upload = create_upload(post.user.id, filename, real_filename)

  if upload.nil? || !upload.valid?
    puts "Upload not valid :("
    puts upload.errors.inspect if upload
    return
  end

  [upload, real_filename]
rescue Mysql2::Error => e
  puts "SQL Error"
  puts e.message
  puts sql
end
|
|
|
|
|
2016-07-10 17:19:24 +08:00
|
|
|
# Imports private messages from pmtext as private-message topics. vBulletin
# PMs are flat, so replies are reattached to their original topic
# heuristically: a "Re: ..." title is matched (with a sliding prefix of
# 3..8 chars to cover "Re:", "Re: Re:" etc.) against earlier first posts
# that had the same participant set.
def import_private_messages
  puts "", "importing private messages..."

  topic_count =
    mysql_query("SELECT COUNT(pmtextid) count FROM #{TABLE_PREFIX}pmtext").first["count"]

  last_private_message_id = -1

  batches(BATCH_SIZE) do |offset|
    private_messages = mysql_query(<<-SQL).to_a
      SELECT pmtextid, fromuserid, title, message, touserarray, dateline
        FROM #{TABLE_PREFIX}pmtext
       WHERE pmtextid > #{last_private_message_id}
    ORDER BY pmtextid
       LIMIT #{BATCH_SIZE}
    SQL

    break if private_messages.empty?

    # Keyset pagination + skip PMs already imported by a previous run.
    last_private_message_id = private_messages[-1]["pmtextid"]
    private_messages.reject! { |pm| @lookup.post_already_imported?("pm-#{pm["pmtextid"]}") }

    # [title, sorted participant ids] -> pmtextid of the first post of that
    # conversation; used to thread "Re:" replies. NOTE(review): this map is
    # rebuilt per batch, so replies can only be threaded to a first post in
    # the same batch.
    title_username_of_pm_first_post = {}

    create_posts(private_messages, total: topic_count, offset: offset) do |m|
      skip = false
      mapped = {}

      mapped[:id] = "pm-#{m["pmtextid"]}"
      mapped[:user_id] = user_id_from_imported_user_id(m["fromuserid"]) ||
        Discourse::SYSTEM_USER_ID
      mapped[:raw] = begin
        preprocess_post_raw(m["message"])
      rescue StandardError
        nil
      end
      mapped[:created_at] = Time.zone.at(m["dateline"])
      title = @htmlentities.decode(m["title"]).strip[0...255]
      topic_id = nil

      next if mapped[:raw].blank?

      # users who are part of this private message.
      target_usernames = []
      target_userids = []
      begin
        # touserarray is a PHP-serialized structure (php_serialize gem).
        to_user_array = PHP.unserialize(m["touserarray"])
      rescue StandardError
        puts "#{m["pmtextid"]} -- #{m["touserarray"]}"
        skip = true
      end

      begin
        to_user_array.each do |to_user|
          if to_user[0] == "cc" || to_user[0] == "bcc" # not sure if we should include bcc users
            to_user[1].each do |to_user_cc|
              user_id = user_id_from_imported_user_id(to_user_cc[0])
              username = User.find_by(id: user_id).try(:username)
              # NOTE(review): `<<` binds tighter than `||`, so this pushes
              # user_id even when it is nil; the `|| SYSTEM_USER_ID` part
              # never applies. The sort! rescue below papers over the nils.
              target_userids << user_id || Discourse::SYSTEM_USER_ID
              target_usernames << username if username
            end
          else
            user_id = user_id_from_imported_user_id(to_user[0])
            username = User.find_by(id: user_id).try(:username)
            target_userids << user_id || Discourse::SYSTEM_USER_ID
            target_usernames << username if username
          end
        end
      rescue StandardError
        puts "skipping pm-#{m["pmtextid"]} `to_user_array` is not properly serialized -- #{to_user_array.inspect}"
        skip = true
      end

      # Participant set (sender + recipients), sorted so it can serve as a
      # stable hash key for conversation matching.
      participants = target_userids
      participants << mapped[:user_id]
      begin
        participants.sort!
      rescue StandardError
        puts "one of the participant's id is nil -- #{participants.inspect}"
      end

      if title =~ /^Re:/
        # Try successively longer "Re:"-style prefixes (3..8 chars) to find
        # the first post of this conversation among the same participants.
        parent_id =
          title_username_of_pm_first_post[[title[3..-1], participants]] ||
            title_username_of_pm_first_post[[title[4..-1], participants]] ||
            title_username_of_pm_first_post[[title[5..-1], participants]] ||
            title_username_of_pm_first_post[[title[6..-1], participants]] ||
            title_username_of_pm_first_post[[title[7..-1], participants]] ||
            title_username_of_pm_first_post[[title[8..-1], participants]]

        if parent_id
          if t = topic_lookup_from_imported_post_id("pm-#{parent_id}")
            topic_id = t[:topic_id]
          end
        end
      else
        # First post of a new conversation: remember it for later replies.
        title_username_of_pm_first_post[[title, participants]] ||= m["pmtextid"]
      end

      if topic_id
        # Reply: attach to the existing PM topic.
        mapped[:topic_id] = topic_id
      else
        # New PM topic.
        mapped[:title] = title
        mapped[:archetype] = Archetype.private_message
        mapped[:target_usernames] = target_usernames.join(",")

        if mapped[:target_usernames].size < 1 # pm with yourself?
          # skip = true
          mapped[:target_usernames] = "system"
          puts "pm-#{m["pmtextid"]} has no target (#{m["touserarray"]})"
        end
      end

      skip ? nil : mapped
    end
  end
end
|
|
|
|
|
2015-05-15 19:26:53 +08:00
|
|
|
# Rewrites [ATTACH]id[/ATTACH] tags in all imported posts into upload
# markup, and appends any attachments that belong to a post but are not
# referenced in its text. Tracks per-post attachment ids in `mapping` so
# resumed imports skip work already done.
def import_attachments
  puts "", "importing attachments..."

  # Discourse post id -> list of vBulletin attachment ids for that post.
  mapping = {}
  attachments = mysql_query(<<-SQL)
    SELECT a.attachmentid, a.contentid as postid, p.threadid
      FROM #{TABLE_PREFIX}attachment a, #{TABLE_PREFIX}post p
     WHERE a.contentid = p.postid
       AND contenttypeid = 1 AND state = 'visible'
  SQL
  attachments.each do |attachment|
    # The post may have been imported either as a regular post or (for a
    # thread's first post) under the "thread-<id>" import id.
    post_id = post_id_from_imported_post_id(attachment["postid"])
    post_id = post_id_from_imported_post_id("thread-#{attachment["threadid"]}") unless post_id
    if post_id.nil?
      puts "Post for attachment #{attachment["attachmentid"]} not found"
      next
    end
    mapping[post_id] ||= []
    mapping[post_id] << attachment["attachmentid"].to_i
  end

  current_count = 0
  total_count = Post.count
  success_count = 0
  fail_count = 0

  attachment_regex = %r{\[attach[^\]]*\](\d+)\[/attach\]}i

  Post.find_each do |post|
    current_count += 1
    print_status current_count, total_count

    # Replace inline [ATTACH] tags with upload markup; every id handled
    # inline is removed from `mapping` so only orphans remain afterwards.
    new_raw = post.raw.dup
    new_raw.gsub!(attachment_regex) do |s|
      matches = attachment_regex.match(s)
      attachment_id = matches[1]

      mapping[post.id].delete(attachment_id.to_i) unless mapping[post.id].nil?

      upload, filename = find_upload(post, attachment_id)
      unless upload
        fail_count += 1
        next
      end

      html_for_upload(upload, filename)
    end

    # make resumed imports faster
    # When nothing changed, the post was already rewritten by a previous
    # run: re-read the original vBulletin text and drop the attachment ids
    # it referenced, so they are not appended again below.
    if new_raw == post.raw
      unless mapping[post.id].nil? || mapping[post.id].empty?
        imported_text = mysql_query(<<-SQL).first["pagetext"]
          SELECT p.pagetext
            FROM #{TABLE_PREFIX}attachment a, #{TABLE_PREFIX}post p
           WHERE a.contentid = p.postid
             AND a.attachmentid = #{mapping[post.id][0]}
        SQL

        imported_text.scan(attachment_regex) do |match|
          attachment_id = match[0]
          mapping[post.id].delete(attachment_id.to_i)
        end
      end
    end

    # Append attachments that belong to the post but were never referenced
    # in its text.
    unless mapping[post.id].nil? || mapping[post.id].empty?
      mapping[post.id].each do |attachment_id|
        upload, filename = find_upload(post, attachment_id)
        unless upload
          fail_count += 1
          next
        end

        # internal upload deduplication will make sure that we do not import attachments again
        html = html_for_upload(upload, filename)
        new_raw += "\n\n#{html}\n\n" if !new_raw[html]
      end
    end

    if new_raw != post.raw
      PostRevisor.new(post).revise!(
        post.user,
        { raw: new_raw },
        bypass_bump: true,
        edit_reason: "Import attachments from vBulletin",
      )
    end

    success_count += 1
  end
end
|
|
|
|
|
2014-12-22 20:22:16 +08:00
|
|
|
# Mirrors vBulletin's thread "open" flag onto the imported Discourse
# topics: every topic whose source thread was closed is marked closed.
def close_topics
  puts "", "Closing topics..."

  # import ids ("thread-<threadid>") of threads that were closed on the old forum
  closed_topic_ids = []

  threads = mysql_query <<-SQL
      SELECT t.threadid threadid, firstpostid, open
        FROM #{TABLE_PREFIX}thread t
        JOIN #{TABLE_PREFIX}post p ON p.postid = t.firstpostid
    ORDER BY t.threadid
  SQL

  threads.each { |row| closed_topic_ids << "thread-#{row["threadid"]}" if row["open"] == 0 }

  # resolve the import ids back to Discourse topic ids via post_custom_fields
  # and close the matching topics in one statement
  sql = <<-SQL
    WITH closed_topic_ids AS (
      SELECT t.id AS topic_id
      FROM post_custom_fields pcf
      JOIN posts p ON p.id = pcf.post_id
      JOIN topics t ON t.id = p.topic_id
      WHERE pcf.name = 'import_id'
      AND pcf.value IN (?)
    )
    UPDATE topics
    SET closed = true
    WHERE id IN (SELECT topic_id FROM closed_topic_ids)
  SQL

  DB.exec(sql, closed_topic_ids)
end
|
2014-08-25 16:48:29 +08:00
|
|
|
|
2015-01-19 22:00:55 +08:00
|
|
|
# Runs postprocess_post_raw over every imported post and saves any post
# whose raw content changed. Progress is printed for each post.
def post_process_posts
  puts "", "Postprocessing posts..."

  processed = 0
  total = Post.count

  Post.find_each do |post|
    begin
      before = post.raw.dup
      after = postprocess_post_raw(post.raw)
      unless after == before
        post.raw = after
        post.save
      end
    rescue PrettyText::JavaScriptError
      # NOTE(review): presumably raised while cooking the revised raw on
      # save — the post is skipped and processing continues.
      nil
    ensure
      print_status(processed += 1, total)
    end
  end
end
|
|
|
|
|
2014-12-22 20:22:16 +08:00
|
|
|
# Converts raw vBulletin BBCode into something closer to Markdown before
# the posts are created. Order matters here: code blocks are fenced
# first, chevrons are entity-escaped after code handling but before
# quote handling, and quote/list rewrites run last.
def preprocess_post_raw(raw)
  return "" if raw.blank?

  # decode HTML entities
  raw = @htmlentities.decode(raw)

  # fix whitespaces: the dump stores newlines/tabs as literal "\n" / "\t"
  raw.gsub!(/(\\r)?\\n/, "\n")
  raw.gsub!("\\t", "\t")

  # [HTML]...[/HTML]
  raw.gsub!(/\[html\]/i, "\n```html\n")
  raw.gsub!(%r{\[/html\]}i, "\n```\n")

  # [PHP]...[/PHP]
  raw.gsub!(/\[php\]/i, "\n```php\n")
  raw.gsub!(%r{\[/php\]}i, "\n```\n")

  # [HIGHLIGHT="..."] -> fenced code block tagged with that language
  raw.gsub!(/\[highlight="?(\w+)"?\]/i) { "\n```#{$1.downcase}\n" }

  # [CODE]...[/CODE]
  # [HIGHLIGHT]...[/HIGHLIGHT]
  raw.gsub!(%r{\[/?code\]}i, "\n```\n")
  raw.gsub!(%r{\[/?highlight\]}i, "\n```\n")

  # [SAMP]...[/SAMP]
  raw.gsub!(%r{\[/?samp\]}i, "`")

  # replace all chevrons with HTML entities
  # NOTE: must be done
  #  - AFTER all the "code" processing
  #  - BEFORE the "quote" processing
  # Chevrons inside inline code spans are temporarily swapped for a
  # snowman (\u2603) so they survive the entity replacement.
  raw.gsub!(/`([^`]+)`/im) { "`" + $1.gsub("<", "\u2603") + "`" }
  raw.gsub!("<", "&lt;")
  raw.gsub!("\u2603", "<")

  raw.gsub!(/`([^`]+)`/im) { "`" + $1.gsub(">", "\u2603") + "`" }
  raw.gsub!(">", "&gt;")
  raw.gsub!("\u2603", ">")

  # [URL=...]...[/URL]
  raw.gsub!(%r{\[url="?([^"]+?)"?\](.*?)\[/url\]}im) { "[#{$2.strip}](#{$1})" }
  raw.gsub!(%r{\[url="?(.+?)"?\](.+)\[/url\]}im) { "[#{$2.strip}](#{$1})" }

  # [URL]...[/URL]
  # [MP3]...[/MP3]
  raw.gsub!(%r{\[/?url\]}i, "")
  raw.gsub!(%r{\[/?mp3\]}i, "")

  # [MENTION]<username>[/MENTION]
  raw.gsub!(%r{\[mention\](.+?)\[/mention\]}i) do
    new_username = get_username_for_old_username($1)
    "@#{new_username}"
  end

  # [FONT=blah] and [COLOR=blah] -- strip formatting, keep the content
  raw.gsub! %r{\[FONT=.*?\](.*?)\[/FONT\]}im, '\1'
  raw.gsub! %r{\[COLOR=.*?\](.*?)\[/COLOR\]}im, '\1'
  raw.gsub! %r{\[COLOR=#.*?\](.*?)\[/COLOR\]}im, '\1'

  raw.gsub! %r{\[SIZE=.*?\](.*?)\[/SIZE\]}im, '\1'
  raw.gsub! %r{\[SUP\](.*?)\[/SUP\]}im, '\1'
  raw.gsub! %r{\[h=.*?\](.*?)\[/h\]}im, '\1'

  # [CENTER]...[/CENTER]
  raw.gsub! %r{\[CENTER\](.*?)\[/CENTER\]}im, '\1'

  # [INDENT]...[/INDENT]
  raw.gsub! %r{\[INDENT\](.*?)\[/INDENT\]}im, '\1'

  # Tables to MD
  raw.gsub!(%r{\[TABLE.*?\](.*?)\[/TABLE\]}im) do |t|
    rows =
      $1.gsub!(%r{\s*\[TR\](.*?)\[/TR\]\s*}im) do |r|
        cols = $1.gsub! %r{\s*\[TD.*?\](.*?)\[/TD\]\s*}im, '|\1'
        "#{cols}|\n"
      end
    # BUG FIX: gsub! returns nil when the table contains no [TR] rows,
    # which used to crash on rows.split below. Leave such tables as-is.
    next t if rows.nil?
    header, rest = rows.split "\n", 2
    c = header.count "|"
    sep = "|---" * (c - 1)
    "#{header}\n#{sep}|\n#{rest}\n"
  end

  # [QUOTE]...[/QUOTE]
  raw.gsub!(%r{\[quote\](.+?)\[/quote\]}im) do |quote|
    quote.gsub!(%r{\[quote\](.+?)\[/quote\]}im) { "\n#{$1}\n" }
    quote.gsub!(/\n(.+?)/) { "\n> #{$1}" }
    # BUG FIX: gsub! returns nil when nothing was replaced, and returning
    # that nil from the block silently deleted the whole quote. Always
    # return the (possibly mutated) quote text.
    quote
  end

  # [QUOTE=<username>]...[/QUOTE]
  raw.gsub!(%r{\[quote=([^;\]]+)\](.+?)\[/quote\]}im) do
    old_username, quote = $1, $2
    new_username = get_username_for_old_username(old_username)
    "\n[quote=\"#{new_username}\"]\n#{quote}\n[/quote]\n"
  end

  # [YOUTUBE]<id>[/YOUTUBE]
  raw.gsub!(%r{\[youtube\](.+?)\[/youtube\]}i) { "\n//youtu.be/#{$1}\n" }

  # [VIDEO=youtube;<id>]...[/VIDEO]
  raw.gsub!(%r{\[video=youtube;([^\]]+)\].*?\[/video\]}i) { "\n//youtu.be/#{$1}\n" }

  # Fix uppercase B U and I tags
  raw.gsub!(%r{(\[/?[BUI]\])}i) { $1.downcase }

  # More Additions ....

  # [spoiler=Some hidden stuff]SPOILER HERE!![/spoiler]
  raw.gsub!(%r{\[spoiler="?(.+?)"?\](.+?)\[/spoiler\]}im) do
    "\n#{$1}\n[spoiler]#{$2}[/spoiler]\n"
  end

  # [IMG][IMG]http://i63.tinypic.com/akga3r.jpg[/IMG][/IMG]
  raw.gsub!(%r{\[IMG\]\[IMG\](.+?)\[/IMG\]\[/IMG\]}i) { "[IMG]#{$1}[/IMG]" }

  # convert list tags to ul and list=1 tags to ol
  # (basically, we're only missing list=a here...)
  # (https://meta.discourse.org/t/phpbb-3-importer-old/17397)
  raw.gsub!(%r{\[list\](.*?)\[/list\]}im, '[ul]\1[/ul]')
  raw.gsub!(%r{\[list=1\](.*?)\[/list\]}im, '[ol]\1[/ol]')
  raw.gsub!(%r{\[list\](.*?)\[/list:u\]}im, '[ul]\1[/ul]')
  raw.gsub!(%r{\[list=1\](.*?)\[/list:o\]}im, '[ol]\1[/ol]')
  # convert *-tags to li-tags so bbcode-to-md can do its magic on phpBB's lists:
  raw.gsub!(/\[\*\]\n/, "")
  raw.gsub!(%r{\[\*\](.*?)\[/\*:m\]}, '[li]\1[/li]')
  raw.gsub!(/\[\*\](.*?)\n/, '[li]\1[/li]')
  raw.gsub!(/\[\*=1\]/, "")

  raw
end
|
|
|
|
|
2015-01-19 22:00:55 +08:00
|
|
|
# Rewrites vBulletin cross-reference BBCode ([QUOTE=user;id], [THREAD],
# [POST]) into Discourse quotes and permalinks using the import lookup
# tables. References that cannot be resolved are left untouched.
def postprocess_post_raw(raw)
  # [QUOTE=<username>;<post_id>]...[/QUOTE]
  raw.gsub!(%r{\[quote=([^;]+);(\d+)\](.+?)\[/quote\]}im) do
    vb_username = $1
    vb_post_id = $2
    quoted_text = $3

    discourse_username = get_username_for_old_username(vb_username)

    # There is a bug here when the first post in a topic is quoted.
    # The first post in a topic does not have an post_custom_field referring to the post number,
    # but it refers to thread-XXX instead, so this lookup fails miserably then.
    # Fixing this would imply rewriting that logic completely.
    lookup = topic_lookup_from_imported_post_id(vb_post_id)
    if lookup
      "\n[quote=\"#{discourse_username},post:#{lookup[:post_number]},topic:#{lookup[:topic_id]}\"]\n#{quoted_text}\n[/quote]\n"
    else
      "\n[quote=\"#{discourse_username}\"]\n#{quoted_text}\n[/quote]\n"
    end
  end

  # remove attachments
  raw.gsub!(%r{\[attach[^\]]*\]\d+\[/attach\]}i, "")

  # [THREAD]<thread_id>[/THREAD]
  # ==> http://my.discourse.org/t/slug/<topic_id>
  raw.gsub!(%r{\[thread\](\d+)\[/thread\]}i) do
    lookup = topic_lookup_from_imported_post_id("thread-#{$1}")
    lookup ? lookup[:url] : $&
  end

  # [THREAD=<thread_id>]...[/THREAD]
  # ==> [...](http://my.discourse.org/t/slug/<topic_id>)
  raw.gsub!(%r{\[thread=(\d+)\](.+?)\[/thread\]}i) do
    label = $2
    lookup = topic_lookup_from_imported_post_id("thread-#{$1}")
    lookup ? "[#{label}](#{lookup[:url]})" : $&
  end

  # [POST]<post_id>[/POST]
  # ==> http://my.discourse.org/t/slug/<topic_id>/<post_number>
  raw.gsub!(%r{\[post\](\d+)\[/post\]}i) do
    lookup = topic_lookup_from_imported_post_id($1)
    lookup ? lookup[:url] : $&
  end

  # [POST=<post_id>]...[/POST]
  # ==> [...](http://my.discourse.org/t/<topic_slug>/<topic_id>/<post_number>)
  raw.gsub!(%r{\[post=(\d+)\](.+?)\[/post\]}i) do
    label = $2
    lookup = topic_lookup_from_imported_post_id($1)
    lookup ? "[#{label}](#{lookup[:url]})" : $&
  end

  raw
end
|
|
|
|
|
2016-12-05 20:11:59 +08:00
|
|
|
# Writes vb_map.csv next to this script, mapping each imported topic's
# old vBulletin thread id ("XXX<old_id>") to its new Discourse topic id
# ("YYY<topic_id>"), one mapping per CSV row.
def create_permalink_file
  puts "", "Creating Permalink File...", ""

  mappings = []

  Topic.listable_topics.find_each do |topic|
    fields = topic.first_post.custom_fields
    import_id = fields && fields["import_id"]
    next unless import_id
    old_id = import_id.split("-").last
    mappings.push("XXX#{old_id} YYY#{topic.id}")
  end

  # Category.find_each do |cat|
  #   ccf = cat.custom_fields
  #   if ccf && ccf["import_id"]
  #     id = ccf["import_id"].to_i
  #     id_mapping.push("/forumdisplay.php?#{id} http://forum.quartertothree.com#{cat.url}")
  #   end
  # end

  CSV.open(File.expand_path("../vb_map.csv", __FILE__), "w") do |csv|
    mappings.each { |mapping| csv << [mapping] }
  end
end
|
|
|
|
|
|
|
|
# Suspends every Discourse user whose vBulletin account appears in the
# userban table, logging a staff action per suspension. Prints running
# progress and reports users that could not be found or saved.
def suspend_users
  puts "", "updating banned users"

  banned = 0
  failed = 0
  total = mysql_query("SELECT count(*) count FROM #{TABLE_PREFIX}userban").first["count"]

  system_user = Discourse.system_user

  mysql_query("SELECT userid, bandate FROM #{TABLE_PREFIX}userban").each do |b|
    user = User.find_by_id(user_id_from_imported_user_id(b["userid"]))
    if user
      # BUG FIX: the ban date comes from the vBulletin row (`b`), not from
      # the Discourse User record -- `user["bandate"]` was reading a
      # non-existent attribute.
      user.suspended_at = parse_timestamp(b["bandate"])
      user.suspended_till = 200.years.from_now

      if user.save
        StaffActionLogger.new(system_user).log_user_suspend(user, "banned during initial import")
        banned += 1
      else
        puts "Failed to suspend user #{user.username}. #{user.errors.try(:full_messages).try(:inspect)}"
        failed += 1
      end
    else
      puts "Not found: #{b["userid"]}"
      failed += 1
    end

    print_status banned + failed, total
  end
end
|
|
|
|
|
2015-01-19 22:00:55 +08:00
|
|
|
# Converts an epoch timestamp from the vBulletin database into a Time in
# the application's time zone, interpreting it via the configured
# TIMEZONE (@tz).
def parse_timestamp(timestamp)
  local_time = @tz.utc_to_local(timestamp)
  Time.zone.at(local_time)
end
|
|
|
|
|
2014-12-22 20:22:16 +08:00
|
|
|
# Executes +sql+ against the source vBulletin MySQL database.
# NOTE(review): cache_rows: true is a mysql2 option — presumably set so
# result sets stay usable across subsequent queries / re-enumeration;
# see the mysql2 gem documentation to confirm.
def mysql_query(sql)
  @client.query(sql, cache_rows: true)
end
|
2014-08-18 19:04:08 +08:00
|
|
|
end
|
|
|
|
|
2014-12-22 20:22:16 +08:00
|
|
|
ImportScripts::VBulletin.new.perform
|