# Notes:
#
# Written by Sam
#
# Lithium is quite protective of data; there is no simple way of exporting.
# If you have leverage you may get a data dump — in my case it was provided
# in XML format.
#
# The first step is to convert it to db format so you can import it into a DB;
# that was done using import_scripts/support/convert_mysql_xml_to_mysql.rb
#
require 'mysql2'
require 'csv'
require 'reverse_markdown'
require File.expand_path(File.dirname(__FILE__) + "/base.rb")
require 'htmlentities'
# Tables are not converted to Markdown: drop ReverseMarkdown's table
# converters so table markup passes through to the output untouched.
%i[table td tr th thead tbody].each do |table_tag|
  ReverseMarkdown::Converters.unregister(table_tag)
end
class ImportScripts::Lithium < ImportScripts::Base
|
|
|
|
BATCH_SIZE = 1000

# CHANGE THESE BEFORE RUNNING THE IMPORTER

# MySQL database name and root password of the converted Lithium dump.
DATABASE = "wd"
PASSWORD = "password"
# Local directories holding exported avatar images and post uploads.
AVATARS_DIR = '/tmp/avatars'
UPLOAD_DIR = '/tmp/uploads'

# Old forum hostname; links/images pointing here are rewritten to relative
# URLs in postprocess_post_raw.
OLD_DOMAIN = 'community.wd.com'

# Extra SQL fragment appended to the message2 queries (useful for limiting
# rows during test runs); empty for a full import.
TEMP = ""

# Maps Lithium data onto Discourse user custom fields: `user:` entries read a
# column from the users table, `profile:` entries read a user_profile param.
USER_CUSTOM_FIELDS = [
  { name: "sso_id", user: "sso_id" },
  { name: "user_field_1", profile: "jobtitle" },
  { name: "user_field_2", profile: "company" },
  { name: "user_field_3", profile: "industry" },
]

# SQL IN-list literal of the user_profile params fetched per user batch.
LITHIUM_PROFILE_FIELDS = "'profile.jobtitle', 'profile.company', 'profile.industry', 'profile.location'"

# Lithium usernames remapped to avoid reserved/colliding Discourse usernames.
USERNAME_MAPPINGS = {
  "admins": "admin_user"
}.with_indifferent_access
# Sets up import state: the old→new username map, an HTML entity decoder,
# and the MySQL connection to the converted Lithium dump.
def initialize
  super

  @old_username_to_new_usernames = {}

  @htmlentities = HTMLEntities.new

  @client = Mysql2::Client.new(
    host: "localhost",
    username: "root",
    password: PASSWORD,
    database: DATABASE
  )
end
# Runs the full import pipeline in dependency order. @max_start_id records
# the highest pre-existing post id so post_process_posts only rewrites posts
# created by this run.
def execute
  @max_start_id = Post.maximum(:id)

  import_groups
  import_categories
  import_users
  import_topics
  import_posts
  import_likes
  import_accepted_answers
  import_pms
  close_topics
  create_permalinks

  post_process_posts
end
# Imports Lithium roles as Discourse groups, keyed by role name.
def import_groups
  puts "", "importing groups..."

  role_rows = mysql_query <<-SQL
    SELECT DISTINCT name
    FROM roles
    ORDER BY name
  SQL

  create_groups(role_rows) do |row|
    raw_name = row["name"]
    { id: raw_name, name: @htmlentities.decode(raw_name).strip }
  end
end
# Imports users in batches, attaching profile fields (job title, company,
# industry, location) from user_profile and custom avatars from AVATARS_DIR
# (avatar files are named "<sso_id>_actual.jpeg" / "<sso_id>_profile.jpeg").
def import_users
  puts "", "importing users"

  user_count = mysql_query("SELECT COUNT(*) count FROM users").first["count"]

  batches(BATCH_SIZE) do |offset|
    users = mysql_query <<-SQL
      SELECT id, nlogin, login_canon, email, registration_time, sso_id
      FROM users
      ORDER BY id
      LIMIT #{BATCH_SIZE}
      OFFSET #{offset}
    SQL

    break if users.size < 1

    next if all_records_exist? :users, users.map { |u| u["id"].to_i }

    users = users.to_a
    first_id = users.first["id"]
    last_id = users.last["id"]

    # Fetch all profile rows for this id window in one query instead of one
    # query per user.
    profiles = mysql_query <<-SQL
      SELECT user_id, param, nvalue
      FROM user_profile
      WHERE nvalue IS NOT NULL AND param IN (#{LITHIUM_PROFILE_FIELDS}) AND user_id >= #{first_id} AND user_id <= #{last_id}
      ORDER BY user_id
    SQL

    create_users(users, total: user_count, offset: offset) do |user|
      profile = profiles.select { |p| p["user_id"] == user["id"] }
      result = profile.select { |p| p["param"] == "profile.location" }
      location = result.count > 0 ? result.first["nvalue"] : nil
      username = user["login_canon"]
      username = USERNAME_MAPPINGS[username] if USERNAME_MAPPINGS[username].present?

      {
        id: user["id"],
        name: user["nlogin"],
        username: username,
        email: user["email"].presence || fake_email,
        location: location,
        custom_fields: user_custom_fields(user, profile),
        created_at: unix_time(user["registration_time"]),
        post_create_action: proc do |u|
          @old_username_to_new_usernames[user["login_canon"]] = u.username

          # import user avatar
          sso_id = u.custom_fields["sso_id"]
          if sso_id.present?
            # FIX: avatars are exported into AVATARS_DIR; the original built
            # this path from UPLOAD_DIR (post uploads), so no avatar was
            # ever found.
            prefix = "#{AVATARS_DIR}/#{sso_id}_"
            file = get_file(prefix + "actual.jpeg")
            file ||= get_file(prefix + "profile.jpeg")

            if file.present?
              # FIX: operate on the created User record `u`, not the MySQL
              # row hash `user` — a Hash has no #id / #user_avatar /
              # #update_columns, so the original raised NoMethodError here.
              upload = UploadCreator.new(file, file.path, type: "avatar").create_for(u.id)
              u.create_user_avatar unless u.user_avatar

              if !u.user_avatar.contains_upload?(upload.id)
                u.user_avatar.update_columns(custom_upload_id: upload.id)

                if u.uploaded_avatar_id.nil? ||
                   !u.user_avatar.contains_upload?(u.uploaded_avatar_id)
                  u.update_columns(uploaded_avatar_id: upload.id)
                end
              end
            end
          end
        end
      }
    end
  end
end
# Builds the custom_fields hash for one imported user. `user:` mappings pull
# a column straight from the users row; `profile:` mappings look up the
# matching "profile.<key>" param among this user's user_profile rows.
def user_custom_fields(user, profile)
  USER_CUSTOM_FIELDS.each_with_object({}) do |attr, fields|
    key = attr[:name]

    if attr[:user].present?
      fields[key] = user[attr[:user]]
    elsif attr[:profile].present? && profile.count > 0
      matches = profile.select { |p| p["param"] == "profile.#{attr[:profile]}" }
      fields[key] = matches.first["nvalue"] if matches.count > 0
    end
  end
end
# Returns an open File handle for +path+, or nil when the file is absent.
def get_file(path)
  File.exist?(path) ? File.open(path) : nil
end
# Converts a Lithium millisecond epoch timestamp into a Ruby Time.
def unix_time(millis)
  Time.at(millis / 1000.0)
end
# Imports a user's custom avatar from the `customavatar` table and assigns
# it as the Discourse user's avatar.
# NOTE(review): `customavatar` looks like a vBulletin table, not a Lithium
# one, and this method is never called from execute — verify before use.
def import_profile_picture(old_user, imported_user)
  query = mysql_query <<-SQL
    SELECT filedata, filename
    FROM customavatar
    WHERE userid = #{old_user["userid"]}
    ORDER BY dateline DESC
    LIMIT 1
  SQL

  picture = query.first

  return if picture.nil?

  # Round-trip the blob through a tempfile so UploadCreator can consume it.
  file = Tempfile.new("profile-picture")
  file.write(picture["filedata"].encode("ASCII-8BIT").force_encoding("UTF-8"))
  file.rewind

  upload = UploadCreator.new(file, picture["filename"]).create_for(imported_user.id)

  return if !upload.persisted?

  imported_user.create_user_avatar
  imported_user.user_avatar.update(custom_upload_id: upload.id)
  imported_user.update(uploaded_avatar_id: upload.id)
ensure
  file.close rescue nil
  # FIX: was `file.unlind` (typo) — the NoMethodError was swallowed by
  # `rescue nil`, silently leaking the tempfile on every call.
  file.unlink rescue nil
end
# Imports a user's profile background image from the `customprofilepic`
# table and sets it on the Discourse user profile.
# NOTE(review): `customprofilepic` looks like a vBulletin table, not a
# Lithium one, and this method is never called from execute — verify.
def import_profile_background(old_user, imported_user)
  query = mysql_query <<-SQL
    SELECT filedata, filename
    FROM customprofilepic
    WHERE userid = #{old_user["userid"]}
    ORDER BY dateline DESC
    LIMIT 1
  SQL

  background = query.first

  return if background.nil?

  # Round-trip the blob through a tempfile so UploadCreator can consume it.
  file = Tempfile.new("profile-background")
  file.write(background["filedata"].encode("ASCII-8BIT").force_encoding("UTF-8"))
  file.rewind

  upload = UploadCreator.new(file, background["filename"]).create_for(imported_user.id)

  return if !upload.persisted?

  imported_user.user_profile.update(profile_background: upload.url)
ensure
  # Best-effort cleanup; `file` may be nil if the query raised early.
  file.close rescue nil
  file.unlink rescue nil
end
# Imports Lithium nodes as categories. A node's display name comes from its
# 'category.title' or 'board.title' setting, falling back to display_id.
# Nodes with parent_node_id <= 2 become top-level categories; the rest
# become children of their parent node's category (parents sort first via
# ORDER BY type_id DESC, so they exist before children are created).
def import_categories
  puts "", "importing top level categories..."

  categories = mysql_query <<-SQL
    SELECT n.node_id, n.display_id, c.nvalue c_title, b.nvalue b_title, n.position, n.parent_node_id, n.type_id
    FROM nodes n
    LEFT JOIN settings c ON n.node_id = c.node_id AND c.param = 'category.title'
    LEFT JOIN settings b ON n.node_id = b.node_id AND b.param = 'board.title'
    ORDER BY n.type_id DESC, n.node_id ASC
  SQL

  categories = categories.map { |c| (c["name"] = c["c_title"] || c["b_title"] || c["display_id"]) && c }

  # To prevent duplicate category names, suffix the node_id onto any name
  # that occurs more than once.
  categories = categories.map do |category|
    count = categories.to_a.count { |c| c["name"].present? && c["name"] == category["name"] }
    category["name"] << " (#{category["node_id"]})" if count > 1
    category
  end

  parent_categories = categories.select { |c| c["parent_node_id"] <= 2 }

  create_categories(parent_categories) do |category|
    {
      id: category["node_id"],
      name: category["name"],
      position: category["position"],
      post_create_action: lambda do |record|
        after_category_create(record, category)
      end
    }
  end

  puts "", "importing children categories..."

  children_categories = categories.select { |c| c["parent_node_id"] > 2 }

  create_categories(children_categories) do |category|
    {
      id: category["node_id"],
      name: category["name"],
      position: category["position"],
      parent_category_id: category_id_from_imported_category_id(category["parent_node_id"]),
      post_create_action: lambda do |record|
        after_category_create(record, category)
      end
    }
  end
end
# Post-create hook for imported categories: if the Lithium node has roles
# attached, restricts the category and grants full access to each role's
# imported group.
def after_category_create(category, params)
  node_id = category.custom_fields["import_id"]
  roles = mysql_query <<-SQL
    SELECT name
    FROM roles
    WHERE node_id = #{node_id}
  SQL

  if roles.count > 0
    category.update(read_restricted: true)

    roles.each do |role|
      group_id = group_id_from_imported_group_id(role["name"])
      if group_id.present?
        CategoryGroup.find_or_create_by(category: category, group_id: group_id) do |cg|
          cg.permission_type = CategoryGroup.permission_types[:full]
        end
      else
        puts "", "Group not found for id '#{role["name"]}'"
      end
    end
  end
end
# Imports topic starters (message2 rows where id = root_id) in batches.
# Imported post ids are "<node_id> <id>" so replies can find their topic.
# Bodies are stored as raw HTML here; post_process_posts converts them to
# Markdown afterwards.
def import_topics
  puts "", "importing topics..."

  topic_count = mysql_query("SELECT COUNT(*) count FROM message2 where id = root_id").first["count"]

  batches(BATCH_SIZE) do |offset|
    topics = mysql_query <<-SQL
      SELECT id, subject, body, deleted, user_id,
             post_date, views, node_id, unique_id
      FROM message2
      WHERE id = root_id #{TEMP}
      ORDER BY node_id, id
      LIMIT #{BATCH_SIZE}
      OFFSET #{offset}
    SQL

    break if topics.size < 1

    next if all_records_exist? :posts, topics.map { |topic| "#{topic["node_id"]} #{topic["id"]}" }

    create_posts(topics, total: topic_count, offset: offset) do |topic|
      category_id = category_id_from_imported_category_id(topic["node_id"])

      raw = topic["body"]

      # Skip topics whose node did not map to an imported category.
      if category_id
        {
          id: "#{topic["node_id"]} #{topic["id"]}",
          user_id: user_id_from_imported_user_id(topic["user_id"]) || Discourse::SYSTEM_USER_ID,
          title: @htmlentities.decode(topic["subject"]).strip[0...255],
          category: category_id,
          raw: raw,
          created_at: unix_time(topic["post_date"]),
          views: topic["views"],
          custom_fields: { import_unique_id: topic["unique_id"] },
          import_mode: true
        }
      else
        nil
      end
    end
  end
end
# Imports replies (message2 rows where id <> root_id) in batches, linking
# each to its imported topic ("<node_id> <root_id>") and, when possible, to
# its parent post for reply threading.
def import_posts
  post_count = mysql_query("SELECT COUNT(*) count FROM message2
                            WHERE id <> root_id").first["count"]

  puts "", "importing posts... (#{post_count})"

  batches(BATCH_SIZE) do |offset|
    posts = mysql_query <<-SQL
      SELECT id, body, deleted, user_id,
             post_date, parent_id, root_id, node_id, unique_id
      FROM message2
      WHERE id <> root_id #{TEMP} AND deleted = 0
      ORDER BY node_id, root_id, id
      LIMIT #{BATCH_SIZE}
      OFFSET #{offset}
    SQL

    break if posts.size < 1

    next if all_records_exist? :posts, posts.map { |post| "#{post["node_id"]} #{post["root_id"]} #{post["id"]}" }

    create_posts(posts, total: post_count, offset: offset) do |post|
      # Skip replies whose topic was not imported.
      next unless topic = topic_lookup_from_imported_post_id("#{post["node_id"]} #{post["root_id"]}")

      # FIX: dropped the dead `raw = post["raw"]` — the query selects no
      # `raw` column (it was always nil) and the value was immediately
      # overwritten by the line below.
      raw = post["body"]

      new_post = {
        id: "#{post["node_id"]} #{post["root_id"]} #{post["id"]}",
        user_id: user_id_from_imported_user_id(post["user_id"]) || Discourse::SYSTEM_USER_ID,
        topic_id: topic[:topic_id],
        raw: raw,
        created_at: unix_time(post["post_date"]),
        custom_fields: { import_unique_id: post["unique_id"] },
        import_mode: true
      }

      # Defensive only: the query filters deleted = 0.
      # NOTE(review): string key while the rest are symbols — confirm the
      # base importer reads "deleted_at" with this key before relying on it.
      if post["deleted"] > 0
        new_post["deleted_at"] = Time.now
      end

      if parent = topic_lookup_from_imported_post_id("#{post["node_id"]} #{post["root_id"]} #{post["parent_id"]}")
        new_post[:reply_to_post_number] = parent[:post_number]
      end

      new_post
    end
  end
end
# Maps Lithium smiley names (the :name: tokens left in post bodies) to
# Discourse emoji names; applied in postprocess_post_raw.
SMILEY_SUBS = {
  "smileyhappy" => "smiley",
  "smileyindifferent" => "neutral_face",
  "smileymad" => "angry",
  "smileysad" => "cry",
  "smileysurprised" => "dizzy_face",
  "smileytongue" => "stuck_out_tongue",
  "smileyvery-happy" => "grin",
  "smileywink" => "wink",
  "smileyfrustrated" => "confounded",
  "smileyembarrassed" => "flushed",
  "smileylol" => "laughing",
  "cathappy" => "smiley_cat",
  "catindifferent" => "cat",
  "catmad" => "smirk_cat",
  "catsad" => "crying_cat_face",
  "catsurprised" => "scream_cat",
  "cattongue" => "stuck_out_tongue",
  "catvery-happy" => "smile_cat",
  "catwink" => "wink",
  "catfrustrated" => "grumpycat",
  "catembarrassed" => "kissing_cat",
  "catlol" => "joy_cat"
}
# Imports Lithium kudos (tag_events_score_message) as Discourse likes.
# For speed, rows are staged into a temp table and then post_actions /
# user_actions are inserted with set-based SQL (post_action_type_id = 2 is
# "like"; user_actions action_type 1 = liked, 2 = was-liked). Finally the
# denormalized like counters on posts and topics are recomputed.
def import_likes
  puts "\nimporting likes..."

  sql = "select source_id user_id, target_id post_id, row_version created_at from wd.tag_events_score_message"
  results = mysql_query(sql)

  puts "loading unique id map"
  # Lithium unique_id -> Discourse post id.
  existing_map = {}
  PostCustomField.where(name: 'import_unique_id').pluck(:post_id, :value).each do |post_id, import_id|
    existing_map[import_id] = post_id
  end

  puts "loading data into temp table"
  PostAction.exec_sql("create temp table like_data(user_id int, post_id int, created_at timestamp without time zone)")
  PostAction.transaction do
    results.each do |result|
      result["user_id"] = user_id_from_imported_user_id(result["user_id"].to_s)
      result["post_id"] = existing_map[result["post_id"].to_s]

      # Skip likes whose user or post was not imported.
      next unless result["user_id"] && result["post_id"]

      PostAction.exec_sql("INSERT INTO like_data VALUES (:user_id,:post_id,:created_at)",
        user_id: result["user_id"],
        post_id: result["post_id"],
        created_at: result["created_at"]
      )
    end
  end

  puts "creating missing post actions"
  PostAction.exec_sql <<-SQL

    INSERT INTO post_actions (post_id, user_id, post_action_type_id, created_at, updated_at)
    SELECT l.post_id, l.user_id, 2, l.created_at, l.created_at FROM like_data l
    LEFT JOIN post_actions a ON a.post_id = l.post_id AND l.user_id = a.user_id AND a.post_action_type_id = 2
    WHERE a.id IS NULL
  SQL

  puts "creating missing user actions"
  UserAction.exec_sql <<-SQL
    INSERT INTO user_actions (user_id, action_type, target_topic_id, target_post_id, acting_user_id, created_at, updated_at)
    SELECT pa.user_id, 1, p.topic_id, p.id, pa.user_id, pa.created_at, pa.created_at
    FROM post_actions pa
    JOIN posts p ON p.id = pa.post_id
    LEFT JOIN user_actions ua ON action_type = 1 AND ua.target_post_id = pa.post_id AND ua.user_id = pa.user_id

    WHERE ua.id IS NULL AND pa.post_action_type_id = 2
  SQL

  # reverse action
  UserAction.exec_sql <<-SQL
    INSERT INTO user_actions (user_id, action_type, target_topic_id, target_post_id, acting_user_id, created_at, updated_at)
    SELECT p.user_id, 2, p.topic_id, p.id, pa.user_id, pa.created_at, pa.created_at
    FROM post_actions pa
    JOIN posts p ON p.id = pa.post_id
    LEFT JOIN user_actions ua ON action_type = 2 AND ua.target_post_id = pa.post_id AND
       ua.acting_user_id = pa.user_id AND ua.user_id = p.user_id

    WHERE ua.id IS NULL AND pa.post_action_type_id = 2
  SQL

  puts "updating like counts on posts"

  Post.exec_sql <<-SQL
    UPDATE posts SET like_count = coalesce(cnt,0)
    FROM (
      SELECT post_id, count(*) cnt
      FROM post_actions
      WHERE post_action_type_id = 2 AND deleted_at IS NULL
      GROUP BY post_id
    ) x
    WHERE posts.like_count <> x.cnt AND posts.id = x.post_id

  SQL

  puts "updating like counts on topics"

  Post.exec_sql <<-SQL
    UPDATE topics SET like_count = coalesce(cnt,0)
    FROM (
      SELECT topic_id, sum(like_count) cnt
      FROM posts
      WHERE deleted_at IS NULL
      GROUP BY topic_id
    ) x
    WHERE topics.like_count <> x.cnt AND topics.id = x.topic_id

  SQL
end
# Imports Lithium "solutions" as accepted answers (the custom fields read by
# the discourse-solved plugin). Bit 0x4000 of message2.attributes flags an
# accepted post.
def import_accepted_answers

  puts "\nimporting accepted answers..."

  sql = "select unique_id post_id from message2 where (attributes & 0x4000 ) != 0 and deleted = 0;"
  results = mysql_query(sql)

  puts "loading unique id map"
  # Lithium unique_id -> Discourse post id.
  existing_map = {}
  PostCustomField.where(name: 'import_unique_id').pluck(:post_id, :value).each do |post_id, import_id|
    existing_map[import_id] = post_id
  end

  puts "loading data into temp table"
  PostAction.exec_sql("create temp table accepted_data(post_id int primary key)")
  PostAction.transaction do
    results.each do |result|

      result["post_id"] = existing_map[result["post_id"].to_s]

      # Skip answers whose post was not imported.
      next unless result["post_id"]

      PostAction.exec_sql("INSERT INTO accepted_data VALUES (:post_id)",
        post_id: result["post_id"]
      )

    end
  end

  # A topic can only have one accepted answer: keep the earliest per topic.
  puts "deleting dupe answers"
  PostAction.exec_sql <<-SQL
    DELETE FROM accepted_data WHERE post_id NOT IN (
      SELECT post_id FROM
      (
        SELECT topic_id, MIN(post_id) post_id
        FROM accepted_data a
        JOIN posts p ON p.id = a.post_id
        GROUP BY topic_id
      ) X
    )
  SQL

  puts "importing accepted answers"
  PostAction.exec_sql <<-SQL
    INSERT into post_custom_fields (name, value, post_id, created_at, updated_at)
    SELECT 'is_accepted_answer', 'true', a.post_id, current_timestamp, current_timestamp
    FROM accepted_data a
    LEFT JOIN post_custom_fields f ON name = 'is_accepted_answer' AND f.post_id = a.post_id
    WHERE f.id IS NULL
  SQL

  puts "marking accepted topics"
  PostAction.exec_sql <<-SQL
    INSERT into topic_custom_fields (name, value, topic_id, created_at, updated_at)
    SELECT 'accepted_answer_post_id', a.post_id::varchar, p.topic_id, current_timestamp, current_timestamp
    FROM accepted_data a
    JOIN posts p ON p.id = a.post_id
    LEFT JOIN topic_custom_fields f ON name = 'accepted_answer_post_id' AND f.topic_id = p.topic_id
    WHERE f.id IS NULL
  SQL
  puts "done importing accepted answers"
end
# Imports Lithium private notes as Discourse private messages. Threads are
# reconstructed heuristically: a note whose subject is "Re: <subject>" and
# whose participant set matches an earlier non-"Re:" note with that subject
# is appended to that note's topic; everything else starts a new PM topic.
def import_pms

  puts "", "importing pms..."

  puts "determining participation records"

  inbox = mysql_query("SELECT note_id, recipient_user_id user_id FROM tblia_notes_inbox")
  outbox = mysql_query("SELECT note_id, recipient_id user_id FROM tblia_notes_outbox")

  # note_id => Set of imported (Discourse) user ids participating in the note.
  users = {}

  [inbox, outbox].each do |r|
    r.each do |row|
      ary = (users[row["note_id"]] ||= Set.new)
      user_id = user_id_from_imported_user_id(row["user_id"])
      ary << user_id if user_id
    end
  end

  puts "untangling PM soup"

  note_to_subject = {}
  # [subject, participant-set] => note_id of the thread-starting note.
  subject_to_first_note = {}

  mysql_query("SELECT note_id, subject, sender_user_id FROM tblia_notes_content order by note_id").each do |row|
    user_id = user_id_from_imported_user_id(row["sender_user_id"])
    ary = (users[row["note_id"]] ||= Set.new)
    if user_id
      ary << user_id
    end
    note_to_subject[row["note_id"]] = row["subject"]

    # The first non-"Re:" note for a (subject, participants) pair starts a
    # thread.
    if row["subject"] !~ /^Re: /
      subject_to_first_note[[row["subject"], ary]] ||= row["note_id"]
    end
  end

  puts "Loading user_id to username map"
  user_map = {}
  User.pluck(:id, :username).each do |id, username|
    user_map[id] = username
  end

  topic_count = mysql_query("SELECT COUNT(*) count FROM tblia_notes_content").first["count"]

  batches(BATCH_SIZE) do |offset|
    topics = mysql_query <<-SQL
      SELECT note_id, subject, body, sender_user_id, sent_time
      FROM tblia_notes_content
      ORDER BY note_id
      LIMIT #{BATCH_SIZE}
      OFFSET #{offset}
    SQL

    break if topics.size < 1

    next if all_records_exist? :posts, topics.map { |topic| "pm_#{topic["note_id"]}" }

    create_posts(topics, total: topic_count, offset: offset) do |topic|

      user_id = user_id_from_imported_user_id(topic["sender_user_id"]) || Discourse::SYSTEM_USER_ID
      participants = users[topic["note_id"]]

      usernames = (participants - [user_id]).map { |id| user_map[id] }

      subject = topic["subject"]
      topic_id = nil

      # "Re: ..." notes try to attach to the imported thread starter.
      if subject =~ /^Re: /
        parent_id = subject_to_first_note[[subject[4..-1], participants]]
        if parent_id
          if t = topic_lookup_from_imported_post_id("pm_#{parent_id}")
            topic_id = t[:topic_id]
          end
        end
      end

      raw = topic["body"]

      msg = {
        id: "pm_#{topic["note_id"]}",
        user_id: user_id,
        raw: raw,
        created_at: unix_time(topic["sent_time"]),
        import_mode: true
      }

      unless topic_id
        msg[:title] = @htmlentities.decode(topic["subject"]).strip[0...255]
        msg[:archetype] = Archetype.private_message
        msg[:target_usernames] = usernames.join(',')
      else
        msg[:topic_id] = topic_id
      end

      msg
    end
  end

end
# Closes imported topics whose Lithium root message carries the "closed"
# flag (bit 0x0002 of message2.attributes).
def close_topics

  puts "\nclosing closed topics..."

  sql = "select unique_id post_id from message2 where root_id = id AND (attributes & 0x0002 ) != 0;"
  results = mysql_query(sql)

  # loading post map: Lithium unique_id -> Discourse post id.
  existing_map = {}
  PostCustomField.where(name: 'import_unique_id').pluck(:post_id, :value).each do |post_id, import_id|
    existing_map[import_id.to_i] = post_id.to_i
  end

  # Update in slices of 500 to keep the IN-lists bounded.
  results.map { |r| r["post_id"] }.each_slice(500) do |ids|
    mapped = ids.map { |id| existing_map[id] }.compact
    Topic.exec_sql("
      UPDATE topics SET closed = true
      WHERE id IN (SELECT topic_id FROM posts where id in (:ids))
    ", ids: mapped) if mapped.present?
  end

end
# Creates /p/<unique_id> permalinks for every imported post, and installs a
# normalization so old Lithium /t5/...p/<id> URLs resolve onto them.
def create_permalinks
  puts "Creating permalinks"

  SiteSetting.permalink_normalizations = '/t5\\/.*p\\/(\\d+).*//p/\\1'

  # First posts (post_number = 1) link to their topic.
  sql = <<-SQL
    INSERT INTO permalinks (url, topic_id, created_at, updated_at)
    SELECT '/p/' || value, p.topic_id, current_timestamp, current_timestamp
    FROM post_custom_fields f
    JOIN posts p on f.post_id = p.id AND post_number = 1
    LEFT JOIN permalinks pm ON url = '/p/' || value
    WHERE pm.id IS NULL AND f.name = 'import_unique_id'
  SQL

  r = Permalink.exec_sql sql
  puts "#{r.cmd_tuples} permalinks to topics added!"

  # Replies link to the individual post.
  sql = <<-SQL
    INSERT INTO permalinks (url, post_id, created_at, updated_at)
    SELECT '/p/' || value, p.id, current_timestamp, current_timestamp
    FROM post_custom_fields f
    JOIN posts p on f.post_id = p.id AND post_number <> 1
    LEFT JOIN permalinks pm ON url = '/p/' || value
    WHERE pm.id IS NULL AND f.name = 'import_unique_id'
  SQL

  r = Permalink.exec_sql sql
  puts "#{r.cmd_tuples} permalinks to posts added!"

end
# find the uploaded file information from the db
# Returns [upload, real_filename], or nil on any failure (missing row,
# missing file on disk, or invalid upload).
# NOTE(review): the `attachment` table and ATTACHMENT_DIR look like
# vBulletin leftovers; ATTACHMENT_DIR is not defined anywhere in this
# script and this method is never called — it would raise NameError if it
# were. Verify before use.
def find_upload(post, attachment_id)
  sql = "SELECT a.attachmentid attachment_id, a.userid user_id, a.filedataid file_id, a.filename filename,
                a.caption caption
           FROM attachment a
          WHERE a.attachmentid = #{attachment_id}"
  results = mysql_query(sql)

  unless (row = results.first)
    puts "Couldn't find attachment record for post.id = #{post.id}, import_id = #{post.custom_fields['import_id']}"
    return nil
  end

  filename = File.join(ATTACHMENT_DIR, row['user_id'].to_s.split('').join('/'), "#{row['file_id']}.attach")
  # FIX: File.exist? — File.exists? was deprecated and removed in Ruby 3.2.
  unless File.exist?(filename)
    # FIX: the message contained a broken interpolation; print the path.
    puts "Attachment file doesn't exist: #{filename}"
    return nil
  end

  real_filename = row['filename']
  # Uploads with a leading dot would be treated as hidden; prefix them.
  real_filename.prepend SecureRandom.hex if real_filename[0] == '.'
  upload = create_upload(post.user.id, filename, real_filename)

  if upload.nil? || !upload.valid?
    puts "Upload not valid :("
    puts upload.errors.inspect if upload
    return nil
  end

  return upload, real_filename
rescue Mysql2::Error => e
  puts "SQL Error"
  puts e.message
  puts sql
  return nil
end
# Second pass over the posts created by this run (id > @max_start_id):
# re-reads each post's original Lithium HTML body and converts it to
# Markdown via postprocess_post_raw, saving the result.
def post_process_posts
  puts "", "Postprocessing posts..."

  current = 0
  max = Post.count

  # Speeds up the per-post body lookup below; ignore failure if the index
  # already exists from a previous run.
  mysql_query("create index idxUniqueId on message2(unique_id)") rescue nil

  Post.where('id > ?', @max_start_id).find_each do |post|
    begin
      id = post.custom_fields["import_unique_id"]
      next unless id
      raw = mysql_query("select body from message2 where unique_id = '#{id}'").first['body']
      unless raw
        puts "Missing raw for post: #{post.id}"
        next
      end
      new_raw = postprocess_post_raw(raw, post.user_id)
      post.raw = new_raw
      post.save
    rescue PrettyText::JavaScriptError
      # Cooking can fail on pathological markup; skip the post and move on.
      puts "GOT A JS error on post: #{post.id}"
      nil
    ensure
      print_status(current += 1, max)
    end
  end
end
# Converts a Lithium HTML post body to Markdown:
# - strips OLD_DOMAIN from links/images so they become relative
# - remaps internal links through the generated permalinks
# - re-uploads /image-id/ images found in UPLOAD_DIR
# - converts the HTML via ReverseMarkdown and cleans up smileys / nbsp noise
def postprocess_post_raw(raw, user_id)

  doc = Nokogiri::HTML.fragment(raw)

  doc.css("a,img").each do |l|
    uri = URI.parse(l["href"] || l["src"]) rescue nil
    if uri && uri.hostname == OLD_DOMAIN
      uri.hostname = nil
    end

    if uri && !uri.hostname
      if l["href"]
        l["href"] = uri.path
        # we have an internal link, lets see if we can remap it?
        permalink = Permalink.find_by_url(uri.path) rescue nil

        if l["href"] && permalink && permalink.target_url
          l["href"] = permalink.target_url
        end
      elsif l["src"]

        # we need an upload here
        upload_name = $1 if uri.path =~ /image-id\/([^\/]+)/
        # FIX: reset per element — previously `image` leaked across loop
        # iterations, so an <img> with no matching file silently reused the
        # previous element's image.
        image = nil
        if upload_name
          png = UPLOAD_DIR + "/" + upload_name + ".png"
          jpg = UPLOAD_DIR + "/" + upload_name + ".jpg"
          gif = UPLOAD_DIR + "/" + upload_name + ".gif"

          # check to see if we have it
          # FIX: File.exist? — File.exists? was removed in Ruby 3.2.
          if File.exist?(png)
            image = png
          elsif File.exist?(jpg)
            image = jpg
          elsif File.exist?(gif)
            image = gif
          end
        end

        if image
          File.open(image) do |file|
            # FIX: keep the real extension — gifs were previously uploaded
            # under an "image.jpg" name.
            upload = UploadCreator.new(file, "image#{File.extname(image)}").create_for(user_id)
            l["src"] = upload.url
          end
        else
          puts "image was missing #{l["src"]}"
        end

      end

    end
  end

  raw = ReverseMarkdown.convert(doc.to_s)
  # Drop lines that are only a non-breaking space (written as \u00A0 so the
  # intent survives whitespace-mangling editors).
  raw.gsub!(/^\s*\u00A0\s*$/, "")
  # ugly quotes
  raw.gsub!(/^>[\s\*]*$/, "")
  # Remap Lithium smiley tokens to Discourse emoji names.
  raw.gsub!(/:([a-z]+):/) do |match|
    ":#{SMILEY_SUBS[$1] || $1}:"
  end
  # nbsp central
  raw.gsub!(/([a-zA-Z0-9])\u00A0([a-zA-Z0-9])/, "\\1 \\2")
  raw
end
# Generates a random throwaway address for imported users missing an email.
def fake_email
  "#{SecureRandom.hex}@domain.com"
end
# Runs a raw SQL query against the Lithium dump. cache_rows: true keeps
# result rows addressable so result sets can be iterated more than once
# (several importers re-scan the same result).
def mysql_query(sql)
  @client.query(sql, cache_rows: true)
end
end
|
|
|
|
|
|
|
|
# Kick off the import when this script is run directly.
ImportScripts::Lithium.new.perform