2019-05-03 06:17:27 +08:00
|
|
|
# frozen_string_literal: true
|
|
|
|
|
2022-03-21 22:28:52 +08:00
|
|
|
require "migration/base_dropper"
|
2019-05-10 00:01:35 +08:00
|
|
|
|
2015-06-12 18:02:36 +08:00
|
|
|
class DbHelper
|
2024-10-16 10:09:07 +08:00
|
|
|
# All user-updatable text-ish columns (char/text types) in the public
# schema, with their max length; these are the remap/replace candidates.
REMAP_SQL = <<~SQL
  SELECT table_name::text, column_name::text, character_maximum_length
  FROM information_schema.columns
  WHERE table_schema = 'public'
  AND is_updatable = 'YES'
  AND (data_type LIKE 'char%' OR data_type LIKE 'text%')
  ORDER BY table_name, column_name
SQL

# Readonly triggers (installed by Migration::BaseDropper) that mark
# tables/columns which must not be touched by remaps.
TRIGGERS_SQL = <<~SQL
  SELECT trigger_name::text
  FROM information_schema.triggers
  WHERE trigger_name LIKE '%_readonly'
SQL

# "table.column" entries whose replaced value may be truncated with
# LEFT(...) to fit the column's length constraint. Frozen: constants
# should not be mutated at runtime.
TRUNCATABLE_COLUMNS = ["topic_links.url"].freeze
|
2023-01-09 20:10:19 +08:00
|
|
|
|
2019-07-31 23:30:08 +08:00
|
|
|
# Replaces every occurrence of +from+ with +to+ in all text columns of all
# user-updatable tables (except those readonly-marked or excluded).
#
# @param from [String] substring to search for
# @param to [String] replacement substring
# @param anchor_left [Boolean] when true, match only at the start of the value
# @param anchor_right [Boolean] when true, match only at the end of the value
# @param excluded_tables [Array<String>] table names to skip
# @param verbose [Boolean] when true, prints per-table update/skip counts
# @param skip_max_length_violations [Boolean] when true, silently skips rows
#   whose replaced value would exceed the column's length constraint instead
#   of raising PG::StringDataRightTruncation
def self.remap(
  from,
  to,
  anchor_left: false,
  anchor_right: false,
  excluded_tables: [],
  verbose: false,
  skip_max_length_violations: false
)
  text_columns = find_text_columns(excluded_tables)

  return if text_columns.empty?

  # Unanchored sides get a SQL LIKE wildcard.
  pattern = "#{anchor_left ? "" : "%"}#{from}#{anchor_right ? "" : "%"}"

  text_columns.each do |table, columns|
    query_parts = build_remap_query_parts(table, columns, skip_max_length_violations)

    begin
      rows_updated = DB.exec(<<~SQL, from: from, to: to, pattern: pattern)
        UPDATE "#{table}"
        SET #{query_parts[:updates].join(", ")}
        WHERE #{query_parts[:conditions].join(" OR ")}
      SQL
    rescue PG::StringDataRightTruncation => e
      # Provide more context in the exception message
      raise_contextualized_remap_exception(e, table, query_parts[:length_constrained_columns])
    end

    if verbose
      skipped_counts =
        skipped_remap_counts(table, from, to, pattern, query_parts, skip_max_length_violations)

      log_remap_message(table, rows_updated, skipped_counts)
    end
  end

  finish!
end
|
|
|
|
|
2019-07-31 23:30:08 +08:00
|
|
|
# Runs REGEXP_REPLACE(pattern -> replacement) over every text column of
# every user-updatable table (minus readonly/excluded ones), then flushes
# the relevant caches.
#
# @param pattern [String] regular expression to search for
# @param replacement [String] replacement text
# @param flags [String] PostgreSQL regex flags (default: global, case-insensitive)
# @param match [String] SQL match operator used in the WHERE clause
# @param excluded_tables [Array<String>] table names to skip
# @param verbose [Boolean] when true, prints "table=rows" for touched tables
def self.regexp_replace(
  pattern,
  replacement,
  flags: "gi",
  match: "~*",
  excluded_tables: [],
  verbose: false
)
  candidate_columns = find_text_columns(excluded_tables)

  candidate_columns.each do |table, columns|
    assignments =
      columns
        .map do |column|
          replaced = %|REGEXP_REPLACE("#{column[:name]}", :pattern, :replacement, :flags)|
          # Truncatable columns get wrapped in LEFT(...) to respect max length.
          %|"#{column[:name]}" = #{truncate(replaced, table, column)}|
        end
        .join(", ")

    predicate =
      columns
        .map { |column| %|"#{column[:name]}" IS NOT NULL AND "#{column[:name]}" #{match} :pattern| }
        .join(" OR ")

    rows = DB.exec(<<~SQL, pattern: pattern, replacement: replacement, flags: flags, match: match)
      UPDATE "#{table}"
      SET #{assignments}
      WHERE #{predicate}
    SQL

    puts "#{table}=#{rows}" if verbose && rows > 0
  end

  finish!
end
|
|
|
|
|
2018-12-27 00:34:49 +08:00
|
|
|
# Searches every remappable text column for +needle+ and returns a hash of
# "table.column" => [matching values].
#
# @param needle [String] substring to look for
# @param anchor_left [Boolean] when true, match only at the start of the value
# @param anchor_right [Boolean] when true, match only at the end of the value
# @param excluded_tables [Array<String>] table names to skip
# @return [Hash{String => Array}] matches keyed by "table.column"
def self.find(needle, anchor_left: false, anchor_right: false, excluded_tables: [])
  # Unanchored sides get a SQL LIKE wildcard.
  like = "#{anchor_left ? "" : "%"}#{needle}#{anchor_right ? "" : "%"}"

  DB
    .query(REMAP_SQL)
    .each_with_object({}) do |r, found|
      next if excluded_tables.include?(r.table_name)

      rows = DB.query(<<~SQL, like: like)
        SELECT "#{r.column_name}"
        FROM "#{r.table_name}"
        WHERE "#{r.column_name}" LIKE :like
      SQL

      unless rows.empty?
        found["#{r.table_name}.#{r.column_name}"] = rows.map { |row| row.public_send(r.column_name) }
      end
    end
end
|
|
|
|
|
2019-07-30 00:43:40 +08:00
|
|
|
private
|
|
|
|
|
|
|
|
# Flushes caches that may hold values made stale by direct database-wide
# text rewrites: site settings, theme site cache, optimized site icons,
# and the cached banner JSON.
def self.finish!
  SiteSetting.refresh!
  Theme.expire_site_cache!
  SiteIconManager.ensure_optimized!
  ApplicationController.banner_json_cache.clear
end
|
|
|
|
|
2019-08-12 22:49:22 +08:00
|
|
|
# Collects remappable text columns grouped by table, skipping excluded
# tables and any table/column protected by a readonly trigger.
#
# @param excluded_tables [Array<String>] table names to skip
# @return [Hash{String => Array<Hash>}] table name => [{ name:, max_length: }]
def self.find_text_columns(excluded_tables)
  readonly_triggers = DB.query(TRIGGERS_SQL).map(&:trigger_name).to_set
  columns_by_table = Hash.new { |hash, key| hash[key] = [] }

  DB
    .query(REMAP_SQL)
    .each do |row|
      next if excluded_tables.include?(row.table_name)
      # Skip anything guarded by a table- or column-level readonly trigger.
      next if readonly_triggers.include?(Migration::BaseDropper.readonly_trigger_name(row.table_name))
      next if readonly_triggers.include?(
        Migration::BaseDropper.readonly_trigger_name(row.table_name, row.column_name),
      )

      columns_by_table[row.table_name] << {
        name: row.column_name,
        max_length: row.character_maximum_length,
      }
    end

  columns_by_table
end
|
2019-08-12 23:12:06 +08:00
|
|
|
|
|
|
|
# Wraps +sql+ in LEFT(..., max_length) when the column is allowed to be
# truncated (listed in TRUNCATABLE_COLUMNS) and has a length constraint;
# otherwise returns +sql+ unchanged.
def self.truncate(sql, table, column)
  qualified_name = "#{table}.#{column[:name]}"
  return sql unless column[:max_length] && TRUNCATABLE_COLUMNS.include?(qualified_name)

  "LEFT(#{sql}, #{column[:max_length]})"
end
|
2024-11-15 18:42:25 +08:00
|
|
|
|
|
|
|
# Builds the SQL fragments `remap` needs for one table.
#
# Returns a hash with:
#   :updates                    - `"col" = REPLACE(...)` assignment fragments
#   :conditions                 - per-column WHERE fragments (joined with OR)
#   :skipped_sums               - SUM(CASE ...) fragments counting rows skipped
#                                 because of length constraints (only populated
#                                 when skip_max_length_violations is true)
#   :length_constrained_columns - "name(max_length)" labels used for error context
def self.build_remap_query_parts(table, columns, skip_max_length_violations)
  columns.each_with_object(
    { updates: [], conditions: [], skipped_sums: [], length_constrained_columns: [] },
  ) do |column, parts|
    replace = %|REPLACE("#{column[:name]}", :from, :to)|
    # Truncatable columns get wrapped in LEFT(...) to respect max length.
    replace = truncate(replace, table, column)

    if column[:max_length].present?
      # Keep track of columns with length constraints for error messages
      parts[:length_constrained_columns] << "#{column[:name]}(#{column[:max_length]})"
    end

    # Build SQL update statements for each column
    parts[:updates] << %("#{column[:name]}" = #{replace})

    # Build the base SQL condition clause for each column
    basic_condition = %("#{column[:name]}" IS NOT NULL AND "#{column[:name]}" LIKE :pattern)

    if skip_max_length_violations && column[:max_length].present?
      # Extend base condition to skip updates that would violate the column length constraint
      parts[
        :conditions
      ] << "(#{basic_condition} AND LENGTH(#{replace}) <= #{column[:max_length]})"

      # Build SQL sum statements for each column to count skipped updates.
      # This helps us know the number of updates skipped due to length
      # constraint violations on this column.
      parts[:skipped_sums] << <<~SQL
        SUM (
          CASE
          WHEN #{basic_condition} AND LENGTH(#{replace}) > #{column[:max_length]} THEN 1 ELSE 0
          END
        ) AS #{column[:name]}_skipped
      SQL
    else
      parts[:conditions] << "(#{basic_condition})"
    end
  end
end
|
|
|
|
|
|
|
|
# Prints a per-table remap summary like:
#   "topics=12 SKIPPED: title: 3 updates"
# Prints nothing when there were no updates and nothing was skipped.
def self.log_remap_message(table, rows_updated, skipped_counts)
  return if rows_updated == 0 && skipped_counts.blank?

  # +"" makes the interpolated string mutable for appending below.
  message = +"#{table}=#{rows_updated}"

  if skipped_counts&.any?
    details =
      skipped_counts.map do |column, count|
        "#{column.delete_suffix("_skipped")}: #{count} #{"update".pluralize(count)}"
      end
    message << " SKIPPED: " << details.join(", ")
  end

  puts message
end
|
|
|
|
|
|
|
|
# Counts, per column, how many rows a remap skipped because the replaced
# value would have violated the column's length constraint.
# Returns nil when skipping was disabled or no column could be skipped;
# otherwise a hash of "col_skipped" => count with zero counts removed.
def self.skipped_remap_counts(table, from, to, pattern, query_parts, skip_max_length_violations)
  return if !skip_max_length_violations || query_parts[:skipped_sums].empty?

  counts = DB.query_hash(<<~SQL, from: from, to: to, pattern: pattern).first
    SELECT #{query_parts[:skipped_sums].join(", ")}
    FROM "#{table}"
  SQL

  counts.select { |_column, count| count.to_i > 0 }
end
|
|
|
|
|
|
|
|
# Re-raises a PG::StringDataRightTruncation with the table name and the
# length-constrained columns appended, so the failing remap is identifiable.
def self.raise_contextualized_remap_exception(error, table, columns)
  constraint_details = "columns with length constraints: #{columns.join(", ")}"
  raise PG::StringDataRightTruncation, " #{error.message.strip} (table: #{table}, #{constraint_details})"
end
|
2015-06-12 18:02:36 +08:00
|
|
|
end
|