# frozen_string_literal: true

# we should set the locale before the migration
task "set_locale" do
  begin
    I18n.locale =
      begin
        (SiteSetting.default_locale || :en)
      rescue StandardError
        :en
      end
  rescue I18n::InvalidLocale
    I18n.locale = :en
  end
end

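# MultisiteTestHelpers gates the extra work for the multisite test database
# (discourse_test_multisite). Roughly: both helpers return false whenever RAILS_DB
# already pins a specific database or SKIP_MULTISITE / SKIP_TEST_DATABASE is set.
# Illustrative (hypothetical) invocations:
#
#   SKIP_MULTISITE=1 bin/rake db:create      # skip the multisite test DB entirely
#   RAILS_ENV=test RAILS_DB=discourse_test_multisite bin/rake db:migrate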
module MultisiteTestHelpers
  def self.load_multisite?
    Rails.env.test? && !ENV["RAILS_DB"] && !ENV["SKIP_MULTISITE"]
  end

  def self.create_multisite?
    (ENV["RAILS_ENV"] == "test" || !ENV["RAILS_ENV"]) && !ENV["RAILS_DB"] &&
      !ENV["SKIP_MULTISITE"] && !ENV["SKIP_TEST_DATABASE"]
  end
end

task "db:environment:set" => [:load_config] do |_, args|
|
|
|
|
if MultisiteTestHelpers.load_multisite?
|
2024-02-27 18:33:28 +08:00
|
|
|
system(
|
|
|
|
"RAILS_ENV=test RAILS_DB=discourse_test_multisite rake db:environment:set",
|
|
|
|
exception: true,
|
|
|
|
)
|
2017-08-07 21:05:13 +08:00
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2019-06-14 12:20:48 +08:00
|
|
|
task "db:force_skip_persist" do
|
|
|
|
GlobalSetting.skip_db = true
|
|
|
|
GlobalSetting.skip_redis = true
|
|
|
|
end
|
|
|
|
|
2022-08-05 05:15:06 +08:00
|
|
|
task "db:create" => [:load_config] do |_, args|
|
|
|
|
if MultisiteTestHelpers.create_multisite?
|
|
|
|
unless system("RAILS_ENV=test RAILS_DB=discourse_test_multisite rake db:create")
|
|
|
|
STDERR.puts "-" * 80
|
|
|
|
STDERR.puts "ERROR: Could not create multisite DB. A common cause of this is a plugin"
|
|
|
|
STDERR.puts "checking the column structure when initializing, which raises an error."
|
|
|
|
STDERR.puts "-" * 80
|
|
|
|
raise "Could not initialize discourse_test_multisite"
|
|
|
|
end
|
2017-08-07 21:05:13 +08:00
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2019-06-14 12:20:48 +08:00
|
|
|
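# Reorder db:create's prerequisites so db:force_skip_persist runs first; presumably
# this keeps GlobalSetting from trying to read persisted settings out of a database
# or Redis that may not exist yet while the database is being created.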
begin
  reqs = Rake::Task["db:create"].prerequisites.map(&:to_sym)
  Rake::Task["db:create"].clear_prerequisites
  Rake::Task["db:create"].enhance(["db:force_skip_persist"] + reqs)
end

task "db:drop" => [:load_config] do |_, args|
|
|
|
|
if MultisiteTestHelpers.create_multisite?
|
2024-02-27 18:33:28 +08:00
|
|
|
system("RAILS_DB=discourse_test_multisite RAILS_ENV=test rake db:drop", exception: true)
|
2019-03-21 09:50:16 +08:00
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2022-07-06 16:39:03 +08:00
|
|
|
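# The stock Rails db:migrate and db:rollback tasks are cleared so the
# Discourse-specific definitions further down this file replace them.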
begin
  Rake::Task["db:migrate"].clear
  Rake::Task["db:rollback"].clear
end

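# Rolls back STEP migrations (default 1) against the current database connection.
# Illustrative (hypothetical) invocation:
#
#   STEP=3 bin/rake db:rollback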
task "db:rollback" => %w[environment set_locale] do |_, args|
|
|
|
|
step = ENV["STEP"] ? ENV["STEP"].to_i : 1
|
|
|
|
ActiveRecord::Base.connection.migration_context.rollback(step)
|
|
|
|
Rake::Task["db:_dump"].invoke
|
2022-07-06 16:39:03 +08:00
|
|
|
end
|
2020-04-14 09:21:31 +08:00
|
|
|
|
2022-08-05 05:15:06 +08:00
|
|
|
# Our optimized version of multisite migrate: we have many sites and we have seeds.
# This ensures we can run migrations concurrently to save huge amounts of time.
Rake::Task["multisite:migrate"].clear

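# StdOutDemux stands in for $stdout when migrations run on several threads: each
# thread appends its output to a per-thread buffer, and finish_chunk flushes that
# buffer to the real stdout in one piece so concurrent sites' logs do not
# interleave line by line.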
class StdOutDemux
  def initialize(stdout)
    @stdout = stdout
    @data = {}
  end

  def write(data)
    (@data[Thread.current] ||= +"") << data
  end

  def close
    finish_chunk
  end

  def finish_chunk
    data = @data[Thread.current]
    if data
      @stdout.write(data)
      @data.delete Thread.current
    end
  end

  def flush
    # Do nothing
  end
end

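# SeedHelper.filter builds a negative-lookahead regex from the plugin-registered
# seedfu_filter entries, so SeedFu skips any seed path the regex rejects. As a
# rough illustration, a registered filter of "foo" yields /\A(?!.*(foo)).*\z/,
# which matches every path except those containing "foo".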
class SeedHelper
  def self.paths
    DiscoursePluginRegistry.seed_paths
  end

  def self.filter
    # Allows a plugin to exclude any specified seed data files from running
    if DiscoursePluginRegistry.seedfu_filter.any?
      /\A(?!.*(#{DiscoursePluginRegistry.seedfu_filter.to_a.join("|")})).*\z/
    else
      nil
    end
  end
end

task "multisite:migrate" => %w[
|
|
|
|
db:load_config
|
|
|
|
environment
|
|
|
|
set_locale
|
|
|
|
assets:precompile:theme_transpiler
|
|
|
|
] do |_, args|
|
2020-04-14 09:21:31 +08:00
|
|
|
raise "Multisite migrate is only supported in production" if ENV["RAILS_ENV"] != "production"
|
|
|
|
|
2021-11-09 09:06:06 +08:00
|
|
|
DistributedMutex.synchronize(
|
|
|
|
"db_migration",
|
|
|
|
redis: Discourse.redis.without_namespace,
|
|
|
|
validity: 1200,
|
|
|
|
) do
|
2021-09-03 23:22:25 +08:00
|
|
|
# TODO: Switch to processes for concurrent migrations because Rails migration
|
|
|
|
# is not thread safe by default.
|
|
|
|
concurrency = 1
|
2020-04-14 09:21:31 +08:00
|
|
|
|
2021-09-03 23:22:25 +08:00
|
|
|
puts "Multisite migrator is running using #{concurrency} threads"
|
|
|
|
puts
|
2020-04-14 09:21:31 +08:00
|
|
|
|
2021-09-03 23:22:25 +08:00
|
|
|
exceptions = Queue.new
|
2020-04-14 09:21:31 +08:00
|
|
|
|
2022-05-23 21:26:13 +08:00
|
|
|
if concurrency > 1
|
|
|
|
old_stdout = $stdout
|
|
|
|
$stdout = StdOutDemux.new($stdout)
|
|
|
|
end
|
2020-04-14 09:21:31 +08:00
|
|
|
|
2021-09-03 23:22:25 +08:00
|
|
|
SeedFu.quiet = true
|
2020-04-14 09:21:31 +08:00
|
|
|
|
2022-05-04 14:12:18 +08:00
|
|
|
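    # execute_concurrently is a simple worker queue: every site's connection name is
    # pushed onto a queue, followed by one :done sentinel per worker thread; each
    # thread pops names, runs the given block on that site's connection, and stops
    # when it pops :done. Failures are collected in `exceptions` rather than raised.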
    def execute_concurrently(concurrency, exceptions)
      queue = Queue.new

      RailsMultisite::ConnectionManagement.each_connection { |db| queue << db }

      concurrency.times { queue << :done }

      (1..concurrency)
        .map do
          Thread.new do
            while true
              db = queue.pop
              break if db == :done

              RailsMultisite::ConnectionManagement.with_connection(db) do
                begin
                  yield(db) if block_given?
                rescue => e
                  exceptions << [db, e]
                ensure
                  begin
                    $stdout.finish_chunk if concurrency > 1
                  rescue => ex
                    STDERR.puts ex.inspect
                    STDERR.puts ex.backtrace
                  end
                end
              end
            end
          end
        end
        .each(&:join)
    end

    def check_exceptions(exceptions)
      if exceptions.length > 0
        STDERR.puts
        STDERR.puts "-" * 80
        STDERR.puts "#{exceptions.length} migrations failed!"
        while !exceptions.empty?
          db, e = exceptions.pop
          STDERR.puts
          STDERR.puts "Failed to migrate #{db}"
          STDERR.puts e.inspect
          STDERR.puts e.backtrace
          STDERR.puts
        end
        exit 1
      end
    end

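    # The actual work happens in three passes: migrate every site, run the shared
    # 001_refresh seeds once outside the per-site loop, then seed each site (and
    # optimize site icons unless post-deployment migrations or icon optimization
    # are skipped).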
    execute_concurrently(concurrency, exceptions) do |db|
      puts "Migrating #{db}"
      ActiveRecord::Tasks::DatabaseTasks.migrate
    end

    check_exceptions(exceptions)

    SeedFu.seed(SeedHelper.paths, /001_refresh/)

    execute_concurrently(concurrency, exceptions) do |db|
      puts "Seeding #{db}"
      SeedFu.seed(SeedHelper.paths, SeedHelper.filter)

      if !Discourse.skip_post_deployment_migrations? && ENV["SKIP_OPTIMIZE_ICONS"] != "1"
        SiteIconManager.ensure_optimized!
      end
    end

    $stdout = old_stdout if concurrency > 1
    check_exceptions(exceptions)

    Rake::Task["db:_dump"].invoke
  end
end

task "db:migrate" => %w[
|
|
|
|
load_config
|
|
|
|
environment
|
|
|
|
set_locale
|
2023-10-02 18:36:06 +08:00
|
|
|
assets:precompile:theme_transpiler
|
2023-08-25 01:24:43 +08:00
|
|
|
] do |_, args|
|
2022-08-05 05:15:06 +08:00
|
|
|
DistributedMutex.synchronize(
|
|
|
|
"db_migration",
|
|
|
|
redis: Discourse.redis.without_namespace,
|
|
|
|
validity: 300,
|
|
|
|
) do
|
|
|
|
migrations = ActiveRecord::Base.connection.migration_context.migrations
|
|
|
|
now_timestamp = Time.now.utc.strftime("%Y%m%d%H%M%S").to_i
|
|
|
|
epoch_timestamp = Time.at(0).utc.strftime("%Y%m%d%H%M%S").to_i
|
2019-12-31 11:07:44 +08:00
|
|
|
|
2022-08-05 05:15:06 +08:00
|
|
|
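    # Sanity-check the migration timestamps: a version later than the current UTC
    # time (for example 21000101000000) or earlier than the epoch timestamp
    # 19700101000000 aborts the run.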
    if migrations.last.version > now_timestamp
      raise "Migration #{migrations.last.version} is timestamped in the future"
    end
    if migrations.first.version < epoch_timestamp
      raise "Migration #{migrations.first.version} is timestamped before the epoch"
    end

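    # Enable the pg_trgm (trigram matching) and unaccent PostgreSQL extensions if
    # they are missing; a failure here is logged rather than aborting the migration.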
    %i[pg_trgm unaccent].each do |extension|
      begin
        DB.exec "CREATE EXTENSION IF NOT EXISTS #{extension}"
      rescue => e
        STDERR.puts "Cannot enable database extension #{extension}"
        STDERR.puts e
      end
    end

    ActiveRecord::Tasks::DatabaseTasks.migrate

    SeedFu.quiet = true

    begin
      SeedFu.seed(SeedHelper.paths, SeedHelper.filter)
    rescue => error
      error.backtrace.each { |l| puts l }
    end

    Rake::Task["db:schema:cache:dump"].invoke if Rails.env.development? && !ENV["RAILS_DB"]

    if !Discourse.skip_post_deployment_migrations? && ENV["SKIP_OPTIMIZE_ICONS"] != "1"
      SiteIconManager.ensure_optimized!
    end
  end

  if !Discourse.is_parallel_test? && MultisiteTestHelpers.load_multisite?
    system("RAILS_DB=discourse_test_multisite rake db:migrate", exception: true)
  end
end

task "test:prepare" => "environment" do
|
2014-07-29 00:40:00 +08:00
|
|
|
I18n.locale =
|
|
|
|
begin
|
|
|
|
SiteSetting.default_locale
|
|
|
|
rescue StandardError
|
|
|
|
:en
|
2023-01-09 20:10:19 +08:00
|
|
|
end
|
2017-11-17 03:42:38 +08:00
|
|
|
SeedFu.seed(DiscoursePluginRegistry.seed_paths)
|
2013-10-25 07:31:33 +08:00
|
|
|
end
|
2014-02-01 03:16:15 +08:00
|
|
|
|
2014-09-11 10:53:21 +08:00
|
|
|
task "db:api_test_seed" => "environment" do
|
|
|
|
puts "Loading test data for discourse_api"
|
|
|
|
load Rails.root + "db/api_test_seeds.rb"
|
|
|
|
end
|
|
|
|
|
2017-08-25 22:29:04 +08:00
|
|
|
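# print_table renders an array of hashes as a padded, pipe-separated text table,
# sized to the widest value in each column. A rough illustration (hypothetical data):
#
#   print_table([{ table: "posts", rows: 120 }, { table: "users", rows: 7 }])
#   # table | rows
#   # -----------
#   # posts | 120
#   # users | 7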
def print_table(array)
  width = array[0].keys.map { |k| k.to_s.length }
  cols = array[0].keys.length

  array.each do |row|
    row.each_with_index { |(_, val), i| width[i] = [width[i].to_i, val.to_s.length].max }
  end

  array[0].keys.each_with_index do |col, i|
    print col.to_s.ljust(width[i], " ")
    if i == cols - 1
      puts
    else
      print " | "
    end
  end

  puts "-" * (width.sum + width.length)

  array.each do |row|
    row.each_with_index do |(_, val), i|
      print val.to_s.ljust(width[i], " ")
      if i == cols - 1
        puts
      else
        print " | "
      end
    end
  end
end

desc "Statistics about database"
|
|
|
|
task "db:stats" => "environment" do
|
|
|
|
sql = <<~SQL
|
|
|
|
select table_name,
|
|
|
|
(
|
|
|
|
select reltuples::bigint
|
|
|
|
from pg_class
|
|
|
|
where oid = ('public.' || table_name)::regclass
|
|
|
|
) AS row_estimate,
|
2019-08-02 15:25:30 +08:00
|
|
|
pg_size_pretty(pg_table_size(quote_ident(table_name))) table_size,
|
|
|
|
pg_size_pretty(pg_indexes_size(quote_ident(table_name))) index_size,
|
|
|
|
pg_size_pretty(pg_total_relation_size(quote_ident(table_name))) total_size
|
2017-08-25 22:29:04 +08:00
|
|
|
from information_schema.tables
|
|
|
|
where table_schema = 'public'
|
2019-08-02 15:25:30 +08:00
|
|
|
order by pg_total_relation_size(quote_ident(table_name)) DESC
|
2017-08-25 22:29:04 +08:00
|
|
|
SQL
|
|
|
|
|
|
|
|
puts
|
2018-06-19 14:13:14 +08:00
|
|
|
print_table(DB.query_hash(sql))
|
2017-08-25 22:29:04 +08:00
|
|
|
end
|
|
|
|
|
2021-03-10 07:59:20 +08:00
|
|
|
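# If post-deployment migrations are being skipped, re-exec the current rake process
# (its command line is read back from /proc, so this relies on Linux) with
# SKIP_POST_DEPLOYMENT_MIGRATIONS reset to "0", so tasks that depend on this one
# always see a fully migrated schema.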
task "db:ensure_post_migrations" do
|
|
|
|
if %w[1 true].include?(ENV["SKIP_POST_DEPLOYMENT_MIGRATIONS"])
|
|
|
|
cmd = `cat /proc/#{Process.pid}/cmdline | xargs -0 echo`
|
|
|
|
ENV["SKIP_POST_DEPLOYMENT_MIGRATIONS"] = "0"
|
|
|
|
exec cmd
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
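# NormalizedIndex compares CREATE INDEX statements while ignoring the index name:
# the name token is replaced with the literal "idx", so two definitions that differ
# only in name compare as equal. For example (illustrative statements), both
#
#   CREATE INDEX index_posts_on_user_id ON public.posts USING btree (user_id)
#   CREATE INDEX posts_by_user ON public.posts USING btree (user_id)
#
# normalize to "CREATE INDEX idx ON public.posts USING btree (user_id)".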
class NormalizedIndex
  attr_accessor :name, :original, :normalized, :table

  def initialize(original)
    @original = original
    @normalized = original.sub(/(create.*index )(\S+)(.*)/i, '\1idx\3')
    @name = original.match(/create.*index (\S+)/i)[1]
    @table = original.match(/create.*index \S+ on public\.(\S+)/i)[1]
  end

  def ==(other)
    other&.normalized == normalized
  end
end

def normalize_index_names(names)
  names.map { |name| NormalizedIndex.new(name) }.reject { |i| i.name.include?("ccnew") }
end

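# db:validate_indexes boots a throwaway database (TemporaryDb), migrates it to get
# the canonical set of index definitions, then diffs every site's live indexes
# against that set, reporting renamed, missing and extra indexes. Passing the "fix"
# argument or FIX_INDEXES=1 applies the corrections. Illustrative (hypothetical)
# invocations:
#
#   bin/rake db:validate_indexes
#   FIX_INDEXES=1 bin/rake db:validate_indexes
#   bin/rake "db:validate_indexes[fix]"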
desc "Validate indexes"
|
|
|
|
task "db:validate_indexes", [:arg] => %w[db:ensure_post_migrations environment] do |_, args|
|
2021-03-24 05:53:01 +08:00
|
|
|
db = TemporaryDb.new
|
2021-03-10 07:59:20 +08:00
|
|
|
db.start
|
2021-04-29 04:12:08 +08:00
|
|
|
db.migrate
|
2021-03-10 07:59:20 +08:00
|
|
|
|
|
|
|
ActiveRecord::Base.establish_connection(
|
|
|
|
adapter: "postgresql",
|
|
|
|
database: "discourse",
|
|
|
|
port: db.pg_port,
|
|
|
|
host: "localhost",
|
|
|
|
)
|
|
|
|
|
|
|
|
expected = DB.query_single <<~SQL
|
|
|
|
SELECT indexdef FROM pg_indexes
|
|
|
|
WHERE schemaname = 'public'
|
|
|
|
ORDER BY indexdef
|
|
|
|
SQL
|
|
|
|
|
|
|
|
expected_tables = DB.query_single <<~SQL
|
|
|
|
SELECT tablename
|
|
|
|
FROM pg_tables
|
|
|
|
WHERE schemaname = 'public'
|
|
|
|
SQL
|
|
|
|
|
|
|
|
ActiveRecord::Base.establish_connection
|
|
|
|
|
|
|
|
db.stop
|
|
|
|
|
|
|
|
puts
|
|
|
|
|
|
|
|
fix_indexes = (ENV["FIX_INDEXES"] == "1" || args[:arg] == "fix")
|
|
|
|
inconsistency_found = false
|
|
|
|
|
|
|
|
RailsMultisite::ConnectionManagement.each_connection do |db_name|
|
|
|
|
puts "Testing indexes on the #{db_name} database", ""
|
|
|
|
|
|
|
|
current = DB.query_single <<~SQL
|
|
|
|
SELECT indexdef FROM pg_indexes
|
|
|
|
WHERE schemaname = 'public'
|
|
|
|
ORDER BY indexdef
|
|
|
|
SQL
|
|
|
|
|
|
|
|
missing = expected - current
|
|
|
|
extra = current - expected
|
|
|
|
|
|
|
|
extra.reject! { |x| x =~ /idx_recent_regular_post_search_data/ }
|
|
|
|
|
|
|
|
renames = []
|
|
|
|
normalized_missing = normalize_index_names(missing)
|
|
|
|
normalized_extra = normalize_index_names(extra)
|
|
|
|
|
|
|
|
normalized_extra.each do |extra_index|
|
|
|
|
if missing_index = normalized_missing.select { |x| x == extra_index }.first
|
|
|
|
renames << [extra_index, missing_index]
|
|
|
|
missing.delete missing_index.original
|
|
|
|
extra.delete extra_index.original
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
next if db_name != "default" && renames.length == 0 && missing.length == 0 && extra.length == 0
|
|
|
|
|
|
|
|
if renames.length > 0
|
|
|
|
inconsistency_found = true
|
|
|
|
|
|
|
|
puts "Renamed indexes"
|
|
|
|
renames.each do |extra_index, missing_index|
|
|
|
|
puts "#{extra_index.name} should be renamed to #{missing_index.name}"
|
|
|
|
end
|
|
|
|
puts
|
|
|
|
|
|
|
|
if fix_indexes
|
|
|
|
puts "fixing indexes"
|
|
|
|
|
|
|
|
renames.each do |extra_index, missing_index|
|
|
|
|
DB.exec "ALTER INDEX #{extra_index.name} RENAME TO #{missing_index.name}"
|
|
|
|
end
|
|
|
|
|
|
|
|
puts
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
if missing.length > 0
|
|
|
|
inconsistency_found = true
|
|
|
|
|
|
|
|
puts "Missing Indexes", ""
|
|
|
|
missing.each { |m| puts m }
|
|
|
|
if fix_indexes
|
|
|
|
puts "Adding missing indexes..."
|
|
|
|
missing.each do |m|
|
|
|
|
begin
|
|
|
|
DB.exec(m)
|
|
|
|
rescue => e
|
|
|
|
$stderr.puts "Error running: #{m} - #{e}"
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
else
|
|
|
|
puts "No missing indexes", ""
|
|
|
|
end
|
|
|
|
|
|
|
|
if extra.length > 0
|
|
|
|
inconsistency_found = true
|
|
|
|
|
|
|
|
puts "", "Extra Indexes", ""
|
|
|
|
extra.each { |e| puts e }
|
|
|
|
|
|
|
|
if fix_indexes
|
|
|
|
puts "Removing extra indexes"
|
|
|
|
extra.each do |statement|
|
|
|
|
if match = /create .*index (\S+) on public\.(\S+)/i.match(statement)
|
|
|
|
index_name, table_name = match[1], match[2]
|
|
|
|
if expected_tables.include?(table_name)
|
|
|
|
puts "Dropping #{index_name}"
|
|
|
|
begin
|
|
|
|
DB.exec("DROP INDEX #{index_name}")
|
|
|
|
rescue => e
|
|
|
|
$stderr.puts "Error dropping index #{index_name} - #{e}"
|
|
|
|
end
|
|
|
|
else
|
|
|
|
$stderr.puts "Skipping #{index_name} since #{table_name} should not exist - maybe an old plugin created it"
|
|
|
|
end
|
|
|
|
else
|
|
|
|
$stderr.puts "ERROR - BAD REGEX - UNABLE TO PARSE INDEX - #{statement}"
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
else
|
|
|
|
puts "No extra indexes", ""
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
exit 1 if inconsistency_found && !fix_indexes
|
|
|
|
end
|
|
|
|
|
2014-02-01 03:16:15 +08:00
|
|
|
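# db:rebuild_indexes works by parking the live tables in a backup schema, running a
# fresh migrate to capture pristine index definitions, moving the original tables
# back, and then dropping and recreating their indexes from those definitions. The
# site stays in readonly mode for the duration.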
desc "Rebuild indexes"
|
|
|
|
task "db:rebuild_indexes" => "environment" do
|
|
|
|
if Import.backup_tables_count > 0
|
|
|
|
raise "Backup from a previous import exists. Drop them before running this job with rake import:remove_backup, or move them to another schema."
|
|
|
|
end
|
|
|
|
|
2014-02-13 12:38:28 +08:00
|
|
|
Discourse.enable_readonly_mode
|
|
|
|
|
2014-02-01 03:16:15 +08:00
|
|
|
backup_schema = Jobs::Importer::BACKUP_SCHEMA
|
2018-06-19 14:13:14 +08:00
|
|
|
table_names =
|
|
|
|
DB.query_single(
|
|
|
|
"select table_name from information_schema.tables where table_schema = 'public'",
|
|
|
|
)
|
2014-02-01 03:16:15 +08:00
|
|
|
|
|
|
|
begin
|
|
|
|
# Move all tables to the backup schema:
|
2018-06-19 14:13:14 +08:00
|
|
|
DB.exec("DROP SCHEMA IF EXISTS #{backup_schema} CASCADE")
|
|
|
|
DB.exec("CREATE SCHEMA #{backup_schema}")
|
2014-02-01 03:16:15 +08:00
|
|
|
table_names.each do |table_name|
|
2018-06-19 14:13:14 +08:00
|
|
|
DB.exec("ALTER TABLE public.#{table_name} SET SCHEMA #{backup_schema}")
|
2014-02-01 03:16:15 +08:00
|
|
|
end
|
|
|
|
|
|
|
|
# Create a new empty db
|
|
|
|
Rake::Task["db:migrate"].invoke
|
|
|
|
|
|
|
|
# Fetch index definitions from the new db
|
|
|
|
index_definitions = {}
|
|
|
|
table_names.each do |table_name|
|
2018-06-19 14:13:14 +08:00
|
|
|
index_definitions[table_name] = DB.query_single(
|
|
|
|
"SELECT indexdef FROM pg_indexes WHERE tablename = '#{table_name}' and schemaname = 'public';",
|
|
|
|
)
|
2014-02-01 03:16:15 +08:00
|
|
|
end
|
|
|
|
|
|
|
|
# Drop the new tables
|
2018-06-19 14:13:14 +08:00
|
|
|
table_names.each { |table_name| DB.exec("DROP TABLE public.#{table_name}") }
|
2014-02-01 03:16:15 +08:00
|
|
|
|
|
|
|
# Move the old tables back to the public schema
|
|
|
|
table_names.each do |table_name|
|
2018-06-19 14:13:14 +08:00
|
|
|
DB.exec("ALTER TABLE #{backup_schema}.#{table_name} SET SCHEMA public")
|
2014-02-01 03:16:15 +08:00
|
|
|
end
|
|
|
|
|
|
|
|
# Drop their indexes
|
2018-06-19 14:13:14 +08:00
|
|
|
index_names =
|
|
|
|
DB.query_single(
|
|
|
|
"SELECT indexname FROM pg_indexes WHERE schemaname = 'public' AND tablename IN ('#{table_names.join("', '")}')",
|
|
|
|
)
|
2014-02-01 03:16:15 +08:00
|
|
|
index_names.each do |index_name|
|
|
|
|
begin
|
|
|
|
puts index_name
|
2018-06-19 14:13:14 +08:00
|
|
|
DB.exec("DROP INDEX public.#{index_name}")
|
2016-04-08 02:32:31 +08:00
|
|
|
rescue ActiveRecord::StatementInvalid
|
2014-02-01 03:16:15 +08:00
|
|
|
# It's this:
|
|
|
|
# PG::Error: ERROR: cannot drop index category_users_pkey because constraint category_users_pkey on table category_users requires it
|
|
|
|
# HINT: You can drop constraint category_users_pkey on table category_users instead.
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
# Create the indexes
|
|
|
|
table_names.each do |table_name|
|
|
|
|
index_definitions[table_name].each do |index_def|
|
|
|
|
begin
|
2018-06-19 14:13:14 +08:00
|
|
|
DB.exec(index_def)
|
2016-04-08 02:32:31 +08:00
|
|
|
rescue ActiveRecord::StatementInvalid
|
2014-02-01 03:16:15 +08:00
|
|
|
# Trying to recreate a primary key
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
rescue StandardError
|
|
|
|
# Can we roll this back?
|
|
|
|
raise
|
|
|
|
ensure
|
2014-02-13 12:38:28 +08:00
|
|
|
Discourse.disable_readonly_mode
|
2014-02-01 03:16:15 +08:00
|
|
|
end
|
2014-02-13 12:38:28 +08:00
|
|
|
end
|
2021-09-10 03:42:10 +08:00
|
|
|
|
|
|
|
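# Prints a single JSON object, {"status":"ok"} or {"status":"error"}, depending on
# whether the Rails environment loads and a trivial query succeeds.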
desc "Check that the DB can be accessed"
|
|
|
|
task "db:status:json" do
|
2021-11-11 21:16:53 +08:00
|
|
|
begin
|
|
|
|
Rake::Task["environment"].invoke
|
|
|
|
DB.query("SELECT 1")
|
|
|
|
rescue StandardError
|
|
|
|
puts({ status: "error" }.to_json)
|
|
|
|
else
|
|
|
|
puts({ status: "ok" }.to_json)
|
2021-09-10 03:42:10 +08:00
|
|
|
end
|
|
|
|
end
|
2023-06-21 21:57:16 +08:00
|
|
|
|
|
|
|
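# Widens every remaining 4-byte integer column whose name ends in "notification_id"
# (plus notifications.id itself) to BIGINT on the current database connection.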
desc "Grow notification id column to a big int in case of overflow"
|
|
|
|
task "db:resize:notification_id" => :environment do
|
|
|
|
sql = <<~SQL
|
|
|
|
SELECT table_name, column_name FROM INFORMATION_SCHEMA.columns
|
|
|
|
WHERE (column_name like '%notification_id' OR column_name = 'id' and table_name = 'notifications') AND data_type = 'integer'
|
|
|
|
SQL
|
|
|
|
|
|
|
|
DB
|
|
|
|
.query(sql)
|
|
|
|
.each do |row|
|
|
|
|
puts "Changing #{row.table_name}(#{row.column_name}) to a bigint"
|
|
|
|
DB.exec("ALTER table #{row.table_name} ALTER COLUMN #{row.column_name} TYPE BIGINT")
|
|
|
|
end
|
|
|
|
end
|