# frozen_string_literal: true
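
# Benchmarks a local Discourse checkout: boots the app in the "profile"
# environment under Puma or Unicorn, seeds a profiling database, then runs
# Apache Bench against a set of key routes (anonymous, as a logged-in user and
# as an admin) and prints latency percentiles plus process memory stats as YAML.
#
# Example invocation (the output path is just an illustration):
#   ruby bench.rb -u --concurrency=2 --iterations=1000 --tests=topic,home --output=/tmp/bench.yml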

require "socket"
require "csv"
require "yaml"
require "optparse"
require "fileutils"
require "net/http"
require "uri"
require "open-uri" # for URI.open, used to fetch the admin memory/heap stats below
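
# Benchmark defaults; each can be overridden with the command line options parsed below.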
@include_env = false
@result_file = nil
@iterations = 500
@best_of = 1
@mem_stats = false
@unicorn = false
@dump_heap = false
@concurrency = 1
@skip_asset_bundle = false
@unicorn_workers = 3

opts =
  OptionParser.new do |o|
    o.banner = "Usage: ruby bench.rb [options]"

    o.on("-n", "--with_default_env", "Include recommended Discourse env") { @include_env = true }
    o.on("-o", "--output [FILE]", "Output results to this file") { |f| @result_file = f }
    o.on("-i", "--iterations [ITERATIONS]", "Number of iterations to run the bench for") do |i|
      @iterations = i.to_i
    end
    o.on("-b", "--best_of [NUM]", "Number of times to run the bench taking best as result") do |i|
      @best_of = i.to_i
    end
    o.on("-d", "--heap_dump") do
      @dump_heap = true
      # We need an env var for config/boot.rb to enable allocation tracing prior to framework init
      ENV["DISCOURSE_DUMP_HEAP"] = "1"
    end
    o.on("-m", "--memory_stats") { @mem_stats = true }
    o.on("-u", "--unicorn", "Use unicorn to serve pages as opposed to puma") { @unicorn = true }
    o.on(
      "-c",
      "--concurrency [NUM]",
      "Run benchmark with this number of concurrent requests (default: 1)",
    ) { |i| @concurrency = i.to_i }
    o.on(
      "-w",
      "--unicorn_workers [NUM]",
      "Run benchmark with this number of unicorn workers (default: 3)",
    ) { |i| @unicorn_workers = i.to_i }
    o.on("-s", "--skip-bundle-assets", "Skip bundling assets") { @skip_asset_bundle = true }

    o.on(
      "-t",
      "--tests [STRING]",
      "List of tests to run. Example: '--tests topic,categories'",
    ) { |i| @tests = i.split(",") }
  end

opts.parse!
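
# Runs a shell command, streaming (or, with :quiet, silencing) its output;
# aborts the whole benchmark if the command exits with a non-zero status.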
def run(command, opt = nil)
  exit_status =
    if opt == :quiet
      system(command, out: "/dev/null", err: :out)
    else
      system(command, out: $stdout, err: :out)
    end

  abort("Command '#{command}' failed with exit status #{$?}") unless exit_status
end
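
# Facter (>= 4.0) is used further down to record host facts (OS, memory, CPU) alongside the results.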
begin
  require "facter"
  raise LoadError if Gem::Version.new(Facter.version) < Gem::Version.new("4.0")
rescue LoadError
  run "gem install facter"
  puts "please rerun script"
  exit
end

@timings = {}
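
# Times the given block and records its duration (in milliseconds) in @timings under +name+.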
def measure(name)
  start = Time.now
  yield
  @timings[name] = ((Time.now - start) * 1000).to_i
end

def prereqs
  puts "Be sure the following packages are installed:

sudo apt-get -y install build-essential libssl-dev libyaml-dev git libtool libxslt-dev libxml2-dev libpq-dev gawk curl pngcrush python-software-properties software-properties-common tasksel

sudo tasksel install postgresql-server
OR
apt-get install postgresql-server^

sudo apt-add-repository -y ppa:rwky/redis
sudo apt-get update
sudo apt-get install redis-server
"
end

puts "Running bundle"
# `run` aborts on any failure, so call `system` directly here; that way a failed
# bundle install falls through to the prerequisites hint instead of a bare abort.
unless system("bundle", out: "/dev/null", err: :out)
  puts "Quitting, some of the gems did not install"
  prereqs
  exit
end

puts "Ensuring config is setup"

`which ab > /dev/null 2>&1`
unless $?.success?
  abort "Apache Bench is not installed. Try: apt-get install apache2-utils or brew install ab"
end

unless File.exist?("config/database.yml")
  puts "Copying database.yml.development-sample to database.yml"
  `cp config/database.yml.development-sample config/database.yml`
end

ENV["RAILS_ENV"] = "profile"
discourse_env_vars = %w[
  DISCOURSE_DUMP_HEAP
  RUBY_GC_HEAP_INIT_SLOTS
  RUBY_GC_HEAP_FREE_SLOTS
  RUBY_GC_HEAP_GROWTH_FACTOR
  RUBY_GC_HEAP_GROWTH_MAX_SLOTS
  RUBY_GC_MALLOC_LIMIT
  RUBY_GC_OLDMALLOC_LIMIT
  RUBY_GC_MALLOC_LIMIT_MAX
  RUBY_GC_OLDMALLOC_LIMIT_MAX
  RUBY_GC_MALLOC_LIMIT_GROWTH_FACTOR
  RUBY_GC_OLDMALLOC_LIMIT_GROWTH_FACTOR
  RUBY_GC_HEAP_OLDOBJECT_LIMIT_FACTOR
  LD_PRELOAD
]

if @include_env
  puts "Running with tuned environment"
  discourse_env_vars.each { |v| ENV.delete v }

  ENV["RUBY_GC_HEAP_GROWTH_MAX_SLOTS"] = "40000"
  ENV["RUBY_GC_HEAP_INIT_SLOTS"] = "400000"
  ENV["RUBY_GC_HEAP_OLDOBJECT_LIMIT_FACTOR"] = "1.5"
else
  # clean env
  puts "Running with the following custom environment"
end

discourse_env_vars.each { |w| puts "#{w}: #{ENV[w]}" if ENV[w].to_s.length > 0 }
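
# Returns true if +port+ can be bound on all interfaces, i.e. nothing is listening on it yet.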
def port_available?(port)
  server = TCPServer.open("0.0.0.0", port)
  server.close
  true
rescue Errno::EADDRINUSE
  false
end

@port = 60_079

@port += 1 while !port_available? @port

puts "Ensuring profiling DB exists and is migrated"
puts `bundle exec rake db:create`
`bundle exec rake db:migrate`

puts "Timing loading Rails"
measure("load_rails") { `bundle exec rake middleware` }

puts "Populating Profile DB"
run("bundle exec ruby script/profile_db_generator.rb")

puts "Getting admin api key"
admin_api_key = `bundle exec rake api_key:create_master[bench]`.split("\n")[-1]
raise "Failed to obtain an admin API key" if admin_api_key.to_s.empty?

puts "Getting user api key"
user_api_key = `bundle exec rake user_api_key:create[user1]`.split("\n")[-1]
raise "Failed to obtain a user API key" if user_api_key.to_s.empty?
def bench(path, name, headers)
  puts "Running apache bench warmup"
  add = ""
  add = "-c #{@concurrency} " if @concurrency > 1
  header_string = headers&.map { |k, v| "-H \"#{k}:#{v}\"" }&.join(" ")
  `ab #{add} #{header_string} -n 20 -l "http://127.0.0.1:#{@port}#{path}"`

  puts "Benchmarking #{name} @ #{path}"
  `ab #{add} #{header_string} -n #{@iterations} -l -e tmp/ab.csv "http://127.0.0.1:#{@port}#{path}"`

  percentiles = { 50 => nil, 75 => nil, 90 => nil, 99 => nil }
  CSV.foreach("tmp/ab.csv") do |percent, time|
    percentiles[percent.to_i] = time.to_i if percentiles.key? percent.to_i
  end

  percentiles
end
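
# Boot the app server, smoke test the selected routes, run the benchmarks and
# report the results; the server is killed in the ensure block at the end.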
begin
  # critical, because a stale precompiled asset cache may be incompatible
  unless @skip_asset_bundle
    puts "precompiling assets"
    run("bundle exec rake assets:precompile")
  end

  pid =
    if @unicorn
      ENV["UNICORN_PORT"] = @port.to_s
      ENV["UNICORN_WORKERS"] = @unicorn_workers.to_s
      FileUtils.mkdir_p(File.join("tmp", "pids"))
      unicorn_pid = spawn("bundle exec unicorn -c config/unicorn.conf.rb")

      # wait for the unicorn master process to appear...
      while (
              unicorn_master_pid =
                `ps aux | grep "unicorn master" | grep -v "grep" | awk '{print $2}'`.strip.to_i
            ) == 0
        sleep 1
      end

      # ...and then for all of its workers to be forked
      while `ps -f --ppid #{unicorn_master_pid} | grep worker | awk '{ print $2 }'`.split("\n")
              .map(&:to_i)
              .size != @unicorn_workers.to_i
        sleep 1
      end

      unicorn_pid
    else
      spawn("bundle exec puma -p #{@port} -e production")
    end

  sleep 1 while port_available? @port

  puts "Starting benchmark..."

  admin_headers = { "Api-Key" => admin_api_key, "Api-Username" => "admin1" }

  user_headers = { "User-Api-Key" => user_api_key }

  # asset precompilation is a dog, curl the homepage once to force it
  run "curl -s -o /dev/null http://127.0.0.1:#{@port}/"

  redirect_response = `curl -s -I "http://127.0.0.1:#{@port}/t/i-am-a-topic-used-for-perf-tests"`
  raise "Unable to locate topic for perf tests" if redirect_response !~ /301 Moved Permanently/

  topic_url =
    redirect_response.match(%r{^location: .+(/t/i-am-a-topic-used-for-perf-tests/.+)$}i)[1].strip

  all_tests = [
    %w[categories /categories],
    %w[home /],
    ["topic", topic_url],
    ["topic.json", "#{topic_url}.json"],
    ["user activity", "/u/admin1/activity"],
  ]

  @tests ||= %w[categories home topic]

  tests_to_run = all_tests.select { |test_name, path| @tests.include?(test_name) }

  # each selected route is also benched as a logged in user and as an admin
  tests_to_run.concat(
    tests_to_run.map { |k, url| ["#{k} user", "#{url}", user_headers] },
    tests_to_run.map { |k, url| ["#{k} admin", "#{url}", admin_headers] },
  )

  # smoke test: every selected route must return a 200 before it is benchmarked
  tests_to_run.each do |test_name, path, headers_for_path|
    uri = URI.parse("http://127.0.0.1:#{@port}#{path}")
    http = Net::HTTP.new(uri.host, uri.port)
    request = Net::HTTP::Get.new(uri.request_uri)

    headers_for_path&.each { |key, value| request[key] = value }

    response = http.request(request)

    raise "#{test_name} #{path} returned non 200 response code" if response.code != "200"
  end

  # NOTE: we run the most expensive page first in the bench
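
  # Of two percentile hashes for the same test, keep the one with the lower median (50th percentile).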
  def best_of(a, b)
    return a unless b
    return b unless a

    a[50] < b[50] ? a : b
  end

  results = {}
  @best_of.times do
    tests_to_run.each do |name, url, headers|
      results[name] = best_of(bench(url, name, headers), results[name])
    end
  end

  puts "Your Results: (note: for each timing the percentile is listed first, then the duration in milliseconds)"

  if @unicorn
    puts "Unicorn: (workers: #{@unicorn_workers})"
  else
    # TODO we want to also bench puma clusters
    puts "Puma: (single threaded)"
  end
  puts "Include env: #{@include_env}"
  puts "Iterations: #{@iterations}, Best of: #{@best_of}"
  puts "Concurrency: #{@concurrency}"
  puts

  # Prevent using external facts because it breaks when running in the
  # discourse/discourse_bench docker container.
  Facter.reset
  facts = Facter.to_hash

  # keep only the facts that are useful for comparing benchmark hosts
  facts.delete_if do |k, v|
    !%w[
      operatingsystem
      architecture
      kernelversion
      memorysize
      physicalprocessorcount
      processor0
      virtual
    ].include?(k)
  end

  run("RAILS_ENV=profile bundle exec rake assets:clean")
  def get_mem(pid)
    YAML.safe_load `ruby script/memstats.rb #{pid} --yaml`
  end

  mem = get_mem(pid)

  results =
    results.merge(
      "timings" => @timings,
      "ruby-version" => RUBY_DESCRIPTION,
      "rss_kb" => mem["rss_kb"],
      "pss_kb" => mem["pss_kb"],
    ).merge(facts)

  # with unicorn, also record per-worker memory usage
  if @unicorn
    child_pids = `ps --ppid #{pid} | awk '{ print $1; }' | grep -v PID`.split("\n")
    child_pids.each do |child|
      mem = get_mem(child)
      results["rss_kb_#{child}"] = mem["rss_kb"]
      results["pss_kb_#{child}"] = mem["pss_kb"]
    end
  end

  puts results.to_yaml

  if @mem_stats
    puts
    # admin-only routes, so authenticate with the admin API key
    puts URI.open("http://127.0.0.1:#{@port}/admin/memory_stats", admin_headers).read
  end

  if @dump_heap
    puts
    puts URI.open("http://127.0.0.1:#{@port}/admin/dump_heap", admin_headers).read
  end

  File.open(@result_file, "wb") { |f| f.write(results.to_yaml) } if @result_file
ensure
  Process.kill "KILL", pid if pid
end