2019-05-03 06:17:27 +08:00
|
|
|
# frozen_string_literal: true
|
|
|
|
|
2013-08-15 13:19:23 +08:00
|
|
|
require "socket"
|
|
|
|
require "csv"
|
2013-08-15 14:35:57 +08:00
|
|
|
require "yaml"
|
2013-12-11 07:32:23 +08:00
|
|
|
require "optparse"
|
2014-12-04 09:30:00 +08:00
|
|
|
require "fileutils"
|
2013-12-11 07:32:23 +08:00
|
|
|
|
|
|
|
@include_env = false
|
|
|
|
@result_file = nil
|
2013-12-30 12:15:30 +08:00
|
|
|
@iterations = 500
|
2014-01-09 12:56:03 +08:00
|
|
|
@best_of = 1
|
2014-02-14 12:43:08 +08:00
|
|
|
@mem_stats = false
|
2014-02-16 13:44:51 +08:00
|
|
|
@unicorn = false
|
2014-12-08 06:54:35 +08:00
|
|
|
@dump_heap = false
|
2018-02-19 07:12:51 +08:00
|
|
|
@concurrency = 1
|
|
|
|
@skip_asset_bundle = false
|
|
|
|
@unicorn_workers = 3
|
2014-02-14 12:43:08 +08:00
|
|
|
|
2013-12-11 07:32:23 +08:00
|
|
|
opts = OptionParser.new do |o|
|
|
|
|
o.banner = "Usage: ruby bench.rb [options]"
|
|
|
|
|
|
|
|
o.on("-n", "--with_default_env", "Include recommended Discourse env") do
|
|
|
|
@include_env = true
|
|
|
|
end
|
|
|
|
o.on("-o", "--output [FILE]", "Output results to this file") do |f|
|
|
|
|
@result_file = f
|
|
|
|
end
|
2013-12-30 12:15:30 +08:00
|
|
|
o.on("-i", "--iterations [ITERATIONS]", "Number of iterations to run the bench for") do |i|
|
|
|
|
@iterations = i.to_i
|
|
|
|
end
|
2014-01-09 12:56:03 +08:00
|
|
|
o.on("-b", "--best_of [NUM]", "Number of times to run the bench taking best as result") do |i|
|
|
|
|
@best_of = i.to_i
|
|
|
|
end
|
2014-12-08 06:54:35 +08:00
|
|
|
o.on("-d", "--heap_dump") do
|
|
|
|
@dump_heap = true
|
|
|
|
# We need an env var for config/boot.rb to enable allocation tracing prior to framework init
|
|
|
|
ENV['DISCOURSE_DUMP_HEAP'] = "1"
|
|
|
|
end
|
2014-02-14 12:43:08 +08:00
|
|
|
o.on("-m", "--memory_stats") do
|
|
|
|
@mem_stats = true
|
|
|
|
end
|
2018-02-19 07:12:51 +08:00
|
|
|
o.on("-u", "--unicorn", "Use unicorn to serve pages as opposed to puma") do
|
2014-02-16 13:44:51 +08:00
|
|
|
@unicorn = true
|
|
|
|
end
|
2018-02-19 07:12:51 +08:00
|
|
|
o.on("-c", "--concurrency [NUM]", "Run benchmark with this number of concurrent requests (default: 1)") do |i|
|
|
|
|
@concurrency = i.to_i
|
|
|
|
end
|
|
|
|
o.on("-w", "--unicorn_workers [NUM]", "Run benchmark with this number of unicorn workers (default: 3)") do |i|
|
|
|
|
@unicorn_workers = i.to_i
|
|
|
|
end
|
|
|
|
o.on("-s", "--skip-bundle-assets", "Skip bundling assets") do
|
|
|
|
@skip_asset_bundle = true
|
|
|
|
end
|
2013-12-11 07:32:23 +08:00
|
|
|
end
|
|
|
|
opts.parse!
|
2013-08-15 14:35:57 +08:00
|
|
|
|
2014-01-09 12:56:03 +08:00
|
|
|
def run(command, opt = nil)
|
2014-12-23 14:29:44 +08:00
|
|
|
exit_status =
|
|
|
|
if opt == :quiet
|
|
|
|
system(command, out: "/dev/null", err: :out)
|
|
|
|
else
|
|
|
|
system(command, out: $stdout, err: :out)
|
|
|
|
end
|
|
|
|
|
2017-04-15 00:58:35 +08:00
|
|
|
abort("Command '#{command}' failed with exit status #{$?}") unless exit_status
|
2013-08-15 13:32:07 +08:00
|
|
|
end
|
|
|
|
|
2013-08-29 19:23:00 +08:00
|
|
|
begin
|
|
|
|
require 'facter'
|
2020-05-30 07:03:40 +08:00
|
|
|
raise LoadError if Gem::Version.new(Facter.version) < Gem::Version.new("4.0")
|
2013-08-29 19:23:00 +08:00
|
|
|
rescue LoadError
|
|
|
|
run "gem install facter"
|
2014-01-03 10:03:58 +08:00
|
|
|
puts "please rerun script"
|
2014-01-02 07:21:01 +08:00
|
|
|
exit
|
2013-08-29 19:23:00 +08:00
|
|
|
end
|
|
|
|
|
|
|
|
@timings = {}
|
|
|
|
|
2013-08-15 14:35:57 +08:00
|
|
|
def measure(name)
|
|
|
|
start = Time.now
|
|
|
|
yield
|
|
|
|
@timings[name] = ((Time.now - start) * 1000).to_i
|
|
|
|
end
|
|
|
|
|
2013-08-15 13:37:33 +08:00
|
|
|
def prereqs
|
|
|
|
puts "Be sure to following packages are installed:
|
|
|
|
|
2014-05-29 12:10:34 +08:00
|
|
|
sudo apt-get -y install build-essential libssl-dev libyaml-dev git libtool libxslt-dev libxml2-dev libpq-dev gawk curl pngcrush python-software-properties software-properties-common tasksel
|
2014-05-29 12:35:41 +08:00
|
|
|
|
2013-08-15 13:37:33 +08:00
|
|
|
sudo tasksel install postgresql-server
|
2014-05-29 12:35:41 +08:00
|
|
|
OR
|
|
|
|
apt-get install postgresql-server^
|
2013-08-15 13:37:33 +08:00
|
|
|
|
|
|
|
sudo apt-add-repository -y ppa:rwky/redis
|
|
|
|
sudo apt-get update
|
|
|
|
sudo apt-get install redis-server
|
|
|
|
"
|
|
|
|
end
|
|
|
|
|
2013-08-15 13:19:23 +08:00
|
|
|
puts "Running bundle"
|
2014-12-23 14:29:44 +08:00
|
|
|
if run("bundle", :quiet)
|
2013-08-15 13:32:07 +08:00
|
|
|
puts "Quitting, some of the gems did not install"
|
2013-08-15 13:37:33 +08:00
|
|
|
prereqs
|
2013-08-15 13:32:07 +08:00
|
|
|
exit
|
|
|
|
end
|
2013-08-15 13:19:23 +08:00
|
|
|
|
|
|
|
puts "Ensuring config is setup"
|
|
|
|
|
2014-01-02 07:21:01 +08:00
|
|
|
%x{which ab > /dev/null 2>&1}
|
|
|
|
unless $? == 0
|
2013-08-15 13:19:23 +08:00
|
|
|
abort "Apache Bench is not installed. Try: apt-get install apache2-utils or brew install ab"
|
|
|
|
end
|
|
|
|
|
2022-01-06 01:45:08 +08:00
|
|
|
unless File.exist?("config/database.yml")
|
2013-08-15 13:19:23 +08:00
|
|
|
puts "Copying database.yml.development.sample to database.yml"
|
|
|
|
`cp config/database.yml.development-sample config/database.yml`
|
|
|
|
end
|
|
|
|
|
|
|
|
ENV["RAILS_ENV"] = "profile"
|
2013-10-13 05:06:45 +08:00
|
|
|
|
2018-02-19 07:12:51 +08:00
|
|
|
discourse_env_vars = %w(
|
|
|
|
DISCOURSE_DUMP_HEAP
|
|
|
|
RUBY_GC_HEAP_INIT_SLOTS
|
|
|
|
RUBY_GC_HEAP_FREE_SLOTS
|
|
|
|
RUBY_GC_HEAP_GROWTH_FACTOR
|
|
|
|
RUBY_GC_HEAP_GROWTH_MAX_SLOTS
|
|
|
|
RUBY_GC_MALLOC_LIMIT
|
|
|
|
RUBY_GC_OLDMALLOC_LIMIT
|
|
|
|
RUBY_GC_MALLOC_LIMIT_MAX
|
|
|
|
RUBY_GC_OLDMALLOC_LIMIT_MAX
|
|
|
|
RUBY_GC_MALLOC_LIMIT_GROWTH_FACTOR
|
|
|
|
RUBY_GC_OLDMALLOC_LIMIT_GROWTH_FACTOR
|
|
|
|
RUBY_GC_HEAP_OLDOBJECT_LIMIT_FACTOR
|
|
|
|
RUBY_GLOBAL_METHOD_CACHE_SIZE
|
2018-05-03 13:50:45 +08:00
|
|
|
LD_PRELOAD
|
2018-02-19 07:12:51 +08:00
|
|
|
)
|
2014-12-03 23:11:03 +08:00
|
|
|
|
2013-12-11 07:32:23 +08:00
|
|
|
if @include_env
|
|
|
|
puts "Running with tuned environment"
|
2018-02-19 07:12:51 +08:00
|
|
|
discourse_env_vars.each do |v|
|
2014-12-03 23:11:03 +08:00
|
|
|
ENV.delete v
|
|
|
|
end
|
2018-02-19 07:12:51 +08:00
|
|
|
|
|
|
|
ENV['RUBY_GLOBAL_METHOD_CACHE_SIZE'] = '131072'
|
|
|
|
ENV['RUBY_GC_HEAP_GROWTH_MAX_SLOTS'] = '40000'
|
|
|
|
ENV['RUBY_GC_HEAP_INIT_SLOTS'] = '400000'
|
|
|
|
ENV['RUBY_GC_HEAP_OLDOBJECT_LIMIT_FACTOR'] = '1.5'
|
|
|
|
|
2013-12-11 07:32:23 +08:00
|
|
|
else
|
|
|
|
# clean env
|
2013-12-30 12:15:30 +08:00
|
|
|
puts "Running with the following custom environment"
|
2018-02-19 07:12:51 +08:00
|
|
|
end
|
|
|
|
|
|
|
|
discourse_env_vars.each do |w|
|
|
|
|
puts "#{w}: #{ENV[w]}" if ENV[w].to_s.length > 0
|
2013-10-13 05:06:45 +08:00
|
|
|
end
|
2013-08-15 14:35:57 +08:00
|
|
|
|
2013-08-15 13:19:23 +08:00
|
|
|
def port_available?(port)
|
2013-08-17 17:36:41 +08:00
|
|
|
server = TCPServer.open("0.0.0.0", port)
|
2013-08-15 13:19:23 +08:00
|
|
|
server.close
|
|
|
|
true
|
|
|
|
rescue Errno::EADDRINUSE
|
|
|
|
false
|
|
|
|
end
|
|
|
|
|
2013-08-15 15:13:05 +08:00
|
|
|
@port = 60079
|
2013-08-15 13:19:23 +08:00
|
|
|
|
2013-08-15 15:13:05 +08:00
|
|
|
while !port_available? @port
|
|
|
|
@port += 1
|
2013-08-15 13:19:23 +08:00
|
|
|
end
|
|
|
|
|
|
|
|
puts "Ensuring profiling DB exists and is migrated"
|
|
|
|
puts `bundle exec rake db:create`
|
|
|
|
`bundle exec rake db:migrate`
|
|
|
|
|
2013-08-15 14:59:38 +08:00
|
|
|
puts "Timing loading Rails"
|
2013-08-15 14:35:57 +08:00
|
|
|
measure("load_rails") do
|
|
|
|
`bundle exec rake middleware`
|
|
|
|
end
|
2013-08-15 13:19:23 +08:00
|
|
|
|
2013-08-15 14:59:38 +08:00
|
|
|
puts "Populating Profile DB"
|
|
|
|
run("bundle exec ruby script/profile_db_generator.rb")
|
2013-08-15 13:19:23 +08:00
|
|
|
|
2013-09-10 14:03:11 +08:00
|
|
|
puts "Getting api key"
|
2020-01-08 13:23:29 +08:00
|
|
|
api_key = `bundle exec rake api_key:create_master[bench]`.split("\n")[-1]
|
2013-09-10 14:03:11 +08:00
|
|
|
|
2017-04-15 00:58:35 +08:00
|
|
|
def bench(path, name)
|
2013-08-15 13:19:23 +08:00
|
|
|
puts "Running apache bench warmup"
|
2014-02-16 13:44:51 +08:00
|
|
|
add = ""
|
2018-02-19 07:12:51 +08:00
|
|
|
add = "-c #{@concurrency} " if @concurrency > 1
|
2017-09-13 16:10:58 +08:00
|
|
|
`ab #{add} -n 20 -l "http://127.0.0.1:#{@port}#{path}"`
|
2018-02-19 07:12:51 +08:00
|
|
|
|
2017-04-15 00:58:35 +08:00
|
|
|
puts "Benchmarking #{name} @ #{path}"
|
2017-09-13 16:10:58 +08:00
|
|
|
`ab #{add} -n #{@iterations} -l -e tmp/ab.csv "http://127.0.0.1:#{@port}#{path}"`
|
2013-08-15 13:19:23 +08:00
|
|
|
|
|
|
|
percentiles = Hash[*[50, 75, 90, 99].zip([]).flatten]
|
|
|
|
CSV.foreach("tmp/ab.csv") do |percent, time|
|
|
|
|
percentiles[percent.to_i] = time.to_i if percentiles.key? percent.to_i
|
|
|
|
end
|
|
|
|
|
2013-08-15 15:13:05 +08:00
|
|
|
percentiles
|
|
|
|
end
|
|
|
|
|
|
|
|
begin
|
2013-09-10 14:03:11 +08:00
|
|
|
# critical cause cache may be incompatible
|
2018-02-19 07:12:51 +08:00
|
|
|
unless @skip_asset_bundle
|
|
|
|
puts "precompiling assets"
|
|
|
|
run("bundle exec rake assets:precompile")
|
|
|
|
end
|
2013-09-10 14:03:11 +08:00
|
|
|
|
2014-02-16 13:44:51 +08:00
|
|
|
pid =
|
|
|
|
if @unicorn
|
|
|
|
ENV['UNICORN_PORT'] = @port.to_s
|
2018-02-19 07:12:51 +08:00
|
|
|
ENV['UNICORN_WORKERS'] = @unicorn_workers.to_s
|
2014-12-04 09:30:00 +08:00
|
|
|
FileUtils.mkdir_p(File.join('tmp', 'pids'))
|
2014-02-16 13:44:51 +08:00
|
|
|
spawn("bundle exec unicorn -c config/unicorn.conf.rb")
|
|
|
|
else
|
2018-02-19 14:00:28 +08:00
|
|
|
spawn("bundle exec puma -p #{@port} -e production")
|
2014-02-16 13:44:51 +08:00
|
|
|
end
|
2013-08-15 15:13:05 +08:00
|
|
|
|
|
|
|
while port_available? @port
|
|
|
|
sleep 1
|
|
|
|
end
|
|
|
|
|
2013-08-17 17:36:41 +08:00
|
|
|
puts "Starting benchmark..."
|
2020-01-08 13:23:29 +08:00
|
|
|
headers = { 'Api-Key' => api_key,
|
|
|
|
'Api-Username' => "admin1" }
|
2013-08-17 17:36:41 +08:00
|
|
|
|
2013-09-03 16:58:56 +08:00
|
|
|
# asset precompilation is a dog, wget to force it
|
2020-05-30 07:02:51 +08:00
|
|
|
run "curl -s -o /dev/null http://127.0.0.1:#{@port}/"
|
2013-08-15 15:13:05 +08:00
|
|
|
|
2020-01-08 13:23:29 +08:00
|
|
|
redirect_response = `curl -s -I "http://127.0.0.1:#{@port}/t/i-am-a-topic-used-for-perf-tests"`
|
|
|
|
if redirect_response !~ /301 Moved Permanently/
|
|
|
|
raise "Unable to locate topic for perf tests"
|
|
|
|
end
|
|
|
|
|
|
|
|
topic_url = redirect_response.match(/^location: .+(\/t\/i-am-a-topic-used-for-perf-tests\/.+)$/i)[1].strip
|
|
|
|
|
2014-01-09 12:56:03 +08:00
|
|
|
tests = [
|
2014-02-16 12:11:25 +08:00
|
|
|
["categories", "/categories"],
|
2014-01-09 12:56:03 +08:00
|
|
|
["home", "/"],
|
2020-01-08 13:23:29 +08:00
|
|
|
["topic", topic_url]
|
2017-03-29 02:27:54 +08:00
|
|
|
# ["user", "/u/admin1/activity"],
|
2014-01-09 12:56:03 +08:00
|
|
|
]
|
|
|
|
|
2020-01-08 13:23:29 +08:00
|
|
|
tests.concat(tests.map { |k, url| ["#{k}_admin", "#{url}", headers] })
|
|
|
|
|
|
|
|
tests.each do |_, path, headers_for_path|
|
|
|
|
header_string = headers_for_path&.map { |k, v| "-H \"#{k}: #{v}\"" }&.join(" ")
|
2017-09-13 15:33:59 +08:00
|
|
|
|
2020-01-08 13:23:29 +08:00
|
|
|
if `curl -s -I "http://127.0.0.1:#{@port}#{path}" #{header_string}` !~ /200 OK/
|
2017-09-13 15:33:59 +08:00
|
|
|
raise "#{path} returned non 200 response code"
|
|
|
|
end
|
|
|
|
end
|
2014-02-16 12:11:25 +08:00
|
|
|
|
|
|
|
# NOTE: we run the most expensive page first in the bench
|
2014-01-09 12:56:03 +08:00
|
|
|
|
|
|
|
def best_of(a, b)
|
|
|
|
return a unless b
|
|
|
|
return b unless a
|
|
|
|
|
|
|
|
a[50] < b[50] ? a : b
|
|
|
|
end
|
|
|
|
|
|
|
|
results = {}
|
|
|
|
@best_of.times do
|
|
|
|
tests.each do |name, url|
|
2017-04-15 00:58:35 +08:00
|
|
|
results[name] = best_of(bench(url, name), results[name])
|
2014-01-09 12:56:03 +08:00
|
|
|
end
|
|
|
|
end
|
2013-09-10 14:22:58 +08:00
|
|
|
|
2013-08-15 15:48:11 +08:00
|
|
|
puts "Your Results: (note for timings- percentile is first, duration is second in millisecs)"
|
2013-08-15 13:19:23 +08:00
|
|
|
|
2018-02-19 07:12:51 +08:00
|
|
|
if @unicorn
|
|
|
|
puts "Unicorn: (workers: #{@unicorn_workers})"
|
|
|
|
else
|
|
|
|
# TODO we want to also bench puma clusters
|
|
|
|
puts "Puma: (single threaded)"
|
|
|
|
end
|
|
|
|
puts "Include env: #{@include_env}"
|
|
|
|
puts "Iterations: #{@iterations}, Best of: #{@best_of}"
|
|
|
|
puts "Concurrency: #{@concurrency}"
|
|
|
|
puts
|
|
|
|
|
2016-09-22 04:15:58 +08:00
|
|
|
# Prevent using external facts because it breaks when running in the
|
|
|
|
# discourse/discourse_bench docker container.
|
2020-05-30 07:03:40 +08:00
|
|
|
Facter.reset
|
2013-08-29 19:34:32 +08:00
|
|
|
facts = Facter.to_hash
|
|
|
|
|
|
|
|
facts.delete_if { |k, v|
|
|
|
|
!["operatingsystem", "architecture", "kernelversion",
|
|
|
|
"memorysize", "physicalprocessorcount", "processor0",
|
|
|
|
"virtual"].include?(k)
|
|
|
|
}
|
|
|
|
|
2013-09-10 14:03:11 +08:00
|
|
|
run("RAILS_ENV=profile bundle exec rake assets:clean")
|
|
|
|
|
2014-02-16 13:44:51 +08:00
|
|
|
def get_mem(pid)
|
2021-10-27 16:39:28 +08:00
|
|
|
YAML.safe_load `ruby script/memstats.rb #{pid} --yaml`
|
2014-02-16 13:44:51 +08:00
|
|
|
end
|
|
|
|
|
|
|
|
mem = get_mem(pid)
|
2014-01-03 08:51:12 +08:00
|
|
|
|
2014-01-10 13:11:10 +08:00
|
|
|
results = results.merge("timings" => @timings,
|
2014-01-03 08:51:12 +08:00
|
|
|
"ruby-version" => "#{RUBY_VERSION}-p#{RUBY_PATCHLEVEL}",
|
2014-02-16 13:44:51 +08:00
|
|
|
"rss_kb" => mem["rss_kb"],
|
|
|
|
"pss_kb" => mem["pss_kb"]).merge(facts)
|
2013-12-11 07:32:23 +08:00
|
|
|
|
2014-02-16 13:44:51 +08:00
|
|
|
if @unicorn
|
|
|
|
child_pids = `ps --ppid #{pid} | awk '{ print $1; }' | grep -v PID`.split("\n")
|
|
|
|
child_pids.each do |child|
|
|
|
|
mem = get_mem(child)
|
|
|
|
results["rss_kb_#{child}"] = mem["rss_kb"]
|
|
|
|
results["pss_kb_#{child}"] = mem["pss_kb"]
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2014-01-09 12:56:03 +08:00
|
|
|
puts results.to_yaml
|
2013-12-11 07:32:23 +08:00
|
|
|
|
2014-02-14 12:43:08 +08:00
|
|
|
if @mem_stats
|
|
|
|
puts
|
2020-01-08 13:23:29 +08:00
|
|
|
puts open("http://127.0.0.1:#{@port}/admin/memory_stats", headers).read
|
2014-02-14 12:43:08 +08:00
|
|
|
end
|
|
|
|
|
2014-12-08 06:54:35 +08:00
|
|
|
if @dump_heap
|
|
|
|
puts
|
2020-01-08 13:23:29 +08:00
|
|
|
puts open("http://127.0.0.1:#{@port}/admin/dump_heap", headers).read
|
2014-12-08 06:54:35 +08:00
|
|
|
end
|
|
|
|
|
2013-12-11 07:32:23 +08:00
|
|
|
if @result_file
|
|
|
|
File.open(@result_file, "wb") do |f|
|
|
|
|
f.write(results)
|
|
|
|
end
|
|
|
|
end
|
2013-08-15 13:19:23 +08:00
|
|
|
|
|
|
|
ensure
|
|
|
|
Process.kill "KILL", pid
|
|
|
|
end
|