mirror of
https://github.com/discourse/discourse.git
synced 2024-12-12 08:22:22 +08:00
bd301c3f08
Optimize `UserStat#posts_read_count` calculation. In addition, tests were updated to fail when the code is not evaluated. Creation of PostTiming was updating `posts_read_count`. The counter has to be reset to ensure that `ensure_consistency!` correctly calculates the result. Extracting users seen in the last hour to a separate Common Table Expression reduces the number of processed rows. Before ``` Update on user_stats (cost=267492.07..270822.95 rows=2900 width=174) (actual time=12606.121..12606.127 rows=0 loops=1) -> Hash Join (cost=267492.07..270822.95 rows=2900 width=174) (actual time=12561.814..12603.689 rows=10 loops=1) Hash Cond: (user_stats.user_id = x.user_id) Join Filter: (x.c <> user_stats.posts_read_count) Rows Removed by Join Filter: 67 -> Seq Scan on user_stats (cost=0.00..3125.34 rows=75534 width=134) (actual time=0.014..39.173 rows=75534 loops=1) -> Hash (cost=267455.80..267455.80 rows=2901 width=48) (actual time=12558.613..12558.617 rows=77 loops=1) Buckets: 4096 Batches: 1 Memory Usage: 39kB -> Subquery Scan on x (cost=267376.03..267455.80 rows=2901 width=48) (actual time=12168.601..12558.572 rows=77 loops=1) -> GroupAggregate (cost=267376.03..267426.79 rows=2901 width=12) (actual time=12168.595..12558.525 rows=77 loops=1) Group Key: pt.user_id -> Sort (cost=267376.03..267383.28 rows=2901 width=4) (actual time=12100.490..12352.106 rows=2072830 loops=1) Sort Key: pt.user_id Sort Method: external merge Disk: 28488kB -> Nested Loop (cost=1.28..267209.18 rows=2901 width=4) (actual time=0.040..11528.680 rows=2072830 loops=1) -> Nested Loop (cost=0.86..261390.02 rows=13159 width=8) (actual time=0.030..3492.887 rows=3581648 loops=1) -> Index Scan using index_users_on_last_seen_at on users u (cost=0.42..89.71 rows=28 width=4) (actual time=0.010..0.201 rows=78 loops=1) Index Cond: (last_seen_at > '2023-04-11 00:22:49.555537'::timestamp without time zone) -> Index Scan using index_post_timings_on_user_id on post_timings pt (cost=0.44..9287.60 rows=4455 width=8) (actual 
time=0.081..38.542 rows=45919 loops=78) Index Cond: (user_id = u.id) -> Index Scan using forum_threads_pkey on topics t (cost=0.42..0.44 rows=1 width=4) (actual time=0.002..0.002 rows=1 loops=3581648) Index Cond: (id = pt.topic_id) Filter: ((deleted_at IS NULL) AND ((archetype)::text = 'regular'::text)) Rows Removed by Filter: 0 Planning Time: 0.692 ms Execution Time: 12612.587 ms ``` After ``` Update on user_stats (cost=9473.60..12804.30 rows=2828 width=174) (actual time=677.724..677.729 rows=0 loops=1) -> Hash Join (cost=9473.60..12804.30 rows=2828 width=174) (actual time=672.536..677.706 rows=1 loops=1) Hash Cond: (user_stats.user_id = x.user_id) Join Filter: (x.c <> user_stats.posts_read_count) Rows Removed by Join Filter: 54 -> Seq Scan on user_stats (cost=0.00..3125.34 rows=75534 width=134) (actual time=0.012..23.977 rows=75534 loops=1) -> Hash (cost=9438.24..9438.24 rows=2829 width=48) (actual time=647.818..647.822 rows=55 loops=1) Buckets: 4096 Batches: 1 Memory Usage: 37kB -> Subquery Scan on x (cost=9381.66..9438.24 rows=2829 width=48) (actual time=647.409..647.805 rows=55 loops=1) -> HashAggregate (cost=9381.66..9409.95 rows=2829 width=12) (actual time=647.403..647.786 rows=55 loops=1) Group Key: pt.user_id Batches: 1 Memory Usage: 121kB -> Nested Loop (cost=1.86..9367.51 rows=2829 width=4) (actual time=0.056..625.245 rows=120022 loops=1) -> Nested Loop (cost=1.44..3692.96 rows=12832 width=8) (actual time=0.047..171.754 rows=217440 loops=1) -> Nested Loop (cost=1.00..254.63 rows=25 width=12) (actual time=0.030..1.407 rows=56 loops=1) Join Filter: (u.id = user_stats_1.user_id) -> Nested Loop (cost=0.71..243.08 rows=25 width=8) (actual time=0.018..1.207 rows=87 loops=1) -> Index Scan using index_users_on_last_seen_at on users u (cost=0.42..86.71 rows=27 width=4) (actual time=0.009..0.156 rows=87 loops=1) Index Cond: (last_seen_at > '2023-04-11 00:47:07.437568'::timestamp without time zone) -> Index Only Scan using user_stats_pkey on user_stats us 
(cost=0.29..5.79 rows=1 width=4) (actual time=0.011..0.011 rows=1 loops=87) Index Cond: (user_id = u.id) Heap Fetches: 87 -> Index Scan using user_stats_pkey on user_stats user_stats_1 (cost=0.29..0.45 rows=1 width=4) (actual time=0.002..0.002 rows=1 loops=87) Index Cond: (user_id = us.user_id) Filter: (posts_read_count < 10000) Rows Removed by Filter: 0 -> Index Scan using index_post_timings_on_user_id on post_timings pt (cost=0.44..92.98 rows=4455 width=8) (actual time=0.036..2.492 rows=3883 loops=56) Index Cond: (user_id = user_stats_1.user_id) -> Index Scan using forum_threads_pkey on topics t (cost=0.42..0.44 rows=1 width=4) (actual time=0.002..0.002 rows=1 loops=217440) Index Cond: (id = pt.topic_id) Filter: ((deleted_at IS NULL) AND ((archetype)::text = 'regular'::text)) Rows Removed by Filter: 0 Planning Time: 1.406 ms Execution Time: 677.817 ms ```
309 lines
9.6 KiB
Ruby
309 lines
9.6 KiB
Ruby
# frozen_string_literal: true
|
|
|
|
RSpec.describe UserStat do
|
|
it "is created automatically when a user is created" do
|
|
user = Fabricate(:evil_trout)
|
|
expect(user.user_stat).to be_present
|
|
|
|
# It populates the `new_since` field by default
|
|
expect(user.user_stat.new_since).to be_present
|
|
end
|
|
|
|
describe "#update_view_counts" do
|
|
let(:user) { Fabricate(:user) }
|
|
let(:stat) { user.user_stat }
|
|
|
|
context "with topics_entered" do
|
|
context "without any views" do
|
|
it "doesn't increase the user's topics_entered" do
|
|
expect {
|
|
UserStat.update_view_counts
|
|
stat.reload
|
|
}.not_to change(stat, :topics_entered)
|
|
end
|
|
end
|
|
|
|
context "with a view" do
|
|
fab!(:topic) { Fabricate(:topic) }
|
|
let!(:view) { TopicViewItem.add(topic.id, "127.0.0.1", user.id) }
|
|
|
|
before do
|
|
user.update_column :last_seen_at, 1.second.ago
|
|
stat.update_column :topics_entered, 0
|
|
end
|
|
|
|
it "adds one to the topics entered" do
|
|
expect { UserStat.update_view_counts }.to change { stat.reload.topics_entered }.to 1
|
|
end
|
|
|
|
it "won't record a second view as a different topic" do
|
|
TopicViewItem.add(topic.id, "127.0.0.1", user.id)
|
|
expect { UserStat.update_view_counts }.to change { stat.reload.topics_entered }.to 1
|
|
end
|
|
end
|
|
end
|
|
|
|
context "with posts_read_count" do
|
|
context "without any post timings" do
|
|
it "doesn't increase the user's posts_read_count" do
|
|
expect {
|
|
UserStat.update_view_counts
|
|
stat.reload
|
|
}.not_to change(stat, :posts_read_count)
|
|
end
|
|
end
|
|
|
|
context "with a post timing" do
|
|
let!(:post) { Fabricate(:post) }
|
|
let!(:post_timings) do
|
|
PostTiming.record_timing(
|
|
msecs: 1234,
|
|
topic_id: post.topic_id,
|
|
user_id: user.id,
|
|
post_number: post.post_number,
|
|
)
|
|
end
|
|
|
|
before do
|
|
user.update_column :last_seen_at, 1.second.ago
|
|
stat.update_column :posts_read_count, 0
|
|
end
|
|
|
|
it "increases posts_read_count" do
|
|
expect { UserStat.update_view_counts }.to change { stat.reload.posts_read_count }.to 1
|
|
end
|
|
end
|
|
end
|
|
end
|
|
|
|
  describe "ensure consistency!" do
    it "can update first unread" do
      post = create_post

      # A reply arrives 10 minutes later; the author is then marked as
      # recently seen so ensure_consistency! considers them.
      freeze_time 10.minutes.from_now
      create_post(topic_id: post.topic_id)

      post.user.update!(last_seen_at: Time.now)

      UserStat.ensure_consistency!

      post.user.user_stat.reload
      # first_unread_at is set to the (frozen) time of the unread reply.
      expect(post.user.user_stat.first_unread_at).to eq_time(Time.zone.now)
    end

    it "updates first unread pm timestamp correctly" do
      freeze_time

      # PM 1: `user` tracks the topic, `user_2` is a recipient at the
      # default (regular) notification level.
      user = Fabricate(:user, last_seen_at: Time.zone.now)
      user_2 = Fabricate(:user, last_seen_at: Time.zone.now)
      pm_topic = Fabricate(:private_message_topic, user: user, recipient: user_2)
      create_post(user: user, topic_id: pm_topic.id)

      TopicUser.change(
        user.id,
        pm_topic.id,
        notification_level: TopicUser.notification_levels[:tracking],
      )

      # user that is not tracking PM topic
      TopicUser.change(
        user_2.id,
        pm_topic.id,
        notification_level: TopicUser.notification_levels[:regular],
      )

      # User that has not been seen recently (outside the 1.hour.ago window
      # passed to ensure_consistency! below) — must be left untouched.
      user_3 = Fabricate(:user, last_seen_at: 1.year.ago)
      pm_topic.allowed_users << user_3

      TopicUser.change(
        user_3.id,
        pm_topic.id,
        notification_level: TopicUser.notification_levels[:tracking],
      )

      user_3_orig_first_unread_pm_at = user_3.user_stat.first_unread_pm_at

      # User that is not related to the PM
      user_4 = Fabricate(:user, last_seen_at: Time.zone.now)
      user_4_orig_first_unread_pm_at = user_4.user_stat.first_unread_pm_at

      # User for another PM topic
      user_5 = Fabricate(:user, last_seen_at: Time.zone.now)
      user_6 = Fabricate(:user, last_seen_at: 10.minutes.ago)
      pm_topic_2 = Fabricate(:private_message_topic, user: user_5, recipient: user_6)
      create_post(user: user_5, topic_id: pm_topic_2.id)

      TopicUser.change(
        user_5.id,
        pm_topic_2.id,
        notification_level: TopicUser.notification_levels[:tracking],
      )

      # User out of last seen limit
      # (with UPDATE_UNREAD_USERS_LIMIT stubbed to 4 below, user_6 —
      # presumably the least recently seen of the eligible users — falls
      # outside the processed batch; verify against the implementation)
      TopicUser.change(
        user_6.id,
        pm_topic_2.id,
        notification_level: TopicUser.notification_levels[:tracking],
      )

      create_post(user: user_6, topic_id: pm_topic_2.id)
      user_6_orig_first_unread_pm_at = user_6.user_stat.first_unread_pm_at

      # New unread replies in both PM topics, with distinct bumped-at times
      # so the expected first_unread_pm_at values are distinguishable.
      create_post(user: user_2, topic_id: pm_topic.id)
      create_post(user: user_6, topic_id: pm_topic_2.id)
      pm_topic.update!(updated_at: 10.minutes.from_now)
      pm_topic_2.update!(updated_at: 20.minutes.from_now)

      stub_const(UserStat, "UPDATE_UNREAD_USERS_LIMIT", 4) do
        UserStat.ensure_consistency!(1.hour.ago)
      end

      # User affected
      expect(user.user_stat.reload.first_unread_pm_at).to be_within(1.seconds).of(
        pm_topic.reload.updated_at,
      )
      # user_2 wasn't tracking PM 1, so it gets the fallback timestamp.
      expect(user_2.user_stat.reload.first_unread_pm_at).to be_within(1.seconds).of(
        UserStat::UPDATE_UNREAD_MINUTES_AGO.minutes.ago,
      )
      # user_3 (not seen recently) keeps its original value.
      expect(user_3.user_stat.reload.first_unread_pm_at).to eq_time(user_3_orig_first_unread_pm_at)
      # user_4 is unrelated to any PM, so it also gets the fallback.
      expect(user_4.user_stat.reload.first_unread_pm_at).to be_within(1.seconds).of(
        UserStat::UPDATE_UNREAD_MINUTES_AGO.minutes.ago,
      )
      expect(user_5.user_stat.reload.first_unread_pm_at).to eq_time(pm_topic_2.reload.updated_at)
      # user_6 was cut off by the stubbed user limit and keeps its original value.
      expect(user_6.user_stat.reload.first_unread_pm_at).to eq_time(user_6_orig_first_unread_pm_at)
    end
  end
|
|
|
|
describe "update_time_read!" do
|
|
fab!(:user) { Fabricate(:user) }
|
|
let(:stat) { user.user_stat }
|
|
|
|
it "always expires redis key" do
|
|
# this tests implementation which is not 100% ideal
|
|
# that said, redis key leaks are not good
|
|
stat.update_time_read!
|
|
ttl = Discourse.redis.ttl(UserStat.last_seen_key(user.id))
|
|
expect(ttl).to be > 0
|
|
expect(ttl).to be <= UserStat::MAX_TIME_READ_DIFF
|
|
end
|
|
|
|
it "makes no changes if nothing is cached" do
|
|
Discourse.redis.del(UserStat.last_seen_key(user.id))
|
|
stat.update_time_read!
|
|
stat.reload
|
|
expect(stat.time_read).to eq(0)
|
|
end
|
|
|
|
it "makes a change if time read is below threshold" do
|
|
freeze_time
|
|
UserStat.cache_last_seen(user.id, (Time.now - 10).to_f)
|
|
stat.update_time_read!
|
|
stat.reload
|
|
expect(stat.time_read).to eq(10)
|
|
end
|
|
|
|
it "makes no change if time read is above threshold" do
|
|
freeze_time
|
|
|
|
t = Time.now - 1 - UserStat::MAX_TIME_READ_DIFF
|
|
UserStat.cache_last_seen(user.id, t.to_f)
|
|
|
|
stat.update_time_read!
|
|
stat.reload
|
|
expect(stat.time_read).to eq(0)
|
|
end
|
|
end
|
|
|
|
describe "update_distinct_badge_count" do
|
|
fab!(:user) { Fabricate(:user) }
|
|
let(:stat) { user.user_stat }
|
|
fab!(:badge1) { Fabricate(:badge) }
|
|
fab!(:badge2) { Fabricate(:badge) }
|
|
|
|
it "updates counts correctly" do
|
|
expect(stat.reload.distinct_badge_count).to eq(0)
|
|
BadgeGranter.grant(badge1, user)
|
|
expect(stat.reload.distinct_badge_count).to eq(1)
|
|
BadgeGranter.grant(badge1, user)
|
|
expect(stat.reload.distinct_badge_count).to eq(1)
|
|
BadgeGranter.grant(badge2, user)
|
|
expect(stat.reload.distinct_badge_count).to eq(2)
|
|
user.reload.user_badges.destroy_all
|
|
expect(stat.reload.distinct_badge_count).to eq(0)
|
|
end
|
|
|
|
it "can update counts for all users simultaneously" do
|
|
user2 = Fabricate(:user)
|
|
stat2 = user2.user_stat
|
|
|
|
BadgeGranter.grant(badge1, user)
|
|
BadgeGranter.grant(badge1, user)
|
|
BadgeGranter.grant(badge2, user)
|
|
|
|
BadgeGranter.grant(badge1, user2)
|
|
|
|
# Set some clearly incorrect values
|
|
stat.update(distinct_badge_count: 999)
|
|
stat2.update(distinct_badge_count: 999)
|
|
|
|
UserStat.ensure_consistency!
|
|
|
|
expect(stat.reload.distinct_badge_count).to eq(2)
|
|
expect(stat2.reload.distinct_badge_count).to eq(1)
|
|
end
|
|
|
|
it "ignores disabled badges" do
|
|
BadgeGranter.grant(badge1, user)
|
|
BadgeGranter.grant(badge2, user)
|
|
expect(stat.reload.distinct_badge_count).to eq(2)
|
|
|
|
badge2.update(enabled: false)
|
|
expect(stat.reload.distinct_badge_count).to eq(1)
|
|
|
|
badge2.update(enabled: true)
|
|
expect(stat.reload.distinct_badge_count).to eq(2)
|
|
end
|
|
end
|
|
|
|
describe ".update_draft_count" do
|
|
fab!(:user) { Fabricate(:user) }
|
|
|
|
it "updates draft_count" do
|
|
Draft.create!(user: user, draft_key: "topic_1", data: {})
|
|
Draft.create!(user: user, draft_key: "new_topic", data: {})
|
|
Draft.create!(user: user, draft_key: "topic_2", data: {})
|
|
UserStat.update_all(draft_count: 0)
|
|
|
|
UserStat.update_draft_count(user.id)
|
|
expect(user.user_stat.draft_count).to eq(3)
|
|
end
|
|
end
|
|
|
|
describe "#update_pending_posts" do
|
|
subject(:update_pending_posts) { stat.update_pending_posts }
|
|
|
|
let!(:reviewable) { Fabricate(:reviewable_queued_post) }
|
|
let(:user) { reviewable.created_by }
|
|
let(:stat) { user.user_stat }
|
|
|
|
before do
|
|
stat.update!(pending_posts_count: 0) # the reviewable callback will have set this to 1 already.
|
|
end
|
|
|
|
it "sets 'pending_posts_count'" do
|
|
expect { update_pending_posts }.to change { stat.pending_posts_count }.to 1
|
|
end
|
|
|
|
it "publishes a message to clients" do
|
|
MessageBus.expects(:publish).with(
|
|
"/u/#{user.username_lower}/counters",
|
|
{ pending_posts_count: 1 },
|
|
user_ids: [user.id],
|
|
group_ids: [Group::AUTO_GROUPS[:staff]],
|
|
)
|
|
update_pending_posts
|
|
end
|
|
end
|
|
end
|