# frozen_string_literal: true

RSpec.describe SearchLog, type: :model do
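  # SearchLog.log debounces repeat searches per user/IP (exercised in the
  # ".log" examples below), so reset that cache between examples.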
  after { SearchLog.clear_debounce_cache! }

  describe ".log" do
    context "with invalid arguments" do
      it "no search type returns error" do
        status, _ =
          SearchLog.log(term: "bounty hunter", search_type: :missing, ip_address: "127.0.0.1")
        expect(status).to eq(:error)
      end

      it "no IP returns error" do
        status, _ = SearchLog.log(term: "bounty hunter", search_type: :header, ip_address: nil)
        expect(status).to eq(:error)
      end
    end

    context "when anonymous" do
      it "logs and updates the search" do
        freeze_time
        action, log_id =
          SearchLog.log(term: "jabba", search_type: :header, ip_address: "192.168.0.33")
        expect(action).to eq(:created)
        log = SearchLog.find(log_id)
        expect(log.term).to eq("jabba")
        expect(log.search_type).to eq(SearchLog.search_types[:header])
        expect(log.ip_address).to eq("192.168.0.33")
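        # A longer search from the same IP within the debounce window updates
        # the existing log row instead of creating a new one.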
        action, updated_log_id =
          SearchLog.log(term: "jabba the hut", search_type: :header, ip_address: "192.168.0.33")
        expect(action).to eq(:updated)
        expect(updated_log_id).to eq(log_id)
      end

      it "creates a new search with a different prefix" do
        freeze_time
        action, _ = SearchLog.log(term: "darth", search_type: :header, ip_address: "127.0.0.1")
        expect(action).to eq(:created)

        action, _ = SearchLog.log(term: "anakin", search_type: :header, ip_address: "127.0.0.1")
        expect(action).to eq(:created)
      end

      it "creates a new search with a different ip" do
        freeze_time
        action, _ = SearchLog.log(term: "darth", search_type: :header, ip_address: "127.0.0.1")
        expect(action).to eq(:created)

        action, _ = SearchLog.log(term: "darth", search_type: :header, ip_address: "127.0.0.2")
        expect(action).to eq(:created)
      end
    end

    context "when logged in" do
      fab!(:user)

      it "logs and updates the search" do
        freeze_time
        action, log_id =
          SearchLog.log(
            term: "hello",
            search_type: :full_page,
            ip_address: "192.168.0.1",
            user_id: user.id,
          )
        expect(action).to eq(:created)
        log = SearchLog.find(log_id)
        expect(log.term).to eq("hello")
        expect(log.search_type).to eq(SearchLog.search_types[:full_page])
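        # The IP address is not stored on the log when a user_id is recorded.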
        expect(log.ip_address).to eq(nil)
        expect(log.user_id).to eq(user.id)

        action, updated_log_id =
          SearchLog.log(
            term: "hello dolly",
            search_type: :header,
            ip_address: "192.168.0.33",
            user_id: user.id,
          )
        expect(action).to eq(:updated)
        expect(updated_log_id).to eq(log_id)
      end

      it "logs again if time has passed" do
        freeze_time(10.minutes.ago)

        action, _ =
          SearchLog.log(
            term: "hello",
            search_type: :full_page,
            ip_address: "192.168.0.1",
            user_id: user.id,
          )
        expect(action).to eq(:created)

        freeze_time(10.minutes.from_now)
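        # Clear the Redis debounce key for this user/IP so the repeat search is
        # treated as a brand new one.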
        Discourse.redis.del(SearchLog.redis_key(ip_address: "192.168.0.1", user_id: user.id))

        action, _ =
          SearchLog.log(
            term: "hello",
            search_type: :full_page,
            ip_address: "192.168.0.1",
            user_id: user.id,
          )

        expect(action).to eq(:created)
      end

      it "logs again with a different user" do
        freeze_time

        action, _ =
          SearchLog.log(
            term: "hello",
            search_type: :full_page,
            ip_address: "192.168.0.1",
            user_id: user.id,
          )
        expect(action).to eq(:created)

        action, _ =
          SearchLog.log(
            term: "hello dolly",
            search_type: :full_page,
            ip_address: "192.168.0.1",
            user_id: Fabricate(:user).id,
          )
        expect(action).to eq(:created)
      end
    end
  end

  describe ".term_details" do
    it "should only use the date for the period" do
      time = Time.utc(2019, 5, 23, 18, 15, 30)
      freeze_time(time)

      search_log = Fabricate(:search_log, created_at: time - 1.hour)
      search_log2 = Fabricate(:search_log, created_at: time + 1.hour)
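      # Both logs fall on the same UTC date, so the :daily period groups them
      # into a single data point.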
      details = SearchLog.term_details(search_log.term, :daily)

      expect(details[:data].first[:y]).to eq(2)
    end

    it "correctly returns term details" do
      Fabricate(:search_log, term: "ruby")
      Fabricate(:search_log, term: "ruBy", user: Fabricate(:user))
      Fabricate(:search_log, term: "ruby core", ip_address: "127.0.0.3")

      Fabricate(
        :search_log,
        term: "ruBy",
        search_type: SearchLog.search_types[:full_page],
        ip_address: "127.0.0.2",
      )
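      # Term matching is case-insensitive, so "ruby" and "ruBy" are counted
      # together, while "ruby core" is a separate term.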
      term_details = SearchLog.term_details("ruby")
      expect(term_details[:data][0][:y]).to eq(3)

      term_header_details = SearchLog.term_details("ruby", :all, :header)
      expect(term_header_details[:data][0][:y]).to eq(2)
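      # A populated search_result_id marks a search as clicked through.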
      SearchLog
        .where("lower(term) = ?", "ruby")
        .where(ip_address: "127.0.0.2")
        .update_all(search_result_id: 24)

      term_click_through_details = SearchLog.term_details("ruby", :all, :click_through_only)
      expect(term_click_through_details[:period]).to eq("all")
      expect(term_click_through_details[:data][0][:y]).to eq(1)
    end
  end

  describe "trending" do
    fab!(:user)

    before do
      SearchLog.log(term: "ruby", search_type: :header, ip_address: "127.0.0.1")
      SearchLog.log(term: "php", search_type: :header, ip_address: "127.0.0.1")
      SearchLog.log(term: "java", search_type: :header, ip_address: "127.0.0.1")
      SearchLog.log(term: "ruby", search_type: :header, ip_address: "127.0.0.1", user_id: user.id)
      SearchLog.log(term: "swift", search_type: :header, ip_address: "127.0.0.1")
      SearchLog.log(term: "ruby", search_type: :header, ip_address: "127.0.0.2")
    end

    it "considers time period" do
      expect(SearchLog.trending.to_a.count).to eq(4)
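      # Age the "swift" search out of the monthly window.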
SearchLog.where(term: "swift").update_all(created_at: 1.year.ago)
|
2018-12-18 21:43:46 +08:00
|
|
|
expect(SearchLog.trending(:monthly).to_a.count).to eq(3)
|
2017-11-15 08:13:50 +08:00
|
|
|
end
|
|
|
|
|
|
|
|
it "correctly returns trending data" do
|
|
|
|
top_trending = SearchLog.trending.first
|
|
|
|
expect(top_trending.term).to eq("ruby")
|
|
|
|
expect(top_trending.searches).to eq(3)
|
|
|
|
expect(top_trending.click_through).to eq(0)
|
|
|
|
|
2017-11-29 01:54:27 +08:00
|
|
|
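      # Mark every "ruby" search as clicked through.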
SearchLog.where(term: "ruby", ip_address: "127.0.0.1").update_all(search_result_id: 12)
|
2018-05-22 03:22:11 +08:00
|
|
|
SearchLog.where(term: "ruby", user_id: user.id).update_all(search_result_id: 12)
|
2017-11-29 01:54:27 +08:00
|
|
|
SearchLog.where(term: "ruby", ip_address: "127.0.0.2").update_all(search_result_id: 24)
|
2017-11-15 08:13:50 +08:00
|
|
|
top_trending = SearchLog.trending.first
|
|
|
|
expect(top_trending.click_through).to eq(3)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2022-07-28 00:14:14 +08:00
|
|
|
describe "clean_up" do
|
2017-07-15 02:29:31 +08:00
|
|
|
it "will remove old logs" do
|
|
|
|
SearchLog.log(term: "jawa", search_type: :header, ip_address: "127.0.0.1")
|
|
|
|
SearchLog.log(term: "jedi", search_type: :header, ip_address: "127.0.0.1")
|
|
|
|
SearchLog.log(term: "rey", search_type: :header, ip_address: "127.0.0.1")
|
|
|
|
SearchLog.log(term: "finn", search_type: :header, ip_address: "127.0.0.1")
|
|
|
|
|
|
|
|
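      # clean_up trims the table down to search_query_log_max_size, discarding
      # the oldest rows first.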
      SiteSetting.search_query_log_max_size = 5
      SearchLog.clean_up
      expect(SearchLog.count).to eq(4)

      SiteSetting.search_query_log_max_size = 2
      SearchLog.clean_up
      expect(SearchLog.count).to eq(2)
      expect(SearchLog.where(term: "rey").first).to be_present
      expect(SearchLog.where(term: "finn").first).to be_present
    end
  end
end