# frozen_string_literal: true

require 'rails_helper'

describe SearchController do
|
2019-07-02 09:21:52 +08:00
|
|
|
|
|
|
|
fab!(:awesome_post) do
|
|
|
|
SearchIndexer.enable
|
|
|
|
Fabricate(:post, raw: 'this is my really awesome post')
|
|
|
|
end
|
|
|
|
|
|
|
|
fab!(:user) do
|
|
|
|
Fabricate(:user)
|
|
|
|
end
|
|
|
|
|
|
|
|
fab!(:user_post) do
|
|
|
|
SearchIndexer.enable
|
|
|
|
Fabricate(:post, raw: "#{user.username} is a cool person")
|
|
|
|
end
|
|
|
|
|
2014-09-02 17:15:08 +08:00
|
|
|
context "integration" do
|
|
|
|
before do
|
2016-12-22 10:13:14 +08:00
|
|
|
SearchIndexer.enable
|
2014-09-02 17:15:08 +08:00
|
|
|
end
|
|
|
|
|
2018-06-14 16:31:07 +08:00
|
|
|
before do
|
|
|
|
# TODO be a bit more strategic here instead of junking
|
|
|
|
# all of redis
|
2020-05-23 12:56:13 +08:00
|
|
|
Discourse.redis.flushdb
|
2018-06-14 16:31:07 +08:00
|
|
|
end
|
|
|
|
|
2018-06-05 10:07:05 +08:00
|
|
|
after do
|
2020-05-23 12:56:13 +08:00
|
|
|
Discourse.redis.flushdb
|
2018-06-05 10:07:05 +08:00
|
|
|
end
|
|
|
|
|
2019-07-02 09:21:52 +08:00
|
|
|
context "when overloaded" do
|
|
|
|
|
|
|
|
before do
|
|
|
|
global_setting :disable_search_queue_threshold, 0.2
|
|
|
|
end
|
|
|
|
|
|
|
|
let! :start_time do
|
|
|
|
freeze_time
|
|
|
|
Time.now
|
|
|
|
end
|
|
|
|
|
|
|
|
let! :current_time do
|
|
|
|
freeze_time 0.3.seconds.from_now
|
|
|
|
end
|
|
|
|
|
|
|
|
it "errors on #query" do
|
|
|
|
|
|
|
|
get "/search/query.json", headers: {
|
|
|
|
"HTTP_X_REQUEST_START" => "t=#{start_time.to_f}"
|
|
|
|
}, params: {
|
2020-07-14 11:05:57 +08:00
|
|
|
term: "hi there"
|
2019-07-02 09:21:52 +08:00
|
|
|
}
|
|
|
|
|
|
|
|
expect(response.status).to eq(409)
|
|
|
|
end
|
|
|
|
|
|
|
|
it "no results and error on #index" do
|
|
|
|
get "/search.json", headers: {
|
|
|
|
"HTTP_X_REQUEST_START" => "t=#{start_time.to_f}"
|
|
|
|
}, params: {
|
|
|
|
q: "awesome"
|
|
|
|
}
|
|
|
|
|
|
|
|
expect(response.status).to eq(200)
|
|
|
|
|
2020-05-07 23:04:12 +08:00
|
|
|
data = response.parsed_body
|
2019-07-02 09:21:52 +08:00
|
|
|
|
|
|
|
expect(data["posts"]).to be_empty
|
|
|
|
expect(data["grouped_search_result"]["error"]).not_to be_empty
|
|
|
|
end
|
|
|
|
|
|
|
|
end
|
|
|
|
|
2018-09-04 10:11:42 +08:00
|
|
|
it "returns a 400 error if you search for null bytes" do
|
|
|
|
term = "hello\0hello"
|
|
|
|
|
|
|
|
get "/search/query.json", params: {
|
2020-07-14 11:05:57 +08:00
|
|
|
term: term
|
2018-09-04 10:11:42 +08:00
|
|
|
}
|
|
|
|
|
|
|
|
expect(response.status).to eq(400)
|
|
|
|
end
|
|
|
|
|
2014-09-02 17:15:08 +08:00
|
|
|
it "can search correctly" do
|
2018-06-04 11:12:38 +08:00
|
|
|
get "/search/query.json", params: {
|
2020-07-14 11:05:57 +08:00
|
|
|
term: 'awesome'
|
2018-06-04 11:12:38 +08:00
|
|
|
}
|
2014-09-02 17:15:08 +08:00
|
|
|
|
2018-06-07 16:11:09 +08:00
|
|
|
expect(response.status).to eq(200)
|
2019-07-02 09:21:52 +08:00
|
|
|
|
2020-05-07 23:04:12 +08:00
|
|
|
data = response.parsed_body
|
2019-07-02 09:21:52 +08:00
|
|
|
|
|
|
|
expect(data['posts'].length).to eq(1)
|
|
|
|
expect(data['posts'][0]['id']).to eq(awesome_post.id)
|
|
|
|
expect(data['posts'][0]['blurb']).to eq(awesome_post.raw)
|
|
|
|
expect(data['topics'][0]['id']).to eq(awesome_post.topic_id)
|
2014-09-02 17:15:08 +08:00
|
|
|
end
|
2017-07-15 01:56:58 +08:00
|
|
|
|
2020-07-14 11:05:57 +08:00
|
|
|
it "can search correctly with advanced search filters" do
|
|
|
|
awesome_post.update!(
|
|
|
|
raw: "#{"a" * Search::GroupedSearchResults::BLURB_LENGTH} elephant"
|
|
|
|
)
|
|
|
|
|
|
|
|
get "/search/query.json", params: { term: 'order:views elephant' }
|
|
|
|
|
|
|
|
expect(response.status).to eq(200)
|
|
|
|
|
|
|
|
data = response.parsed_body
|
|
|
|
|
|
|
|
expect(data.dig("grouped_search_result", "term")).to eq('order:views elephant')
|
|
|
|
expect(data['posts'].length).to eq(1)
|
|
|
|
expect(data['posts'][0]['id']).to eq(awesome_post.id)
|
|
|
|
expect(data['posts'][0]['blurb']).to include('elephant')
|
|
|
|
expect(data['topics'][0]['id']).to eq(awesome_post.topic_id)
|
|
|
|
end
|
|
|
|
|
2017-07-15 01:56:58 +08:00
|
|
|
it 'performs the query with a type filter' do
|
2017-08-31 12:06:56 +08:00
|
|
|
|
2018-06-04 11:12:38 +08:00
|
|
|
get "/search/query.json", params: {
|
2017-08-31 12:06:56 +08:00
|
|
|
term: user.username, type_filter: 'topic'
|
2018-06-04 11:12:38 +08:00
|
|
|
}
|
2017-07-15 01:56:58 +08:00
|
|
|
|
2018-06-07 16:11:09 +08:00
|
|
|
expect(response.status).to eq(200)
|
2020-05-07 23:04:12 +08:00
|
|
|
data = response.parsed_body
|
2017-07-15 01:56:58 +08:00
|
|
|
|
2019-07-02 09:21:52 +08:00
|
|
|
expect(data['posts'][0]['id']).to eq(user_post.id)
|
2017-07-15 01:56:58 +08:00
|
|
|
expect(data['users']).to be_blank
|
|
|
|
|
2018-06-04 11:12:38 +08:00
|
|
|
get "/search/query.json", params: {
|
2017-08-31 12:06:56 +08:00
|
|
|
term: user.username, type_filter: 'user'
|
2018-06-04 11:12:38 +08:00
|
|
|
}
|
2017-08-31 12:06:56 +08:00
|
|
|
|
2018-06-07 16:11:09 +08:00
|
|
|
expect(response.status).to eq(200)
|
2020-05-07 23:04:12 +08:00
|
|
|
data = response.parsed_body
|
2017-07-15 01:56:58 +08:00
|
|
|
|
|
|
|
expect(data['posts']).to be_blank
|
|
|
|
expect(data['users'][0]['id']).to eq(user.id)
|
|
|
|
end
|
|
|
|
|
2017-07-26 08:51:44 +08:00
|
|
|
context 'searching by topic id' do
|
|
|
|
it 'should not be restricted by minimum search term length' do
|
|
|
|
SiteSetting.min_search_term_length = 20000
|
|
|
|
|
2018-06-04 11:12:38 +08:00
|
|
|
get "/search/query.json", params: {
|
2019-07-02 09:21:52 +08:00
|
|
|
term: awesome_post.topic_id,
|
2017-07-26 08:51:44 +08:00
|
|
|
type_filter: 'topic',
|
|
|
|
search_for_id: true
|
2018-06-04 11:12:38 +08:00
|
|
|
}
|
2017-07-26 08:51:44 +08:00
|
|
|
|
2018-06-07 16:11:09 +08:00
|
|
|
expect(response.status).to eq(200)
|
2020-05-07 23:04:12 +08:00
|
|
|
data = response.parsed_body
|
2017-07-26 08:51:44 +08:00
|
|
|
|
2019-07-02 09:21:52 +08:00
|
|
|
expect(data['topics'][0]['id']).to eq(awesome_post.topic_id)
|
2017-07-26 08:51:44 +08:00
|
|
|
end
|
|
|
|
|
|
|
|
it "should return the right result" do
|
2018-06-04 11:12:38 +08:00
|
|
|
get "/search/query.json", params: {
|
2019-07-02 09:21:52 +08:00
|
|
|
term: user_post.topic_id,
|
2017-07-26 08:51:44 +08:00
|
|
|
type_filter: 'topic',
|
|
|
|
search_for_id: true
|
2018-06-04 11:12:38 +08:00
|
|
|
}
|
2017-07-26 08:51:44 +08:00
|
|
|
|
2018-06-07 16:11:09 +08:00
|
|
|
expect(response.status).to eq(200)
|
2020-05-07 23:04:12 +08:00
|
|
|
data = response.parsed_body
|
2017-07-26 08:51:44 +08:00
|
|
|
|
2019-07-02 09:21:52 +08:00
|
|
|
expect(data['topics'][0]['id']).to eq(user_post.topic_id)
|
2017-07-26 05:09:13 +08:00
|
|
|
end
|
2017-07-15 01:56:58 +08:00
|
|
|
end
|
2014-09-02 17:15:08 +08:00
|
|
|
end
|
|
|
|
|
2017-07-14 01:34:31 +08:00
|
|
|
context "#query" do
|
|
|
|
it "logs the search term" do
|
|
|
|
SiteSetting.log_search_queries = true
|
2018-06-04 11:12:38 +08:00
|
|
|
get "/search/query.json", params: { term: 'wookie' }
|
2017-07-17 23:57:13 +08:00
|
|
|
|
2018-06-07 16:11:09 +08:00
|
|
|
expect(response.status).to eq(200)
|
2017-07-14 01:34:31 +08:00
|
|
|
expect(SearchLog.where(term: 'wookie')).to be_present
|
2017-07-17 23:57:13 +08:00
|
|
|
|
2020-05-07 23:04:12 +08:00
|
|
|
json = response.parsed_body
|
2017-07-17 23:57:13 +08:00
|
|
|
search_log_id = json['grouped_search_result']['search_log_id']
|
|
|
|
expect(search_log_id).to be_present
|
|
|
|
|
|
|
|
log = SearchLog.where(id: search_log_id).first
|
|
|
|
expect(log).to be_present
|
|
|
|
expect(log.term).to eq('wookie')
|
2017-07-14 01:34:31 +08:00
|
|
|
end
|
|
|
|
|
|
|
|
it "doesn't log when disabled" do
|
|
|
|
SiteSetting.log_search_queries = false
|
2018-06-04 11:12:38 +08:00
|
|
|
get "/search/query.json", params: { term: 'wookie' }
|
2018-06-07 16:11:09 +08:00
|
|
|
expect(response.status).to eq(200)
|
2017-07-14 01:34:31 +08:00
|
|
|
expect(SearchLog.where(term: 'wookie')).to be_blank
|
|
|
|
end
|
2020-06-05 00:26:08 +08:00
|
|
|
|
|
|
|
context 'rate limited' do
|
2020-07-10 07:44:17 +08:00
|
|
|
it 'rate limits anon searches per user' do
|
2020-07-10 07:08:34 +08:00
|
|
|
SiteSetting.rate_limit_search_anon_user = 2
|
2020-07-10 07:44:17 +08:00
|
|
|
RateLimiter.enable
|
|
|
|
RateLimiter.clear_all!
|
|
|
|
|
|
|
|
2.times do
|
|
|
|
get "/search/query.json", params: {
|
|
|
|
term: 'wookie'
|
|
|
|
}
|
|
|
|
|
|
|
|
expect(response.status).to eq(200)
|
|
|
|
json = response.parsed_body
|
|
|
|
expect(json["grouped_search_result"]["error"]).to eq(nil)
|
|
|
|
end
|
|
|
|
|
|
|
|
get "/search/query.json", params: {
|
|
|
|
term: 'wookie'
|
|
|
|
}
|
|
|
|
expect(response.status).to eq(200)
|
|
|
|
json = response.parsed_body
|
|
|
|
expect(json["grouped_search_result"]["error"]).to eq(I18n.t("rate_limiter.slow_down"))
|
2020-06-05 00:26:08 +08:00
|
|
|
end
|
|
|
|
|
2020-07-10 07:44:17 +08:00
|
|
|
it 'rate limits anon searches globally' do
|
|
|
|
SiteSetting.rate_limit_search_anon_global = 2
|
2020-06-05 00:26:08 +08:00
|
|
|
RateLimiter.enable
|
|
|
|
RateLimiter.clear_all!
|
|
|
|
|
|
|
|
2.times do
|
|
|
|
get "/search/query.json", params: {
|
|
|
|
term: 'wookie'
|
|
|
|
}
|
|
|
|
|
|
|
|
expect(response.status).to eq(200)
|
|
|
|
json = response.parsed_body
|
|
|
|
expect(json["grouped_search_result"]["error"]).to eq(nil)
|
|
|
|
end
|
|
|
|
|
|
|
|
get "/search/query.json", params: {
|
|
|
|
term: 'wookie'
|
|
|
|
}
|
|
|
|
expect(response.status).to eq(200)
|
|
|
|
json = response.parsed_body
|
|
|
|
expect(json["grouped_search_result"]["error"]).to eq(I18n.t("rate_limiter.slow_down"))
|
|
|
|
end
|
|
|
|
|
|
|
|
context "and a logged in user" do
|
|
|
|
before { sign_in(user) }
|
|
|
|
|
|
|
|
it 'rate limits logged in searches' do
|
2020-07-10 07:44:17 +08:00
|
|
|
SiteSetting.rate_limit_search_user = 3
|
2020-06-05 00:26:08 +08:00
|
|
|
RateLimiter.enable
|
|
|
|
RateLimiter.clear_all!
|
|
|
|
|
|
|
|
3.times do
|
|
|
|
get "/search/query.json", params: {
|
|
|
|
term: 'wookie'
|
|
|
|
}
|
|
|
|
|
|
|
|
expect(response.status).to eq(200)
|
|
|
|
json = response.parsed_body
|
|
|
|
expect(json["grouped_search_result"]["error"]).to eq(nil)
|
|
|
|
end
|
|
|
|
|
|
|
|
get "/search/query.json", params: {
|
|
|
|
term: 'wookie'
|
|
|
|
}
|
|
|
|
expect(response.status).to eq(200)
|
|
|
|
json = response.parsed_body
|
|
|
|
expect(json["grouped_search_result"]["error"]).to eq(I18n.t("rate_limiter.slow_down"))
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
2017-07-14 01:34:31 +08:00
|
|
|
end
|
|
|
|
|
|
|
|
context "#show" do
|
2018-12-21 00:13:14 +08:00
|
|
|
it "doesn't raise an error when search term not specified" do
|
|
|
|
get "/search"
|
|
|
|
expect(response.status).to eq(200)
|
|
|
|
end
|
|
|
|
|
2018-12-18 22:36:59 +08:00
|
|
|
it "raises an error when the search term length is less than required" do
|
|
|
|
get "/search.json", params: { q: 'ba' }
|
|
|
|
expect(response.status).to eq(400)
|
|
|
|
end
|
|
|
|
|
2019-04-29 15:09:25 +08:00
|
|
|
it "raises an error when search term is a hash" do
|
|
|
|
get "/search.json?q[foo]"
|
|
|
|
expect(response.status).to eq(400)
|
|
|
|
end
|
|
|
|
|
2019-10-22 22:44:52 +08:00
|
|
|
it "returns a 400 error if you search for null bytes" do
|
|
|
|
term = "hello\0hello"
|
|
|
|
|
|
|
|
get "/search.json", params: { q: term }
|
|
|
|
expect(response.status).to eq(400)
|
|
|
|
end
|
|
|
|
|
2017-07-14 01:34:31 +08:00
|
|
|
it "logs the search term" do
|
|
|
|
SiteSetting.log_search_queries = true
|
2018-06-04 11:12:38 +08:00
|
|
|
get "/search.json", params: { q: 'bantha' }
|
2018-06-07 16:11:09 +08:00
|
|
|
expect(response.status).to eq(200)
|
2017-07-14 01:34:31 +08:00
|
|
|
expect(SearchLog.where(term: 'bantha')).to be_present
|
|
|
|
end
|
|
|
|
|
|
|
|
it "doesn't log when disabled" do
|
|
|
|
SiteSetting.log_search_queries = false
|
2018-06-04 11:12:38 +08:00
|
|
|
get "/search.json", params: { q: 'bantha' }
|
2018-06-07 16:11:09 +08:00
|
|
|
expect(response.status).to eq(200)
|
2017-07-14 01:34:31 +08:00
|
|
|
expect(SearchLog.where(term: 'bantha')).to be_blank
|
|
|
|
end
|
2020-06-05 00:26:08 +08:00
|
|
|
|
|
|
|
context 'rate limited' do
|
2020-07-10 07:44:17 +08:00
|
|
|
it 'rate limits anon searches per user' do
|
|
|
|
SiteSetting.rate_limit_search_anon_user = 2
|
|
|
|
RateLimiter.enable
|
|
|
|
RateLimiter.clear_all!
|
|
|
|
|
|
|
|
2.times do
|
|
|
|
get "/search.json", params: {
|
|
|
|
q: 'bantha'
|
|
|
|
}
|
|
|
|
|
|
|
|
expect(response.status).to eq(200)
|
|
|
|
json = response.parsed_body
|
|
|
|
expect(json["grouped_search_result"]["error"]).to eq(nil)
|
|
|
|
end
|
|
|
|
|
|
|
|
get "/search.json", params: {
|
|
|
|
q: 'bantha'
|
|
|
|
}
|
|
|
|
expect(response.status).to eq(200)
|
|
|
|
json = response.parsed_body
|
|
|
|
expect(json["grouped_search_result"]["error"]).to eq(I18n.t("rate_limiter.slow_down"))
|
2020-06-05 00:26:08 +08:00
|
|
|
|
|
|
|
end
|
|
|
|
|
2020-07-10 07:44:17 +08:00
|
|
|
it 'rate limits anon searches globally' do
|
|
|
|
SiteSetting.rate_limit_search_anon_global = 2
|
2020-06-05 00:26:08 +08:00
|
|
|
RateLimiter.enable
|
|
|
|
RateLimiter.clear_all!
|
|
|
|
|
|
|
|
2.times do
|
|
|
|
get "/search.json", params: {
|
|
|
|
q: 'bantha'
|
|
|
|
}
|
|
|
|
|
|
|
|
expect(response.status).to eq(200)
|
|
|
|
json = response.parsed_body
|
|
|
|
expect(json["grouped_search_result"]["error"]).to eq(nil)
|
|
|
|
end
|
|
|
|
|
|
|
|
get "/search.json", params: {
|
|
|
|
q: 'bantha'
|
|
|
|
}
|
|
|
|
expect(response.status).to eq(200)
|
|
|
|
json = response.parsed_body
|
|
|
|
expect(json["grouped_search_result"]["error"]).to eq(I18n.t("rate_limiter.slow_down"))
|
|
|
|
|
|
|
|
end
|
|
|
|
|
|
|
|
context "and a logged in user" do
|
|
|
|
before { sign_in(user) }
|
|
|
|
|
|
|
|
it 'rate limits searches' do
|
2020-07-10 07:44:17 +08:00
|
|
|
SiteSetting.rate_limit_search_user = 3
|
2020-06-05 00:26:08 +08:00
|
|
|
RateLimiter.enable
|
|
|
|
RateLimiter.clear_all!
|
|
|
|
|
|
|
|
3.times do
|
|
|
|
get "/search.json", params: {
|
|
|
|
q: 'bantha'
|
|
|
|
}
|
|
|
|
|
|
|
|
expect(response.status).to eq(200)
|
|
|
|
json = response.parsed_body
|
|
|
|
expect(json["grouped_search_result"]["error"]).to eq(nil)
|
|
|
|
end
|
|
|
|
|
|
|
|
get "/search.json", params: {
|
|
|
|
q: 'bantha'
|
|
|
|
}
|
|
|
|
expect(response.status).to eq(200)
|
|
|
|
json = response.parsed_body
|
|
|
|
expect(json["grouped_search_result"]["error"]).to eq(I18n.t("rate_limiter.slow_down"))
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
2017-07-14 01:34:31 +08:00
|
|
|
end
|
|
|
|
|
2020-04-27 22:35:27 +08:00
|
|
|
context "search priority" do
|
|
|
|
fab!(:low_priority_category) do
|
|
|
|
Fabricate(
|
|
|
|
:category,
|
|
|
|
search_priority: Searchable::PRIORITIES[:very_low]
|
|
|
|
)
|
|
|
|
end
|
|
|
|
fab!(:high_priority_category) do
|
|
|
|
Fabricate(
|
|
|
|
:category,
|
|
|
|
search_priority: Searchable::PRIORITIES[:high]
|
|
|
|
)
|
|
|
|
end
|
|
|
|
fab!(:very_high_priority_category) do
|
|
|
|
Fabricate(
|
|
|
|
:category,
|
|
|
|
search_priority: Searchable::PRIORITIES[:very_high]
|
|
|
|
)
|
|
|
|
end
|
|
|
|
fab!(:low_priority_topic) { Fabricate(:topic, category: low_priority_category) }
|
|
|
|
fab!(:high_priority_topic) { Fabricate(:topic, category: high_priority_category) }
|
|
|
|
fab!(:very_high_priority_topic) { Fabricate(:topic, category: very_high_priority_category) }
|
|
|
|
fab!(:low_priority_post) do
|
|
|
|
SearchIndexer.enable
|
|
|
|
Fabricate(:post, topic: low_priority_topic, raw: "This is a Low Priority Post")
|
|
|
|
end
|
|
|
|
fab!(:hight_priority_post) do
|
|
|
|
SearchIndexer.enable
|
|
|
|
Fabricate(:post, topic: high_priority_topic, raw: "This is a High Priority Post")
|
|
|
|
end
|
|
|
|
fab!(:old_very_hight_priority_post) do
|
|
|
|
SearchIndexer.enable
|
|
|
|
Fabricate(:old_post, topic: very_high_priority_topic, raw: "This is a Old but Very High Priority Post")
|
|
|
|
end
|
|
|
|
|
|
|
|
it "sort posts with search priority when search term is empty" do
|
|
|
|
get "/search.json", params: { q: 'status:open' }
|
|
|
|
expect(response.status).to eq(200)
|
2020-05-07 23:04:12 +08:00
|
|
|
data = response.parsed_body
|
2020-04-27 22:35:27 +08:00
|
|
|
post1 = data["posts"].find { |e| e["id"] == old_very_hight_priority_post.id }
|
|
|
|
post2 = data["posts"].find { |e| e["id"] == low_priority_post.id }
|
|
|
|
expect(data["posts"][0]["id"]).to eq(old_very_hight_priority_post.id)
|
|
|
|
expect(post1["id"]).to be > post2["id"]
|
|
|
|
end
|
|
|
|
|
|
|
|
it "sort posts with search priority when no order query" do
|
|
|
|
get "/search.json", params: { q: 'status:open Priority Post' }
|
|
|
|
expect(response.status).to eq(200)
|
2020-05-07 23:04:12 +08:00
|
|
|
data = response.parsed_body
|
2020-04-27 22:35:27 +08:00
|
|
|
expect(data["posts"][0]["id"]).to eq(old_very_hight_priority_post.id)
|
|
|
|
expect(data["posts"][1]["id"]).to eq(hight_priority_post.id)
|
|
|
|
expect(data["posts"][2]["id"]).to eq(low_priority_post.id)
|
|
|
|
end
|
|
|
|
|
|
|
|
it "doesn't sort posts with search piority when query with order" do
|
|
|
|
get "/search.json", params: { q: 'status:open order:latest Priority Post' }
|
|
|
|
expect(response.status).to eq(200)
|
2020-05-07 23:04:12 +08:00
|
|
|
data = response.parsed_body
|
2020-04-27 22:35:27 +08:00
|
|
|
expect(data["posts"][0]["id"]).to eq(hight_priority_post.id)
|
|
|
|
expect(data["posts"][1]["id"]).to eq(low_priority_post.id)
|
|
|
|
expect(data["posts"][2]["id"]).to eq(old_very_hight_priority_post.id)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2017-07-15 01:56:58 +08:00
|
|
|
context "search context" do
|
2013-05-25 02:03:45 +08:00
|
|
|
it "raises an error with an invalid context type" do
|
2018-06-04 11:12:38 +08:00
|
|
|
get "/search/query.json", params: {
|
2018-01-12 11:15:10 +08:00
|
|
|
term: 'test', search_context: { type: 'security', id: 'hole' }
|
2018-06-04 11:12:38 +08:00
|
|
|
}
|
2018-01-12 11:15:10 +08:00
|
|
|
expect(response.status).to eq(400)
|
2013-05-25 02:03:45 +08:00
|
|
|
end
|
|
|
|
|
|
|
|
it "raises an error with a missing id" do
|
2018-06-04 11:12:38 +08:00
|
|
|
get "/search/query.json",
|
|
|
|
params: { term: 'test', search_context: { type: 'user' } }
|
2018-01-12 11:15:10 +08:00
|
|
|
expect(response.status).to eq(400)
|
2013-05-25 02:03:45 +08:00
|
|
|
end
|
|
|
|
|
|
|
|
context "with a user" do
|
2017-08-31 12:06:56 +08:00
|
|
|
|
2013-05-25 02:03:45 +08:00
|
|
|
it "raises an error if the user can't see the context" do
|
2018-06-04 11:12:38 +08:00
|
|
|
get "/search/query.json", params: {
|
|
|
|
term: 'test', search_context: { type: 'private_messages', id: user.username }
|
|
|
|
}
|
|
|
|
expect(response).to be_forbidden
|
2013-05-25 02:03:45 +08:00
|
|
|
end
|
|
|
|
|
|
|
|
it 'performs the query with a search context' do
|
2018-06-04 11:12:38 +08:00
|
|
|
get "/search/query.json", params: {
|
2017-08-31 12:06:56 +08:00
|
|
|
term: 'test', search_context: { type: 'user', id: user.username }
|
2018-06-04 11:12:38 +08:00
|
|
|
}
|
2017-08-31 12:06:56 +08:00
|
|
|
|
2018-06-07 16:11:09 +08:00
|
|
|
expect(response.status).to eq(200)
|
2013-05-25 02:03:45 +08:00
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2019-11-14 08:40:26 +08:00
|
|
|
context "with a tag" do
|
|
|
|
it "raises an error if the tag does not exist" do
|
|
|
|
get "/search/query.json", params: {
|
|
|
|
term: 'test', search_context: { type: 'tag', id: 'important-tag', name: 'important-tag' }
|
|
|
|
}
|
|
|
|
expect(response).to be_forbidden
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'performs the query with a search context' do
|
|
|
|
Fabricate(:tag, name: 'important-tag')
|
|
|
|
get "/search/query.json", params: {
|
|
|
|
term: 'test', search_context: { type: 'tag', id: 'important-tag', name: 'important-tag' }
|
|
|
|
}
|
|
|
|
|
|
|
|
expect(response.status).to eq(200)
|
|
|
|
end
|
|
|
|
end
|
2017-07-15 01:56:58 +08:00
|
|
|
end
|
2013-05-25 02:03:45 +08:00
|
|
|
|
2017-07-18 03:42:32 +08:00
|
|
|
context "#click" do
|
2018-04-23 10:00:37 +08:00
|
|
|
after do
|
2018-01-15 12:29:54 +08:00
|
|
|
SearchLog.clear_debounce_cache!
|
|
|
|
end
|
|
|
|
|
2017-07-18 03:42:32 +08:00
|
|
|
it "doesn't work wthout the necessary parameters" do
|
2018-06-04 11:12:38 +08:00
|
|
|
post "/search/click.json"
|
|
|
|
expect(response.status).to eq(400)
|
2017-07-18 03:42:32 +08:00
|
|
|
end
|
|
|
|
|
|
|
|
it "doesn't record the click for a different user" do
|
2019-07-02 09:21:52 +08:00
|
|
|
sign_in(user)
|
2017-07-18 03:42:32 +08:00
|
|
|
|
|
|
|
_, search_log_id = SearchLog.log(
|
2018-06-14 16:31:07 +08:00
|
|
|
term: SecureRandom.hex,
|
2017-07-18 03:42:32 +08:00
|
|
|
search_type: :header,
|
|
|
|
user_id: -10,
|
|
|
|
ip_address: '127.0.0.1'
|
|
|
|
)
|
|
|
|
|
2018-06-04 11:12:38 +08:00
|
|
|
post "/search/click", params: {
|
2017-08-31 12:06:56 +08:00
|
|
|
search_log_id: search_log_id,
|
|
|
|
search_result_id: 12345,
|
|
|
|
search_result_type: 'topic'
|
|
|
|
}
|
2017-07-18 03:42:32 +08:00
|
|
|
|
2018-06-07 16:11:09 +08:00
|
|
|
expect(response.status).to eq(200)
|
2017-11-29 01:54:27 +08:00
|
|
|
expect(SearchLog.find(search_log_id).search_result_id).to be_blank
|
2017-07-18 03:42:32 +08:00
|
|
|
end
|
|
|
|
|
|
|
|
it "records the click for a logged in user" do
|
2019-07-02 09:21:52 +08:00
|
|
|
sign_in(user)
|
2017-07-18 03:42:32 +08:00
|
|
|
|
|
|
|
_, search_log_id = SearchLog.log(
|
2018-06-14 16:31:07 +08:00
|
|
|
term: SecureRandom.hex,
|
2017-07-18 03:42:32 +08:00
|
|
|
search_type: :header,
|
|
|
|
user_id: user.id,
|
|
|
|
ip_address: '127.0.0.1'
|
|
|
|
)
|
|
|
|
|
2018-06-04 11:12:38 +08:00
|
|
|
post "/search/click.json", params: {
|
2017-08-31 12:06:56 +08:00
|
|
|
search_log_id: search_log_id,
|
|
|
|
search_result_id: 12345,
|
2017-11-29 01:54:27 +08:00
|
|
|
search_result_type: 'user'
|
2018-06-04 11:12:38 +08:00
|
|
|
}
|
2017-07-18 03:42:32 +08:00
|
|
|
|
2018-06-07 16:11:09 +08:00
|
|
|
expect(response.status).to eq(200)
|
2017-11-29 01:54:27 +08:00
|
|
|
expect(SearchLog.find(search_log_id).search_result_id).to eq(12345)
|
|
|
|
expect(SearchLog.find(search_log_id).search_result_type).to eq(SearchLog.search_result_types[:user])
|
2017-07-18 03:42:32 +08:00
|
|
|
end
|
|
|
|
|
|
|
|
it "records the click for an anonymous user" do
|
2018-06-04 11:12:38 +08:00
|
|
|
get "/"
|
|
|
|
ip_address = request.remote_ip
|
2017-07-18 03:42:32 +08:00
|
|
|
|
|
|
|
_, search_log_id = SearchLog.log(
|
2018-06-14 16:31:07 +08:00
|
|
|
term: SecureRandom.hex,
|
2017-07-18 03:42:32 +08:00
|
|
|
search_type: :header,
|
2018-06-04 11:12:38 +08:00
|
|
|
ip_address: ip_address
|
2017-07-18 03:42:32 +08:00
|
|
|
)
|
|
|
|
|
2018-06-04 11:12:38 +08:00
|
|
|
post "/search/click.json", params: {
|
2017-08-31 12:06:56 +08:00
|
|
|
search_log_id: search_log_id,
|
|
|
|
search_result_id: 22222,
|
|
|
|
search_result_type: 'topic'
|
2018-06-04 11:12:38 +08:00
|
|
|
}
|
2017-07-18 03:42:32 +08:00
|
|
|
|
2018-06-07 16:11:09 +08:00
|
|
|
expect(response.status).to eq(200)
|
2017-11-29 01:54:27 +08:00
|
|
|
expect(SearchLog.find(search_log_id).search_result_id).to eq(22222)
|
|
|
|
expect(SearchLog.find(search_log_id).search_result_type).to eq(SearchLog.search_result_types[:topic])
|
2017-07-18 03:42:32 +08:00
|
|
|
end
|
|
|
|
|
|
|
|
it "doesn't record the click for a different IP" do
|
|
|
|
_, search_log_id = SearchLog.log(
|
2018-06-14 16:31:07 +08:00
|
|
|
term: SecureRandom.hex,
|
2017-07-18 03:42:32 +08:00
|
|
|
search_type: :header,
|
2018-06-04 11:12:38 +08:00
|
|
|
ip_address: '192.168.0.19'
|
2017-07-18 03:42:32 +08:00
|
|
|
)
|
|
|
|
|
2018-06-04 11:12:38 +08:00
|
|
|
post "/search/click", params: {
|
2017-08-31 12:06:56 +08:00
|
|
|
search_log_id: search_log_id,
|
|
|
|
search_result_id: 22222,
|
|
|
|
search_result_type: 'topic'
|
|
|
|
}
|
2017-07-18 03:42:32 +08:00
|
|
|
|
2018-06-07 16:11:09 +08:00
|
|
|
expect(response.status).to eq(200)
|
2017-11-29 01:54:27 +08:00
|
|
|
expect(SearchLog.find(search_log_id).search_result_id).to be_blank
|
|
|
|
end
|
|
|
|
|
|
|
|
it "records the click for search result type category" do
|
2018-06-04 11:12:38 +08:00
|
|
|
get "/"
|
|
|
|
ip_address = request.remote_ip
|
2017-11-29 01:54:27 +08:00
|
|
|
|
|
|
|
_, search_log_id = SearchLog.log(
|
2018-06-14 16:31:07 +08:00
|
|
|
term: SecureRandom.hex,
|
2017-11-29 01:54:27 +08:00
|
|
|
search_type: :header,
|
2018-06-04 11:12:38 +08:00
|
|
|
ip_address: ip_address
|
2017-11-29 01:54:27 +08:00
|
|
|
)
|
|
|
|
|
2018-06-04 11:12:38 +08:00
|
|
|
post "/search/click.json", params: {
|
2017-11-29 01:54:27 +08:00
|
|
|
search_log_id: search_log_id,
|
|
|
|
search_result_id: 23456,
|
|
|
|
search_result_type: 'category'
|
2018-06-04 11:12:38 +08:00
|
|
|
}
|
2017-11-29 01:54:27 +08:00
|
|
|
|
2018-06-07 16:11:09 +08:00
|
|
|
expect(response.status).to eq(200)
|
2017-11-29 01:54:27 +08:00
|
|
|
expect(SearchLog.find(search_log_id).search_result_id).to eq(23456)
|
|
|
|
expect(SearchLog.find(search_log_id).search_result_type).to eq(SearchLog.search_result_types[:category])
|
|
|
|
end
|
|
|
|
|
|
|
|
it "records the click for search result type tag" do
|
2018-06-04 11:12:38 +08:00
|
|
|
get "/"
|
|
|
|
ip_address = request.remote_ip
|
|
|
|
tag = Fabricate(:tag, name: 'test')
|
2017-11-29 01:54:27 +08:00
|
|
|
|
|
|
|
_, search_log_id = SearchLog.log(
|
2018-06-14 16:31:07 +08:00
|
|
|
term: SecureRandom.hex,
|
2017-11-29 01:54:27 +08:00
|
|
|
search_type: :header,
|
2018-06-04 11:12:38 +08:00
|
|
|
ip_address: ip_address
|
2017-11-29 01:54:27 +08:00
|
|
|
)
|
|
|
|
|
2018-06-04 11:12:38 +08:00
|
|
|
post "/search/click.json", params: {
|
2017-11-29 01:54:27 +08:00
|
|
|
search_log_id: search_log_id,
|
|
|
|
search_result_id: tag.name,
|
|
|
|
search_result_type: 'tag'
|
2018-06-04 11:12:38 +08:00
|
|
|
}
|
2017-11-29 01:54:27 +08:00
|
|
|
|
2018-06-07 16:11:09 +08:00
|
|
|
expect(response.status).to eq(200)
|
2017-11-29 01:54:27 +08:00
|
|
|
expect(SearchLog.find(search_log_id).search_result_id).to eq(tag.id)
|
|
|
|
expect(SearchLog.find(search_log_id).search_result_type).to eq(SearchLog.search_result_types[:tag])
|
2017-07-18 03:42:32 +08:00
|
|
|
end
|
|
|
|
end
|
2013-02-06 03:16:51 +08:00
|
|
|
end