require 'rails_helper'
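
# Specs for RobotsTxtController: what /robots.txt serves under the
# allow_index_in_robots_txt and crawler whitelist/blacklist site settings.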
RSpec.describe RobotsTxtController do
  describe '#index' do
    context 'allow_index_in_robots_txt is true' do
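
      # Shared assertion for the crawler user-agent examples below: given the
      # start offsets of the allowed and disallowed sections within
      # response.body, checks that the allowed section blocks /u/ but not the
      # whole site, while the other section blocks everything. With a
      # whitelisted crawler the body looks roughly like:
      #
      #   User-agent: Googlebot    <- allowed section
      #   Disallow: /u/
      #
      #   User-agent: *            <- disallowed section
      #   Disallow: /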
      def expect_allowed_and_disallowed_sections(allow_index, disallow_index)
        expect(allow_index).to be_present
        expect(disallow_index).to be_present

        # Either section may come first, so slice response.body accordingly.
        allow_section = allow_index < disallow_index ?
          response.body[allow_index...disallow_index] : response.body[allow_index..-1]

        expect(allow_section).to include('Disallow: /u/')
        expect(allow_section).to_not include("Disallow: /\n")

        disallowed_section = allow_index < disallow_index ?
          response.body[disallow_index..-1] : response.body[disallow_index...allow_index]
        expect(disallowed_section).to include("Disallow: /\n")
      end

      it "returns index when indexing is allowed" do
        SiteSetting.allow_index_in_robots_txt = true
        get '/robots.txt'

        i = response.body.index('User-agent: *')
        expect(i).to be_present
        expect(response.body[i..-1]).to include("Disallow: /u/")
      end

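      # Whitelisting gives each listed crawler the permissive rules and
      # demotes every other agent ("User-agent: *") to a full Disallow.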
      it "can whitelist user agents" do
        SiteSetting.whitelisted_crawler_user_agents = "Googlebot|Twitterbot"
        get '/robots.txt'
        expect(response.body).to include('User-agent: Googlebot')
        expect(response.body).to include('User-agent: Twitterbot')

        allowed_index = [response.body.index('User-agent: Googlebot'), response.body.index('User-agent: Twitterbot')].min
        disallow_all_index = response.body.index('User-agent: *')

        expect_allowed_and_disallowed_sections(allowed_index, disallow_all_index)
      end

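      # Blacklisting is the inverse: the listed crawlers get the full
      # Disallow and every other agent keeps the permissive rules.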
      it "can blacklist user agents" do
        SiteSetting.blacklisted_crawler_user_agents = "Googlebot|Twitterbot"
        get '/robots.txt'
        expect(response.body).to include('User-agent: Googlebot')
        expect(response.body).to include('User-agent: Twitterbot')

        disallow_index = [response.body.index('User-agent: Googlebot'), response.body.index('User-agent: Twitterbot')].min
        allow_index = response.body.index('User-agent: *')

        expect_allowed_and_disallowed_sections(allow_index, disallow_index)
      end

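      # The whitelist takes precedence: a blacklisted agent should not be
      # mentioned at all when a whitelist is configured.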
      it "ignores blacklist if whitelist is set" do
        SiteSetting.whitelisted_crawler_user_agents = "Googlebot|Twitterbot"
        SiteSetting.blacklisted_crawler_user_agents = "Bananabot"
        get '/robots.txt'
        expect(response.body).to_not include('Bananabot')
        expect(response.body).to include('User-agent: Googlebot')
        expect(response.body).to include('User-agent: Twitterbot')
      end
    end

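    # Outside the context above: with indexing disallowed, per-path rules
    # such as "Disallow: /u/" should be absent from the output.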
    it "returns noindex when indexing is disallowed" do
      SiteSetting.allow_index_in_robots_txt = false
      get '/robots.txt'

      expect(response.body).to_not include("Disallow: /u/")
    end
  end
end