diff --git a/spec/requests/robots_txt_controller_spec.rb b/spec/requests/robots_txt_controller_spec.rb
index 16327997c9c..151f9d60ba6 100644
--- a/spec/requests/robots_txt_controller_spec.rb
+++ b/spec/requests/robots_txt_controller_spec.rb
@@ -76,7 +76,7 @@ RSpec.describe RobotsTxtController do
         allow_section = allow_index < disallow_index ?
           response.body[allow_index...disallow_index] : response.body[allow_index..-1]
 
-        expect(allow_section).to include('Disallow: /u/')
+        expect(allow_section).to include('Disallow: /auth/')
         expect(allow_section).to_not include("Disallow: /\n")
 
         disallowed_section = allow_index < disallow_index ?
@@ -90,7 +90,7 @@ RSpec.describe RobotsTxtController do
 
       i = response.body.index('User-agent: *')
       expect(i).to be_present
-      expect(response.body[i..-1]).to include("Disallow: /u/")
+      expect(response.body[i..-1]).to include("Disallow: /auth/")
     end
 
     it "can whitelist user agents" do
@@ -131,7 +131,7 @@ RSpec.describe RobotsTxtController do
       SiteSetting.allow_index_in_robots_txt = false
 
       get '/robots.txt'
 
-      expect(response.body).to_not include("Disallow: /u/")
+      expect(response.body).to_not include("Disallow: /auth/")
       expect(response.body).to include("User-agent: googlebot\nAllow")
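
For reference, every assertion touched above exercises the body served at /robots.txt, swapping the expected blocked path from /u/ to /auth/. A minimal request-spec sketch in the same style is shown below; the describe/it wrapper and the rails_helper require are assumed boilerplate, while the setting name, path, and expectations are taken from the hunks themselves.

# frozen_string_literal: true
require 'rails_helper'

# Hedged sketch, not the file's actual test: the outer structure is assumed;
# the expectations mirror the updated hunks above.
RSpec.describe RobotsTxtController do
  it "blocks /auth/ for generic crawlers without blocking the site root" do
    SiteSetting.allow_index_in_robots_txt = true

    get '/robots.txt'

    # The generic crawler section should now disallow the auth endpoints...
    i = response.body.index('User-agent: *')
    expect(i).to be_present
    expect(response.body[i..-1]).to include("Disallow: /auth/")

    # ...while leaving the rest of the site crawlable.
    expect(response.body[i..-1]).to_not include("Disallow: /\n")
  end
end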