FEATURE: Allow customization of robots.txt (#7884)

* FEATURE: Allow customization of robots.txt

This allows admins to customize/override the content of the robots.txt
file at /admin/customize/robots. That page is not linked anywhere in the
UI -- admins have to type the URL manually to access it.

* use Ember.computed.not

* Jeff feedback

* Feedback

* Remove unused import
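
For context on where the override is stored: the new admin page writes to the hidden overridden_robots_txt site setting introduced in this commit (see the site_settings diff below), so the override can also be set or cleared outside the UI. A minimal sketch, assuming a Rails console session on the Discourse app; the console usage itself is illustrative and not part of the commit:

# Illustrative Rails console sketch, not part of this commit.
# The override lives in the hidden overridden_robots_txt site setting.
SiteSetting.overridden_robots_txt = <<~ROBOTS
  User-agent: *
  Disallow: /
ROBOTS

# /robots.txt now serves the content above. Clearing the setting restores the
# generated default, which is what the admin page's reset button does.
SiteSetting.overridden_robots_txt = ""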
Osama Sayegh 2019-07-15 20:47:44 +03:00 committed by GitHub
parent 90e0f1b378
commit 6515ff19e5
12 changed files with 282 additions and 7 deletions

@@ -0,0 +1,45 @@
import { ajax } from "discourse/lib/ajax";
import { bufferedProperty } from "discourse/mixins/buffered-content";
import { propertyEqual } from "discourse/lib/computed";

export default Ember.Controller.extend(bufferedProperty("model"), {
  saved: false,
  isSaving: false,
  saveDisabled: propertyEqual("model.robots_txt", "buffered.robots_txt"),
  resetDisabled: Ember.computed.not("model.overridden"),

  actions: {
    save() {
      this.setProperties({
        isSaving: true,
        saved: false
      });

      ajax("robots.json", {
        method: "PUT",
        data: { robots_txt: this.buffered.get("robots_txt") }
      })
        .then(data => {
          this.commitBuffer();
          this.set("saved", true);
          this.set("model.overridden", data.overridden);
        })
        .finally(() => this.set("isSaving", false));
    },

    reset() {
      this.setProperties({
        isSaving: true,
        saved: false
      });

      ajax("robots.json", { method: "DELETE" })
        .then(data => {
          this.buffered.set("robots_txt", data.robots_txt);
          this.commitBuffer();
          this.set("saved", true);
          this.set("model.overridden", false);
        })
        .finally(() => this.set("isSaving", false));
    }
  }
});

@@ -0,0 +1,7 @@
import { ajax } from "discourse/lib/ajax";

export default Ember.Route.extend({
  model() {
    return ajax("/admin/customize/robots");
  }
});

@@ -86,6 +86,10 @@ export default function() {
this.route("edit", { path: "/:id" });
}
);
this.route("adminCustomizeRobotsTxt", {
path: "/robots",
resetNamespace: true
});
}
);

@@ -0,0 +1,20 @@
<div class="robots-txt-edit">
  <h3>{{i18n "admin.customize.robots.title"}}</h3>
  <p>{{i18n "admin.customize.robots.warning"}}</p>
  {{#if model.overridden}}
    <div class="overridden">
      {{i18n "admin.customize.robots.overridden"}}
    </div>
  {{/if}}
  {{textarea
    value=buffered.robots_txt
    class="robots-txt-input"}}
  {{#save-controls model=this action=(action "save") saved=saved saveDisabled=saveDisabled}}
    {{d-button
      class="btn-default"
      disabled=resetDisabled
      icon="undo"
      action=(action "reset")
      label="admin.settings.reset"}}
  {{/save-controls}}
</div>

@@ -777,3 +777,16 @@
    margin-left: 1em;
  }
}

.robots-txt-edit {
  div.overridden {
    background: $highlight-medium;
    padding: 7px;
    margin-bottom: 7px;
  }
  .robots-txt-input {
    width: 100%;
    box-sizing: border-box;
    height: 600px;
  }
}

@@ -0,0 +1,38 @@
# frozen_string_literal: true

class Admin::RobotsTxtController < Admin::AdminController

  def show
    render json: { robots_txt: current_robots_txt, overridden: @overridden }
  end

  def update
    params.require(:robots_txt)
    SiteSetting.overridden_robots_txt = params[:robots_txt]
    render json: { robots_txt: current_robots_txt, overridden: @overridden }
  end

  def reset
    SiteSetting.overridden_robots_txt = ""
    render json: { robots_txt: original_robots_txt, overridden: false }
  end

  private

  def current_robots_txt
    robots_txt = SiteSetting.overridden_robots_txt.presence
    @overridden = robots_txt.present?
    robots_txt ||= original_robots_txt
    robots_txt
  end

  def original_robots_txt
    if SiteSetting.allow_index_in_robots_txt?
      @robots_info = ::RobotsTxtController.fetch_default_robots_info
      render_to_string "robots_txt/index"
    else
      render_to_string "robots_txt/no_index"
    end
  end
end
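
The private current_robots_txt helper above encodes the precedence rule used throughout this commit: a non-blank override wins, otherwise the generated default is served. A minimal standalone sketch of that rule; effective_robots_txt is an illustrative name, not a method in the commit:

require "active_support/core_ext/object/blank"

# Illustrative helper, not part of the commit: mirrors current_robots_txt's
# fallback behaviour using ActiveSupport's presence.
def effective_robots_txt(overridden, default)
  overridden.presence || default
end

effective_robots_txt("", "User-agent: *\nDisallow: /auth/\n")
# => "User-agent: *\nDisallow: /auth/\n" (blank override falls back to the default)

effective_robots_txt("User-agent: *\nDisallow: /\n", "unused default")
# => "User-agent: *\nDisallow: /\n" (a non-blank override is served as-is)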

@@ -4,6 +4,8 @@ class RobotsTxtController < ApplicationController
   layout false
   skip_before_action :preload_json, :check_xhr, :redirect_to_login_if_required
 
+  OVERRIDDEN_HEADER = "# This robots.txt file has been customized at /admin/customize/robots\n"
+
   # NOTE: order is important!
   DISALLOWED_PATHS ||= %w{
     /auth/
@@ -33,8 +35,13 @@ class RobotsTxtController < ApplicationController
   }
 
   def index
+    if (overridden = SiteSetting.overridden_robots_txt.dup).present?
+      overridden.prepend(OVERRIDDEN_HEADER) if guardian.is_admin? && !is_api?
+      render plain: overridden
+      return
+    end
     if SiteSetting.allow_index_in_robots_txt?
-      @robots_info = fetch_robots_info
+      @robots_info = self.class.fetch_default_robots_info
       render :index, content_type: 'text/plain'
     else
       render :no_index, content_type: 'text/plain'
@@ -46,12 +53,13 @@ class RobotsTxtController < ApplicationController
 
   # JSON that can be used by a script to create a robots.txt that works well with your
   # existing site.
   def builder
-    render json: fetch_robots_info
+    result = self.class.fetch_default_robots_info
+    overridden = SiteSetting.overridden_robots_txt
+    result[:overridden] = overridden if overridden.present?
+    render json: result
   end
 
-protected
-
-  def fetch_robots_info
+  def self.fetch_default_robots_info
     deny_paths = DISALLOWED_PATHS.map { |p| Discourse.base_uri + p }
     deny_all = [ "#{Discourse.base_uri}/" ]
@@ -87,5 +95,4 @@ protected
 
     result
   end
-
 end
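
The builder endpoint mentioned in the comment above (served at /robots-builder.json) now reports a custom robots.txt through an overridden key, which the specs below verify. A rough sketch of how an external script might consume it; the host is a placeholder and the handling of the agents key is only illustrative, since its exact shape is not shown in this diff:

require "json"
require "net/http"

# Placeholder host; point this at a real Discourse instance.
uri = URI("https://forum.example.com/robots-builder.json")
info = JSON.parse(Net::HTTP.get(uri))

if info["overridden"]
  # The site serves a hand-edited robots.txt; use it verbatim.
  puts info["overridden"]
else
  puts info["header"]
  # "agents" holds the per-user-agent rules Discourse would generate; its exact
  # structure is not part of this diff, so just inspect it here.
  p info["agents"]
end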

@@ -3625,7 +3625,10 @@ en:
          love:
            name: "love"
            description: "The like button's color."
        robots:
          title: "Override your site's robots.txt file:"
          warning: "Warning: overriding the robots.txt file will prevent all future changes to the site settings that modify robots.txt from being applied."
          overridden: "Your site's default robots.txt file is overridden."
      email:
        title: "Emails"
        settings: "Settings"

@@ -235,6 +235,10 @@ Discourse::Application.routes.draw do
      get 'email_templates/(:id)' => 'email_templates#show', constraints: { id: /[0-9a-z_.]+/ }
      put 'email_templates/(:id)' => 'email_templates#update', constraints: { id: /[0-9a-z_.]+/ }
      delete 'email_templates/(:id)' => 'email_templates#revert', constraints: { id: /[0-9a-z_.]+/ }

      get 'robots' => 'robots_txt#show'
      put 'robots.json' => 'robots_txt#update'
      delete 'robots.json' => 'robots_txt#reset'
    end

    resources :embeddable_hosts, constraints: AdminConstraint.new

@@ -1929,6 +1929,10 @@ uncategorized:
    default: 50000
    hidden: true
  overridden_robots_txt:
    default: ""
    hidden: true

user_preferences:
  default_email_digest_frequency:
    enum: "DigestEmailSiteSetting"

@@ -0,0 +1,91 @@
# frozen_string_literal: true

require 'rails_helper'

describe Admin::RobotsTxtController do
  it "is a subclass of AdminController" do
    expect(described_class < Admin::AdminController).to eq(true)
  end

  fab!(:admin) { Fabricate(:admin) }
  fab!(:user) { Fabricate(:user) }

  describe "non-admin users" do
    before { sign_in(user) }

    it "can't see #show" do
      get "/admin/customize/robots.json"
      expect(response.status).to eq(404)
    end

    it "can't perform #update" do
      put "/admin/customize/robots.json", params: { robots_txt: "adasdasd" }
      expect(response.status).to eq(404)
      expect(SiteSetting.overridden_robots_txt).to eq("")
    end

    it "can't perform #reset" do
      SiteSetting.overridden_robots_txt = "overridden_content"
      delete "/admin/customize/robots.json"
      expect(response.status).to eq(404)
      expect(SiteSetting.overridden_robots_txt).to eq("overridden_content")
    end
  end

  describe "#show" do
    before { sign_in(admin) }

    it "returns default content if there are no overrides" do
      get "/admin/customize/robots.json"
      expect(response.status).to eq(200)
      json = JSON.parse(response.body)
      expect(json["robots_txt"]).to be_present
      expect(json["overridden"]).to eq(false)
    end

    it "returns overridden content if there are overrides" do
      SiteSetting.overridden_robots_txt = "something"
      get "/admin/customize/robots.json"
      expect(response.status).to eq(200)
      json = JSON.parse(response.body)
      expect(json["robots_txt"]).to eq("something")
      expect(json["overridden"]).to eq(true)
    end
  end

  describe "#update" do
    before { sign_in(admin) }

    it "overrides the site's default robots.txt" do
      put "/admin/customize/robots.json", params: { robots_txt: "new_content" }
      expect(response.status).to eq(200)
      json = JSON.parse(response.body)
      expect(json["robots_txt"]).to eq("new_content")
      expect(json["overridden"]).to eq(true)
      expect(SiteSetting.overridden_robots_txt).to eq("new_content")

      get "/robots.txt"
      expect(response.body).to include("new_content")
    end

    it "requires `robots_txt` param to be present" do
      SiteSetting.overridden_robots_txt = "overridden_content"
      put "/admin/customize/robots.json", params: { robots_txt: "" }
      expect(response.status).to eq(400)
    end
  end

  describe "#reset" do
    before { sign_in(admin) }

    it "resets robots.txt file to the default version" do
      SiteSetting.overridden_robots_txt = "overridden_content"
      delete "/admin/customize/robots.json"
      expect(response.status).to eq(200)
      json = JSON.parse(response.body)
      expect(json["robots_txt"]).not_to include("overridden_content")
      expect(json["overridden"]).to eq(false)
      expect(SiteSetting.overridden_robots_txt).to eq("")
    end
  end
end

@@ -11,10 +11,42 @@ RSpec.describe RobotsTxtController do
      expect(json['header']).to be_present
      expect(json['agents']).to be_present
    end

    it "includes overridden content if robots.txt is overridden" do
      SiteSetting.overridden_robots_txt = "something"
      get "/robots-builder.json"
      expect(response.status).to eq(200)
      json = ::JSON.parse(response.body)
      expect(json['header']).to be_present
      expect(json['agents']).to be_present
      expect(json['overridden']).to eq("something")
    end
  end

  describe '#index' do
    context "header for when the content is overridden" do
      it "is not prepended if there are no overrides" do
        sign_in(Fabricate(:admin))
        get '/robots.txt'
        expect(response.body).not_to start_with(RobotsTxtController::OVERRIDDEN_HEADER)
      end

      it "is prepended if there are overrides and the user is admin" do
        SiteSetting.overridden_robots_txt = "overridden_content"
        sign_in(Fabricate(:admin))
        get '/robots.txt'
        expect(response.body).to start_with(RobotsTxtController::OVERRIDDEN_HEADER)
      end

      it "is not prepended if the user is not admin" do
        SiteSetting.overridden_robots_txt = "overridden_content"
        get '/robots.txt'
        expect(response.body).not_to start_with(RobotsTxtController::OVERRIDDEN_HEADER)
      end
    end

    context 'subfolder' do
      it 'prefixes the rules with the directory' do
        Discourse.stubs(:base_uri).returns('/forum')
@@ -101,5 +133,12 @@ RSpec.describe RobotsTxtController do
      expect(response.body).to_not include("Disallow: /u/")
    end

    it "returns overridden robots.txt if the file is overridden" do
      SiteSetting.overridden_robots_txt = "blah whatever"
      get '/robots.txt'
      expect(response.status).to eq(200)
      expect(response.body).to eq(SiteSetting.overridden_robots_txt)
    end
  end
end