mirror of
https://github.com/discourse/discourse.git
synced 2024-11-24 04:31:56 +08:00
3d7dbdedc0
This is mainly useful for subfolder sites, which need to expose their robots.txt contents to a parent site.
19 lines
466 B
Plaintext
<%# Renders robots.txt from @robots_info (header text, per-agent crawl rules).
    NOTE(review): `|` gutter artifacts from the scraped page view were removed —
    they were extraction junk, not part of the template, and would otherwise be
    emitted verbatim into the generated robots.txt. %>
<%= @robots_info[:header] %>
<% if Discourse.base_uri.present? %>
# This robots.txt file is not used. Please append the content below in the robots.txt file located at the root
<% end %>
#
<% @robots_info[:agents].each do |agent| %>
User-agent: <%= agent[:name] %>
<%- if agent[:delay] -%>
Crawl-delay: <%= agent[:delay] %>
<%- end -%>
<% agent[:disallow].each do |path| %>
Disallow: <%= path %>
<% end %>
<% end %>
<%= server_plugin_outlet "robots_txt_index" %>