# frozen_string_literal: true

module CrawlerDetection

  def self.to_matcher(string, type: nil)
    escaped = string.split('|').map { |agent| Regexp.escape(agent) }.join('|')

    if type == :real && Rails.env == "test"
      # we need this bypass so we properly render views
      escaped << "|Rails Testing"
    end

    Regexp.new(escaped, Regexp::IGNORECASE)
  end
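
  # Example (illustrative): to_matcher("Googlebot|bingbot|curl") returns a single
  # case-insensitive regex, roughly /Googlebot|bingbot|curl/i; each entry is passed
  # through Regexp.escape first so characters such as '.' or '+' match literally.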

  def self.crawler?(user_agent)
    return true if user_agent.nil?

    # this is done to avoid regenerating regexes
    @non_crawler_matchers ||= {}
    @matchers ||= {}

    possibly_real = (@non_crawler_matchers[SiteSetting.non_crawler_user_agents] ||= to_matcher(SiteSetting.non_crawler_user_agents, type: :real))

    if user_agent.match?(possibly_real)
      known_bots = (@matchers[SiteSetting.crawler_user_agents] ||= to_matcher(SiteSetting.crawler_user_agents))
      if user_agent.match?(known_bots)
        bypass = (@matchers[SiteSetting.crawler_check_bypass_agents] ||= to_matcher(SiteSetting.crawler_check_bypass_agents))
        !user_agent.match?(bypass)
      else
        false
      end
    else
      true
    end
  end
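
  # Note (illustrative): a user agent that does not resemble any entry in
  # SiteSetting.non_crawler_user_agents is treated as a crawler outright; one that
  # does look like a real browser is only flagged when it also matches
  # SiteSetting.crawler_user_agents and is not excused by crawler_check_bypass_agents.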

  # Given a user_agent that returns true from crawler?, should its request be allowed?
  def self.allow_crawler?(user_agent)
    return true if SiteSetting.whitelisted_crawler_user_agents.blank? &&
      SiteSetting.blacklisted_crawler_user_agents.blank?

    @whitelisted_matchers ||= {}
    @blacklisted_matchers ||= {}

    if SiteSetting.whitelisted_crawler_user_agents.present?
      whitelisted = @whitelisted_matchers[SiteSetting.whitelisted_crawler_user_agents] ||= to_matcher(SiteSetting.whitelisted_crawler_user_agents)
      !user_agent.nil? && user_agent.match?(whitelisted)
    else
      blacklisted = @blacklisted_matchers[SiteSetting.blacklisted_crawler_user_agents] ||= to_matcher(SiteSetting.blacklisted_crawler_user_agents)
      user_agent.nil? || !user_agent.match?(blacklisted)
    end
  end
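
  # Note (illustrative): the whitelist takes precedence whenever it is non-blank.
  # For example, with whitelisted_crawler_user_agents = "Googlebot",
  # allow_crawler?("Googlebot/2.1") is true and allow_crawler?("bingbot/2.0") is
  # false, regardless of the blacklist setting.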

  def self.is_blocked_crawler?(user_agent)
    crawler?(user_agent) && !allow_crawler?(user_agent)
  end
end
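
# Usage sketch (illustrative): callers such as a request-filtering middleware or
# controller could combine the checks above roughly like this, assuming `request`
# responds to #user_agent:
#
#   if CrawlerDetection.is_blocked_crawler?(request.user_agent)
#     head :forbidden # or otherwise reject the request early
#   end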