2019-05-03 06:17:27 +08:00
|
|
|
# frozen_string_literal: true
|
|
|
|
|
2013-02-06 03:16:51 +08:00
|
|
|
module Jobs
|
|
|
|
|
2013-03-22 23:35:32 +08:00
|
|
|
# Number of jobs currently sitting in Sidekiq's queues, summed across all queues.
def self.queued
  stats = Sidekiq::Stats.new
  stats.enqueued
end
|
|
|
|
|
2019-03-14 22:47:38 +08:00
|
|
|
# True when jobs should be pushed to Sidekiq, i.e. the "run immediately"
# mode (used by tests / rake tasks) has NOT been switched on.
def self.run_later?
  @run_immediately ? false : true
end
|
|
|
|
|
|
|
|
# True when jobs execute synchronously instead of going through Sidekiq.
# Always returns a strict boolean, never nil.
def self.run_immediately?
  @run_immediately ? true : false
end
|
|
|
|
|
|
|
|
# Switch to inline mode: from now on enqueued jobs execute synchronously
# (useful in tests and console sessions).
def self.run_immediately!
  @run_immediately = true
end
|
|
|
|
|
|
|
|
# Restore the default behavior: enqueued jobs are handed off to Sidekiq.
def self.run_later!
  @run_immediately = false
end
|
|
|
|
|
2013-03-22 23:35:32 +08:00
|
|
|
# Time the most recent job started executing, as recorded in redis by
# Jobs::Base#perform, or nil when no job has ever run.
def self.last_job_performed_at
  Sidekiq.redis do |r|
    epoch = r.get('last_job_perform_at')
    epoch && Time.at(epoch.to_i)
  end
end
|
|
|
|
|
2013-04-23 01:37:16 +08:00
|
|
|
# Count jobs in Sidekiq's retry set whose class name ends in "Email".
def self.num_email_retry_jobs
  Sidekiq::RetrySet.new.count { |job| /Email$/.match?(job.klass) }
end
|
|
|
|
|
2013-02-06 03:16:51 +08:00
|
|
|
class Base
|
2019-03-05 19:19:11 +08:00
|
|
|
class JobInstrumenter
|
2019-03-08 17:16:13 +08:00
|
|
|
# Start instrumenting one job run: capture its identity and params, emit an
# optional "starting" record, start the method profiler and register the
# instance for periodic interval logging. No-op unless DISCOURSE_LOG_SIDEKIQ
# is enabled. All shared logging state is mutated under the class mutex.
def initialize(job_class:, opts:, db:, jid:)
  return unless enabled?
  self.class.mutex.synchronize do
    @data = {}

    @data["hostname"] = Discourse.os_hostname
    @data["pid"] = Process.pid # Pid
    @data["database"] = db # DB name - multisite db name it ran on
    @data["job_id"] = jid # Job unique ID
    @data["job_name"] = job_class.name # Job Name - eg: Jobs::AboutStats
    @data["job_type"] = job_class.try(:scheduled?) ? "scheduled" : "regular" # Job Type - "scheduled" or "regular"
    @data["opts"] = opts.to_json # Params - json encoded params for the job

    # With interval logging on, emit an extra "starting" record immediately
    # so long-running jobs are visible in the log before they finish.
    if ENV["DISCOURSE_LOG_SIDEKIQ_INTERVAL"]
      @data["status"] = "starting"
      write_to_log
    end

    @data["status"] = "pending"
    # monotonic clock: immune to wall-clock adjustments while the job runs
    @start_timestamp = Process.clock_gettime(Process::CLOCK_MONOTONIC)

    # Register this instance so the interval-logging thread can report
    # in-flight jobs. NOTE(review): @@active_jobs is a class variable
    # shared by all instrumenter instances; guarded by the class mutex.
    self.class.ensure_interval_logging!
    @@active_jobs ||= []
    @@active_jobs << self

    MethodProfiler.ensure_discourse_instrumentation!
    MethodProfiler.start
  end
end
|
2013-09-30 11:22:19 +08:00
|
|
|
|
2019-03-05 19:19:11 +08:00
|
|
|
# Finish instrumenting this job run: collect profiler totals, deregister
# from interval logging, record success/failure and write the final record.
# `exception` is the (possibly empty) Hash assembled in Jobs::Base#perform.
# No-op unless DISCOURSE_LOG_SIDEKIQ is enabled.
def stop(exception:)
  return unless enabled?
  self.class.mutex.synchronize do
    profile = MethodProfiler.stop

    # job finished - no longer a candidate for interval logging
    @@active_jobs.delete(self)

    @data["duration"] = profile[:total_duration] # Duration - length in seconds it took to run
    @data["sql_duration"] = profile.dig(:sql, :duration) || 0 # Sql Duration (s)
    @data["sql_calls"] = profile.dig(:sql, :calls) || 0 # Sql Statements - how many statements ran
    @data["redis_duration"] = profile.dig(:redis, :duration) || 0 # Redis Duration (s)
    @data["redis_calls"] = profile.dig(:redis, :calls) || 0 # Redis commands
    @data["net_duration"] = profile.dig(:net, :duration) || 0 # Net Duration (s)
    @data["net_calls"] = profile.dig(:net, :calls) || 0 # Net calls

    if exception.present?
      @data["exception"] = exception # Exception hash (JSON-encoded later by write_to_log)
      @data["status"] = 'failed'
    else
      @data["status"] = 'success' # Status - fail, success, pending
    end

    write_to_log
  end
end
|
|
|
|
|
2019-03-05 19:19:11 +08:00
|
|
|
# Queue `message` for asynchronous appending to log/sidekiq.log.
# Lazily creates the logger, the message queue, and a single background
# writer thread; the thread is recreated if it is no longer alive
# (e.g. after a process fork).
def self.raw_log(message)
  @@logger ||= begin
    f = File.open "#{Rails.root}/log/sidekiq.log", "a"
    f.sync = true # flush on every write so lines appear immediately
    Logger.new f
  end

  @@log_queue ||= Queue.new

  if !defined?(@@log_thread) || !@@log_thread.alive?
    @@log_thread = Thread.new do
      loop do
        # blocks until a message is pushed
        @@logger << @@log_queue.pop
      rescue Exception => e
        # NOTE(review): deliberately rescues Exception (not StandardError)
        # so the writer thread survives any error; failures are reported
        # via warn_exception rather than killing the thread.
        Discourse.warn_exception(e, message: "Exception encountered while logging Sidekiq job")
      end
    end
  end

  @@log_queue.push(message)
end
|
|
|
|
|
2019-03-05 19:19:11 +08:00
|
|
|
# Seconds elapsed (monotonic clock) since this instrumenter was created.
def current_duration
  now = Process.clock_gettime(Process::CLOCK_MONOTONIC)
  now - @start_timestamp
end
|
|
|
|
|
|
|
|
# Serialize the current state of @data as one JSON line in the sidekiq log.
# A job still in flight ("pending") reports its elapsed time so far.
def write_to_log
  return unless enabled?
  @data["@timestamp"] = Time.now
  if @data["status"] == "pending"
    @data["duration"] = current_duration
  end
  self.class.raw_log("#{@data.to_json}\n")
end
|
|
|
|
|
|
|
|
# Job instrumentation is opt-in via the DISCOURSE_LOG_SIDEKIQ env var.
def enabled?
  ENV.fetch("DISCOURSE_LOG_SIDEKIQ", nil) == "1"
end
|
2013-02-06 03:16:51 +08:00
|
|
|
|
2019-03-08 18:31:49 +08:00
|
|
|
# Lazily-created lock guarding all shared instrumentation state
# (@@active_jobs and the @data written by the interval-logging thread).
def self.mutex
  @@mutex ||= Mutex.new
end
|
|
|
|
|
2019-03-05 19:19:11 +08:00
|
|
|
# Start (at most once) a background thread that, every
# DISCOURSE_LOG_SIDEKIQ_INTERVAL seconds, writes an in-progress log record
# for any active job that has been running longer than the interval.
# No-op when the env var is unset.
def self.ensure_interval_logging!
  interval = ENV["DISCOURSE_LOG_SIDEKIQ_INTERVAL"]
  return if !interval
  interval = interval.to_i
  @@interval_thread ||= Thread.new do
    begin
      loop do
        sleep interval
        # synchronize: @@active_jobs is also mutated by initialize/stop
        mutex.synchronize do
          @@active_jobs.each { |j| j.write_to_log if j.current_duration > interval }
        end
      end
    rescue Exception => e
      # NOTE(review): rescues Exception so even fatal errors are reported
      # before the logging thread dies.
      Discourse.warn_exception(e, message: "Sidekiq interval logging thread terminated unexpectedly")
    end
  end
end
|
2013-09-30 11:22:19 +08:00
|
|
|
end
|
|
|
|
|
2019-03-05 19:19:11 +08:00
|
|
|
include Sidekiq::Worker
|
|
|
|
|
2013-07-01 02:00:23 +08:00
|
|
|
# Write each argument to the Rails log as an info line tagged with a
# timestamp and the upper-cased job class name. Always returns true.
def log(*args)
  args.each do |message|
    prefix = "#{Time.now.to_formatted_s(:db)}: [#{self.class.name.upcase}]"
    Rails.logger.info "#{prefix} #{message}"
  end
  true
end
|
|
|
|
|
2014-07-18 04:22:46 +08:00
|
|
|
# Construct an error context object for Discourse.handle_exception
# Subclasses are encouraged to use this!
#
# `opts` is the arguments passed to execute().
# `code_desc` is a short string describing what the code was doing (optional).
# `extra` is for any other context you logged.
# Note that, when building your `extra`, :opts, :job, and :message are used
# by this method, and :current_db and :current_hostname are used by
# handle_exception.
def error_context(opts, code_desc = nil, extra = {})
  ctx = {}
  ctx[:opts] = opts
  ctx[:job] = self.class
  ctx[:message] = code_desc if code_desc
  # allow explicit `nil` to be passed for extra without blowing up
  ctx.merge!(extra) unless extra.nil?
  ctx
end
|
|
|
|
|
2013-02-06 03:16:51 +08:00
|
|
|
# Instantiate this job class and run it inline with the given options.
def self.delayed_perform(opts = {})
  new.perform(opts)
end
|
|
|
|
|
|
|
|
# The job body. Every concrete job subclass must override this;
# the base implementation always raises.
def execute(opts = {})
  raise "Overwrite me!"
end
|
|
|
|
|
2013-09-30 11:22:19 +08:00
|
|
|
# Seconds of database time recorded for the most recent run,
# or 0 before anything has been recorded.
def last_db_duration
  @db_duration ? @db_duration : 0
end
|
|
|
|
|
2013-08-08 01:25:05 +08:00
|
|
|
# Sidekiq entry point. Normalizes the options hash, records the last-run
# timestamp in redis, then runs #execute — either synchronously when
# :sync_exec is set, or once per multisite database otherwise, each run
# wrapped in a JobInstrumenter.
#
# Exceptions from #execute are reported via Discourse.handle_job_exception
# and re-raised wrapped in HandledExceptionWrapper so Sidekiq's retry
# machinery still sees a failure.
def perform(*args)
  opts = args.extract_options!.with_indifferent_access

  if ::Jobs.run_later?
    # remember when a job last ran (read back by Jobs.last_job_performed_at)
    Sidekiq.redis do |r|
      r.set('last_job_perform_at', Time.now.to_i)
    end
  end

  if opts.delete(:sync_exec)
    if opts.has_key?(:current_site_id) && opts[:current_site_id] != RailsMultisite::ConnectionManagement.current_db
      raise ArgumentError.new("You can't connect to another database when executing a job synchronously.")
    else
      begin
        retval = execute(opts)
      rescue => exc
        Discourse.handle_job_exception(exc, error_context(opts))
      end
      return retval
    end
  end

  # Run against one site, or every site when no :current_site_id was given.
  dbs =
    if opts[:current_site_id]
      [opts[:current_site_id]]
    else
      RailsMultisite::ConnectionManagement.all_dbs
    end

  exceptions = []
  dbs.each do |db|
    begin
      exception = {}

      RailsMultisite::ConnectionManagement.with_connection(db) do
        job_instrumenter = JobInstrumenter.new(job_class: self.class, opts: opts, db: db, jid: jid)
        begin
          I18n.locale = SiteSetting.default_locale || SiteSettings::DefaultsProvider::DEFAULT_LOCALE
          I18n.ensure_all_loaded!
          begin
            # Tag any log lines produced during the job with its class and db.
            logster_env = {}
            Logster.add_to_env(logster_env, :job, self.class.to_s)
            Logster.add_to_env(logster_env, :db, db)
            Thread.current[Logster::Logger::LOGSTER_ENV] = logster_env

            execute(opts)
          rescue => e
            exception[:ex] = e
            exception[:other] = { problem_db: db }
          end
        rescue => e
          exception[:ex] = e
          exception[:message] = "While establishing database connection to #{db}"
          exception[:other] = { problem_db: db }
        ensure
          job_instrumenter.stop(exception: exception)
        end
      end

      exceptions << exception unless exception.empty?
    end
  end

  Thread.current[Logster::Logger::LOGSTER_ENV] = nil

  if exceptions.length > 0
    exceptions.each do |exception_hash|
      # FIX: previously read exception_hash[:code], a key never assigned in
      # this method, so the "While establishing database connection ..."
      # description was silently dropped. :message is the key actually
      # populated by the rescue clause above.
      Discourse.handle_job_exception(exception_hash[:ex], error_context(opts, exception_hash[:message], exception_hash[:other]))
    end
    raise HandledExceptionWrapper.new(exceptions[0][:ex])
  end

  nil
ensure
  # return connections to the pool whether the job succeeded or not
  ActiveRecord::Base.connection_handler.clear_active_connections!
end
|
2013-05-19 22:22:31 +08:00
|
|
|
|
2013-02-06 03:16:51 +08:00
|
|
|
end
|
|
|
|
|
2015-03-23 09:20:50 +08:00
|
|
|
# Raised after a job's exceptions have already been reported through
# Discourse.handle_job_exception; the original error stays reachable
# via #wrapped so Sidekiq still registers the failure.
class HandledExceptionWrapper < StandardError
  attr_accessor :wrapped

  def initialize(ex)
    @wrapped = ex
    super("Wrapped #{ex.class}: #{ex.message}")
  end
end
|
|
|
|
|
2013-08-08 01:25:05 +08:00
|
|
|
# Base class for recurring jobs driven by MiniScheduler.
class Scheduled < Base
  extend MiniScheduler::Schedule

  # Scheduled jobs are skipped while the site is in readonly mode; the
  # heartbeat is exempt (checked first, so readonly_mode? is not consulted
  # for it).
  def perform(*args)
    return if !(::Jobs::Heartbeat === self) && Discourse.readonly_mode?
    super
  end
end
|
2013-02-06 03:16:51 +08:00
|
|
|
|
2020-05-28 19:52:27 +08:00
|
|
|
# Enqueue a job for background execution via Sidekiq, or run it inline when
# Jobs.run_immediately! is in effect (e.g. in tests).
#
# `job` is either a job class or a symbol/string resolved under ::Jobs.
# `opts` are forwarded to the job's #execute; control keys consumed here:
#   :all_sites  - when truthy, run on every multisite database
#   :delay_for  - seconds to wait before the job becomes runnable
#   :queue      - explicit Sidekiq queue name
def self.enqueue(job, opts = {})
  if job.instance_of?(Class)
    klass = job
  else
    # :foo_bar -> ::Jobs::FooBar
    klass = "::Jobs::#{job.to_s.camelcase}".constantize
  end

  # Unless we want to work on all sites
  unless opts.delete(:all_sites)
    opts[:current_site_id] ||= RailsMultisite::ConnectionManagement.current_db
  end

  delay = opts.delete(:delay_for)
  queue = opts.delete(:queue)

  # Only string keys are allowed in JSON. We call `.with_indifferent_access`
  # in Jobs::Base#perform, so this is invisible to developers
  opts = opts.deep_stringify_keys

  # Simulate the args being dumped/parsed through JSON
  parsed_opts = JSON.parse(JSON.dump(opts))
  if opts != parsed_opts
    Discourse.deprecate(<<~TEXT.squish, since: "2.9", drop_from: "3.0")
      #{klass.name} was enqueued with argument values which do not cleanly serialize to/from JSON.
      This means that the job will be run with slightly different values than the ones supplied to `enqueue`.
      Argument values should be strings, booleans, numbers, or nil (or arrays/hashes of those value types).
    TEXT
  end
  opts = parsed_opts

  if ::Jobs.run_later?
    hash = {
      'class' => klass,
      'args' => [opts]
    }

    if delay
      if delay.to_f > 0
        hash['at'] = Time.now.to_f + delay.to_f
      end
    end

    if queue
      hash['queue'] = queue
    end

    # Push only after the surrounding DB transaction commits, so the job
    # cannot observe uncommitted state.
    DB.after_commit { klass.client_push(hash) }
  else
    # Otherwise execute the job right away
    opts["sync_exec"] = true

    if Rails.env == "development"
      Scheduler::Defer.later("job") do
        klass.new.perform(opts)
      end
    else
      # Run the job synchronously
      # But never run a job inside another job
      # That could cause deadlocks during test runs
      queue = Thread.current[:discourse_nested_job_queue]
      outermost_job = !queue

      if outermost_job
        queue = Queue.new
        Thread.current[:discourse_nested_job_queue] = queue
      end

      queue.push([klass, opts])

      if outermost_job
        # responsible for executing the queue
        begin
          until queue.empty?
            queued_klass, queued_opts = queue.pop(true)
            queued_klass.new.perform(queued_opts)
          end
        ensure
          Thread.current[:discourse_nested_job_queue] = nil
        end
      end
    end
  end
end
|
|
|
|
|
|
|
|
# Schedule `job_name` to run `secs` seconds from now.
# Uses a non-destructive merge so the caller's `opts` hash is not mutated
# as a side effect (previously used merge!).
def self.enqueue_in(secs, job_name, opts = {})
  enqueue(job_name, opts.merge(delay_for: secs))
end
|
|
|
|
|
2013-05-08 02:25:41 +08:00
|
|
|
# Schedule `job_name` to run at a specific time; times already in the past
# are clamped to "now".
def self.enqueue_at(datetime, job_name, opts = {})
  delay = datetime.to_f - Time.zone.now.to_f
  enqueue_in(delay.negative? ? 0 : delay, job_name, opts)
end
|
|
|
|
|
2016-08-12 19:10:52 +08:00
|
|
|
# Remove every matching pending entry from Sidekiq's scheduled set.
def self.cancel_scheduled_job(job_name, opts = {})
  scheduled_for(job_name, opts).each { |job| job.delete }
end
|
|
|
|
|
2016-08-12 19:10:52 +08:00
|
|
|
# Entries in Sidekiq's scheduled set for the given job whose stored
# arguments include every key/value pair in `opts`. Unless :all_sites is
# passed, the match is scoped to the current multisite database.
def self.scheduled_for(job_name, opts = {})
  opts = opts.with_indifferent_access
  unless opts.delete(:all_sites)
    opts[:current_site_id] ||= RailsMultisite::ConnectionManagement.current_db
  end

  job_class = "Jobs::#{job_name.to_s.camelcase}"
  Sidekiq::ScheduledSet.new.select do |scheduled_job|
    next false if scheduled_job.klass.to_s != job_class

    job_params = scheduled_job.item["args"][0].with_indifferent_access
    opts.all? { |key, value| job_params[key] == value }
  end
end
|
2013-02-06 03:16:51 +08:00
|
|
|
end
|