require_dependency 'jobs'
require_dependency 'pretty_text'
require_dependency 'rate_limiter'
require_dependency 'post_revisor'

require 'archetype'
require 'digest/sha1'

class Post < ActiveRecord::Base
  include RateLimiter::OnCreateRecord

  module HiddenReason
    FLAG_THRESHOLD_REACHED = 1
    FLAG_THRESHOLD_REACHED_AGAIN = 2
  end

  versioned

  rate_limit

  acts_as_paranoid
  after_recover :update_flagged_posts_count
  after_destroy :update_flagged_posts_count

  belongs_to :user
  belongs_to :topic, counter_cache: :posts_count

  has_many :post_replies
  has_many :replies, through: :post_replies
  has_many :post_actions

  validates_presence_of :raw, :user_id, :topic_id
  validates :raw, length: {in: SiteSetting.min_post_length..SiteSetting.max_post_length}
  validate :raw_quality
  validate :max_mention_validator
  validate :max_images_validator
  validate :max_links_validator
  validate :unique_post_validator

  # We can pass a hash of image sizes when saving to prevent crawling those images
  attr_accessor :image_sizes, :quoted_post_numbers, :no_bump, :invalidate_oneboxes

  SHORT_POST_CHARS = 1200

  # Post Types
  REGULAR = 1
  MODERATOR_ACTION = 2

  before_save :extract_quoted_post_numbers

  after_commit :feature_topic_users, on: :create
  after_commit :trigger_post_process, on: :create
  after_commit :email_private_message, on: :create

  # Related to unique post tracking
  after_commit :store_unique_post_key, on: :create

  after_create do
    TopicUser.auto_track(self.user_id, self.topic_id, TopicUser::NotificationReasons::CREATED_POST)
  end

  before_validation do
    self.raw.strip! if self.raw.present?
  end

  def raw_quality
    sentinel = TextSentinel.new(self.raw, min_entropy: SiteSetting.body_min_entropy)
    if sentinel.valid?
      # It's possible the sentinel has cleaned up the raw text a bit
      self.raw = sentinel.text
    else
      errors.add(:raw, I18n.t(:is_invalid))
    end
  end

  # Stop us from posting the same thing too quickly
  def unique_post_validator
    return if SiteSetting.unique_posts_mins == 0
    return if user.admin? or user.has_trust_level?(:moderator)

    # If the post is empty, default to the validates_presence_of
    return if raw.blank?

    if $redis.exists(unique_post_key)
      errors.add(:raw, I18n.t(:just_posted_that))
    end
  end

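  # Together with unique_post_validator above, the next three methods form the
  # duplicate-post guard: the raw text is normalised and hashed, the hash becomes
  # part of a per-user redis key, and the key expires after
  # SiteSetting.unique_posts_mins minutes. Illustrative only:
  #
  #   Digest::SHA1.hexdigest("Hello  World".gsub(/\s+/, "").downcase)
  #   # => same digest as for "helloworld", so whitespace/case tweaks still
  #   #    count as "the same post"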
  # On successful post, store a hash key to prevent the same post from happening again
  def store_unique_post_key
    return if SiteSetting.unique_posts_mins == 0
    $redis.setex(unique_post_key, SiteSetting.unique_posts_mins.minutes.to_i, "1")
  end

  # The key we use in redis to ensure unique posts
  def unique_post_key
    "post-#{user_id}:#{raw_hash}"
  end

  def raw_hash
    return nil if raw.blank?
    Digest::SHA1.hexdigest(raw.gsub(/\s+/, "").downcase)
  end

  def cooked_document
    self.cooked ||= cook(self.raw, topic_id: topic_id)
    @cooked_document ||= Nokogiri::HTML.fragment(self.cooked)
  end

  def reset_cooked
    @cooked_document = nil
    self.cooked = nil
  end

  def self.white_listed_image_classes
    @white_listed_image_classes ||= ['avatar']
  end

  def image_count
    return 0 unless self.raw.present?

    cooked_document.search("img").reject { |t|
      dom_class = t["class"]
      if dom_class
        (Post.white_listed_image_classes & dom_class.split(" ")).count > 0
      else
        false
      end
    }.count
  end

  def link_count
    return 0 unless self.raw.present?
    cooked_document.search("a[href]").count
  end

  def max_mention_validator
    errors.add(:raw, I18n.t(:too_many_mentions)) if raw_mentions.size > SiteSetting.max_mentions_per_post
  end

  def max_images_validator
    return if user.present? and user.has_trust_level?(:basic)
    errors.add(:raw, I18n.t(:too_many_images)) if image_count > 0
  end

  def max_links_validator
    return if user.present? and user.has_trust_level?(:basic)
    errors.add(:raw, I18n.t(:too_many_links)) if link_count > 1
  end

  def raw_mentions
    return [] if raw.blank?

    # We don't count mentions in quotes
    return @raw_mentions if @raw_mentions.present?
    raw_stripped = raw.gsub(/\[quote=(.*)\]([^\[]*?)\[\/quote\]/im, '')

    # Strip pre and code tags
    doc = Nokogiri::HTML.fragment(raw_stripped)
    doc.search("pre").remove
    doc.search("code").remove

    results = doc.to_html.scan(PrettyText.mention_matcher)
    if results.present?
      @raw_mentions = results.uniq.map { |un| un.first.downcase.gsub!(/^@/, '') }
    else
      @raw_mentions = []
    end
  end

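  # Illustrative example of raw_mentions above (assuming PrettyText.mention_matcher
  # matches @username tokens): a raw of "thanks @Sam! [quote=...]cc @jeff[/quote]"
  # yields ["sam"] - mentions inside quotes, pre and code blocks are ignored, and
  # the results are uniqued, downcased and stripped of the leading @.
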
  # The rules for deletion change depending on who is doing it.
  def delete_by(deleted_by)
    if deleted_by.has_trust_level?(:moderator)
      # As a moderator, delete the post.
      Post.transaction do
        self.destroy
        Topic.reset_highest(self.topic_id)
      end
    elsif deleted_by.id == self.user_id
      # As the poster, make a revision that says deleted.
      Post.transaction do
        revise(deleted_by, I18n.t('js.post.deleted_by_author'), force_new_version: true)
        update_column(:user_deleted, true)
      end
    end
  end

  def archetype
    topic.archetype
  end

  def self.regular_order
    order(:sort_order, :post_number)
  end

  def self.reverse_order
    order('sort_order desc, post_number desc')
  end

  def self.best_of
    where("(post_number = 1) or (score >= ?)", SiteSetting.best_of_score_threshold)
  end

  def update_flagged_posts_count
    PostAction.update_flagged_posts_count
  end

  def filter_quotes(parent_post=nil)
    return cooked if parent_post.blank?

    # We only filter quotes when there is exactly 1
    return cooked unless (quote_count == 1)

    parent_raw = parent_post.raw.sub(/\[quote.+\/quote\]/m, '').strip

    if raw[parent_raw] or (parent_raw.size < SHORT_POST_CHARS)
      return cooked.sub(/\<aside.+\<\/aside\>/m, '')
    end

    cooked
  end

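  # Note on filter_quotes above: when a reply contains exactly one quote and that
  # quote covers its parent post (or the parent is shorter than SHORT_POST_CHARS),
  # the rendered <aside> quote block is stripped so the parent isn't repeated
  # immediately below itself in the stream.
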
  def username
    user.username
  end

  def external_id
    "#{topic_id}/#{post_number}"
  end

  def quoteless?
    (quote_count == 0) and (reply_to_post_number.present?)
  end

  # Get the post that we reply to.
  def reply_to_user
    return nil unless reply_to_post_number.present?
    User.where('id = (select user_id from posts where topic_id = ? and post_number = ?)', topic_id, reply_to_post_number).first
  end

  def reply_notification_target
    return nil unless reply_to_post_number.present?
    reply_post = Post.where("topic_id = :topic_id AND post_number = :post_number AND user_id <> :user_id",
                            topic_id: topic_id,
                            post_number: reply_to_post_number,
                            user_id: user_id).first
    return reply_post.try(:user)
  end

  def self.excerpt(cooked, maxlength=nil)
    maxlength ||= SiteSetting.post_excerpt_maxlength
    PrettyText.excerpt(cooked, maxlength)
  end

  # Strip out most of the markup
  def excerpt(maxlength=nil)
    Post.excerpt(cooked, maxlength)
  end

  # What we use to cook posts
  def cook(*args)
    cooked = PrettyText.cook(*args)

    # If we have any of the oneboxes in the cache, throw them in right away, don't
    # wait for the post processor.
    dirty = false
    doc = Oneboxer.each_onebox_link(cooked) do |url, elem|
      cached = Oneboxer.render_from_cache(url)
      if cached.present?
        elem.swap(cached.cooked)
        dirty = true
      end
    end

    cooked = doc.to_html if dirty
    cooked
  end

  # A list of versions including the initial version
  def all_versions
    result = []
    result << {number: 1, display_username: user.name, created_at: created_at}
    versions.order(:number).includes(:user).each do |v|
      result << {number: v.number, display_username: v.user.name, created_at: v.created_at}
    end
    result
  end

  def is_flagged?
    post_actions.where('post_action_type_id in (?) and deleted_at is null', PostActionType.FlagTypes).count != 0
  end

  def unhide!
    self.hidden = false
    self.hidden_reason_id = nil
    self.topic.update_attributes(visible: true)
    self.save
  end

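  # Illustrative example of the URL built below (made-up values): a post with
  # post_number 3 in topic id 57 titled "Welcome to the forum" maps to
  # "/t/welcome-to-the-forum/57/3".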
  def url
    "/t/#{Slug.for(topic.title)}/#{topic.id}/#{post_number}"
  end

  def revise(updated_by, new_raw, opts={})
    PostRevisor.new(self).revise!(updated_by, new_raw, opts)
  end

  # Various callbacks
  before_create do
    self.post_number ||= Topic.next_post_number(topic_id, reply_to_post_number.present?)
    self.cooked ||= cook(raw, topic_id: topic_id)
    self.sort_order = post_number
    DiscourseEvent.trigger(:before_create_post, self)
    self.last_version_at ||= Time.now
  end

  # TODO: Move some of this into an asynchronous job?
  after_create do
    # Update attributes on the topic - featured users and last posted.
    attrs = {last_posted_at: self.created_at, last_post_user_id: self.user_id}
    attrs[:bumped_at] = self.created_at unless no_bump
    topic.update_attributes(attrs)

    # Update the user's last posted at date
    user.update_column(:last_posted_at, self.created_at)

    # Update topic user data
    TopicUser.change(user,
                     topic.id,
                     posted: true,
                     last_read_post_number: self.post_number,
                     seen_post_count: self.post_number)
  end

  def email_private_message
    # send a mail to notify users in case of a private message
    if topic.private_message?
      topic.allowed_users.where(["users.email_private_messages = true and users.id != ?", self.user_id]).each do |u|
        Jobs.enqueue_in(SiteSetting.email_time_window_mins.minutes, :user_email, type: :private_message, user_id: u.id, post_id: self.id)
      end
    end
  end

  def feature_topic_users
    Jobs.enqueue(:feature_topic_users, topic_id: self.topic_id)
  end

  # This calculates the geometric mean of the post timings and stores it along with
  # each post.
  def self.calculate_avg_time
    retry_lock_error do
      exec_sql("UPDATE posts
                SET avg_time = (x.gmean / 1000)
                FROM (SELECT post_timings.topic_id,
                             post_timings.post_number,
                             round(exp(avg(ln(msecs)))) AS gmean
                      FROM post_timings
                      INNER JOIN posts AS p2
                        ON p2.post_number = post_timings.post_number
                        AND p2.topic_id = post_timings.topic_id
                        AND p2.user_id <> post_timings.user_id
                      GROUP BY post_timings.topic_id, post_timings.post_number) AS x
                WHERE x.topic_id = posts.topic_id
                  AND x.post_number = posts.post_number")
    end
  end

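  # Note on the SQL above: exp(avg(ln(msecs))) is the geometric mean, since
  # (x1 * x2 * ... * xn) ** (1.0 / n) equals exp((ln(x1) + ... + ln(xn)) / n);
  # averaging in log space keeps large topics from overflowing the raw product.
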
  before_save do
    self.last_editor_id ||= self.user_id
    self.cooked = cook(raw, topic_id: topic_id) unless new_record?
  end

  before_destroy do
    # Update the last post id to the previous post if it exists
    last_post = Post.where("topic_id = ? and id <> ?", self.topic_id, self.id).order('created_at desc').limit(1).first
    if last_post.present?
      topic.update_attributes(last_posted_at: last_post.created_at,
                              last_post_user_id: last_post.user_id,
                              highest_post_number: last_post.post_number)

      # If the poster doesn't have any other posts in the topic, clear their posted flag
      unless Post.exists?(["topic_id = ? and user_id = ? and id <> ?", self.topic_id, self.user_id, self.id])
        TopicUser.update_all 'posted = false', ['topic_id = ? and user_id = ?', self.topic_id, self.user_id]
      end
    end

    # Feature users in the topic
    Jobs.enqueue(:feature_topic_users, topic_id: topic_id, except_post_id: self.id)
  end

  after_destroy do
    # Remove any reply records that point to deleted posts
    post_ids = PostReply.select(:post_id).where(reply_id: self.id).map(&:post_id)
    PostReply.delete_all ["reply_id = ?", self.id]

    if post_ids.present?
      Post.where(id: post_ids).each { |p| p.update_column :reply_count, p.replies.count }
    end

    # Remove any notifications that point to this deleted post
    Notification.delete_all ["topic_id = ? and post_number = ?", self.topic_id, self.post_number]
  end

  after_save do
    DraftSequence.next! self.last_editor_id, self.topic.draft_key if self.topic # could be deleted

    quoted_post_numbers << reply_to_post_number if reply_to_post_number.present?

    # Create a reply relationship between quoted posts and this new post
    if self.quoted_post_numbers.present?
      self.quoted_post_numbers.map! { |pid| pid.to_i }.uniq!
      self.quoted_post_numbers.each do |p|
        post = Post.where(topic_id: topic_id, post_number: p).first
        if post.present?
          post_reply = post.post_replies.new(reply_id: self.id)
          if post_reply.save
            Post.update_all ['reply_count = reply_count + 1'], id: post.id
          end
        end
      end
    end
  end

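  # A hypothetical example of the quote markup parsed below: a raw containing
  # [quote="sam, post:3, topic:12"]...[/quote] produces args of {post: 3, topic: 12},
  # and post number 3 is recorded as quoted only if topic 12 matches this post's topic.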
  def extract_quoted_post_numbers
    self.quoted_post_numbers = []

    # Create relationships for the quotes
    raw.scan(/\[quote=\"([^"]+)"\]/).each do |m|
      if m.present?
        args = {}
        m.first.scan(/([a-z]+)\:(\d+)/).each do |arg|
          args[arg[0].to_sym] = arg[1].to_i
        end

        if args[:topic].present?
          # If the topic attribute is present, ensure it's the same topic
          self.quoted_post_numbers << args[:post] if self.topic_id == args[:topic]
        else
          self.quoted_post_numbers << args[:post]
        end
      end
    end

    self.quoted_post_numbers.uniq!
    self.quote_count = self.quoted_post_numbers.size
  end

  # Process this post after committing it
  def trigger_post_process
    args = {post_id: self.id}
    args[:image_sizes] = self.image_sizes if self.image_sizes.present?
    args[:invalidate_oneboxes] = true if self.invalidate_oneboxes.present?
    Jobs.enqueue(:process_post, args)
  end

end