2013-02-06 03:16:51 +08:00
|
|
|
require_dependency 'jobs'
|
|
|
|
require_dependency 'pretty_text'
|
|
|
|
require_dependency 'rate_limiter'
|
2013-02-09 23:33:07 +08:00
|
|
|
require_dependency 'post_revisor'
|
2013-02-06 03:16:51 +08:00
|
|
|
|
|
|
|
require 'archetype'
|
|
|
|
require 'digest/sha1'
|
|
|
|
|
|
|
|
# A single post within a topic. Posts hold the user-entered markdown
# (`raw`) and its rendered HTML (`cooked`), plus counters used by the
# moderation limits below (mentions, images, links, quotes).
class Post < ActiveRecord::Base
  # Rate limiting on record creation (configured by `rate_limit` below).
  include RateLimiter::OnCreateRecord

  # Record a new version whenever the raw body changes.
  versioned if: :raw_changed?

  rate_limit

  # Soft deletion: destroy stamps deleted_at instead of removing the row.
  acts_as_paranoid

  # Keep the flagged-posts counter accurate across soft delete/recover.
  after_recover :update_flagged_posts_count
  after_destroy :update_flagged_posts_count

  belongs_to :user
  belongs_to :topic, counter_cache: :posts_count

  # Reply graph: post_replies joins this post to the posts replying to it.
  has_many :post_replies
  has_many :replies, through: :post_replies
  has_many :post_actions

  validates_presence_of :raw, :user_id, :topic_id
  validates :raw, stripped_length: { in: SiteSetting.post_length }
  validate :raw_quality
  validate :max_mention_validator
  validate :max_images_validator
  validate :max_links_validator
  validate :unique_post_validator

  # We can pass a hash of image sizes when saving to prevent crawling those images
  attr_accessor :image_sizes, :quoted_post_numbers, :no_bump, :invalidate_oneboxes

  # Posts shorter than this many characters count as "short" — used by
  # filter_quotes when deciding whether a quoted parent is fully visible.
  SHORT_POST_CHARS = 1200

  # Post Types
  REGULAR = 1
  MODERATOR_ACTION = 2

  before_save :extract_quoted_post_numbers

  scope :by_newest, order('created_at desc, id desc')
  scope :with_user, includes(:user)
2013-03-19 02:59:34 +08:00
|
|
|
# Enumerates the reasons a post can be hidden, in flag-escalation order.
# Memoized on the class since the set never changes at runtime.
def self.hidden_reasons
  @hidden_reasons = Enum.new(:flag_threshold_reached, :flag_threshold_reached_again) if @hidden_reasons.nil?
  @hidden_reasons
end
|
|
|
|
|
2013-02-07 09:09:31 +08:00
|
|
|
# Validates the raw body through TextSentinel (entropy floor comes from
# SiteSetting.body_min_entropy). On success the sentinel may have
# normalised the text, so we write it back; on failure we add an error
# on :raw.
def raw_quality
  sentinel = TextSentinel.new(raw, min_entropy: SiteSetting.body_min_entropy)
  if sentinel.valid?
    # It's possible the sentinel has cleaned up the text a bit
    self.raw = sentinel.text
  else
    # Fix: the trailing `unless sentinel.valid?` guard here was
    # redundant — this branch is only reached when it is invalid.
    errors.add(:raw, I18n.t(:is_invalid))
  end
end
|
|
|
|
|
|
|
|
|
2013-02-06 03:16:51 +08:00
|
|
|
# Stop us from posting the same thing too quickly: rejects a post whose
# normalised body hash was already posted by this user within the
# SiteSetting.unique_posts_mins window (tracked by a redis key).
def unique_post_validator
  return if SiteSetting.unique_posts_mins == 0
  return if user.admin? || user.moderator?

  # An empty body is reported by validates_presence_of instead.
  return if raw.blank?

  errors.add(:raw, I18n.t(:just_posted_that)) if $redis.exists(unique_post_key)
end
|
|
|
|
|
|
|
|
# The redis key we use to detect duplicate recent posts by one user.
def unique_post_key
  ["post-", user_id, ":", raw_hash].join
end

# SHA1 of the raw body with all whitespace removed and case folded, so
# trivially reformatted duplicates hash identically. Nil for blank raw.
def raw_hash
  return if raw.blank?
  normalized = raw.gsub(/\s+/, "").downcase
  Digest::SHA1.hexdigest(normalized)
end
|
|
|
|
|
|
|
|
# Memoized Nokogiri fragment of the cooked HTML. Cooks the raw body
# first if `cooked` has not been generated yet (note: mutates self.cooked).
def cooked_document
  self.cooked ||= cook(raw, topic_id: topic_id)
  @cooked_document ||= Nokogiri::HTML.fragment(cooked)
end

# Clears both the cooked HTML and its memoized parsed document so the
# next access re-cooks from `raw`.
def reset_cooked
  @cooked_document = nil
  self.cooked = nil
end
|
|
|
|
|
2013-02-12 15:43:48 +08:00
|
|
|
# CSS classes whose <img> tags are exempt from the image-count limit
# (currently just avatars). Memoized on the class.
def self.white_listed_image_classes
  @white_listed_image_classes ||= %w[avatar]
end
|
|
|
|
|
2013-02-07 23:45:24 +08:00
|
|
|
# Number of images in the cooked post, not counting any <img> whose
# class list intersects the white list (e.g. avatars).
def image_count
  return 0 unless raw.present?

  cooked_document.search("img").count do |img|
    class_attr = img["class"]
    classes = class_attr ? class_attr.split(" ") : []
    (Post.white_listed_image_classes & classes).empty?
  end
end
|
|
|
|
|
|
|
|
# Number of hyperlinks (anchors with an href) in the cooked post.
def link_count
  raw.present? ? cooked_document.search("a[href]").count : 0
end
|
|
|
|
|
|
|
|
# Rejects posts that @-mention more users than max_mentions_per_post allows.
def max_mention_validator
  if raw_mentions.size > SiteSetting.max_mentions_per_post
    errors.add(:raw, I18n.t(:too_many_mentions))
  end
end
|
|
|
|
|
|
|
|
# Users below :basic trust level may not post images at all.
def max_images_validator
  return if user.present? && user.has_trust_level?(:basic)
  if image_count > 0
    errors.add(:raw, I18n.t(:too_many_images))
  end
end

# Users below :basic trust level may include at most one link.
def max_links_validator
  return if user.present? && user.has_trust_level?(:basic)
  if link_count > 1
    errors.add(:raw, I18n.t(:too_many_links))
  end
end
|
2013-02-06 03:16:51 +08:00
|
|
|
|
|
|
|
|
|
|
|
# Usernames @-mentioned in the raw body (lowercased, without the "@"),
# excluding mentions inside quote / pre / code blocks. Memoized.
def raw_mentions
  return [] if raw.blank?

  # We don't count mentions in quotes
  return @raw_mentions if @raw_mentions.present?

  raw_stripped = raw.gsub(/\[quote=(.*)\]([^\[]*?)\[\/quote\]/im, '')

  # Strip pre and code tags
  doc = Nokogiri::HTML.fragment(raw_stripped)
  doc.search("pre").remove
  doc.search("code").remove

  results = doc.to_html.scan(PrettyText.mention_matcher)

  # Fix: the previous `downcase.gsub!(/^@/, '')` returned nil for any
  # capture not starting with "@" (gsub! returns nil when nothing is
  # substituted), which injected nils into the mapped array. The
  # non-destructive sub always returns a String.
  @raw_mentions = results.uniq.map { |un| un.first.downcase.sub(/^@/, '') }
end
|
2013-02-06 03:16:51 +08:00
|
|
|
|
2013-02-08 04:12:55 +08:00
|
|
|
# The rules for deletion change depending on who is doing it.
def delete_by(deleted_by)
  if deleted_by.moderator?
    # As a moderator, delete the post.
    Post.transaction do
      self.destroy
      # Recompute the topic's highest post number now this post is gone.
      Topic.reset_highest(topic_id)
    end
  elsif deleted_by.id == user_id
    # As the poster, make a revision that says deleted.
    Post.transaction do
      revise(deleted_by, I18n.t('js.post.deleted_by_author'), force_new_version: true)
      # update_column deliberately skips validations and callbacks.
      update_column(:user_deleted, true)
    end
  end
  # NOTE(review): any other user silently falls through with no effect —
  # presumably authorization happens in the caller; verify.
end
|
|
|
|
|
|
|
|
# The archetype (e.g. regular vs. private message) lives on the topic.
def archetype
  topic.archetype
end
|
2013-02-07 23:45:24 +08:00
|
|
|
|
2013-02-06 03:16:51 +08:00
|
|
|
# Default display ordering for a topic's posts.
def self.regular_order
  order(:sort_order, :post_number)
end

# The same ordering reversed (highest position first).
def self.reverse_order
  order('sort_order desc, post_number desc')
end

# The first post plus any post whose score passes the best-of threshold.
def self.best_of
  where("(post_number = 1) or (score >= ?)", SiteSetting.best_of_score_threshold)
end
|
|
|
|
|
2013-02-06 09:13:41 +08:00
|
|
|
# Recomputes the site-wide flagged-posts counter; hooked to both
# after_destroy and after_recover above.
def update_flagged_posts_count
  PostAction.update_flagged_posts_count
end
|
|
|
|
|
2013-03-01 02:54:12 +08:00
|
|
|
# Returns the cooked HTML, stripping the quote markup when this post
# quotes exactly one post and the quote adds nothing the reader can't
# already see in the parent.
def filter_quotes(parent_post = nil)
  return cooked if parent_post.blank?

  # We only filter quotes when there is exactly 1
  return cooked unless (quote_count == 1)

  # The parent's raw with its own [quote]...[/quote] block removed.
  parent_raw = parent_post.raw.sub(/\[quote.+\/quote\]/m, '')

  # Drop the <aside> quote wrapper when this post's raw contains the
  # parent's text, or the parent is short enough to be fully visible.
  if raw[parent_raw] || (parent_raw.size < SHORT_POST_CHARS)
    return cooked.sub(/\<aside.+\<\/aside\>/m, '')
  end

  cooked
end
|
|
|
|
|
|
|
|
# Convenience delegation to the author's username.
def username
  user.username
end

# Stable external identifier of the form "<topic_id>/<post_number>".
def external_id
  "#{topic_id}/#{post_number}"
end
|
|
|
|
|
|
|
|
# True when this post replies to another post without quoting anything.
def quoteless?
  return false unless reply_to_post_number.present?
  quote_count == 0
end
|
|
|
|
|
|
|
|
# The author of the post this one replies to, or nil when this is not a
# reply or the target post cannot be found.
def reply_to_user
  return if reply_to_post_number.blank?
  parent = Post.where(topic_id: topic_id, post_number: reply_to_post_number).first
  parent.try(:user)
end
|
|
|
|
|
|
|
|
# The user who should be notified about this reply: the author of the
# replied-to post, excluded when that author is this post's own author.
def reply_notification_target
  return if reply_to_post_number.blank?
  Post.where("topic_id = :topic_id AND post_number = :post_number AND user_id <> :user_id",
             topic_id: topic_id,
             post_number: reply_to_post_number,
             user_id: user_id).first.try(:user)
end
|
|
|
|
|
2013-03-01 02:54:12 +08:00
|
|
|
# Builds a truncated excerpt from arbitrary cooked HTML; a nil/false
# maxlength falls back to SiteSetting.post_excerpt_maxlength.
def self.excerpt(cooked, maxlength = nil)
  PrettyText.excerpt(cooked, maxlength || SiteSetting.post_excerpt_maxlength)
end

# Strip out most of the markup: excerpt of this post's cooked HTML.
def excerpt(maxlength = nil)
  Post.excerpt(cooked, maxlength)
end
|
|
|
|
|
|
|
|
# What we use to cook posts: renders raw markdown to HTML via
# PrettyText, then substitutes any oneboxes already in the cache.
def cook(*args)
  cooked = PrettyText.cook(*args)

  # If we have any of the oneboxes in the cache, throw them in right away, don't
  # wait for the post processor.
  dirty = false
  doc = Oneboxer.each_onebox_link(cooked) do |url, elem|
    cached = Oneboxer.render_from_cache(url)
    if cached.present?
      elem.swap(cached.cooked)
      dirty = true
    end
  end

  # Only pay for re-serializing the document if a onebox was swapped in.
  cooked = doc.to_html if dirty
  cooked
end
|
|
|
|
|
|
|
|
# A list of versions including the initial version. Each entry is a hash
# with :number, :display_username and :created_at; revisions whose user
# has gone missing are skipped.
def all_versions
  result = [{ number: 1, display_username: user.username, created_at: created_at }]
  versions.order(:number).includes(:user).each do |version|
    next if version.user.blank?
    result << { number: version.number,
                display_username: version.user.username,
                created_at: version.created_at }
  end
  result
end
|
|
|
|
|
2013-02-07 23:45:24 +08:00
|
|
|
# Whether the post currently has at least one active (undeleted) flag.
# Uses exists? instead of `count != 0` so the database can stop at the
# first matching row rather than counting them all.
def is_flagged?
  post_actions.where(post_action_type_id: PostActionType.flag_types.values, deleted_at: nil).exists?
end
|
|
|
|
|
|
|
|
# Un-hides a hidden post, makes its topic visible again, and saves.
def unhide!
  self.hidden = false
  self.hidden_reason_id = nil
  # NOTE(review): this always re-shows the topic, even if it was hidden
  # for an unrelated reason — confirm that's intended.
  self.topic.update_attributes(visible: true)
  save
end
|
|
|
|
|
2013-02-06 03:16:51 +08:00
|
|
|
# Relative URL for this post within its topic.
def url
  slug = Slug.for(topic.title)
  "/t/#{slug}/#{topic.id}/#{post_number}"
end
|
|
|
|
|
2013-02-22 02:20:00 +08:00
|
|
|
# Human-readable display name of the post's author.
def author_readable
  user.readable_name
end
|
|
|
|
|
2013-03-01 02:54:12 +08:00
|
|
|
# Revises the post's raw content as `updated_by`; versioning and edit
# bookkeeping are delegated to PostRevisor (see PostRevisor#revise! for
# supported opts, e.g. force_new_version).
def revise(updated_by, new_raw, opts = {})
  PostRevisor.new(self).revise!(updated_by, new_raw, opts)
end
|
|
|
|
|
2013-02-06 03:16:51 +08:00
|
|
|
# Various callbacks
before_create do
  # Assign the next post number within the topic; replies are numbered
  # by Topic.next_post_number, which is told whether this is a reply.
  self.post_number ||= Topic.next_post_number(topic_id, reply_to_post_number.present?)
  self.cooked ||= cook(raw, topic_id: topic_id)
  self.sort_order = post_number
  DiscourseEvent.trigger(:before_create_post, self)
  self.last_version_at ||= Time.now
end
|
|
|
|
|
|
|
|
# TODO: Move some of this into an asynchronous job?
after_create do
  # Update attributes on the topic - featured users and last posted.
  attrs = {last_posted_at: created_at, last_post_user_id: user_id}
  # no_bump suppresses moving the topic to the top of the list.
  attrs[:bumped_at] = created_at unless no_bump
  topic.update_attributes(attrs)

  # Update topic user data: mark the author as having posted and having
  # read/seen up to this post.
  TopicUser.change(user,
                   topic.id,
                   posted: true,
                   last_read_post_number: post_number,
                   seen_post_count: post_number)
end
|
|
|
|
|
|
|
|
# This calculates the geometric mean of the post timings and stores it along with
# each post.
def self.calculate_avg_time
  # retry_lock_error re-runs the block on DB lock contention.
  retry_lock_error do
    # Geometric mean computed in SQL as exp(avg(ln(msecs))), excluding
    # the author's own readings, converted from msecs to seconds.
    exec_sql("UPDATE posts
              SET avg_time = (x.gmean / 1000)
              FROM (SELECT post_timings.topic_id,
                           post_timings.post_number,
                           round(exp(avg(ln(msecs)))) AS gmean
                    FROM post_timings
                    INNER JOIN posts AS p2
                      ON p2.post_number = post_timings.post_number
                      AND p2.topic_id = post_timings.topic_id
                      AND p2.user_id <> post_timings.user_id
                    GROUP BY post_timings.topic_id, post_timings.post_number) AS x
              WHERE x.topic_id = posts.topic_id
                AND x.post_number = posts.post_number")
  end
end
|
|
|
|
|
2013-02-07 23:45:24 +08:00
|
|
|
before_save do
  # Default the editor to the author on first save.
  self.last_editor_id ||= user_id
  # Re-cook on every edit; creation cooks in before_create instead.
  self.cooked = cook(raw, topic_id: topic_id) unless new_record?
end
|
|
|
|
|
|
|
|
before_destroy do

  # Update the last post id to the previous post if it exists
  last_post = Post.where("topic_id = ? and id <> ?", topic_id, id).order('created_at desc').limit(1).first
  if last_post.present?
    topic.update_attributes(last_posted_at: last_post.created_at,
                            last_post_user_id: last_post.user_id,
                            highest_post_number: last_post.post_number)

    # If the poster doesn't have any other posts in the topic, clear their posted flag
    unless Post.exists?(["topic_id = ? and user_id = ? and id <> ?", topic_id, user_id, id])
      TopicUser.update_all 'posted = false', topic_id: topic_id, user_id: user_id
    end
  end

  # Feature users in the topic
  Jobs.enqueue(:feature_topic_users, topic_id: topic_id, except_post_id: id)

end
|
|
|
|
|
|
|
|
after_destroy do
  # Remove any reply records that point to deleted posts
  post_ids = PostReply.select(:post_id).where(reply_id: id).map(&:post_id)
  PostReply.delete_all reply_id: id

  # Recompute reply counts on the posts that this one was replying to.
  if post_ids.present?
    Post.where(id: post_ids).each { |p| p.update_column :reply_count, p.replies.count }
  end

  # Remove any notifications that point to this deleted post
  Notification.delete_all topic_id: topic_id, post_number: post_number
end
|
|
|
|
|
|
|
|
after_save do
  # Advance the editor's draft sequence so their draft is cleared.
  DraftSequence.next! last_editor_id, topic.draft_key if topic # could be deleted

  # A direct reply counts as referencing the replied-to post.
  quoted_post_numbers << reply_to_post_number if reply_to_post_number.present?

  # Create a reply relationship between quoted posts and this new post
  if quoted_post_numbers.present?
    quoted_post_numbers.map(&:to_i).uniq.each do |p|
      post = Post.where(topic_id: topic_id, post_number: p).first
      if post.present?
        post_reply = post.post_replies.new(reply_id: id)
        # save returns false on a duplicate relationship; only bump the
        # counter when a new row was actually created.
        if post_reply.save
          Post.update_all ['reply_count = reply_count + 1'], id: post.id
        end
      end
    end
  end
end
|
|
|
|
|
|
|
|
# Scans the raw body for [quote="..."] markers and records the post
# numbers they reference in quoted_post_numbers / quote_count.
# Runs as a before_save hook.
def extract_quoted_post_numbers
  self.quoted_post_numbers = []

  # Create relationships for the quotes
  raw.scan(/\[quote=\"([^"]+)"\]/).each do |m|
    if m.present?
      # Parse key:value pairs (e.g. topic:12, post:3) out of the tag.
      args = {}
      m.first.scan(/([a-z]+)\:(\d+)/).each do |arg|
        args[arg[0].to_sym] = arg[1].to_i
      end

      if args[:topic].present?
        # If the topic attribute is present, ensure it's the same topic
        self.quoted_post_numbers << args[:post] if topic_id == args[:topic]
      else
        self.quoted_post_numbers << args[:post]
      end
    end
  end

  self.quoted_post_numbers.uniq!
  self.quote_count = quoted_post_numbers.size
end
|
|
|
|
|
2013-03-19 01:55:34 +08:00
|
|
|
# Enqueue post processing for this post. Passes through the transient
# image_sizes / invalidate_oneboxes attributes when they were supplied.
def trigger_post_process
  args = { post_id: id }
  args = args.merge(image_sizes: image_sizes) if image_sizes.present?
  args = args.merge(invalidate_oneboxes: true) if invalidate_oneboxes.present?
  Jobs.enqueue(:process_post, args)
end
|
2013-03-08 00:07:59 +08:00
|
|
|
|
|
|
|
# Daily post counts since `since` (default: the last 30 days), returned
# as a date => count hash ordered by date.
def self.count_per_day(since=30.days.ago)
  where('created_at > ?', since).group('date(created_at)').order('date(created_at)').count
end
|
2013-02-06 03:16:51 +08:00
|
|
|
end
|