diff --git a/.rubocop.yml b/.rubocop.yml
index 792d6e22e1b..088f59f4d3c 100644
--- a/.rubocop.yml
+++ b/.rubocop.yml
@@ -1,14 +1,104 @@
 AllCops:
-  TargetRubyVersion: 2.3
+  TargetRubyVersion: 2.4
+  DisabledByDefault: true
 
-Metrics/LineLength:
-  Max: 120
+# Prefer &&/|| over and/or.
+Style/AndOr:
+  Enabled: true
 
-Metrics/MethodLength:
+# Do not use braces for hash literals when they are the last argument of a
+# method call.
+Style/BracesAroundHashParameters:
+  Enabled: true
+
+# Align `when` with `case`.
+Layout/CaseIndentation:
+  Enabled: true
+
+# Align comments with method definitions.
+Layout/CommentIndentation:
+  Enabled: true
+
+# No extra empty lines.
+Layout/EmptyLines:
+  Enabled: true
+
+# Use Ruby >= 1.9 syntax for hashes. Prefer { a: :b } over { :a => :b }.
+Style/HashSyntax:
+  Enabled: true
+
+# Two spaces, no tabs (for indentation).
+Layout/IndentationWidth:
+  Enabled: true
+
+Layout/SpaceAfterColon:
+  Enabled: true
+
+Layout/SpaceAfterComma:
+  Enabled: true
+
+Layout/SpaceAroundEqualsInParameterDefault:
+  Enabled: true
+
+Layout/SpaceAroundKeyword:
+  Enabled: true
+
+Layout/SpaceAroundOperators:
+  Enabled: true
+
+Layout/SpaceBeforeFirstArg:
+  Enabled: true
+
+# Defining a method with parameters needs parentheses.
+Style/MethodDefParentheses:
+  Enabled: true
+
+# Use `foo {}` not `foo{}`.
+Layout/SpaceBeforeBlockBraces:
+  Enabled: true
+
+# Use `foo { bar }` not `foo {bar}`.
+Layout/SpaceInsideBlockBraces:
+  Enabled: true
+
+# Use `{ a: 1 }` not `{a:1}`.
+Layout/SpaceInsideHashLiteralBraces:
+  Enabled: true
+
+Layout/SpaceInsideParens:
+  Enabled: true
+
+# Detect hard tabs, no hard tabs.
+Layout/Tab:
+  Enabled: true
+
+# Blank lines should not have any spaces.
+Layout/TrailingBlankLines:
+  Enabled: true
+
+# No trailing whitespace.
+Layout/TrailingWhitespace:
+  Enabled: true
+
+Lint/BlockAlignment:
+  Enabled: true
+
+# Align `end` with the matching keyword or starting expression except for
+# assignments, where it should be aligned with the LHS.
+Lint/EndAlignment:
+  Enabled: true
+  EnforcedStyleAlignWith: variable
+
+# Use my_method(my_arg) not my_method( my_arg ) or my_method my_arg.
+Lint/RequireParentheses:
+  Enabled: true
+
+Layout/MultilineMethodCallIndentation:
+  Enabled: true
+  EnforcedStyle: indented
+
+Layout/AlignHash:
+  Enabled: true
+
+Bundler/OrderedGems:
   Enabled: false
-
-Style/Documentation:
-  Enabled: false
-
-Style/FrozenStringLiteralComment:
-  Enabled: False
diff --git a/.travis.yml b/.travis.yml
index 25827245817..dc66517428e 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -36,7 +36,7 @@ cache:
     - vendor/bundle
 
 before_install:
-  - gem install bundler
+  - gem install bundler rubocop
   - git clone --depth=1 https://github.com/discourse/discourse-backup-uploads-to-s3.git plugins/discourse-backup-uploads-to-s3
   - git clone --depth=1 https://github.com/discourse/discourse-spoiler-alert.git plugins/discourse-spoiler-alert
   - git clone --depth=1 https://github.com/discourse/discourse-cakeday.git plugins/discourse-cakeday
@@ -48,6 +48,7 @@ before_install:
   - eslint --ext .es6 test/javascripts
   - eslint --ext .es6 plugins/**/assets/javascripts
   - eslint test/javascripts
+  - rubocop --parallel
 
 before_script:
   - bundle exec rake db:create db:migrate
diff --git a/Gemfile b/Gemfile
index 091bf3fd1eb..42e358e8e67 100644
--- a/Gemfile
+++ b/Gemfile
@@ -189,7 +189,6 @@ gem 'logster'
 
 gem 'sassc', require: false
 
-
 if ENV["IMPORT"] == "1"
   gem 'mysql2'
   gem 'redcarpet'
diff --git a/Rakefile b/Rakefile
index 179ba560eb2..7a7f9ee44b3 100755
--- a/Rakefile
+++ b/Rakefile
@@ -9,4 +9,3 @@ Discourse::Application.load_tasks
 # this prevents crashes when migrating a database in production in certain
 # PostgreSQL configuations when trying to create structure.sql
 Rake::Task["db:structure:dump"].clear if Rails.env.production?
-
diff --git a/app/controllers/admin/badges_controller.rb b/app/controllers/admin/badges_controller.rb
index e33a4765862..6fcdc7e7df4 100644
--- a/app/controllers/admin/badges_controller.rb
+++ b/app/controllers/admin/badges_controller.rb
@@ -5,9 +5,9 @@ class Admin::BadgesController < Admin::AdminController
       badge_types: BadgeType.all.order(:id).to_a,
       badge_groupings: BadgeGrouping.all.order(:position).to_a,
       badges: Badge.includes(:badge_grouping)
-               .includes(:badge_type)
-               .references(:badge_grouping)
-               .order('badge_groupings.position, badge_type_id, badges.name').to_a,
+        .includes(:badge_type)
+        .references(:badge_grouping)
+        .order('badge_groupings.position, badge_type_id, badges.name').to_a,
       protected_system_fields: Badge.protected_system_fields,
       triggers: Badge.trigger_hash
     }
@@ -43,9 +43,9 @@ class Admin::BadgesController < Admin::AdminController
     badge_groupings = BadgeGrouping.all.order(:position).to_a
     ids = params[:ids].map(&:to_i)
 
-    params[:names].each_with_index do |name,index|
+    params[:names].each_with_index do |name, index|
       id = ids[index].to_i
-      group = badge_groupings.find{|b| b.id == id} || BadgeGrouping.new()
+      group = badge_groupings.find { |b| b.id == id } || BadgeGrouping.new()
       group.name = name
       group.position = index
       group.save
@@ -95,7 +95,7 @@ class Admin::BadgesController < Admin::AdminController
 
   # Options:
   #   :new - reset the badge id to nil before saving
-  def update_badge_from_params(badge, opts={})
+  def update_badge_from_params(badge, opts = {})
     errors = []
     Badge.transaction do
       allowed = Badge.column_names.map(&:to_sym)
@@ -112,7 +112,7 @@ class Admin::BadgesController < Admin::AdminController
       # Badge query contract checks
       begin
         if SiteSetting.enable_badge_sql
-          BadgeGranter.contract_checks!(badge.query, { target_posts: badge.target_posts, trigger: badge.trigger })
+          BadgeGranter.contract_checks!(badge.query, target_posts: badge.target_posts, trigger: badge.trigger)
         end
       rescue => e
         errors << e.message
diff --git a/app/controllers/admin/color_schemes_controller.rb b/app/controllers/admin/color_schemes_controller.rb
index dda6c9f07ce..a070f2324d4 100644
--- a/app/controllers/admin/color_schemes_controller.rb
+++ b/app/controllers/admin/color_schemes_controller.rb
@@ -29,7 +29,6 @@ class Admin::ColorSchemesController < Admin::AdminController
     render json: success_json
   end
 
-
   private
 
   def fetch_color_scheme
diff --git a/app/controllers/admin/dashboard_controller.rb b/app/controllers/admin/dashboard_controller.rb
index d7250ca8262..ed021d9158d 100644
--- a/app/controllers/admin/dashboard_controller.rb
+++ b/app/controllers/admin/dashboard_controller.rb
@@ -2,13 +2,13 @@ require 'disk_space'
 class Admin::DashboardController < Admin::AdminController
   def index
     dashboard_data = AdminDashboardData.fetch_cached_stats || Jobs::DashboardStats.new.execute({})
-    dashboard_data.merge!({version_check: DiscourseUpdates.check_version.as_json}) if SiteSetting.version_checks?
+    dashboard_data.merge!(version_check: DiscourseUpdates.check_version.as_json) if SiteSetting.version_checks?
     dashboard_data[:disk_space] = DiskSpace.cached_stats
     render json: dashboard_data
   end
 
   def problems
-    render_json_dump({problems: AdminDashboardData.fetch_problems})
+    render_json_dump(problems: AdminDashboardData.fetch_problems)
   end
 
 end
diff --git a/app/controllers/admin/diagnostics_controller.rb b/app/controllers/admin/diagnostics_controller.rb
index ba889837a34..8411e854845 100644
--- a/app/controllers/admin/diagnostics_controller.rb
+++ b/app/controllers/admin/diagnostics_controller.rb
@@ -42,8 +42,8 @@ class Admin::DiagnosticsController < Admin::AdminController
     GC.start(full_mark: true)
     require 'objspace'
 
-    io = File.open("discourse-heap-#{SecureRandom.hex(3)}.json",'w')
-    ObjectSpace.dump_all(:output => io)
+    io = File.open("discourse-heap-#{SecureRandom.hex(3)}.json", 'w')
+    ObjectSpace.dump_all(output: io)
     io.close
 
     render plain: "HEAP DUMP:\n#{io.path}"
diff --git a/app/controllers/admin/email_controller.rb b/app/controllers/admin/email_controller.rb
index 75e07bb22b6..a5030d06dd8 100644
--- a/app/controllers/admin/email_controller.rb
+++ b/app/controllers/admin/email_controller.rb
@@ -13,7 +13,7 @@ class Admin::EmailController < Admin::AdminController
       Jobs::TestEmail.new.execute(to_address: params[:email_address])
       render nothing: true
     rescue => e
-      render json: {errors: [e.message]}, status: 422
+      render json: { errors: [e.message] }, status: 422
     end
   end
 
@@ -55,17 +55,17 @@ class Admin::EmailController < Admin::AdminController
     params.require(:username)
     params.require(:email)
     user = User.find_by_username(params[:username])
-    message, skip_reason = UserNotifications.send(:digest, user, {since: params[:last_seen_at]})
+    message, skip_reason = UserNotifications.send(:digest, user, since: params[:last_seen_at])
     if message
       message.to = params[:email]
       begin
         Email::Sender.new(message, :digest).send
         render json: success_json
       rescue => e
-        render json: {errors: [e.message]}, status: 422
+        render json: { errors: [e.message] }, status: 422
       end
     else
-      render json: {errors: skip_reason}
+      render json: { errors: skip_reason }
     end
   end
 
@@ -131,18 +131,18 @@ class Admin::EmailController < Admin::AdminController
       serializer = IncomingEmailDetailsSerializer.new(incoming_email, root: false)
       render_json_dump(serializer)
     rescue => e
-      render json: {errors: [e.message]}, status: 404
+      render json: { errors: [e.message] }, status: 404
     end
   end
 
   private
 
   def filter_email_logs(email_logs, params)
-    email_logs = email_logs.includes(:user, { post: :topic })
-                           .references(:user)
-                           .order(created_at: :desc)
-                           .offset(params[:offset] || 0)
-                           .limit(50)
+    email_logs = email_logs.includes(:user, post: :topic)
+      .references(:user)
+      .order(created_at: :desc)
+      .offset(params[:offset] || 0)
+      .limit(50)
 
     email_logs = email_logs.where("users.username ILIKE ?", "%#{params[:user]}%") if params[:user].present?
     email_logs = email_logs.where("email_logs.to_address ILIKE ?", "%#{params[:address]}%") if params[:address].present?
@@ -154,10 +154,10 @@ class Admin::EmailController < Admin::AdminController
   end
 
   def filter_incoming_emails(incoming_emails, params)
-    incoming_emails = incoming_emails.includes(:user, { post: :topic })
-                                     .order(created_at: :desc)
-                                     .offset(params[:offset] || 0)
-                                     .limit(50)
+    incoming_emails = incoming_emails.includes(:user, post: :topic)
+      .order(created_at: :desc)
+      .offset(params[:offset] || 0)
+      .limit(50)
 
     incoming_emails = incoming_emails.where("from_address ILIKE ?", "%#{params[:from]}%") if params[:from].present?
     incoming_emails = incoming_emails.where("to_addresses ILIKE :to OR cc_addresses ILIKE :to", to: "%#{params[:to]}%") if params[:to].present?
@@ -170,7 +170,7 @@ class Admin::EmailController < Admin::AdminController
   def delivery_settings
     action_mailer_settings
       .reject { |k, _| k == :password }
-      .map { |k, v| { name: k, value: v }}
+      .map { |k, v| { name: k, value: v } }
   end
 
   def delivery_method
diff --git a/app/controllers/admin/emojis_controller.rb b/app/controllers/admin/emojis_controller.rb
index 8bd3f36edec..47b78e83b4d 100644
--- a/app/controllers/admin/emojis_controller.rb
+++ b/app/controllers/admin/emojis_controller.rb
@@ -13,8 +13,8 @@ class Admin::EmojisController < Admin::AdminController
     Scheduler::Defer.later("Upload Emoji") do
       # fix the name
       name = name.gsub(/[^a-z0-9]+/i, '_')
-                 .gsub(/_{2,}/, '_')
-                 .downcase
+        .gsub(/_{2,}/, '_')
+        .downcase
 
       upload = UploadCreator.new(
         file.tempfile,
@@ -61,4 +61,3 @@ class Admin::EmojisController < Admin::AdminController
     end
   end
 end
-
diff --git a/app/controllers/admin/flags_controller.rb b/app/controllers/admin/flags_controller.rb
index 1385f958d50..f986ecd43e7 100644
--- a/app/controllers/admin/flags_controller.rb
+++ b/app/controllers/admin/flags_controller.rb
@@ -10,11 +10,11 @@ class Admin::FlagsController < Admin::AdminController
     if posts.blank?
       render json: { posts: [], topics: [], users: [] }
     else
-      render json: MultiJson.dump({
+      render json: MultiJson.dump(
         posts: posts,
         topics: serialize_data(topics, FlaggedTopicSerializer),
         users: serialize_data(users, FlaggedUserSerializer)
-      })
+      )
     end
   end
diff --git a/app/controllers/admin/groups_controller.rb b/app/controllers/admin/groups_controller.rb
index 3da78834fd4..7d54bb12634 100644
--- a/app/controllers/admin/groups_controller.rb
+++ b/app/controllers/admin/groups_controller.rb
@@ -11,7 +11,7 @@ class Admin::GroupsController < Admin::AdminController
     group = Group.find(params[:group_id].to_i)
     users_added = 0
     if group.present?
-      users = (params[:users] || []).map {|u| u.downcase}
+      users = (params[:users] || []).map { |u| u.downcase }
       valid_emails = {}
       valid_usernames = {}
 
@@ -162,7 +162,7 @@ class Admin::GroupsController < Admin::AdminController
   protected
 
   def can_not_modify_automatic
-    render json: {errors: I18n.t('groups.errors.can_not_modify_automatic')}, status: 422
+    render json: { errors: I18n.t('groups.errors.can_not_modify_automatic') }, status: 422
   end
 
   private
diff --git a/app/controllers/admin/screened_ip_addresses_controller.rb b/app/controllers/admin/screened_ip_addresses_controller.rb
index 31df2a6b8eb..1f4bc315657 100644
--- a/app/controllers/admin/screened_ip_addresses_controller.rb
+++ b/app/controllers/admin/screened_ip_addresses_controller.rb
@@ -46,7 +46,7 @@ class Admin::ScreenedIpAddressesController < Admin::AdminController
 
   def roll_up
     subnets = ScreenedIpAddress.roll_up(current_user)
-    render json: success_json.merge!({ subnets: subnets })
+    render json: success_json.merge!(subnets: subnets)
   end
 
   private
diff --git a/app/controllers/admin/site_settings_controller.rb b/app/controllers/admin/site_settings_controller.rb
index 7f20c5d88e5..87478597a17 100644
--- a/app/controllers/admin/site_settings_controller.rb
+++ b/app/controllers/admin/site_settings_controller.rb
@@ -17,7 +17,7 @@ class Admin::SiteSettingsController < Admin::AdminController
       SiteSetting.set_and_log(id, value, current_user)
       render nothing: true
     rescue Discourse::InvalidParameters => e
-      render json: {errors: [e.message]}, status: 422
+      render json: { errors: [e.message] }, status: 422
     end
   end
 
diff --git a/app/controllers/admin/site_texts_controller.rb b/app/controllers/admin/site_texts_controller.rb
index 33c7b716fc3..8cdb0d9b196 100644
--- a/app/controllers/admin/site_texts_controller.rb
+++ b/app/controllers/admin/site_texts_controller.rb
@@ -14,7 +14,7 @@ class Admin::SiteTextsController < Admin::AdminController
     query = params[:q] || ""
     if query.blank? && !overridden
       extras[:recommended] = true
-      results = self.class.preferred_keys.map {|k| record_for(k) }
+      results = self.class.preferred_keys.map { |k| record_for(k) }
     else
       results = []
       translations = I18n.search(query, overridden: overridden)
@@ -69,14 +69,14 @@ class Admin::SiteTextsController < Admin::AdminController
 
   protected
 
-  def record_for(k, value=nil)
+  def record_for(k, value = nil)
     if k.ends_with?("_MF")
       ovr = TranslationOverride.where(translation_key: k).pluck(:value)
       value = ovr[0] if ovr.present?
     end
 
     value ||= I18n.t(k)
-    {id: k, value: value}
+    { id: k, value: value }
   end
 
   def find_site_text
diff --git a/app/controllers/admin/staff_action_logs_controller.rb b/app/controllers/admin/staff_action_logs_controller.rb
index 242cc4f4027..2e72e2b86f4 100644
--- a/app/controllers/admin/staff_action_logs_controller.rb
+++ b/app/controllers/admin/staff_action_logs_controller.rb
@@ -6,7 +6,7 @@ class Admin::StaffActionLogsController < Admin::AdminController
     staff_action_logs = UserHistory.staff_action_records(current_user, filters).to_a
     render json: StaffActionLogsSerializer.new({
       staff_action_logs: staff_action_logs,
-      user_history_actions: UserHistory.staff_actions.sort.map{|name| {id: UserHistory.actions[name], name: name}}
+      user_history_actions: UserHistory.staff_actions.sort.map { |name| { id: UserHistory.actions[name], name: name } }
     }, root: false)
   end
 
@@ -46,20 +46,18 @@ class Admin::StaffActionLogsController < Admin::AdminController
       cur: child_themes(cur)
     }
 
-
     load_diff(diff_fields, :cur, cur)
     load_diff(diff_fields, :prev, prev)
 
-    diff_fields.delete_if{|k,v| v[:cur] == v[:prev]}
+    diff_fields.delete_if { |k, v| v[:cur] == v[:prev] }
 
-
-    diff_fields.each do |k,v|
+    diff_fields.each do |k, v|
       output << "
\s*\n?(.*?)\n?<\/code>\s*<\/pre>/m) {
- "\n```#{lang}\n#{coder.decode($1)}\n```\n"
+ "\n```#{lang}\n#{coder.decode($1)}\n```\n"
}
end
end
diff --git a/lib/import_export/category_exporter.rb b/lib/import_export/category_exporter.rb
index 9f182e72470..4e2a57e8208 100644
--- a/lib/import_export/category_exporter.rb
+++ b/lib/import_export/category_exporter.rb
@@ -22,15 +22,14 @@ module ImportExport
self
end
-
CATEGORY_ATTRS = [:id, :name, :color, :created_at, :user_id, :slug, :description, :text_color,
:auto_close_hours, :auto_close_based_on_last_post,
:topic_template, :suppress_from_homepage, :all_topics_wiki, :permissions_params]
def export_categories
- @export_data[:category] = CATEGORY_ATTRS.inject({}) { |h,a| h[a] = @category.send(a); h }
+ @export_data[:category] = CATEGORY_ATTRS.inject({}) { |h, a| h[a] = @category.send(a); h }
@subcategories.find_each do |subcat|
- @export_data[:subcategories] << CATEGORY_ATTRS.inject({}) { |h,a| h[a] = subcat.send(a); h }
+ @export_data[:subcategories] << CATEGORY_ATTRS.inject({}) { |h, a| h[a] = subcat.send(a); h }
end
# export groups that are mentioned in category permissions
@@ -49,7 +48,6 @@ module ImportExport
self
end
-
GROUP_ATTRS = [ :id, :name, :created_at, :alias_level, :visible,
:automatic_membership_email_domains, :automatic_membership_retroactive,
:primary_group, :title, :grant_trust_level, :incoming_email]
@@ -57,7 +55,7 @@ module ImportExport
def export_groups(group_names)
group_names.each do |name|
group = Group.find_by_name(name)
- group_attrs = GROUP_ATTRS.inject({}) { |h,a| h[a] = group.send(a); h }
+ group_attrs = GROUP_ATTRS.inject({}) { |h, a| h[a] = group.send(a); h }
group_attrs[:user_ids] = group.users.pluck(:id)
@export_data[:groups] << group_attrs
end
@@ -75,7 +73,7 @@ module ImportExport
self
end
- def save_to_file(filename=nil)
+ def save_to_file(filename = nil)
require 'json'
output_basename = filename || File.join("category-export-#{Time.now.strftime("%Y-%m-%d-%H%M%S")}.json")
File.open(output_basename, "w:UTF-8") do |f|
diff --git a/lib/import_export/category_importer.rb b/lib/import_export/category_importer.rb
index 25b57acfcb9..8493b006a09 100644
--- a/lib/import_export/category_importer.rb
+++ b/lib/import_export/category_importer.rb
@@ -28,7 +28,7 @@ module ImportExport
external_id = g.delete(:id)
new_group = Group.find_by_name(g[:name]) || Group.create!(g)
user_ids.each do |external_user_id|
- new_group.add( User.find(@topic_importer.new_user_id(external_user_id)) ) rescue ActiveRecord::RecordNotUnique
+ new_group.add(User.find(@topic_importer.new_user_id(external_user_id))) rescue ActiveRecord::RecordNotUnique
end
end
end
@@ -47,7 +47,7 @@ module ImportExport
parent = Category.new(@export_data[:category])
parent.user_id = @topic_importer.new_user_id(@export_data[:category][:user_id]) # imported user's new id
parent.custom_fields["import_id"] = id
- parent.permissions = permissions.present? ? permissions : {"everyone" => CategoryGroup.permission_types[:full]}
+ parent.permissions = permissions.present? ? permissions : { "everyone" => CategoryGroup.permission_types[:full] }
parent.save!
set_category_description(parent, @export_data[:category][:description])
end
@@ -62,7 +62,7 @@ module ImportExport
subcategory.parent_category_id = parent.id
subcategory.user_id = @topic_importer.new_user_id(cat_attrs[:user_id])
subcategory.custom_fields["import_id"] = id
- subcategory.permissions = permissions.present? ? permissions : {"everyone" => CategoryGroup.permission_types[:full]}
+ subcategory.permissions = permissions.present? ? permissions : { "everyone" => CategoryGroup.permission_types[:full] }
subcategory.save!
set_category_description(subcategory, cat_attrs[:description])
end
diff --git a/lib/import_export/import_export.rb b/lib/import_export/import_export.rb
index 4bb6fa9250d..fad560768f2 100644
--- a/lib/import_export/import_export.rb
+++ b/lib/import_export/import_export.rb
@@ -6,7 +6,7 @@ require "json"
module ImportExport
- def self.export_category(category_id, filename=nil)
+ def self.export_category(category_id, filename = nil)
ImportExport::CategoryExporter.new(category_id).perform.save_to_file(filename)
end
diff --git a/lib/import_export/topic_exporter.rb b/lib/import_export/topic_exporter.rb
index 78b4dc4d9eb..4c494003edd 100644
--- a/lib/import_export/topic_exporter.rb
+++ b/lib/import_export/topic_exporter.rb
@@ -20,7 +20,6 @@ module ImportExport
self
end
-
USER_ATTRS = [:id, :email, :username, :name, :created_at, :trust_level, :active, :last_emailed_at]
def export_users
@@ -33,11 +32,9 @@ module ImportExport
u = post.user
unless @exported_user_ids.include?(u.id)
x = USER_ATTRS.inject({}) { |h, a| h[a] = u.send(a); h; }
- @export_data[:users] << x.merge({
- bio_raw: u.user_profile.bio_raw,
- website: u.user_profile.website,
- location: u.user_profile.location
- })
+ @export_data[:users] << x.merge(bio_raw: u.user_profile.bio_raw,
+ website: u.user_profile.website,
+ location: u.user_profile.location)
@exported_user_ids << u.id
end
end
@@ -46,7 +43,6 @@ module ImportExport
self
end
-
def export_topics
@topic_ids.each do |topic_id|
t = Topic.find(topic_id)
@@ -56,7 +52,6 @@ module ImportExport
puts ""
end
-
TOPIC_ATTRS = [:id, :title, :created_at, :views, :category_id, :closed, :archived, :archetype]
POST_ATTRS = [:id, :user_id, :post_number, :raw, :created_at, :reply_to_post_number,
:hidden, :hidden_reason_id, :wiki]
@@ -81,8 +76,7 @@ module ImportExport
self
end
-
- def save_to_file(filename=nil)
+ def save_to_file(filename = nil)
require 'json'
output_basename = filename || File.join("topic-export-#{Time.now.strftime("%Y-%m-%d-%H%M%S")}.json")
File.open(output_basename, "w:UTF-8") do |f|
diff --git a/lib/import_export/topic_importer.rb b/lib/import_export/topic_importer.rb
index 57496c5dab3..c681d39c2f6 100644
--- a/lib/import_export/topic_importer.rb
+++ b/lib/import_export/topic_importer.rb
@@ -36,14 +36,14 @@ module ImportExport
puts ""
print t[:title]
- first_post_attrs = t[:posts].first.merge( t.slice(*(TopicExporter::TOPIC_ATTRS - [:id, :category_id])) )
+ first_post_attrs = t[:posts].first.merge(t.slice(*(TopicExporter::TOPIC_ATTRS - [:id, :category_id])))
first_post_attrs[:user_id] = new_user_id(first_post_attrs[:user_id])
first_post_attrs[:category] = new_category_id(t[:category_id])
first_post = PostCustomField.where(name: "import_id", value: first_post_attrs[:id]).first.try(:post)
unless first_post
- first_post = create_post( first_post_attrs, first_post_attrs[:id] )
+ first_post = create_post(first_post_attrs, first_post_attrs[:id])
end
topic_id = first_post.topic_id
@@ -53,10 +53,8 @@ module ImportExport
print "."
existing = PostCustomField.where(name: "import_id", value: post_data[:id]).first.try(:post)
unless existing
- create_post(post_data.merge({
- topic_id: topic_id,
- user_id: new_user_id(post_data[:user_id])
- }), post_data[:id]) # see ImportScripts::Base
+ create_post(post_data.merge(topic_id: topic_id,
+ user_id: new_user_id(post_data[:user_id])), post_data[:id]) # see ImportScripts::Base
end
end
end
diff --git a/lib/inline_oneboxer.rb b/lib/inline_oneboxer.rb
index b064794882a..f345bef4d38 100644
--- a/lib/inline_oneboxer.rb
+++ b/lib/inline_oneboxer.rb
@@ -2,13 +2,13 @@ require_dependency 'retrieve_title'
class InlineOneboxer
- def initialize(urls, opts=nil)
+ def initialize(urls, opts = nil)
@urls = urls
@opts = opts || {}
end
def process
- @urls.map {|url| InlineOneboxer.lookup(url, @opts) }.compact
+ @urls.map { |url| InlineOneboxer.lookup(url, @opts) }.compact
end
def self.purge(url)
@@ -19,7 +19,7 @@ class InlineOneboxer
Rails.cache.read(cache_key(url))
end
- def self.lookup(url, opts=nil)
+ def self.lookup(url, opts = nil)
opts ||= {}
unless opts[:skip_cache]
@@ -44,7 +44,7 @@ class InlineOneboxer
uri.hostname.present? &&
domains.include?(uri.hostname) &&
title = RetrieveTitle.crawl(url)
- return onebox_for(url, title, opts)
+ return onebox_for(url, title, opts)
end
end
@@ -70,4 +70,3 @@ class InlineOneboxer
end
end
-
diff --git a/lib/js_locale_helper.rb b/lib/js_locale_helper.rb
index 2aa8d65a0f1..c352ee42d74 100644
--- a/lib/js_locale_helper.rb
+++ b/lib/js_locale_helper.rb
@@ -16,7 +16,7 @@ module JsLocaleHelper
end
end
- def self.load_translations(locale, opts=nil)
+ def self.load_translations(locale, opts = nil)
opts ||= {}
@loaded_translations = nil if opts[:force]
@@ -149,7 +149,7 @@ module JsLocaleHelper
end
def self.generate_message_format(message_formats, locale_str)
- formats = message_formats.map { |k,v| k.inspect << " : " << compile_message_format(locale_str, v) }.join(", ")
+ formats = message_formats.map { |k, v| k.inspect << " : " << compile_message_format(locale_str, v) }.join(", ")
filename = "#{Rails.root}/lib/javascripts/locale/#{locale_str}.js"
filename = "#{Rails.root}/lib/javascripts/locale/en.js" unless File.exists?(filename)
diff --git a/lib/json_error.rb b/lib/json_error.rb
index e52f18edf4d..5dfc720506c 100644
--- a/lib/json_error.rb
+++ b/lib/json_error.rb
@@ -1,6 +1,6 @@
module JsonError
- def create_errors_json(obj, type=nil)
+ def create_errors_json(obj, type = nil)
errors = create_errors_array obj
errors[:error_type] = type if type
errors
diff --git a/lib/letter_avatar.rb b/lib/letter_avatar.rb
index 62915edcaba..93766c4b2d1 100644
--- a/lib/letter_avatar.rb
+++ b/lib/letter_avatar.rb
@@ -89,7 +89,7 @@ class LetterAvatar
end
def to_rgb(color)
- r,g,b = color
+ r, g, b = color
"rgb(#{r},#{g},#{b})"
end
@@ -108,7 +108,7 @@ class LetterAvatar
skip = File.basename(cache_path)
parent_path = File.dirname(cache_path)
Dir.entries(parent_path).each do |path|
- unless ['.','..'].include?(path) || path == skip
+ unless ['.', '..'].include?(path) || path == skip
FileUtils.rm_rf(parent_path + "/" + path)
end
end
@@ -121,220 +121,220 @@ class LetterAvatar
# - H: 0 - 360
# - C: 0 - 2
# - L: 0.75 - 1.5
- COLORS = [[198,125,40],
- [61,155,243],
- [74,243,75],
- [238,89,166],
- [52,240,224],
- [177,156,155],
- [240,120,145],
- [111,154,78],
- [237,179,245],
- [237,101,95],
- [89,239,155],
- [43,254,70],
- [163,212,245],
- [65,152,142],
- [165,135,246],
- [181,166,38],
- [187,229,206],
- [77,164,25],
- [179,246,101],
- [234,93,37],
- [225,155,115],
- [142,140,188],
- [223,120,140],
- [249,174,27],
- [244,117,225],
- [137,141,102],
- [75,191,146],
- [188,239,142],
- [164,199,145],
- [173,120,149],
- [59,195,89],
- [222,198,220],
- [68,145,187],
- [236,204,179],
- [159,195,72],
- [188,121,189],
- [166,160,85],
- [181,233,37],
- [236,177,85],
- [121,147,160],
- [234,218,110],
- [241,157,191],
- [62,200,234],
- [133,243,34],
- [88,149,110],
- [59,228,248],
- [183,119,118],
- [251,195,45],
- [113,196,122],
- [197,115,70],
- [80,175,187],
- [103,231,238],
- [240,72,133],
- [228,149,241],
- [180,188,159],
- [172,132,85],
- [180,135,251],
- [236,194,58],
- [217,176,109],
- [88,244,199],
- [186,157,239],
- [113,230,96],
- [206,115,165],
- [244,178,163],
- [230,139,26],
- [241,125,89],
- [83,160,66],
- [107,190,166],
- [197,161,210],
- [198,203,245],
- [238,117,19],
- [228,119,116],
- [131,156,41],
- [145,178,168],
- [139,170,220],
- [233,95,125],
- [87,178,230],
- [157,200,119],
- [237,140,76],
- [229,185,186],
- [144,206,212],
- [236,209,158],
- [185,189,79],
- [34,208,66],
- [84,238,129],
- [133,140,134],
- [67,157,94],
- [168,179,25],
- [140,145,240],
- [151,241,125],
- [67,162,107],
- [200,156,21],
- [169,173,189],
- [226,116,189],
- [133,231,191],
- [194,161,63],
- [241,77,99],
- [241,217,53],
- [123,204,105],
- [210,201,119],
- [229,108,155],
- [240,91,72],
- [187,115,210],
- [240,163,100],
- [178,217,57],
- [179,135,116],
- [204,211,24],
- [186,135,57],
- [223,176,135],
- [204,148,151],
- [116,223,50],
- [95,195,46],
- [123,160,236],
- [181,172,131],
- [142,220,202],
- [240,140,112],
- [172,145,164],
- [228,124,45],
- [135,151,243],
- [42,205,125],
- [192,233,116],
- [119,170,114],
- [158,138,26],
- [73,190,183],
- [185,229,243],
- [227,107,55],
- [196,205,202],
- [132,143,60],
- [233,192,237],
- [62,150,220],
- [205,201,141],
- [106,140,190],
- [161,131,205],
- [135,134,158],
- [198,139,81],
- [115,171,32],
- [101,181,67],
- [149,137,119],
- [37,142,183],
- [183,130,175],
- [168,125,133],
- [124,142,87],
- [236,156,171],
- [232,194,91],
- [219,200,69],
- [144,219,34],
- [219,95,187],
- [145,154,217],
- [165,185,100],
- [127,238,163],
- [224,178,198],
- [119,153,120],
- [124,212,92],
- [172,161,105],
- [231,155,135],
- [157,132,101],
- [122,185,146],
- [53,166,51],
- [70,163,90],
- [150,190,213],
- [210,107,60],
- [166,152,185],
- [159,194,159],
- [39,141,222],
- [202,176,161],
- [95,140,229],
- [168,142,87],
- [93,170,203],
- [159,142,54],
- [14,168,39],
- [94,150,149],
- [187,206,136],
- [157,224,166],
- [235,158,208],
- [109,232,216],
- [141,201,87],
- [208,124,118],
- [142,125,214],
- [19,237,174],
- [72,219,41],
- [234,102,111],
- [168,142,79],
- [188,135,35],
- [95,155,143],
- [148,173,116],
- [223,112,95],
- [228,128,236],
- [206,114,54],
- [195,119,88],
- [235,140,94],
- [235,202,125],
- [233,155,153],
- [214,214,238],
- [246,200,35],
- [151,125,171],
- [132,145,172],
- [131,142,118],
- [199,126,150],
- [61,162,123],
- [58,176,151],
- [215,141,69],
- [225,154,220],
- [220,77,167],
- [233,161,64],
- [130,221,137],
- [81,191,129],
- [169,162,140],
- [174,177,222],
- [236,174,47],
- [233,188,180],
- [69,222,172],
- [71,232,93],
- [118,211,238],
- [157,224,83],
- [218,105,73],
- [126,169,36]]
+ COLORS = [[198, 125, 40],
+ [61, 155, 243],
+ [74, 243, 75],
+ [238, 89, 166],
+ [52, 240, 224],
+ [177, 156, 155],
+ [240, 120, 145],
+ [111, 154, 78],
+ [237, 179, 245],
+ [237, 101, 95],
+ [89, 239, 155],
+ [43, 254, 70],
+ [163, 212, 245],
+ [65, 152, 142],
+ [165, 135, 246],
+ [181, 166, 38],
+ [187, 229, 206],
+ [77, 164, 25],
+ [179, 246, 101],
+ [234, 93, 37],
+ [225, 155, 115],
+ [142, 140, 188],
+ [223, 120, 140],
+ [249, 174, 27],
+ [244, 117, 225],
+ [137, 141, 102],
+ [75, 191, 146],
+ [188, 239, 142],
+ [164, 199, 145],
+ [173, 120, 149],
+ [59, 195, 89],
+ [222, 198, 220],
+ [68, 145, 187],
+ [236, 204, 179],
+ [159, 195, 72],
+ [188, 121, 189],
+ [166, 160, 85],
+ [181, 233, 37],
+ [236, 177, 85],
+ [121, 147, 160],
+ [234, 218, 110],
+ [241, 157, 191],
+ [62, 200, 234],
+ [133, 243, 34],
+ [88, 149, 110],
+ [59, 228, 248],
+ [183, 119, 118],
+ [251, 195, 45],
+ [113, 196, 122],
+ [197, 115, 70],
+ [80, 175, 187],
+ [103, 231, 238],
+ [240, 72, 133],
+ [228, 149, 241],
+ [180, 188, 159],
+ [172, 132, 85],
+ [180, 135, 251],
+ [236, 194, 58],
+ [217, 176, 109],
+ [88, 244, 199],
+ [186, 157, 239],
+ [113, 230, 96],
+ [206, 115, 165],
+ [244, 178, 163],
+ [230, 139, 26],
+ [241, 125, 89],
+ [83, 160, 66],
+ [107, 190, 166],
+ [197, 161, 210],
+ [198, 203, 245],
+ [238, 117, 19],
+ [228, 119, 116],
+ [131, 156, 41],
+ [145, 178, 168],
+ [139, 170, 220],
+ [233, 95, 125],
+ [87, 178, 230],
+ [157, 200, 119],
+ [237, 140, 76],
+ [229, 185, 186],
+ [144, 206, 212],
+ [236, 209, 158],
+ [185, 189, 79],
+ [34, 208, 66],
+ [84, 238, 129],
+ [133, 140, 134],
+ [67, 157, 94],
+ [168, 179, 25],
+ [140, 145, 240],
+ [151, 241, 125],
+ [67, 162, 107],
+ [200, 156, 21],
+ [169, 173, 189],
+ [226, 116, 189],
+ [133, 231, 191],
+ [194, 161, 63],
+ [241, 77, 99],
+ [241, 217, 53],
+ [123, 204, 105],
+ [210, 201, 119],
+ [229, 108, 155],
+ [240, 91, 72],
+ [187, 115, 210],
+ [240, 163, 100],
+ [178, 217, 57],
+ [179, 135, 116],
+ [204, 211, 24],
+ [186, 135, 57],
+ [223, 176, 135],
+ [204, 148, 151],
+ [116, 223, 50],
+ [95, 195, 46],
+ [123, 160, 236],
+ [181, 172, 131],
+ [142, 220, 202],
+ [240, 140, 112],
+ [172, 145, 164],
+ [228, 124, 45],
+ [135, 151, 243],
+ [42, 205, 125],
+ [192, 233, 116],
+ [119, 170, 114],
+ [158, 138, 26],
+ [73, 190, 183],
+ [185, 229, 243],
+ [227, 107, 55],
+ [196, 205, 202],
+ [132, 143, 60],
+ [233, 192, 237],
+ [62, 150, 220],
+ [205, 201, 141],
+ [106, 140, 190],
+ [161, 131, 205],
+ [135, 134, 158],
+ [198, 139, 81],
+ [115, 171, 32],
+ [101, 181, 67],
+ [149, 137, 119],
+ [37, 142, 183],
+ [183, 130, 175],
+ [168, 125, 133],
+ [124, 142, 87],
+ [236, 156, 171],
+ [232, 194, 91],
+ [219, 200, 69],
+ [144, 219, 34],
+ [219, 95, 187],
+ [145, 154, 217],
+ [165, 185, 100],
+ [127, 238, 163],
+ [224, 178, 198],
+ [119, 153, 120],
+ [124, 212, 92],
+ [172, 161, 105],
+ [231, 155, 135],
+ [157, 132, 101],
+ [122, 185, 146],
+ [53, 166, 51],
+ [70, 163, 90],
+ [150, 190, 213],
+ [210, 107, 60],
+ [166, 152, 185],
+ [159, 194, 159],
+ [39, 141, 222],
+ [202, 176, 161],
+ [95, 140, 229],
+ [168, 142, 87],
+ [93, 170, 203],
+ [159, 142, 54],
+ [14, 168, 39],
+ [94, 150, 149],
+ [187, 206, 136],
+ [157, 224, 166],
+ [235, 158, 208],
+ [109, 232, 216],
+ [141, 201, 87],
+ [208, 124, 118],
+ [142, 125, 214],
+ [19, 237, 174],
+ [72, 219, 41],
+ [234, 102, 111],
+ [168, 142, 79],
+ [188, 135, 35],
+ [95, 155, 143],
+ [148, 173, 116],
+ [223, 112, 95],
+ [228, 128, 236],
+ [206, 114, 54],
+ [195, 119, 88],
+ [235, 140, 94],
+ [235, 202, 125],
+ [233, 155, 153],
+ [214, 214, 238],
+ [246, 200, 35],
+ [151, 125, 171],
+ [132, 145, 172],
+ [131, 142, 118],
+ [199, 126, 150],
+ [61, 162, 123],
+ [58, 176, 151],
+ [215, 141, 69],
+ [225, 154, 220],
+ [220, 77, 167],
+ [233, 161, 64],
+ [130, 221, 137],
+ [81, 191, 129],
+ [169, 162, 140],
+ [174, 177, 222],
+ [236, 174, 47],
+ [233, 188, 180],
+ [69, 222, 172],
+ [71, 232, 93],
+ [118, 211, 238],
+ [157, 224, 83],
+ [218, 105, 73],
+ [126, 169, 36]]
end
diff --git a/lib/markdown_linker.rb b/lib/markdown_linker.rb
index b6bb1a2eb1e..fab7c5a6b60 100644
--- a/lib/markdown_linker.rb
+++ b/lib/markdown_linker.rb
@@ -17,7 +17,7 @@ class MarkdownLinker
def references
result = ""
- (@rendered..@index-1).each do |i|
+ (@rendered..@index - 1).each do |i|
result << "[#{i}]: #{@markdown_links[i]}\n"
end
@rendered = @index
diff --git a/lib/memory_diagnostics.rb b/lib/memory_diagnostics.rb
index a49480677a9..3a3fcd60f54 100644
--- a/lib/memory_diagnostics.rb
+++ b/lib/memory_diagnostics.rb
@@ -4,7 +4,7 @@ module MemoryDiagnostics
File.exists?(snapshot_filename)
end
- def self.compare(from=nil, to=nil)
+ def self.compare(from = nil, to = nil)
from ||= snapshot_filename
if !to
@@ -38,7 +38,7 @@ module MemoryDiagnostics
end
end
- report << summary.sort{|a,b| b[1] <=> a[1]}[0..50].map{|k,v|
+ report << summary.sort { |a, b| b[1] <=> a[1] }[0..50].map { |k, v|
"#{k}: #{v}"
}.join("\n")
@@ -59,9 +59,9 @@ module MemoryDiagnostics
"#{snapshot_path}/#{Process.pid}.snapshot"
end
- def self.snapshot_current_process(filename=nil)
+ def self.snapshot_current_process(filename = nil)
filename ||= snapshot_filename
- pid=fork do
+ pid = fork do
snapshot(filename)
end
@@ -86,7 +86,7 @@ module MemoryDiagnostics
IO.binwrite(filename, Marshal::dump(object_ids))
end
- def self.memory_report(opts={})
+ def self.memory_report(opts = {})
begin
# ruby 2.1
GC.start(full_mark: true)
@@ -94,7 +94,6 @@ module MemoryDiagnostics
GC.start
end
-
classes = {}
large_objects = []
@@ -113,11 +112,11 @@ module MemoryDiagnostics
classes[:unknown] += 1
end
end
- classes = classes.sort{|a,b| b[1] <=> a[1]}[0..40].map{|klass, count| "#{klass}: #{count}"}
+ classes = classes.sort { |a, b| b[1] <=> a[1] }[0..40].map { |klass, count| "#{klass}: #{count}" }
- classes << "\nLarge Objects (#{large_objects.length} larger than 200 bytes total size #{large_objects.map{|x,_| x}.sum}):\n"
+ classes << "\nLarge Objects (#{large_objects.length} larger than 200 bytes total size #{large_objects.map { |x, _| x }.sum}):\n"
- classes += large_objects.sort{|a,b| b[0] <=> a[0]}[0..800].map do |size,object|
+ classes += large_objects.sort { |a, b| b[0] <=> a[0] }[0..800].map do |size, object|
rval = "#{object.class}: size #{size}"
rval << " " << object.to_s[0..500].gsub("\n", "") if (String === object) || (Regexp === object)
rval << "\n"
@@ -125,10 +124,8 @@ module MemoryDiagnostics
end
end
- stats = GC.stat.map{|k,v| "#{k}: #{v}"}
- counts = ObjectSpace.count_objects.sort{|a,b| b[1] <=> a[1] }.map{|k,v| "#{k}: #{v}"}
-
-
+ stats = GC.stat.map { |k, v| "#{k}: #{v}" }
+ counts = ObjectSpace.count_objects.sort { |a, b| b[1] <=> a[1] }.map { |k, v| "#{k}: #{v}" }
< 'image/png' }, [ File.read(default_image)] ]
end
end
-
+
status, headers, response = @app.call(env)
[status, headers, response]
end
diff --git a/lib/middleware/request_tracker.rb b/lib/middleware/request_tracker.rb
index 90b36890d0e..a2dae84e97f 100644
--- a/lib/middleware/request_tracker.rb
+++ b/lib/middleware/request_tracker.rb
@@ -2,11 +2,11 @@ require_dependency 'middleware/anonymous_cache'
class Middleware::RequestTracker
- def initialize(app, settings={})
+ def initialize(app, settings = {})
@app = app
end
- def self.log_request_on_site(data,host)
+ def self.log_request_on_site(data, host)
RailsMultisite::ConnectionManagement.with_hostname(host) do
log_request(data)
end
@@ -46,8 +46,8 @@ class Middleware::RequestTracker
TRACK_VIEW = "HTTP_DISCOURSE_TRACK_VIEW".freeze
CONTENT_TYPE = "Content-Type".freeze
- def self.get_data(env,result)
- status,headers = result
+ def self.get_data(env, result)
+ status, headers = result
status = status.to_i
helper = Middleware::AnonymousCache::Helper.new(env)
@@ -74,21 +74,21 @@ class Middleware::RequestTracker
ensure
# we got to skip this on error ... its just logging
- data = self.class.get_data(env,result) rescue nil
+ data = self.class.get_data(env, result) rescue nil
host = RailsMultisite::ConnectionManagement.host(env)
if data
- if result && (headers=result[1])
+ if result && (headers = result[1])
headers["X-Discourse-TrackView"] = "1" if data[:track_view]
end
- log_later(data,host)
+ log_later(data, host)
end
end
- def log_later(data,host)
- Scheduler::Defer.later("Track view", _db=nil) do
- self.class.log_request_on_site(data,host)
+ def log_later(data, host)
+ Scheduler::Defer.later("Track view", _db = nil) do
+ self.class.log_request_on_site(data, host)
end
end
diff --git a/lib/middleware/turbo_dev.rb b/lib/middleware/turbo_dev.rb
index beda1652243..53e81cb50df 100644
--- a/lib/middleware/turbo_dev.rb
+++ b/lib/middleware/turbo_dev.rb
@@ -13,7 +13,7 @@ module Middleware
# config.middleware.insert 0, Middleware::TurboDev
#
class TurboDev
- def initialize(app, settings={})
+ def initialize(app, settings = {})
@app = app
end
@@ -27,7 +27,7 @@ module Middleware
etag = etag.gsub "\"", ""
asset = Rails.application.assets.find_asset(name)
if asset && asset.digest == etag
- return [304,{},[]]
+ return [304, {}, []]
end
end
diff --git a/lib/new_post_manager.rb b/lib/new_post_manager.rb
index 7d99a8b4ed6..8777f1b8ff7 100644
--- a/lib/new_post_manager.rb
+++ b/lib/new_post_manager.rb
@@ -17,16 +17,16 @@ class NewPostManager
end
def self.handlers
- sorted_handlers.map {|h| h[:proc]}
+ sorted_handlers.map { |h| h[:proc] }
end
def self.clear_handlers!
@sorted_handlers = [{ priority: 0, proc: method(:default_handler) }]
end
- def self.add_handler(priority=0, &block)
+ def self.add_handler(priority = 0, &block)
sorted_handlers << { priority: priority, proc: block }
- @sorted_handlers.sort_by! {|h| -h[:priority]}
+ @sorted_handlers.sort_by! { |h| -h[:priority] }
end
def self.is_first_post?(manager)
@@ -129,7 +129,7 @@ class NewPostManager
def initialize(user, args)
@user = user
- @args = args.delete_if {|_, v| v.nil?}
+ @args = args.delete_if { |_, v| v.nil? }
end
def perform
@@ -158,11 +158,11 @@ class NewPostManager
end
# Enqueue this post in a queue
- def enqueue(queue, reason=nil)
+ def enqueue(queue, reason = nil)
result = NewPostResult.new(:enqueued)
enqueuer = PostEnqueuer.new(@user, queue)
- queued_args = {post_options: @args.dup}
+ queued_args = { post_options: @args.dup }
queued_args[:raw] = queued_args[:post_options].delete(:raw)
queued_args[:topic_id] = queued_args[:post_options].delete(:topic_id)
diff --git a/lib/new_post_result.rb b/lib/new_post_result.rb
index 63acab0b002..78725ea11f0 100644
--- a/lib/new_post_result.rb
+++ b/lib/new_post_result.rb
@@ -10,7 +10,7 @@ class NewPostResult
attr_accessor :queued_post
attr_accessor :pending_count
- def initialize(action, success=false)
+ def initialize(action, success = false)
@action = action
@success = success
end
diff --git a/lib/oneboxer.rb b/lib/oneboxer.rb
index 08d3ac4149b..42862abe325 100644
--- a/lib/oneboxer.rb
+++ b/lib/oneboxer.rb
@@ -18,16 +18,16 @@ module Oneboxer
end
def self.ignore_redirects
- @ignore_redirects ||= ['http://www.dropbox.com','http://store.steampowered.com', Discourse.base_url]
+ @ignore_redirects ||= ['http://www.dropbox.com', 'http://store.steampowered.com', Discourse.base_url]
end
- def self.preview(url, options=nil)
+ def self.preview(url, options = nil)
options ||= {}
invalidate(url) if options[:invalidate_oneboxes]
onebox_raw(url)[:preview]
end
- def self.onebox(url, options=nil)
+ def self.onebox(url, options = nil)
options ||= {}
invalidate(url) if options[:invalidate_oneboxes]
onebox_raw(url)[:onebox]
@@ -74,7 +74,7 @@ module Oneboxer
def self.append_source_topic_id(url, topic_id)
# hack urls to create proper expansions
- if url =~ Regexp.new("^#{Discourse.base_url.gsub(".","\\.")}.*$", true)
+ if url =~ Regexp.new("^#{Discourse.base_url.gsub(".", "\\.")}.*$", true)
uri = URI.parse(url) rescue nil
if uri && uri.path
route = Rails.application.routes.recognize_path(uri.path) rescue nil
@@ -86,7 +86,7 @@ module Oneboxer
url
end
- def self.apply(string_or_doc, args=nil)
+ def self.apply(string_or_doc, args = nil)
doc = string_or_doc
doc = Nokogiri::HTML::fragment(doc) if doc.is_a?(String)
changed = false
@@ -95,7 +95,7 @@ module Oneboxer
if args && args[:topic_id]
url = append_source_topic_id(url, args[:topic_id])
end
- onebox, _preview = yield(url,element)
+ onebox, _preview = yield(url, element)
if onebox
parsed_onebox = Nokogiri::HTML::fragment(onebox)
next unless parsed_onebox.children.count > 0
diff --git a/lib/pbkdf2.rb b/lib/pbkdf2.rb
index f21cc8aebce..1032b4a1ad0 100644
--- a/lib/pbkdf2.rb
+++ b/lib/pbkdf2.rb
@@ -20,18 +20,18 @@ class Pbkdf2
u = ret = prf(h, password, salt + [1].pack("N"))
2.upto(iterations) do
- u = prf(h, password, u)
+ u = prf(h, password, u)
ret.xor!(u)
end
- ret.bytes.map{|b| ("0" + b.to_s(16))[-2..-1]}.join("")
+ ret.bytes.map { |b| ("0" + b.to_s(16))[-2..-1] }.join("")
end
protected
# fallback xor in case we need it for jruby ... way slower
- def self.xor(x,y)
- x.bytes.zip(y.bytes).map{|x,y| x ^ y}.pack('c*')
+ def self.xor(x, y)
+ x.bytes.zip(y.bytes).map { |x, y| x ^ y }.pack('c*')
end
def self.prf(hash_function, password, data)
diff --git a/lib/pinned_check.rb b/lib/pinned_check.rb
index b1a25a4d5db..54543499b34 100644
--- a/lib/pinned_check.rb
+++ b/lib/pinned_check.rb
@@ -2,16 +2,16 @@
# taking into account anonymous users and users who have dismissed it
class PinnedCheck
- def self.unpinned?(topic,topic_user=nil)
+ def self.unpinned?(topic, topic_user = nil)
topic.pinned_at &&
topic_user &&
topic_user.cleared_pinned_at &&
topic_user.cleared_pinned_at > topic.pinned_at
end
- def self.pinned?(topic, topic_user=nil)
+ def self.pinned?(topic, topic_user = nil)
!!topic.pinned_at &&
- !unpinned?(topic,topic_user)
+ !unpinned?(topic, topic_user)
end
end
diff --git a/lib/plugin/auth_provider.rb b/lib/plugin/auth_provider.rb
index 570a245b081..996d9ba0413 100644
--- a/lib/plugin/auth_provider.rb
+++ b/lib/plugin/auth_provider.rb
@@ -12,7 +12,7 @@ class Plugin::AuthProvider
end
def to_json
- result = {name: name}
+ result = { name: name }
result['customUrl'] = custom_url if custom_url
result['titleOverride'] = title if title
result['titleSetting'] = title_setting if title_setting
diff --git a/lib/plugin/instance.rb b/lib/plugin/instance.rb
index ca749901078..50867afaa0e 100644
--- a/lib/plugin/instance.rb
+++ b/lib/plugin/instance.rb
@@ -58,14 +58,14 @@ class Plugin::Instance
}
end
- def initialize(metadata=nil, path=nil)
+ def initialize(metadata = nil, path = nil)
@metadata = metadata
@path = path
@idx = 0
end
def add_admin_route(label, location)
- @admin_route = {label: label, location: location}
+ @admin_route = { label: label, location: location }
end
def enabled?
@@ -74,7 +74,7 @@ class Plugin::Instance
delegate :name, to: :metadata
- def add_to_serializer(serializer, attr, define_include_method=true, &block)
+ def add_to_serializer(serializer, attr, define_include_method = true, &block)
klass = "#{serializer.to_s.classify}Serializer".constantize rescue "#{serializer.to_s}Serializer".constantize
klass.attributes(attr) unless attr.to_s.start_with?("include_")
@@ -164,7 +164,7 @@ class Plugin::Instance
def delete_extra_automatic_assets(good_paths)
return unless Dir.exists? auto_generated_path
- filenames = good_paths.map{|f| File.basename(f)}
+ filenames = good_paths.map { |f| File.basename(f) }
# nuke old files
Dir.foreach(auto_generated_path) do |p|
next if [".", ".."].include?(p)
@@ -242,13 +242,13 @@ class Plugin::Instance
DiscoursePluginRegistry.register_html_builder(name, &block)
end
- def register_asset(file, opts=nil)
+ def register_asset(file, opts = nil)
full_path = File.dirname(path) << "/assets/" << file
assets << [full_path, opts]
end
def register_color_scheme(name, colors)
- color_schemes << {name: name, colors: colors}
+ color_schemes << { name: name, colors: colors }
end
def register_seed_data(key, value)
@@ -362,14 +362,13 @@ JS
target = Rails.root.to_s + "/public/plugins/"
Discourse::Utils.execute_command('mkdir', '-p', target)
- target << name.gsub(/\s/,"_")
+ target << name.gsub(/\s/, "_")
# TODO a cleaner way of registering and unregistering
Discourse::Utils.execute_command('rm', '-f', target)
Discourse::Utils.execute_command('ln', '-s', public_data, target)
end
end
-
def auth_provider(opts)
provider = Plugin::AuthProvider.new
@@ -379,7 +378,6 @@ JS
auth_providers << provider
end
-
# shotgun approach to gem loading, in future we need to hack bundler
# to at least determine dependencies do not clash before loading
#
@@ -390,7 +388,7 @@ JS
PluginGem.load(path, name, version, opts)
end
- def enabled_site_setting(setting=nil)
+ def enabled_site_setting(setting = nil)
if setting
@enabled_site_setting = setting
else
@@ -421,9 +419,9 @@ JS
Dir.glob("#{root_path}/**/*") do |f|
if File.directory?(f)
- yield [f,true]
+ yield [f, true]
elsif f.to_s.ends_with?(".js.es6") || f.to_s.ends_with?(".hbs")
- yield [f,false]
+ yield [f, false]
end
end
end
@@ -442,7 +440,7 @@ JS
def write_asset(path, contents)
unless File.exists?(path)
ensure_directory(path)
- File.open(path,"w") { |f| f.write(contents) }
+ File.open(path, "w") { |f| f.write(contents) }
end
end
diff --git a/lib/plugin/theme.rb b/lib/plugin/theme.rb
index 04128d6f352..41ba7dc9992 100644
--- a/lib/plugin/theme.rb
+++ b/lib/plugin/theme.rb
@@ -27,4 +27,3 @@ class Plugin::Theme
end
end
end
-
diff --git a/lib/plugin_gem.rb b/lib/plugin_gem.rb
index 48df5e57c8b..6347cc8dce9 100644
--- a/lib/plugin_gem.rb
+++ b/lib/plugin_gem.rb
@@ -1,5 +1,5 @@
module PluginGem
- def self.load(path, name, version, opts=nil)
+ def self.load(path, name, version, opts = nil)
opts ||= {}
gems_path = File.dirname(path) + "/gems/#{RUBY_VERSION}"
diff --git a/lib/post_creator.rb b/lib/post_creator.rb
index 405a7e0537a..42b70558235 100644
--- a/lib/post_creator.rb
+++ b/lib/post_creator.rb
@@ -239,8 +239,8 @@ class PostCreator
return unless post.reply_to_post_number.present?
reply_info = Post.where(topic_id: post.topic_id, post_number: post.reply_to_post_number)
- .select(:user_id, :post_type)
- .first
+ .select(:user_id, :post_type)
+ .first
if reply_info.present?
post.reply_to_user_id ||= reply_info.user_id
@@ -316,11 +316,11 @@ class PostCreator
def handle_spam
if @spam
- GroupMessage.create( Group[:moderators].name,
+ GroupMessage.create(Group[:moderators].name,
:spam_post_blocked,
- { user: @user,
- limit_once_per: 24.hours,
- message_params: {domains: @post.linked_hosts.keys.join(', ')} } )
+ user: @user,
+ limit_once_per: 24.hours,
+ message_params: { domains: @post.linked_hosts.keys.join(', ') })
elsif @post && errors.blank? && !skip_validations?
SpamRulesEnforcer.enforce!(@post)
end
@@ -339,7 +339,7 @@ class PostCreator
unless @topic.topic_allowed_users.where(user_id: @user.id).exists?
unless @topic.topic_allowed_groups.where('group_id IN (
SELECT group_id FROM group_users where user_id = ?
- )',@user.id).exists?
+ )', @user.id).exists?
@topic.topic_allowed_users.create!(user_id: @user.id)
end
end
@@ -363,11 +363,13 @@ class PostCreator
def find_category_id
@opts.delete(:category) if @opts[:archetype].present? && @opts[:archetype] == Archetype.private_message
- category = if (@opts[:category].is_a? Integer) || (@opts[:category] =~ /^\d+$/)
- Category.find_by(id: @opts[:category])
- else
- Category.find_by(name_lower: @opts[:category].try(:downcase))
- end
+ category =
+ if (@opts[:category].is_a? Integer) || (@opts[:category] =~ /^\d+$/)
+ Category.find_by(id: @opts[:category])
+ else
+ Category.find_by(name_lower: @opts[:category].try(:downcase))
+ end
+
category&.id
end
@@ -491,7 +493,6 @@ class PostCreator
last_read_post_number: @post.post_number,
highest_seen_post_number: @post.post_number)
-
# assume it took us 5 seconds of reading time to make a post
PostTiming.record_timing(topic_id: @post.topic_id,
user_id: @post.user_id,
diff --git a/lib/post_destroyer.rb b/lib/post_destroyer.rb
index 07f34e85ee7..3f7cdbffab1 100644
--- a/lib/post_destroyer.rb
+++ b/lib/post_destroyer.rb
@@ -6,10 +6,10 @@ class PostDestroyer
def self.destroy_old_hidden_posts
Post.where(deleted_at: nil, hidden: true)
- .where("hidden_at < ?", 30.days.ago)
- .find_each do |post|
- PostDestroyer.new(Discourse.system_user, post).destroy
- end
+ .where("hidden_at < ?", 30.days.ago)
+ .find_each do |post|
+ PostDestroyer.new(Discourse.system_user, post).destroy
+ end
end
def self.destroy_stubs
@@ -17,26 +17,26 @@ class PostDestroyer
# exclude deleted topics and posts that are actively flagged
Post.where(deleted_at: nil, user_deleted: true)
- .where("NOT EXISTS (
+ .where("NOT EXISTS (
SELECT 1 FROM topics t
WHERE t.deleted_at IS NOT NULL AND
t.id = posts.topic_id
)")
- .where("updated_at < ? AND post_number > 1", SiteSetting.delete_removed_posts_after.hours.ago)
- .where("NOT EXISTS (
+ .where("updated_at < ? AND post_number > 1", SiteSetting.delete_removed_posts_after.hours.ago)
+ .where("NOT EXISTS (
SELECT 1
FROM post_actions pa
WHERE pa.post_id = posts.id AND
pa.deleted_at IS NULL AND
pa.post_action_type_id IN (?)
)", PostActionType.notify_flag_type_ids)
- .find_each do |post|
+ .find_each do |post|
PostDestroyer.new(Discourse.system_user, post, context: context).destroy
end
end
- def initialize(user, post, opts={})
+ def initialize(user, post, opts = {})
@user = user
@post = post
@topic = post.topic if post
diff --git a/lib/post_jobs_enqueuer.rb b/lib/post_jobs_enqueuer.rb
index 3227f76c3cf..cb8fdb56c1a 100644
--- a/lib/post_jobs_enqueuer.rb
+++ b/lib/post_jobs_enqueuer.rb
@@ -1,5 +1,5 @@
class PostJobsEnqueuer
- def initialize(post, topic, new_topic, opts={})
+ def initialize(post, topic, new_topic, opts = {})
@post = post
@topic = topic
@new_topic = new_topic
diff --git a/lib/post_revisor.rb b/lib/post_revisor.rb
index 831faa47b43..f443a64d93b 100644
--- a/lib/post_revisor.rb
+++ b/lib/post_revisor.rb
@@ -42,7 +42,7 @@ class PostRevisor
attr_reader :category_changed
- def initialize(post, topic=nil)
+ def initialize(post, topic = nil)
@post = post
@topic = topic || post.topic
end
@@ -112,7 +112,7 @@ class PostRevisor
# - bypass_bump: do not bump the topic, even if last post
# - skip_validations: ask ActiveRecord to skip validations
# - skip_revision: do not create a new PostRevision record
- def revise!(editor, fields, opts={})
+ def revise!(editor, fields, opts = {})
@editor = editor
@fields = fields.with_indifferent_access
@opts = opts
@@ -197,7 +197,7 @@ class PostRevisor
end
def topic_changed?
- PostRevisor.tracked_topic_fields.keys.any? {|f| @fields.has_key?(f)}
+ PostRevisor.tracked_topic_fields.keys.any? { |f| @fields.has_key?(f) }
end
def revise_post
@@ -253,15 +253,15 @@ class PostRevisor
# UserActionCreator will create new UserAction records for the new owner
UserAction.where(target_post_id: @post.id)
- .where(user_id: prev_owner.id)
- .where(action_type: USER_ACTIONS_TO_REMOVE)
- .destroy_all
+ .where(user_id: prev_owner.id)
+ .where(action_type: USER_ACTIONS_TO_REMOVE)
+ .destroy_all
if @post.post_number == 1
UserAction.where(target_topic_id: @post.topic_id)
- .where(user_id: prev_owner.id)
- .where(action_type: UserAction::NEW_TOPIC)
- .destroy_all
+ .where(user_id: prev_owner.id)
+ .where(action_type: UserAction::NEW_TOPIC)
+ .destroy_all
end
end
@@ -283,9 +283,9 @@ class PostRevisor
# post owner changed
if prev_owner && new_owner && prev_owner != new_owner
likes = UserAction.where(target_post_id: @post.id)
- .where(user_id: prev_owner.id)
- .where(action_type: UserAction::WAS_LIKED)
- .update_all(user_id: new_owner.id)
+ .where(user_id: prev_owner.id)
+ .where(action_type: UserAction::WAS_LIKED)
+ .update_all(user_id: new_owner.id)
private_message = @post.topic.private_message?
@@ -412,8 +412,8 @@ class PostRevisor
def is_last_post?
!Post.where(topic_id: @topic.id)
- .where("post_number > ?", @post.post_number)
- .exists?
+ .where("post_number > ?", @post.post_number)
+ .exists?
end
def plugin_callbacks
diff --git a/lib/pretty_text.rb b/lib/pretty_text.rb
index 8c74bd6fe1f..cfe191f5051 100644
--- a/lib/pretty_text.rb
+++ b/lib/pretty_text.rb
@@ -128,7 +128,7 @@ module PrettyText
end
end
- def self.markdown(text, opts={})
+ def self.markdown(text, opts = {})
# we use the exact same markdown converter as the client
# TODO: use the same extensions on both client and server (in particular the template for mentions)
baked = nil
@@ -165,7 +165,7 @@ module PrettyText
__optInput.customEmoji = #{custom_emoji.to_json};
__optInput.emojiUnicodeReplacer = __emojiUnicodeReplacer;
__optInput.lookupInlineOnebox = __lookupInlineOnebox;
- #{opts[:linkify] == false ? "__optInput.linkify = false;": ""}
+ #{opts[:linkify] == false ? "__optInput.linkify = false;" : ""}
__optInput.censoredWords = #{WordWatcher.words_for_action(:censor).join('|').to_json};
JS
@@ -179,7 +179,6 @@ module PrettyText
buffer << "__textOptions = __buildOptions(__optInput);\n"
-
buffer << ("__pt = new __PrettyText(__textOptions);")
# Be careful disabling sanitization. We allow for custom emails
@@ -225,7 +224,7 @@ module PrettyText
end
end
- def self.cook(text, opts={})
+ def self.cook(text, opts = {})
options = opts.dup
# we have a minor inconsistency
@@ -281,7 +280,7 @@ module PrettyText
if !uri.host.present? ||
uri.host == site_uri.host ||
uri.host.ends_with?("." << site_uri.host) ||
- whitelist.any?{|u| uri.host == u || uri.host.ends_with?("." << u)}
+ whitelist.any? { |u| uri.host == u || uri.host.ends_with?("." << u) }
# we are good no need for nofollow
else
l["rel"] = "nofollow noopener"
@@ -328,7 +327,7 @@ module PrettyText
links
end
- def self.excerpt(html, max_length, options={})
+ def self.excerpt(html, max_length, options = {})
# TODO: properly fix this HACK in ExcerptParser without introducing XSS
doc = Nokogiri::HTML.fragment(html)
strip_image_wrapping(doc)
@@ -342,7 +341,7 @@ module PrettyText
# If the user is not basic, strip links from their bio
fragment = Nokogiri::HTML.fragment(string)
- fragment.css('a').each {|a| a.replace(a.inner_html) }
+ fragment.css('a').each { |a| a.replace(a.inner_html) }
fragment.to_html
end
diff --git a/lib/pretty_text/helpers.rb b/lib/pretty_text/helpers.rb
index c9859e04ad7..38abc290922 100644
--- a/lib/pretty_text/helpers.rb
+++ b/lib/pretty_text/helpers.rb
@@ -11,7 +11,7 @@ module PrettyText
I18n.t(key)
else
str = I18n.t(key, Hash[opts.entries].symbolize_keys).dup
- opts.each { |k,v| str.gsub!("{{#{k.to_s}}}", v.to_s) }
+ opts.each { |k, v| str.gsub!("{{#{k.to_s}}}", v.to_s) }
str
end
end
diff --git a/lib/primary_group_lookup.rb b/lib/primary_group_lookup.rb
index c61bb1b7e5e..5f488dfedf9 100644
--- a/lib/primary_group_lookup.rb
+++ b/lib/primary_group_lookup.rb
@@ -1,5 +1,5 @@
class PrimaryGroupLookup
- def initialize(user_ids=[])
+ def initialize(user_ids = [])
@user_ids = user_ids.tap(&:compact!).tap(&:uniq!).tap(&:flatten!)
end
@@ -20,13 +20,13 @@ class PrimaryGroupLookup
def user_lookup_hash
users_with_primary_group = User.where(id: @user_ids)
- .where.not(primary_group_id: nil)
- .select(:id, :primary_group_id)
+ .where.not(primary_group_id: nil)
+ .select(:id, :primary_group_id)
group_lookup = {}
group_ids = users_with_primary_group.map(&:primary_group_id).compact
Group.where(id: group_ids).select(self.class.lookup_columns)
- .each { |g| group_lookup[g.id] = g }
+ .each { |g| group_lookup[g.id] = g }
hash = {}
users_with_primary_group.each do |u|
diff --git a/lib/promotion.rb b/lib/promotion.rb
index 82dd5a998cf..368032144d5 100644
--- a/lib/promotion.rb
+++ b/lib/promotion.rb
@@ -7,7 +7,6 @@ class Promotion
@user = user
end
-
# Review a user for a promotion. Delegates work to a review_#{trust_level} method.
# Returns true if the user was promoted, false otherwise.
def review
@@ -17,7 +16,6 @@ class Promotion
# Promotion beyond basic requires some expensive queries, so don't do that here.
return false if @user.trust_level >= TrustLevel[2]
-
review_method = :"review_tl#{@user.trust_level}"
return send(review_method) if respond_to?(review_method)
@@ -43,7 +41,7 @@ class Promotion
new_level = level
if new_level < old_level && !@user.trust_level_locked
- next_up = new_level+1
+ next_up = new_level + 1
key = "tl#{next_up}_met?"
if self.class.respond_to?(key) && self.class.send(key, @user)
raise Discourse::InvalidAccess.new, I18n.t('trust_levels.change_failed_explanation',
@@ -62,10 +60,10 @@ class Promotion
if admin
StaffActionLogger.new(admin).log_trust_level_change(@user, old_level, new_level)
else
- UserHistory.create!( action: UserHistory.actions[:auto_trust_level_change],
- target_user_id: @user.id,
- previous_value: old_level,
- new_value: new_level)
+ UserHistory.create!(action: UserHistory.actions[:auto_trust_level_change],
+ target_user_id: @user.id,
+ previous_value: old_level,
+ new_value: new_level)
end
@user.save!
@user.user_profile.recook_bio
@@ -77,7 +75,6 @@ class Promotion
true
end
-
def self.tl2_met?(user)
stat = user.user_stat
return false if stat.topics_entered < SiteSetting.tl2_requires_topics_entered
diff --git a/lib/rate_limiter.rb b/lib/rate_limiter.rb
index 0985db91162..59359af107e 100644
--- a/lib/rate_limiter.rb
+++ b/lib/rate_limiter.rb
@@ -72,7 +72,7 @@ class RateLimiter
arr = $redis.lrange(@key, 0, @max) || []
t0 = Time.now.to_i
- arr.reject! {|a| (t0 - a.to_i) > @secs}
+ arr.reject! { |a| (t0 - a.to_i) > @secs }
@max - arr.size
end
diff --git a/lib/rate_limiter/limit_exceeded.rb b/lib/rate_limiter/limit_exceeded.rb
index 787bcdd381b..0836ecd3a7e 100644
--- a/lib/rate_limiter/limit_exceeded.rb
+++ b/lib/rate_limiter/limit_exceeded.rb
@@ -4,7 +4,7 @@ class RateLimiter
class LimitExceeded < StandardError
attr_reader :type
- def initialize(available_in, type=nil)
+ def initialize(available_in, type = nil)
@available_in = available_in
@type = type
end
diff --git a/lib/rate_limiter/on_create_record.rb b/lib/rate_limiter/on_create_record.rb
index 4232ab9a7f8..14374de6a12 100644
--- a/lib/rate_limiter/on_create_record.rb
+++ b/lib/rate_limiter/on_create_record.rb
@@ -13,7 +13,7 @@ class RateLimiter
return @rate_limiter if @rate_limiter.present?
limit_key = "create_#{self.class.name.underscore}"
- max_setting = if user && user.new_user? and SiteSetting.has_setting?("rate_limit_new_user_#{limit_key}")
+ max_setting = if user && user.new_user? && SiteSetting.has_setting?("rate_limit_new_user_#{limit_key}")
SiteSetting.send("rate_limit_new_user_#{limit_key}")
else
SiteSetting.send("rate_limit_#{limit_key}")
@@ -31,7 +31,7 @@ class RateLimiter
end
module ClassMethods
- def rate_limit(limiter_method=nil)
+ def rate_limit(limiter_method = nil)
limiter_method = limiter_method || :default_rate_limiter
diff --git a/lib/s3_helper.rb b/lib/s3_helper.rb
index 7fbfcc8d10e..778808a7018 100644
--- a/lib/s3_helper.rb
+++ b/lib/s3_helper.rb
@@ -6,7 +6,7 @@ class S3Helper
attr_reader :s3_bucket_name
- def initialize(s3_upload_bucket, tombstone_prefix='', options={})
+ def initialize(s3_upload_bucket, tombstone_prefix = '', options = {})
@s3_options = default_s3_options.merge(options)
@s3_bucket_name, @s3_bucket_folder_path = begin
@@ -24,14 +24,14 @@ class S3Helper
check_missing_options
end
- def upload(file, path, options={})
+ def upload(file, path, options = {})
path = get_path_for_s3_upload(path)
obj = s3_bucket.object(path)
obj.upload_file(file, options)
path
end
- def remove(s3_filename, copy_to_tombstone=false)
+ def remove(s3_filename, copy_to_tombstone = false)
bucket = s3_bucket
# copy the file in tombstone
@@ -50,9 +50,8 @@ class S3Helper
return if @tombstone_prefix.blank?
# cf. http://docs.aws.amazon.com/AmazonS3/latest/dev/object-lifecycle-mgmt.html
- s3_resource.client.put_bucket_lifecycle({
- bucket: @s3_bucket_name,
- lifecycle_configuration: {
+ s3_resource.client.put_bucket_lifecycle(bucket: @s3_bucket_name,
+ lifecycle_configuration: {
rules: [
{
id: "purge-tombstone",
@@ -61,8 +60,7 @@ class S3Helper
prefix: @tombstone_prefix
}
]
- }
- })
+ })
end
private
diff --git a/lib/scheduler/defer.rb b/lib/scheduler/defer.rb
index e055e7f9206..7b4abf7d64c 100644
--- a/lib/scheduler/defer.rb
+++ b/lib/scheduler/defer.rb
@@ -22,7 +22,7 @@ module Scheduler
@async = val
end
- def later(desc = nil, db=RailsMultisite::ConnectionManagement.current_db, &blk)
+ def later(desc = nil, db = RailsMultisite::ConnectionManagement.current_db, &blk)
if @async
start_thread unless (@thread && @thread.alive?) || @paused
@queue << [db, blk, desc]
@@ -43,7 +43,7 @@ module Scheduler
def do_all_work
while !@queue.empty?
- do_work(_non_block=true)
+ do_work(_non_block = true)
end
end
@@ -61,16 +61,16 @@ module Scheduler
end
# using non_block to match Ruby #deq
- def do_work(non_block=false)
+ def do_work(non_block = false)
db, job, desc = @queue.deq(non_block)
begin
RailsMultisite::ConnectionManagement.establish_connection(db: db) if db
job.call
rescue => ex
- Discourse.handle_job_exception(ex, {message: "Running deferred code '#{desc}'"})
+ Discourse.handle_job_exception(ex, message: "Running deferred code '#{desc}'")
end
rescue => ex
- Discourse.handle_job_exception(ex, {message: "Processing deferred code queue"})
+ Discourse.handle_job_exception(ex, message: "Processing deferred code queue")
ensure
ActiveRecord::Base.connection_handler.clear_active_connections!
end
diff --git a/lib/scheduler/manager.rb b/lib/scheduler/manager.rb
index 48466a6687c..6b6cf9fd0e9 100644
--- a/lib/scheduler/manager.rb
+++ b/lib/scheduler/manager.rb
@@ -42,13 +42,13 @@ module Scheduler
def keep_alive
@manager.keep_alive
rescue => ex
- Discourse.handle_job_exception(ex, {message: "Scheduling manager keep-alive"})
+ Discourse.handle_job_exception(ex, message: "Scheduling manager keep-alive")
end
def reschedule_orphans
@manager.reschedule_orphans!
rescue => ex
- Discourse.handle_job_exception(ex, {message: "Scheduling manager orphan rescheduler"})
+ Discourse.handle_job_exception(ex, message: "Scheduling manager orphan rescheduler")
end
def hostname
@@ -117,7 +117,7 @@ module Scheduler
@mutex.synchronize { info.write! }
end
rescue => ex
- Discourse.handle_job_exception(ex, {message: "Processing scheduled job queue"})
+ Discourse.handle_job_exception(ex, message: "Processing scheduled job queue")
ensure
@running = false
ActiveRecord::Base.connection_handler.clear_active_connections!
@@ -175,11 +175,11 @@ module Scheduler
end
- def self.without_runner(redis=nil)
+ def self.without_runner(redis = nil)
self.new(redis, skip_runner: true)
end
- def initialize(redis = nil, options=nil)
+ def initialize(redis = nil, options = nil)
@redis = $redis || redis
@random_ratio = 0.1
unless options && options[:skip_runner]
@@ -236,7 +236,7 @@ module Scheduler
end
end
- def reschedule_orphans_on!(hostname=nil)
+ def reschedule_orphans_on!(hostname = nil)
redis.zrange(Manager.queue_key(hostname), 0, -1).each do |key|
klass = get_klass(key)
next unless klass
@@ -264,7 +264,7 @@ module Scheduler
end
end
- def schedule_next_job(hostname=nil)
+ def schedule_next_job(hostname = nil)
(key, due), _ = redis.zrange Manager.queue_key(hostname), 0, 0, withscores: true
return unless key
@@ -310,7 +310,6 @@ module Scheduler
end
end
-
def self.discover_schedules
# hack for development reloader is crazytown
# multiple classes with same name can be in
@@ -343,7 +342,7 @@ module Scheduler
"_scheduler_lock_"
end
- def self.queue_key(hostname=nil)
+ def self.queue_key(hostname = nil)
if hostname
"_scheduler_queue_#{hostname}_"
else
@@ -351,7 +350,7 @@ module Scheduler
end
end
- def self.schedule_key(klass,hostname=nil)
+ def self.schedule_key(klass, hostname = nil)
if hostname
"_scheduler_#{klass}_#{hostname}"
else
diff --git a/lib/scheduler/schedule.rb b/lib/scheduler/schedule.rb
index a00fe1330f4..05c8085bc1f 100644
--- a/lib/scheduler/schedule.rb
+++ b/lib/scheduler/schedule.rb
@@ -1,13 +1,13 @@
module Scheduler::Schedule
- def daily(options=nil)
+ def daily(options = nil)
if options
@daily = options
end
@daily
end
- def every(duration=nil)
+ def every(duration = nil)
if duration
@every = duration
if manager = Scheduler::Manager.current
diff --git a/lib/scheduler/web.rb b/lib/scheduler/web.rb
index 6d414ce8e91..e9d573ea442 100644
--- a/lib/scheduler/web.rb
+++ b/lib/scheduler/web.rb
@@ -15,8 +15,8 @@ module Scheduler
return unless duration
if duration < 1000
"#{duration}ms"
- elsif duration < 60*1000
- "#{'%.2f' % (duration/1000.0)} secs"
+ elsif duration < 60 * 1000
+ "#{'%.2f' % (duration / 1000.0)} secs"
end
end
end
@@ -24,7 +24,7 @@ module Scheduler
app.get "/scheduler" do
RailsMultisite::ConnectionManagement.with_connection("default") do
@manager = Scheduler::Manager.without_runner
- @schedules = Scheduler::Manager.discover_schedules.sort do |a,b|
+ @schedules = Scheduler::Manager.discover_schedules.sort do |a, b|
a_next = a.schedule_info.next_run
b_next = b.schedule_info.next_run
if a_next && b_next
@@ -35,13 +35,13 @@ module Scheduler
1
end
end
- erb File.read(File.join(VIEWS, 'scheduler.erb')), locals: {view_path: VIEWS}
+ erb File.read(File.join(VIEWS, 'scheduler.erb')), locals: { view_path: VIEWS }
end
end
app.get "/scheduler/history" do
@scheduler_stats = SchedulerStat.order('started_at desc').limit(200)
- erb File.read(File.join(VIEWS, 'history.erb')), locals: {view_path: VIEWS}
+ erb File.read(File.join(VIEWS, 'history.erb')), locals: { view_path: VIEWS }
end
app.post "/scheduler/:name/trigger" do
diff --git a/lib/score_calculator.rb b/lib/score_calculator.rb
index 8901c12e5ab..baf93cb457e 100644
--- a/lib/score_calculator.rb
+++ b/lib/score_calculator.rb
@@ -11,19 +11,18 @@ class ScoreCalculator
}
end
- def initialize(weightings=nil)
+ def initialize(weightings = nil)
@weightings = weightings || ScoreCalculator.default_score_weights
end
# Calculate the score for all posts based on the weightings
- def calculate(opts=nil)
+ def calculate(opts = nil)
update_posts_score(opts)
update_posts_rank(opts)
update_topics_rank(opts)
update_topics_percent_rank(opts)
end
-
private
def update_posts_score(opts)
@@ -110,7 +109,6 @@ SQL
posts_required: SiteSetting.summary_posts_required,
score_required: SiteSetting.summary_score_threshold)
-
filter_topics(builder, opts)
builder.exec
@@ -131,7 +129,6 @@ SQL
builder.exec
end
-
def filter_topics(builder, opts)
return builder unless opts
diff --git a/lib/screening_model.rb b/lib/screening_model.rb
index 26ed789c6c4..62cc0eb8a1a 100644
--- a/lib/screening_model.rb
+++ b/lib/screening_model.rb
@@ -24,7 +24,7 @@ module ScreeningModel
end
def action_name=(arg)
- raise ArgumentError.new("Invalid action type #{arg}") if arg.nil? or !self.class.actions.has_key?(arg.to_sym)
+ raise ArgumentError.new("Invalid action type #{arg}") if arg.nil? || !self.class.actions.has_key?(arg.to_sym)
self.action_type = self.class.actions[arg.to_sym]
end
diff --git a/lib/search.rb b/lib/search.rb
index 0edd4eecf7c..843127e8499 100644
--- a/lib/search.rb
+++ b/lib/search.rb
@@ -27,25 +27,25 @@ class Search
# base docker config
#
case SiteSetting.default_locale.to_sym
- when :da then 'danish'
- when :de then 'german'
- when :en then 'english'
- when :es then 'spanish'
- when :fr then 'french'
- when :it then 'italian'
- when :nl then 'dutch'
- when :nb_NO then 'norwegian'
- when :pt then 'portuguese'
- when :pt_BR then 'portuguese'
- when :sv then 'swedish'
- when :ru then 'russian'
+ when :da then 'danish'
+ when :de then 'german'
+ when :en then 'english'
+ when :es then 'spanish'
+ when :fr then 'french'
+ when :it then 'italian'
+ when :nl then 'dutch'
+ when :nb_NO then 'norwegian'
+ when :pt then 'portuguese'
+ when :pt_BR then 'portuguese'
+ when :sv then 'swedish'
+ when :ru then 'russian'
else 'simple' # use the 'simple' stemmer for other languages
end
end
def self.rebuild_problem_posts(limit = 10000)
posts = Post.joins(:topic)
- .where('posts.id IN (
+ .where('posts.id IN (
SELECT p2.id FROM posts p2
LEFT JOIN post_search_data pd ON locale = ? AND p2.id = pd.post_id
WHERE pd.post_id IS NULL
@@ -58,7 +58,7 @@ class Search
end
posts = Post.joins(:topic)
- .where('posts.id IN (
+ .where('posts.id IN (
SELECT p2.id FROM posts p2
LEFT JOIN topic_search_data pd ON locale = ? AND p2.topic_id = pd.topic_id
WHERE pd.topic_id IS NULL AND p2.post_number = 1
@@ -105,7 +105,7 @@ class Search
month = $2 ? $3.to_i : 1
day = $4 ? $5.to_i : 1
- return if day==0 || month==0 || day > 31 || month > 12
+ return if day == 0 || month == 0 || day > 31 || month > 12
return Time.zone.parse("#{year}-#{month}-#{day}") rescue nil
end
@@ -131,17 +131,16 @@ class Search
def self.min_post_id_no_cache
return 0 unless SiteSetting.search_prefer_recent_posts?
-
offset, has_more = Post.unscoped
- .order('id desc')
- .offset(SiteSetting.search_recent_posts_size-1)
- .limit(2)
- .pluck(:id)
+ .order('id desc')
+ .offset(SiteSetting.search_recent_posts_size - 1)
+ .limit(2)
+ .pluck(:id)
has_more ? offset : 0
end
- def self.min_post_id(opts=nil)
+ def self.min_post_id(opts = nil)
return 0 unless SiteSetting.search_prefer_recent_posts?
# It can be quite slow to count all the posts so let's cache it
@@ -153,7 +152,7 @@ class Search
attr_accessor :term
attr_reader :clean_term
- def initialize(term, opts=nil)
+ def initialize(term, opts = nil)
@opts = opts || {}
@guardian = @opts[:guardian] || Guardian.new
@search_context = @opts[:search_context]
@@ -195,7 +194,7 @@ class Search
@valid
end
- def self.execute(term, opts=nil)
+ def self.execute(term, opts = nil)
self.new(term, opts).execute
end
@@ -213,7 +212,7 @@ class Search
unless @filters.present? || @opts[:search_for_id]
min_length = @opts[:min_search_term_length] || SiteSetting.min_search_term_length
- terms = (@term || '').split(/\s(?=(?:[^"]|"[^"]*")*$)/).reject {|t| t.length < min_length }
+ terms = (@term || '').split(/\s(?=(?:[^"]|"[^"]*")*$)/).reject { |t| t.length < min_length }
if terms.blank?
@term = ''
@@ -240,7 +239,7 @@ class Search
@results
end
- def self.advanced_filter(trigger,&block)
+ def self.advanced_filter(trigger, &block)
(@advanced_filters ||= {})[trigger] = block
end
@@ -292,11 +291,11 @@ class Search
end
end
- advanced_filter(/in:wiki/) do |posts,match|
+ advanced_filter(/in:wiki/) do |posts, match|
posts.where(wiki: true)
end
- advanced_filter(/badge:(.*)/) do |posts,match|
+ advanced_filter(/badge:(.*)/) do |posts, match|
badge_id = Badge.where('name ilike ? OR id = ?', match, match.to_i).pluck(:id).first
if badge_id
posts.where('posts.user_id IN (SELECT ub.user_id FROM user_badges ub WHERE ub.badge_id = ?)', badge_id)
@@ -323,7 +322,7 @@ class Search
posts.where("posts.user_id = #{@guardian.user.id}") if @guardian.user
end
- advanced_filter(/in:(watching|tracking)/) do |posts,match|
+ advanced_filter(/in:(watching|tracking)/) do |posts, match|
if @guardian.user
level = TopicUser.notification_levels[match.to_sym]
posts.where("posts.topic_id IN (
@@ -358,7 +357,7 @@ class Search
end
end
- advanced_filter(/category:(.+)/) do |posts,match|
+ advanced_filter(/category:(.+)/) do |posts, match|
exact = false
if match[0] == "="
@@ -381,7 +380,7 @@ class Search
end
end
- advanced_filter(/^\#([a-zA-Z0-9\-:=]+)/) do |posts,match|
+ advanced_filter(/^\#([a-zA-Z0-9\-:=]+)/) do |posts, match|
exact = true
@@ -422,7 +421,7 @@ class Search
end
end
- advanced_filter(/group:(.+)/) do |posts,match|
+ advanced_filter(/group:(.+)/) do |posts, match|
group_id = Group.where('name ilike ? OR (id = ? AND id > 0)', match, match.to_i).pluck(:id).first
if group_id
posts.where("posts.user_id IN (select gu.user_id from group_users gu where gu.group_id = ?)", group_id)
@@ -431,7 +430,7 @@ class Search
end
end
- advanced_filter(/user:(.+)/) do |posts,match|
+ advanced_filter(/user:(.+)/) do |posts, match|
user_id = User.where(staged: false).where('username_lower = ? OR id = ?', match.downcase, match.to_i).pluck(:id).first
if user_id
posts.where("posts.user_id = #{user_id}")
@@ -440,7 +439,7 @@ class Search
end
end
- advanced_filter(/^\@([a-zA-Z0-9_\-.]+)/) do |posts,match|
+ advanced_filter(/^\@([a-zA-Z0-9_\-.]+)/) do |posts, match|
user_id = User.where(staged: false).where(username_lower: match.downcase).pluck(:id).first
if user_id
posts.where("posts.user_id = #{user_id}")
@@ -449,7 +448,7 @@ class Search
end
end
- advanced_filter(/before:(.*)/) do |posts,match|
+ advanced_filter(/before:(.*)/) do |posts, match|
if date = Search.word_to_date(match)
posts.where("posts.created_at < ?", date)
else
@@ -457,7 +456,7 @@ class Search
end
end
- advanced_filter(/after:(.*)/) do |posts,match|
+ advanced_filter(/after:(.*)/) do |posts, match|
if date = Search.word_to_date(match)
posts.where("posts.created_at > ?", date)
else
@@ -490,16 +489,15 @@ class Search
private
-
def process_advanced_search!(term)
- term.to_s.scan(/(([^" \t\n\x0B\f\r]+)?(("[^"]+")?))/).to_a.map do |(word,_)|
+ term.to_s.scan(/(([^" \t\n\x0B\f\r]+)?(("[^"]+")?))/).to_a.map do |(word, _)|
next if word.blank?
found = false
Search.advanced_filters.each do |matcher, block|
- cleaned = word.gsub(/["']/,"")
+ cleaned = word.gsub(/["']/, "")
if cleaned =~ matcher
(@filters ||= []) << [block, $1]
found = true
@@ -539,7 +537,6 @@ class Search
end.compact.join(' ')
end
-
def find_grouped_results
if @results.type_filter.present?
@@ -600,11 +597,11 @@ class Search
secure_category_ids
categories = Category.includes(:category_search_data)
- .where("category_search_data.search_data @@ #{ts_query}")
- .references(:category_search_data)
- .order("topics_month DESC")
- .secured(@guardian)
- .limit(@limit)
+ .where("category_search_data.search_data @@ #{ts_query}")
+ .references(:category_search_data)
+ .order("topics_month DESC")
+ .secured(@guardian)
+ .limit(@limit)
categories.each do |category|
@results.add(category)
@@ -615,37 +612,37 @@ class Search
return if SiteSetting.hide_user_profiles_from_public && !@guardian.user
users = User.includes(:user_search_data)
- .references(:user_search_data)
- .where(active: true)
- .where(staged: false)
- .where("user_search_data.search_data @@ #{ts_query("simple")}")
- .order("CASE WHEN username_lower = '#{@original_term.downcase}' THEN 0 ELSE 1 END")
- .order("last_posted_at DESC")
- .limit(@limit)
+ .references(:user_search_data)
+ .where(active: true)
+ .where(staged: false)
+ .where("user_search_data.search_data @@ #{ts_query("simple")}")
+ .order("CASE WHEN username_lower = '#{@original_term.downcase}' THEN 0 ELSE 1 END")
+ .order("last_posted_at DESC")
+ .limit(@limit)
users.each do |user|
@results.add(user)
end
end
- def posts_query(limit, opts=nil)
+ def posts_query(limit, opts = nil)
opts ||= {}
posts = Post.where(post_type: Topic.visible_post_types(@guardian.user))
- .joins(:post_search_data, :topic)
- .joins("LEFT JOIN categories ON categories.id = topics.category_id")
- .where("topics.deleted_at" => nil)
- .where("topics.visible")
+ .joins(:post_search_data, :topic)
+ .joins("LEFT JOIN categories ON categories.id = topics.category_id")
+ .where("topics.deleted_at" => nil)
+ .where("topics.visible")
is_topic_search = @search_context.present? && @search_context.is_a?(Topic)
if opts[:private_messages] || (is_topic_search && @search_context.private_message?)
- posts = posts.where("topics.archetype = ?", Archetype.private_message)
+ posts = posts.where("topics.archetype = ?", Archetype.private_message)
unless @guardian.is_admin?
posts = posts.private_posts_for_user(@guardian.user)
end
else
- posts = posts.where("topics.archetype <> ?", Archetype.private_message)
+ posts = posts.where("topics.archetype <> ?", Archetype.private_message)
end
if @term.present?
@@ -696,7 +693,7 @@ class Search
posts = posts.where("topics.category_id in (?)", category_ids)
elsif @search_context.is_a?(Topic)
posts = posts.where("topics.id = #{@search_context.id}")
- .order("posts.post_number #{@order == :latest ? "DESC" : ""}")
+ .order("posts.post_number #{@order == :latest ? "DESC" : ""}")
end
end
@@ -767,11 +764,11 @@ class Search
t.split(/[\)\(&']/)[0]
end.compact!
- query = Post.sanitize(all_terms.map {|t| "'#{PG::Connection.escape_string(t)}':*"}.join(" #{joiner} "))
+ query = Post.sanitize(all_terms.map { |t| "'#{PG::Connection.escape_string(t)}':*" }.join(" #{joiner} "))
"TO_TSQUERY(#{locale || query_locale}, #{query})"
end
- def ts_query(locale=nil)
+ def ts_query(locale = nil)
@ts_query_cache ||= {}
@ts_query_cache[(locale || query_locale) + " " + @term] ||= Search.ts_query(@term, locale)
end
@@ -825,7 +822,7 @@ class Search
end
if added < @limit
- aggregate_posts(post_sql[:remaining]).each {|p| @results.add(p) }
+ aggregate_posts(post_sql[:remaining]).each { |p| @results.add(p) }
end
end
diff --git a/lib/search/grouped_search_results.rb b/lib/search/grouped_search_results.rb
index d6677155c28..90ca05f229f 100644
--- a/lib/search/grouped_search_results.rb
+++ b/lib/search/grouped_search_results.rb
@@ -57,8 +57,7 @@ class Search
end
end
-
- def self.blurb_for(cooked, term=nil, blurb_length=200)
+ def self.blurb_for(cooked, term = nil, blurb_length = 200)
cooked = SearchIndexer::HtmlScrubber.scrub(cooked).squish
blurb = nil
diff --git a/lib/secure_session.rb b/lib/secure_session.rb
index bbe9a71833e..ecb46a6f67e 100644
--- a/lib/secure_session.rb
+++ b/lib/secure_session.rb
@@ -8,7 +8,7 @@ class SecureSession
$redis.get("#{@prefix}#{key}")
end
- def []=(key,val)
+ def []=(key, val)
if val == nil
$redis.del("#{@prefix}#{key}")
else
diff --git a/lib/single_sign_on.rb b/lib/single_sign_on.rb
index b8a20a80eb8..7578a8aabed 100644
--- a/lib/single_sign_on.rb
+++ b/lib/single_sign_on.rb
@@ -43,7 +43,7 @@ class SingleSignOn
sso.send("#{k}=", val)
end
- decoded_hash.each do |k,v|
+ decoded_hash.each do |k, v|
if field = k[/^custom\.(.+)$/, 1]
sso.custom_fields[field] = v
end
@@ -72,7 +72,7 @@ class SingleSignOn
OpenSSL::HMAC.hexdigest("sha256", sso_secret, payload)
end
- def to_url(base_url=nil)
+ def to_url(base_url = nil)
base = "#{base_url || sso_url}"
"#{base}#{base.include?('?') ? '&' : '?'}#{payload}"
end
@@ -86,7 +86,7 @@ class SingleSignOn
payload = {}
ACCESSORS.each do |k|
- next if (val = send k) == nil
+ next if (val = send k) == nil
payload[k] = val
end
diff --git a/lib/site_setting_extension.rb b/lib/site_setting_extension.rb
index 99ab174d22d..5b9fb8987d6 100644
--- a/lib/site_setting_extension.rb
+++ b/lib/site_setting_extension.rb
@@ -169,13 +169,13 @@ module SiteSettingExtension
end
def client_settings_json_uncached
- MultiJson.dump(Hash[*@client_settings.map{|n| [n, self.send(n)]}.flatten])
+ MultiJson.dump(Hash[*@client_settings.map { |n| [n, self.send(n)] }.flatten])
end
# Retrieve all settings
- def all_settings(include_hidden=false)
+ def all_settings(include_hidden = false)
@defaults
- .reject{|s, _| hidden_settings.include?(s) && !include_hidden}
+ .reject { |s, _| hidden_settings.include?(s) && !include_hidden }
.map do |s, v|
value = send(s)
type = types[get_data_type(s, value)]
@@ -190,9 +190,9 @@ module SiteSettingExtension
}
if type == :enum && enum_class(s)
- opts.merge!({valid_values: enum_class(s).values, translate_names: enum_class(s).translate_names?})
+ opts.merge!(valid_values: enum_class(s).values, translate_names: enum_class(s).translate_names?)
elsif type == :enum
- opts.merge!({valid_values: choices[s].map{|c| {name: c, value: c}}, translate_names: false})
+ opts.merge!(valid_values: choices[s].map { |c| { name: c, value: c } }, translate_names: false)
end
opts[:textarea] = true if static_types[s] == :textarea
@@ -219,7 +219,7 @@ module SiteSettingExtension
ensure_listen_for_changes
old = current
- new_hash = Hash[*(provider.all.map { |s|
+ new_hash = Hash[*(provider.all.map { |s|
[s.name.intern, convert(s.value, s.data_type, s.name)]
}.to_a.flatten)]
@@ -325,11 +325,11 @@ module SiteSettingExtension
end
def notify_changed!
- MessageBus.publish('/site_settings', {process: process_id})
+ MessageBus.publish('/site_settings', process: process_id)
end
def notify_clients!(name)
- MessageBus.publish('/client_settings', {name: name, value: self.send(name)})
+ MessageBus.publish('/client_settings', name: name, value: self.send(name))
end
def has_setting?(name)
@@ -371,7 +371,7 @@ module SiteSettingExtension
end
end
- def set_and_log(name, value, user=Discourse.system_user)
+ def set_and_log(name, value, user = Discourse.system_user)
prev_value = send(name)
set(name, value)
StaffActionLogger.new(user).log_site_setting_change(name, prev_value, value) if has_setting?(name)
@@ -389,14 +389,14 @@ module SiteSettingExtension
deletions = []
new_hash.each do |name, value|
- changes << [name,value] if !old.has_key?(name) || old[name] != value
+ changes << [name, value] if !old.has_key?(name) || old[name] != value
end
- old.each do |name,value|
- deletions << [name,value] unless new_hash.has_key?(name)
+ old.each do |name, value|
+ deletions << [name, value] unless new_hash.has_key?(name)
end
- [changes,deletions]
+ [changes, deletions]
end
def get_data_type(name, val)
diff --git a/lib/site_settings/db_provider.rb b/lib/site_settings/db_provider.rb
index 7757299fbd7..4410e05eeef 100644
--- a/lib/site_settings/db_provider.rb
+++ b/lib/site_settings/db_provider.rb
@@ -33,8 +33,8 @@ class SiteSettings::DbProvider
model ||= @model.new
model.name = name
- model.value = value
- model.data_type = data_type
+ model.value = value
+ model.data_type = data_type
# save! used to ensure after_commit is called
model.save!
diff --git a/lib/slug.rb b/lib/slug.rb
index 0f3b62b3f07..5ee570198ee 100644
--- a/lib/slug.rb
+++ b/lib/slug.rb
@@ -5,11 +5,12 @@ module Slug
CHAR_FILTER_REGEXP = /[:\/\?#\[\]@!\$&'\(\)\*\+,;=_\.~%\\`^\s|\{\}"<>]+/ # :/?#[]@!$&'()*+,;=_.~%\`^|{}"<>
def self.for(string, default = 'topic')
- slug = case (SiteSetting.slug_generation_method || :ascii).to_sym
- when :ascii then self.ascii_generator(string)
- when :encoded then self.encoded_generator(string)
- when :none then self.none_generator(string)
- end
+ slug =
+ case (SiteSetting.slug_generation_method || :ascii).to_sym
+ when :ascii then self.ascii_generator(string)
+ when :encoded then self.encoded_generator(string)
+ when :none then self.none_generator(string)
+ end
# Reject slugs that only contain numbers, because they would be indistinguishable from ids.
slug = (slug =~ /[^\d]/ ? slug : '')
slug.blank? ? default : slug
@@ -23,8 +24,8 @@ module Slug
def self.ascii_generator(string)
string.tr("'", "")
- .parameterize
- .tr("_", "-")
+ .parameterize
+ .tr("_", "-")
end
def self.encoded_generator(string)
@@ -32,10 +33,10 @@ module Slug
# including reserved characters from RFC3986.
# See also URI::REGEXP::PATTERN.
string.strip
- .gsub(/\s+/, '-')
- .gsub(CHAR_FILTER_REGEXP, '')
- .gsub(/\A-+|-+\z/, '') # remove possible trailing and preceding dashes
- .squeeze('-') # squeeze continuous dashes to prettify slug
+ .gsub(/\s+/, '-')
+ .gsub(CHAR_FILTER_REGEXP, '')
+ .gsub(/\A-+|-+\z/, '') # remove possible trailing and preceding dashes
+ .squeeze('-') # squeeze continuous dashes to prettify slug
end
def self.none_generator(string)
diff --git a/lib/source_url.rb b/lib/source_url.rb
index 41d4fe5decc..90a4391f5b1 100644
--- a/lib/source_url.rb
+++ b/lib/source_url.rb
@@ -6,7 +6,7 @@ class SourceURL < Tilt::Template
source = input[:data]
context = input[:environment].context_class.new(input)
- result = new(filename){source}.render(context)
+ result = new(filename) { source }.render(context)
context.metadata.merge(data: result)
end
diff --git a/lib/spam_handler.rb b/lib/spam_handler.rb
index b14532c27d6..2b8253512c3 100644
--- a/lib/spam_handler.rb
+++ b/lib/spam_handler.rb
@@ -4,15 +4,15 @@ class SpamHandler
return false if SiteSetting.max_new_accounts_per_registration_ip <= 0
tl2_plus_accounts_with_same_ip = User.where("trust_level >= ?", TrustLevel[2])
- .where(ip_address: ip_address.to_s)
- .count
+ .where(ip_address: ip_address.to_s)
+ .count
return false if tl2_plus_accounts_with_same_ip > 0
staff_user_ids = Group[:staff].user_ids - [-1]
staff_members_with_same_ip = User.where(id: staff_user_ids)
- .where(ip_address: ip_address.to_s)
- .count
+ .where(ip_address: ip_address.to_s)
+ .count
return false if staff_members_with_same_ip > 0
@@ -20,9 +20,9 @@ class SpamHandler
return false if ip_whitelisted
tl0_accounts_with_same_ip = User.unscoped
- .where(trust_level: TrustLevel[0])
- .where(ip_address: ip_address.to_s)
- .count
+ .where(trust_level: TrustLevel[0])
+ .where(ip_address: ip_address.to_s)
+ .count
tl0_accounts_with_same_ip >= SiteSetting.max_new_accounts_per_registration_ip
end
diff --git a/lib/sql_builder.rb b/lib/sql_builder.rb
index 70e461095c3..35598c57232 100644
--- a/lib/sql_builder.rb
+++ b/lib/sql_builder.rb
@@ -1,13 +1,13 @@
class SqlBuilder
- def initialize(template,klass=nil)
+ def initialize(template, klass = nil)
@args = {}
@sql = template
@sections = {}
@klass = klass
end
- [:set, :where2,:where,:order_by,:limit,:left_join,:join,:offset, :select].each do |k|
+ [:set, :where2, :where, :order_by, :limit, :left_join, :join, :offset, :select].each do |k|
define_method k do |data, args = {}|
@args.merge!(args)
@sections[k] ||= []
@@ -18,7 +18,7 @@ class SqlBuilder
def secure_category(secure_category_ids, category_alias = 'c')
if secure_category_ids.present?
- where("NOT COALESCE(" << category_alias << ".read_restricted, false) OR " << category_alias << ".id IN (:secure_category_ids)", secure_category_ids: secure_category_ids)
+ where("NOT COALESCE(" << category_alias << ".read_restricted, false) OR " << category_alias << ".id IN (:secure_category_ids)", secure_category_ids: secure_category_ids)
else
where("NOT COALESCE(" << category_alias << ".read_restricted, false)")
end
@@ -28,17 +28,17 @@ class SqlBuilder
def to_sql
sql = @sql.dup
- @sections.each do |k,v|
+ @sections.each do |k, v|
joined = nil
case k
when :select
joined = "SELECT " << v.join(" , ")
when :where, :where2
- joined = "WHERE " << v.map{|c| "(" << c << ")" }.join(" AND ")
+ joined = "WHERE " << v.map { |c| "(" << c << ")" }.join(" AND ")
when :join
- joined = v.map{|item| "JOIN " << item }.join("\n")
+ joined = v.map { |item| "JOIN " << item }.join("\n")
when :left_join
- joined = v.map{|item| "LEFT JOIN " << item }.join("\n")
+ joined = v.map { |item| "LEFT JOIN " << item }.join("\n")
when :limit
joined = "LIMIT " << v.last.to_s
when :offset
@@ -64,7 +64,7 @@ class SqlBuilder
if @args == {}
ActiveRecord::Base.exec_sql(sql)
else
- ActiveRecord::Base.exec_sql(sql,@args)
+ ActiveRecord::Base.exec_sql(sql, @args)
end
end
end
@@ -74,7 +74,7 @@ class SqlBuilder
end
class RailsDateTimeDecoder < PG::SimpleDecoder
- def decode(string, tuple=nil, field=nil)
+ def decode(string, tuple = nil, field = nil)
if Rails.version >= "4.2.0"
@caster ||= ActiveRecord::Type::DateTime.new
@caster.type_cast_from_database(string)
@@ -84,14 +84,13 @@ class SqlBuilder
end
end
-
class ActiveRecordTypeMap < PG::BasicTypeMapForResults
def initialize(connection)
super(connection)
rm_coder 0, 1114
add_coder RailsDateTimeDecoder.new(name: "timestamp", oid: 1114, format: 0)
- # we don't need deprecations
- self.default_type_map = PG::TypeMapInRuby.new
+ # we don't need deprecations
+ self.default_type_map = PG::TypeMapInRuby.new
end
end
diff --git a/lib/stats_socket.rb b/lib/stats_socket.rb
index 337bc7b058a..e1afc677baf 100644
--- a/lib/stats_socket.rb
+++ b/lib/stats_socket.rb
@@ -16,7 +16,7 @@ class StatsSocket < SocketServer
when "v8_stat"
stats = {}
ObjectSpace.each_object(MiniRacer::Context) do |context|
- context.heap_stats.each do |k,v|
+ context.heap_stats.each do |k, v|
stats[k] = (stats[k] || 0) + v
end
end
diff --git a/lib/stylesheet/compiler.rb b/lib/stylesheet/compiler.rb
index e6a7530b88b..e886c6bdcef 100644
--- a/lib/stylesheet/compiler.rb
+++ b/lib/stylesheet/compiler.rb
@@ -15,7 +15,7 @@ module Stylesheet
footer:after { content: '#{error}' }"
end
- def self.compile_asset(asset, options={})
+ def self.compile_asset(asset, options = {})
if Importer.special_imports[asset.to_s]
filename = "theme.scss"
@@ -26,12 +26,12 @@ module Stylesheet
file = File.read path
end
- compile(file,filename,options)
+ compile(file, filename, options)
end
- def self.compile(stylesheet, filename, options={})
- source_map_file = options[:source_map_file] || "#{filename.sub(".scss","")}.css.map";
+ def self.compile(stylesheet, filename, options = {})
+ source_map_file = options[:source_map_file] || "#{filename.sub(".scss", "")}.css.map"
engine = SassC::Engine.new(stylesheet,
importer: Importer,
@@ -44,7 +44,6 @@ module Stylesheet
theme_field: options[:theme_field],
load_paths: [ASSET_ROOT])
-
result = engine.render
if options[:rtl]
diff --git a/lib/stylesheet/manager.rb b/lib/stylesheet/manager.rb
index ebceac32698..85cd6bd0833 100644
--- a/lib/stylesheet/manager.rb
+++ b/lib/stylesheet/manager.rb
@@ -16,7 +16,7 @@ class Stylesheet::Manager
end
def self.clear_theme_cache!
- cache.hash.keys.select{|k| k =~ /theme/}.each{|k|cache.delete(k)}
+ cache.hash.keys.select { |k| k =~ /theme/ }.each { |k| cache.delete(k) }
end
def self.stylesheet_href(target = :desktop, theme_key = :missing)
@@ -54,9 +54,9 @@ class Stylesheet::Manager
end
def self.precompile_css
- themes = Theme.where('user_selectable OR key = ?', SiteSetting.default_theme_key).pluck(:key,:name)
+ themes = Theme.where('user_selectable OR key = ?', SiteSetting.default_theme_key).pluck(:key, :name)
themes << nil
- themes.each do |key,name|
+ themes.each do |key, name|
[:desktop, :mobile, :desktop_rtl, :mobile_rtl].each do |target|
theme_key = key || SiteSetting.default_theme_key
cache_key = "#{target}_#{theme_key}"
@@ -104,7 +104,7 @@ class Stylesheet::Manager
@theme_key = theme_key
end
- def compile(opts={})
+ def compile(opts = {})
unless opts[:force]
if File.exists?(stylesheet_fullpath)
unless StylesheetCache.where(target: qualified_target, digest: digest).exists?
@@ -120,7 +120,7 @@ class Stylesheet::Manager
end
rtl = @target.to_s =~ /_rtl$/
- css,source_map = begin
+ css, source_map = begin
Stylesheet::Compiler.compile_asset(
@target,
rtl: rtl,
@@ -216,7 +216,7 @@ class Stylesheet::Manager
end
def stylesheet_filename_no_digest
- stylesheet_filename(_with_digest=false)
+ stylesheet_filename(_with_digest = false)
end
def is_theme?
diff --git a/lib/stylesheet/watcher.rb b/lib/stylesheet/watcher.rb
index 138d750fd60..e7c9a5ab9b1 100644
--- a/lib/stylesheet/watcher.rb
+++ b/lib/stylesheet/watcher.rb
@@ -3,7 +3,7 @@ require 'listen'
module Stylesheet
class Watcher
- def self.watch(paths=nil)
+ def self.watch(paths = nil)
watcher = new(paths)
watcher.start
watcher
@@ -26,7 +26,6 @@ module Stylesheet
end
end
-
root = Rails.root.to_s
@paths.each do |watch|
Thread.new do
@@ -34,7 +33,7 @@ module Stylesheet
listener = Listen.to("#{root}/#{watch}", ignore: /xxxx/) do |modified, added, _|
paths = [modified, added].flatten
paths.compact!
- paths.map!{|long| long[(root.length+1)..-1]}
+ paths.map! { |long| long[(root.length + 1)..-1] }
process_change(paths)
end
rescue => e
@@ -55,7 +54,7 @@ module Stylesheet
Stylesheet::Manager.cache.clear
message = ["desktop", "mobile", "admin"].map do |name|
- {target: name, new_href: Stylesheet::Manager.stylesheet_href(name.to_sym) , theme_key: SiteSetting.default_theme_key}
+ { target: name, new_href: Stylesheet::Manager.stylesheet_href(name.to_sym), theme_key: SiteSetting.default_theme_key }
end
MessageBus.publish '/file-change', message
diff --git a/lib/suggested_topics_builder.rb b/lib/suggested_topics_builder.rb
index 9436bb5c5a9..cd857875b16 100644
--- a/lib/suggested_topics_builder.rb
+++ b/lib/suggested_topics_builder.rb
@@ -11,15 +11,14 @@ class SuggestedTopicsBuilder
@results = []
end
-
- def add_results(results, priority=:low)
+ def add_results(results, priority = :low)
# WARNING .blank? will execute an Active Record query
return unless results
# Only add results if we don't have those topic ids already
results = results.where('topics.id NOT IN (?)', @excluded_topic_ids)
- .where(visible: true)
+ .where(visible: true)
# If limit suggested to category is enabled, restrict to that category
if @category_id && SiteSetting.limit_suggested_to_category?
@@ -29,8 +28,8 @@ class SuggestedTopicsBuilder
unless results.empty?
# Keep track of the ids we've added
- @excluded_topic_ids.concat results.map {|r| r.id}
- splice_results(results,priority)
+ @excluded_topic_ids.concat results.map { |r| r.id }
+ splice_results(results, priority)
end
end
@@ -40,7 +39,7 @@ class SuggestedTopicsBuilder
# Topics from category @category_id need to be first in the list, all others after.
other_category_index = @results.index { |r| r.category_id != @category_id }
- category_results, other_category_results = results.partition{ |r| r.category_id == @category_id }
+ category_results, other_category_results = results.partition { |r| r.category_id == @category_id }
if other_category_index
@results.insert other_category_index, *category_results
@@ -66,7 +65,7 @@ class SuggestedTopicsBuilder
end
def category_results_left
- SiteSetting.suggested_topics - @results.count{|r| r.category_id == @category_id}
+ SiteSetting.suggested_topics - @results.count { |r| r.category_id == @category_id }
end
def size
diff --git a/lib/table_migration_helper.rb b/lib/table_migration_helper.rb
index 4d0308341bc..605555d49f8 100644
--- a/lib/table_migration_helper.rb
+++ b/lib/table_migration_helper.rb
@@ -45,7 +45,7 @@ SQL
new_name: new_name,
delay: "#{delay.to_i || 0} seconds",
after_migration: after_migration).to_a.length > 0
- on_drop&.call
+ on_drop&.call
ActiveRecord::Base.exec_sql("DROP TABLE #{old_name}")
end
diff --git a/lib/tasks/add_topic_to_quotes.rake b/lib/tasks/add_topic_to_quotes.rake
index 51d1cf2edd8..ff36e46bf06 100644
--- a/lib/tasks/add_topic_to_quotes.rake
+++ b/lib/tasks/add_topic_to_quotes.rake
@@ -6,4 +6,3 @@ task "add_topic_to_quotes" => :environment do
Post.update_all ["raw = ?, cooked = ?", new_raw, new_cooked], ["id = ?", p.id]
end
end
-
diff --git a/lib/tasks/admin.rake b/lib/tasks/admin.rake
index cd7e40d39fa..c57e03b7859 100644
--- a/lib/tasks/admin.rake
+++ b/lib/tasks/admin.rake
@@ -1,6 +1,6 @@
desc "invite an admin to this discourse instance"
-task "admin:invite", [:email] => [:environment] do |_,args|
+task "admin:invite", [:email] => [:environment] do |_, args|
email = args[:email]
if !email || email !~ /@/
puts "ERROR: Expecting rake admin:invite[some@email.com]"
@@ -20,7 +20,7 @@ task "admin:invite", [:email] => [:environment] do |_,args|
puts "Granting admin!"
user.grant_admin!
user.change_trust_level!(4)
- user.email_tokens.update_all confirmed: true
+ user.email_tokens.update_all confirmed: true
puts "Sending email!"
email_token = user.email_tokens.create(email: user.email)
@@ -42,8 +42,8 @@ task "admin:create" => :environment do
reset_password = ask("User with this email already exists! Do you want to reset the password for this email? (Y/n) ")
if (reset_password == "" || reset_password.downcase == 'y')
begin
- password = ask("Password: ") {|q| q.echo = false}
- password_confirmation = ask("Repeat password: ") {|q| q.echo = false}
+ password = ask("Password: ") { |q| q.echo = false }
+ password_confirmation = ask("Repeat password: ") { |q| q.echo = false }
end while password != password_confirmation
admin.password = password
end
@@ -53,8 +53,8 @@ task "admin:create" => :environment do
admin.email = email
admin.username = UserNameSuggester.suggest(admin.email)
begin
- password = ask("Password: ") {|q| q.echo = false}
- password_confirmation = ask("Repeat password: ") {|q| q.echo = false}
+ password = ask("Password: ") { |q| q.echo = false }
+ password_confirmation = ask("Repeat password: ") { |q| q.echo = false }
end while password != password_confirmation
admin.password = password
end
@@ -82,7 +82,7 @@ task "admin:create" => :environment do
if (grant_admin == "" || grant_admin.downcase == 'y')
admin.grant_admin!
admin.change_trust_level!(4)
- admin.email_tokens.update_all confirmed: true
+ admin.email_tokens.update_all confirmed: true
admin.activate
say("\nYour account now has Admin privileges!")
diff --git a/lib/tasks/api.rake b/lib/tasks/api.rake
index ef42e8a7bd4..5a4ef6c85e0 100644
--- a/lib/tasks/api.rake
+++ b/lib/tasks/api.rake
@@ -1,6 +1,6 @@
desc "generate api key if missing, return existing if already there"
task "api_key:get" => :environment do
- api_key = ApiKey.create_master_key
+ api_key = ApiKey.create_master_key
puts api_key.key
end
diff --git a/lib/tasks/assets.rake b/lib/tasks/assets.rake
index 57235096c16..9029f9cf4b5 100644
--- a/lib/tasks/assets.rake
+++ b/lib/tasks/assets.rake
@@ -71,10 +71,10 @@ def assets_path
"#{Rails.root}/public/assets"
end
-def compress_node(from,to)
+def compress_node(from, to)
to_path = "#{assets_path}/#{to}"
assets = cdn_relative_path("/assets")
- source_map_root = assets + ((d=File.dirname(from)) == "." ? "" : "/#{d}")
+ source_map_root = assets + ((d = File.dirname(from)) == "." ? "" : "/#{d}")
source_map_url = cdn_path "/assets/#{to}.map"
cmd = "uglifyjs '#{assets_path}/#{from}' -p relative -c -m -o '#{to_path}' --source-map-root '#{source_map_root}' --source-map '#{assets_path}/#{to}.map' --source-map-url '#{source_map_url}'"
@@ -89,7 +89,7 @@ def compress_node(from,to)
result
end
-def compress_ruby(from,to)
+def compress_ruby(from, to)
data = File.read("#{assets_path}/#{from}")
uglified, map = Uglifier.new(comments: :none,
@@ -99,7 +99,7 @@ def compress_ruby(from,to)
output_filename: File.basename(to)
}
)
- .compile_with_map(data)
+ .compile_with_map(data)
dest = "#{assets_path}/#{to}"
File.write(dest, uglified << "\n//# sourceMappingURL=#{cdn_path "/assets/#{to}.map"}")
@@ -121,11 +121,11 @@ def brotli(path)
end
end
-def compress(from,to)
+def compress(from, to)
if $node_uglify
- compress_node(from,to)
+ compress_node(from, to)
else
- compress_ruby(from,to)
+ compress_ruby(from, to)
end
end
@@ -148,10 +148,10 @@ task 'assets:precompile' => 'assets:precompile:before' do
concurrent? do |proc|
to_skip = Rails.configuration.assets.skip_minification || []
manifest.files
- .select{|k,v| k =~ /\.js$/}
- .each do |file, info|
+ .select { |k, v| k =~ /\.js$/ }
+ .each do |file, info|
- path = "#{assets_path}/#{file}"
+ path = "#{assets_path}/#{file}"
_file = (d = File.dirname(file)) == "." ? "_#{file}" : "#{d}/_#{File.basename(file)}"
_path = "#{assets_path}/#{_file}"
@@ -164,7 +164,7 @@ task 'assets:precompile' => 'assets:precompile:before' do
# We can specify some files to never minify
unless (ENV["DONT_MINIFY"] == "1") || to_skip.include?(info['logical_path'])
FileUtils.mv(path, _path)
- compress(_file,file)
+ compress(_file, file)
end
info["size"] = File.size(path)
diff --git a/lib/tasks/auto_annotate_models.rake b/lib/tasks/auto_annotate_models.rake
index dde0cc03644..617f473c9b6 100644
--- a/lib/tasks/auto_annotate_models.rake
+++ b/lib/tasks/auto_annotate_models.rake
@@ -1,36 +1,34 @@
# NOTE: only doing this in development as some production environments (Heroku)
# NOTE: are sensitive to local FS writes, and besides -- it's just not proper
# NOTE: to have a dev-mode tool do its thing in production.
-if(Rails.env.development? || Rails.env.test?)
+if (Rails.env.development? || Rails.env.test?)
task :set_annotation_options do
# You can override any of these by setting an environment variable of the
# same name.
- Annotate.set_defaults({
- 'position_in_routes' => "before",
- 'position_in_class' => "after",
- 'position_in_test' => "before",
- 'position_in_fixture' => "before",
- 'position_in_factory' => "before",
- 'show_indexes' => "true",
- 'simple_indexes' => "false",
- 'model_dir' => "app/models",
- 'include_version' => "false",
- 'require' => "",
- 'exclude_tests' => "true",
- 'exclude_fixtures' => "true",
- 'exclude_helpers' => "true",
- 'exclude_factories' => "true",
- 'exclude_serializers' => "true",
- 'exclude_controllers' => "true",
- 'ignore_model_sub_dir' => "false",
- 'skip_on_db_migrate' => "true",
- 'format_bare' => "true",
- 'format_rdoc' => "false",
- 'format_markdown' => "false",
- 'sort' => "false",
- 'force' => "false",
- 'trace' => "false",
- })
+ Annotate.set_defaults('position_in_routes' => "before",
+ 'position_in_class' => "after",
+ 'position_in_test' => "before",
+ 'position_in_fixture' => "before",
+ 'position_in_factory' => "before",
+ 'show_indexes' => "true",
+ 'simple_indexes' => "false",
+ 'model_dir' => "app/models",
+ 'include_version' => "false",
+ 'require' => "",
+ 'exclude_tests' => "true",
+ 'exclude_fixtures' => "true",
+ 'exclude_helpers' => "true",
+ 'exclude_factories' => "true",
+ 'exclude_serializers' => "true",
+ 'exclude_controllers' => "true",
+ 'ignore_model_sub_dir' => "false",
+ 'skip_on_db_migrate' => "true",
+ 'format_bare' => "true",
+ 'format_rdoc' => "false",
+ 'format_markdown' => "false",
+ 'sort' => "false",
+ 'force' => "false",
+ 'trace' => "false")
end
end
diff --git a/lib/tasks/autospec.rake b/lib/tasks/autospec.rake
index 3df52d0f051..2f1de2c4863 100644
--- a/lib/tasks/autospec.rake
+++ b/lib/tasks/autospec.rake
@@ -6,7 +6,7 @@ desc "Run all specs automatically as needed"
task "autospec" => :environment do
require 'autospec/manager'
- debug = ARGV.any?{ |a| a == "d" || a == "debug" } || ENV["DEBUG"]
+ debug = ARGV.any? { |a| a == "d" || a == "debug" } || ENV["DEBUG"]
force_polling = ARGV.any? { |a| a == "p" || a == "polling" }
latency = ((ARGV.find { |a| a =~ /l=|latency=/ } || "").split("=")[1] || 3).to_i
diff --git a/lib/tasks/avatars.rake b/lib/tasks/avatars.rake
index 4c1da8fd54f..5b2df506cf6 100644
--- a/lib/tasks/avatars.rake
+++ b/lib/tasks/avatars.rake
@@ -24,7 +24,7 @@ task "avatars:clean" => :environment do
OptimizedImage.where("upload_id IN (SELECT custom_upload_id FROM user_avatars) OR
upload_id IN (SELECT gravatar_upload_id FROM user_avatars) OR
upload_id IN (SELECT uploaded_avatar_id FROM users)")
- .find_each do |optimized_image|
+ .find_each do |optimized_image|
optimized_image.destroy!
putc "." if (i += 1) % 10 == 0
end
diff --git a/lib/tasks/backfill.thor b/lib/tasks/backfill.thor
index a69a58803d3..44afebbda59 100644
--- a/lib/tasks/backfill.thor
+++ b/lib/tasks/backfill.thor
@@ -1,7 +1,6 @@
class Backfill < Thor
desc "link_titles", "Backfills link titles"
-
def link_titles
require './config/environment'
topic_links = TopicLink.where(crawled_at: nil, internal: false)
@@ -13,4 +12,3 @@ class Backfill < Thor
end
end
end
-
diff --git a/lib/tasks/build_test_topic.rake b/lib/tasks/build_test_topic.rake
index e135cabc928..7c64a76566e 100644
--- a/lib/tasks/build_test_topic.rake
+++ b/lib/tasks/build_test_topic.rake
@@ -4,7 +4,6 @@ desc 'create pushstate/replacestate test topic'
task 'build_test_topic' => :environment do
puts 'Creating topic'
-
# Acceptable options:
#
# raw - raw text of post
@@ -33,7 +32,7 @@ task 'build_test_topic' => :environment do
links = []
[-30, -10, 10, 30].each do |offset|
where = (post_number + offset)
- if where >= 1 and where <= 100
+ if where >= 1 && where <= 100
links << "Link to ##{where}: #{topic_url}/#{where}"
end
end
diff --git a/lib/tasks/cdn.rake b/lib/tasks/cdn.rake
index c9b793cbf34..0e13c3ae8c7 100644
--- a/lib/tasks/cdn.rake
+++ b/lib/tasks/cdn.rake
@@ -34,7 +34,7 @@ task 'assets:prestage' => :environment do |t|
"id" => config["id"],
"login" => config["login"],
"passwd" => config["password"],
- "json" => {"prefetch_paths" => asset}.to_json
+ "json" => { "prefetch_paths" => asset }.to_json
)
response = http.request(request)
@@ -43,7 +43,7 @@ task 'assets:prestage' => :environment do |t|
failed_assets.push(asset)
end
end
-
+
if failed_assets.length > 0
raise "Failed to pre-stage #{failed_assets.length}/#{assets.length} files"
end
diff --git a/lib/tasks/docker.rake b/lib/tasks/docker.rake
index b6089394478..64a4514ae78 100644
--- a/lib/tasks/docker.rake
+++ b/lib/tasks/docker.rake
@@ -16,7 +16,7 @@
# this can also be set to a branch, e.g. "origin/tests-passed"
#
# Example usage:
-# Run all core and plugin tests:
+# Run all core and plugin tests:
# docker run discourse/discourse_test:release
# Run only rspec tests:
# docker run -e RUBY_ONLY=1 discourse/discourse_test:release
@@ -25,7 +25,6 @@
# Run tests for a specific plugin (with a plugin mounted from host filesystem):
# docker run -e SKIP_CORE=1 SINGLE_PLUGIN='my-awesome-plugin' -v $(pwd)/my-awesome-plugin:/var/www/discourse/plugins/my-awesome-plugin discourse/discourse_test:release
-
def run_or_fail(command)
pid = Process.spawn(command)
Process.wait(pid)
@@ -53,7 +52,6 @@ task 'docker:test' do
puts "Starting postgres"
@pg_pid = Process.spawn("#{@postgres_bin}postmaster -D tmp/test_data/pg")
-
ENV["RAILS_ENV"] = "test"
@good = run_or_fail("bundle exec rake db:create db:migrate")
diff --git a/lib/tasks/emoji.rake b/lib/tasks/emoji.rake
index a601dbc3c87..5ff933cab7f 100644
--- a/lib/tasks/emoji.rake
+++ b/lib/tasks/emoji.rake
@@ -446,10 +446,10 @@ def generate_emoji_groups(keywords)
emoji_list_section = title_section.first.parent.parent.next_element
emoji_list_section.css("a.plain img").each do |link|
emoji_code = link.attr("title")
- .scan(/U\+(.{4,5})\b/)
- .flatten
- .map { |code| code.downcase.strip }
- .join("_")
+ .scan(/U\+(.{4,5})\b/)
+ .flatten
+ .map { |code| code.downcase.strip }
+ .join("_")
emoji_char = code_to_emoji(emoji_code)
@@ -489,16 +489,16 @@ def write_db_json(emojis)
# skin tone variations of emojis shouldn’t appear in autocomplete
emojis_without_tones = emojis
.select { |char, config|
- !FITZPATRICK_SCALE.any? { |scale|
- codepoints_to_code(char.codepoints, config["fitzpatrick_scale"])[scale]
- }
- }
+ !FITZPATRICK_SCALE.any? { |scale|
+ codepoints_to_code(char.codepoints, config["fitzpatrick_scale"])[scale]
+ }
+ }
.map { |char, config|
- {
- "code" => codepoints_to_code(char.codepoints, config["fitzpatrick_scale"]).tr("_", "-"),
- "name" => config["name"]
- }
+ {
+ "code" => codepoints_to_code(char.codepoints, config["fitzpatrick_scale"]).tr("_", "-"),
+ "name" => config["name"]
}
+ }
emoji_with_tones = emojis
.select { |code, config| config["fitzpatrick_scale"] }
@@ -532,9 +532,9 @@ end
def codepoints_to_code(codepoints, fitzpatrick_scale)
codepoints = codepoints
- .map { |c| c.to_s(16).rjust(4, "0") }
- .join("_")
- .downcase
+ .map { |c| c.to_s(16).rjust(4, "0") }
+ .join("_")
+ .downcase
if !fitzpatrick_scale
codepoints.gsub!(/_fe0f$/, "")
diff --git a/lib/tasks/highlight.rake b/lib/tasks/highlight.rake
index d6baa771fbb..f0e0c2bc209 100644
--- a/lib/tasks/highlight.rake
+++ b/lib/tasks/highlight.rake
@@ -1,7 +1,7 @@
desc "download latest version of highlight and prepare it"
task "highlightjs:update" do
- def run(cmd, opts={})
+ def run(cmd, opts = {})
puts cmd
system(cmd, opts.merge(out: $stdout, err: :out))
end
diff --git a/lib/tasks/i18n_stats.rake b/lib/tasks/i18n_stats.rake
index 59884021368..ae48b2e9686 100644
--- a/lib/tasks/i18n_stats.rake
+++ b/lib/tasks/i18n_stats.rake
@@ -9,7 +9,7 @@ task "i18n:stats" => :environment do
# detect pluralizable string
if (source["other"] != nil)
- target[namespace] = {pluralizable: true, content: source}
+ target[namespace] = { pluralizable: true, content: source }
return
end
@@ -36,7 +36,7 @@ task "i18n:stats" => :environment do
same = []
total = a.count
- a.each do |key,value|
+ a.each do |key, value|
if b[key] == nil
minus << key
end
@@ -45,13 +45,13 @@ task "i18n:stats" => :environment do
end
end
- b.each do |key,value|
+ b.each do |key, value|
if a[key] == nil
plus << key
end
end
- a.each do |key,value|
+ a.each do |key, value|
if value.kind_of?(Hash)
if value[:pluralizable]
plural_keys.each do |pl|
@@ -61,7 +61,7 @@ task "i18n:stats" => :environment do
end
if b[key] != nil && b[key].kind_of?(Hash)
- b[key][:content].each do |pl,val|
+ b[key][:content].each do |pl, val|
if ! plural_keys.include?(pl)
if a[key][:content]["zero"] == nil
plus << "#{key}.#{pl}"
@@ -80,11 +80,11 @@ task "i18n:stats" => :environment do
end
end
- return plus,minus,same,total
+ return plus, minus, same, total
end
def get_plurals(locale)
- I18n.t("i18n.plural.keys", :locale => locale).map { |x| x.to_s }
+ I18n.t("i18n.plural.keys", locale: locale).map { |x| x.to_s }
end
puts "Discourse Translation Status Script"
diff --git a/lib/tasks/images.rake b/lib/tasks/images.rake
index c754841f05d..b74a7720a9f 100644
--- a/lib/tasks/images.rake
+++ b/lib/tasks/images.rake
@@ -2,7 +2,7 @@ require_dependency "file_helper"
task "images:compress" => :environment do
images = Dir.glob("#{Rails.root}/app/**/*.png")
- image_sizes = Hash[*images.map{|i| [i,File.size(i)]}.to_a.flatten]
+ image_sizes = Hash[*images.map { |i| [i, File.size(i)] }.to_a.flatten]
FileHelper.optimize_images!(images) do |name, optimized|
if optimized
new_size = File.size(name)
diff --git a/lib/tasks/import.rake b/lib/tasks/import.rake
index a462631c8c4..a0ec50de6c4 100644
--- a/lib/tasks/import.rake
+++ b/lib/tasks/import.rake
@@ -293,7 +293,6 @@ def update_posts
AND reply_count <> Y.replies
SQL
-
# -- TODO: ensure this is how this works!
# WITH X AS (
# SELECT pr.post_id, p.user_id
diff --git a/lib/tasks/integration.rake b/lib/tasks/integration.rake
index 1317eb6b1d8..32e2a8d1e98 100644
--- a/lib/tasks/integration.rake
+++ b/lib/tasks/integration.rake
@@ -38,7 +38,6 @@ task 'integration:create_fixtures' => :environment do
end
-
def fake_xhr(url)
uri = URI(url)
diff --git a/lib/tasks/plugin.rake b/lib/tasks/plugin.rake
index 51c674132f0..56cfb312072 100644
--- a/lib/tasks/plugin.rake
+++ b/lib/tasks/plugin.rake
@@ -49,7 +49,7 @@ task 'plugin:install', :repo do |t, args|
plugin_path = File.expand_path('plugins/' + name)
if File.directory?(File.expand_path(plugin_path))
- abort('Plugin directory, ' + plugin_path + ', already exists.')
+ abort('Plugin directory, ' + plugin_path + ', already exists.')
end
clone_status = system('git clone ' + repo + ' ' + plugin_path)
diff --git a/lib/tasks/posts.rake b/lib/tasks/posts.rake
index 0338f3c9fcc..f7793400401 100644
--- a/lib/tasks/posts.rake
+++ b/lib/tasks/posts.rake
@@ -13,7 +13,7 @@ task 'posts:fix_letter_avatars' => :environment do
return unless SiteSetting.external_system_avatars_enabled
search = Post.where("user_id <> -1")
- .where("raw LIKE '%/letter\_avatar/%' OR cooked LIKE '%/letter\_avatar/%'")
+ .where("raw LIKE '%/letter\_avatar/%' OR cooked LIKE '%/letter\_avatar/%'")
rebaked = 0
total = search.count
@@ -27,7 +27,7 @@ task 'posts:fix_letter_avatars' => :environment do
end
desc 'Rebake all posts matching string/regex and optionally delay the loop'
-task 'posts:rebake_match', [:pattern, :type, :delay] => [:environment] do |_,args|
+task 'posts:rebake_match', [:pattern, :type, :delay] => [:environment] do |_, args|
pattern = args[:pattern]
type = args[:type]
type = type.downcase if type
@@ -106,7 +106,7 @@ task 'posts:normalize_code' => :environment do
Post.where("raw like '%%%'").each do |p|
normalized = Import::Normalize.normalize_code_blocks(p.raw, lang)
if normalized != p.raw
- p.revise(Discourse.system_user, { raw: normalized })
+ p.revise(Discourse.system_user, raw: normalized)
putc "."
i += 1
end
@@ -116,14 +116,14 @@ task 'posts:normalize_code' => :environment do
puts "#{i} posts normalized!"
end
-def remap_posts(find, replace="")
+def remap_posts(find, replace = "")
i = 0
Post.where("raw LIKE ?", "%#{find}%").each do |p|
new_raw = p.raw.dup
new_raw = new_raw.gsub!(/#{Regexp.escape(find)}/, replace) || new_raw
if new_raw != p.raw
- p.revise(Discourse.system_user, { raw: new_raw }, { bypass_bump: true, skip_revision: true })
+ p.revise(Discourse.system_user, { raw: new_raw }, bypass_bump: true, skip_revision: true)
putc "."
i += 1
end
@@ -132,7 +132,7 @@ def remap_posts(find, replace="")
end
desc 'Remap all posts matching specific string'
-task 'posts:remap', [:find, :replace] => [:environment] do |_,args|
+task 'posts:remap', [:find, :replace] => [:environment] do |_, args|
find = args[:find]
replace = args[:replace]
@@ -150,7 +150,7 @@ task 'posts:remap', [:find, :replace] => [:environment] do |_,args|
end
desc 'Delete occurrence of a word/string'
-task 'posts:delete_word', [:find] => [:environment] do |_,args|
+task 'posts:delete_word', [:find] => [:environment] do |_, args|
require 'highline/import'
find = args[:find]
diff --git a/lib/tasks/qunit.rake b/lib/tasks/qunit.rake
index 715af0ed6ef..a80e443ed4a 100644
--- a/lib/tasks/qunit.rake
+++ b/lib/tasks/qunit.rake
@@ -10,7 +10,7 @@ task "qunit:test", [:timeout, :qunit_path] => :environment do |_, args|
end
# ensure we have this port available
- def port_available? port
+ def port_available?(port)
server = TCPServer.open port
server.close
true
@@ -26,9 +26,9 @@ task "qunit:test", [:timeout, :qunit_path] => :environment do |_, args|
unless pid = fork
Discourse.after_fork
- Rack::Server.start(:config => "config.ru",
- :AccessLog => [],
- :Port => port)
+ Rack::Server.start(config: "config.ru",
+ AccessLog: [],
+ Port: port)
exit
end
diff --git a/lib/tasks/release_note.rake b/lib/tasks/release_note.rake
index 54c40e267b1..4023727d45d 100644
--- a/lib/tasks/release_note.rake
+++ b/lib/tasks/release_note.rake
@@ -55,14 +55,14 @@ end
def escape_brackets(line)
line.gsub("<", "`<")
- .gsub(">", ">`")
- .gsub("[", "`[")
- .gsub("]", "]`")
+ .gsub(">", ">`")
+ .gsub("[", "`[")
+ .gsub("]", "]`")
end
def split_comments(text)
text = normalize_terms(text)
- terms = ["FIX:", "FEATURE:", "UX:", "SECURITY:" ,"PERF:"]
+ terms = ["FIX:", "FEATURE:", "UX:", "SECURITY:" , "PERF:"]
terms.each do |term|
text = newlines_at_term(text, term)
end
@@ -79,7 +79,7 @@ end
def newlines_at_term(text, term)
if text.include?(term)
- text = text.split(term).map{ |l| l.strip }.join("\n#{term} ")
+ text = text.split(term).map { |l| l.strip }.join("\n#{term} ")
end
text
end
diff --git a/lib/tasks/scheduler.rake b/lib/tasks/scheduler.rake
index 208d524ee59..d7d1efeb86b 100644
--- a/lib/tasks/scheduler.rake
+++ b/lib/tasks/scheduler.rake
@@ -1,21 +1,21 @@
desc "This task is called by the Heroku scheduler add-on"
# Every day at 6am
-task :enqueue_digest_emails => :environment do
+task enqueue_digest_emails: :environment do
Jobs::EnqueueDigestEmails.new.execute(nil)
end
# Every day at 4am
-task :category_stats => :environment do
+task category_stats: :environment do
Jobs::CategoryStats.new.execute(nil)
end
# Every 10 minutes
-task :periodical_updates => :environment do
+task periodical_updates: :environment do
Jobs::PeriodicalUpdates.new.execute(nil)
end
# Every day
-task :version_check => :environment do
+task version_check: :environment do
Jobs::VersionCheck.new.execute(nil)
-end
\ No newline at end of file
+end
diff --git a/lib/tasks/search.rake b/lib/tasks/search.rake
index c944812d793..ef84747cce4 100644
--- a/lib/tasks/search.rake
+++ b/lib/tasks/search.rake
@@ -2,7 +2,7 @@ task "search:reindex" => :environment do
ENV['RAILS_DB'] ? reindex_search : reindex_search_all_sites
end
-def reindex_search(db=RailsMultisite::ConnectionManagement.current_db)
+def reindex_search(db = RailsMultisite::ConnectionManagement.current_db)
puts "Reindexing '#{db}'"
puts ""
puts "Posts:"
diff --git a/lib/tasks/typepad.thor b/lib/tasks/typepad.thor
index 26613f89b61..7db75d415ed 100644
--- a/lib/tasks/typepad.thor
+++ b/lib/tasks/typepad.thor
@@ -45,7 +45,7 @@ class Typepad < Thor
end
end
- entries.each_with_index do |e,i|
+ entries.each_with_index do |e, i|
if e[:title] =~ /Head/
puts "#{i}: #{e[:title]}"
end
@@ -56,7 +56,7 @@ class Typepad < Thor
puts "Importing #{entries.size} entries"
entries.each_with_index do |entry, idx|
- puts "Importing (#{idx+1}/#{entries.size})"
+ puts "Importing (#{idx + 1}/#{entries.size})"
next if entry[:body].blank?
puts entry[:unique_url]
@@ -219,7 +219,7 @@ class Typepad < Thor
current << c
end
end
- segments.delete_if {|s| s.nil? || s.size < 2}
+ segments.delete_if { |s| s.nil? || s.size < 2 }
segments << current
comment[:author] = segments[0]
diff --git a/lib/tasks/uploads.rake b/lib/tasks/uploads.rake
index e8c5dc67b29..9da415dba0b 100644
--- a/lib/tasks/uploads.rake
+++ b/lib/tasks/uploads.rake
@@ -27,8 +27,8 @@ def gather_uploads
puts "", "Gathering uploads for '#{current_db}'...", ""
Upload.where("url ~ '^\/uploads\/'")
- .where("url !~ '^\/uploads\/#{current_db}'")
- .find_each do |upload|
+ .where("url !~ '^\/uploads\/#{current_db}'")
+ .find_each do |upload|
begin
old_db = upload.url[/^\/uploads\/([^\/]+)\//, 1]
from = upload.url.dup
@@ -100,7 +100,7 @@ def guess_filename(url, raw)
filename ||= File.basename(url)
filename
rescue
- nil
+ nil
ensure
f.try(:close!) rescue nil
end
@@ -203,8 +203,8 @@ def migrate_to_s3
# Migrate all uploads
Upload.where.not(sha1: nil)
- .where("url NOT LIKE '#{s3.absolute_base_url}%'")
- .find_each do |upload|
+ .where("url NOT LIKE '#{s3.absolute_base_url}%'")
+ .find_each do |upload|
# remove invalid uploads
if upload.url.blank?
upload.destroy!
@@ -215,7 +215,7 @@ def migrate_to_s3
# retrieve the path to the local file
path = local.path_for(upload)
# make sure the file exists locally
- if !path or !File.exists?(path)
+ if !path || !File.exists?(path)
putc "X"
next
end
diff --git a/lib/tasks/user_actions.rake b/lib/tasks/user_actions.rake
index ffba1f2b228..f457454978e 100644
--- a/lib/tasks/user_actions.rake
+++ b/lib/tasks/user_actions.rake
@@ -2,9 +2,9 @@ desc "rebuild the user_actions table"
task "user_actions:rebuild" => :environment do
MessageBus.off
UserAction.delete_all
- PostAction.all.each{|i| UserActionCreator.log_post_action(i)}
- Topic.all.each {|i| UserActionCreator.log_topic(i)}
- Post.all.each {|i| UserActionCreator.log_post(i)}
+ PostAction.all.each { |i| UserActionCreator.log_post_action(i) }
+ Topic.all.each { |i| UserActionCreator.log_topic(i) }
+ Post.all.each { |i| UserActionCreator.log_post(i) }
Notification.all.each do |notification|
UserActionCreator.log_notification(notification.post,
notification.user,
@@ -12,4 +12,3 @@ task "user_actions:rebuild" => :environment do
notification.user)
end
end
-
diff --git a/lib/tasks/users.rake b/lib/tasks/users.rake
index 7772bba6600..d01d30b744b 100644
--- a/lib/tasks/users.rake
+++ b/lib/tasks/users.rake
@@ -1,5 +1,5 @@
desc "Change topic/post ownership of all the topics/posts by a specific user (without creating new revision)"
-task "users:change_post_ownership", [:old_username, :new_username, :archetype] => [:environment] do |_,args|
+task "users:change_post_ownership", [:old_username, :new_username, :archetype] => [:environment] do |_, args|
old_username = args[:old_username]
new_username = args[:new_username]
archetype = args[:archetype]
diff --git a/lib/text_sentinel.rb b/lib/text_sentinel.rb
index e86a897b67c..fa88e647f96 100644
--- a/lib/text_sentinel.rb
+++ b/lib/text_sentinel.rb
@@ -11,12 +11,12 @@ class TextSentinel
ENTROPY_SCALE ||= 0.7
- def initialize(text, opts=nil)
+ def initialize(text, opts = nil)
@opts = opts || {}
@text = text.to_s.encode('UTF-8', invalid: :replace, undef: :replace, replace: '')
end
- def self.body_sentinel(text, opts={})
+ def self.body_sentinel(text, opts = {})
entropy = SiteSetting.body_min_entropy
if opts[:private_message]
scale_entropy = SiteSetting.min_private_message_post_length.to_f / SiteSetting.min_post_length.to_f
@@ -41,7 +41,7 @@ class TextSentinel
# Non-ASCII characters are weighted heavier since they contain more "information"
def entropy
chars = @text.to_s.strip.split('')
- @entropy ||= chars.pack('M*'*chars.size).gsub("\n",'').split('=').uniq.size
+ @entropy ||= chars.pack('M*' * chars.size).gsub("\n", '').split('=').uniq.size
end
def valid?
@@ -74,7 +74,6 @@ class TextSentinel
@opts[:max_word_length].blank? || @text.split(/\s|\/|-|\.|:/).map(&:size).max <= @opts[:max_word_length]
end
-
def seems_quiet?
# We don't allow all upper case content
SiteSetting.allow_uppercase_posts || @text == @text.mb_chars.downcase.to_s || @text != @text.mb_chars.upcase.to_s
diff --git a/lib/timeline_lookup.rb b/lib/timeline_lookup.rb
index 1c62d879f52..90d4a5e02a6 100644
--- a/lib/timeline_lookup.rb
+++ b/lib/timeline_lookup.rb
@@ -2,7 +2,7 @@ module TimelineLookup
# Given an array of tuples (id, post_number, days_ago), return at most `max_values` worth of a
# lookup table to help the front end timeline display dates associated with posts
- def self.build(tuples, max_values=300)
+ def self.build(tuples, max_values = 300)
result = []
every = (tuples.size.to_f / max_values).ceil
@@ -14,7 +14,7 @@ module TimelineLookup
days_ago = t[2]
if (days_ago != last_days_ago)
- result << [idx+1, days_ago]
+ result << [idx + 1, days_ago]
last_days_ago = days_ago
end
diff --git a/lib/topic_creator.rb b/lib/topic_creator.rb
index ecd35fd116b..885f4f26053 100644
--- a/lib/topic_creator.rb
+++ b/lib/topic_creator.rb
@@ -80,13 +80,16 @@ class TopicCreator
topic.topic_allowed_groups(true).each do |tag|
tag.group.group_users.each do |gu|
next if gu.user_id == -1 || gu.user_id == topic.user_id
- action = case gu.notification_level
- when TopicUser.notification_levels[:tracking] then "track!"
- when TopicUser.notification_levels[:regular] then "regular!"
- when TopicUser.notification_levels[:muted] then "mute!"
- when TopicUser.notification_levels[:watching] then "watch!"
- else "track!"
- end
+
+ action =
+ case gu.notification_level
+ when TopicUser.notification_levels[:tracking] then "track!"
+ when TopicUser.notification_levels[:regular] then "regular!"
+ when TopicUser.notification_levels[:muted] then "mute!"
+ when TopicUser.notification_levels[:watching] then "watch!"
+ else "track!"
+ end
+
topic.notifier.send(action, gu.user_id)
end
end
@@ -152,7 +155,7 @@ class TopicCreator
def setup_auto_close_time(topic)
return unless @opts[:auto_close_time].present?
return unless @guardian.can_moderate?(topic)
- topic.set_auto_close(@opts[:auto_close_time], {by_user: @user})
+ topic.set_auto_close(@opts[:auto_close_time], by_user: @user)
end
def process_private_message(topic)
@@ -163,8 +166,8 @@ class TopicCreator
rollback_with!(topic, :no_user_selected)
end
- add_users(topic,@opts[:target_usernames])
- add_groups(topic,@opts[:target_group_names])
+ add_users(topic, @opts[:target_usernames])
+ add_groups(topic, @opts[:target_group_names])
topic.topic_allowed_users.build(user_id: @user.id)
end
diff --git a/lib/topic_list_responder.rb b/lib/topic_list_responder.rb
index 05133300855..b53380345f6 100644
--- a/lib/topic_list_responder.rb
+++ b/lib/topic_list_responder.rb
@@ -21,4 +21,3 @@ module TopicListResponder
end
end
-
diff --git a/lib/topic_query.rb b/lib/topic_query.rb
index 51deb6654fa..934c3e84ef0 100644
--- a/lib/topic_query.rb
+++ b/lib/topic_query.rb
@@ -46,7 +46,6 @@ class TopicQuery
no_definitions)
end
-
# Maps `order` to a columns in `topics`
SORTABLE_MAPPING = {
'likes' => 'like_count',
@@ -80,21 +79,21 @@ class TopicQuery
def self.apply_custom_filters(results, topic_query)
if @custom_filters
- @custom_filters.each do |key,filter|
+ @custom_filters.each do |key, filter|
results = filter.call(results, topic_query)
end
end
results
end
- def initialize(user=nil, options={})
+ def initialize(user = nil, options = {})
options.assert_valid_keys(TopicQuery.valid_options)
@options = options.dup
@user = user
@guardian = Guardian.new(@user)
end
- def joined_topic_user(list=nil)
+ def joined_topic_user(list = nil)
(list || Topic).joins("LEFT OUTER JOIN topic_users AS tu ON (topics.id = tu.topic_id AND tu.user_id = #{@user.id.to_i})")
end
@@ -108,8 +107,8 @@ class TopicQuery
if topic.private_message?
group_ids = topic.topic_allowed_groups
- .where('group_id IN (SELECT group_id FROM group_users WHERE user_id = :user_id)', user_id: @user.id)
- .pluck(:group_id)
+ .where('group_id IN (SELECT group_id FROM group_users WHERE user_id = :user_id)', user_id: @user.id)
+ .pluck(:group_id)
{
topic: topic,
my_group_ids: group_ids,
@@ -151,7 +150,7 @@ class TopicQuery
builder.add_results(random_suggested(topic, builder.results_left, builder.excluded_topic_ids)) unless builder.full?
end
- params = {unordered: true}
+ params = { unordered: true }
if topic.private_message?
params[:preload_posters] = true
end
@@ -170,19 +169,19 @@ class TopicQuery
end
def list_new
- create_list(:new, {unordered: true}, new_results)
+ create_list(:new, { unordered: true }, new_results)
end
def list_unread
- create_list(:unread, {unordered: true}, unread_results)
+ create_list(:unread, { unordered: true }, unread_results)
end
def list_posted
- create_list(:posted) {|l| l.where('tu.posted') }
+ create_list(:posted) { |l| l.where('tu.posted') }
end
def list_bookmarks
- create_list(:bookmarks) {|l| l.where('tu.bookmarked') }
+ create_list(:bookmarks) { |l| l.where('tu.bookmarked') }
end
def list_top_for(period)
@@ -207,14 +206,14 @@ class TopicQuery
def not_archived(list, user)
list.joins("LEFT JOIN user_archived_messages um
ON um.user_id = #{user.id.to_i} AND um.topic_id = topics.id")
- .where('um.user_id IS NULL')
+ .where('um.user_id IS NULL')
end
def list_private_messages(user)
list = private_messages_for(user, :user)
list = not_archived(list, user)
- .where('NOT (topics.participant_count = 1 AND topics.user_id = ?)', user.id)
+ .where('NOT (topics.participant_count = 1 AND topics.user_id = ?)', user.id)
create_list(:private_messages, {}, list)
end
@@ -262,8 +261,8 @@ class TopicQuery
def list_category_topic_ids(category)
query = default_results(category: category.id)
pinned_ids = query.where('pinned_at IS NOT NULL AND category_id = ?', category.id)
- .limit(nil)
- .order('pinned_at DESC').pluck(:id)
+ .limit(nil)
+ .order('pinned_at DESC').pluck(:id)
non_pinned_ids = query.where('pinned_at IS NULL OR category_id <> ?', category.id).pluck(:id)
(pinned_ids + non_pinned_ids)
end
@@ -276,8 +275,8 @@ class TopicQuery
def self.new_filter(list, treat_as_new_topic_start_date)
list.where("topics.created_at >= :created_at", created_at: treat_as_new_topic_start_date)
- .where("tu.last_read_post_number IS NULL")
- .where("COALESCE(tu.notification_level, :tracking) >= :tracking", tracking: TopicUser.notification_levels[:tracking])
+ .where("tu.last_read_post_number IS NULL")
+ .where("COALESCE(tu.notification_level, :tracking) >= :tracking", tracking: TopicUser.notification_levels[:tracking])
end
def self.unread_filter(list, user_id, opts)
@@ -293,8 +292,8 @@ class TopicQuery
col_name = opts[:staff] ? "highest_staff_post_number" : "highest_post_number"
list
- .where("tu.last_read_post_number < topics.#{col_name}")
- .where("COALESCE(tu.notification_level, :regular) >= :tracking",
+ .where("tu.last_read_post_number < topics.#{col_name}")
+ .where("COALESCE(tu.notification_level, :regular) >= :tracking",
regular: TopicUser.notification_levels[:regular], tracking: TopicUser.notification_levels[:tracking])
end
@@ -322,12 +321,12 @@ class TopicQuery
end
- def create_list(filter, options={}, topics = nil)
+ def create_list(filter, options = {}, topics = nil)
topics ||= default_results(options)
topics = yield(topics) if block_given?
options = options.merge(@options)
- if ["activity","default"].include?(options[:order] || "activity") &&
+ if ["activity", "default"].include?(options[:order] || "activity") &&
!options[:unordered] &&
filter != :private_messages
topics = prioritize_pinned_topics(topics, options)
@@ -348,7 +347,7 @@ class TopicQuery
end
topics.each do |t|
- t.allowed_user_ids = filter == :private_messages ? t.allowed_users.map{|u| u.id} : []
+ t.allowed_user_ids = filter == :private_messages ? t.allowed_users.map { |u| u.id } : []
end
list = TopicList.new(filter, @user, topics, options.merge(@options))
@@ -356,7 +355,7 @@ class TopicQuery
list
end
- def latest_results(options={})
+ def latest_results(options = {})
result = default_results(options)
result = remove_muted_topics(result, @user) unless options && options[:state] == "muted".freeze
result = remove_muted_categories(result, @user, exclude: options[:category])
@@ -370,12 +369,12 @@ class TopicQuery
result
end
- def unread_results(options={})
+ def unread_results(options = {})
result = TopicQuery.unread_filter(
- default_results(options.reverse_merge(:unordered => true)),
+ default_results(options.reverse_merge(unordered: true)),
@user&.id,
staff: @user&.staff?)
- .order('CASE WHEN topics.user_id = tu.user_id THEN 1 ELSE 2 END')
+ .order('CASE WHEN topics.user_id = tu.user_id THEN 1 ELSE 2 END')
self.class.results_filter_callbacks.each do |filter_callback|
result = filter_callback.call(:unread, result, @user, options)
@@ -384,10 +383,10 @@ class TopicQuery
suggested_ordering(result, options)
end
- def new_results(options={})
+ def new_results(options = {})
# TODO does this make sense or should it be ordered on created_at
# it is ordering on bumped_at now
- result = TopicQuery.new_filter(default_results(options.reverse_merge(:unordered => true)), @user.user_option.treat_as_new_topic_start_date)
+ result = TopicQuery.new_filter(default_results(options.reverse_merge(unordered: true)), @user.user_option.treat_as_new_topic_start_date)
result = remove_muted_topics(result, @user)
result = remove_muted_categories(result, @user, exclude: options[:category])
result = remove_muted_tags(result, @user, options)
@@ -436,8 +435,8 @@ class TopicQuery
end
result = result.joins("LEFT OUTER JOIN topic_users AS tu ON (topics.id = tu.topic_id AND tu.user_id = #{user.id.to_i})")
- .order("topics.bumped_at DESC")
- .private_messages
+ .order("topics.bumped_at DESC")
+ .private_messages
result = result.limit(options[:per_page]) unless options[:limit] == false
result = result.visible if options[:visible] || @user.nil? || @user.regular?
@@ -489,9 +488,8 @@ class TopicQuery
category_id
end
-
# Create results based on a bunch of default options
- def default_results(options={})
+ def default_results(options = {})
options.reverse_merge!(@options)
options.reverse_merge!(per_page: per_page_setting)
@@ -504,7 +502,7 @@ class TopicQuery
if @user
result = result.joins("LEFT OUTER JOIN topic_users AS tu ON (topics.id = tu.topic_id AND tu.user_id = #{@user.id.to_i})")
- .references('tu')
+ .references('tu')
end
category_id = get_category_id(options[:category])
@@ -559,7 +557,7 @@ class TopicQuery
end
elsif @options[:no_tags]
# the following will do: ("topics"."id" NOT IN (SELECT DISTINCT "topic_tags"."topic_id" FROM "topic_tags"))
- result = result.where.not(:id => TopicTag.select(:topic_id).uniq)
+ result = result.where.not(id: TopicTag.select(:topic_id).uniq)
end
end
@@ -578,7 +576,7 @@ class TopicQuery
result = result.limit(options[:per_page]) unless options[:limit] == false
result = result.visible if options[:visible]
- result = result.where.not(topics: {id: options[:except_topic_ids]}).references(:topics) if options[:except_topic_ids]
+ result = result.where.not(topics: { id: options[:except_topic_ids] }).references(:topics) if options[:except_topic_ids]
if options[:page]
offset = options[:page].to_i * options[:per_page]
@@ -641,7 +639,7 @@ class TopicQuery
end
end
- if (filter=options[:filter]) && @user
+ if (filter = options[:filter]) && @user
action =
if filter == "bookmarked"
PostActionType.types[:bookmark]
@@ -665,7 +663,7 @@ class TopicQuery
result = result.where('topics.posts_count <= ?', options[:max_posts]) if options[:max_posts].present?
result = result.where('topics.posts_count >= ?', options[:min_posts]) if options[:min_posts].present?
- result = TopicQuery.apply_custom_filters(result,self)
+ result = TopicQuery.apply_custom_filters(result, self)
@guardian.filter_allowed_categories(result)
end
@@ -677,12 +675,12 @@ class TopicQuery
list
end
- def remove_muted_categories(list, user, opts=nil)
+ def remove_muted_categories(list, user, opts = nil)
category_id = get_category_id(opts[:exclude]) if opts
if user
list = list.references("cu")
- .where("
+ .where("
NOT EXISTS (
SELECT 1
FROM category_users cu
@@ -699,7 +697,7 @@ class TopicQuery
list
end
- def remove_muted_tags(list, user, opts=nil)
+ def remove_muted_tags(list, user, opts = nil)
if user.nil? || !SiteSetting.tagging_enabled || !SiteSetting.remove_muted_tags_from_latest
list
else
@@ -732,7 +730,7 @@ class TopicQuery
def new_messages(params)
TopicQuery.new_filter(messages_for_groups_or_user(params[:my_group_ids]), Time.at(SiteSetting.min_new_topics_time).to_datetime)
- .limit(params[:count])
+ .limit(params[:count])
end
@@ -741,7 +739,7 @@ class TopicQuery
messages_for_groups_or_user(params[:my_group_ids]),
@user&.id,
staff: @user&.staff?)
- .limit(params[:count])
+ .limit(params[:count])
end
def related_messages_user(params)
@@ -811,7 +809,7 @@ class TopicQuery
query.order('topics.bumped_at DESC')
end
- def random_suggested(topic, count, excluded_topic_ids=[])
+ def random_suggested(topic, count, excluded_topic_ids = [])
result = default_results(unordered: true, per_page: count).where(closed: false, archived: false)
excluded_topic_ids += Category.topic_ids.to_a
result = result.where("topics.id NOT IN (?)", excluded_topic_ids) unless excluded_topic_ids.empty?
@@ -829,7 +827,7 @@ class TopicQuery
# of muted, big edge case
#
# we over select in case cache is stale
- max = (count*1.3).to_i
+ max = (count * 1.3).to_i
ids = RandomTopicSelector.next(max) + RandomTopicSelector.next(max, topic.category)
result.where(id: ids.uniq)
diff --git a/lib/topic_retriever.rb b/lib/topic_retriever.rb
index 672ca60a696..82f815d9f7c 100644
--- a/lib/topic_retriever.rb
+++ b/lib/topic_retriever.rb
@@ -1,6 +1,6 @@
class TopicRetriever
- def initialize(embed_url, opts=nil)
+ def initialize(embed_url, opts = nil)
@embed_url = embed_url
@author_username = opts[:author_username]
@opts = opts || {}
@@ -34,7 +34,6 @@ class TopicRetriever
# It's possible another process or job found the embed already. So if that happened bail out.
return if TopicEmbed.where(embed_url: @embed_url).exists?
-
# First check RSS if that is enabled
if SiteSetting.feed_polling_enabled?
Jobs::PollFeed.new.execute({})
diff --git a/lib/topic_subtype.rb b/lib/topic_subtype.rb
index 0d8224c4e45..731ed29c3e0 100644
--- a/lib/topic_subtype.rb
+++ b/lib/topic_subtype.rb
@@ -9,7 +9,7 @@ class TopicSubtype
end
def attributes
- {'id' => @id, 'options' => @options }
+ { 'id' => @id, 'options' => @options }
end
def self.list
@@ -41,7 +41,7 @@ class TopicSubtype
'pending_users'
end
- def self.register(name, options={})
+ def self.register(name, options = {})
@subtypes ||= {}
@subtypes[name] = TopicSubtype.new(name, options)
end
diff --git a/lib/topic_view.rb b/lib/topic_view.rb
index 482853d71e8..d631a331353 100644
--- a/lib/topic_view.rb
+++ b/lib/topic_view.rb
@@ -37,7 +37,7 @@ class TopicView
wpcf.flatten.uniq
end
- def initialize(topic_id, user=nil, options={})
+ def initialize(topic_id, user = nil, options = {})
@message_bus_last_id = MessageBus.last_id("/topic/#{topic_id}")
@user = user
@guardian = Guardian.new(@user)
@@ -50,11 +50,13 @@ class TopicView
end
@page = 1 if (!@page || @page.zero?)
- @chunk_size = case
- when options[:slow_platform] then TopicView.slow_chunk_size
- when @print then TopicView.print_chunk_size
- else TopicView.chunk_size
- end
+ @chunk_size =
+ case
+ when options[:slow_platform] then TopicView.slow_chunk_size
+ when @print then TopicView.print_chunk_size
+ else TopicView.chunk_size
+ end
+
@limit ||= @chunk_size
setup_filtered_posts
@@ -82,12 +84,15 @@ class TopicView
def canonical_path
path = relative_url
- path << if @post_number
- page = ((@post_number.to_i - 1) / @limit) + 1
- (page > 1) ? "?page=#{page}" : ""
- else
- (@page && @page.to_i > 1) ? "?page=#{@page}" : ""
- end
+
+ path <<
+ if @post_number
+ page = ((@post_number.to_i - 1) / @limit) + 1
+ (page > 1) ? "?page=#{page}" : ""
+ else
+ (@page && @page.to_i > 1) ? "?page=#{@page}" : ""
+ end
+
path
end
@@ -161,7 +166,7 @@ class TopicView
return @desired_post if @desired_post.present?
return nil if posts.blank?
- @desired_post = posts.detect {|p| p.post_number == @post_number.to_i}
+ @desired_post = posts.detect { |p| p.post_number == @post_number.to_i }
@desired_post ||= posts.first
@desired_post
end
@@ -175,7 +180,7 @@ class TopicView
def read_time
return nil if @post_number.present? && @post_number.to_i != 1 # only show for topic URLs
- (@topic.word_count/SiteSetting.read_time_word_count).floor if @topic.word_count
+ (@topic.word_count / SiteSetting.read_time_word_count).floor if @topic.word_count
end
def like_count
@@ -235,7 +240,6 @@ class TopicView
filter_posts_in_range(min_idx, max_idx)
end
-
def filter_posts_paged(page)
page = [page, 1].max
min = @limit * (page - 1)
@@ -248,7 +252,7 @@ class TopicView
filter_posts_in_range(min, max)
end
- def filter_best(max, opts={})
+ def filter_best(max, opts = {})
filter = FilterBestPosts.new(@topic, @filtered_posts, max, opts)
@posts = filter.posts
@filtered_posts = filter.filtered_posts
@@ -261,9 +265,9 @@ class TopicView
def has_deleted?
@predelete_filtered_posts.with_deleted
- .where("posts.deleted_at IS NOT NULL")
- .where("posts.post_number > 1")
- .exists?
+ .where("posts.deleted_at IS NOT NULL")
+ .where("posts.post_number > 1")
+ .exists?
end
def topic_user
@@ -275,17 +279,17 @@ class TopicView
def post_counts_by_user
@post_counts_by_user ||= Post.where(topic_id: @topic.id)
- .where("user_id IS NOT NULL")
- .group(:user_id)
- .order("count_all DESC")
- .limit(24)
- .count
+ .where("user_id IS NOT NULL")
+ .group(:user_id)
+ .order("count_all DESC")
+ .limit(24)
+ .count
end
def participants
@participants ||= begin
participants = {}
- User.where(id: post_counts_by_user.map {|k,v| k}).includes(:primary_group).each {|u| participants[u.id] = u}
+ User.where(id: post_counts_by_user.map { |k, v| k }).includes(:primary_group).each { |u| participants[u.id] = u }
participants
end
end
@@ -303,7 +307,7 @@ class TopicView
end
def link_counts
- @link_counts ||= TopicLink.counts_for(guardian,@topic, posts)
+ @link_counts ||= TopicLink.counts_for(guardian, @topic, posts)
end
# Are we the initial page load? If so, we can return extra information like
@@ -331,7 +335,7 @@ class TopicView
def current_post_ids
@current_post_ids ||= if @posts.is_a?(Array)
- @posts.map {|p| p.id }
+ @posts.map { |p| p.id }
else
@posts.pluck(:post_number)
end
@@ -341,13 +345,13 @@ class TopicView
# calculations.
def filtered_post_stream
@filtered_post_stream ||= @filtered_posts.order(:sort_order)
- .pluck(:id,
+ .pluck(:id,
:post_number,
'EXTRACT(DAYS FROM CURRENT_TIMESTAMP - created_at)::INT AS days_ago')
end
def filtered_post_ids
- @filtered_post_ids ||= filtered_post_stream.map {|tuple| tuple[0]}
+ @filtered_post_ids ||= filtered_post_stream.map { |tuple| tuple[0] }
end
protected
@@ -359,11 +363,11 @@ class TopicView
return result unless topic_user.present?
post_numbers = PostTiming
- .where(topic_id: @topic.id, user_id: @user.id)
- .where(post_number: current_post_ids)
- .pluck(:post_number)
+ .where(topic_id: @topic.id, user_id: @user.id)
+ .where(post_number: current_post_ids)
+ .pluck(:post_number)
- post_numbers.each {|pn| result << pn}
+ post_numbers.each { |pn| result << pn }
result
end
end
@@ -383,8 +387,8 @@ class TopicView
def filter_posts_by_ids(post_ids)
# TODO: Sort might be off
@posts = Post.where(id: post_ids, topic_id: @topic.id)
- .includes(:user, :reply_to_user, :incoming_email)
- .order('sort_order')
+ .includes(:user, :reply_to_user, :incoming_email)
+ .order('sort_order')
@posts = filter_post_types(@posts)
@posts = @posts.with_deleted if @guardian.can_see_deleted_posts?
@posts
@@ -435,7 +439,7 @@ class TopicView
# Username filters
if @username_filters.present?
- usernames = @username_filters.map{|u| u.downcase}
+ usernames = @username_filters.map { |u| u.downcase }
@filtered_posts = @filtered_posts.where('post_number = 1 OR posts.user_id IN (SELECT u.id FROM users u WHERE username_lower IN (?))', usernames)
@contains_gaps = true
end
diff --git a/lib/topics_bulk_action.rb b/lib/topics_bulk_action.rb
index 5e4d8d9f75d..1af2db62a2e 100644
--- a/lib/topics_bulk_action.rb
+++ b/lib/topics_bulk_action.rb
@@ -1,6 +1,6 @@
class TopicsBulkAction
- def initialize(user, topic_ids, operation, options={})
+ def initialize(user, topic_ids, operation, options = {})
@user = user
@topic_ids = topic_ids
@operation = operation
@@ -45,7 +45,7 @@ class TopicsBulkAction
if group
GroupArchivedMessage.move_to_inbox!(group.id, t.id)
else
- UserArchivedMessage.move_to_inbox!(@user.id,t.id)
+ UserArchivedMessage.move_to_inbox!(@user.id, t.id)
end
end
end
@@ -177,6 +177,4 @@ class TopicsBulkAction
@topics ||= Topic.where(id: @topic_ids)
end
-
end
-
diff --git a/lib/twitter_api.rb b/lib/twitter_api.rb
index 2c9ce48a601..da281fe05fe 100644
--- a/lib/twitter_api.rb
+++ b/lib/twitter_api.rb
@@ -5,7 +5,7 @@ class TwitterApi
def prettify_tweet(tweet)
text = tweet["full_text"].dup
- if entities = tweet["entities"] and urls = entities["urls"]
+ if (entities = tweet["entities"]) && (urls = entities["urls"])
urls.each do |url|
text.gsub!(url["url"], "#{url["display_url"]}")
end
@@ -125,7 +125,6 @@ class TwitterApi
URI.parse "#{BASE_URL}/oauth2/token"
end
-
def http(uri)
Net::HTTP.new(uri.host, uri.port).tap { |http| http.use_ssl = true }
end
diff --git a/lib/unread.rb b/lib/unread.rb
index f04ffcd2289..36a7d09e816 100644
--- a/lib/unread.rb
+++ b/lib/unread.rb
@@ -8,10 +8,9 @@ class Unread
@topic_user = topic_user
end
-
def unread_posts
return 0 if do_not_notify?(@topic_user.notification_level)
- result = ((@topic_user.highest_seen_post_number||0) - (@topic_user.last_read_post_number||0))
+ result = ((@topic_user.highest_seen_post_number || 0) - (@topic_user.last_read_post_number || 0))
result = 0 if result < 0
result
end
@@ -22,7 +21,7 @@ class Unread
highest_post_number = @guardian.is_staff? ? @topic.highest_staff_post_number : @topic.highest_post_number
- return 0 if (@topic_user.last_read_post_number||0) > highest_post_number
+ return 0 if (@topic_user.last_read_post_number || 0) > highest_post_number
new_posts = (highest_post_number - @topic_user.highest_seen_post_number)
new_posts = 0 if new_posts < 0
diff --git a/lib/validators/allow_user_locale_enabled_validator.rb b/lib/validators/allow_user_locale_enabled_validator.rb
index ebae4be4509..73d3cb5c51b 100644
--- a/lib/validators/allow_user_locale_enabled_validator.rb
+++ b/lib/validators/allow_user_locale_enabled_validator.rb
@@ -1,6 +1,6 @@
class AllowUserLocaleEnabledValidator
- def initialize(opts={})
+ def initialize(opts = {})
@opts = opts
end
@@ -15,4 +15,4 @@ class AllowUserLocaleEnabledValidator
I18n.t("site_settings.errors.user_locale_not_enabled");
end
-end
\ No newline at end of file
+end
diff --git a/lib/validators/alternative_reply_by_email_addresses_validator.rb b/lib/validators/alternative_reply_by_email_addresses_validator.rb
index d35dedf7c3a..412059ce86e 100644
--- a/lib/validators/alternative_reply_by_email_addresses_validator.rb
+++ b/lib/validators/alternative_reply_by_email_addresses_validator.rb
@@ -1,5 +1,5 @@
class AlternativeReplyByEmailAddressesValidator
- def initialize(opts={})
+ def initialize(opts = {})
@opts = opts
end
diff --git a/lib/validators/email_setting_validator.rb b/lib/validators/email_setting_validator.rb
index e9fb583167b..e44725396d3 100644
--- a/lib/validators/email_setting_validator.rb
+++ b/lib/validators/email_setting_validator.rb
@@ -1,5 +1,5 @@
class EmailSettingValidator
- def initialize(opts={})
+ def initialize(opts = {})
@opts = opts
end
diff --git a/lib/validators/integer_setting_validator.rb b/lib/validators/integer_setting_validator.rb
index 185eb0a7994..48a8d6ca7a1 100644
--- a/lib/validators/integer_setting_validator.rb
+++ b/lib/validators/integer_setting_validator.rb
@@ -1,5 +1,5 @@
class IntegerSettingValidator
- def initialize(opts={})
+ def initialize(opts = {})
@opts = opts
@opts[:min] = 0 unless @opts[:min].present? || @opts[:hidden]
@opts[:max] = 20000 unless @opts[:max].present? || @opts[:hidden]
@@ -7,18 +7,18 @@ class IntegerSettingValidator
def valid_value?(val)
return false if val.to_i.to_s != val.to_s
- return false if @opts[:min] and @opts[:min].to_i > val.to_i
- return false if @opts[:max] and @opts[:max].to_i < val.to_i
+ return false if @opts[:min] && @opts[:min].to_i > (val.to_i)
+ return false if @opts[:max] && @opts[:max].to_i < (val.to_i)
true
end
def error_message
if @opts[:min] && @opts[:max]
- I18n.t('site_settings.errors.invalid_integer_min_max', {min: @opts[:min], max: @opts[:max]})
+ I18n.t('site_settings.errors.invalid_integer_min_max', min: @opts[:min], max: @opts[:max])
elsif @opts[:min]
- I18n.t('site_settings.errors.invalid_integer_min', {min: @opts[:min]})
+ I18n.t('site_settings.errors.invalid_integer_min', min: @opts[:min])
elsif @opts[:max]
- I18n.t('site_settings.errors.invalid_integer_max', {max: @opts[:max]})
+ I18n.t('site_settings.errors.invalid_integer_max', max: @opts[:max])
else
I18n.t('site_settings.errors.invalid_integer')
end
diff --git a/lib/validators/pop3_polling_enabled_setting_validator.rb b/lib/validators/pop3_polling_enabled_setting_validator.rb
index f62e252bbbe..d002f4eee7f 100644
--- a/lib/validators/pop3_polling_enabled_setting_validator.rb
+++ b/lib/validators/pop3_polling_enabled_setting_validator.rb
@@ -2,7 +2,7 @@ require "net/pop"
class POP3PollingEnabledSettingValidator
- def initialize(opts={})
+ def initialize(opts = {})
@opts = opts
end
diff --git a/lib/validators/post_validator.rb b/lib/validators/post_validator.rb
index dec44496a8a..a9b1f468c45 100644
--- a/lib/validators/post_validator.rb
+++ b/lib/validators/post_validator.rb
@@ -24,7 +24,7 @@ class Validators::PostValidator < ActiveModel::Validator
post.errors.add(:topic_id, :blank, options) if post.topic_id.blank?
end
- if post.new_record? and post.user_id.nil?
+ if post.new_record? && post.user_id.nil?
post.errors.add(:user_id, :blank, options)
end
end
diff --git a/lib/validators/regex_setting_validator.rb b/lib/validators/regex_setting_validator.rb
index e1aa9900b64..895342372aa 100644
--- a/lib/validators/regex_setting_validator.rb
+++ b/lib/validators/regex_setting_validator.rb
@@ -2,7 +2,7 @@ class RegexSettingValidator
LOREM = 'Lorem ipsum dolor sit amet, consectetur adipiscing elit. Nullam eget sem non elit tincidunt rhoncus.'.freeze
- def initialize(opts={})
+ def initialize(opts = {})
@opts = opts
end
diff --git a/lib/validators/reply_by_email_address_validator.rb b/lib/validators/reply_by_email_address_validator.rb
index 024a1aab707..3c3b5698f89 100644
--- a/lib/validators/reply_by_email_address_validator.rb
+++ b/lib/validators/reply_by_email_address_validator.rb
@@ -1,5 +1,5 @@
class ReplyByEmailAddressValidator
- def initialize(opts={})
+ def initialize(opts = {})
@opts = opts
end
diff --git a/lib/validators/reply_by_email_enabled_validator.rb b/lib/validators/reply_by_email_enabled_validator.rb
index e7333de0f6c..8170a129354 100644
--- a/lib/validators/reply_by_email_enabled_validator.rb
+++ b/lib/validators/reply_by_email_enabled_validator.rb
@@ -1,6 +1,6 @@
class ReplyByEmailEnabledValidator
- def initialize(opts={})
+ def initialize(opts = {})
@opts = opts
end
diff --git a/lib/validators/sso_overrides_email_validator.rb b/lib/validators/sso_overrides_email_validator.rb
index 4af0149b5f6..202cfc7b00c 100644
--- a/lib/validators/sso_overrides_email_validator.rb
+++ b/lib/validators/sso_overrides_email_validator.rb
@@ -1,5 +1,5 @@
class SsoOverridesEmailValidator
- def initialize(opts={})
+ def initialize(opts = {})
@opts = opts
end
diff --git a/lib/validators/string_setting_validator.rb b/lib/validators/string_setting_validator.rb
index 07a65500bdd..b7ffc2dcf3e 100644
--- a/lib/validators/string_setting_validator.rb
+++ b/lib/validators/string_setting_validator.rb
@@ -1,5 +1,5 @@
class StringSettingValidator
- def initialize(opts={})
+ def initialize(opts = {})
@opts = opts
@regex = Regexp.new(opts[:regex]) if opts[:regex]
@regex_error = opts[:regex_error] || 'site_settings.errors.regex_mismatch'
@@ -8,12 +8,12 @@ class StringSettingValidator
def valid_value?(val)
return true if !val.present?
- if (@opts[:min] and @opts[:min].to_i > val.length) || (@opts[:max] and @opts[:max].to_i < val.length)
+ if (@opts[:min] && @opts[:min].to_i > (val.length)) || (@opts[:max] && @opts[:max].to_i < (val.length))
@length_fail = true
return false
end
- if @regex and !(val =~ @regex)
+ if @regex && !(val =~ @regex)
@regex_fail = true
return false
end
@@ -26,11 +26,11 @@ class StringSettingValidator
I18n.t(@regex_error)
elsif @length_fail
if @opts[:min] && @opts[:max]
- I18n.t('site_settings.errors.invalid_string_min_max', {min: @opts[:min], max: @opts[:max]})
+ I18n.t('site_settings.errors.invalid_string_min_max', min: @opts[:min], max: @opts[:max])
elsif @opts[:min]
- I18n.t('site_settings.errors.invalid_string_min', {min: @opts[:min]})
+ I18n.t('site_settings.errors.invalid_string_min', min: @opts[:min])
else
- I18n.t('site_settings.errors.invalid_string_max', {max: @opts[:max]})
+ I18n.t('site_settings.errors.invalid_string_max', max: @opts[:max])
end
else
I18n.t('site_settings.errors.invalid_string')
diff --git a/lib/validators/topic_title_length_validator.rb b/lib/validators/topic_title_length_validator.rb
index 82d9d54f9ee..bdee42c81c8 100644
--- a/lib/validators/topic_title_length_validator.rb
+++ b/lib/validators/topic_title_length_validator.rb
@@ -7,15 +7,16 @@ class TopicTitleLengthValidator < ActiveModel::EachValidator
private
def title_validator(record)
- length_range = if record.user.try(:admin?)
- 1..SiteSetting.max_topic_title_length
- elsif record.private_message?
- SiteSetting.private_message_title_length
- else
- SiteSetting.topic_title_length
- end
+ length_range =
+ if record.user.try(:admin?)
+ 1..SiteSetting.max_topic_title_length
+ elsif record.private_message?
+ SiteSetting.private_message_title_length
+ else
+ SiteSetting.topic_title_length
+ end
- ActiveModel::Validations::LengthValidator.new({attributes: :title, in: length_range, allow_blank: true})
+ ActiveModel::Validations::LengthValidator.new(attributes: :title, in: length_range, allow_blank: true)
end
end
diff --git a/lib/validators/username_setting_validator.rb b/lib/validators/username_setting_validator.rb
index 3dbc75fba1d..d9aa18ad5fc 100644
--- a/lib/validators/username_setting_validator.rb
+++ b/lib/validators/username_setting_validator.rb
@@ -1,5 +1,5 @@
class UsernameSettingValidator
- def initialize(opts={})
+ def initialize(opts = {})
@opts = opts
end
diff --git a/lib/wizard.rb b/lib/wizard.rb
index e7a74f190c1..d577c2aa29a 100644
--- a/lib/wizard.rb
+++ b/lib/wizard.rb
@@ -87,10 +87,10 @@ class Wizard
end
first_admin_id = User.where(admin: true)
- .human_users
- .joins(:user_auth_tokens)
- .order('user_auth_tokens.created_at')
- .pluck(:id).first
+ .human_users
+ .joins(:user_auth_tokens)
+ .order('user_auth_tokens.created_at')
+ .pluck(:id).first
if @user&.id && first_admin_id == @user.id
!Wizard::Builder.new(@user).build.completed?
diff --git a/lib/wizard/builder.rb b/lib/wizard/builder.rb
index 56e6bc4a32f..d3ba97ff1f6 100644
--- a/lib/wizard/builder.rb
+++ b/lib/wizard/builder.rb
@@ -82,7 +82,7 @@ class Wizard
username = Discourse.system_user.username if username.blank?
contact = step.add_field(id: 'site_contact', type: 'dropdown', value: username)
- User.where(admin: true).pluck(:username).each {|c| contact.add_choice(c) }
+ User.where(admin: true).pluck(:username).each { |c| contact.add_choice(c) }
step.on_update do |updater|
updater.apply_settings(:contact_email, :contact_url)
@@ -120,8 +120,8 @@ class Wizard
themes = step.add_field(id: 'base_scheme_id', type: 'dropdown', required: true, value: scheme_id)
ColorScheme.base_color_scheme_colors.each do |t|
with_hash = t[:colors].dup
- with_hash.map{|k,v| with_hash[k] = "##{v}"}
- themes.add_choice(t[:id], data: {colors: with_hash})
+ with_hash.map { |k, v| with_hash[k] = "##{v}" }
+ themes.add_choice(t[:id], data: { colors: with_hash })
end
step.add_field(id: 'theme_preview', type: 'component')
@@ -187,12 +187,10 @@ class Wizard
end
@wizard.append_step('emoji') do |step|
- sets = step.add_field({
- id: 'emoji_set',
- type: 'radio',
- required: true,
- value: SiteSetting.emoji_set
- })
+ sets = step.add_field(id: 'emoji_set',
+ type: 'radio',
+ required: true,
+ value: SiteSetting.emoji_set)
emoji = ["smile", "+1", "tada", "poop"]
@@ -201,10 +199,8 @@ class Wizard
""
end
- sets.add_choice(set[:value], {
- label: I18n.t("js.#{set[:name]}"),
- extra_label: "#{imgs.join}"
- })
+ sets.add_choice(set[:value], label: I18n.t("js.#{set[:name]}"),
+ extra_label: "#{imgs.join}")
step.on_update do |updater|
updater.apply_settings(:emoji_set)
diff --git a/lib/wizard/field.rb b/lib/wizard/field.rb
index 0816e055d7f..45fdacc9992 100644
--- a/lib/wizard/field.rb
+++ b/lib/wizard/field.rb
@@ -27,7 +27,7 @@ class Wizard
@choices = []
end
- def add_choice(id, opts=nil)
+ def add_choice(id, opts = nil)
choice = Choice.new(id, opts || {})
choice.field = self
diff --git a/lib/wizard/step_updater.rb b/lib/wizard/step_updater.rb
index f05605c33fe..3d8ad87a3d6 100644
--- a/lib/wizard/step_updater.rb
+++ b/lib/wizard/step_updater.rb
@@ -44,7 +44,7 @@ class Wizard
end
def apply_settings(*ids)
- ids.each {|id| apply_setting(id)}
+ ids.each { |id| apply_setting(id) }
end
end
diff --git a/plugins/discourse-narrative-bot/db/fixtures/001_discobot.rb b/plugins/discourse-narrative-bot/db/fixtures/001_discobot.rb
index e9ce7b2dd6a..ebc2299937f 100644
--- a/plugins/discourse-narrative-bot/db/fixtures/001_discobot.rb
+++ b/plugins/discourse-narrative-bot/db/fixtures/001_discobot.rb
@@ -1,4 +1,4 @@
-discobot_username ='discobot'
+discobot_username = 'discobot'
user = User.find_by(id: -2)
if !user
@@ -37,7 +37,7 @@ if !user
end
bot = User.find(-2)
-bot.update!(admin:true, moderator: false)
+bot.update!(admin: true, moderator: false)
bot.user_option.update!(
email_private_messages: false,
diff --git a/plugins/discourse-narrative-bot/lib/discourse_narrative_bot/advanced_user_narrative.rb b/plugins/discourse-narrative-bot/lib/discourse_narrative_bot/advanced_user_narrative.rb
index fb6b4e2e14a..b5fbe5bc732 100644
--- a/plugins/discourse-narrative-bot/lib/discourse_narrative_bot/advanced_user_narrative.rb
+++ b/plugins/discourse-narrative-bot/lib/discourse_narrative_bot/advanced_user_narrative.rb
@@ -101,7 +101,7 @@ module DiscourseNarrativeBot
def reset_bot(user, post)
if pm_to_bot?(post)
- reset_data(user, { topic_id: post.topic_id })
+ reset_data(user, topic_id: post.topic_id)
else
reset_data(user)
end
@@ -116,15 +116,13 @@ module DiscourseNarrativeBot
fake_delay
- post = PostCreator.create!(@user, {
- raw: I18n.t(
+ post = PostCreator.create!(@user, raw: I18n.t(
"#{I18N_KEY}.edit.bot_created_post_raw",
i18n_post_args(discobot_username: self.discobot_user.username)
),
- topic_id: data[:topic_id],
- skip_bot: true,
- skip_validations: true
- })
+ topic_id: data[:topic_id],
+ skip_bot: true,
+ skip_validations: true)
set_state_data(:post_id, post.id)
post
@@ -133,15 +131,13 @@ module DiscourseNarrativeBot
def init_tutorial_recover
data = get_data(@user)
- post = PostCreator.create!(@user, {
- raw: I18n.t(
+ post = PostCreator.create!(@user, raw: I18n.t(
"#{I18N_KEY}.recover.deleted_post_raw",
i18n_post_args(discobot_username: self.discobot_user.username)
),
- topic_id: data[:topic_id],
- skip_bot: true,
- skip_validations: true
- })
+ topic_id: data[:topic_id],
+ skip_bot: true,
+ skip_validations: true)
set_state_data(:post_id, post.id)
diff --git a/plugins/discourse-narrative-bot/lib/discourse_narrative_bot/base.rb b/plugins/discourse-narrative-bot/lib/discourse_narrative_bot/base.rb
index e00cf5eb2f0..f2bbc9d3b1c 100644
--- a/plugins/discourse-narrative-bot/lib/discourse_narrative_bot/base.rb
+++ b/plugins/discourse-narrative-bot/lib/discourse_narrative_bot/base.rb
@@ -109,7 +109,7 @@ module DiscourseNarrativeBot
skip_trigger: TrackSelector.skip_trigger,
reset_trigger: "#{TrackSelector.reset_trigger} #{self.class.reset_trigger}"
)
- ), {}, { skip_send_email: false })
+ ), {}, skip_send_email: false)
end
end
@@ -168,8 +168,8 @@ module DiscourseNarrativeBot
end
end
- def i18n_post_args(extra={})
- {base_uri: Discourse.base_uri}.merge(extra)
+ def i18n_post_args(extra = {})
+ { base_uri: Discourse.base_uri }.merge(extra)
end
def valid_topic?(topic_id)
diff --git a/plugins/discourse-narrative-bot/lib/discourse_narrative_bot/new_user_narrative.rb b/plugins/discourse-narrative-bot/lib/discourse_narrative_bot/new_user_narrative.rb
index 76e5b2e6fe0..7596d335a15 100644
--- a/plugins/discourse-narrative-bot/lib/discourse_narrative_bot/new_user_narrative.rb
+++ b/plugins/discourse-narrative-bot/lib/discourse_narrative_bot/new_user_narrative.rb
@@ -121,7 +121,7 @@ module DiscourseNarrativeBot
def reset_bot(user, post)
if pm_to_bot?(post)
- reset_data(user, { topic_id: post.topic_id })
+ reset_data(user, topic_id: post.topic_id)
else
reset_data(user)
end
@@ -154,7 +154,7 @@ module DiscourseNarrativeBot
PostRevisor.new(post, topic).revise!(
self.discobot_user,
{ raw: raw },
- { skip_validations: true, force_new_version: true }
+ skip_validations: true, force_new_version: true
)
set_state_data(:post_version, post.reload.version || 0)
diff --git a/plugins/discourse-narrative-bot/plugin.rb b/plugins/discourse-narrative-bot/plugin.rb
index adc514e29c4..79aba1b6ede 100644
--- a/plugins/discourse-narrative-bot/plugin.rb
+++ b/plugins/discourse-narrative-bot/plugin.rb
@@ -96,7 +96,7 @@ after_initialize do
end
respond_to do |format|
- format.svg { render inline: svg}
+ format.svg { render inline: svg }
end
end
end
diff --git a/plugins/discourse-narrative-bot/spec/discourse_narrative_bot/advanced_user_narrative_spec.rb b/plugins/discourse-narrative-bot/spec/discourse_narrative_bot/advanced_user_narrative_spec.rb
index a3dd4e826ca..5c2eeb04523 100644
--- a/plugins/discourse-narrative-bot/spec/discourse_narrative_bot/advanced_user_narrative_spec.rb
+++ b/plugins/discourse-narrative-bot/spec/discourse_narrative_bot/advanced_user_narrative_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe DiscourseNarrativeBot::AdvancedUserNarrative do
let(:topic) do
Fabricate(:private_message_topic, first_post: first_post,
- topic_allowed_users: [
+ topic_allowed_users: [
Fabricate.build(:topic_allowed_user, user: discobot_user),
Fabricate.build(:topic_allowed_user, user: user),
]
@@ -79,15 +79,13 @@ RSpec.describe DiscourseNarrativeBot::AdvancedUserNarrative do
new_post = Post.offset(1).last
- expect(narrative.get_data(user)).to eq({
- "topic_id" => topic.id,
- "state" => "tutorial_edit",
- "last_post_id" => new_post.id,
- "track" => described_class.to_s,
- "tutorial_edit" => {
+ expect(narrative.get_data(user)).to eq("topic_id" => topic.id,
+ "state" => "tutorial_edit",
+ "last_post_id" => new_post.id,
+ "track" => described_class.to_s,
+ "tutorial_edit" => {
"post_id" => Post.last.id
- }
- })
+ })
expect(new_post.raw).to eq(expected_raw.chomp)
expect(new_post.topic.id).to eq(topic.id)
@@ -111,15 +109,13 @@ RSpec.describe DiscourseNarrativeBot::AdvancedUserNarrative do
new_post = Post.offset(1).last
- expect(narrative.get_data(user)).to eq({
- "topic_id" => new_post.topic.id,
- "state" => "tutorial_edit",
- "last_post_id" => new_post.id,
- "track" => described_class.to_s,
- "tutorial_edit" => {
+ expect(narrative.get_data(user)).to eq("topic_id" => new_post.topic.id,
+ "state" => "tutorial_edit",
+ "last_post_id" => new_post.id,
+ "track" => described_class.to_s,
+ "tutorial_edit" => {
"post_id" => Post.last.id
- }
- })
+ })
expect(new_post.raw).to eq(expected_raw.chomp)
expect(new_post.topic.id).to_not eq(topic.id)
@@ -659,11 +655,9 @@ RSpec.describe DiscourseNarrativeBot::AdvancedUserNarrative do
'discourse_narrative_bot.advanced_user_narrative.details.reply', base_uri: ''
))
- expect(narrative.get_data(user)).to eq({
- "state" => "end",
- "topic_id" => topic.id,
- "track" => described_class.to_s
- })
+ expect(narrative.get_data(user)).to eq("state" => "end",
+ "topic_id" => topic.id,
+ "track" => described_class.to_s)
expect(user.badges.where(name: DiscourseNarrativeBot::AdvancedUserNarrative::BADGE_NAME).exists?)
.to eq(true)
diff --git a/plugins/discourse-narrative-bot/spec/discourse_narrative_bot/new_user_narrative_spec.rb b/plugins/discourse-narrative-bot/spec/discourse_narrative_bot/new_user_narrative_spec.rb
index b234519ed9f..f85c05b070f 100644
--- a/plugins/discourse-narrative-bot/spec/discourse_narrative_bot/new_user_narrative_spec.rb
+++ b/plugins/discourse-narrative-bot/spec/discourse_narrative_bot/new_user_narrative_spec.rb
@@ -8,7 +8,7 @@ describe DiscourseNarrativeBot::NewUserNarrative do
let(:topic) do
Fabricate(:private_message_topic, first_post: first_post,
- topic_allowed_users: [
+ topic_allowed_users: [
Fabricate.build(:topic_allowed_user, user: discobot_user),
Fabricate.build(:topic_allowed_user, user: user),
]
@@ -83,12 +83,10 @@ describe DiscourseNarrativeBot::NewUserNarrative do
new_post = Post.last
- expect(narrative.get_data(user)).to eq({
- "topic_id" => topic.id,
- "state" => "tutorial_bookmark",
- "last_post_id" => new_post.id,
- "track" => described_class.to_s
- })
+ expect(narrative.get_data(user)).to eq("topic_id" => topic.id,
+ "state" => "tutorial_bookmark",
+ "last_post_id" => new_post.id,
+ "track" => described_class.to_s)
expect(new_post.raw).to eq(expected_raw.chomp)
expect(new_post.topic.id).to eq(topic.id)
@@ -111,12 +109,10 @@ describe DiscourseNarrativeBot::NewUserNarrative do
new_post = Post.last
- expect(narrative.get_data(user)).to eq({
- "topic_id" => new_post.topic.id,
- "state" => "tutorial_bookmark",
- "last_post_id" => new_post.id,
- "track" => described_class.to_s
- })
+ expect(narrative.get_data(user)).to eq("topic_id" => new_post.topic.id,
+ "state" => "tutorial_bookmark",
+ "last_post_id" => new_post.id,
+ "track" => described_class.to_s)
expect(new_post.raw).to eq(expected_raw.chomp)
expect(new_post.topic.id).to_not eq(topic.id)
@@ -922,11 +918,9 @@ describe DiscourseNarrativeBot::NewUserNarrative do
expect(first_post.reload.raw).to eq('Hello world')
- expect(narrative.get_data(user)).to include({
- "state" => "end",
- "topic_id" => new_post.topic_id,
- "track" => described_class.to_s,
- })
+ expect(narrative.get_data(user)).to include("state" => "end",
+ "topic_id" => new_post.topic_id,
+ "track" => described_class.to_s)
expect(user.badges.where(name: DiscourseNarrativeBot::NewUserNarrative::BADGE_NAME).exists?)
.to eq(true)
diff --git a/plugins/discourse-narrative-bot/spec/discourse_narrative_bot/track_selector_spec.rb b/plugins/discourse-narrative-bot/spec/discourse_narrative_bot/track_selector_spec.rb
index b6166ccfcfe..5e7594a2e72 100644
--- a/plugins/discourse-narrative-bot/spec/discourse_narrative_bot/track_selector_spec.rb
+++ b/plugins/discourse-narrative-bot/spec/discourse_narrative_bot/track_selector_spec.rb
@@ -42,7 +42,7 @@ describe DiscourseNarrativeBot::TrackSelector do
let(:topic) do
Fabricate(:private_message_topic, first_post: first_post,
- topic_allowed_users: [
+ topic_allowed_users: [
Fabricate.build(:topic_allowed_user, user: discobot_user),
Fabricate.build(:topic_allowed_user, user: user),
]
@@ -454,7 +454,6 @@ describe DiscourseNarrativeBot::TrackSelector do
expect(new_post.raw).to eq(random_mention_reply)
end
-
describe 'rate limiting random reply message in public topic' do
let(:topic) { Fabricate(:topic) }
let(:other_post) { Fabricate(:post, raw: '@discobot show me something', topic: topic) }
diff --git a/plugins/discourse-nginx-performance-report/app/jobs/scheduled/daily_performance_report.rb b/plugins/discourse-nginx-performance-report/app/jobs/scheduled/daily_performance_report.rb
index 78b25666e4d..80d44a2aeb2 100644
--- a/plugins/discourse-nginx-performance-report/app/jobs/scheduled/daily_performance_report.rb
+++ b/plugins/discourse-nginx-performance-report/app/jobs/scheduled/daily_performance_report.rb
@@ -45,7 +45,6 @@ module Jobs
title: I18n.t('performance_report.initial_topic_title'),
skip_validations: true)
-
unless post && post.topic_id
raise StandardError, "Could not create or retrieve performance report topic id"
end
diff --git a/plugins/discourse-nginx-performance-report/lib/log_analyzer.rb b/plugins/discourse-nginx-performance-report/lib/log_analyzer.rb
index 79cac5163ad..439b1202470 100644
--- a/plugins/discourse-nginx-performance-report/lib/log_analyzer.rb
+++ b/plugins/discourse-nginx-performance-report/lib/log_analyzer.rb
@@ -2,7 +2,6 @@ class LogAnalyzer
class LineParser
-
# log_format log_discourse '[$time_local] "$http_host" $remote_addr "$request" "$http_user_agent" "$sent_http_x_discourse_route" $status $bytes_sent "$http_referer" $upstream_response_time $request_time "$sent_http_x_discourse_username"';
attr_accessor :time, :ip_address, :url, :route, :user_agent, :rails_duration, :total_duration,
@@ -69,8 +68,8 @@ class LogAnalyzer
@aggregate_type = :duration
end
- def add(id, duration, aggregate=nil)
- ary = (@data[id] ||= [0,0])
+ def add(id, duration, aggregate = nil)
+ ary = (@data[id] ||= [0, 0])
ary[0] += duration
ary[1] += 1
unless aggregate.nil?
@@ -83,8 +82,8 @@ class LogAnalyzer
end
end
- def top(n, aggregator_formatter=nil)
- @data.sort{|a,b| b[1][0] <=> a[1][0]}.first(n).map do |metric, ary|
+ def top(n, aggregator_formatter = nil)
+ @data.sort { |a, b| b[1][0] <=> a[1][0] }.first(n).map do |metric, ary|
metric = metric.to_s
metric = "[empty]" if metric.length == 0
result = [metric, ary[0], ary[1]]
@@ -93,7 +92,7 @@ class LogAnalyzer
if aggregator_formatter
result.push aggregator_formatter.call(ary[2], ary[0], ary[1])
else
- result.push ary[2].sort{|a,b| b[1] <=> a[1]}.first(5).map{|k,v|
+ result.push ary[2].sort { |a, b| b[1] <=> a[1] }.first(5).map { |k, v|
v = "%.2f" % v if Float === v
"#{k}(#{v})"}.join(" ")
end
@@ -104,7 +103,7 @@ class LogAnalyzer
end
end
- def initialize(filenames, args={})
+ def initialize(filenames, args = {})
@filenames = filenames
@ip_to_rails_duration = Aggeregator.new
@username_to_rails_duration = Aggeregator.new
@@ -148,7 +147,7 @@ class LogAnalyzer
@url_to_rails_duration.add(parsed.url, parsed.rails_duration)
- @status_404_to_count.add(parsed.url,1) if parsed.status == "404"
+ @status_404_to_count.add(parsed.url, 1) if parsed.status == "404"
end
end
self
diff --git a/plugins/discourse-nginx-performance-report/script/nginx_analyze.rb b/plugins/discourse-nginx-performance-report/script/nginx_analyze.rb
index 8550806f879..480beb67ebd 100644
--- a/plugins/discourse-nginx-performance-report/script/nginx_analyze.rb
+++ b/plugins/discourse-nginx-performance-report/script/nginx_analyze.rb
@@ -30,7 +30,7 @@ def top(cols, aggregator, count, aggregator_formatter = nil)
col_just = []
- col_widths = map_with_index(cols) do |name,idx|
+ col_widths = map_with_index(cols) do |name, idx|
max_width = name.length
if cols[idx].respond_to? :align
@@ -46,14 +46,14 @@ def top(cols, aggregator, count, aggregator_formatter = nil)
row[idx] = row[idx].to_s
max_width = row[idx].length if row[idx].length > max_width
end
- [max_width,80].min
+ [max_width, 80].min
end
- puts(map_with_index(cols) do |name,idx|
+ puts(map_with_index(cols) do |name, idx|
name.ljust(col_widths[idx])
end.join(" "))
- puts(map_with_index(cols) do |name,idx|
+ puts(map_with_index(cols) do |name, idx|
("-" * name.length).ljust(col_widths[idx])
end.join(" "))
@@ -102,7 +102,7 @@ end
puts
puts "Analyzed: #{analyzer.filenames.join(",")} on #{`hostname`}"
if limit
- puts "Limited to #{DateTime.now - (limit.to_f / (60*24.0))} - #{DateTime.now}"
+ puts "Limited to #{DateTime.now - (limit.to_f / (60 * 24.0))} - #{DateTime.now}"
end
puts SPACER
puts "#{analyzer.from_time} - #{analyzer.to_time}"
@@ -121,7 +121,7 @@ puts SPACER
puts
puts "Top 100 routes by Server Load"
puts
-top(["Route", "Duration", "Reqs", Column.new("Mobile", :rjust)], analyzer.route_to_rails_duration, 100, lambda{
+top(["Route", "Duration", "Reqs", Column.new("Mobile", :rjust)], analyzer.route_to_rails_duration, 100, lambda {
|hash, name, total|
"#{hash["mobile"] || 0} (#{"%.2f" % (((hash["mobile"] || 0) / (total + 0.0)) * 100)})%"}
)
diff --git a/plugins/poll/db/migrate/20151016163051_merge_polls_votes.rb b/plugins/poll/db/migrate/20151016163051_merge_polls_votes.rb
index 17e6459e120..33bd122bdf9 100644
--- a/plugins/poll/db/migrate/20151016163051_merge_polls_votes.rb
+++ b/plugins/poll/db/migrate/20151016163051_merge_polls_votes.rb
@@ -5,11 +5,11 @@ class MergePollsVotes < ActiveRecord::Migration
polls_votes = {}
PostCustomField.where(post_id: post_id).where("name LIKE 'polls-votes-%'").find_each do |pcf|
user_id = pcf.name["polls-votes-".size..-1]
- polls_votes["#{user_id}"] = ::JSON.parse(pcf.value||"{}")
+ polls_votes["#{user_id}"] = ::JSON.parse(pcf.value || "{}")
end
pcf = PostCustomField.find_or_create_by(name: "polls-votes", post_id: post_id)
- pcf.value = ::JSON.parse(pcf.value||"{}").merge(polls_votes).to_json
+ pcf.value = ::JSON.parse(pcf.value || "{}").merge(polls_votes).to_json
pcf.save
end
end
diff --git a/plugins/poll/db/migrate/20160321164925_close_polls_in_closed_topics.rb b/plugins/poll/db/migrate/20160321164925_close_polls_in_closed_topics.rb
index 5ca969c68dc..978a41c890c 100644
--- a/plugins/poll/db/migrate/20160321164925_close_polls_in_closed_topics.rb
+++ b/plugins/poll/db/migrate/20160321164925_close_polls_in_closed_topics.rb
@@ -2,9 +2,9 @@ class ClosePollsInClosedTopics < ActiveRecord::Migration
def up
PostCustomField.joins(post: :topic)
- .where("post_custom_fields.name = 'polls'")
- .where("topics.closed")
- .find_each do |pcf|
+ .where("post_custom_fields.name = 'polls'")
+ .where("topics.closed")
+ .find_each do |pcf|
polls = ::JSON.parse(pcf.value || "{}")
polls.values.each { |poll| poll["status"] = "closed" }
pcf.value = polls.to_json
diff --git a/plugins/poll/lib/polls_updater.rb b/plugins/poll/lib/polls_updater.rb
index 5464005175d..488331041b1 100644
--- a/plugins/poll/lib/polls_updater.rb
+++ b/plugins/poll/lib/polls_updater.rb
@@ -101,7 +101,7 @@ module DiscoursePoll
post.save_custom_fields(true)
# publish the changes
- MessageBus.publish("/polls/#{post.topic_id}", { post_id: post.id, polls: polls })
+ MessageBus.publish("/polls/#{post.topic_id}", post_id: post.id, polls: polls)
end
end
diff --git a/plugins/poll/lib/tasks/migrate_old_polls.rake b/plugins/poll/lib/tasks/migrate_old_polls.rake
index 17e5c3946c8..d8f2ee40644 100644
--- a/plugins/poll/lib/tasks/migrate_old_polls.rake
+++ b/plugins/poll/lib/tasks/migrate_old_polls.rake
@@ -27,9 +27,9 @@ desc "Migrate old polls to new syntax"
task "poll:migrate_old_polls" => :environment do
# iterate over all polls
PluginStoreRow.where(plugin_name: "poll")
- .where("key LIKE 'poll_options_%'")
- .pluck(:key)
- .each do |poll_options_key|
+ .where("key LIKE 'poll_options_%'")
+ .pluck(:key)
+ .each do |poll_options_key|
# extract the post_id
post_id = poll_options_key["poll_options_".length..-1].to_i
# load the post from the db
@@ -56,9 +56,9 @@ task "poll:migrate_old_polls" => :environment do
options = post.custom_fields["polls"]["poll"]["options"]
# iterate over all votes
PluginStoreRow.where(plugin_name: "poll")
- .where("key LIKE 'poll_vote_#{post_id}_%'")
- .pluck(:key, :value)
- .each do |poll_vote_key, vote|
+ .where("key LIKE 'poll_vote_#{post_id}_%'")
+ .pluck(:key, :value)
+ .each do |poll_vote_key, vote|
# extract the user_id
user_id = poll_vote_key["poll_vote_#{post_id}_%".length..-1].to_i
# find the selected option
diff --git a/plugins/poll/plugin.rb b/plugins/poll/plugin.rb
index 2bf54226e3f..33709ecb0f5 100644
--- a/plugins/poll/plugin.rb
+++ b/plugins/poll/plugin.rb
@@ -136,7 +136,7 @@ after_initialize do
post.save_custom_fields(true)
- MessageBus.publish("/polls/#{post.topic_id}", {post_id: post.id, polls: polls })
+ MessageBus.publish("/polls/#{post.topic_id}", post_id: post.id, polls: polls)
polls[poll_name]
end
@@ -230,7 +230,7 @@ after_initialize do
end
next unless option["voter_ids"]
- user_ids << option["voter_ids"].slice((params[:offset].to_i || 0) * 25, 25)
+ user_ids << option["voter_ids"].slice((params[:offset].to_i || 0) * 25, 25)
end
user_ids.flatten!
@@ -295,7 +295,7 @@ after_initialize do
end
end
- validate(:post, :validate_polls) do |force=nil|
+ validate(:post, :validate_polls) do |force = nil|
return if !SiteSetting.poll_enabled? && (self.user && !self.user.staff?)
# only care when raw has changed!
@@ -360,9 +360,8 @@ after_initialize do
# tells the front-end we have a poll for that post
on(:post_created) do |post|
next if post.is_first_post? || post.custom_fields[DiscoursePoll::POLLS_CUSTOM_FIELD].blank?
- MessageBus.publish("/polls/#{post.topic_id}", {
- post_id: post.id,
- polls: post.custom_fields[DiscoursePoll::POLLS_CUSTOM_FIELD]})
+ MessageBus.publish("/polls/#{post.topic_id}", post_id: post.id,
+ polls: post.custom_fields[DiscoursePoll::POLLS_CUSTOM_FIELD])
end
add_to_serializer(:post, :polls, false) do
diff --git a/plugins/poll/spec/controllers/polls_controller_spec.rb b/plugins/poll/spec/controllers/polls_controller_spec.rb
index 677a72d6098..02db8bdf930 100644
--- a/plugins/poll/spec/controllers/polls_controller_spec.rb
+++ b/plugins/poll/spec/controllers/polls_controller_spec.rb
@@ -13,7 +13,7 @@ describe ::DiscoursePoll::PollsController do
it "works" do
MessageBus.expects(:publish)
- xhr :put, :vote, { post_id: poll.id, poll_name: "poll", options: ["5c24fc1df56d764b550ceae1b9319125"] }
+ xhr :put, :vote, post_id: poll.id, poll_name: "poll", options: ["5c24fc1df56d764b550ceae1b9319125"]
expect(response).to be_success
json = ::JSON.parse(response.body)
@@ -23,7 +23,7 @@ describe ::DiscoursePoll::PollsController do
end
it "requires at least 1 valid option" do
- xhr :put, :vote, { post_id: poll.id, poll_name: "poll", options: ["A", "B"] }
+ xhr :put, :vote, post_id: poll.id, poll_name: "poll", options: ["A", "B"]
expect(response).not_to be_success
json = ::JSON.parse(response.body)
@@ -31,10 +31,10 @@ describe ::DiscoursePoll::PollsController do
end
it "supports vote changes" do
- xhr :put, :vote, { post_id: poll.id, poll_name: "poll", options: ["5c24fc1df56d764b550ceae1b9319125"] }
+ xhr :put, :vote, post_id: poll.id, poll_name: "poll", options: ["5c24fc1df56d764b550ceae1b9319125"]
expect(response).to be_success
- xhr :put, :vote, { post_id: poll.id, poll_name: "poll", options: ["e89dec30bbd9bf50fabf6a05b4324edf"] }
+ xhr :put, :vote, post_id: poll.id, poll_name: "poll", options: ["e89dec30bbd9bf50fabf6a05b4324edf"]
expect(response).to be_success
json = ::JSON.parse(response.body)
expect(json["poll"]["voters"]).to eq(1)
@@ -44,13 +44,13 @@ describe ::DiscoursePoll::PollsController do
it "works even if topic is closed" do
topic.update_attribute(:closed, true)
- xhr :put, :vote, { post_id: poll.id, poll_name: "poll", options: ["5c24fc1df56d764b550ceae1b9319125"] }
+ xhr :put, :vote, post_id: poll.id, poll_name: "poll", options: ["5c24fc1df56d764b550ceae1b9319125"]
expect(response).to be_success
end
it "ensures topic is not archived" do
topic.update_attribute(:archived, true)
- xhr :put, :vote, { post_id: poll.id, poll_name: "poll", options: ["A"] }
+ xhr :put, :vote, post_id: poll.id, poll_name: "poll", options: ["A"]
expect(response).not_to be_success
json = ::JSON.parse(response.body)
expect(json["errors"][0]).to eq(I18n.t("poll.topic_must_be_open_to_vote"))
@@ -58,21 +58,21 @@ describe ::DiscoursePoll::PollsController do
it "ensures post is not trashed" do
poll.trash!
- xhr :put, :vote, { post_id: poll.id, poll_name: "poll", options: ["A"] }
+ xhr :put, :vote, post_id: poll.id, poll_name: "poll", options: ["A"]
expect(response).not_to be_success
json = ::JSON.parse(response.body)
expect(json["errors"][0]).to eq(I18n.t("poll.post_is_deleted"))
end
it "ensures polls are associated with the post" do
- xhr :put, :vote, { post_id: Fabricate(:post).id, poll_name: "foobar", options: ["A"] }
+ xhr :put, :vote, post_id: Fabricate(:post).id, poll_name: "foobar", options: ["A"]
expect(response).not_to be_success
json = ::JSON.parse(response.body)
expect(json["errors"][0]).to eq(I18n.t("poll.no_polls_associated_with_this_post"))
end
it "checks the name of the poll" do
- xhr :put, :vote, { post_id: poll.id, poll_name: "foobar", options: ["A"] }
+ xhr :put, :vote, post_id: poll.id, poll_name: "foobar", options: ["A"]
expect(response).not_to be_success
json = ::JSON.parse(response.body)
expect(json["errors"][0]).to eq(I18n.t("poll.no_poll_with_this_name", name: "foobar"))
@@ -80,7 +80,7 @@ describe ::DiscoursePoll::PollsController do
it "ensures poll is open" do
closed_poll = create_post(raw: "[poll status=closed]\n- A\n- B\n[/poll]")
- xhr :put, :vote, { post_id: closed_poll.id, poll_name: "poll", options: ["5c24fc1df56d764b550ceae1b9319125"] }
+ xhr :put, :vote, post_id: closed_poll.id, poll_name: "poll", options: ["5c24fc1df56d764b550ceae1b9319125"]
expect(response).not_to be_success
json = ::JSON.parse(response.body)
expect(json["errors"][0]).to eq(I18n.t("poll.poll_must_be_open_to_vote"))
@@ -88,9 +88,9 @@ describe ::DiscoursePoll::PollsController do
it "doesn't discard anonymous votes when someone votes" do
default_poll = poll.custom_fields["polls"]["poll"]
- add_anonymous_votes(poll, default_poll, 17, {"5c24fc1df56d764b550ceae1b9319125" => 11, "e89dec30bbd9bf50fabf6a05b4324edf" => 6})
+ add_anonymous_votes(poll, default_poll, 17, "5c24fc1df56d764b550ceae1b9319125" => 11, "e89dec30bbd9bf50fabf6a05b4324edf" => 6)
- xhr :put, :vote, { post_id: poll.id, poll_name: "poll", options: ["5c24fc1df56d764b550ceae1b9319125"] }
+ xhr :put, :vote, post_id: poll.id, poll_name: "poll", options: ["5c24fc1df56d764b550ceae1b9319125"]
expect(response).to be_success
json = ::JSON.parse(response.body)
@@ -148,7 +148,7 @@ describe ::DiscoursePoll::PollsController do
it "works for OP" do
MessageBus.expects(:publish)
- xhr :put, :toggle_status, { post_id: poll.id, poll_name: "poll", status: "closed" }
+ xhr :put, :toggle_status, post_id: poll.id, poll_name: "poll", status: "closed"
expect(response).to be_success
json = ::JSON.parse(response.body)
expect(json["poll"]["status"]).to eq("closed")
@@ -158,7 +158,7 @@ describe ::DiscoursePoll::PollsController do
log_in(:moderator)
MessageBus.expects(:publish)
- xhr :put, :toggle_status, { post_id: poll.id, poll_name: "poll", status: "closed" }
+ xhr :put, :toggle_status, post_id: poll.id, poll_name: "poll", status: "closed"
expect(response).to be_success
json = ::JSON.parse(response.body)
expect(json["poll"]["status"]).to eq("closed")
@@ -166,7 +166,7 @@ describe ::DiscoursePoll::PollsController do
it "ensures post is not trashed" do
poll.trash!
- xhr :put, :toggle_status, { post_id: poll.id, poll_name: "poll", status: "closed" }
+ xhr :put, :toggle_status, post_id: poll.id, poll_name: "poll", status: "closed"
expect(response).not_to be_success
json = ::JSON.parse(response.body)
expect(json["errors"][0]).to eq(I18n.t("poll.post_is_deleted"))
diff --git a/plugins/poll/spec/controllers/posts_controller_spec.rb b/plugins/poll/spec/controllers/posts_controller_spec.rb
index 41bd0a42db8..dd0219f3845 100644
--- a/plugins/poll/spec/controllers/posts_controller_spec.rb
+++ b/plugins/poll/spec/controllers/posts_controller_spec.rb
@@ -12,7 +12,7 @@ describe PostsController do
describe "polls" do
it "works" do
- xhr :post, :create, { title: title, raw: "[poll]\n- A\n- B\n[/poll]" }
+ xhr :post, :create, title: title, raw: "[poll]\n- A\n- B\n[/poll]"
expect(response).to be_success
json = ::JSON.parse(response.body)
expect(json["cooked"]).to match("data-poll-")
@@ -21,7 +21,7 @@ describe PostsController do
it "works on any post" do
post = Fabricate(:post)
- xhr :post, :create, { topic_id: post.topic.id, raw: "[poll]\n- A\n- B\n[/poll]" }
+ xhr :post, :create, topic_id: post.topic.id, raw: "[poll]\n- A\n- B\n[/poll]"
expect(response).to be_success
json = ::JSON.parse(response.body)
expect(json["cooked"]).to match("data-poll-")
@@ -29,14 +29,14 @@ describe PostsController do
end
it "should have different options" do
- xhr :post, :create, { title: title, raw: "[poll]\n- A\n- A\n[/poll]" }
+ xhr :post, :create, title: title, raw: "[poll]\n- A\n- A\n[/poll]"
expect(response).not_to be_success
json = ::JSON.parse(response.body)
expect(json["errors"][0]).to eq(I18n.t("poll.default_poll_must_have_different_options"))
end
it "should have at least 2 options" do
- xhr :post, :create, { title: title, raw: "[poll]\n- A\n[/poll]" }
+ xhr :post, :create, title: title, raw: "[poll]\n- A\n[/poll]"
expect(response).not_to be_success
json = ::JSON.parse(response.body)
expect(json["errors"][0]).to eq(I18n.t("poll.default_poll_must_have_at_least_2_options"))
@@ -47,7 +47,7 @@ describe PostsController do
(SiteSetting.poll_maximum_options + 1).times { |n| raw << "\n- #{n}" }
raw << "\n[/poll]"
- xhr :post, :create, { title: title, raw: raw }
+ xhr :post, :create, title: title, raw: raw
expect(response).not_to be_success
json = ::JSON.parse(response.body)
@@ -55,14 +55,14 @@ describe PostsController do
end
it "should have valid parameters" do
- xhr :post, :create, { title: title, raw: "[poll type=multiple min=5]\n- A\n- B\n[/poll]" }
+ xhr :post, :create, title: title, raw: "[poll type=multiple min=5]\n- A\n- B\n[/poll]"
expect(response).not_to be_success
json = ::JSON.parse(response.body)
expect(json["errors"][0]).to eq(I18n.t("poll.default_poll_with_multiple_choices_has_invalid_parameters"))
end
it "prevents self-xss" do
- xhr :post, :create, { title: title, raw: "[poll name=]\n- A\n- B\n[/poll]" }
+ xhr :post, :create, title: title, raw: "[poll name=]\n- A\n- B\n[/poll]"
expect(response).to be_success
json = ::JSON.parse(response.body)
expect(json["cooked"]).to match("data-poll-")
@@ -71,7 +71,7 @@ describe PostsController do
end
it "also works whe there is a link starting with '[poll'" do
- xhr :post, :create, { title: title, raw: "[Polls are awesome](/foobar)\n[poll]\n- A\n- B\n[/poll]" }
+ xhr :post, :create, title: title, raw: "[Polls are awesome](/foobar)\n[poll]\n- A\n- B\n[/poll]"
expect(response).to be_success
json = ::JSON.parse(response.body)
expect(json["cooked"]).to match("data-poll-")
@@ -79,7 +79,7 @@ describe PostsController do
end
it "prevents pollception" do
- xhr :post, :create, { title: title, raw: "[poll name=1]\n- A\n[poll name=2]\n- B\n- C\n[/poll]\n- D\n[/poll]" }
+ xhr :post, :create, title: title, raw: "[poll name=1]\n- A\n[poll name=2]\n- B\n- C\n[/poll]\n- D\n[/poll]"
expect(response).to be_success
json = ::JSON.parse(response.body)
expect(json["cooked"]).to match("data-poll-")
@@ -93,13 +93,13 @@ describe PostsController do
let(:post_id) do
freeze_time(4.minutes.ago) do
- xhr :post, :create, { title: title, raw: "[poll]\n- A\n- B\n[/poll]" }
+ xhr :post, :create, title: title, raw: "[poll]\n- A\n- B\n[/poll]"
::JSON.parse(response.body)["id"]
end
end
it "can be changed" do
- xhr :put, :update, { id: post_id, post: { raw: "[poll]\n- A\n- B\n- C\n[/poll]" } }
+ xhr :put, :update, id: post_id, post: { raw: "[poll]\n- A\n- B\n- C\n[/poll]" }
expect(response).to be_success
json = ::JSON.parse(response.body)
expect(json["post"]["polls"]["poll"]["options"][2]["html"]).to eq("C")
@@ -107,7 +107,7 @@ describe PostsController do
it "resets the votes" do
DiscoursePoll::Poll.vote(post_id, "poll", ["5c24fc1df56d764b550ceae1b9319125"], user)
- xhr :put, :update, { id: post_id, post: { raw: "[poll]\n- A\n- B\n- C\n[/poll]" } }
+ xhr :put, :update, id: post_id, post: { raw: "[poll]\n- A\n- B\n- C\n[/poll]" }
expect(response).to be_success
json = ::JSON.parse(response.body)
expect(json["post"]["polls_votes"]).to_not be
@@ -123,7 +123,7 @@ describe PostsController do
let(:post_id) do
freeze_time(6.minutes.ago) do
- xhr :post, :create, { title: title, raw: poll }
+ xhr :post, :create, title: title, raw: poll
::JSON.parse(response.body)["id"]
end
end
@@ -137,7 +137,7 @@ describe PostsController do
describe "with no vote" do
it "OP can change the options" do
- xhr :put, :update, { id: post_id, post: { raw: new_option } }
+ xhr :put, :update, id: post_id, post: { raw: new_option }
expect(response).to be_success
json = ::JSON.parse(response.body)
expect(json["post"]["polls"]["poll"]["options"][1]["html"]).to eq("C")
@@ -145,14 +145,14 @@ describe PostsController do
it "staff can change the options" do
log_in_user(Fabricate(:moderator))
- xhr :put, :update, { id: post_id, post: { raw: new_option } }
+ xhr :put, :update, id: post_id, post: { raw: new_option }
expect(response).to be_success
json = ::JSON.parse(response.body)
expect(json["post"]["polls"]["poll"]["options"][1]["html"]).to eq("C")
end
it "support changes on the post" do
- xhr :put, :update, { id: post_id, post: { raw: updated } }
+ xhr :put, :update, id: post_id, post: { raw: updated }
expect(response).to be_success
json = ::JSON.parse(response.body)
expect(json["post"]["cooked"]).to match("before")
@@ -167,7 +167,7 @@ describe PostsController do
end
it "OP cannot change the options" do
- xhr :put, :update, { id: post_id, post: { raw: new_option } }
+ xhr :put, :update, id: post_id, post: { raw: new_option }
expect(response).not_to be_success
json = ::JSON.parse(response.body)
expect(json["errors"][0]).to eq(I18n.t(
@@ -178,7 +178,7 @@ describe PostsController do
it "staff can change the options and votes are merged" do
log_in_user(Fabricate(:moderator))
- xhr :put, :update, { id: post_id, post: { raw: new_option } }
+ xhr :put, :update, id: post_id, post: { raw: new_option }
expect(response).to be_success
json = ::JSON.parse(response.body)
expect(json["post"]["polls"]["poll"]["options"][1]["html"]).to eq("C")
@@ -190,10 +190,10 @@ describe PostsController do
it "staff can change the options and anonymous votes are merged" do
post = Post.find_by(id: post_id)
default_poll = post.custom_fields["polls"]["poll"]
- add_anonymous_votes(post, default_poll, 7, {"5c24fc1df56d764b550ceae1b9319125" => 7})
+ add_anonymous_votes(post, default_poll, 7, "5c24fc1df56d764b550ceae1b9319125" => 7)
log_in_user(Fabricate(:moderator))
- xhr :put, :update, { id: post_id, post: { raw: new_option } }
+ xhr :put, :update, id: post_id, post: { raw: new_option }
expect(response).to be_success
json = ::JSON.parse(response.body)
@@ -204,7 +204,7 @@ describe PostsController do
end
it "support changes on the post" do
- xhr :put, :update, { id: post_id, post: { raw: updated } }
+ xhr :put, :update, id: post_id, post: { raw: updated }
expect(response).to be_success
json = ::JSON.parse(response.body)
expect(json["post"]["cooked"]).to match("before")
@@ -221,14 +221,14 @@ describe PostsController do
describe "named polls" do
it "should have different options" do
- xhr :post, :create, { title: title, raw: "[poll name=""foo""]\n- A\n- A\n[/poll]" }
+ xhr :post, :create, title: title, raw: "[poll name=""foo""]\n- A\n- A\n[/poll]"
expect(response).not_to be_success
json = ::JSON.parse(response.body)
expect(json["errors"][0]).to eq(I18n.t("poll.named_poll_must_have_different_options", name: "foo"))
end
it "should have at least 2 options" do
- xhr :post, :create, { title: title, raw: "[poll name='foo']\n- A\n[/poll]" }
+ xhr :post, :create, title: title, raw: "[poll name='foo']\n- A\n[/poll]"
expect(response).not_to be_success
json = ::JSON.parse(response.body)
expect(json["errors"][0]).to eq(I18n.t("poll.named_poll_must_have_at_least_2_options", name: "foo"))
@@ -239,7 +239,7 @@ describe PostsController do
describe "multiple polls" do
it "works" do
- xhr :post, :create, { title: title, raw: "[poll]\n- A\n- B\n[/poll]\n[poll name=foo]\n- A\n- B\n[/poll]" }
+ xhr :post, :create, title: title, raw: "[poll]\n- A\n- B\n[/poll]\n[poll name=foo]\n- A\n- B\n[/poll]"
expect(response).to be_success
json = ::JSON.parse(response.body)
expect(json["cooked"]).to match("data-poll-")
@@ -248,14 +248,14 @@ describe PostsController do
end
it "should have a name" do
- xhr :post, :create, { title: title, raw: "[poll]\n- A\n- B\n[/poll]\n[poll]\n- A\n- B\n[/poll]" }
+ xhr :post, :create, title: title, raw: "[poll]\n- A\n- B\n[/poll]\n[poll]\n- A\n- B\n[/poll]"
expect(response).not_to be_success
json = ::JSON.parse(response.body)
expect(json["errors"][0]).to eq(I18n.t("poll.multiple_polls_without_name"))
end
it "should have unique name" do
- xhr :post, :create, { title: title, raw: "[poll name=foo]\n- A\n- B\n[/poll]\n[poll name=foo]\n- A\n- B\n[/poll]" }
+ xhr :post, :create, title: title, raw: "[poll name=foo]\n- A\n- B\n[/poll]\n[poll name=foo]\n- A\n- B\n[/poll]"
expect(response).not_to be_success
json = ::JSON.parse(response.body)
expect(json["errors"][0]).to eq(I18n.t("poll.multiple_polls_with_same_name", name: "foo"))
diff --git a/plugins/poll/spec/integration/poll_endpoints_spec.rb b/plugins/poll/spec/integration/poll_endpoints_spec.rb
index 47c6080585d..67e247e4f21 100644
--- a/plugins/poll/spec/integration/poll_endpoints_spec.rb
+++ b/plugins/poll/spec/integration/poll_endpoints_spec.rb
@@ -13,10 +13,8 @@ describe "DiscoursePoll endpoints" do
user
)
- get "/polls/voters.json", {
- post_id: post.id,
- poll_name: DiscoursePoll::DEFAULT_POLL_NAME
- }
+ get "/polls/voters.json", post_id: post.id,
+ poll_name: DiscoursePoll::DEFAULT_POLL_NAME
expect(response.status).to eq(200)
@@ -36,11 +34,9 @@ describe "DiscoursePoll endpoints" do
user
)
- get "/polls/voters.json", {
- post_id: post.id,
- poll_name: DiscoursePoll::DEFAULT_POLL_NAME,
- option_id: 'e89dec30bbd9bf50fabf6a05b4324edf'
- }
+ get "/polls/voters.json", post_id: post.id,
+ poll_name: DiscoursePoll::DEFAULT_POLL_NAME,
+ option_id: 'e89dec30bbd9bf50fabf6a05b4324edf'
expect(response.status).to eq(200)
@@ -57,7 +53,7 @@ describe "DiscoursePoll endpoints" do
describe 'when post_id is blank' do
it 'should raise the right error' do
- expect { get "/polls/voters.json", { poll_name: DiscoursePoll::DEFAULT_POLL_NAME } }
+ expect { get "/polls/voters.json", poll_name: DiscoursePoll::DEFAULT_POLL_NAME }
.to raise_error(ActionController::ParameterMissing)
end
end
@@ -65,17 +61,15 @@ describe "DiscoursePoll endpoints" do
describe 'when post_id is not valid' do
it 'should raise the right error' do
expect do
- get "/polls/voters.json", {
- post_id: -1,
- poll_name: DiscoursePoll::DEFAULT_POLL_NAME
- }
+ get "/polls/voters.json", post_id: -1,
+ poll_name: DiscoursePoll::DEFAULT_POLL_NAME
end.to raise_error(Discourse::InvalidParameters, 'post_id is invalid')
end
end
describe 'when poll_name is blank' do
it 'should raise the right error' do
- expect { get "/polls/voters.json", { post_id: post.id } }
+ expect { get "/polls/voters.json", post_id: post.id }
.to raise_error(ActionController::ParameterMissing)
end
end
@@ -101,10 +95,8 @@ describe "DiscoursePoll endpoints" do
user
)
- get "/polls/voters.json", {
- post_id: post.id,
- poll_name: DiscoursePoll::DEFAULT_POLL_NAME
- }
+ get "/polls/voters.json", post_id: post.id,
+ poll_name: DiscoursePoll::DEFAULT_POLL_NAME
expect(response.status).to eq(200)
diff --git a/plugins/poll/spec/lib/polls_validator_spec.rb b/plugins/poll/spec/lib/polls_validator_spec.rb
index 629fc0527af..a4d6fc27b63 100644
--- a/plugins/poll/spec/lib/polls_validator_spec.rb
+++ b/plugins/poll/spec/lib/polls_validator_spec.rb
@@ -71,7 +71,6 @@ describe ::DiscoursePoll::PollsValidator do
)
end
-
it 'should ensure that polls have at least 2 options' do
raw = <<~RAW
[poll]
diff --git a/script/bench.rb b/script/bench.rb
index 9de35492a8c..84893f5ebda 100644
--- a/script/bench.rb
+++ b/script/bench.rb
@@ -64,7 +64,6 @@ end
@timings = {}
-
def measure(name)
start = Time.now
yield
@@ -100,7 +99,6 @@ unless $? == 0
abort "Apache Bench is not installed. Try: apt-get install apache2-utils or brew install ab"
end
-
unless File.exists?("config/database.yml")
puts "Copying database.yml.development.sample to database.yml"
`cp config/database.yml.development-sample config/database.yml`
@@ -108,7 +106,6 @@ end
ENV["RAILS_ENV"] = "profile"
-
discourse_env_vars = %w(DISCOURSE_DUMP_HEAP RUBY_GC_HEAP_INIT_SLOTS RUBY_GC_HEAP_FREE_SLOTS RUBY_GC_HEAP_GROWTH_FACTOR RUBY_GC_HEAP_GROWTH_MAX_SLOTS RUBY_GC_MALLOC_LIMIT RUBY_GC_OLDMALLOC_LIMIT RUBY_GC_MALLOC_LIMIT_MAX RUBY_GC_OLDMALLOC_LIMIT_MAX RUBY_GC_MALLOC_LIMIT_GROWTH_FACTOR RUBY_GC_OLDMALLOC_LIMIT_GROWTH_FACTOR RUBY_GC_HEAP_OLDOBJECT_LIMIT_FACTOR)
if @include_env
@@ -124,7 +121,7 @@ else
end
end
-def port_available? port
+def port_available?(port)
server = TCPServer.open("0.0.0.0", port)
server.close
true
@@ -174,13 +171,14 @@ begin
puts "precompiling assets"
run("bundle exec rake assets:precompile")
- pid = if @unicorn
- ENV['UNICORN_PORT'] = @port.to_s
- FileUtils.mkdir_p(File.join('tmp', 'pids'))
- spawn("bundle exec unicorn -c config/unicorn.conf.rb")
- else
- spawn("bundle exec thin start -p #{@port}")
- end
+ pid =
+ if @unicorn
+ ENV['UNICORN_PORT'] = @port.to_s
+ FileUtils.mkdir_p(File.join('tmp', 'pids'))
+ spawn("bundle exec unicorn -c config/unicorn.conf.rb")
+ else
+ spawn("bundle exec thin start -p #{@port}")
+ end
while port_available? @port
sleep 1
@@ -199,7 +197,7 @@ begin
# ["user", "/u/admin1/activity"],
]
- tests = tests.map{|k,url| ["#{k}_admin", "#{url}#{append}"]} + tests
+ tests = tests.map { |k, url| ["#{k}_admin", "#{url}#{append}"] } + tests
# NOTE: we run the most expensive page first in the bench
@@ -210,15 +208,13 @@ begin
a[50] < b[50] ? a : b
end
-
results = {}
@best_of.times do
tests.each do |name, url|
- results[name] = best_of(bench(url, name),results[name])
+ results[name] = best_of(bench(url, name), results[name])
end
end
-
puts "Your Results: (note for timings- percentile is first, duration is second in millisecs)"
# Prevent using external facts because it breaks when running in the
@@ -226,8 +222,8 @@ begin
Facter::Util::Config.external_facts_dirs = []
facts = Facter.to_hash
- facts.delete_if{|k,v|
- !["operatingsystem","architecture","kernelversion",
+ facts.delete_if { |k, v|
+ !["operatingsystem", "architecture", "kernelversion",
"memorysize", "physicalprocessorcount", "processor0",
"virtual"].include?(k)
}
@@ -238,15 +234,12 @@ begin
YAML.load `ruby script/memstats.rb #{pid} --yaml`
end
-
mem = get_mem(pid)
- results = results.merge({
- "timings" => @timings,
- "ruby-version" => "#{RUBY_VERSION}-p#{RUBY_PATCHLEVEL}",
- "rss_kb" => mem["rss_kb"],
- "pss_kb" => mem["pss_kb"]
- }).merge(facts)
+ results = results.merge("timings" => @timings,
+ "ruby-version" => "#{RUBY_VERSION}-p#{RUBY_PATCHLEVEL}",
+ "rss_kb" => mem["rss_kb"],
+ "pss_kb" => mem["pss_kb"]).merge(facts)
if @unicorn
child_pids = `ps --ppid #{pid} | awk '{ print $1; }' | grep -v PID`.split("\n")
@@ -270,7 +263,7 @@ begin
end
if @result_file
- File.open(@result_file,"wb") do |f|
+ File.open(@result_file, "wb") do |f|
f.write(results)
end
end
diff --git a/script/benchmarks/markdown/bench.rb b/script/benchmarks/markdown/bench.rb
index a69318ec19d..74926c95a2a 100644
--- a/script/benchmarks/markdown/bench.rb
+++ b/script/benchmarks/markdown/bench.rb
@@ -4,7 +4,6 @@ require File.expand_path('../../../../config/environment', __FILE__)
# set any flags here
# MiniRacer::Platform.set_flags! :noturbo
-
tests = [
["tiny post", "**hello**"],
["giant post", File.read("giant_post.md")],
@@ -28,7 +27,7 @@ PrettyText.v8.eval("window.commonmark = window.markdownit('commonmark')")
# exit
Benchmark.ips do |x|
- [true,false].each do |sanitize|
+ [true, false].each do |sanitize|
tests.each do |test, text|
x.report("#{test} sanitize: #{sanitize}") do
PrettyText.markdown(text, sanitize: sanitize)
@@ -36,7 +35,6 @@ Benchmark.ips do |x|
end
end
-
tests.each do |test, text|
x.report("markdown it no extensions commonmark #{test}") do
PrettyText.v8.eval("window.commonmark.render(#{text.inspect})")
@@ -44,7 +42,6 @@ Benchmark.ips do |x|
end
end
-
# 27-07-2017 - Sam's NUC
#
# v8 5.7
@@ -131,4 +128,3 @@ end
# 1.448k (± 6.7%) i/s - 7.239k in 5.024831s
# markdown it no extensions commonmark lots of mentions
# 1.986k (± 5.2%) i/s - 9.990k in 5.044624s
-
diff --git a/script/bulk_import/base.rb b/script/bulk_import/base.rb
index 377eb8b20a5..7ed396c2fa7 100644
--- a/script/bulk_import/base.rb
+++ b/script/bulk_import/base.rb
@@ -10,7 +10,7 @@ module BulkImport; end
class BulkImport::Base
NOW ||= "now()".freeze
- PRIVATE_OFFSET ||= 2 ** 30
+ PRIVATE_OFFSET ||= 2**30
def initialize
db = ActiveRecord::Base.connection_config
@@ -477,7 +477,7 @@ class BulkImport::Base
start = Time.now
imported_ids = []
process_method_name = "process_#{name}"
- sql = "COPY #{name.pluralize} (#{columns.map {|c| "\"#{c}\""}.join(",")}) FROM STDIN"
+ sql = "COPY #{name.pluralize} (#{columns.map { |c| "\"#{c}\"" }.join(",")}) FROM STDIN"
@raw_connection.copy_data(sql, @encoder) do
rows.each do |row|
diff --git a/script/diff_heaps.rb b/script/diff_heaps.rb
index e854f2f65ae..947f4b02b36 100644
--- a/script/diff_heaps.rb
+++ b/script/diff_heaps.rb
@@ -30,11 +30,10 @@ end
diff.group_by do |x|
[x["type"], x["file"], x["line"]]
-end.map {|x,y|
- [x, y.count]
-}.sort{ |a,b|
- b[1] <=> a[1]
-}.each{ |x,y|
+end.map { |x, y|
+ [x, y.count]
+}.sort { |a, b|
+ b[1] <=> a[1]
+}.each { |x, y|
puts "Leaked #{y} #{x[0]} objects at: #{x[1]}:#{x[2]}"
}
-
diff --git a/script/discourse b/script/discourse
index c07d6685ef4..7bbb2714abe 100755
--- a/script/discourse
+++ b/script/discourse
@@ -28,8 +28,8 @@ class DiscourseCLI < Thor
discourse remap --regex "\[\/?color(=[^\]]*)*]" "" # removing "color" bbcodes
LONGDESC
- option :global, :type => :boolean
- option :regex, :type => :boolean
+ option :global, type: :boolean
+ option :regex, type: :boolean
def remap(from, to)
load_rails
@@ -48,7 +48,7 @@ class DiscourseCLI < Thor
if options[:global]
RailsMultisite::ConnectionManagement.each_connection do |db|
- puts "","Remapping tables on #{db}...",""
+ puts "", "Remapping tables on #{db}...", ""
do_remap(from, to, options[:regex])
end
else
@@ -85,7 +85,7 @@ class DiscourseCLI < Thor
end
desc "restore", "Restore a Discourse backup"
- def restore(filename=nil)
+ def restore(filename = nil)
if !filename
puts "You must provide a filename to restore. Did you mean one of the following?\n\n"
@@ -175,7 +175,7 @@ class DiscourseCLI < Thor
end
desc "export_category", "Export a category, all its topics, and all users who posted in those topics"
- def export_category(category_id, filename=nil)
+ def export_category(category_id, filename = nil)
raise "Category id argument is missing!" unless category_id
load_rails
@@ -225,7 +225,7 @@ class DiscourseCLI < Thor
require File.expand_path(File.dirname(__FILE__) + "/../lib/import_export/import_export")
end
- def do_remap(from, to, regex=false)
+ def do_remap(from, to, regex = false)
sql = "SELECT table_name, column_name
FROM information_schema.columns
WHERE table_schema='public' and (data_type like 'char%' or data_type like 'text%') and is_updatable = 'YES'"
@@ -257,7 +257,6 @@ WHERE table_schema='public' and (data_type like 'char%' or data_type like 'text%
end
end
-
end
DiscourseCLI.start(ARGV)
diff --git a/script/import_scripts/askbot.rb b/script/import_scripts/askbot.rb
index ec0cfc7cb8c..5620f0aa294 100644
--- a/script/import_scripts/askbot.rb
+++ b/script/import_scripts/askbot.rb
@@ -24,11 +24,11 @@ class ImportScripts::MyAskBot < ImportScripts::Base
@tagmap = []
@td = PG::TextDecoder::TimestampWithTimeZone.new
@client = PG.connect(
- :dbname => DB_NAME,
- :host => DB_HOST,
- :port => DB_PORT,
- :user => DB_USER,
- :password => DB_PASS
+ dbname: DB_NAME,
+ host: DB_HOST,
+ port: DB_PORT,
+ user: DB_USER,
+ password: DB_PASS
)
end
@@ -79,7 +79,7 @@ class ImportScripts::MyAskBot < ImportScripts::Base
tid = tag["thread_id"].to_i
tnm = tag["name"].downcase
if @tagmap[tid]
- @tagmap[tid].push( tnm )
+ @tagmap[tid].push(tnm)
else
@tagmap[tid] = [ tnm ]
end
@@ -110,7 +110,7 @@ class ImportScripts::MyAskBot < ImportScripts::Base
break if users.ntuples() < 1
- next if all_records_exist? :users, users.map {|u| u["id"].to_i}
+ next if all_records_exist? :users, users.map { |u| u["id"].to_i }
create_users(users, total: total_count, offset: offset) do |user|
{
@@ -155,7 +155,7 @@ class ImportScripts::MyAskBot < ImportScripts::Base
break if posts.ntuples() < 1
- next if all_records_exist? :posts, posts.map {|p| p["id"].to_i}
+ next if all_records_exist? :posts, posts.map { |p| p["id"].to_i }
create_posts(posts, total: post_count, offset: offset) do |post|
pid = post["id"]
@@ -174,7 +174,7 @@ class ImportScripts::MyAskBot < ImportScripts::Base
id: pid,
title: post["title"],
category: cat,
- custom_fields: {import_id: pid, import_thread_id: tid, import_tags: tags},
+ custom_fields: { import_id: pid, import_thread_id: tid, import_tags: tags },
user_id: user_id_from_imported_user_id(post["author_id"]) || Discourse::SYSTEM_USER_ID,
created_at: Time.zone.at(@td.decode(post["added_at"])),
raw: post["text"],
@@ -210,7 +210,7 @@ class ImportScripts::MyAskBot < ImportScripts::Base
break if posts.ntuples() < 1
- next if all_records_exist? :posts, posts.map {|p| p["id"].to_i}
+ next if all_records_exist? :posts, posts.map { |p| p["id"].to_i }
create_posts(posts, total: post_count, offset: offset) do |post|
tid = post["thread_id"].to_i
@@ -220,7 +220,7 @@ class ImportScripts::MyAskBot < ImportScripts::Base
{
id: pid,
topic_id: parent[:topic_id],
- custom_fields: {import_id: pid},
+ custom_fields: { import_id: pid },
user_id: user_id_from_imported_user_id(post["author_id"]) || Discourse::SYSTEM_USER_ID,
created_at: Time.zone.at(@td.decode(post["added_at"])),
raw: post["text"]
@@ -230,47 +230,48 @@ class ImportScripts::MyAskBot < ImportScripts::Base
end
def post_process_posts
- puts "", "Postprocessing posts..."
- current = 0
- max = Post.count
- # Rewrite internal links; e.g.
- # ask.cvxr.com/question/(\d+)/[^'"}]*
- # I am sure this is incomplete, but we didn't make heavy use of internal
- # links on our site.
- tmp = Regexp.quote("http://" << OLD_SITE)
- r1 = /"(#{tmp})?\/question\/(\d+)\/[a-zA-Z-]*\/?"/
- r2 = /\((#{tmp})?\/question\/(\d+)\/[a-zA-Z-]*\/?\)/
- r3 = /#tmp\/question\/(\d+)\/[a-zA-Z-]*\/?>?/
- Post.find_each do |post|
- raw = post.raw.gsub(r1) do
- if topic = topic_lookup_from_imported_post_id($2)
- "\"#{topic[:url]}\""
- else
- $&
- end
+ puts "", "Postprocessing posts..."
+ current = 0
+ max = Post.count
+ # Rewrite internal links; e.g.
+ # ask.cvxr.com/question/(\d+)/[^'"}]*
+ # I am sure this is incomplete, but we didn't make heavy use of internal
+ # links on our site.
+ tmp = Regexp.quote("http://" << OLD_SITE)
+ r1 = /"(#{tmp})?\/question\/(\d+)\/[a-zA-Z-]*\/?"/
+ r2 = /\((#{tmp})?\/question\/(\d+)\/[a-zA-Z-]*\/?\)/
+ r3 = /#tmp\/question\/(\d+)\/[a-zA-Z-]*\/?>?/
+ Post.find_each do |post|
+ raw = post.raw.gsub(r1) do
+ if topic = topic_lookup_from_imported_post_id($2)
+ "\"#{topic[:url]}\""
+ else
+ $&
end
- raw = raw.gsub(r2) do
- if topic = topic_lookup_from_imported_post_id($2)
- "(#{topic[:url]})"
- else
- $&
- end
- end
- raw = raw.gsub(r3) do
- if topic = topic_lookup_from_imported_post_id($1)
- trec = Topic.find_by(id: topic[:topic_id])
- "[#{trec.title}](#{topic[:url]})"
- else
- $&
- end
- end
- if raw != post.raw
- post.raw = raw
- post.save
- end
- print_status(current += 1, max)
end
+ raw = raw.gsub(r2) do
+ if topic = topic_lookup_from_imported_post_id($2)
+ "(#{topic[:url]})"
+ else
+ $&
+ end
+ end
+ raw = raw.gsub(r3) do
+ if topic = topic_lookup_from_imported_post_id($1)
+ trec = Topic.find_by(id: topic[:topic_id])
+ "[#{trec.title}](#{topic[:url]})"
+ else
+ $&
+ end
+ end
+
+ if raw != post.raw
+ post.raw = raw
+ post.save
+ end
+ print_status(current += 1, max)
end
end
+end
ImportScripts::MyAskBot.new.perform
diff --git a/script/import_scripts/base.rb b/script/import_scripts/base.rb
index 4acd2b76a0c..b156a0f70c8 100644
--- a/script/import_scripts/base.rb
+++ b/script/import_scripts/base.rb
@@ -30,7 +30,7 @@ class ImportScripts::Base
@bbcode_to_md = true if use_bbcode_to_md?
@site_settings_during_import = {}
@old_site_settings = {}
- @start_times = {import: Time.now}
+ @start_times = { import: Time.now }
end
def preload_i18n
@@ -56,7 +56,7 @@ class ImportScripts::Base
reset_topic_counters
elapsed = Time.now - @start_times[:import]
- puts '', '', 'Done (%02dh %02dmin %02dsec)' % [elapsed/3600, elapsed/60%60, elapsed%60]
+ puts '', '', 'Done (%02dh %02dmin %02dsec)' % [elapsed / 3600, elapsed / 60 % 60, elapsed % 60]
ensure
reset_site_settings
@@ -118,7 +118,7 @@ class ImportScripts::Base
delegate method_name, to: :@lookup
end
- def create_admin(opts={})
+ def create_admin(opts = {})
admin = User.new
admin.email = opts[:email] || "sam.saffron@gmail.com"
admin.username = opts[:username] || "sam"
@@ -140,7 +140,7 @@ class ImportScripts::Base
# Required fields are :id and :name, where :id is the id of the
# group in the original datasource. The given id will not be used
# to create the Discourse group record.
- def create_groups(results, opts={})
+ def create_groups(results, opts = {})
created = 0
skipped = 0
failed = 0
@@ -171,12 +171,12 @@ class ImportScripts::Base
end
def create_group(opts, import_id)
- opts = opts.dup.tap {|o| o.delete(:id) }
+ opts = opts.dup.tap { |o| o.delete(:id) }
import_name = opts[:name]
opts[:name] = UserNameSuggester.suggest(import_name)
existing = Group.where(name: opts[:name]).first
- return existing if existing and existing.custom_fields["import_id"].to_i == import_id.to_i
+ return existing if existing && existing.custom_fields["import_id"].to_i == (import_id.to_i)
g = existing || Group.new(opts)
g.custom_fields["import_id"] = import_id
g.custom_fields["import_name"] = import_name
@@ -196,8 +196,8 @@ class ImportScripts::Base
existing = "#{type.to_s.classify}CustomField".constantize
existing = existing.where(name: 'import_id')
- .joins('JOIN import_ids ON val = value')
- .count
+ .joins('JOIN import_ids ON val = value')
+ .count
if existing == import_ids.length
puts "Skipping #{import_ids.length} already imported #{type}"
return true
@@ -216,7 +216,7 @@ class ImportScripts::Base
# Required fields are :id and :email, where :id is the id of the
# user in the original datasource. The given id will not be used to
# create the Discourse user record.
- def create_users(results, opts={})
+ def create_users(results, opts = {})
created = 0
skipped = 0
failed = 0
@@ -422,7 +422,7 @@ class ImportScripts::Base
# Attributes will be passed to the PostCreator.
# Topics should give attributes title and category.
# Replies should provide topic_id. Use topic_lookup_from_imported_post_id to find the topic.
- def create_posts(results, opts={})
+ def create_posts(results, opts = {})
skipped = 0
created = 0
total = opts[:total] || results.size
@@ -502,7 +502,7 @@ class ImportScripts::Base
# Block should return a hash with the attributes for the bookmark.
# Required fields are :user_id and :post_id, where both ids are
# the values in the original datasource.
- def create_bookmarks(results, opts={})
+ def create_bookmarks(results, opts = {})
created = 0
skipped = 0
total = opts[:total] || results.size
@@ -539,7 +539,7 @@ class ImportScripts::Base
[created, skipped]
end
- def close_inactive_topics(opts={})
+ def close_inactive_topics(opts = {})
num_days = opts[:days] || 30
puts '', "Closing topics that have been inactive for more than #{num_days} days."
@@ -775,7 +775,7 @@ class ImportScripts::Base
end
def get_start_time(key)
- @start_times.fetch(key) {|k| @start_times[k] = Time.now}
+ @start_times.fetch(key) { |k| @start_times[k] = Time.now }
end
def batches(batch_size)
diff --git a/script/import_scripts/base/csv_helper.rb b/script/import_scripts/base/csv_helper.rb
index c505ce71015..e70f850bc4f 100644
--- a/script/import_scripts/base/csv_helper.rb
+++ b/script/import_scripts/base/csv_helper.rb
@@ -10,7 +10,7 @@ module ImportScripts
end
def initialize(cols)
- cols.each_with_index do |col,idx|
+ cols.each_with_index do |col, idx|
self.class.send(:define_method, col.downcase.gsub(/[\W]/, '_').squeeze('_')) do
@row[idx]
end
@@ -72,4 +72,4 @@ module ImportScripts
end
end
end
-end
\ No newline at end of file
+end
diff --git a/script/import_scripts/bbpress.rb b/script/import_scripts/bbpress.rb
index e91b8ef8ae5..7b3a35afd5c 100644
--- a/script/import_scripts/bbpress.rb
+++ b/script/import_scripts/bbpress.rb
@@ -1,7 +1,6 @@
require 'mysql2'
require File.expand_path(File.dirname(__FILE__) + "/base.rb")
-
# Before running this script, paste these lines into your shell,
# then use arrow keys to edit the values
=begin
@@ -125,7 +124,7 @@ class ImportScripts::Bbpress < ImportScripts::Base
end
# gather every existent username
- anon_posts.each do |id,post|
+ anon_posts.each do |id, post|
anon_names[post['name']] = Hash.new if not anon_names[post['name']]
# overwriting email address, one user can only use one email address
anon_names[post['name']]['email'] = post['email']
@@ -133,7 +132,7 @@ class ImportScripts::Bbpress < ImportScripts::Base
end
# make sure every user name has a unique email address
- anon_names.each do |k,name|
+ anon_names.each do |k, name|
if not emails.include? name['email']
emails.push ( name['email'])
else
@@ -141,7 +140,6 @@ class ImportScripts::Bbpress < ImportScripts::Base
end
end
-
create_users(anon_names) do |k, n|
{
id: k,
diff --git a/script/import_scripts/bespoke_1.rb b/script/import_scripts/bespoke_1.rb
index 5cd1744212a..0026ff30ace 100644
--- a/script/import_scripts/bespoke_1.rb
+++ b/script/import_scripts/bespoke_1.rb
@@ -43,7 +43,7 @@ class ImportScripts::Bespoke < ImportScripts::Base
end
def initialize(cols)
- cols.each_with_index do |col,idx|
+ cols.each_with_index do |col, idx|
self.class.send(:define_method, col) do
@row[idx]
end
@@ -71,7 +71,7 @@ class ImportScripts::Bespoke < ImportScripts::Base
File.open(filename).each_line do |line|
# escaping is mental here
- line.gsub!(/\\(.{1})/){|m| m[-1] == '"'? '""': m[-1]}
+ line.gsub!(/\\(.{1})/) { |m| m[-1] == '"' ? '""' : m[-1] }
line.strip!
current_row << "\n" unless current_row.empty?
@@ -119,7 +119,7 @@ class ImportScripts::Bespoke < ImportScripts::Base
end
def total_rows(table)
- File.foreach("#{@path}/#{table}.csv").inject(0) {|c, line| c+1} - 1
+ File.foreach("#{@path}/#{table}.csv").inject(0) { |c, line| c + 1 } - 1
end
def import_users
@@ -169,7 +169,7 @@ class ImportScripts::Bespoke < ImportScripts::Base
def import_categories
rows = []
csv_parse("categories") do |row|
- rows << {id: row.id, name: row.name, description: row.description}
+ rows << { id: row.id, name: row.name, description: row.description }
end
create_categories(rows) do |row|
@@ -181,46 +181,46 @@ class ImportScripts::Bespoke < ImportScripts::Base
# purple and #1223f3
raw.gsub!(/\[color=[#a-z0-9]+\]/i, "")
raw.gsub!(/\[\/color\]/i, "")
- raw.gsub!(/\[signature\].+\[\/signature\]/im,"")
+ raw.gsub!(/\[signature\].+\[\/signature\]/im, "")
raw
end
def import_post_batch!(posts, topics, offset, total)
- create_posts(posts, total: total, offset: offset) do |post|
+ create_posts(posts, total: total, offset: offset) do |post|
- mapped = {}
+ mapped = {}
- mapped[:id] = post[:id]
- mapped[:user_id] = user_id_from_imported_user_id(post[:user_id]) || -1
- mapped[:raw] = post[:body]
- mapped[:created_at] = post[:created_at]
+ mapped[:id] = post[:id]
+ mapped[:user_id] = user_id_from_imported_user_id(post[:user_id]) || -1
+ mapped[:raw] = post[:body]
+ mapped[:created_at] = post[:created_at]
- topic = topics[post[:topic_id]]
+ topic = topics[post[:topic_id]]
- unless topic[:post_id]
- mapped[:category] = category_id_from_imported_category_id(topic[:category_id])
- mapped[:title] = post[:title]
- topic[:post_id] = post[:id]
- else
- parent = topic_lookup_from_imported_post_id(topic[:post_id])
- next unless parent
+ unless topic[:post_id]
+ mapped[:category] = category_id_from_imported_category_id(topic[:category_id])
+ mapped[:title] = post[:title]
+ topic[:post_id] = post[:id]
+ else
+ parent = topic_lookup_from_imported_post_id(topic[:post_id])
+ next unless parent
- mapped[:topic_id] = parent[:topic_id]
+ mapped[:topic_id] = parent[:topic_id]
- reply_to_post_id = post_id_from_imported_post_id(post[:reply_id])
- if reply_to_post_id
- reply_to_post_number = @post_number_map[reply_to_post_id]
- if reply_to_post_number && reply_to_post_number > 1
- mapped[:reply_to_post_number] = reply_to_post_number
- end
+ reply_to_post_id = post_id_from_imported_post_id(post[:reply_id])
+ if reply_to_post_id
+ reply_to_post_number = @post_number_map[reply_to_post_id]
+ if reply_to_post_number && reply_to_post_number > 1
+ mapped[:reply_to_post_number] = reply_to_post_number
end
end
-
- next if topic[:deleted] or post[:deleted]
-
- mapped
end
+ next if topic[:deleted] || post[:deleted]
+
+ mapped
+ end
+
posts.clear
end
@@ -262,7 +262,7 @@ class ImportScripts::Bespoke < ImportScripts::Base
created_at: DateTime.parse(row.dcreate)
}
posts << row
- count+=1
+ count += 1
if posts.length > 0 && posts.length % BATCH_SIZE == 0
import_post_batch!(posts, topic_map, count - posts.length, total)
@@ -274,7 +274,6 @@ class ImportScripts::Bespoke < ImportScripts::Base
exit
end
-
end
unless ARGV[0] && Dir.exist?(ARGV[0])
diff --git a/script/import_scripts/discuz_x.rb b/script/import_scripts/discuz_x.rb
index 791b6c38da2..8ee30b359cd 100644
--- a/script/import_scripts/discuz_x.rb
+++ b/script/import_scripts/discuz_x.rb
@@ -176,11 +176,11 @@ class ImportScripts::DiscuzX < ImportScripts::Base
last_posted_at: user['last_posted_at'],
moderator: @moderator_group_id.include?(user['group_id']),
admin: @admin_group_id.include?(user['group_id']),
- website: (user['website'] and user['website'].include?('.')) ? user['website'].strip : ( user['qq'] and user['qq'].strip == user['qq'].strip.to_i and user['qq'].strip.to_i > 10000 ) ? 'http://user.qzone.qq.com/' + user['qq'].strip : nil,
- bio_raw: first_exists((user['bio'] and CGI.unescapeHTML(user['bio'])), user['sightml'], user['spacenote']).strip[0,3000],
- location: first_exists(user['address'], (!user['resideprovince'].blank? ? [user['resideprovince'], user['residecity'], user['residedist'], user['residecommunity']] : [user['birthprovince'], user['birthcity'], user['birthdist'], user['birthcommunity']]).reject{|location|location.blank?}.join(' ')),
+ website: (user['website'] && user['website'].include?('.')) ? user['website'].strip : (user['qq'] && user['qq'].strip == (user['qq'].strip.to_i) && user['qq'].strip.to_i > (10000)) ? 'http://user.qzone.qq.com/' + user['qq'].strip : nil,
+ bio_raw: first_exists((user['bio'] && CGI.unescapeHTML(user['bio'])), user['sightml'], user['spacenote']).strip[0, 3000],
+ location: first_exists(user['address'], (!user['resideprovince'].blank? ? [user['resideprovince'], user['residecity'], user['residedist'], user['residecommunity']] : [user['birthprovince'], user['birthcity'], user['birthdist'], user['birthcommunity']]).reject { |location|location.blank? }.join(' ')),
post_create_action: lambda do |newmember|
- if user['avatar_exists'] == 1 and newmember.uploaded_avatar_id.blank?
+ if user['avatar_exists'] == (1) && newmember.uploaded_avatar_id.blank?
path, filename = discuzx_avatar_fullpath(user['id'])
if path
begin
@@ -199,7 +199,7 @@ class ImportScripts::DiscuzX < ImportScripts::Base
end
end
end
- if !user['spacecss'].blank? and newmember.user_profile.profile_background.blank?
+ if !user['spacecss'].blank? && newmember.user_profile.profile_background.blank?
# profile background
if matched = user['spacecss'].match(/body\s*{[^}]*url\('?(.+?)'?\)/i)
body_background = matched[1].split(ORIGINAL_SITE_PREFIX, 2).last
@@ -234,7 +234,7 @@ class ImportScripts::DiscuzX < ImportScripts::Base
# we don't send email to the unconfirmed user
newmember.update(email_digests: user['email_confirmed'] == 1) if newmember.email_digests
- newmember.update(name: '') if !newmember.name.blank? and newmember.name == newmember.username
+ newmember.update(name: '') if !newmember.name.blank? && newmember.name == (newmember.username)
end
}
end
@@ -259,10 +259,10 @@ class ImportScripts::DiscuzX < ImportScripts::Base
max_position = Category.all.max_by(&:position).position
create_categories(results) do |row|
- next if row['type'] == 'group' or row['status'] == 2 # or row['status'].to_i == 3 # uncomment if you do not want to import groups
+ next if row['type'] == ('group') || row['status'] == (2) # or row['status'].to_i == 3 # uncomment if you do not want to import groups
extra = PHP.unserialize(row['extra']) if !row['extra'].blank?
- if extra and !extra["namecolor"].blank?
- color = extra["namecolor"][1,6]
+ if extra && !extra["namecolor"].blank?
+ color = extra["namecolor"][1, 6]
end
Category.all.max_by(&:position).position
@@ -273,7 +273,7 @@ class ImportScripts::DiscuzX < ImportScripts::Base
description: row['description'],
position: row['position'].to_i + max_position,
color: color,
- suppress_from_homepage: (row['status'] == 0 or row['status'] == 3),
+ suppress_from_homepage: (row['status'] == (0) || row['status'] == (3)),
post_create_action: lambda do |category|
if slug = @category_slug[row['id']]
category.update(slug: slug)
@@ -289,7 +289,7 @@ class ImportScripts::DiscuzX < ImportScripts::Base
if upload
category.logo_url = upload.url
# FIXME: I don't know how to get '/shared' by script. May change to Rails.root
- category.color = Miro::DominantColors.new(File.join('/shared', category.logo_url)).to_hex.first[1,6] if !color
+ category.color = Miro::DominantColors.new(File.join('/shared', category.logo_url)).to_hex.first[1, 6] if !color
category.save!
end
end
@@ -332,10 +332,10 @@ class ImportScripts::DiscuzX < ImportScripts::Base
LIMIT #{BATCH_SIZE}
OFFSET #{offset};
")
- # u.status != -1 AND u.groupid != 4 AND u.groupid != 5 means the user is not locked, access-banned or muted. On real-world Discuz forums, banned users are usually spam bots or expelled users, so they do not need to be imported.
+ # u.status != -1 AND u.groupid != 4 AND u.groupid != 5 means the user is not locked, access-banned or muted. On real-world Discuz forums, banned users are usually spam bots or expelled users, so they do not need to be imported.
break if results.size < 1
- next if all_records_exist? :posts, results.map {|p| p["id"].to_i}
+ next if all_records_exist? :posts, results.map { |p| p["id"].to_i }
create_posts(results, total: total_count, offset: offset) do |m|
skip = false
@@ -364,7 +364,7 @@ class ImportScripts::DiscuzX < ImportScripts::Base
if results.empty?
puts "WARNING: can't find poll options for topic #{m['topic_id']}, skip poll"
else
- mapped[:raw].prepend "[poll#{poll['multiple'] ? ' type=multiple' : ''}#{poll['maxchoices'] > 0 ? " max=#{poll['maxchoices']}" : ''}]\n#{results.map{|option|'- ' + option['polloption']}.join("\n")}\n[/poll]\n"
+ mapped[:raw].prepend "[poll#{poll['multiple'] ? ' type=multiple' : ''}#{poll['maxchoices'] > 0 ? " max=#{poll['maxchoices']}" : ''}]\n#{results.map { |option|'- ' + option['polloption'] }.join("\n")}\n[/poll]\n"
end
end
else
@@ -398,7 +398,7 @@ class ImportScripts::DiscuzX < ImportScripts::Base
end
elsif (m['status'] & 2) >> 1 == 1 # waiting for approve
mapped[:post_create_action] = lambda do |post|
- PostAction.act(Discourse.system_user, post, 6, {take_action: false})
+ PostAction.act(Discourse.system_user, post, 6, take_action: false)
end
end
skip ? nil : mapped
@@ -423,7 +423,7 @@ class ImportScripts::DiscuzX < ImportScripts::Base
break if results.size < 1
- # next if all_records_exist?
+ # next if all_records_exist?
create_bookmarks(results, total: total_count, offset: offset) do |row|
{
@@ -434,7 +434,6 @@ class ImportScripts::DiscuzX < ImportScripts::Base
end
end
-
def import_private_messages
puts '', 'creating private messages'
@@ -494,7 +493,7 @@ class ImportScripts::DiscuzX < ImportScripts::Base
SELECT plid thread_id, uid user_id
FROM #{table_name 'ucenter_pm_members'}
WHERE plid = #{m['thread_id']};
- ").map {|r| r['user_id']}.uniq
+ ").map { |r| r['user_id'] }.uniq
mapped[:target_usernames] = import_user_ids.map! do |import_user_id|
import_user_id.to_s == m['user_id'].to_s ? nil : User.find_by(id: user_id_from_imported_user_id(import_user_id)).try(:username)
@@ -587,7 +586,7 @@ class ImportScripts::DiscuzX < ImportScripts::Base
s.gsub!(/\[img[^\]]*\]https?:\/\/#{ORIGINAL_SITE_PREFIX}\/(.*)\[\/img\]/i, '[x-attach]\1[/x-attach]') # dont convert attachment
s.gsub!(/]*src="https?:\/\/#{ORIGINAL_SITE_PREFIX}\/(.*)".*?>/i, '[x-attach]\1[/x-attach]') # dont convert attachment
s.gsub!(/\[img[^\]]*\]https?:\/\/www\.touhou\.cc\/blog\/(.*)\[\/img\]/i, '[x-attach]../blog/\1[/x-attach]') # site-specific hack
- s.gsub!(/\[img[^\]]*\]https?:\/\/www\.touhou\.cc\/ucenter\/avatar.php\?uid=(\d+)[^\]]*\[\/img\]/i) { "[x-attach]#{discuzx_avatar_fullpath($1,false)[0]}[/x-attach]" } # site-specific hack
+ s.gsub!(/\[img[^\]]*\]https?:\/\/www\.touhou\.cc\/ucenter\/avatar.php\?uid=(\d+)[^\]]*\[\/img\]/i) { "[x-attach]#{discuzx_avatar_fullpath($1, false)[0]}[/x-attach]" } # site-specific hack
s.gsub!(/\[img=(\d+),(\d+)\]([^\]]*)\[\/img\]/i, '')
s.gsub!(/\[img\]([^\]]*)\[\/img\]/i, '')
@@ -671,7 +670,7 @@ class ImportScripts::DiscuzX < ImportScripts::Base
# @someone without the url
s.gsub!(/@\[url=[^\[\]]*?\](\S*)\[\/url\]/i, '@\1')
- s.scan(/http(?:s)?:\/\/#{ORIGINAL_SITE_PREFIX.gsub('.', '\.')}\/[^\[\]\s]*/) {|link|puts "WARNING: post #{import_id} can't replace internal url #{link}"}
+ s.scan(/http(?:s)?:\/\/#{ORIGINAL_SITE_PREFIX.gsub('.', '\.')}\/[^\[\]\s]*/) { |link|puts "WARNING: post #{import_id} can't replace internal url #{link}" }
s.strip
end
@@ -785,7 +784,7 @@ class ImportScripts::DiscuzX < ImportScripts::Base
FROM #{table_name 'forum_attachment'}
WHERE pid = #{post.custom_fields['import_id']}"
if !inline_attachments.empty?
- sql << " AND aid NOT IN (#{inline_attachments.join(',')})"
+ sql << " AND aid NOT IN (#{inline_attachments.join(',')})"
end
results = mysql_query(sql)
@@ -805,7 +804,7 @@ class ImportScripts::DiscuzX < ImportScripts::Base
end
if new_raw != post.raw
- PostRevisor.new(post).revise!(post.user, { raw: new_raw }, { bypass_bump: true, edit_reason: '从 Discuz 中导入附件' })
+ PostRevisor.new(post).revise!(post.user, { raw: new_raw }, bypass_bump: true, edit_reason: '从 Discuz 中导入附件')
end
success_count += 1
@@ -818,7 +817,7 @@ class ImportScripts::DiscuzX < ImportScripts::Base
end
# Create the full path to the discuz avatar specified from user id
- def discuzx_avatar_fullpath(user_id, absolute=true)
+ def discuzx_avatar_fullpath(user_id, absolute = true)
padded_id = user_id.to_s.rjust(9, '0')
part_1 = padded_id[0..2]
@@ -945,7 +944,7 @@ class ImportScripts::DiscuzX < ImportScripts::Base
end
def first_exists(*items)
- items.find{|item|!item.blank?} || ''
+ items.find { |item|!item.blank? } || ''
end
def mysql_query(sql)
diff --git a/script/import_scripts/disqus.rb b/script/import_scripts/disqus.rb
index 6f0929d8588..a73febe4768 100644
--- a/script/import_scripts/disqus.rb
+++ b/script/import_scripts/disqus.rb
@@ -11,7 +11,7 @@ class ImportScripts::Disqus < ImportScripts::Base
def initialize
abort("File '#{IMPORT_FILE}' not found") if !File.exist?(IMPORT_FILE)
- @category = Category.where(name: IMPORT_CATEGORY).first
+ @category = Category.where(name: IMPORT_CATEGORY).first
abort("Category #{IMPORT_CATEGORY} not found") if @category.blank?
@parser = DisqusSAX.new
@@ -135,7 +135,7 @@ class DisqusSAX < Nokogiri::XML::SAX::Document
thread = @threads[id]
thread[:posts] << @post
else
- @thread = {id: id, posts: []}
+ @thread = { id: id, posts: [] }
end
when 'parent'
if @post
@@ -194,7 +194,7 @@ class DisqusSAX < Nokogiri::XML::SAX::Document
end
def inside?(*params)
- return !params.find{|p| !@inside[p]}
+ return !params.find { |p| !@inside[p] }
end
def normalize
@@ -203,7 +203,7 @@ class DisqusSAX < Nokogiri::XML::SAX::Document
# Remove any threads that have no posts
@threads.delete(id)
else
- t[:posts].delete_if {|p| p[:is_spam] == 'true' || p[:is_deleted] == 'true'}
+ t[:posts].delete_if { |p| p[:is_spam] == 'true' || p[:is_deleted] == 'true' }
end
end
diff --git a/script/import_scripts/drupal-6.rb b/script/import_scripts/drupal-6.rb
index 1df513df409..018a1fba904 100644
--- a/script/import_scripts/drupal-6.rb
+++ b/script/import_scripts/drupal-6.rb
@@ -5,7 +5,7 @@ class ImportScripts::Drupal < ImportScripts::Base
DRUPAL_DB = ENV['DRUPAL_DB'] || "newsite3"
VID = ENV['DRUPAL_VID'] || 1
-
+
def initialize
super
@@ -23,7 +23,7 @@ class ImportScripts::Drupal < ImportScripts::Base
def execute
create_users(@client.query("SELECT uid id, name, mail email, created FROM users;")) do |row|
- {id: row['id'], username: row['name'], email: row['email'], created_at: Time.zone.at(row['created'])}
+ { id: row['id'], username: row['name'], email: row['email'], created_at: Time.zone.at(row['created']) }
end
# You'll need to edit the following query for your Drupal install:
@@ -32,7 +32,7 @@ class ImportScripts::Drupal < ImportScripts::Base
# * Table name may be term_data.
# * May need to select a vid other than 1.
create_categories(categories_query) do |c|
- {id: c['tid'], name: c['name'], description: c['description']}
+ { id: c['tid'], name: c['name'], description: c['description'] }
end
# "Nodes" in Drupal are divided into types. Here we import two types,
@@ -65,8 +65,8 @@ class ImportScripts::Drupal < ImportScripts::Base
results = @client.query("
SELECT n.nid nid,
- n.title title,
- n.uid uid,
+ n.title title,
+ n.uid uid,
n.created created,
n.sticky sticky,
nr.body body
@@ -85,7 +85,7 @@ class ImportScripts::Drupal < ImportScripts::Base
created_at: Time.zone.at(row['created']),
pinned_at: row['sticky'].to_i == 1 ? Time.zone.at(row['created']) : nil,
title: row['title'].try(:strip),
- custom_fields: {import_id: "nid:#{row['nid']}"}
+ custom_fields: { import_id: "nid:#{row['nid']}" }
}
end
end
@@ -123,7 +123,7 @@ class ImportScripts::Drupal < ImportScripts::Base
break if results.size < 1
- next if all_records_exist? :posts, results.map {|p| "nid:#{p['nid']}"}
+ next if all_records_exist? :posts, results.map { |p| "nid:#{p['nid']}" }
create_posts(results, total: total_count, offset: offset) do |row|
{
@@ -141,7 +141,7 @@ class ImportScripts::Drupal < ImportScripts::Base
def create_replies
puts '', "creating replies in topics"
-
+
if ENV['DRUPAL_IMPORT_BLOG']
node_types = "('forum','blog')"
else
@@ -149,7 +149,7 @@ class ImportScripts::Drupal < ImportScripts::Base
end
total_count = @client.query("
- SELECT COUNT(*) count
+ SELECT COUNT(*) count
FROM comments c
LEFT JOIN node n ON n.nid=c.nid
WHERE node.type IN #{node_types}
@@ -167,18 +167,18 @@ class ImportScripts::Drupal < ImportScripts::Base
c.uid,
c.timestamp,
c.comment body
- FROM comments c
+ FROM comments c
LEFT JOIN node n ON n.nid=c.nid
WHERE n.type IN #{node_types}
AND n.status = 1
- AND c.status=0
+ AND c.status=0
LIMIT #{batch_size}
OFFSET #{offset};
", cache_rows: false)
break if results.size < 1
- next if all_records_exist? :posts, results.map {|p| "cid:#{p['cid']}"}
+ next if all_records_exist? :posts, results.map { |p| "cid:#{p['cid']}" }
create_posts(results, total: total_count, offset: offset) do |row|
topic_mapping = topic_lookup_from_imported_post_id("nid:#{row['nid']}")
@@ -192,7 +192,7 @@ class ImportScripts::Drupal < ImportScripts::Base
}
if row['pid']
parent = topic_lookup_from_imported_post_id("cid:#{row['pid']}")
- h[:reply_to_post_number] = parent[:post_number] if parent and parent[:post_number] > 1
+ h[:reply_to_post_number] = parent[:post_number] if parent && parent[:post_number] > (1)
end
h
else
@@ -205,6 +205,6 @@ class ImportScripts::Drupal < ImportScripts::Base
end
-if __FILE__==$0
+if __FILE__ == $0
ImportScripts::Drupal.new.perform
end
diff --git a/script/import_scripts/drupal.rb b/script/import_scripts/drupal.rb
index a15933b10c1..6b773486d00 100644
--- a/script/import_scripts/drupal.rb
+++ b/script/import_scripts/drupal.rb
@@ -23,7 +23,7 @@ class ImportScripts::Drupal < ImportScripts::Base
def execute
create_users(@client.query("SELECT uid id, name, mail email, created FROM users;")) do |row|
- {id: row['id'], username: row['name'], email: row['email'], created_at: Time.zone.at(row['created'])}
+ { id: row['id'], username: row['name'], email: row['email'], created_at: Time.zone.at(row['created']) }
end
# You'll need to edit the following query for your Drupal install:
@@ -32,7 +32,7 @@ class ImportScripts::Drupal < ImportScripts::Base
# * Table name may be term_data.
# * May need to select a vid other than 1.
create_categories(categories_query) do |c|
- {id: c['tid'], name: c['name'], description: c['description']}
+ { id: c['tid'], name: c['name'], description: c['description'] }
end
# "Nodes" in Drupal are divided into types. Here we import two types,
@@ -82,7 +82,7 @@ class ImportScripts::Drupal < ImportScripts::Base
created_at: Time.zone.at(row['created']),
pinned_at: row['sticky'].to_i == 1 ? Time.zone.at(row['created']) : nil,
title: row['title'].try(:strip),
- custom_fields: {import_id: "nid:#{row['nid']}"}
+ custom_fields: { import_id: "nid:#{row['nid']}" }
}
end
end
@@ -121,7 +121,7 @@ class ImportScripts::Drupal < ImportScripts::Base
break if results.size < 1
- next if all_records_exist? :posts, results.map {|p| "nid:#{p['nid']}"}
+ next if all_records_exist? :posts, results.map { |p| "nid:#{p['nid']}" }
create_posts(results, total: total_count, offset: offset) do |row|
{
@@ -169,7 +169,7 @@ class ImportScripts::Drupal < ImportScripts::Base
break if results.size < 1
- next if all_records_exist? :posts, results.map {|p| "cid:#{p['cid']}"}
+ next if all_records_exist? :posts, results.map { |p| "cid:#{p['cid']}" }
create_posts(results, total: total_count, offset: offset) do |row|
topic_mapping = topic_lookup_from_imported_post_id("nid:#{row['nid']}")
@@ -183,7 +183,7 @@ class ImportScripts::Drupal < ImportScripts::Base
}
if row['pid']
parent = topic_lookup_from_imported_post_id("cid:#{row['pid']}")
- h[:reply_to_post_number] = parent[:post_number] if parent and parent[:post_number] > 1
+ h[:reply_to_post_number] = parent[:post_number] if parent && parent[:post_number] > (1)
end
h
else
@@ -196,6 +196,6 @@ class ImportScripts::Drupal < ImportScripts::Base
end
-if __FILE__==$0
+if __FILE__ == $0
ImportScripts::Drupal.new.perform
end
diff --git a/script/import_scripts/drupal_json.rb b/script/import_scripts/drupal_json.rb
index ac40525caf6..d99fb86f871 100644
--- a/script/import_scripts/drupal_json.rb
+++ b/script/import_scripts/drupal_json.rb
@@ -8,7 +8,7 @@ class ImportScripts::DrupalJson < ImportScripts::Base
def initialize
super
- @users_json = load_json("formatted_users.json")
+ @users_json = load_json("formatted_users.json")
end
def execute
@@ -40,6 +40,6 @@ class ImportScripts::DrupalJson < ImportScripts::Base
end
end
-if __FILE__==$0
+if __FILE__ == $0
ImportScripts::DrupalJson.new.perform
end
diff --git a/script/import_scripts/drupal_qa.rb b/script/import_scripts/drupal_qa.rb
index 9899f73ad5e..98d79198646 100644
--- a/script/import_scripts/drupal_qa.rb
+++ b/script/import_scripts/drupal_qa.rb
@@ -56,7 +56,7 @@ class ImportScripts::DrupalQA < ImportScripts::Drupal
break if results.size < 1
- next if all_records_exist? :posts, results.map {|p| "nid:#{p['nid']}"}
+ next if all_records_exist? :posts, results.map { |p| "nid:#{p['nid']}" }
create_posts(results, total: total_count, offset: offset) do |row|
{
@@ -102,7 +102,7 @@ class ImportScripts::DrupalQA < ImportScripts::Drupal
break if results.size < 1
- next if all_records_exist? :posts, results.map {|p| "cid:#{p['cid']}"}
+ next if all_records_exist? :posts, results.map { |p| "cid:#{p['cid']}" }
create_posts(results, total: total_count, offset: offset) do |row|
topic_mapping = topic_lookup_from_imported_post_id("nid:#{row['nid']}")
@@ -155,7 +155,7 @@ class ImportScripts::DrupalQA < ImportScripts::Drupal
break if results.size < 1
- next if all_records_exist? :posts, results.map {|p| "cid:#{p['cid']}"}
+ next if all_records_exist? :posts, results.map { |p| "cid:#{p['cid']}" }
create_posts(results, total: total_count, offset: offset) do |row|
topic_mapping = topic_lookup_from_imported_post_id("nid:#{row['nid']}")
@@ -207,7 +207,7 @@ class ImportScripts::DrupalQA < ImportScripts::Drupal
break if results.size < 1
- next if all_records_exist? :posts, results.map {|p| "cid:#{p['cid']}"}
+ next if all_records_exist? :posts, results.map { |p| "cid:#{p['cid']}" }
create_posts(results, total: total_count, offset: offset) do |row|
topic_mapping = topic_lookup_from_imported_post_id("nid:#{row['nid']}")
@@ -235,6 +235,6 @@ class ImportScripts::DrupalQA < ImportScripts::Drupal
end
-if __FILE__==$0
+if __FILE__ == $0
ImportScripts::DrupalQA.new.perform
end
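
All of the Drupal variants above share the same batched import loop, and the reformatting never touches its logic. A condensed sketch of that loop follows; batches, all_records_exist?, create_posts, topic_lookup_from_imported_post_id and user_id_from_imported_user_id are helpers from ImportScripts::Base as used in these hunks, and the SQL is trimmed down for illustration:

    def create_replies
      total_count = @client.query("SELECT COUNT(*) count FROM comments;").first['count']

      batches(BATCH_SIZE) do |offset|
        results = @client.query("
          SELECT cid, nid, uid, timestamp, comment body
            FROM comments
           LIMIT #{BATCH_SIZE}
          OFFSET #{offset};", cache_rows: false)

        break if results.size < 1
        # skip a whole page when every comment in it was imported by a previous run
        next if all_records_exist? :posts, results.map { |p| "cid:#{p['cid']}" }

        create_posts(results, total: total_count, offset: offset) do |row|
          topic = topic_lookup_from_imported_post_id("nid:#{row['nid']}")
          next unless topic

          {
            id: "cid:#{row['cid']}",
            topic_id: topic[:topic_id],
            user_id: user_id_from_imported_user_id(row['uid']) || -1,
            raw: row['body'],
            created_at: Time.zone.at(row['timestamp'])
          }
        end
      end
    end
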
diff --git a/script/import_scripts/fluxbb.rb b/script/import_scripts/fluxbb.rb
index d754c5db0c7..7b886447153 100644
--- a/script/import_scripts/fluxbb.rb
+++ b/script/import_scripts/fluxbb.rb
@@ -74,7 +74,7 @@ class ImportScripts::FluxBB < ImportScripts::Base
break if results.size < 1
- next if all_records_exist? :users, results.map {|u| u["id"].to_i}
+ next if all_records_exist? :users, results.map { |u| u["id"].to_i }
create_users(results, total: total_count, offset: offset) do |user|
{ id: user['id'],
@@ -91,7 +91,7 @@ class ImportScripts::FluxBB < ImportScripts::Base
admin: user['group_id'] == 1 }
end
- groupusers = results.select{ |user| user['group_id'] > 2 }
+ groupusers = results.select { |user| user['group_id'] > 2 }
groupusers.each do |user|
if user['group_id']
@@ -164,7 +164,7 @@ class ImportScripts::FluxBB < ImportScripts::Base
").to_a
break if results.size < 1
- next if all_records_exist? :posts, results.map {|m| m['id'].to_i}
+ next if all_records_exist? :posts, results.map { |m| m['id'].to_i }
create_posts(results, total: total_count, offset: offset) do |m|
skip = false
diff --git a/script/import_scripts/getsatisfaction.rb b/script/import_scripts/getsatisfaction.rb
index de5ff13a62b..ffa0b0388e8 100644
--- a/script/import_scripts/getsatisfaction.rb
+++ b/script/import_scripts/getsatisfaction.rb
@@ -37,7 +37,6 @@ class ImportScripts::GetSatisfaction < ImportScripts::Base
super
end
-
def execute
c = Category.find_by(name: 'Old Forum') ||
Category.create!(name: 'Old Forum', user: Discourse.system_user)
@@ -61,7 +60,7 @@ class ImportScripts::GetSatisfaction < ImportScripts::Base
end
def initialize(cols)
- cols.each_with_index do |col,idx|
+ cols.each_with_index do |col, idx|
self.class.send(:define_method, col) do
@row[idx]
end
@@ -134,7 +133,7 @@ class ImportScripts::GetSatisfaction < ImportScripts::Base
def total_rows(table)
# In case of Excel export file, I converted it to CSV and used:
# CSV.foreach("#{@path}/#{table}.csv", encoding:'iso-8859-1:utf-8').inject(0) {|c, line| c+1} - 1
- File.foreach("#{@path}/#{table}.csv").inject(0) {|c, line| c+1} - 1
+ File.foreach("#{@path}/#{table}.csv").inject(0) { |c, line| c + 1 } - 1
end
def import_users
@@ -191,7 +190,7 @@ class ImportScripts::GetSatisfaction < ImportScripts::Base
def import_categories
rows = []
csv_parse("categories") do |row|
- rows << {id: row.id, name: row.name, description: row.description}
+ rows << { id: row.id, name: row.name, description: row.description }
end
create_categories(rows) do |row|
@@ -209,7 +208,7 @@ class ImportScripts::GetSatisfaction < ImportScripts::Base
code = $2
hoist = SecureRandom.hex
# tidy code, wow, this is impressively crazy
- code.gsub!(/ (\s*)/,"\n\\1")
+ code.gsub!(/ (\s*)/, "\n\\1")
code.gsub!(/^\s*\n$/, "\n")
code.gsub!(/\n+/m, "\n")
code.strip!
@@ -231,48 +230,47 @@ class ImportScripts::GetSatisfaction < ImportScripts::Base
end
def import_post_batch!(posts, topics, offset, total)
- create_posts(posts, total: total, offset: offset) do |post|
+ create_posts(posts, total: total, offset: offset) do |post|
- mapped = {}
+ mapped = {}
- mapped[:id] = post[:id]
- mapped[:user_id] = user_id_from_imported_user_id(post[:user_id]) || -1
- mapped[:raw] = post[:body]
- mapped[:created_at] = post[:created_at]
+ mapped[:id] = post[:id]
+ mapped[:user_id] = user_id_from_imported_user_id(post[:user_id]) || -1
+ mapped[:raw] = post[:body]
+ mapped[:created_at] = post[:created_at]
- topic = topics[post[:topic_id]]
+ topic = topics[post[:topic_id]]
- unless topic
- p "MISSING TOPIC #{post[:topic_id]}"
- p post
- next
- end
+ unless topic
+ p "MISSING TOPIC #{post[:topic_id]}"
+ p post
+ next
+ end
+ unless topic[:post_id]
+ mapped[:title] = post[:title] || "Topic title missing"
+ topic[:post_id] = post[:id]
+ mapped[:category] = post[:category]
+ else
+ parent = topic_lookup_from_imported_post_id(topic[:post_id])
+ next unless parent
- unless topic[:post_id]
- mapped[:title] = post[:title] || "Topic title missing"
- topic[:post_id] = post[:id]
- mapped[:category] = post[:category]
- else
- parent = topic_lookup_from_imported_post_id(topic[:post_id])
- next unless parent
+ mapped[:topic_id] = parent[:topic_id]
- mapped[:topic_id] = parent[:topic_id]
-
- reply_to_post_id = post_id_from_imported_post_id(post[:reply_id])
- if reply_to_post_id
- reply_to_post_number = @post_number_map[reply_to_post_id]
- if reply_to_post_number && reply_to_post_number > 1
- mapped[:reply_to_post_number] = reply_to_post_number
- end
+ reply_to_post_id = post_id_from_imported_post_id(post[:reply_id])
+ if reply_to_post_id
+ reply_to_post_number = @post_number_map[reply_to_post_id]
+ if reply_to_post_number && reply_to_post_number > 1
+ mapped[:reply_to_post_number] = reply_to_post_number
end
end
-
- next if topic[:deleted] or post[:deleted]
-
- mapped
end
+ next if topic[:deleted] || post[:deleted]
+
+ mapped
+ end
+
posts.clear
end
@@ -324,7 +322,7 @@ class ImportScripts::GetSatisfaction < ImportScripts::Base
created_at: DateTime.parse(row.created_at)
}
posts << row
- count+=1
+ count += 1
if posts.length > 0 && posts.length % BATCH_SIZE == 0
import_post_batch!(posts, topic_map, count - posts.length, total)
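
The initialize(cols) hunk above belongs to the small row wrapper the CSV-based importers use: each CSV header becomes a reader method that indexes into the current row. A standalone sketch of the idiom, with the class name and sample columns invented for illustration:

    class CsvRow
      def self.create(cols)
        Class.new(CsvRow).new(cols) # readers land on an anonymous subclass, one per file
      end

      def initialize(cols)
        cols.each_with_index do |col, idx|
          self.class.send(:define_method, col) do
            @row[idx]
          end
        end
      end

      def load(row)
        @row = row
      end
    end

    row = CsvRow.create(%w[id name description])
    row.load(['12', 'Old Forum', 'imported category'])
    puts row.name # => "Old Forum"
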
diff --git a/script/import_scripts/jive.rb b/script/import_scripts/jive.rb
index c64e87a66bb..ae077bda2ee 100644
--- a/script/import_scripts/jive.rb
+++ b/script/import_scripts/jive.rb
@@ -6,7 +6,7 @@ require File.expand_path(File.dirname(__FILE__) + "/base.rb")
class ImportScripts::Jive < ImportScripts::Base
BATCH_SIZE = 1000
- CATEGORY_IDS = [2023,2003,2004,2042,2036,2029] # categories that should be imported
+ CATEGORY_IDS = [2023, 2003, 2004, 2042, 2036, 2029] # categories that should be imported
def initialize(path)
@path = path
@@ -45,7 +45,7 @@ class ImportScripts::Jive < ImportScripts::Base
end
def initialize(cols)
- cols.each_with_index do |col,idx|
+ cols.each_with_index do |col, idx|
self.class.send(:define_method, col) do
@row[idx]
end
@@ -72,7 +72,7 @@ class ImportScripts::Jive < ImportScripts::Base
File.open(filename).each_line do |line|
- line.gsub!(/\\(.{1})/){|m| m[-1] == '"'? '""': m[-1]}
+ line.gsub!(/\\(.{1})/) { |m| m[-1] == '"' ? '""' : m[-1] }
line.strip!
current_row << "\n" unless current_row.empty?
@@ -120,7 +120,7 @@ class ImportScripts::Jive < ImportScripts::Base
end
def total_rows(table)
- File.foreach("#{@path}/#{table}.csv").inject(0) {|c, line| c+1} - 1
+ File.foreach("#{@path}/#{table}.csv").inject(0) { |c, line| c + 1 } - 1
end
def import_groups
@@ -128,7 +128,7 @@ class ImportScripts::Jive < ImportScripts::Base
rows = []
csv_parse("groups") do |row|
- rows << {id: row.groupid, name: row.name}
+ rows << { id: row.groupid, name: row.name }
end
create_groups(rows) do |row|
@@ -204,7 +204,7 @@ class ImportScripts::Jive < ImportScripts::Base
csv_parse("communities") do |row|
next unless CATEGORY_IDS.include?(row.communityid.to_i)
- rows << {id: row.communityid, name: "#{row.name} (#{row.communityid})"}
+ rows << { id: row.communityid, name: "#{row.name} (#{row.communityid})" }
end
create_categories(rows) do |row|
@@ -228,47 +228,47 @@ class ImportScripts::Jive < ImportScripts::Base
end
def import_post_batch!(posts, topics, offset, total)
- create_posts(posts, total: total, offset: offset) do |post|
+ create_posts(posts, total: total, offset: offset) do |post|
- mapped = {}
+ mapped = {}
- mapped[:id] = post[:id]
- mapped[:user_id] = user_id_from_imported_user_id(post[:user_id]) || -1
- mapped[:raw] = post[:body]
- mapped[:created_at] = post[:created_at]
+ mapped[:id] = post[:id]
+ mapped[:user_id] = user_id_from_imported_user_id(post[:user_id]) || -1
+ mapped[:raw] = post[:body]
+ mapped[:created_at] = post[:created_at]
- topic = topics[post[:topic_id]]
+ topic = topics[post[:topic_id]]
- unless topic
- p "MISSING TOPIC #{post[:topic_id]}"
- p post
- next
- end
+ unless topic
+ p "MISSING TOPIC #{post[:topic_id]}"
+ p post
+ next
+ end
- unless topic[:post_id]
- mapped[:category] = category_id_from_imported_category_id(topic[:category_id])
- mapped[:title] = post[:title]
- topic[:post_id] = post[:id]
- else
- parent = topic_lookup_from_imported_post_id(topic[:post_id])
- next unless parent
+ unless topic[:post_id]
+ mapped[:category] = category_id_from_imported_category_id(topic[:category_id])
+ mapped[:title] = post[:title]
+ topic[:post_id] = post[:id]
+ else
+ parent = topic_lookup_from_imported_post_id(topic[:post_id])
+ next unless parent
- mapped[:topic_id] = parent[:topic_id]
+ mapped[:topic_id] = parent[:topic_id]
- reply_to_post_id = post_id_from_imported_post_id(post[:reply_id])
- if reply_to_post_id
- reply_to_post_number = @post_number_map[reply_to_post_id]
- if reply_to_post_number && reply_to_post_number > 1
- mapped[:reply_to_post_number] = reply_to_post_number
- end
+ reply_to_post_id = post_id_from_imported_post_id(post[:reply_id])
+ if reply_to_post_id
+ reply_to_post_number = @post_number_map[reply_to_post_id]
+ if reply_to_post_number && reply_to_post_number > 1
+ mapped[:reply_to_post_number] = reply_to_post_number
end
end
-
- next if topic[:deleted] or post[:deleted]
-
- mapped
end
+ next if topic[:deleted] || post[:deleted]
+
+ mapped
+ end
+
posts.clear
end
@@ -290,14 +290,14 @@ class ImportScripts::Jive < ImportScripts::Base
#IMAGE UPLOADER
if thread.imagecount
Dir.foreach("/var/www/discourse/script/import_scripts/jive/img/#{thread.messageid}") do |item|
- next if item == '.' or item == '..' or item == '.DS_Store'
+ next if item == ('.') || item == ('..') || item == ('.DS_Store')
photo_path = "/var/www/discourse/script/import_scripts/jive/img/#{thread.messageid}/#{item}"
upload = create_upload(thread.userid, photo_path, File.basename(photo_path))
if upload.persisted?
- puts "Image upload is successful for #{photo_path}, new path is #{upload.url}!"
- thread.body.gsub!(item,upload.url)
+ puts "Image upload is successful for #{photo_path}, new path is #{upload.url}!"
+ thread.body.gsub!(item, upload.url)
else
- puts "Error: Image upload is not successful for #{photo_path}!"
+ puts "Error: Image upload is not successful for #{photo_path}!"
end
end
end
@@ -305,15 +305,15 @@ class ImportScripts::Jive < ImportScripts::Base
#ATTACHMENT UPLOADER
if thread.attachmentcount
Dir.foreach("/var/www/discourse/script/import_scripts/jive/attach/#{thread.messageid}") do |item|
- next if item == '.' or item == '..' or item == '.DS_Store'
+ next if item == ('.') || item == ('..') || item == ('.DS_Store')
attach_path = "/var/www/discourse/script/import_scripts/jive/attach/#{thread.messageid}/#{item}"
upload = create_upload(thread.userid, attach_path, File.basename(attach_path))
if upload.persisted?
- puts "Attachment upload is successful for #{attach_path}, new path is #{upload.url}!"
- thread.body.gsub!(item,upload.url)
- thread.body << "\n\n#{attachment_html(upload,item)}"
+ puts "Attachment upload is successful for #{attach_path}, new path is #{upload.url}!"
+ thread.body.gsub!(item, upload.url)
+ thread.body << "\n\n#{attachment_html(upload, item)}"
else
- puts "Error: Attachment upload is not successful for #{attach_path}!"
+ puts "Error: Attachment upload is not successful for #{attach_path}!"
end
end
end
@@ -337,7 +337,7 @@ class ImportScripts::Jive < ImportScripts::Base
topic_map.each do |_, topic|
posts << topic if topic[:body]
- count+=1
+ count += 1
end
csv_parse("messages") do |thread|
@@ -350,14 +350,14 @@ class ImportScripts::Jive < ImportScripts::Base
#IMAGE UPLOADER
if thread.imagecount
Dir.foreach("/var/www/discourse/script/import_scripts/jive/img/#{thread.messageid}") do |item|
- next if item == '.' or item == '..' or item == '.DS_Store'
+ next if item == ('.') || item == ('..') || item == ('.DS_Store')
photo_path = "/var/www/discourse/script/import_scripts/jive/img/#{thread.messageid}/#{item}"
upload = create_upload(thread.userid, photo_path, File.basename(photo_path))
if upload.persisted?
- puts "Image upload is successful for #{photo_path}, new path is #{upload.url}!"
- thread.body.gsub!(item,upload.url)
+ puts "Image upload is successful for #{photo_path}, new path is #{upload.url}!"
+ thread.body.gsub!(item, upload.url)
else
- puts "Error: Image upload is not successful for #{photo_path}!"
+ puts "Error: Image upload is not successful for #{photo_path}!"
end
end
end
@@ -365,15 +365,15 @@ class ImportScripts::Jive < ImportScripts::Base
#ATTACHMENT UPLOADER
if thread.attachmentcount
Dir.foreach("/var/www/discourse/script/import_scripts/jive/attach/#{thread.messageid}") do |item|
- next if item == '.' or item == '..' or item == '.DS_Store'
+ next if item == ('.') || item == ('..') || item == ('.DS_Store')
attach_path = "/var/www/discourse/script/import_scripts/jive/attach/#{thread.messageid}/#{item}"
upload = create_upload(thread.userid, attach_path, File.basename(attach_path))
if upload.persisted?
- puts "Attachment upload is successful for #{attach_path}, new path is #{upload.url}!"
- thread.body.gsub!(item,upload.url)
- thread.body << "\n\n#{attachment_html(upload,item)}"
+ puts "Attachment upload is successful for #{attach_path}, new path is #{upload.url}!"
+ thread.body.gsub!(item, upload.url)
+ thread.body << "\n\n#{attachment_html(upload, item)}"
else
- puts "Error: Attachment upload is not successful for #{attach_path}!"
+ puts "Error: Attachment upload is not successful for #{attach_path}!"
end
end
end
@@ -387,7 +387,7 @@ class ImportScripts::Jive < ImportScripts::Base
created_at: DateTime.parse(thread.creationdate)
}
posts << row
- count+=1
+ count += 1
if posts.length > 0 && posts.length % BATCH_SIZE == 0
import_post_batch!(posts, topic_map, count - posts.length, total)
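
The image and attachment loops being re-spaced above all follow one pattern: walk a per-message directory, feed each file to create_upload, and rewrite the post body to point at the new URL. A trimmed sketch of that pattern; the rewrite_uploads! name and the kind parameter are invented here, while create_upload and attachment_html are the helpers the script already uses:

    def rewrite_uploads!(thread, kind) # kind: "img" or "attach"
      dir = "/var/www/discourse/script/import_scripts/jive/#{kind}/#{thread.messageid}"

      Dir.foreach(dir) do |item|
        next if item == '.' || item == '..' || item == '.DS_Store'

        path = "#{dir}/#{item}"
        upload = create_upload(thread.userid, path, File.basename(path))

        if upload.persisted?
          # swap the bare filename in the body for the freshly uploaded URL
          thread.body.gsub!(item, upload.url)
          thread.body << "\n\n#{attachment_html(upload, item)}" if kind == 'attach'
        else
          puts "Error: upload is not successful for #{path}!"
        end
      end
    end
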
diff --git a/script/import_scripts/jive_api.rb b/script/import_scripts/jive_api.rb
index 0441db26280..f8c3734a351 100644
--- a/script/import_scripts/jive_api.rb
+++ b/script/import_scripts/jive_api.rb
@@ -334,7 +334,7 @@ class ImportScripts::JiveApi < ImportScripts::Base
SQL
end
- def get(url_or_path, authenticated=false)
+ def get(url_or_path, authenticated = false)
tries ||= 3
command = ["curl", "--silent"]
diff --git a/script/import_scripts/json_generic.rb b/script/import_scripts/json_generic.rb
index c6474b26674..f98bcebcbc5 100755
--- a/script/import_scripts/json_generic.rb
+++ b/script/import_scripts/json_generic.rb
@@ -6,7 +6,7 @@ require File.expand_path(File.dirname(__FILE__) + "/base.rb")
class ImportScripts::JsonGeneric < ImportScripts::Base
JSON_FILE_PATH = ENV['JSON_FILE']
- BATCH_SIZE ||= 1000
+ BATCH_SIZE ||= 1000
def initialize
super
@@ -59,7 +59,6 @@ class ImportScripts::JsonGeneric < ImportScripts::Base
end
end
-
def import_discussions
puts "", "Importing discussions"
@@ -103,6 +102,6 @@ class ImportScripts::JsonGeneric < ImportScripts::Base
end
end
-if __FILE__==$0
+if __FILE__ == $0
ImportScripts::JsonGeneric.new.perform
end
diff --git a/script/import_scripts/kunena.rb b/script/import_scripts/kunena.rb
index 3cb52e7b38a..1aa333593ee 100644
--- a/script/import_scripts/kunena.rb
+++ b/script/import_scripts/kunena.rb
@@ -3,7 +3,7 @@ require File.expand_path(File.dirname(__FILE__) + "/base.rb")
class ImportScripts::Kunena < ImportScripts::Base
- KUNENA_DB = "kunena"
+ KUNENA_DB = "kunena"
def initialize
super
@@ -38,7 +38,7 @@ class ImportScripts::Kunena < ImportScripts::Base
@users = nil
create_categories(@client.query("SELECT id, parent, name, description, ordering FROM jos_kunena_categories ORDER BY parent, id;")) do |c|
- h = {id: c['id'], name: c['name'], description: c['description'], position: c['ordering'].to_i}
+ h = { id: c['id'], name: c['name'], description: c['description'], position: c['ordering'].to_i }
if c['parent'].to_i > 0
h[:parent_category_id] = category_id_from_imported_category_id(c['parent'])
end
@@ -61,12 +61,12 @@ class ImportScripts::Kunena < ImportScripts::Base
puts "fetching Joomla users data from mysql"
results = @client.query("SELECT id, username, email, registerDate FROM jos_users;", cache_rows: false)
results.each do |u|
- next unless u['id'].to_i > 0 and u['username'].present? and u['email'].present?
- username = u['username'].gsub(' ', '_').gsub(/[^A-Za-z0-9_]/, '')[0,User.username_length.end]
+ next unless u['id'].to_i > (0) && u['username'].present? && u['email'].present?
+ username = u['username'].gsub(' ', '_').gsub(/[^A-Za-z0-9_]/, '')[0, User.username_length.end]
if username.length < User.username_length.first
username = username * User.username_length.first
end
- @users[u['id'].to_i] = {id: u['id'].to_i, username: username, email: u['email'], created_at: u['registerDate']}
+ @users[u['id'].to_i] = { id: u['id'].to_i, username: username, email: u['email'], created_at: u['registerDate'] }
end
puts "fetching Kunena user data from mysql"
@@ -109,7 +109,7 @@ class ImportScripts::Kunena < ImportScripts::Base
break if results.size < 1
- next if all_records_exist? :posts, results.map {|p| p['id'].to_i}
+ next if all_records_exist? :posts, results.map { |p| p['id'].to_i }
create_posts(results, total: total_count, offset: offset) do |m|
skip = false
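
Both Kunena scripts also carry a small username cleanup: spaces become underscores, anything outside [A-Za-z0-9_] is dropped, the result is truncated to the maximum username length, and a too-short result is padded by repetition. A standalone version with the length range hard-coded for illustration (the scripts read it from User.username_length):

    MIN_LENGTH = 3
    MAX_LENGTH = 20

    def cleaned_username(joomla_name)
      name = joomla_name.gsub(' ', '_').gsub(/[^A-Za-z0-9_]/, '')[0, MAX_LENGTH]
      name = name * MIN_LENGTH if name.length < MIN_LENGTH # "ab" => "ababab"
      name
    end

    puts cleaned_username('José Q') # => "Jos_Q"
    puts cleaned_username('ab')     # => "ababab"
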
diff --git a/script/import_scripts/kunena3.rb b/script/import_scripts/kunena3.rb
index 2f4d7808c44..4a271202748 100644
--- a/script/import_scripts/kunena3.rb
+++ b/script/import_scripts/kunena3.rb
@@ -60,7 +60,7 @@ class ImportScripts::Kunena < ImportScripts::Base
@users = nil
create_categories(@client.query("SELECT id, #{PARENT_FIELD} as parent_id, name, description, ordering FROM #{KUNENA_PREFIX}kunena_categories ORDER BY #{PARENT_FIELD}, id;")) do |c|
- h = {id: c['id'], name: c['name'], description: c['description'], position: c['ordering'].to_i}
+ h = { id: c['id'], name: c['name'], description: c['description'], position: c['ordering'].to_i }
if c['parent_id'].to_i > 0
h[:parent_category_id] = category_id_from_imported_category_id(c['parent_id'])
end
@@ -83,12 +83,12 @@ class ImportScripts::Kunena < ImportScripts::Base
puts "fetching Joomla users data from mysql"
results = @client.query("SELECT id, username, email, registerDate FROM #{KUNENA_PREFIX}users;", cache_rows: false)
results.each do |u|
- next unless u['id'].to_i > 0 and u['username'].present? and u['email'].present?
- username = u['username'].gsub(' ', '_').gsub(/[^A-Za-z0-9_]/, '')[0,User.username_length.end]
+ next unless u['id'].to_i > (0) && u['username'].present? && u['email'].present?
+ username = u['username'].gsub(' ', '_').gsub(/[^A-Za-z0-9_]/, '')[0, User.username_length.end]
if username.length < User.username_length.first
username = username * User.username_length.first
end
- @users[u['id'].to_i] = {id: u['id'].to_i, username: username, email: u['email'], created_at: u['registerDate']}
+ @users[u['id'].to_i] = { id: u['id'].to_i, username: username, email: u['email'], created_at: u['registerDate'] }
end
puts "fetching Kunena user data from mysql"
@@ -131,7 +131,7 @@ class ImportScripts::Kunena < ImportScripts::Base
break if results.size < 1
- next if all_records_exist? :posts, results.map {|p| p['id'].to_i}
+ next if all_records_exist? :posts, results.map { |p| p['id'].to_i }
create_posts(results, total: total_count, offset: offset) do |m|
skip = false
diff --git a/script/import_scripts/lithium.rb b/script/import_scripts/lithium.rb
index 54cd27e1d92..b366a2a4d78 100644
--- a/script/import_scripts/lithium.rb
+++ b/script/import_scripts/lithium.rb
@@ -10,8 +10,6 @@
# that was done using import_scripts/support/convert_mysql_xml_to_mysql.rb
#
-
-
require 'mysql2'
require 'csv'
require 'reverse_markdown'
@@ -19,7 +17,7 @@ require File.expand_path(File.dirname(__FILE__) + "/base.rb")
require 'htmlentities'
# remove table conversion
-[:table,:td,:tr,:th,:thead,:tbody].each do |tag|
+[:table, :td, :tr, :th, :thead, :tbody].each do |tag|
ReverseMarkdown::Converters.unregister(tag)
end
@@ -101,7 +99,7 @@ class ImportScripts::Lithium < ImportScripts::Base
break if users.size < 1
- next if all_records_exist? :users, users.map {|u| u["id"].to_i}
+ next if all_records_exist? :users, users.map { |u| u["id"].to_i }
create_users(users, total: user_count, offset: offset) do |user|
@@ -123,7 +121,7 @@ class ImportScripts::Lithium < ImportScripts::Base
end
def unix_time(t)
- Time.at(t/1000.0)
+ Time.at(t / 1000.0)
end
def import_profile_picture(old_user, imported_user)
@@ -191,7 +189,6 @@ class ImportScripts::Lithium < ImportScripts::Base
top_level_ids = Set.new
child_ids = Set.new
-
parent = nil
CSV.foreach(CATEGORY_CSV) do |row|
display_id = row[2].strip
@@ -216,7 +213,6 @@ class ImportScripts::Lithium < ImportScripts::Base
top_level_categories = categories.select { |c| top_level_ids.include? c["display_id"] }
-
create_categories(top_level_categories) do |category|
info = category_info[category["display_id"]]
info[:id] = category["node_id"]
@@ -228,7 +224,6 @@ class ImportScripts::Lithium < ImportScripts::Base
}
end
-
puts "", "importing children categories..."
children_categories = categories.select { |c| child_ids.include? c["display_id"] }
@@ -246,7 +241,7 @@ class ImportScripts::Lithium < ImportScripts::Base
end
puts "", "securing categories"
- category_info.each do |_,info|
+ category_info.each do |_, info|
if info[:secure]
id = category_id_from_imported_category_id(info[:id])
if id
@@ -278,7 +273,7 @@ class ImportScripts::Lithium < ImportScripts::Base
break if topics.size < 1
- next if all_records_exist? :posts, topics.map {|topic| "#{topic["node_id"]} #{topic["id"]}"}
+ next if all_records_exist? :posts, topics.map { |topic| "#{topic["node_id"]} #{topic["id"]}" }
create_posts(topics, total: topic_count, offset: offset) do |topic|
@@ -295,7 +290,7 @@ class ImportScripts::Lithium < ImportScripts::Base
raw: raw,
created_at: unix_time(topic["post_date"]),
views: topic["views"],
- custom_fields: {import_unique_id: topic["unique_id"]},
+ custom_fields: { import_unique_id: topic["unique_id"] },
import_mode: true
}
else
@@ -326,7 +321,7 @@ class ImportScripts::Lithium < ImportScripts::Base
break if posts.size < 1
- next if all_records_exist? :posts, posts.map {|post| "#{post["node_id"]} #{post["root_id"]} #{post["id"]}"}
+ next if all_records_exist? :posts, posts.map { |post| "#{post["node_id"]} #{post["root_id"]} #{post["id"]}" }
create_posts(posts, total: post_count, offset: offset) do |post|
raw = post["raw"]
@@ -340,7 +335,7 @@ class ImportScripts::Lithium < ImportScripts::Base
topic_id: topic[:topic_id],
raw: raw,
created_at: unix_time(post["post_date"]),
- custom_fields: {import_unique_id: post["unique_id"]},
+ custom_fields: { import_unique_id: post["unique_id"] },
import_mode: true
}
@@ -365,7 +360,7 @@ class ImportScripts::Lithium < ImportScripts::Base
"smileysurprised" => "dizzy_face",
"smileytongue" => "stuck_out_tongue",
"smileyvery-happy" => "grin",
- "smileywink" => "wink",
+ "smileywink" => "wink",
"smileyfrustrated" => "confounded",
"smileyembarrassed" => "flushed",
"smileylol" => "laughing",
@@ -382,7 +377,6 @@ class ImportScripts::Lithium < ImportScripts::Base
"catlol" => "joy_cat"
}
-
def import_likes
puts "\nimporting likes..."
@@ -395,8 +389,6 @@ class ImportScripts::Lithium < ImportScripts::Base
existing_map[import_id] = post_id
end
-
-
puts "loading data into temp table"
PostAction.exec_sql("create temp table like_data(user_id int, post_id int, created_at timestamp without time zone)")
PostAction.transaction do
@@ -436,7 +428,6 @@ class ImportScripts::Lithium < ImportScripts::Base
WHERE ua.id IS NULL AND pa.post_action_type_id = 2
SQL
-
# reverse action
UserAction.exec_sql <<-SQL
INSERT INTO user_actions (user_id, action_type, target_topic_id, target_post_id, acting_user_id, created_at, updated_at)
@@ -490,7 +481,6 @@ class ImportScripts::Lithium < ImportScripts::Base
existing_map[import_id] = post_id
end
-
puts "loading data into temp table"
PostAction.exec_sql("create temp table accepted_data(post_id int primary key)")
PostAction.transaction do
@@ -507,7 +497,6 @@ class ImportScripts::Lithium < ImportScripts::Base
end
end
-
puts "deleting dupe answers"
PostAction.exec_sql <<-SQL
DELETE FROM accepted_data WHERE post_id NOT IN (
@@ -553,7 +542,7 @@ class ImportScripts::Lithium < ImportScripts::Base
users = {}
- [inbox,outbox].each do |r|
+ [inbox, outbox].each do |r|
r.each do |row|
ary = (users[row["note_id"]] ||= Set.new)
user_id = user_id_from_imported_user_id(row["user_id"])
@@ -567,7 +556,7 @@ class ImportScripts::Lithium < ImportScripts::Base
subject_to_first_note = {}
mysql_query("SELECT note_id, subject, sender_user_id FROM tblia_notes_content order by note_id").each do |row|
- user_id = user_id_from_imported_user_id(row["sender_user_id"])
+ user_id = user_id_from_imported_user_id(row["sender_user_id"])
ary = (users[row["note_id"]] ||= Set.new)
if user_id
ary << user_id
@@ -581,7 +570,7 @@ class ImportScripts::Lithium < ImportScripts::Base
puts "Loading user_id to username map"
user_map = {}
- User.pluck(:id, :username).each do |id,username|
+ User.pluck(:id, :username).each do |id, username|
user_map[id] = username
end
@@ -596,17 +585,16 @@ class ImportScripts::Lithium < ImportScripts::Base
OFFSET #{offset}
SQL
-
break if topics.size < 1
- next if all_records_exist? :posts, topics.map {|topic| "pm_#{topic["note_id"]}"}
+ next if all_records_exist? :posts, topics.map { |topic| "pm_#{topic["note_id"]}" }
create_posts(topics, total: topic_count, offset: offset) do |topic|
user_id = user_id_from_imported_user_id(topic["sender_user_id"]) || Discourse::SYSTEM_USER_ID
participants = users[topic["note_id"]]
- usernames = (participants - [user_id]).map{|id| user_map[id]}
+ usernames = (participants - [user_id]).map { |id| user_map[id] }
subject = topic["subject"]
topic_id = nil
@@ -646,7 +634,6 @@ class ImportScripts::Lithium < ImportScripts::Base
def close_topics
-
puts "\nclosing closed topics..."
sql = "select unique_id post_id from message2 where root_id = id AND (attributes & 0x0002 ) != 0;"
@@ -658,8 +645,8 @@ class ImportScripts::Lithium < ImportScripts::Base
existing_map[import_id.to_i] = post_id.to_i
end
- results.map{|r| r["post_id"]}.each_slice(500) do |ids|
- mapped = ids.map{|id| existing_map[id]}.compact
+ results.map { |r| r["post_id"] }.each_slice(500) do |ids|
+ mapped = ids.map { |id| existing_map[id] }.compact
Topic.exec_sql("
UPDATE topics SET closed = true
WHERE id IN (SELECT topic_id FROM posts where id in (:ids))
@@ -668,7 +655,6 @@ class ImportScripts::Lithium < ImportScripts::Base
end
-
def create_permalinks
puts "Creating permalinks"
@@ -739,7 +725,6 @@ SQL
def post_process_posts
puts "", "Postprocessing posts..."
-
current = 0
max = Post.count
@@ -766,7 +751,6 @@ SQL
end
end
-
def postprocess_post_raw(raw, user_id)
doc = Nokogiri::HTML.fragment(raw)
@@ -827,7 +811,7 @@ SQL
":#{SMILEY_SUBS[$1] || $1}:"
end
# nbsp central
- raw.gsub!(/([a-zA-Z0-9]) ([a-zA-Z0-9])/,"\\1 \\2")
+ raw.gsub!(/([a-zA-Z0-9]) ([a-zA-Z0-9])/, "\\1 \\2")
raw
end
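
One detail from the Lithium hunks worth a note: unix_time divides by 1000.0 before calling Time.at, so the dump's timestamps are presumably millisecond epochs, and the float division keeps any sub-second fraction:

    def unix_time(t)
      Time.at(t / 1000.0)
    end

    unix_time(1_500_000_000_000) # => a Time in mid-July 2017
    unix_time(1_500_000_000_500) # half a second later
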
diff --git a/script/import_scripts/mbox-experimental.rb b/script/import_scripts/mbox-experimental.rb
index 065b0111bd8..12c18515abb 100644
--- a/script/import_scripts/mbox-experimental.rb
+++ b/script/import_scripts/mbox-experimental.rb
@@ -14,4 +14,3 @@ module ImportScripts
Importer.new(@settings).perform
end
end
-
diff --git a/script/import_scripts/mbox.rb b/script/import_scripts/mbox.rb
index e7c25ed18c6..d0b379aa12a 100755
--- a/script/import_scripts/mbox.rb
+++ b/script/import_scripts/mbox.rb
@@ -46,7 +46,7 @@ class ImportScripts::Mbox < ImportScripts::Base
exit
end
- validates_format_of :email, :with => /\A([^@\s]+)@((?:[-a-z0-9]+\.)+[a-z]{2,})\Z/i, :on => :create
+ validates_format_of :email, with: /\A([^@\s]+)@((?:[-a-z0-9]+\.)+[a-z]{2,})\Z/i, on: :create
def execute
import_categories
@@ -63,7 +63,7 @@ class ImportScripts::Mbox < ImportScripts::Base
mappings = CATEGORY_MAPPINGS.values - ['uncategorized']
create_categories(mappings) do |c|
- {id: c, name: c}
+ { id: c, name: c }
end
end
@@ -109,7 +109,7 @@ class ImportScripts::Mbox < ImportScripts::Base
each_line(f) do |line|
line = line.scrub
if line =~ SPLIT_AT
-p message_count += 1
+ p message_count += 1
if !msg.empty?
mail = Mail.read_from_string(msg)
yield mail, f
@@ -163,7 +163,7 @@ p message_count += 1
puts "#{not_found.size} records couldn't be associated with parents"
if not_found.present?
- db.execute "UPDATE emails SET reply_to = NULL WHERE msg_id IN (#{not_found.map {|nf| "'#{nf}'"}.join(',')})"
+ db.execute "UPDATE emails SET reply_to = NULL WHERE msg_id IN (#{not_found.map { |nf| "'#{nf}'" }.join(',')})"
end
dupe_titles = db.execute "SELECT title, COUNT(*) FROM emails GROUP BY title HAVING count(*) > 1"
@@ -197,7 +197,7 @@ p message_count += 1
from_email.gsub!(/ /, '')
end
end
-p end
+ end
display_names = from.try(:display_names)
if display_names.present?
@@ -308,7 +308,7 @@ p end
title.strip
#In case of mixed localized prefixes there could be many of them if the mail client didn't strip the localized ones
- if original_length > title.length
+ if original_length > title.length
clean_title(title)
else
title
@@ -331,9 +331,9 @@ p end
total_count = all_users.size
batches(BATCH_SIZE) do |offset|
- users = all_users[offset..offset+BATCH_SIZE-1]
+ users = all_users[offset..offset + BATCH_SIZE - 1]
break if users.nil?
- next if all_records_exist? :users, users.map {|u| u[1]}
+ next if all_records_exist? :users, users.map { |u| u[1] }
create_users(users, total: total_count, offset: offset) do |u|
{
@@ -374,7 +374,7 @@ p end
new_raw = p.raw.dup
new_raw = new_raw.gsub!(/#{Regexp.escape(find)}/i, replace) || new_raw
if new_raw != p.raw
- p.revise(Discourse.system_user, { raw: new_raw }, { bypass_bump: true })
+ p.revise(Discourse.system_user, { raw: new_raw }, bypass_bump: true)
print_warning "\nReplaced #{find} with #{replace} in topic #{p.topic_id}"
end
end
@@ -411,10 +411,10 @@ p end
topic_count = all_topics.size
batches(BATCH_SIZE) do |offset|
- topics = all_topics[offset..offset+BATCH_SIZE-1]
+ topics = all_topics[offset..offset + BATCH_SIZE - 1]
break if topics.nil?
- next if all_records_exist? :posts, topics.map {|t| t[0]}
+ next if all_records_exist? :posts, topics.map { |t| t[0] }
create_posts(topics, total: topic_count, offset: offset) do |t|
raw_email = t[5]
@@ -454,7 +454,7 @@ p end
raw = clean_raw(raw)
raw = raw.dup.to_s
raw.gsub!(/#{from_email}/, "@#{username}")
- cleaned_email = from_email.dup.sub(/@/,' at ')
+ cleaned_email = from_email.dup.sub(/@/, ' at ')
raw.gsub!(/#{cleaned_email}/, "@#{username}")
{ id: t[0],
title: clean_title(title),
@@ -490,11 +490,11 @@ p end
puts "Replies: #{post_count}"
batches(BATCH_SIZE) do |offset|
- posts = replies[offset..offset+BATCH_SIZE-1]
+ posts = replies[offset..offset + BATCH_SIZE - 1]
break if posts.nil?
break if posts.count < 1
- next if all_records_exist? :posts, posts.map {|p| p[0]}
+ next if all_records_exist? :posts, posts.map { |p| p[0] }
create_posts(posts, total: post_count, offset: offset) do |p|
parent_id = p[6]
@@ -521,7 +521,7 @@ p end
user_id = user_id_from_imported_user_id(from_email) || Discourse::SYSTEM_USER_ID
raw = clean_raw(raw).to_s
raw.gsub!(/#{from_email}/, "@#{username}")
- cleaned_email = from_email.dup.sub(/@/,' at ')
+ cleaned_email = from_email.dup.sub(/@/, ' at ')
raw.gsub!(/#{cleaned_email}/, "@#{username}")
# import the attachments
mail.attachments.each do |attachment|
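
Unlike the SQL-backed importers, the mbox script batches over arrays it has already built from its local index, slicing BATCH_SIZE entries at a time; the hunks above only add spaces around the slice arithmetic. A reduced sketch of that loop, with the row field positions assumed for illustration:

    BATCH_SIZE = 1000

    def import_users_in_batches(all_users)
      total_count = all_users.size

      batches(BATCH_SIZE) do |offset|
        users = all_users[offset..offset + BATCH_SIZE - 1]
        break if users.nil? # ran off the end of the array
        next if all_records_exist? :users, users.map { |u| u[1] }

        create_users(users, total: total_count, offset: offset) do |u|
          { id: u[1], email: u[1], name: u[2] } # positions assumed for this sketch
        end
      end
    end
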
diff --git a/script/import_scripts/muut.rb b/script/import_scripts/muut.rb
index ccd356d90db..24134ee3f92 100644
--- a/script/import_scripts/muut.rb
+++ b/script/import_scripts/muut.rb
@@ -59,7 +59,6 @@ class ImportScripts::Muut < ImportScripts::Base
end
end
-
def import_categories
puts "", "Importing categories"
@@ -72,7 +71,6 @@ class ImportScripts::Muut < ImportScripts::Base
end
end
-
def import_discussions
puts "", "Importing discussions"
@@ -81,7 +79,6 @@ class ImportScripts::Muut < ImportScripts::Base
@imported_json['categories'].each do |category|
-
@imported_json['threads'][category['path']].each do |thread|
next if thread["seed"]["key"] == "skip-this-topic"
@@ -96,7 +93,7 @@ class ImportScripts::Muut < ImportScripts::Base
end
# update user display name
- if thread["seed"]["author"] && thread["seed"]["author"]["displayname"] != "" && mapped[:user_id] != -1
+ if thread["seed"]["author"] && thread["seed"]["author"]["displayname"] != "" && mapped[:user_id] != -1
user = User.find_by(id: mapped[:user_id])
if user
user.name = thread["seed"]["author"]["displayname"]
@@ -181,6 +178,6 @@ class ImportScripts::Muut < ImportScripts::Base
end
-if __FILE__==$0
+if __FILE__ == $0
ImportScripts::Muut.new.perform
end
diff --git a/script/import_scripts/mybb.rb b/script/import_scripts/mybb.rb
index 8bf101c39ab..f9201ecacfa 100644
--- a/script/import_scripts/mybb.rb
+++ b/script/import_scripts/mybb.rb
@@ -37,7 +37,7 @@ class ImportScripts::MyBB < ImportScripts::Base
end
def execute
- SiteSetting.disable_emails=true
+ SiteSetting.disable_emails = true
import_users
import_categories
import_posts
@@ -66,7 +66,7 @@ class ImportScripts::MyBB < ImportScripts::Base
break if results.size < 1
- next if all_records_exist? :users, results.map {|u| u["id"].to_i}
+ next if all_records_exist? :users, results.map { |u| u["id"].to_i }
create_users(results, total: total_count, offset: offset) do |user|
{ id: user['id'],
@@ -87,7 +87,7 @@ class ImportScripts::MyBB < ImportScripts::Base
")
create_categories(results) do |row|
- h = {id: row['id'], name: CGI.unescapeHTML(row['name']), description: CGI.unescapeHTML(row['description'])}
+ h = { id: row['id'], name: CGI.unescapeHTML(row['name']), description: CGI.unescapeHTML(row['description']) }
if row['parent_id'].to_i > 0
h[:parent_category_id] = category_id_from_imported_category_id(row['parent_id'])
end
@@ -120,7 +120,7 @@ class ImportScripts::MyBB < ImportScripts::Base
break if results.size < 1
- next if all_records_exist? :posts, results.map {|m| m['id'].to_i}
+ next if all_records_exist? :posts, results.map { |m| m['id'].to_i }
create_posts(results, total: total_count, offset: offset) do |m|
skip = false
@@ -235,7 +235,7 @@ class ImportScripts::MyBB < ImportScripts::Base
def create_permalinks
puts '', 'Creating redirects...', ''
- SiteSetting.permalink_normalizations= '/(\\w+)-(\\d+)[-.].*/\\1-\\2.html'
+ SiteSetting.permalink_normalizations = '/(\\w+)-(\\d+)[-.].*/\\1-\\2.html'
puts '', 'Users...', ''
total_users = User.count
start_time = Time.now
@@ -244,7 +244,7 @@ class ImportScripts::MyBB < ImportScripts::Base
ucf = u.custom_fields
count += 1
if ucf && ucf["import_id"] && ucf["import_username"]
- Permalink.create(url: "#{BASE}/user-#{ucf['import_id']}.html", external_url: "/u/#{u.username}" ) rescue nil
+ Permalink.create(url: "#{BASE}/user-#{ucf['import_id']}.html", external_url: "/u/#{u.username}") rescue nil
end
print_status(count, total_users, start_time)
end
@@ -260,7 +260,7 @@ class ImportScripts::MyBB < ImportScripts::Base
unless QUIET
puts ("forum-#{id}.html --> /c/#{cat.id}")
end
- Permalink.create( url: "#{BASE}/forum-#{id}.html", category_id: cat.id ) rescue nil
+ Permalink.create(url: "#{BASE}/forum-#{id}.html", category_id: cat.id) rescue nil
print_status(count, total_categories, start_time)
end
@@ -286,7 +286,7 @@ class ImportScripts::MyBB < ImportScripts::Base
count += 1
if topic = topic_lookup_from_imported_post_id(post['id'])
id = post['topic_id']
- Permalink.create( url: "#{BASE}/thread-#{id}.html", topic_id: topic[:topic_id] ) rescue nil
+ Permalink.create(url: "#{BASE}/thread-#{id}.html", topic_id: topic[:topic_id]) rescue nil
unless QUIET
puts ("#{BASE}/thread-#{id}.html --> http://localhost:3000/t/#{topic[:topic_id]}")
end
@@ -296,8 +296,6 @@ class ImportScripts::MyBB < ImportScripts::Base
end
end
-
-
def mysql_query(sql)
@client.query(sql, cache_rows: false)
end
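
The MyBB redirect pass combines a site-wide normalization regex with one Permalink row per user, forum and thread, wrapping each create in rescue nil so a re-run does not abort on rows that already exist. A minimal sketch of the thread case; BASE is the old forum prefix defined earlier in that script:

    SiteSetting.permalink_normalizations = '/(\\w+)-(\\d+)[-.].*/\\1-\\2.html'

    def permalink_for_thread(old_thread_id, topic_id)
      # bare rescue nil mirrors the script: an already-created URL is simply skipped
      Permalink.create(url: "#{BASE}/thread-#{old_thread_id}.html", topic_id: topic_id) rescue nil
    end
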
diff --git a/script/import_scripts/mylittleforum.rb b/script/import_scripts/mylittleforum.rb
index fbec5dad462..f0ec1524720 100644
--- a/script/import_scripts/mylittleforum.rb
+++ b/script/import_scripts/mylittleforum.rb
@@ -2,7 +2,6 @@ require "mysql2"
require File.expand_path(File.dirname(__FILE__) + "/base.rb")
require 'htmlentities'
-
# Before running this script, paste these lines into your shell,
# then use arrow keys to edit the values
=begin
@@ -16,7 +15,6 @@ export IMAGE_BASE="http://www.example.com/forum"
export BASE="forum"
=end
-
class ImportScripts::MylittleforumSQL < ImportScripts::Base
DB_HOST ||= ENV['DB_HOST'] || "localhost"
@@ -37,10 +35,9 @@ class ImportScripts::MylittleforumSQL < ImportScripts::Base
# Site settings
SiteSetting.disable_emails = true
if FORCE_HOSTNAME
- SiteSetting.force_hostname=FORCE_HOSTNAME
+ SiteSetting.force_hostname = FORCE_HOSTNAME
end
-
def initialize
if IMPORT_AFTER > "1970-01-01"
@@ -57,7 +54,7 @@ class ImportScripts::MylittleforumSQL < ImportScripts::Base
database: DB_NAME
)
rescue Exception => e
- puts '='*50
+ puts '=' * 50
puts e.message
puts < #{username}")
end
@@ -210,7 +207,7 @@ EOM
OFFSET #{offset};")
break if discussions.size < 1
- next if all_records_exist? :posts, discussions.map {|t| "discussion#" + t['DiscussionID'].to_s}
+ next if all_records_exist? :posts, discussions.map { |t| "discussion#" + t['DiscussionID'].to_s }
create_posts(discussions, total: total_count, offset: offset) do |discussion|
@@ -226,7 +223,7 @@ EOM
{
id: "discussion#" + discussion['DiscussionID'].to_s,
user_id: user_id_from_imported_user_id(discussion['InsertUserID']) || Discourse::SYSTEM_USER_ID,
- title: discussion['Name'].gsub('\\"','"'),
+ title: discussion['Name'].gsub('\\"', '"'),
category: category_id_from_imported_category_id(discussion['CategoryID']),
raw: raw,
created_at: Time.zone.at(discussion['DateInserted']),
@@ -260,7 +257,7 @@ EOM
OFFSET #{offset};")
break if comments.size < 1
- next if all_records_exist? :posts, comments.map {|comment| "comment#" + comment['CommentID'].to_s}
+ next if all_records_exist? :posts, comments.map { |comment| "comment#" + comment['CommentID'].to_s }
create_posts(comments, total: total_count, offset: offset) do |comment|
next unless t = topic_lookup_from_imported_post_id("discussion#" + comment['DiscussionID'].to_s)
@@ -286,9 +283,9 @@ EOM
youtube_cooked = clean_up(youtube_raw.dup.to_s)
# get just src from