# frozen_string_literal: true
require 'csv'
module Jobs
  class ExportCsvFile < ::Jobs::Base
    sidekiq_options retry: false
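
    # Maps each exportable entity to the CSV columns produced for it. The same
    # lists drive both the header row (see #get_header) and the per-entity row
    # builders further down.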
    HEADER_ATTRS_FOR ||= HashWithIndifferentAccess.new(
      user_archive: ['topic_title', 'category', 'sub_category', 'is_pm', 'post', 'like_count', 'reply_count', 'url', 'created_at'],
      user_list: ['id', 'name', 'username', 'email', 'title', 'created_at', 'last_seen_at', 'last_posted_at', 'last_emailed_at', 'trust_level', 'approved', 'suspended_at', 'suspended_till', 'silenced_till', 'active', 'admin', 'moderator', 'ip_address', 'staged', 'secondary_emails'],
      user_stats: ['topics_entered', 'posts_read_count', 'time_read', 'topic_count', 'post_count', 'likes_given', 'likes_received'],
      user_profile: ['location', 'website', 'views'],
      user_sso: ['external_id', 'external_email', 'external_username', 'external_name', 'external_avatar_url'],
      staff_action: ['staff_user', 'action', 'subject', 'created_at', 'details', 'context'],
      screened_email: ['email', 'action', 'match_count', 'last_match_at', 'created_at', 'ip_address'],
      screened_ip: ['ip_address', 'action', 'match_count', 'last_match_at', 'created_at'],
      screened_url: ['domain', 'action', 'match_count', 'last_match_at', 'created_at'],
      report: ['date', 'value']
    )
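
    # Entry point: builds the CSV for the requested entity, zips it, uploads the
    # archive, and finally notifies the requesting user via a system message.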
    def execute(args)
      @entity = args[:entity]
      @extra = HashWithIndifferentAccess.new(args[:args]) if args[:args]
      @current_user = User.find_by(id: args[:user_id])

      export_method = :"#{@entity}_export"
      raise Discourse::InvalidParameters.new(:entity) unless respond_to?(export_method)

      file_name_prefix = if @entity == "user_archive"
        "#{@entity.split('_').join('-')}-#{@current_user.username}-#{Time.now.strftime("%y%m%d-%H%M%S")}"
      elsif @entity == "report" && @extra[:name].present?
        "#{@extra[:name].split('_').join('-')}-#{Time.now.strftime("%y%m%d-%H%M%S")}"
      else
        "#{@entity.split('_').join('-')}-#{Time.now.strftime("%y%m%d-%H%M%S")}"
      end

      export_title = if @entity == "report" && @extra[:name].present?
        I18n.t("reports.#{@extra[:name]}.title")
      else
        @entity.split('_').join(' ').titleize
      end

      user_export = UserExport.create(file_name: file_name_prefix, user_id: @current_user.id)
      file_name = "#{file_name_prefix}-#{user_export.id}.csv"
      absolute_path = "#{UserExport.base_directory}/#{file_name}"

      # ensure directory exists
      FileUtils.mkdir_p(UserExport.base_directory) unless Dir.exist?(UserExport.base_directory)

      # Generate a compressed CSV file
      begin
        CSV.open(absolute_path, "w") do |csv|
          csv << get_header if @entity != "report"
          public_send(export_method).each { |d| csv << d }
        end

        compressed_file_path = Compression::Zip.new.compress(UserExport.base_directory, file_name)
      ensure
        File.delete(absolute_path) if File.exist?(absolute_path)
      end

      # create upload
      upload = nil

      if File.exist?(compressed_file_path)
        File.open(compressed_file_path) do |file|
          upload = UploadCreator.new(
            file,
            File.basename(compressed_file_path),
            type: 'csv_export',
            for_export: 'true'
          ).create_for(@current_user.id)

          if upload.persisted?
            user_export.update_columns(upload_id: upload.id)
          else
            Rails.logger.warn("Failed to upload the file #{compressed_file_path}")
          end
        end

        File.delete(compressed_file_path)
      end
|
2014-12-29 19:58:33 +08:00
|
|
|
ensure
|
2019-06-11 12:14:31 +08:00
|
|
|
post = notify_user(upload, export_title)
|
|
|
|
|
2019-05-28 19:08:41 +08:00
|
|
|
if user_export.present? && post.present?
|
|
|
|
topic = post.topic
|
|
|
|
user_export.update_columns(topic_id: topic.id)
|
|
|
|
topic.update_status('closed', true, Discourse.system_user)
|
|
|
|
end
|
2014-08-09 18:28:57 +08:00
|
|
|
end
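
    # Each *_export method below yields one CSV row (an array of values) at a
    # time; when called without a block it returns an Enumerator instead.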
    def user_archive_export
      return enum_for(:user_archive_export) unless block_given?

      Post.includes(topic: :category)
        .where(user_id: @current_user.id)
        .select(:topic_id, :post_number, :raw, :like_count, :reply_count, :created_at)
        .order(:created_at)
        .with_deleted
        .each do |user_archive|
          yield get_user_archive_fields(user_archive)
        end
    end

    def user_list_export
      return enum_for(:user_list_export) unless block_given?

      user_field_ids = UserField.pluck(:id)

      condition = {}
      if @extra && @extra[:trust_level] && trust_level = TrustLevel.levels[@extra[:trust_level].to_sym]
        condition = { trust_level: trust_level }
      end

      if SiteSetting.enable_sso
        # SSO enabled
        User.where(condition).includes(:user_profile, :user_stat, :user_emails, :single_sign_on_record, :groups).find_each do |user|
          user_info_array = get_base_user_array(user)
          user_info_array = add_single_sign_on(user, user_info_array)
          user_info_array = add_custom_fields(user, user_info_array, user_field_ids)
          user_info_array = add_group_names(user, user_info_array)
          yield user_info_array
        end
      else
        # SSO disabled
        User.where(condition).includes(:user_profile, :user_stat, :user_emails, :groups).find_each do |user|
          user_info_array = get_base_user_array(user)
          user_info_array = add_custom_fields(user, user_info_array, user_field_ids)
          user_info_array = add_group_names(user, user_info_array)
          yield user_info_array
        end
      end
    end

    def staff_action_export
      return enum_for(:staff_action_export) unless block_given?

      staff_action_data = if @current_user.admin?
        UserHistory.only_staff_actions.order('id DESC')
      else
        UserHistory.where(admin_only: false).only_staff_actions.order('id DESC')
      end

      staff_action_data.each do |staff_action|
        yield get_staff_action_fields(staff_action)
      end
    end

    def screened_email_export
      return enum_for(:screened_email_export) unless block_given?

      ScreenedEmail.order('last_match_at DESC').each do |screened_email|
        yield get_screened_email_fields(screened_email)
      end
    end

    def screened_ip_export
      return enum_for(:screened_ip_export) unless block_given?

      ScreenedIpAddress.order('id DESC').each do |screened_ip|
        yield get_screened_ip_fields(screened_ip)
      end
    end

    def screened_url_export
      return enum_for(:screened_url_export) unless block_given?

      ScreenedUrl.select("domain, sum(match_count) as match_count, max(last_match_at) as last_match_at, min(created_at) as created_at")
        .group(:domain)
        .order('last_match_at DESC')
        .each do |screened_url|
          yield get_screened_url_fields(screened_url)
        end
    end
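
    # Report exports normalise the incoming filter params (dates arrive as
    # strings, category/group ids as strings or nil) before fetching the report.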
    def report_export
      return enum_for(:report_export) unless block_given?

      @extra[:start_date] = @extra[:start_date].to_date.beginning_of_day if @extra[:start_date].is_a?(String)
      @extra[:end_date] = @extra[:end_date].to_date.end_of_day if @extra[:end_date].is_a?(String)
      @extra[:category_id] = @extra[:category_id].present? ? @extra[:category_id].to_i : nil
      @extra[:group_id] = @extra[:group_id].present? ? @extra[:group_id].to_i : nil

      report = Report.find(@extra[:name], @extra)

      header = []
      titles = {}

      report.labels.each do |label|
        if label[:type] == :user
          titles[label[:properties][:username]] = label[:title]
          header << label[:properties][:username]
        else
          titles[label[:property]] = label[:title]
          header << label[:property]
        end
      end
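
      # Stacked-chart reports store one series per label; pivot them into one
      # row per x value so that each series becomes its own column.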
      if report.modes == [:stacked_chart]
        header = [:x]
        data = {}

        report.data.map do |series|
          header << series[:label]
          series[:data].each do |datapoint|
            data[datapoint[:x]] ||= { x: datapoint[:x] }
            data[datapoint[:x]][series[:label]] = datapoint[:y]
          end
        end

        data = data.values
      else
        data = report.data
      end

      yield header.map { |k| titles[k] || k }
      data.each { |row| yield row.values_at(*header).map(&:to_s) }
    end
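
    # Builds the header row. user_list combines several column groups and
    # appends custom user fields plus a trailing group_names column.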
    def get_header
      if @entity == 'user_list'
        header_array = HEADER_ATTRS_FOR['user_list'] + HEADER_ATTRS_FOR['user_stats'] + HEADER_ATTRS_FOR['user_profile']
        header_array.concat(HEADER_ATTRS_FOR['user_sso']) if SiteSetting.enable_sso

        user_custom_fields = UserField.all
        if user_custom_fields.present?
          user_custom_fields.each do |custom_field|
            header_array.push("#{custom_field.name} (custom user field)")
          end
        end

        header_array.push("group_names")
      else
        header_array = HEADER_ATTRS_FOR[@entity]
      end

      header_array
    end

    private
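
    # Wraps values that contain a comma in literal double quotes before they
    # are handed to the CSV writer.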
    def escape_comma(string)
      string&.include?(",") ? %Q|"#{string}"| : string
    end

    def get_base_user_array(user)
      [
        user.id,
        escape_comma(user.name),
        user.username,
        user.email,
        escape_comma(user.title),
        user.created_at,
        user.last_seen_at,
        user.last_posted_at,
        user.last_emailed_at,
        user.trust_level,
        user.approved,
        user.suspended_at,
        user.suspended_till,
        user.silenced_till,
        user.active,
        user.admin,
        user.moderator,
        user.ip_address,
        user.staged,
        user.secondary_emails.join(";"),
        user.user_stat.topics_entered,
        user.user_stat.posts_read_count,
        user.user_stat.time_read,
        user.user_stat.topic_count,
        user.user_stat.post_count,
        user.user_stat.likes_given,
        user.user_stat.likes_received,
        escape_comma(user.user_profile.location),
        user.user_profile.website,
        user.user_profile.views
      ]
    end

    def add_single_sign_on(user, user_info_array)
      if user.single_sign_on_record
        user_info_array.push(user.single_sign_on_record.external_id, user.single_sign_on_record.external_email, user.single_sign_on_record.external_username, escape_comma(user.single_sign_on_record.external_name), user.single_sign_on_record.external_avatar_url)
      else
        user_info_array.push(nil, nil, nil, nil, nil)
      end
      user_info_array
    end

    def add_custom_fields(user, user_info_array, user_field_ids)
      if user_field_ids.present?
        user.user_fields.each do |custom_field|
          user_info_array << escape_comma(custom_field[1])
        end
      end
      user_info_array
    end

    def add_group_names(user, user_info_array)
      group_names = user.groups.map { |g| g.name }.join(";")
      user_info_array << escape_comma(group_names) if group_names.present?
      user_info_array
    end
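
    # Builds one user_archive row: merges the post's raw data with its topic's
    # title, category/sub-category names, PM flag and URL, then orders the
    # values according to HEADER_ATTRS_FOR['user_archive'].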
    def get_user_archive_fields(user_archive)
      user_archive_array = []
      topic_data = user_archive.topic
      user_archive = user_archive.as_json
      topic_data = Topic.with_deleted.find_by(id: user_archive['topic_id']) if topic_data.nil?
      return user_archive_array if topic_data.nil?

      category = topic_data.category
      sub_category_name = "-"
      if category
        category_name = category.name
        if category.parent_category_id.present?
          # sub category
          if parent_category = Category.find_by(id: category.parent_category_id)
            category_name = parent_category.name
            sub_category_name = category.name
          end
        end
      else
        # PM
        category_name = "-"
      end
      is_pm = topic_data.archetype == "private_message" ? I18n.t("csv_export.boolean_yes") : I18n.t("csv_export.boolean_no")
      url = "#{Discourse.base_url}/t/#{topic_data.slug}/#{topic_data.id}/#{user_archive['post_number']}"

      topic_hash = { "post" => user_archive['raw'], "topic_title" => topic_data.title, "category" => category_name, "sub_category" => sub_category_name, "is_pm" => is_pm, "url" => url }
      user_archive.merge!(topic_hash)

      HEADER_ATTRS_FOR['user_archive'].each do |attr|
        user_archive_array.push(user_archive[attr])
      end

      user_archive_array
    end
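
    # Translates a UserHistory record into readable values: numeric action ids
    # become action names, and acting/target user ids become usernames.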
    def get_staff_action_fields(staff_action)
      staff_action_array = []

      HEADER_ATTRS_FOR['staff_action'].each do |attr|
        data =
          if attr == 'action'
            UserHistory.actions.key(staff_action.attributes[attr]).to_s
          elsif attr == 'staff_user'
            user = User.find_by(id: staff_action.attributes['acting_user_id'])
            user.username if !user.nil?
          elsif attr == 'subject'
            user = User.find_by(id: staff_action.attributes['target_user_id'])
            user.nil? ? staff_action.attributes[attr] : "#{user.username} #{staff_action.attributes[attr]}"
          else
            staff_action.attributes[attr]
          end

        staff_action_array.push(data)
      end

      staff_action_array
    end

    def get_screened_email_fields(screened_email)
      screened_email_array = []

      HEADER_ATTRS_FOR['screened_email'].each do |attr|
        data =
          if attr == 'action'
            ScreenedEmail.actions.key(screened_email.attributes['action_type']).to_s
          else
            screened_email.attributes[attr]
          end

        screened_email_array.push(data)
      end

      screened_email_array
    end

    def get_screened_ip_fields(screened_ip)
      screened_ip_array = []

      HEADER_ATTRS_FOR['screened_ip'].each do |attr|
        data =
          if attr == 'action'
            ScreenedIpAddress.actions.key(screened_ip.attributes['action_type']).to_s
          else
            screened_ip.attributes[attr]
          end

        screened_ip_array.push(data)
      end

      screened_ip_array
    end

    def get_screened_url_fields(screened_url)
      screened_url_array = []

      HEADER_ATTRS_FOR['screened_url'].each do |attr|
        data =
          if attr == 'action'
            action = ScreenedUrl.actions.key(screened_url.attributes['action_type']).to_s
            action = "do nothing" if action.blank?
            action
          else
            screened_url.attributes[attr]
          end

        screened_url_array.push(data)
      end

      screened_url_array
    end
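
    # Sends the requesting user a system message: a download link for the
    # uploaded archive on success, or a failure notice when the upload is
    # missing. Returns the created post (or nil).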
    def notify_user(upload, export_title)
      post = nil

      if @current_user
        post = if upload
          SystemMessage.create_from_system_user(
            @current_user,
            :csv_export_succeeded,
            download_link: UploadMarkdown.new(upload).attachment_markdown,
            export_title: export_title
          )
        else
          SystemMessage.create_from_system_user(@current_user, :csv_export_failed)
        end
      end

      post
    end

  end
end