# frozen_string_literal: true
require 'rails_helper'
describe Jobs::ExportCsvFile do
context '#execute' do
# Regular user who is the target of the logged staff action.
let(:other_user) { Fabricate(:user) }
# Admin who performs the logged action and requests the export.
let(:admin) { Fabricate(:admin) }
# Creates a single staff-action log entry so the export has a row to emit.
# Lazy `let`: examples must reference it to force creation.
let(:action_log) { StaffActionLogger.new(admin).log_revoke_moderation(other_user) }
# Invoking the job without an `entity` argument is a caller bug and
# must surface as Discourse::InvalidParameters.
it 'raises an error when the entity is missing' do
  job = described_class.new
  expect { job.execute(user_id: admin.id) }.to raise_error(Discourse::InvalidParameters)
end
it 'works' do
  # Force evaluation of the lazy `let` so a staff-action row exists
  # before the export job runs.
  action_log

  begin
    # Exporting staff actions should produce exactly one new Upload
    # (the zipped CSV attached to the notification message).
    expect do
      Jobs::ExportCsvFile.new.execute(
        user_id: admin.id,
        entity: "staff_action"
      )
    end.to change { Upload.count }.by(1)

    # The job notifies the requester via a system-generated topic.
    system_message = admin.topics_allowed.last

    expect(system_message.title).to eq(I18n.t(
      "system_messages.csv_export_succeeded.subject_template",
      export_title: "Staff Action"
    ))

    upload = system_message.first_post.uploads.first

    # Message body embeds a download link for the generated archive.
    expect(system_message.first_post.raw).to eq(I18n.t(
      "system_messages.csv_export_succeeded.text_body_template",
      download_link: "[#{upload.original_filename}|attachment](#{upload.short_url}) (#{upload.filesize} Bytes)"
    ).chomp)

    # The export record references the notification topic, and the
    # topic is closed so no replies are expected.
    expect(system_message.id).to eq(UserExport.last.topic_id)
    expect(system_message.closed).to eq(true)

    # The attached archive should contain exactly one file (the CSV).
    files = []
    Zip::File.open(Discourse.store.path_for(upload)) do |zip_file|
      zip_file.each { |entry| files << entry.name }
    end

    expect(files.size).to eq(1)
  ensure
    # Clean up the generated upload even if an expectation fails.
    admin.uploads.each(&:destroy!)
  end
end
end
context '.report_export' do
# Admin requesting the report export.
let(:user) { Fabricate(:admin) }

# Builds the job with its accessors pre-populated, mirroring what
# `#execute` sets up before delegating to the per-entity exporter.
let(:exporter) do
  exporter = Jobs::ExportCsvFile.new
  exporter.entity = 'report'
  # The job normally receives indifferent-access args; fully qualify the
  # constant — the top-level `HashWithIndifferentAccess` alias is
  # deprecated in favor of ActiveSupport::HashWithIndifferentAccess.
  exporter.extra = ActiveSupport::HashWithIndifferentAccess.new(start_date: '2010-01-01', end_date: '2011-01-01')
  # Reload from the DB to mimic how the job looks the user up by id.
  exporter.current_user = User.find_by(id: user.id)
  exporter
end
it "does not throw an error when the dates are invalid" do
|
|
|
|
Jobs::ExportCsvFile.new.execute(
|
|
|
|
entity: 'report',
|
|
|
|
user_id: user.id,
|
|
|
|
args: { start_date: 'asdfasdf', end_date: 'not-a-date', name: 'dau_by_mau' }
|
|
|
|
)
|
|
|
|
end
|
|
|
|
|
2019-06-28 14:50:31 +08:00
|
|
|
# dau_by_mau is a single-column report: one value column next to the date.
it 'works with single-column reports' do
  user.user_visits.create!(visited_at: '2010-01-01', posts_read: 42)
  other_visitor = Fabricate(:user)
  other_visitor.user_visits.create!(visited_at: '2010-01-03', posts_read: 420)

  exporter.extra['name'] = 'dau_by_mau'
  rows = exporter.report_export.to_a

  expect(rows[0]).to contain_exactly("Day", "Percent")
  expect(rows[1]).to contain_exactly("2010-01-01", "100.0")
  expect(rows[2]).to contain_exactly("2010-01-03", "50.0")
end
# The `group` filter restricts the visits report to group members.
it 'works with filters' do
  # Visit by a user outside the target group — must be filtered out.
  user.user_visits.create!(visited_at: '2010-01-01', posts_read: 42)

  group = Fabricate(:group)
  member = Fabricate(:user)
  # Membership is created for its side effect only (the `group_user`
  # local in the old version was never read).
  Fabricate(:group_user, group: group, user: member)
  member.user_visits.create!(visited_at: '2010-01-03', posts_read: 420)

  exporter.extra['name'] = 'visits'
  exporter.extra['group'] = group.id
  report = exporter.report_export.to_a

  # Header plus the single row for the group member's visit.
  expect(report.length).to eq(2)
  expect(report.first).to contain_exactly("Day", "Count")
  expect(report.second).to contain_exactly("2010-01-03", "1")
end
# When a report supplies no column label, the export falls back to "Count".
it 'works with single-column reports with default label' do
  user.user_visits.create!(visited_at: '2010-01-01')
  Fabricate(:user).user_visits.create!(visited_at: '2010-01-03')

  exporter.extra['name'] = 'visits'
  rows = exporter.report_export.to_a

  expect(rows[0]).to contain_exactly("Day", "Count")
  expect(rows[1]).to contain_exactly("2010-01-01", "1")
  expect(rows[2]).to contain_exactly("2010-01-03", "1")
end
# staff_logins emits several labelled columns per row.
it 'works with multi-columns reports' do
  DiscourseIpInfo.stubs(:get).with("1.1.1.1").returns(location: "Earth")
  user.user_auth_token_logs.create!(action: "login", client_ip: "1.1.1.1", created_at: '2010-01-01')

  exporter.extra['name'] = 'staff_logins'
  header, first_row = exporter.report_export.to_a

  expect(header).to contain_exactly("User", "Location", "Login at")
  expect(first_row).to contain_exactly(user.username, "Earth", "2010-01-01 00:00:00 UTC")
end
# top_referred_topics lists topics by incoming-link clicks.
it 'works with topic reports' do
  freeze_time DateTime.parse('2010-01-01 6:00')

  exporter.extra['name'] = 'top_referred_topics'
  referred_post = Fabricate(:post)
  # A second post with no incoming link; it should not appear in the
  # report (the old `post2` local was assigned but never read).
  Fabricate(:post)
  IncomingLink.add(host: "a.com", referer: "http://twitter.com", post_id: referred_post.id, ip_address: '1.1.1.1')

  report = exporter.report_export.to_a

  expect(report.first).to contain_exactly("Topic", "Clicks")
  expect(report.second).to contain_exactly(referred_post.topic.id.to_s, "1")
end
# consolidated_page_views is a stacked chart: one column per request type.
it 'works with stacked_chart reports' do
  # Seed three days of traffic for each tracked request type.
  {
    'page_view_logged_in' => { '2010-01-01' => 1, '2010-01-02' => 2, '2010-01-03' => 3 },
    'page_view_anon'      => { '2010-01-01' => 4, '2010-01-02' => 5, '2010-01-03' => 6 },
    'page_view_crawler'   => { '2010-01-01' => 7, '2010-01-02' => 8, '2010-01-03' => 9 }
  }.each do |req_type, counts|
    counts.each do |date, count|
      ApplicationRequest.create!(date: date, req_type: req_type, count: count)
    end
  end

  exporter.extra['name'] = 'consolidated_page_views'
  report = exporter.report_export.to_a

  expect(report[0]).to contain_exactly("Day", "Logged in users", "Anonymous users", "Crawlers")
  expect(report[1]).to contain_exactly("2010-01-01", "1", "4", "7")
  expect(report[2]).to contain_exactly("2010-01-02", "2", "5", "8")
  expect(report[3]).to contain_exactly("2010-01-03", "3", "6", "9")
end
# The posts report accepts a category filter and can optionally
# include posts from subcategories.
it 'works with posts reports and filters' do
  parent = Fabricate(:category)
  child = Fabricate(:category, parent_category: parent)

  Fabricate(:post, topic: Fabricate(:topic, category: parent), created_at: '2010-01-01 12:00:00 UTC')
  Fabricate(:post, topic: Fabricate(:topic, category: child), created_at: '2010-01-01 12:00:00 UTC')

  exporter.extra['name'] = 'posts'

  # Filtering by the parent category alone counts only its own post.
  exporter.extra['category'] = parent.id
  report = exporter.report_export.to_a
  expect(report[0]).to contain_exactly("Count", "Day")
  expect(report[1]).to contain_exactly("1", "2010-01-01")

  # Enabling subcategory inclusion picks up the child's post as well.
  exporter.extra['include_subcategories'] = true
  report = exporter.report_export.to_a
  expect(report[0]).to contain_exactly("Count", "Day")
  expect(report[1]).to contain_exactly("2", "2010-01-01")
end
end
# Column order of the user list CSV; `to_hash` below zips these names
# with a raw row so examples can address cells by column name.
let(:user_list_header) {
  %w{
    id name username email title created_at last_seen_at last_posted_at
    last_emailed_at trust_level approved suspended_at suspended_till blocked
    active admin moderator ip_address staged secondary_emails topics_entered
    posts_read_count time_read topic_count post_count likes_given
    likes_received location website views external_id external_email
    external_username external_name external_avatar_url
  }
}

# Memoized result of the user list export (one array per user row).
let(:user_list_export) { Jobs::ExportCsvFile.new.user_list_export }
# Converts one exported row (an array of cell values) into a hash keyed
# by the column names in `user_list_header`, e.g. row["username"].
def to_hash(row)
  # zip + to_h is clearer than the old Hash[*header.zip(row).flatten],
  # and it stays correct even if a cell ever holds an array value
  # (flatten would silently merge such a cell into neighboring pairs).
  user_list_header.zip(row).to_h
end
it "exports secondary emails" do
|
2019-02-27 17:12:20 +08:00
|
|
|
user = Fabricate(:user)
|
|
|
|
Fabricate(:secondary_email, user: user, primary: false)
|
2019-04-11 14:55:02 +08:00
|
|
|
secondary_emails = user.secondary_emails
|
2019-02-27 17:12:20 +08:00
|
|
|
|
|
|
|
user = to_hash(user_list_export.find { |u| u[0].to_i == user.id })
|
|
|
|
|
2019-04-11 14:55:02 +08:00
|
|
|
expect(user["secondary_emails"].split(";")).to match_array(secondary_emails)
|
2019-02-27 17:12:20 +08:00
|
|
|
end
|
|
|
|
|
2014-11-26 06:43:17 +08:00
|
|
|
it 'exports sso data' do
  SiteSetting.sso_url = "https://www.example.com/sso"
  SiteSetting.enable_sso = true

  sso_user = Fabricate(:user)
  # A comma in the location exercises CSV quoting in the export.
  sso_user.user_profile.update_column(:location, "La,La Land")
  sso_user.create_single_sign_on_record(external_id: "123", last_payload: "xxx", external_email: 'test@test.com')

  # Locate this user's row by id (first column) and label its cells.
  row = to_hash(user_list_export.find { |u| u[0].to_i == sso_user.id })

  expect(row["location"]).to eq('"La,La Land"')
  expect(row["external_id"]).to eq("123")
  expect(row["external_email"]).to eq("test@test.com")
end
end
|