2019-04-30 08:27:42 +08:00
|
|
|
# frozen_string_literal: true
|
|
|
|
|
2016-06-16 01:49:57 +08:00
|
|
|
require 'rails_helper'
|
|
|
|
|
|
|
|
describe WebHook do
|
|
|
|
it { is_expected.to validate_presence_of :payload_url }
|
|
|
|
it { is_expected.to validate_presence_of :content_type }
|
|
|
|
it { is_expected.to validate_presence_of :last_delivery_status }
|
|
|
|
it { is_expected.to validate_presence_of :web_hook_event_types }
|
|
|
|
|
|
|
|
describe '#content_types' do
  subject { WebHook.content_types }

  # The enum positions are persisted in the DB, so their order must not change.
  it "'json' (application/json) should be at 1st position" do
    expect(subject['application/json']).to eq(1)
  end

  # Fixed typo in the description: "2st" -> "2nd".
  it "'url_encoded' (application/x-www-form-urlencoded) should be at 2nd position" do
    expect(subject['application/x-www-form-urlencoded']).to eq(2)
  end
end
|
|
|
|
|
|
|
|
describe '#last_delivery_statuses' do
  subject { WebHook.last_delivery_statuses }

  # The enum positions are persisted in the DB, so their order must not change.
  it "inactive should be at 1st position" do
    expect(subject[:inactive]).to eq(1)
  end

  # Fixed typo in the description: "2st" -> "2nd".
  it "failed should be at 2nd position" do
    expect(subject[:failed]).to eq(2)
  end

  # Fixed typo in the description: "3st" -> "3rd".
  it "successful should be at 3rd position" do
    expect(subject[:successful]).to eq(3)
  end
end
|
|
|
|
|
|
|
|
context 'web hooks' do
|
2019-05-07 11:12:20 +08:00
|
|
|
fab!(:post_hook) { Fabricate(:web_hook, payload_url: " https://example.com ") }
|
|
|
|
fab!(:topic_hook) { Fabricate(:topic_web_hook) }
|
2016-06-16 01:49:57 +08:00
|
|
|
|
2021-05-21 09:43:47 +08:00
|
|
|
it "removes whitespace from payload_url before saving" do
|
2017-12-11 16:15:50 +08:00
|
|
|
expect(post_hook.payload_url).to eq("https://example.com")
|
|
|
|
end
|
|
|
|
|
2020-03-18 00:39:24 +08:00
|
|
|
it "excludes disabled plugin web_hooks" do
|
|
|
|
web_hook_event_types = WebHookEventType.active.find_by(name: 'solved')
|
|
|
|
expect(web_hook_event_types).to eq(nil)
|
|
|
|
end
|
|
|
|
|
|
|
|
it "includes non-plugin web_hooks" do
|
|
|
|
web_hook_event_types = WebHookEventType.active.where(name: 'topic')
|
|
|
|
expect(web_hook_event_types.count).to eq(1)
|
|
|
|
end
|
|
|
|
|
|
|
|
it "includes enabled plugin web_hooks" do
|
|
|
|
SiteSetting.stubs(:solved_enabled).returns(true)
|
|
|
|
web_hook_event_types = WebHookEventType.active.where(name: 'solved')
|
|
|
|
expect(web_hook_event_types.count).to eq(1)
|
|
|
|
end
|
|
|
|
|
2018-05-21 16:23:09 +08:00
|
|
|
describe '#active_web_hooks' do
  it "returns unique hooks" do
    # Make post_hook match both via an explicit event type and via wildcard;
    # it must still only appear once in the result.
    post_hook.web_hook_event_types << WebHookEventType.find_by(name: 'topic')
    post_hook.update!(wildcard_web_hook: true)

    expect(WebHook.active_web_hooks(:post)).to eq([post_hook])
  end

  it 'find relevant hooks' do
    expect(WebHook.active_web_hooks(:post)).to eq([post_hook])
    expect(WebHook.active_web_hooks(:topic)).to eq([topic_hook])
  end

  it 'excludes inactive hooks' do
    post_hook.update!(active: false)

    expect(WebHook.active_web_hooks(:post)).to eq([])
    expect(WebHook.active_web_hooks(:topic)).to eq([topic_hook])
  end

  describe 'wildcard web hooks' do
    fab!(:wildcard_hook) { Fabricate(:wildcard_web_hook) }

    it 'should include wildcard hooks' do
      # A wildcard hook is returned for the :wildcard type and also piggybacks
      # on every concrete event type.
      expect(WebHook.active_web_hooks(:wildcard)).to eq([wildcard_hook])
      expect(WebHook.active_web_hooks(:post)).to contain_exactly(post_hook, wildcard_hook)
      expect(WebHook.active_web_hooks(:topic)).to contain_exactly(topic_hook, wildcard_hook)
    end
  end
end
|
|
|
|
|
2018-05-21 16:23:09 +08:00
|
|
|
describe '#enqueue_hooks' do
|
|
|
|
it 'accepts additional parameters' do
|
|
|
|
payload = { test: 'some payload' }.to_json
|
2018-10-05 16:53:59 +08:00
|
|
|
WebHook.enqueue_hooks(:post, :post_created, payload: payload)
|
2016-06-16 01:49:57 +08:00
|
|
|
|
2018-05-21 16:23:09 +08:00
|
|
|
job_args = Jobs::EmitWebHookEvent.jobs.first["args"].first
|
2016-06-16 01:49:57 +08:00
|
|
|
|
2018-05-21 16:23:09 +08:00
|
|
|
expect(job_args["web_hook_id"]).to eq(post_hook.id)
|
|
|
|
expect(job_args["event_type"]).to eq('post')
|
|
|
|
expect(job_args["payload"]).to eq(payload)
|
2016-06-16 01:49:57 +08:00
|
|
|
end
|
|
|
|
|
2018-05-21 16:23:09 +08:00
|
|
|
context 'includes wildcard hooks' do
|
2019-05-07 11:12:20 +08:00
|
|
|
fab!(:wildcard_hook) { Fabricate(:wildcard_web_hook) }
|
2016-06-16 01:49:57 +08:00
|
|
|
|
2018-05-21 16:23:09 +08:00
|
|
|
describe '#enqueue_hooks' do
|
|
|
|
it 'enqueues hooks with ids' do
|
2018-10-05 16:53:59 +08:00
|
|
|
WebHook.enqueue_hooks(:post, :post_created)
|
2018-05-21 16:23:09 +08:00
|
|
|
|
|
|
|
job_args = Jobs::EmitWebHookEvent.jobs.first["args"].first
|
|
|
|
|
|
|
|
expect(job_args["web_hook_id"]).to eq(post_hook.id)
|
|
|
|
expect(job_args["event_type"]).to eq('post')
|
2016-06-16 01:49:57 +08:00
|
|
|
|
2018-05-21 16:23:09 +08:00
|
|
|
job_args = Jobs::EmitWebHookEvent.jobs.last["args"].first
|
2016-06-16 01:49:57 +08:00
|
|
|
|
2018-05-21 16:23:09 +08:00
|
|
|
expect(job_args["web_hook_id"]).to eq(wildcard_hook.id)
|
|
|
|
expect(job_args["event_type"]).to eq('post')
|
|
|
|
end
|
2016-06-16 01:49:57 +08:00
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
describe 'enqueues hooks' do
|
|
|
|
let(:user) { Fabricate(:user) }
|
2016-09-13 16:03:17 +08:00
|
|
|
let(:admin) { Fabricate(:admin) }
|
2016-06-16 01:49:57 +08:00
|
|
|
let(:topic) { Fabricate(:topic, user: user) }
|
2016-09-13 16:03:17 +08:00
|
|
|
let(:post) { Fabricate(:post, topic: topic, user: user) }
|
2018-05-21 16:23:09 +08:00
|
|
|
let(:topic_web_hook) { Fabricate(:topic_web_hook) }
|
2017-03-16 14:44:09 +08:00
|
|
|
|
|
|
|
before do
|
2018-05-21 16:23:09 +08:00
|
|
|
topic_web_hook
|
2017-03-16 14:44:09 +08:00
|
|
|
end
|
2016-06-16 01:49:57 +08:00
|
|
|
|
2018-05-21 16:23:09 +08:00
|
|
|
describe 'when there are no active hooks' do
  it 'should not generate payload and enqueue anything for topic events' do
    topic_web_hook.destroy!
    created_post = PostCreator.create(user, raw: 'post', title: 'topic', skip_validations: true)
    expect(Jobs::EmitWebHookEvent.jobs.length).to eq(0)

    # With no active hooks the (expensive) payload serialization must be skipped.
    WebHook.expects(:generate_payload).times(0)
    PostDestroyer.new(admin, created_post).destroy
    expect(Jobs::EmitWebHookEvent.jobs.length).to eq(0)
  end

  it 'should not enqueue anything for tag events' do
    tag = Fabricate(:tag)
    tag.destroy!
    expect(Jobs::EmitWebHookEvent.jobs.length).to eq(0)
  end
end
|
2017-03-16 14:44:09 +08:00
|
|
|
|
2018-05-21 16:23:09 +08:00
|
|
|
it 'should enqueue the right hooks for topic events' do
  # Helper: args of the most recently enqueued web hook job.
  last_job_args = -> { Jobs::EmitWebHookEvent.jobs.last["args"].first }

  created_post = PostCreator.create(user, raw: 'post', title: 'topic', skip_validations: true)
  topic_id = created_post.topic.id

  args = last_job_args.call
  expect(args["event_name"]).to eq("topic_created")
  expect(JSON.parse(args["payload"])["id"]).to eq(topic_id)

  PostDestroyer.new(user, created_post).destroy
  args = last_job_args.call
  expect(args["event_name"]).to eq("topic_destroyed")
  expect(JSON.parse(args["payload"])["id"]).to eq(topic_id)

  PostDestroyer.new(user, created_post).recover
  args = last_job_args.call
  expect(args["event_name"]).to eq("topic_recovered")
  expect(JSON.parse(args["payload"])["id"]).to eq(topic_id)

  # Each status change fires its own "topic_<status>_status_updated" event.
  %w{archived closed visible}.each do |status|
    created_post.topic.update_status(status, true, topic.user)
    args = last_job_args.call

    expect(args["event_name"]).to eq("topic_#{status}_status_updated")
    expect(JSON.parse(args["payload"])["id"]).to eq(topic_id)
  end

  category = Fabricate(:category)

  expect do
    PostRevisor.new(created_post, created_post.topic).revise!(
      created_post.user,
      { category_id: category.id },
      { skip_validations: true },
    )
  end.to change { Jobs::EmitWebHookEvent.jobs.length }.by(1)

  args = last_job_args.call
  expect(args["event_name"]).to eq("topic_edited")
  payload = JSON.parse(args["payload"])
  expect(payload["id"]).to eq(topic_id)
  expect(payload["category_id"]).to eq(category.id)
end
|
|
|
|
|
2018-01-23 03:58:01 +08:00
|
|
|
describe 'when topic has been deleted' do
  it 'should not enqueue a post/topic edited hooks' do
    topic.trash!
    post.reload

    PostRevisor.new(post, topic).revise!(
      post.user,
      { category_id: Category.last.id, raw: "#{post.raw} new" },
      {}
    )

    # Revising a post in a trashed topic must not fire edit hooks.
    expect(Jobs::EmitWebHookEvent.jobs.count).to eq(0)
  end
end
|
|
|
|
|
2016-06-16 01:49:57 +08:00
|
|
|
it 'should enqueue the right hooks for post events' do
  Fabricate(:web_hook)

  reply = PostCreator.create!(user,
    raw: 'post',
    topic_id: topic.id,
    reply_to_post_number: 1,
    skip_validations: true
  )

  args = Jobs::EmitWebHookEvent.jobs.last["args"].first
  expect(args["event_name"]).to eq("post_created")
  expect(JSON.parse(args["payload"])["id"]).to eq(reply.id)

  Jobs::EmitWebHookEvent.jobs.clear

  # post destroy or recover triggers a moderator post
  expect { PostDestroyer.new(user, reply).destroy }
    .to change { Jobs::EmitWebHookEvent.jobs.count }.by(3)

  # Jobs are enqueued in this exact order: edit, destroy, topic destroy.
  [
    ["post_edited", reply.id],
    ["post_destroyed", reply.id],
    ["topic_destroyed", reply.topic.id],
  ].each_with_index do |(event_name, record_id), index|
    args = Jobs::EmitWebHookEvent.jobs[index]["args"].first
    expect(args["event_name"]).to eq(event_name)
    expect(JSON.parse(args["payload"])["id"]).to eq(record_id)
  end

  Jobs::EmitWebHookEvent.jobs.clear

  expect { PostDestroyer.new(user, reply).recover }
    .to change { Jobs::EmitWebHookEvent.jobs.count }.by(3)

  [
    ["post_edited", reply.id],
    ["post_recovered", reply.id],
    ["topic_recovered", reply.topic.id],
  ].each_with_index do |(event_name, record_id), index|
    args = Jobs::EmitWebHookEvent.jobs[index]["args"].first
    expect(args["event_name"]).to eq(event_name)
    expect(JSON.parse(args["payload"])["id"]).to eq(record_id)
  end
end
|
2016-09-13 16:03:17 +08:00
|
|
|
|
2019-03-07 01:22:54 +08:00
|
|
|
it 'should enqueue the destroyed hooks with tag filter for post events' do
  tag = Fabricate(:tag)
  Fabricate(:web_hook, tags: [tag])

  reply = PostCreator.create!(user,
    raw: 'post',
    topic_id: topic.id,
    reply_to_post_number: 1,
    skip_validations: true
  )

  topic.tags = [tag]
  topic.save!

  Jobs::EmitWebHookEvent.jobs.clear
  PostDestroyer.new(user, reply).destroy

  job = Jobs::EmitWebHookEvent.new
  # Both the post_destroyed and topic_destroyed jobs must pass the tag filter
  # and actually deliver.
  job.expects(:send_webhook!).times(2)

  [1, 2].each do |index|
    args = Jobs::EmitWebHookEvent.jobs[index]["args"].first
    job.execute(args.with_indifferent_access)
  end
end
|
|
|
|
|
2016-12-13 11:26:26 +08:00
|
|
|
it 'should enqueue the right hooks for user events' do
  SiteSetting.must_approve_users = true

  Fabricate(:user_web_hook, active: true)

  # Force the lazy `let(:user)` so the creation hook fires.
  user
  Jobs::CreateUserReviewable.new.execute(user_id: user.id)

  job_args = Jobs::EmitWebHookEvent.jobs.last["args"].first
  expect(job_args["event_name"]).to eq("user_created")
  payload = JSON.parse(job_args["payload"])
  expect(payload["id"]).to eq(user.id)

  # Use create! so a validation failure raises here instead of producing a
  # confusing failure on the confirm step below.
  email_token = user.email_tokens.create!(email: user.email)
  EmailToken.confirm(email_token.token)

  job_args = Jobs::EmitWebHookEvent.jobs.last["args"].first
  expect(job_args["event_name"]).to eq("user_confirmed_email")
  payload = JSON.parse(job_args["payload"])
  expect(payload["id"]).to eq(user.id)

  # Force the lazy `let(:admin)`; admins skip the approval queue.
  admin
  job_args = Jobs::EmitWebHookEvent.jobs.last["args"].first
  expect(job_args["event_name"]).to eq("user_created")
  payload = JSON.parse(job_args["payload"])
  expect(payload["id"]).to eq(admin.id)

  ReviewableUser.find_by(target: user).perform(admin, :approve_user)
  job_args = Jobs::EmitWebHookEvent.jobs.last["args"].first
  expect(job_args["event_name"]).to eq("user_approved")
  payload = JSON.parse(job_args["payload"])
  expect(payload["id"]).to eq(user.id)

  UserUpdater.new(admin, user).update(username: 'testing123')
  job_args = Jobs::EmitWebHookEvent.jobs.last["args"].first
  expect(job_args["event_name"]).to eq("user_updated")
  payload = JSON.parse(job_args["payload"])
  expect(payload["id"]).to eq(user.id)

  user.logged_out
  job_args = Jobs::EmitWebHookEvent.jobs.last["args"].first
  expect(job_args["event_name"]).to eq("user_logged_out")
  payload = JSON.parse(job_args["payload"])
  expect(payload["id"]).to eq(user.id)

  user.logged_in
  job_args = Jobs::EmitWebHookEvent.jobs.last["args"].first
  expect(job_args["event_name"]).to eq("user_logged_in")
  payload = JSON.parse(job_args["payload"])
  expect(payload["id"]).to eq(user.id)

  # Capture the email before destroy; the payload must still include it.
  email = user.email
  user.reload
  UserDestroyer.new(Discourse.system_user).destroy(user)
  job_args = Jobs::EmitWebHookEvent.jobs.last["args"].first
  expect(job_args["event_name"]).to eq("user_destroyed")
  payload = JSON.parse(job_args["payload"])
  expect(payload["id"]).to eq(user.id)
  expect(payload["email"]).to eq(email)

  # Reflects runtime change to user field
  # (the result is unused; Fabricate is called purely for its side effect)
  Fabricate(:user_field, show_on_profile: true)
  user.logged_in
  job_args = Jobs::EmitWebHookEvent.jobs.last["args"].first
  expect(job_args["event_name"]).to eq("user_logged_in")
  payload = JSON.parse(job_args["payload"])
  expect(payload["user_fields"].size).to eq(1)
end
|
2018-05-21 17:29:19 +08:00
|
|
|
|
|
|
|
it 'should enqueue the right hooks for category events' do
  Fabricate(:category_web_hook)
  category = Fabricate(:category)

  created_args = Jobs::EmitWebHookEvent.jobs.last["args"].first
  expect(created_args["event_name"]).to eq("category_created")
  expect(JSON.parse(created_args["payload"])["id"]).to eq(category.id)

  category.update!(slug: 'testing')

  updated_args = Jobs::EmitWebHookEvent.jobs.last["args"].first
  expect(updated_args["event_name"]).to eq("category_updated")
  updated_payload = JSON.parse(updated_args["payload"])
  expect(updated_payload["id"]).to eq(category.id)
  expect(updated_payload["slug"]).to eq('testing')

  category.destroy!

  destroyed_args = Jobs::EmitWebHookEvent.jobs.last["args"].first
  expect(destroyed_args["event_name"]).to eq("category_destroyed")
  expect(JSON.parse(destroyed_args["payload"])["id"]).to eq(category.id)
end
|
|
|
|
|
|
|
|
it 'should enqueue the right hooks for group events' do
  Fabricate(:group_web_hook)
  group = Fabricate(:group)

  created_args = Jobs::EmitWebHookEvent.jobs.last["args"].first
  expect(created_args["event_name"]).to eq("group_created")
  expect(JSON.parse(created_args["payload"])["id"]).to eq(group.id)

  group.update!(full_name: 'testing')
  updated_args = Jobs::EmitWebHookEvent.jobs.last["args"].first
  expect(updated_args["event_name"]).to eq("group_updated")
  updated_payload = JSON.parse(updated_args["payload"])
  expect(updated_payload["id"]).to eq(group.id)
  expect(updated_payload["full_name"]).to eq('testing')

  group.destroy!
  destroyed_args = Jobs::EmitWebHookEvent.jobs.last["args"].first
  expect(destroyed_args["event_name"]).to eq("group_destroyed")
  # The record is gone, so identify it by full_name rather than id.
  expect(JSON.parse(destroyed_args["payload"])["full_name"]).to eq('testing')
end
|
|
|
|
|
|
|
|
it 'should enqueue the right hooks for tag events' do
  Fabricate(:tag_web_hook)
  tag = Fabricate(:tag)

  created_args = Jobs::EmitWebHookEvent.jobs.last["args"].first
  expect(created_args["event_name"]).to eq("tag_created")
  expect(JSON.parse(created_args["payload"])["id"]).to eq(tag.id)

  tag.update!(name: 'testing')
  updated_args = Jobs::EmitWebHookEvent.jobs.last["args"].first
  expect(updated_args["event_name"]).to eq("tag_updated")
  updated_payload = JSON.parse(updated_args["payload"])
  expect(updated_payload["id"]).to eq(tag.id)
  expect(updated_payload["name"]).to eq('testing')

  tag.destroy!
  destroyed_args = Jobs::EmitWebHookEvent.jobs.last["args"].first
  expect(destroyed_args["event_name"]).to eq("tag_destroyed")
  expect(JSON.parse(destroyed_args["payload"])["id"]).to eq(tag.id)
end
|
|
|
|
|
2019-08-16 02:45:30 +08:00
|
|
|
it 'should enqueue the right hooks for notifications' do
  Fabricate(:notification_web_hook)
  notification = Fabricate(:notification)

  args = Jobs::EmitWebHookEvent.jobs.last["args"].first
  expect(args["event_name"]).to eq("notification_created")
  expect(JSON.parse(args["payload"])["id"]).to eq(notification.id)
end
|
|
|
|
|
2019-01-04 01:03:01 +08:00
|
|
|
it 'should enqueue the right hooks for reviewables' do
  Fabricate(:reviewable_web_hook)
  reviewable = Fabricate(:reviewable)

  created_args = Jobs::EmitWebHookEvent.jobs.last["args"].first
  expect(created_args["event_name"]).to eq("reviewable_created")
  expect(JSON.parse(created_args["payload"])["id"]).to eq(reviewable.id)

  reviewable.add_score(
    Discourse.system_user,
    ReviewableScore.types[:off_topic],
    reason: "test"
  )
  score_args = Jobs::EmitWebHookEvent.jobs.last["args"].first
  expect(score_args["event_name"]).to eq("reviewable_score_updated")
  expect(JSON.parse(score_args["payload"])["id"]).to eq(reviewable.id)

  reviewable.perform(Discourse.system_user, :delete_user)
  transition_args = Jobs::EmitWebHookEvent.jobs.last["args"].first
  expect(transition_args["event_name"]).to eq("reviewable_transitioned_to")
  expect(JSON.parse(transition_args["payload"])["id"]).to eq(reviewable.id)
end
|
2020-04-09 02:34:35 +08:00
|
|
|
|
|
|
|
it 'should enqueue the right hooks for badge grants' do
  Fabricate(:user_badge_web_hook)
  badge = Fabricate(:badge)
  # update! instead of bare save: a validation failure should fail loudly
  # here rather than surface as a confusing grant failure below.
  badge.update!(multiple_grant: true, show_posts: true)

  now = Time.now
  freeze_time now

  BadgeGranter.grant(badge, user, granted_by: admin, post_id: post.id)

  job_args = Jobs::EmitWebHookEvent.jobs.last["args"].first
  expect(job_args["event_name"]).to eq("user_badge_granted")
  payload = JSON.parse(job_args["payload"])
  expect(payload["badge_id"]).to eq(badge.id)
  expect(payload["user_id"]).to eq(user.id)
  expect(payload["granted_by_id"]).to eq(admin.id)
  # be_within required because rounding occurs
  expect(Time.zone.parse(payload["granted_at"]).to_f).to be_within(0.001).of(now.to_f)
  expect(payload["post_id"]).to eq(post.id)

  # Future work: revoke badge hook
end
|
2021-04-08 23:46:34 +08:00
|
|
|
|
|
|
|
it 'should enqueue the right hooks for group user addition' do
  Fabricate(:group_user_web_hook)
  group = Fabricate(:group)

  now = Time.now
  freeze_time now

  group.add(user)

  args = Jobs::EmitWebHookEvent.jobs.last["args"].first
  expect(args["event_name"]).to eq("user_added_to_group")
  event_payload = JSON.parse(args["payload"])
  expect(event_payload["group_id"]).to eq(group.id)
  expect(event_payload["user_id"]).to eq(user.id)
  expect(event_payload["notification_level"]).to eq(group.default_notification_level)
  # be_within guards against sub-millisecond rounding in serialization.
  expect(Time.zone.parse(event_payload["created_at"]).to_f).to be_within(0.001).of(now.to_f)
end
|
|
|
|
|
|
|
|
it 'should enqueue the right hooks for group user deletion' do
  Fabricate(:group_user_web_hook)
  group = Fabricate(:group)
  # Membership record is needed only for its side effect; the unused local
  # assignment was removed.
  Fabricate(:group_user, group: group, user: user)

  now = Time.now
  freeze_time now

  group.remove(user)

  job_args = Jobs::EmitWebHookEvent.jobs.last["args"].first
  expect(job_args["event_name"]).to eq("user_removed_from_group")
  payload = JSON.parse(job_args["payload"])
  expect(payload["group_id"]).to eq(group.id)
  expect(payload["user_id"]).to eq(user.id)
end
|
2021-05-01 08:08:38 +08:00
|
|
|
|
|
|
|
it 'should enqueue hooks for user likes in a group' do
|
|
|
|
group = Fabricate(:group)
|
|
|
|
Fabricate(:like_web_hook, groups: [group])
|
|
|
|
group_user = Fabricate(:group_user, group: group, user: user)
|
|
|
|
poster = Fabricate(:user)
|
|
|
|
post = Fabricate(:post, user: poster)
|
|
|
|
like = Fabricate(:post_action, post: post, user: user, post_action_type_id: PostActionType.types[:like])
|
|
|
|
now = Time.now
|
|
|
|
freeze_time now
|
|
|
|
|
|
|
|
DiscourseEvent.trigger(:like_created, like)
|
|
|
|
|
|
|
|
job_args = Jobs::EmitWebHookEvent.jobs.last["args"].first
|
|
|
|
expect(job_args["event_name"]).to eq("post_liked")
|
|
|
|
expect(job_args["group_ids"]).to eq([group.id])
|
|
|
|
payload = JSON.parse(job_args["payload"])
|
|
|
|
expect(payload["post"]["id"]).to eq(post.id)
|
|
|
|
expect(payload["user"]["id"]).to eq(user.id)
|
|
|
|
end
|
2016-06-16 01:49:57 +08:00
|
|
|
end
|
|
|
|
end
|