2019-04-30 08:27:42 +08:00
|
|
|
|
# frozen_string_literal: true
|
|
|
|
|
|
2022-07-28 10:27:38 +08:00
|
|
|
|
RSpec.describe WebHook do
|
2016-06-16 01:49:57 +08:00
|
|
|
|
# Attributes that must be present on every persisted WebHook record.
%i[payload_url content_type last_delivery_status web_hook_event_types].each do |attribute|
  it { is_expected.to validate_presence_of(attribute) }
end
|
|
|
|
|
|
|
|
|
|
describe "#content_types" do
  subject(:content_types) { WebHook.content_types }

  # The numeric values are persisted in the DB, so their positions must stay stable.
  it "'json' (application/json) should be at 1st position" do
    expect(content_types["application/json"]).to eq(1)
  end

  it "'url_encoded' (application/x-www-form-urlencoded) should be at 2nd position" do
    expect(content_types["application/x-www-form-urlencoded"]).to eq(2)
  end
end
|
|
|
|
|
|
|
|
|
|
describe "#last_delivery_statuses" do
  subject(:statuses) { WebHook.last_delivery_statuses }

  # The numeric values are persisted in the DB, so their positions must stay stable.
  it "inactive should be at 1st position" do
    expect(statuses[:inactive]).to eq(1)
  end

  it "failed should be at 2nd position" do
    expect(statuses[:failed]).to eq(2)
  end

  it "successful should be at 3rd position" do
    expect(statuses[:successful]).to eq(3)
  end
end
|
|
|
|
|
|
2022-07-28 00:14:14 +08:00
|
|
|
|
context "with web hooks" do
|
2019-05-07 11:12:20 +08:00
|
|
|
|
# Note the deliberate surrounding whitespace in payload_url — it exercises the
# strip-before-save behavior asserted below.
fab!(:topic_hook) { Fabricate(:topic_web_hook) }
fab!(:post_hook) { Fabricate(:web_hook, payload_url: " https://example.com ") }
|
2016-06-16 01:49:57 +08:00
|
|
|
|
|
2021-05-21 09:43:47 +08:00
|
|
|
|
it "removes whitespace from payload_url before saving" do
  expect(post_hook.payload_url).to eq("https://example.com")
end

it "excludes disabled plugin web_hooks" do
  disabled_types = WebHookEventType.active.where(name: "solved_accept_unaccept")

  expect(disabled_types).to be_empty
end

it "includes non-plugin web_hooks" do
  topic_types = WebHookEventType.active.where(group: "topic")

  expect(topic_types.count).to eq(5)
end

it "includes enabled plugin web_hooks" do
  # For each plugin: enable its site setting, then check the active event
  # types exposed for its group.
  {
    assign_enabled: ["assign", %w[assigned unassigned]],
    voting_enabled: ["voting", %w[topic_upvote topic_unvote]],
    solved_enabled: ["solved", %w[accepted_solution unaccepted_solution]],
    chat_enabled: [
      "chat",
      %w[chat_message_created chat_message_edited chat_message_trashed chat_message_restored],
    ],
  }.each do |setting, (group, expected_names)|
    SiteSetting.stubs(setting).returns(true)

    expect(WebHookEventType.active.where(group: group).pluck(:name)).to eq(expected_names)
  end
end
|
|
|
|
|
|
2018-05-21 16:23:09 +08:00
|
|
|
|
describe "#active_web_hooks" do
  it "returns unique hooks" do
    # Give the post hook an extra topic event type AND make it a wildcard hook;
    # it must still appear only once in the result.
    post_hook.web_hook_event_types << WebHookEventType.find_by(group: "topic")
    post_hook.update!(wildcard_web_hook: true)

    expect(WebHook.active_web_hooks(:post_created)).to eq([post_hook])
  end

  it "find relevant hooks" do
    expect(WebHook.active_web_hooks(:post_created)).to eq([post_hook])
    expect(WebHook.active_web_hooks(:topic_created)).to eq([topic_hook])
  end

  it "excludes inactive hooks" do
    post_hook.update!(active: false)

    expect(WebHook.active_web_hooks(:post_created)).to eq([])
    expect(WebHook.active_web_hooks(:topic_created)).to eq([topic_hook])
  end

  describe "wildcard web hooks" do
    fab!(:wildcard_hook) { Fabricate(:wildcard_web_hook) }

    it "should include wildcard hooks" do
      expect(WebHook.active_web_hooks(:wildcard)).to eq([wildcard_hook])

      # Wildcard hooks fire for every event type alongside the specific hooks.
      expect(WebHook.active_web_hooks(:post_created)).to contain_exactly(post_hook, wildcard_hook)
      expect(WebHook.active_web_hooks(:topic_created)).to contain_exactly(topic_hook, wildcard_hook)
    end
  end
end
|
|
|
|
|
|
2018-05-21 16:23:09 +08:00
|
|
|
|
describe "#enqueue_hooks" do
  it "accepts additional parameters" do
    payload = { test: "some payload" }.to_json

    WebHook.enqueue_hooks(:post, :post_created, payload: payload)

    enqueued = Jobs::EmitWebHookEvent.jobs.first["args"].first

    expect(enqueued["web_hook_id"]).to eq(post_hook.id)
    expect(enqueued["event_type"]).to eq("post")
    expect(enqueued["payload"]).to eq(payload)
  end

  context "when including wildcard hooks" do
    fab!(:wildcard_hook) { Fabricate(:wildcard_web_hook) }

    describe "#enqueue_hooks" do
      it "enqueues hooks with ids" do
        WebHook.enqueue_hooks(:post, :post_created)

        # One job per matching hook: the specific post hook first, then the wildcard.
        first_job = Jobs::EmitWebHookEvent.jobs.first["args"].first
        expect(first_job["web_hook_id"]).to eq(post_hook.id)
        expect(first_job["event_type"]).to eq("post")

        last_job = Jobs::EmitWebHookEvent.jobs.last["args"].first
        expect(last_job["web_hook_id"]).to eq(wildcard_hook.id)
        expect(last_job["event_type"]).to eq("post")
      end
    end
  end
end
|
|
|
|
|
end
|
|
|
|
|
|
|
|
|
|
describe "enqueues hooks" do
|
|
|
|
|
# Shared fixtures for the "enqueues hooks" examples.
let(:user) { Fabricate(:user) }
let(:admin) { Fabricate(:admin) }
let(:topic) { Fabricate(:topic, user: user) }
let(:post) { Fabricate(:post, topic: topic, user: user) }
let(:topic_web_hook) { Fabricate(:topic_web_hook) }
let(:tag) { Fabricate(:tag) }

# Force the lazy topic_web_hook into existence so topic events have an active hook.
before { topic_web_hook }
|
2016-06-16 01:49:57 +08:00
|
|
|
|
|
2018-05-21 16:23:09 +08:00
|
|
|
|
describe "when there are no active hooks" do
  it "should not generate payload and enqueue anything for topic events" do
    topic_web_hook.destroy!

    new_post = PostCreator.create(user, raw: "post", title: "topic", skip_validations: true)
    expect(Jobs::EmitWebHookEvent.jobs.length).to eq(0)

    # With no hooks, payload generation should be skipped entirely.
    WebHook.expects(:generate_payload).times(0)
    PostDestroyer.new(admin, new_post).destroy
    expect(Jobs::EmitWebHookEvent.jobs.length).to eq(0)
  end

  it "should not enqueue anything for tag events" do
    doomed_tag = Fabricate(:tag)
    doomed_tag.destroy!

    expect(Jobs::EmitWebHookEvent.jobs.length).to eq(0)
  end
end
|
2017-03-16 14:44:09 +08:00
|
|
|
|
|
2018-05-21 16:23:09 +08:00
|
|
|
|
it "should enqueue the right hooks for topic events" do
  # Helper: assert the most recently queued job has the given event name and
  # return its parsed payload for further assertions.
  expect_last_event = ->(expected_name) do
    args = Jobs::EmitWebHookEvent.jobs.last["args"].first
    expect(args["event_name"]).to eq(expected_name)
    JSON.parse(args["payload"])
  end

  post = PostCreator.create(user, raw: "post", title: "topic", skip_validations: true)
  topic_id = post.topic.id

  payload = expect_last_event.call("topic_created")
  expect(payload["id"]).to eq(topic_id)

  PostDestroyer.new(user, post).destroy
  payload = expect_last_event.call("topic_destroyed")
  expect(payload["id"]).to eq(topic_id)

  PostDestroyer.new(user, post).recover
  payload = expect_last_event.call("topic_recovered")
  expect(payload["id"]).to eq(topic_id)

  # Each status flip emits its own dedicated event name.
  %w[archived closed visible].each do |status|
    post.topic.update_status(status, true, topic.user)

    payload = expect_last_event.call("topic_#{status}_status_updated")
    expect(payload["id"]).to eq(topic_id)
  end

  category = Fabricate(:category)

  expect do
    PostRevisor.new(post, post.topic).revise!(
      post.user,
      { category_id: category.id },
      { skip_validations: true },
    )
  end.to change { Jobs::EmitWebHookEvent.jobs.length }.by(1)

  payload = expect_last_event.call("topic_edited")
  expect(payload["id"]).to eq(topic_id)
  expect(payload["category_id"]).to eq(category.id)

  # Fixed: dropped the unused `successfully_saved_post_and_topic` local.
  expect do
    PostRevisor.new(post, post.topic).revise!(
      post.user,
      { tags: [tag.name] },
      { skip_validations: true },
    )
  end.to change { Jobs::EmitWebHookEvent.jobs.length }.by(1)

  payload = expect_last_event.call("topic_edited")
  expect(payload["id"]).to eq(topic_id)
  expect(payload["tags"]).to contain_exactly(tag.name)
end
|
|
|
|
|
|
2023-10-09 11:35:31 +08:00
|
|
|
|
it "should enqueue granular hooks for topic" do
  # Unsubscribe the hook from topic_destroyed so only topic_created fires.
  topic_web_hook.web_hook_event_types.delete(
    WebHookEventType.where(name: "topic_destroyed").last,
  )

  new_post = PostCreator.create(user, raw: "post", title: "topic", skip_validations: true)
  created_topic_id = new_post.topic.id

  queued = Jobs::EmitWebHookEvent.jobs.last["args"].first
  expect(queued["event_name"]).to eq("topic_created")
  expect(JSON.parse(queued["payload"])["id"]).to eq(created_topic_id)

  # Destroying must not enqueue anything since the event type was removed.
  expect { PostDestroyer.new(user, new_post).destroy }.not_to change {
    Jobs::EmitWebHookEvent.jobs.count
  }
end
|
|
|
|
|
|
2023-05-04 17:15:31 +08:00
|
|
|
|
it "should not log a personal message view when processing new topic" do
  SiteSetting.log_personal_messages_views = true
  Fabricate(:topic_web_hook)

  pm_post =
    PostCreator.create!(
      user,
      raw: "raw",
      title: "title",
      skip_validations: true,
      archetype: Archetype.private_message,
      target_usernames: user.username,
    )

  queued = Jobs::EmitWebHookEvent.jobs.last["args"].first
  expect(queued["event_name"]).to eq("topic_created")
  expect(JSON.parse(queued["payload"])["id"]).to eq(pm_post.topic.id)

  # Serializing the PM for the webhook payload must not count as a staff view.
  expect(UserHistory.where(action: UserHistory.actions[:check_personal_message]).count).to eq(0)
end
|
|
|
|
|
|
2018-01-23 03:58:01 +08:00
|
|
|
|
describe "when topic has been deleted" do
  it "should not enqueue a post/topic edited hooks" do
    topic.trash!
    post.reload

    PostRevisor.new(post, topic).revise!(
      post.user,
      { category_id: Category.last.id, raw: "#{post.raw} new" },
      {},
    )

    # Revising a post in a trashed topic must not emit any webhook events.
    expect(Jobs::EmitWebHookEvent.jobs.count).to eq(0)
  end
end
|
|
|
|
|
|
2016-06-16 01:49:57 +08:00
|
|
|
|
it "should enqueue the right hooks for post events" do
  Fabricate(:web_hook)

  reply =
    PostCreator.create!(
      user,
      raw: "post",
      topic_id: topic.id,
      reply_to_post_number: 1,
      skip_validations: true,
    )

  # Helper: assert the queued job at `index` carries the given event name and
  # that its payload references `record_id`.
  expect_job = ->(index, event_name, record_id) do
    args = Jobs::EmitWebHookEvent.jobs[index]["args"].first
    expect(args["event_name"]).to eq(event_name)
    expect(JSON.parse(args["payload"])["id"]).to eq(record_id)
  end

  expect_job.call(-1, "post_created", reply.id)

  Jobs::EmitWebHookEvent.jobs.clear

  # post destroy or recover triggers a moderator post
  expect { PostDestroyer.new(user, reply).destroy }.to change {
    Jobs::EmitWebHookEvent.jobs.count
  }.by(3)

  expect_job.call(0, "post_edited", reply.id)
  expect_job.call(1, "post_destroyed", reply.id)
  expect_job.call(2, "topic_destroyed", reply.topic.id)

  Jobs::EmitWebHookEvent.jobs.clear

  expect { PostDestroyer.new(user, reply).recover }.to change {
    Jobs::EmitWebHookEvent.jobs.count
  }.by(3)

  expect_job.call(0, "post_edited", reply.id)
  expect_job.call(1, "post_recovered", reply.id)
  expect_job.call(2, "topic_recovered", reply.topic.id)
end
|
2016-09-13 16:03:17 +08:00
|
|
|
|
|
2019-03-07 01:22:54 +08:00
|
|
|
|
it "should enqueue the destroyed hooks with tag filter for post events" do
  filter_tag = Fabricate(:tag)
  Fabricate(:web_hook, tags: [filter_tag])

  reply =
    PostCreator.create!(
      user,
      raw: "post",
      topic_id: topic.id,
      reply_to_post_number: 1,
      skip_validations: true,
    )

  topic.tags = [filter_tag]
  topic.save!

  Jobs::EmitWebHookEvent.jobs.clear
  PostDestroyer.new(user, reply).destroy

  # Both destroy-related jobs (indexes 1 and 2) must pass the tag filter and
  # actually deliver.
  emitter = Jobs::EmitWebHookEvent.new
  emitter.expects(:send_webhook!).times(2)

  [1, 2].each do |index|
    queued = Jobs::EmitWebHookEvent.jobs[index]["args"].first
    emitter.execute(queued.with_indifferent_access)
  end
end
|
|
|
|
|
|
2016-12-13 11:26:26 +08:00
|
|
|
|
it "should enqueue the right hooks for user events" do
  SiteSetting.must_approve_users = true

  Fabricate(:user_web_hook, active: true)

  # Helper: assert the most recently queued job has the given event name and
  # return its parsed payload.
  expect_last_event = ->(expected_name) do
    args = Jobs::EmitWebHookEvent.jobs.last["args"].first
    expect(args["event_name"]).to eq(expected_name)
    JSON.parse(args["payload"])
  end

  user # force lazy creation so user_created fires
  Jobs::CreateUserReviewable.new.execute(user_id: user.id)

  payload = expect_last_event.call("user_created")
  expect(payload["id"]).to eq(user.id)

  email_token = Fabricate(:email_token, user: user)
  EmailToken.confirm(email_token.token)
  payload = expect_last_event.call("user_confirmed_email")
  expect(payload["id"]).to eq(user.id)

  admin # force lazy creation so a second user_created fires
  payload = expect_last_event.call("user_created")
  expect(payload["id"]).to eq(admin.id)

  ReviewableUser.find_by(target: user).perform(admin, :approve_user)
  payload = expect_last_event.call("user_approved")
  expect(payload["id"]).to eq(user.id)

  UserUpdater.new(admin, user).update(username: "testing123")
  payload = expect_last_event.call("user_updated")
  expect(payload["id"]).to eq(user.id)

  user.logged_out
  payload = expect_last_event.call("user_logged_out")
  expect(payload["id"]).to eq(user.id)

  user.logged_in
  payload = expect_last_event.call("user_logged_in")
  expect(payload["id"]).to eq(user.id)

  email = user.email
  user.reload
  UserDestroyer.new(Discourse.system_user).destroy(user)
  payload = expect_last_event.call("user_destroyed")
  expect(payload["id"]).to eq(user.id)
  expect(payload["email"]).to eq(email)

  # Reflects runtime change to user field
  # Fixed: dropped the unused `user_field` local; the fabrication side effect
  # is what matters here.
  Fabricate(:user_field, show_on_profile: true)
  user.logged_in
  payload = expect_last_event.call("user_logged_in")
  expect(payload["user_fields"].size).to eq(1)
end
|
2018-05-21 17:29:19 +08:00
|
|
|
|
|
|
|
|
|
it "should enqueue the right hooks for category events" do
  Fabricate(:category_web_hook)

  # Helper: assert the most recently queued job has the given event name and
  # return its parsed payload.
  expect_last_event = ->(expected_name) do
    args = Jobs::EmitWebHookEvent.jobs.last["args"].first
    expect(args["event_name"]).to eq(expected_name)
    JSON.parse(args["payload"])
  end

  category = Fabricate(:category)
  payload = expect_last_event.call("category_created")
  expect(payload["id"]).to eq(category.id)

  category.update!(slug: "testing")
  payload = expect_last_event.call("category_updated")
  expect(payload["id"]).to eq(category.id)
  expect(payload["slug"]).to eq("testing")

  category.destroy!
  payload = expect_last_event.call("category_destroyed")
  expect(payload["id"]).to eq(category.id)
end
|
|
|
|
|
|
|
|
|
|
it "should enqueue the right hooks for group events" do
  Fabricate(:group_web_hook)

  # Helper: assert the most recently queued job has the given event name and
  # return its parsed payload.
  expect_last_event = ->(expected_name) do
    args = Jobs::EmitWebHookEvent.jobs.last["args"].first
    expect(args["event_name"]).to eq(expected_name)
    JSON.parse(args["payload"])
  end

  group = Fabricate(:group)
  payload = expect_last_event.call("group_created")
  expect(payload["id"]).to eq(group.id)

  group.update!(full_name: "testing")
  payload = expect_last_event.call("group_updated")
  expect(payload["id"]).to eq(group.id)
  expect(payload["full_name"]).to eq("testing")

  group.destroy!
  payload = expect_last_event.call("group_destroyed")
  expect(payload["full_name"]).to eq("testing")
end
|
|
|
|
|
|
|
|
|
|
it "should enqueue the right hooks for tag events" do
  Fabricate(:tag_web_hook)

  # Helper: assert the most recently queued job has the given event name and
  # return its parsed payload.
  expect_last_event = ->(expected_name) do
    args = Jobs::EmitWebHookEvent.jobs.last["args"].first
    expect(args["event_name"]).to eq(expected_name)
    JSON.parse(args["payload"])
  end

  new_tag = Fabricate(:tag)
  payload = expect_last_event.call("tag_created")
  expect(payload["id"]).to eq(new_tag.id)

  new_tag.update!(name: "testing")
  payload = expect_last_event.call("tag_updated")
  expect(payload["id"]).to eq(new_tag.id)
  expect(payload["name"]).to eq("testing")

  new_tag.destroy!
  payload = expect_last_event.call("tag_destroyed")
  expect(payload["id"]).to eq(new_tag.id)
end
|
|
|
|
|
|
2019-08-16 02:45:30 +08:00
|
|
|
|
it "should enqueue the right hooks for notifications" do
  Fabricate(:notification_web_hook)

  notification = Fabricate(:notification)

  queued = Jobs::EmitWebHookEvent.jobs.last["args"].first
  expect(queued["event_name"]).to eq("notification_created")
  expect(JSON.parse(queued["payload"])["id"]).to eq(notification.id)
end
|
|
|
|
|
|
2019-01-04 01:03:01 +08:00
|
|
|
|
it "should enqueue the right hooks for reviewables" do
  Fabricate(:reviewable_web_hook)

  # Helper: assert the most recently queued job has the given event name and
  # that its payload references the reviewable.
  reviewable = Fabricate(:reviewable)
  expect_last_event = ->(expected_name) do
    args = Jobs::EmitWebHookEvent.jobs.last["args"].first
    expect(args["event_name"]).to eq(expected_name)
    expect(JSON.parse(args["payload"])["id"]).to eq(reviewable.id)
  end

  expect_last_event.call("reviewable_created")

  reviewable.add_score(Discourse.system_user, ReviewableScore.types[:off_topic], reason: "test")
  expect_last_event.call("reviewable_score_updated")

  reviewable.perform(Discourse.system_user, :delete_user)
  expect_last_event.call("reviewable_transitioned_to")
end
|
2020-04-09 02:34:35 +08:00
|
|
|
|
|
|
|
|
|
it "should enqueue the right hooks for badge grants" do
  Fabricate(:user_badge_web_hook)
  badge = Fabricate(:badge)
  # Fixed: `badge.save` silently ignored validation failures; `update!` raises
  # so the test cannot pass against an invalid fixture.
  badge.update!(multiple_grant: true, show_posts: true)

  now = Time.now
  freeze_time now

  BadgeGranter.grant(badge, user, granted_by: admin, post_id: post.id)

  job_args = Jobs::EmitWebHookEvent.jobs.last["args"].first
  expect(job_args["event_name"]).to eq("user_badge_granted")
  payload = JSON.parse(job_args["payload"])
  expect(payload["badge_id"]).to eq(badge.id)
  expect(payload["user_id"]).to eq(user.id)
  expect(payload["granted_by_id"]).to eq(admin.id)
  # be_within required because rounding occurs
  expect(Time.zone.parse(payload["granted_at"]).to_f).to be_within(0.001).of(now.to_f)
  expect(payload["post_id"]).to eq(post.id)

  # Future work: revoke badge hook
end
|
2021-04-08 23:46:34 +08:00
|
|
|
|
|
|
|
|
|
it "should enqueue the right hooks for group user addition" do
  Fabricate(:group_user_web_hook)
  group = Fabricate(:group)

  now = Time.now
  freeze_time now

  group.add(user)

  queued = Jobs::EmitWebHookEvent.jobs.last["args"].first
  expect(queued["event_name"]).to eq("user_added_to_group")
  # The job carries the group id so delivery can be filtered per group.
  expect(queued["group_ids"]).to contain_exactly(group.id)

  payload = JSON.parse(queued["payload"])
  expect(payload["group_id"]).to eq(group.id)
  expect(payload["user_id"]).to eq(user.id)
  expect(payload["notification_level"]).to eq(group.default_notification_level)
  expect(Time.zone.parse(payload["created_at"]).to_f).to be_within(0.001).of(now.to_f)
end
|
|
|
|
|
|
|
|
|
|
it "should enqueue the right hooks for group user deletion" do
  Fabricate(:group_user_web_hook)
  group = Fabricate(:group)
  # Fixed: dropped the unused `group_user` local; only the membership record's
  # existence matters for the subsequent removal.
  Fabricate(:group_user, group: group, user: user)

  now = Time.now
  freeze_time now

  group.remove(user)

  job_args = Jobs::EmitWebHookEvent.jobs.last["args"].first
  expect(job_args["event_name"]).to eq("user_removed_from_group")
  # The job carries the group id so delivery can be filtered per group.
  expect(job_args["group_ids"]).to contain_exactly(group.id)

  payload = JSON.parse(job_args["payload"])
  expect(payload["group_id"]).to eq(group.id)
  expect(payload["user_id"]).to eq(user.id)
end
|
2021-05-01 08:08:38 +08:00
|
|
|
|
|
2022-07-28 00:14:14 +08:00
|
|
|
|
context "with user promoted hooks" do
  fab!(:user_promoted_web_hook)
  fab!(:another_user) { Fabricate(:user, trust_level: 2) }

  it "should pass the user to the webhook job when a user is promoted" do
    another_user.change_trust_level!(another_user.trust_level + 1)

    queued = Jobs::EmitWebHookEvent.jobs.last["args"].first
    expect(queued["event_name"]).to eq("user_promoted")
    expect(JSON.parse(queued["payload"])["id"]).to eq(another_user.id)
  end

  it "shouldn’t trigger when the user is demoted" do
    expect { another_user.change_trust_level!(another_user.trust_level - 1) }.not_to change {
      Jobs::EmitWebHookEvent.jobs.length
    }
  end
end
|
|
|
|
|
|
2022-07-28 00:14:14 +08:00
|
|
|
|
context "with like created hooks" do
  fab!(:like_web_hook)
  fab!(:another_user) { Fabricate(:user) }

  # Builds a like PostAction by `liker` on `liked_post`.
  def fabricate_like(liked_post, liker)
    Fabricate(
      :post_action,
      post: liked_post,
      user: liker,
      post_action_type_id: PostActionType.types[:like],
    )
  end

  it "should pass the group id to the emit webhook job" do
    group = Fabricate(:group)
    # Fixed: dropped the unused `group_user` local; the membership record
    # itself is what routes the group id into the job.
    Fabricate(:group_user, group: group, user: user)
    liked_post = Fabricate(:post, user: another_user)
    like = fabricate_like(liked_post, user)
    now = Time.now
    freeze_time now

    DiscourseEvent.trigger(:like_created, like)

    assert_hook_was_queued_with(liked_post, user, group_ids: [group.id])
  end

  it "should pass the category id to the emit webhook job" do
    category = Fabricate(:category)
    topic.update!(category: category)
    like = fabricate_like(post, another_user)

    DiscourseEvent.trigger(:like_created, like)

    assert_hook_was_queued_with(post, another_user, category_id: category.id)
  end

  it "should pass the tag id to the emit webhook job" do
    tag = Fabricate(:tag)
    topic.update!(tags: [tag])
    like = fabricate_like(post, another_user)

    DiscourseEvent.trigger(:like_created, like)

    assert_hook_was_queued_with(post, another_user, tag_ids: [tag.id])
  end

  # Asserts the last queued job is a post_liked event for `post` by `user`,
  # carrying the given filter ids when supplied.
  def assert_hook_was_queued_with(post, user, group_ids: nil, category_id: nil, tag_ids: nil)
    job_args = Jobs::EmitWebHookEvent.jobs.last["args"].first
    expect(job_args["event_name"]).to eq("post_liked")
    payload = JSON.parse(job_args["payload"])
    expect(payload["post"]["id"]).to eq(post.id)
    expect(payload["user"]["id"]).to eq(user.id)

    expect(job_args["category_id"]).to eq(category_id) if category_id
    expect(job_args["group_ids"]).to contain_exactly(*group_ids) if group_ids
    expect(job_args["tag_ids"]).to contain_exactly(*tag_ids) if tag_ids
  end
end
|
2016-06-16 01:49:57 +08:00
|
|
|
|
end
|
2022-11-02 00:33:17 +08:00
|
|
|
|
|
|
|
|
|
describe "#payload_url_safety" do
  fab!(:post_hook) { Fabricate(:web_hook, payload_url: "https://example.com") }

  # Stubs DNS resolution so `hostname` resolves to `ips`, letting the
  # SSRF validation run against controlled addresses.
  def stub_hostname_lookup(hostname, ips)
    FinalDestination::SSRFDetector
      .stubs(:lookup_ips)
      .with { |h| h == hostname }
      .returns(ips)
  end

  it "errors if payload_url resolves to a blocked IP" do
    SiteSetting.blocked_ip_blocks = "92.110.0.0/16"
    stub_hostname_lookup("badhostname.com", ["92.110.44.17"])

    post_hook.payload_url = "https://badhostname.com"
    post_hook.save

    expect(post_hook.errors.full_messages).to contain_exactly(
      I18n.t("webhooks.payload_url.blocked_or_internal"),
    )
  end

  it "errors if payload_url resolves to an internal IP" do
    stub_hostname_lookup("badhostname.com", ["172.18.11.39"])

    post_hook.payload_url = "https://badhostname.com"
    post_hook.save

    expect(post_hook.errors.full_messages).to contain_exactly(
      I18n.t("webhooks.payload_url.blocked_or_internal"),
    )
  end

  it "doesn't error if payload_url resolves to an allowed IP" do
    stub_hostname_lookup("goodhostname.com", ["172.32.11.39"])

    post_hook.payload_url = "https://goodhostname.com"
    post_hook.save!
  end
end
|
2016-06-16 01:49:57 +08:00
|
|
|
|
end
|