FEATURE: when we fail to ship topic timings attempt to retry (#10916)

* FEATURE: when we fail to ship topic timings attempt to retry

This change amends the timing flush behavior so that:

1. Topic timings are treated as background requests and subject to more
 aggressive rate limits.

2. If we notice an error when we ship timings we back off exponentially

The commit allows 405, 429, 500, 501, 502, 503 and 504 errors to be retried.

500+ errors usually happen when self-hosters are rebuilding their sites
or hitting some other transient failure condition.

405 happens when the site is in read-only mode.
429 happens when the user is rate limited.

The retry cadence is hardcoded in AJAX_FAILURE_DELAYS, longest delay is
40 seconds, we may consider enlarging it.

After the last delay passes we give up and do not write timings to the
server.

* Address feedback

- Omit promise, no need to use promises in sendNextConsolidatedTiming
- Correct issue where >= -1 was used and > -1 was intended
- Use objects for consolidated timings instead of Array
- Stop using shift/unshift and instead use push / pop which are faster

* Move consolidated timing initialization to constructor

* Remove TODO and just console.warn if we have an issue
This commit is contained in:
Sam 2020-10-16 09:49:55 +11:00 committed by GitHub
parent d77e30a011
commit 110e9a455b
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
2 changed files with 130 additions and 36 deletions

View File

@ -7,6 +7,9 @@ const PAUSE_UNLESS_SCROLLED = 1000 * 60 * 3;
const MAX_TRACKING_TIME = 1000 * 60 * 6;
const ANON_MAX_TOPIC_IDS = 5;
const AJAX_FAILURE_DELAYS = [5000, 10000, 20000, 40000];
const ALLOWED_AJAX_FAILURES = [405, 429, 500, 501, 502, 503, 504];
export default class {
constructor(topicTrackingState, siteSettings, session, currentUser) {
this.topicTrackingState = topicTrackingState;
@ -14,6 +17,7 @@ export default class {
this.session = session;
this.currentUser = currentUser;
this.reset();
this._consolidatedTimings = [];
}
start(topicId, topicController) {
@ -85,6 +89,104 @@ export default class {
this._anonCallback = cb;
}
consolidateTimings(timings, topicTime, topicId) {
let foundIndex = this._consolidatedTimings.findIndex(
(elem) => elem.topicId === topicId
);
if (foundIndex > -1) {
let found = this._consolidatedTimings[foundIndex];
const lastIndex = this._consolidatedTimings.length - 1;
if (foundIndex !== lastIndex) {
const last = this._consolidatedTimings[lastIndex];
this._consolidatedTimings[lastIndex] = found;
this._consolidatedTimings[lastIndex - 1] = last;
}
const oldTimings = found.timings;
Object.keys(oldTimings).forEach((id) => {
if (timings[id]) {
oldTimings[id] += timings[id];
}
});
found.topicTime += topicTime;
found.timings = Object.assign({}, timings, found.timings);
} else {
this._consolidatedTimings.push({ timings, topicTime, topicId });
}
return this._consolidatedTimings;
}
sendNextConsolidatedTiming() {
if (this._consolidatedTimings.length === 0) {
return;
}
if (this._inProgress) {
return;
}
if (
this._blockSendingToServerTill &&
this._blockSendingToServerTill > Date.now()
) {
return;
}
this._ajaxFailures = this._ajaxFailures || 0;
const { timings, topicTime, topicId } = this._consolidatedTimings.pop();
this._inProgress = true;
ajax("/topics/timings", {
data: {
timings: timings,
topic_time: topicTime,
topic_id: topicId,
},
cache: false,
type: "POST",
headers: {
"X-SILENCE-LOGGER": "true",
"Discourse-Background": "true",
},
})
.then(() => {
this._ajaxFailures = 0;
const topicController = this._topicController;
if (topicController) {
const postNumbers = Object.keys(timings).map((v) => parseInt(v, 10));
topicController.readPosts(topicId, postNumbers);
}
})
.catch((e) => {
if (ALLOWED_AJAX_FAILURES.indexOf(e.jqXHR.status) > -1) {
const delay = AJAX_FAILURE_DELAYS[this._ajaxFailures];
this._ajaxFailures += 1;
if (delay) {
this._blockSendingToServerTill = Date.now() + delay;
// we did not send to the server, got to re-queue it
this.consolidateTimings(timings, topicTime, topicId);
}
}
if (window.console && window.console.warn) {
window.console.warn(
`Failed to update topic times for topic ${topicId} due to ${e.jqXHR.status} error`
);
}
})
.finally(() => {
this._inProgress = false;
this._lastFlush = 0;
});
}
flush() {
const newTimings = {};
const totalTimings = this._totalTimings;
@ -140,42 +242,8 @@ export default class {
if (!$.isEmptyObject(newTimings)) {
if (this.currentUser && !isTesting()) {
this._inProgress = true;
ajax("/topics/timings", {
data: {
timings: newTimings,
topic_time: this._topicTime,
topic_id: topicId,
},
cache: false,
type: "POST",
headers: {
"X-SILENCE-LOGGER": "true",
},
})
.then(() => {
const topicController = this._topicController;
if (topicController) {
const postNumbers = Object.keys(newTimings).map((v) =>
parseInt(v, 10)
);
topicController.readPosts(topicId, postNumbers);
}
})
.catch((e) => {
const error = e.jqXHR;
if (
error.status === 405 &&
error.responseJSON.error_type === "read_only"
) {
return;
}
})
.finally(() => {
this._inProgress = false;
this._lastFlush = 0;
});
this.consolidateTimings(newTimings, this._topicTime, topicId);
this.sendNextConsolidatedTiming();
} else if (this._anonCallback) {
// Anonymous viewer - save to localStorage
const storage = this.keyValueStore;
@ -240,6 +308,11 @@ export default class {
this.flush();
}
if (!this._inProgress) {
// handles retries so there is no situation where we are stuck with a backlog
this.sendNextConsolidatedTiming();
}
if (this.session.hasFocus) {
this._topicTime += diff;

View File

@ -0,0 +1,21 @@
import { test, module } from "qunit";
import ScreenTrack from "discourse/lib/screen-track";

module("lib:screen-track");

test("consolidateTimings", (assert) => {
  const screenTrack = new ScreenTrack();

  // Two flushes for topic 1 should be merged into a single queue entry
  // (overlapping post timings summed, topic time accumulated) …
  screenTrack.consolidateTimings({ 1: 10, 2: 5 }, 10, 1);
  screenTrack.consolidateTimings({ 1: 5, 3: 1 }, 3, 1);

  // … while a different topic gets its own entry appended to the queue.
  const result = screenTrack.consolidateTimings({ 1: 5, 3: 1 }, 3, 2);

  const expected = [
    { timings: { 1: 15, 2: 5, 3: 1 }, topicTime: 13, topicId: 1 },
    { timings: { 1: 5, 3: 1 }, topicTime: 3, topicId: 2 },
  ];

  assert.deepEqual(
    result,
    expected,
    "expecting consolidated timings to match correctly"
  );
});