Accept HTTP/429 as a "rate-limited" status code
parent 7431f151b2
commit 45289f519a
9 changed files with 156 additions and 16 deletions
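In short, every place that previously special-cased HTTP 413 now treats 413 and 429 the same way: both mean "rate limited" and both honor the server's Retry-After value. A minimal sketch of the shared check the hunks below repeat (illustrative only; the commit inlines the comparison at each call site rather than adding a helper, and this function name is hypothetical):

// Illustrative sketch, not part of this commit: the predicate the diff
// effectively applies everywhere by inlining `=== 413 || === 429`.
function isRateLimitStatusCode(code: number): boolean {
  return code === 413 || code === 429;
}
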
@@ -462,7 +462,7 @@ export class ChallengeHandler {
     } catch (error) {
       if (
         !(error instanceof HTTPError) ||
-        error.code !== 413 ||
+        !(error.code === 413 || error.code === 429) ||
         !error.responseHeaders
       ) {
         this.options.onChallengeFailed();

@@ -2,7 +2,7 @@
 // SPDX-License-Identifier: AGPL-3.0-only

 import type { LoggerType } from '../../types/Logging';
-import { sleepFor413RetryAfterTime } from './sleepFor413RetryAfterTime';
+import { sleepForRateLimitRetryAfterTime } from './sleepForRateLimitRetryAfterTime';
 import { getHttpErrorCode } from './getHttpErrorCode';

 export async function handleCommonJobRequestError({
@@ -16,7 +16,8 @@ export async function handleCommonJobRequestError({
 }>): Promise<void> {
   switch (getHttpErrorCode(err)) {
     case 413:
-      await sleepFor413RetryAfterTime({ err, log, timeRemaining });
+    case 429:
+      await sleepForRateLimitRetryAfterTime({ err, log, timeRemaining });
       return;
     case 508:
       log.info('server responded with 508. Giving up on this job');

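The switch above dispatches on getHttpErrorCode(err); that helper's implementation is not part of this diff. Purely as an assumption, here is the shape such a helper could take. The name (with its Sketch suffix), the unwrapping of a nested httpError (suggested by the wrapper-style errors exercised in the tests further down), and the -1 fallback are all guesses, not the repository's code:

// Assumed behavior only, for illustration: return the status code from a
// direct HTTPError, or from an error that wraps one as `httpError`;
// -1 when neither applies.
import { HTTPError } from '../../textsecure/Errors'; // path adapted from the test imports below

function getHttpErrorCodeSketch(maybeError: unknown): number {
  if (maybeError instanceof HTTPError) {
    return maybeError.code;
  }
  const wrapped = (maybeError as { httpError?: unknown })?.httpError;
  if (wrapped instanceof HTTPError) {
    return wrapped.code;
  }
  return -1;
}
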
@@ -3,7 +3,7 @@

 import type { LoggerType } from '../../types/Logging';
 import * as Errors from '../../types/errors';
-import { sleepFor413RetryAfterTime } from './sleepFor413RetryAfterTime';
+import { sleepForRateLimitRetryAfterTime } from './sleepForRateLimitRetryAfterTime';
 import { getHttpErrorCode } from './getHttpErrorCode';
 import { strictAssert } from '../../util/assert';
 import { findRetryAfterTimeFromError } from './findRetryAfterTimeFromError';
@@ -47,7 +47,7 @@ export async function handleMultipleSendErrors({
     formattedErrors.push(Errors.toLogFormat(error));

     const errorCode = getHttpErrorCode(error);
-    if (errorCode === 413) {
+    if (errorCode === 413 || errorCode === 429) {
       const retryAfterTime = findRetryAfterTimeFromError(error);
       if (retryAfterTime > longestRetryAfterTime) {
         retryAfterError = error;
@@ -72,7 +72,7 @@ export async function handleMultipleSendErrors({
   }

   if (retryAfterError && !isFinalAttempt) {
-    await sleepFor413RetryAfterTime({
+    await sleepForRateLimitRetryAfterTime({
       err: retryAfterError,
       log,
       timeRemaining,

@@ -5,7 +5,7 @@ import type { LoggerType } from '../../types/Logging';
 import { sleep } from '../../util/sleep';
 import { findRetryAfterTimeFromError } from './findRetryAfterTimeFromError';

-export async function sleepFor413RetryAfterTime({
+export async function sleepForRateLimitRetryAfterTime({
   err,
   log,
   timeRemaining,
@@ -21,7 +21,7 @@ export async function sleepFor413RetryAfterTime({
   const retryAfter = Math.min(findRetryAfterTimeFromError(err), timeRemaining);

   log.info(
-    `Got a 413 response code. Sleeping for ${retryAfter} millisecond(s)`
+    `Got a 413 or 429 response code. Sleeping for ${retryAfter} millisecond(s)`
   );

   await sleep(retryAfter);

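Callers pass the failed request's error, a logger, and how much time the job still has; the helper clamps the Retry-After-derived delay to that budget before sleeping. A hypothetical caller, only to show the intended flow (retryOnceAfterRateLimit is not from the repository; the imports follow the paths used in the hunks above):

import type { LoggerType } from '../../types/Logging';
import { getHttpErrorCode } from './getHttpErrorCode';
import { sleepForRateLimitRetryAfterTime } from './sleepForRateLimitRetryAfterTime';

// Hypothetical retry wrapper, for illustration only: on a 413/429, wait out
// the server's Retry-After (capped by the remaining budget) and try once more.
async function retryOnceAfterRateLimit(
  attempt: () => Promise<void>,
  log: LoggerType,
  timeRemaining: number
): Promise<void> {
  try {
    await attempt();
  } catch (err: unknown) {
    const code = getHttpErrorCode(err);
    if (code !== 413 && code !== 429) {
      throw err;
    }
    await sleepForRateLimitRetryAfterTime({ err, log, timeRemaining });
    await attempt();
  }
}
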
@@ -247,6 +247,20 @@ describe('sendToGroup', () => {
         'testing OutgoingMessageError'
       )
     );
+    assert.isTrue(
+      _shouldFailSend(
+        new OutgoingMessageError(
+          'something',
+          null,
+          null,
+          new HTTPError('something', {
+            code: 429,
+            headers: {},
+          })
+        ),
+        'testing OutgoingMessageError'
+      )
+    );
     assert.isTrue(
       _shouldFailSend(
         new SendMessageNetworkError(

@@ -31,6 +31,20 @@ describe('findRetryAfterTimeFromError', () => {
          response: {},
        }),
      },
+      {
+        httpError: new HTTPError('Slow down', {
+          code: 429,
+          headers: {},
+          response: {},
+        }),
+      },
+      {
+        httpError: new HTTPError('Slow down', {
+          code: 429,
+          headers: { 'retry-after': 'garbage' },
+          response: {},
+        }),
+      },
     ].forEach(input => {
       assert.strictEqual(findRetryAfterTimeFromError(input), MINUTE);
     });
@@ -64,6 +78,17 @@ describe('findRetryAfterTimeFromError', () => {
     assert.strictEqual(findRetryAfterTimeFromError(input), 1234 * 1000);
   });

+  it("finds the retry-after time on an HTTP error's response headers", () => {
+    const input = {
+      httpError: new HTTPError('Slow down', {
+        code: 429,
+        headers: { 'retry-after': '1234' },
+        response: {},
+      }),
+    };
+    assert.strictEqual(findRetryAfterTimeFromError(input), 1234 * 1000);
+  });
+
   it('prefers the top-level response headers over an HTTP error', () => {
     const input = {
       responseHeaders: { 'retry-after': '1234' },
@@ -75,4 +100,16 @@ describe('findRetryAfterTimeFromError', () => {
     };
     assert.strictEqual(findRetryAfterTimeFromError(input), 1234 * 1000);
   });
+
+  it('prefers the top-level response headers over an HTTP error', () => {
+    const input = {
+      responseHeaders: { 'retry-after': '1234' },
+      httpError: new HTTPError('Slow down', {
+        code: 429,
+        headers: { 'retry-after': '999' },
+        response: {},
+      }),
+    };
+    assert.strictEqual(findRetryAfterTimeFromError(input), 1234 * 1000);
+  });
 });

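Taken together, these tests pin down findRetryAfterTimeFromError fairly tightly: a missing or unparseable retry-after header yields the one-minute default, a numeric value is read as seconds and converted to milliseconds, and a top-level responseHeaders object wins over the headers on a nested HTTPError. A sketch consistent with those expectations (not the repository's implementation; the exact header-lookup details are assumptions):

// Sketch only: derive a retry delay in milliseconds from an error object,
// matching the expectations in the tests above.
const MINUTE = 60 * 1000;

type HeaderMap = Record<string, string>;

function findRetryAfterTimeSketch(err: {
  responseHeaders?: HeaderMap;
  httpError?: { responseHeaders?: HeaderMap };
}): number {
  // Top-level response headers take precedence over the nested HTTPError's.
  const headers = err.responseHeaders ?? err.httpError?.responseHeaders;
  const rawValue = headers?.['retry-after'];
  const seconds = rawValue === undefined ? NaN : parseInt(rawValue, 10);
  // 'garbage', a missing header, or a negative value falls back to one minute
  // (the negative case is an assumption; the tests shown here don't cover it).
  return Number.isFinite(seconds) && seconds >= 0 ? seconds * 1000 : MINUTE;
}
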
@@ -6,7 +6,7 @@ import * as sinon from 'sinon';
 import { HTTPError } from '../../../textsecure/Errors';
 import * as durations from '../../../util/durations';

-import { sleepFor413RetryAfterTime } from '../../../jobs/helpers/sleepFor413RetryAfterTime';
+import { sleepForRateLimitRetryAfterTime } from '../../../jobs/helpers/sleepForRateLimitRetryAfterTime';

 describe('sleepFor413RetryAfterTimeIfApplicable', () => {
   const createLogger = () => ({ info: sinon.spy() });
@@ -28,7 +28,7 @@ describe('sleepFor413RetryAfterTimeIfApplicable', () => {

     await Promise.all(
       [-1, 0].map(timeRemaining =>
-        sleepFor413RetryAfterTime({
+        sleepForRateLimitRetryAfterTime({
           err: {},
           log,
           timeRemaining,
@@ -43,7 +43,7 @@ describe('sleepFor413RetryAfterTimeIfApplicable', () => {
     let done = false;

     (async () => {
-      await sleepFor413RetryAfterTime({
+      await sleepForRateLimitRetryAfterTime({
         err: {},
         log: createLogger(),
         timeRemaining: 12345678,
@@ -68,7 +68,32 @@ describe('sleepFor413RetryAfterTimeIfApplicable', () => {
     let done = false;

     (async () => {
-      await sleepFor413RetryAfterTime({
+      await sleepForRateLimitRetryAfterTime({
+        err,
+        log: createLogger(),
+        timeRemaining: 123456789,
+      });
+      done = true;
+    })();
+
+    await clock.tickAsync(199 * durations.SECOND);
+    assert.isFalse(done);
+
+    await clock.tickAsync(2 * durations.SECOND);
+    assert.isTrue(done);
+  });
+
+  it('finds the Retry-After header on an HTTPError', async () => {
+    const err = new HTTPError('Slow down', {
+      code: 429,
+      headers: { 'retry-after': '200' },
+      response: {},
+    });
+
+    let done = false;
+
+    (async () => {
+      await sleepForRateLimitRetryAfterTime({
         err,
         log: createLogger(),
         timeRemaining: 123456789,
@@ -93,7 +118,32 @@ describe('sleepFor413RetryAfterTimeIfApplicable', () => {
     let done = false;

     (async () => {
-      await sleepFor413RetryAfterTime({
+      await sleepForRateLimitRetryAfterTime({
+        err: { httpError },
+        log: createLogger(),
+        timeRemaining: 123456789,
+      });
+      done = true;
+    })();
+
+    await clock.tickAsync(199 * durations.SECOND);
+    assert.isFalse(done);
+
+    await clock.tickAsync(2 * durations.SECOND);
+    assert.isTrue(done);
+  });
+
+  it('finds the Retry-After on an HTTPError nested under a wrapper error', async () => {
+    const httpError = new HTTPError('Slow down', {
+      code: 429,
+      headers: { 'retry-after': '200' },
+      response: {},
+    });
+
+    let done = false;
+
+    (async () => {
+      await sleepForRateLimitRetryAfterTime({
         err: { httpError },
         log: createLogger(),
         timeRemaining: 123456789,
@@ -118,7 +168,29 @@ describe('sleepFor413RetryAfterTimeIfApplicable', () => {
     let done = false;

     (async () => {
-      await sleepFor413RetryAfterTime({
+      await sleepForRateLimitRetryAfterTime({
+        err,
+        log: createLogger(),
+        timeRemaining: 3 * durations.SECOND,
+      });
+      done = true;
+    })();
+
+    await clock.tickAsync(4 * durations.SECOND);
+    assert.isTrue(done);
+  });
+
+  it("won't wait longer than the remaining time", async () => {
+    const err = new HTTPError('Slow down', {
+      code: 429,
+      headers: { 'retry-after': '99999' },
+      response: {},
+    });
+
+    let done = false;
+
+    (async () => {
+      await sleepForRateLimitRetryAfterTime({
         err,
         log: createLogger(),
         timeRemaining: 3 * durations.SECOND,
@@ -138,7 +210,22 @@ describe('sleepFor413RetryAfterTimeIfApplicable', () => {
       response: {},
     });

-    sleepFor413RetryAfterTime({ err, log, timeRemaining: 9999999 });
+    sleepForRateLimitRetryAfterTime({ err, log, timeRemaining: 9999999 });
+    await clock.nextAsync();
+
+    sinon.assert.calledOnce(log.info);
+    sinon.assert.calledWith(log.info, sinon.match(/123000 millisecond\(s\)/));
+  });
+
+  it('logs how long it will wait', async () => {
+    const log = createLogger();
+    const err = new HTTPError('Slow down', {
+      code: 429,
+      headers: { 'retry-after': '123' },
+      response: {},
+    });
+
+    sleepForRateLimitRetryAfterTime({ err, log, timeRemaining: 9999999 });
     await clock.nextAsync();

     sinon.assert.calledOnce(log.info);

@@ -25,6 +25,7 @@ export function translateError(error: HTTPError): HTTPError | undefined {
         'Failed to connect to the server, please check your network connection.';
       break;
     case 413:
+    case 429:
       message = 'Rate limit exceeded, please try again later.';
       break;
    case 403:

@@ -720,7 +720,7 @@ export function _shouldFailSend(error: unknown, logId: string): boolean {
     return true;
   }

-  if (error.code === 413) {
+  if (error.code === 413 || error.code === 429) {
     logError('Rate limit error, failing.');
     return true;
   }