
Commit 00835fc

ikreymer and tw4l authored Feb 7, 2025
Retry same queue (#757)
- Follow-up to #743: page retries are simply added back to the same queue with the `retry` param incremented and a higher score (after extraHops), to ensure retries are added at the end.
- Score calculation is: `score = depth + (extraHops * MAX_DEPTH) + (retry * MAX_DEPTH * 2)`. This ensures that retries have lower priority than extraHops, and that additional retries have even lower priority (higher score).
- A warning is logged when a retry happens; an error is logged only when all retries are exhausted.
- Back to one failure list; URLs are added there only when all retries are exhausted.
- Rename --numRetries -> --maxPageRetries / --retries for clarity.
- State load: allow retrying previously failed URLs if --maxPageRetries is higher than on the previous run.
- Ensure this works with --failOnInvalidStatus: if provided, invalid status codes (>= 400) are retried along with page load failures.
- Fixes #132.

Co-authored-by: Tessa Walsh <tessa@bitarchivist.net>
1 parent 5c9d808 commit 00835fc
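
To make the ordering concrete, here is a small TypeScript sketch of the scoring rule. It mirrors the updated _getScore in src/util/state.ts below (MAX_DEPTH is 1000000 per src/util/constants.ts); the standalone getScore function is only for illustration:

// Priority score for the Redis sorted-set queue: lower scores are popped first.
// Mirrors _getScore in src/util/state.ts; MAX_DEPTH = 1000000 (src/util/constants.ts).
const MAX_DEPTH = 1000000;

interface QueueEntry {
  depth?: number;
  extraHops?: number;
  retry?: number;
}

function getScore(data: QueueEntry): number {
  return (
    (data.depth || 0) +
    (data.extraHops || 0) * MAX_DEPTH +
    (data.retry || 0) * MAX_DEPTH * 2
  );
}

// Each retry adds 2 * MAX_DEPTH, so with the usual extraHops of 0 or 1 a
// retried page always sorts after every non-retry page, and each further
// retry sorts later still:
getScore({ depth: 3 });               // 3
getScore({ depth: 3, extraHops: 1 }); // 1000003
getScore({ depth: 3, retry: 1 });     // 2000003
getScore({ depth: 3, retry: 2 });     // 4000003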

File tree

7 files changed: +218 −131 lines

docs/docs/user-guide/cli-options.md (+3 −2)

@@ -240,8 +240,9 @@ Options:
                                     s    [boolean] [default: false]
       --writePagesToRedis           If set, write page objects to redis
                                          [boolean] [default: false]
-      --numRetries                  If set, number of times to retry a p
-                                    age that failed to load
+      --maxPageRetries, --retries   If set, number of times to retry a p
+                                    age that failed to load before page
+                                    is considered to have failed
                                          [number] [default: 1]
       --failOnFailedSeed            If set, crawler will fail with exit
                                     code 1 if any seed fails. When combi

src/crawler.ts (+48 −25)

@@ -379,7 +379,7 @@ export class Crawler {
       this.crawlId,
       this.maxPageTime,
       os.hostname(),
-      this.params.numRetries,
+      this.params.maxPageRetries,
     );

     // load full state from config
@@ -1202,22 +1202,30 @@ self.__bx_behaviors.selectMainBehavior();

       await this.checkLimits();
     } else {
-      if (retry >= this.params.numRetries && !pageSkipped) {
+      if (retry >= this.params.maxPageRetries && !pageSkipped) {
        await this.writePage(data);
       }
       if (pageSkipped) {
         await this.crawlState.markExcluded(url);
       } else {
-        await this.crawlState.markFailed(url);
-      }
-      if (this.healthChecker) {
-        this.healthChecker.incError();
-      }
+        const retry = await this.crawlState.markFailed(url);

-      await this.serializeConfig();
+        if (retry < 0) {
+          if (this.healthChecker) {
+            this.healthChecker.incError();
+          }
+
+          await this.serializeConfig();

-      if (depth === 0 && this.params.failOnFailedSeed) {
-        logger.fatal("Seed Page Load Failed, failing crawl", {}, "general", 1);
+          if (depth === 0 && this.params.failOnFailedSeed) {
+            logger.fatal(
+              "Seed Page Load Failed, failing crawl",
+              {},
+              "general",
+              1,
+            );
+          }
+        }
       }
     }

     await this.checkLimits();
@@ -1407,7 +1415,7 @@ self.__bx_behaviors.selectMainBehavior();
     }

     if (this.params.failOnFailedLimit) {
-      const numFailed = await this.crawlState.numFailedWillRetry();
+      const numFailed = await this.crawlState.numFailed();
       const failedLimit = this.params.failOnFailedLimit;
       if (numFailed >= failedLimit) {
         logger.fatal(
@@ -1875,15 +1883,13 @@ self.__bx_behaviors.selectMainBehavior();
     const pendingPages = await this.crawlState.getPendingList();
     const pending = pendingPages.length;
     const crawled = await this.crawlState.numDone();
-    const failedWillRetry = await this.crawlState.numFailedWillRetry();
-    const failed = await this.crawlState.numFailedNoRetry();
+    const failed = await this.crawlState.numFailed();
     const total = realSize + pendingPages.length + crawled;
     const limit = { max: this.pageLimit || 0, hit: this.limitHit };
     const stats = {
       crawled,
       total,
       pending,
-      failedWillRetry,
       failed,
       limit,
       pendingPages,
@@ -1904,8 +1910,26 @@ self.__bx_behaviors.selectMainBehavior();
     }
   }

+  // eslint-disable-next-line @typescript-eslint/no-explicit-any
+  pageFailed(msg: string, retry: number, msgData: any) {
+    if (retry < this.params.maxPageRetries) {
+      logger.warn(
+        msg + ": will retry",
+        { retry, retries: this.params.maxPageRetries, ...msgData },
+        "pageStatus",
+      );
+    } else {
+      logger.error(
+        msg + ": retry limit reached",
+        { retry, retries: this.params.maxPageRetries, ...msgData },
+        "pageStatus",
+      );
+    }
+    throw new Error("logged");
+  }
+
   async loadPage(page: Page, data: PageState, seed: ScopedSeed) {
-    const { url, depth } = data;
+    const { url, depth, retry } = data;

     const logDetails = data.logDetails;

@@ -1999,22 +2023,24 @@ self.__bx_behaviors.selectMainBehavior();
         data.pageSkipped = true;
         logger.warn("Page Load Blocked, skipping", { msg, loadState });
       } else {
-        logger.error("Page Load Failed, will retry", {
+        return this.pageFailed("Page Load Failed", retry, {
           msg,
+          url,
           loadState,
           ...logDetails,
         });
       }
-      e.message = "logged";
     }
-    throw e;
   }
 }

 const resp = fullLoadedResponse || downloadResponse || firstResponse;

 if (!resp) {
-  throw new Error("no response for page load, assuming failed");
+  return this.pageFailed("Page Load Failed, no response", retry, {
+    url,
+    ...logDetails,
+  });
 }

 const respUrl = resp.url().split("#")[0];
@@ -2051,14 +2077,11 @@ self.__bx_behaviors.selectMainBehavior();
     }

     if (failed) {
-      logger.error(
+      return this.pageFailed(
         isChromeError ? "Page Crashed on Load" : "Page Invalid Status",
-        {
-          status,
-          ...logDetails,
-        },
+        retry,
+        { url, status, ...logDetails },
       );
-      throw new Error("logged");
     }

     const contentType = resp.headers()["content-type"];

src/util/argParser.ts (+5 −4)

@@ -17,7 +17,7 @@ import {
   DEFAULT_SELECTORS,
   BEHAVIOR_TYPES,
   ExtractSelector,
-  DEFAULT_NUM_RETRIES,
+  DEFAULT_MAX_RETRIES,
 } from "./constants.js";
 import { ScopedSeed } from "./seeds.js";
 import { interpolateFilename } from "./storage.js";
@@ -550,11 +550,12 @@
       default: false,
     },

-    numRetries: {
+    maxPageRetries: {
+      alias: "retries",
       describe:
-        "If set, number of times to retry a page that failed to load",
+        "If set, number of times to retry a page that failed to load before page is considered to have failed",
       type: "number",
-      default: DEFAULT_NUM_RETRIES,
+      default: DEFAULT_MAX_RETRIES,
     },

    failOnFailedSeed: {

src/util/constants.ts (+1 −1)

@@ -27,7 +27,7 @@ export const ADD_LINK_FUNC = "__bx_addLink";
 export const FETCH_FUNC = "__bx_fetch";

 export const MAX_DEPTH = 1000000;
-export const DEFAULT_NUM_RETRIES = 1;
+export const DEFAULT_MAX_RETRIES = 1;

 export const FETCH_HEADERS_TIMEOUT_SECS = 30;
 export const PAGE_OP_TIMEOUT_SECS = 5;

src/util/state.ts (+49 −82)

@@ -3,7 +3,7 @@ import { v4 as uuidv4 } from "uuid";

 import { logger } from "./logger.js";

-import { MAX_DEPTH, DEFAULT_NUM_RETRIES } from "./constants.js";
+import { MAX_DEPTH, DEFAULT_MAX_RETRIES } from "./constants.js";
 import { ScopedSeed } from "./seeds.js";
 import { Frame } from "puppeteer-core";
 import { interpolateFilename } from "./storage.js";
@@ -120,16 +120,6 @@ declare module "ioredis" {
     uid: string,
   ): Result<void, Context>;

-  movefailed(pkey: string, fkey: string, url: string): Result<void, Context>;
-
-  requeuefailed(
-    fkey: string,
-    qkey: string,
-    ffkey: string,
-    maxRetries: number,
-    maxRegularDepth: number,
-  ): Result<number, Context>;
-
   unlockpending(
     pkeyUrl: string,
     uid: string,
@@ -145,6 +135,15 @@ declare module "ioredis" {
     maxRegularDepth: number,
   ): Result<number, Context>;

+  requeuefailed(
+    pkey: string,
+    qkey: string,
+    fkey: string,
+    url: string,
+    maxRetries: number,
+    maxRegularDepth: number,
+  ): Result<number, Context>;
+
   addnewseed(
     esKey: string,
     esMap: string,
@@ -181,7 +180,6 @@ export class RedisCrawlState {
   skey: string;
   dkey: string;
   fkey: string;
-  ffkey: string;
   ekey: string;
   pageskey: string;
   esKey: string;
@@ -203,17 +201,15 @@ export class RedisCrawlState {
     this.uid = uid;
     this.key = key;
     this.maxPageTime = maxPageTime;
-    this.maxRetries = maxRetries || DEFAULT_NUM_RETRIES;
+    this.maxRetries = maxRetries ?? DEFAULT_MAX_RETRIES;

     this.qkey = this.key + ":q";
     this.pkey = this.key + ":p";
     this.skey = this.key + ":s";
     // done (integer)
     this.dkey = this.key + ":d";
-    // failed
-    this.fkey = this.key + ":f";
     // failed final, no more retry
-    this.ffkey = this.key + ":ff";
+    this.fkey = this.key + ":f";
     // crawler errors
     this.ekey = this.key + ":e";
     // pages
@@ -288,42 +284,30 @@ end
 `,
     });

-    redis.defineCommand("movefailed", {
-      numberOfKeys: 2,
+    redis.defineCommand("requeuefailed", {
+      numberOfKeys: 3,
       lua: `
 local json = redis.call('hget', KEYS[1], ARGV[1]);

 if json then
   local data = cjson.decode(json);
-  json = cjson.encode(data);
+  local retry = data['retry'] or 0;

-  redis.call('lpush', KEYS[2], json);
   redis.call('hdel', KEYS[1], ARGV[1]);
-end
-
-`,
-    });
-
-    redis.defineCommand("requeuefailed", {
-      numberOfKeys: 3,
-      lua: `
-local json = redis.call('rpop', KEYS[1]);
-
-if json then
-  local data = cjson.decode(json);
-  data['retry'] = (data['retry'] or 0) + 1;
-
-  if data['retry'] <= tonumber(ARGV[1]) then
-    local json = cjson.encode(data);
-    local score = (data['depth'] or 0) + ((data['extraHops'] or 0) * ARGV[2]);
+  if retry < tonumber(ARGV[2]) then
+    retry = retry + 1;
+    data['retry'] = retry;
+    json = cjson.encode(data);
+    local score = (data['depth'] or 0) + ((data['extraHops'] or 0) * ARGV[3]) + (retry * ARGV[3] * 2);
     redis.call('zadd', KEYS[2], score, json);
-    return data['retry'];
+    return retry;
   else
     redis.call('lpush', KEYS[3], json);
-    return 0;
   end
 end
 return -1;
+
 `,
     });

@@ -335,11 +319,15 @@
 if not res then
   local json = redis.call('hget', KEYS[1], ARGV[1]);
   if json then
     local data = cjson.decode(json);
-    data['retry'] = (data['retry'] or 0) + 1;
+    local retry = data['retry'] or 0;
+
     redis.call('hdel', KEYS[1], ARGV[1]);
-    if tonumber(data['retry']) <= tonumber(ARGV[2]) then
+
+    if retry < tonumber(ARGV[2]) then
+      retry = retry + 1;
+      data['retry'] = retry;
       json = cjson.encode(data);
-      local score = (data['depth'] or 0) + ((data['extraHops'] or 0) * ARGV[3]);
+      local score = (data['depth'] or 0) + ((data['extraHops'] or 0) * ARGV[3]) + (retry * ARGV[3] * 2);
       redis.call('zadd', KEYS[2], score, json);
       return 1;
     else
@@ -395,7 +383,14 @@ return inx;
   }

   async markFailed(url: string) {
-    await this.redis.movefailed(this.pkey, this.fkey, url);
+    return await this.redis.requeuefailed(
+      this.pkey,
+      this.qkey,
+      this.fkey,
+      url,
+      this.maxRetries,
+      MAX_DEPTH,
+    );
   }

   async markExcluded(url: string) {
@@ -411,10 +406,7 @@ return inx;
   }

   async isFinished() {
-    return (
-      (await this.queueSize()) + (await this.numFailedWillRetry()) == 0 &&
-      (await this.numDone()) + (await this.numFailedNoRetry()) > 0
-    );
+    return (await this.queueSize()) == 0 && (await this.numDone()) > 0;
   }

   async setStatus(status_: string) {
@@ -608,25 +600,7 @@ return inx;
   }

   async nextFromQueue() {
-    let json = await this._getNext();
-    let retry = 0;
-
-    if (!json) {
-      retry = await this.redis.requeuefailed(
-        this.fkey,
-        this.qkey,
-        this.ffkey,
-        this.maxRetries,
-        MAX_DEPTH,
-      );
-
-      if (retry > 0) {
-        json = await this._getNext();
-      } else if (retry === 0) {
-        logger.debug("Did not retry failed, already retried", {}, "state");
-        return null;
-      }
-    }
+    const json = await this._getNext();

     if (!json) {
       return null;
@@ -641,10 +615,6 @@ return inx;
       return null;
     }

-    if (retry) {
-      logger.debug("Retrying failed URL", { url: data.url, retry }, "state");
-    }
-
     await this.markStarted(data.url);

     return new PageState(data);
@@ -660,14 +630,11 @@ return inx;
     const seen = await this._iterSet(this.skey);
     const queued = await this._iterSortedKey(this.qkey, seen);
     const pending = await this.getPendingList();
-    const failedWillRetry = await this._iterListKeys(this.fkey, seen);
-    const failedNoRetry = await this._iterListKeys(this.ffkey, seen);
+    const failed = await this._iterListKeys(this.fkey, seen);
     const errors = await this.getErrorList();
     const extraSeeds = await this._iterListKeys(this.esKey, seen);
     const sitemapDone = await this.isSitemapDone();

-    const failed = failedWillRetry.concat(failedNoRetry);
-
     const finished = [...seen.values()];

     return {
@@ -682,7 +649,11 @@ return inx;
   }

   _getScore(data: QueueEntry) {
-    return (data.depth || 0) + (data.extraHops || 0) * MAX_DEPTH;
+    return (
+      (data.depth || 0) +
+      (data.extraHops || 0) * MAX_DEPTH +
+      (data.retry || 0) * MAX_DEPTH * 2
+    );
   }

   async _iterSet(key: string, count = 100) {
@@ -758,7 +729,6 @@ return inx;
     await this.redis.del(this.pkey);
     await this.redis.del(this.dkey);
     await this.redis.del(this.fkey);
-    await this.redis.del(this.ffkey);
     await this.redis.del(this.skey);
     await this.redis.del(this.ekey);

@@ -842,10 +812,11 @@ return inx;
     for (const json of state.failed) {
       const data = JSON.parse(json);
       const retry = data.retry || 0;
-      if (retry <= this.maxRetries) {
+      // allow retrying failed URLs if number of retries has increased
+      if (retry < this.maxRetries) {
         await this.redis.zadd(this.qkey, this._getScore(data), json);
       } else {
-        await this.redis.rpush(this.ffkey, json);
+        await this.redis.rpush(this.fkey, json);
       }
       seen.push(data.url);
     }
@@ -874,14 +845,10 @@ return inx;
     return res;
   }

-  async numFailedWillRetry() {
+  async numFailed() {
     return await this.redis.llen(this.fkey);
   }

-  async numFailedNoRetry() {
-    return await this.redis.llen(this.ffkey);
-  }
-
   async getPendingList() {
     return await this.redis.hvals(this.pkey);
   }
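
In short, markFailed now returns the new retry count (>= 1) when the URL was requeued for another attempt, and -1 once retries are exhausted and the URL has been pushed to the final failure list. A minimal sketch of the caller's side (the handlePageFailure helper and the narrowed crawlState type are ours, for illustration; src/crawler.ts above does the equivalent inline):

// Sketch of consuming the markFailed/requeuefailed return contract.
async function handlePageFailure(
  crawlState: { markFailed(url: string): Promise<number> },
  url: string,
): Promise<boolean> {
  const retry = await crawlState.markFailed(url);
  if (retry < 0) {
    // Retries exhausted (or URL not pending): the failure is final, so only
    // now count the error, serialize state, and optionally fail the crawl
    // for a failed seed, as in the `if (retry < 0)` branch above.
    return true;
  }
  // Requeued with `retry` incremented; a warning was already logged.
  return false;
}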

tests/file_stats.test.js (−1)

@@ -50,7 +50,6 @@ test("check that stats file format is correct", () => {
   expect(dataJSON.total).toEqual(3);
   expect(dataJSON.pending).toEqual(0);
   expect(dataJSON.failed).toEqual(0);
-  expect(dataJSON.failedWillRetry).toEqual(0);
   expect(dataJSON.limit.max).toEqual(3);
   expect(dataJSON.limit.hit).toBe(true);
   expect(dataJSON.pendingPages.length).toEqual(0);

tests/retry-failed.test.js (+112 −16)

@@ -1,5 +1,6 @@
-import { execSync, spawn } from "child_process";
+import { exec, execSync } from "child_process";
 import fs from "fs";
+import http from "http";
 import Redis from "ioredis";

 const DOCKER_HOST_NAME = process.env.DOCKER_HOST_NAME || "host.docker.internal";
@@ -8,19 +9,37 @@ async function sleep(time) {
   await new Promise((resolve) => setTimeout(resolve, time));
 }

-test("run crawl", async () => {
-  let status = 0;
-  execSync(`docker run -d -v $PWD/test-crawls:/crawls -e CRAWL_ID=test -p 36387:6379 --rm webrecorder/browsertrix-crawler crawl --url http://${DOCKER_HOST_NAME}:31501 --url https://example.com/ --limit 2 --pageExtraDelay 10 --debugAccessRedis --collection retry-fail --numRetries 5`);
+let requests = 0;
+let success = false;
+let server = null;

-  /*
-  async function runServer() {
-    console.log("Waiting to start server");
-    await sleep(2000);
+beforeAll(() => {
+  server = http.createServer((req, res) => {
+    // 3 requests: 2 from browser, 1 direct fetch per attempt
+    // succeed on 6th request == after 2 retries
+    if (requests >= 6) {
+      res.writeHead(200, {"Content-Type": "text/html"});
+      res.end("<html><body>Test Data</body></html>");
+      success = true;
+    } else {
+      res.writeHead(503, {"Content-Type": "text/html"});
+      res.end("<html><body>Test Data</body></html>");
+    }
+    requests++;
+  });
+
+  server.listen(31501, "0.0.0.0");
+});
+
+afterAll(() => {
+  server.close();
+});
+
+
+
+test("run crawl with retries for no response", async () => {
+  execSync(`docker run -d -v $PWD/test-crawls:/crawls -e CRAWL_ID=test -p 36387:6379 --rm webrecorder/browsertrix-crawler crawl --url http://invalid-host-x:31501 --url https://example.com/ --limit 2 --pageExtraDelay 10 --debugAccessRedis --collection retry-fail --retries 5`);

-    console.log("Starting server");
-    //spawn("../../node_modules/.bin/http-server", ["-p", "31501", "--username", "user", "--password", "pass"], {cwd: "./docs/site"});
-  }
-  */
   const redis = new Redis("redis://127.0.0.1:36387/0", { lazyConnect: true, retryStrategy: () => null });

   await sleep(3000);
@@ -32,10 +51,8 @@ test("run crawl", async () => {
     maxRetriesPerRequest: 100,
   });

-  //runServer();
-
   while (true) {
-    const res = await redis.lrange("test:ff", 0, -1);
+    const res = await redis.lrange("test:f", 0, -1);
     if (res.length) {
       const data = JSON.parse(res);
       if (data.retry) {
@@ -67,5 +84,84 @@ test("check only one failed page entry is made", () => {
   ).trim().split("\n").length
 ).toBe(3);
 });
-
+
+
+test("run crawl with retries for 503, enough retries to succeed", async () => {
+  requests = 0;
+  success = false;
+
+  const child = exec(`docker run -v $PWD/test-crawls:/crawls --rm webrecorder/browsertrix-crawler crawl --url http://${DOCKER_HOST_NAME}:31501 --url https://example.com/ --limit 2 --collection retry-fail-2 --retries 2 --failOnInvalidStatus --failOnFailedSeed --logging stats,debug`);
+
+  let status = 0;
+
+  const crawlFinished = new Promise(r => resolve = r);
+
+  // detect crawler exit
+  let crawler_exited = false;
+  child.on("exit", function (code) {
+    status = code;
+    resolve();
+  });
+
+  await crawlFinished;
+
+  expect(status).toBe(0);
+
+  // (1 + 2) * 3 == 9 requests
+  expect(requests).toBe(9);
+  expect(success).toBe(true);
+});
+
+
+test("run crawl with retries for 503, not enough retries, fail", async () => {
+  requests = 0;
+  success = false;
+
+  const child = exec(`docker run -v $PWD/test-crawls:/crawls --rm webrecorder/browsertrix-crawler crawl --url http://${DOCKER_HOST_NAME}:31501 --url https://example.com/ --limit 2 --collection retry-fail-3 --retries 1 --failOnInvalidStatus --failOnFailedSeed --logging stats,debug`);
+
+  let status = 0;
+
+  const crawlFinished = new Promise(r => resolve = r);
+
+  // detect crawler exit
+  let crawler_exited = false;
+  child.on("exit", function (code) {
+    status = code;
+    resolve();
+  });
+
+  await crawlFinished;
+
+  expect(status).toBe(1);
+  // (1 + 1) * 3 requests == 6 requests
+  expect(requests).toBe(6);
+  expect(success).toBe(false);
+});
+
+
+test("run crawl with retries for 503, no retries, fail", async () => {
+  requests = 0;
+  success = false;
+
+  const child = exec(`docker run -v $PWD/test-crawls:/crawls --rm webrecorder/browsertrix-crawler crawl --url http://${DOCKER_HOST_NAME}:31501 --url https://example.com/ --limit 2 --collection retry-fail-4 --retries 0 --failOnInvalidStatus --failOnFailedSeed --logging stats,debug`);
+
+  let status = 0;
+
+  const crawlFinished = new Promise(r => resolve = r);
+
+  // detect crawler exit
+  let crawler_exited = false;
+  child.on("exit", function (code) {
+    status = code;
+    resolve();
+  });
+
+  await crawlFinished;
+
+  expect(status).toBe(1);
+  // (1) * 3 requests == 3 requests
+  expect(requests).toBe(3);
+  expect(success).toBe(false);
+});
+
