Swap to continual concurrency rather than chunks
MattIPv4 committed Apr 2, 2024
1 parent 60e16fa commit b3bf6ae
Showing 3 changed files with 17 additions and 25 deletions.
11 changes: 0 additions & 11 deletions package-lock.json

Some generated files are not rendered by default.

1 change: 0 additions & 1 deletion package.json
@@ -37,7 +37,6 @@
"@sentry/node": "^6.18.0",
"algoliasearch": "^4.12.1",
"babel-polyfill": "^6.26.0",
"chunk": "0.0.3",
"consola": "^2.15.3",
"copyfiles": "^2.4.1",
"core-js": "^3.21.1",
30 changes: 17 additions & 13 deletions util/build/routes.js
@@ -1,4 +1,3 @@
import chunk from 'chunk';
import fetch from 'node-fetch';
import consola from 'consola';
import { baseApi } from '../../data/config';
@@ -11,6 +10,7 @@ export default async () => {
consola.info(` Fetched ${libsJson.results.length} libraries in ${Date.now() - libsStart}ms`);

// Track timings for individual libraries
const libsAsyncStart = Date.now();
const timings = [];

// Create the lib promises
@@ -21,7 +21,7 @@ export default async () => {
const libJson = await libRaw.json();

if (!libJson.versions) {
console.warn(`No versions array for ${lib.name}`, libJson);
consola.warn(`No versions array for ${lib.name}`, libJson);
}

const versions = (libJson.versions || []).map((version) => {
@@ -43,22 +43,26 @@ export default async () => {
}];
});

// Split into chunks and fetch
const libsChunksStart = Date.now();
const libsChunks = chunk(libsAsync, 100);
const failed = [];
// Run with a continual concurrency of 100
const libs = [];
for (const libsChunk of libsChunks) {
const chunkRes = await Promise.all(libsChunk.map(cb => cb[1]().catch(() => failed.push(cb))));
libs.push(...chunkRes.flat(1));
}
for (const failure of failed) {
const result = await failure[1]().catch(e => console.warn(failure[0], e));
const failed = [];
const libPromise = (name, callback) => callback()
.then(result => libs.push(...result))
.catch(() => failed.push([name, callback]))
.finally(() => {
const next = libsAsync.shift();
if (next) { libPromise(...next); }
});
await Promise.all(libsAsync.splice(0, 100).map(next => libPromise(...next)));

// Re-run any failures with a concurrency of 1
for (const [name, callback] of failed) {
const result = await callback().catch(e => consola.warn(name, e));
if (result) { libs.push(...result); }
}

// Report the timings
consola.info(` Fetched ${timings.length} libraries in ${Date.now() - libsChunksStart}ms`);
consola.info(` Fetched ${timings.length} libraries in ${Date.now() - libsAsyncStart}ms`);
timings.sort((a, b) => a[1] - b[1]);
consola.info(` p99: ${timings[Math.floor(timings.length * 0.99)][1]}ms`);
consola.info(` p90: ${timings[Math.floor(timings.length * 0.9)][1]}ms`);
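The change to util/build/routes.js above replaces fixed chunks of 100 requests with a rolling pool: as each request settles it pulls the next library off the queue, so roughly 100 requests stay in flight until the queue is empty, instead of the whole batch waiting on its slowest member. Below is a minimal standalone sketch of that pattern, with illustrative names (runWithConcurrency, runTask, tasks) that are not taken from the commit, and with the commit's per-library name tracking and failure retry pass left out.

// Minimal sketch of continual concurrency; names are assumptions, not from the commit.
// tasks: an array of zero-argument async functions to run.
const runWithConcurrency = async (tasks, limit = 100) => {
    const results = [];
    const failures = [];

    const runTask = (task) => task()
        .then((value) => results.push(value))
        .catch(() => failures.push(task))
        .finally(() => {
            // As soon as this task settles, pull the next task off the queue,
            // keeping the number of in-flight tasks at the limit.
            const next = tasks.shift();
            if (next) return runTask(next);
        });

    // Start the first batch; each completion chains the next task until the queue drains.
    await Promise.all(tasks.splice(0, limit).map(runTask));
    return { results, failures };
};

One detail of this sketch: because the finally callback returns the chained runTask call, the outer Promise.all resolves only after the entire queue has drained, not just after the first batch of tasks settles.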
