From e9dc1585b027dc6542c1b474b13f35ba4b4e766c Mon Sep 17 00:00:00 2001
From: Manoj Vivek
Date: Mon, 13 Apr 2026 11:15:26 +0530
Subject: [PATCH] Concurrent sourcemap uploads and retries
---
README.md | 2 +
packages/cli/README.md | 2 +
packages/cli/src/cli.ts | 21 +-
.../core/src/upload/debuginfo-uploader.ts | 120 +++++++++-
packages/core/src/upload/sticky-progress.ts | 71 ++++++
packages/esbuild/README.md | 2 +
packages/esbuild/src/plugin.ts | 214 ++++++++++--------
7 files changed, 319 insertions(+), 113 deletions(-)
create mode 100644 packages/core/src/upload/sticky-progress.ts
diff --git a/README.md b/README.md
index 6683549..e1af9b5 100644
--- a/README.md
+++ b/README.md
@@ -95,6 +95,8 @@ Optional:
--insecure Skip TLS verification
--include Glob pattern for JS files (default: **/*.js)
--exclude Glob pattern to exclude (default: **/node_modules/**)
+ --concurrency Maximum parallel uploads, 1 for serial (default: 50)
+ --retries Number of retry passes for failed uploads (default: 3)
```
Environment variables (`POLARSIGNALS_PROJECT_ID`, `POLARSIGNALS_TOKEN`, `POLARSIGNALS_SERVER_URL`) can be used instead of flags.
diff --git a/packages/cli/README.md b/packages/cli/README.md
index 757807c..872675a 100644
--- a/packages/cli/README.md
+++ b/packages/cli/README.md
@@ -46,6 +46,8 @@ Optional:
--insecure Skip TLS verification
--include Glob pattern for JS files (default: **/*.js)
--exclude Glob pattern to exclude (default: **/node_modules/**)
+ --concurrency Maximum parallel uploads, 1 for serial (default: 50)
+ --retries Number of retry passes for failed uploads (default: 3)
```
## Environment Variables
diff --git a/packages/cli/src/cli.ts b/packages/cli/src/cli.ts
index 9084ec9..6723ac8 100644
--- a/packages/cli/src/cli.ts
+++ b/packages/cli/src/cli.ts
@@ -29,6 +29,8 @@ Optional:
--insecure Skip TLS verification (default: false)
--include Glob pattern for JS files (default: **/*.js)
--exclude Glob pattern to exclude (default: **/node_modules/**)
+ --concurrency Maximum parallel uploads, 1 for serial (default: 50)
+ --retries Number of retry passes for failed uploads (default: 3)
Environment variable overrides:
POLARSIGNALS_PROJECT_ID --project-id
@@ -54,6 +56,8 @@ async function main(): Promise<void> {
'insecure': { type: 'boolean', default: false },
'include': { type: 'string' },
'exclude': { type: 'string' },
+ 'concurrency': { type: 'string' },
+ 'retries': { type: 'string' },
'help': { type: 'boolean', default: false },
},
});
@@ -87,6 +91,14 @@ async function main(): Promise<void> {
const insecure = values['insecure'] ?? false;
const include = values['include'] ? [values['include']] : undefined;
const exclude = values['exclude'] ? [values['exclude']] : undefined;
+ const concurrency = values['concurrency'] ? parseInt(values['concurrency'], 10) : 50;
+ if (!Number.isInteger(concurrency) || concurrency < 1) {
+ die(`Invalid --concurrency value: must be a positive integer, got "${values['concurrency']}"`);
+ }
+ const maxRetries = values['retries'] ? parseInt(values['retries'], 10) : 3;
+ if (!Number.isInteger(maxRetries) || maxRetries < 0) {
+ die(`Invalid --retries value: must be a non-negative integer, got "${values['retries']}"`);
+ }
if (!dryRun) {
if (!projectId) {
@@ -111,11 +123,6 @@ async function main(): Promise<void> {
console.log(`Processed ${results.processed} file(s), skipped ${results.skipped}, errors ${results.errors}`);
- if (results.processed === 0) {
- console.log('No files were processed. Nothing to upload.');
- process.exit(0);
- }
-
if (dryRun) {
console.log('Dry run — skipping upload.');
if (verbose) {
@@ -160,7 +167,7 @@ async function main(): Promise<void> {
}
if (verbose) {
- console.log(`Uploading ${bundles.length} source map(s) to ${serverUrl}...`);
+ console.log(`Uploading ${bundles.length} source map(s) to ${serverUrl} (concurrency=${concurrency})...`);
}
// Step 3: Upload
@@ -170,6 +177,8 @@ async function main(): Promise<void> {
projectID: projectId!,
verbose,
insecure,
+ concurrency,
+ maxRetries,
});
// Step 4: Report
diff --git a/packages/core/src/upload/debuginfo-uploader.ts b/packages/core/src/upload/debuginfo-uploader.ts
index 7ebbfe9..aaeb548 100644
--- a/packages/core/src/upload/debuginfo-uploader.ts
+++ b/packages/core/src/upload/debuginfo-uploader.ts
@@ -8,6 +8,7 @@ import {
BuildIDType,
UploadInstructions_UploadStrategy
} from '@parca/client';
+import { StickyProgress } from './sticky-progress';
/**
* Options for uploading source maps to debuginfo server
@@ -25,6 +26,10 @@ export interface UploadOptions {
force?: boolean;
/** Allow insecure SSL connections (skip certificate validation) */
insecure?: boolean;
+ /** Maximum number of concurrent uploads (default: 50, set to 1 for serial) */
+ concurrency?: number;
+ /** Maximum number of retry passes for failed uploads (default: 3, set to 0 to disable) */
+ maxRetries?: number;
}
/**
@@ -89,21 +94,37 @@ function calculateSourceMapHash(content: Uint8Array): string {
}
/**
- * Uploads source map to debuginfo server
+ * Uploads source map to debuginfo server.
+ * Creates a one-shot gRPC client for this single upload. For batch uploads,
+ * use `uploadSourceMaps` which reuses a single client across all files.
*/
export async function uploadSourceMap(
sourceMapInfo: SourceMapInfo,
options: UploadOptions
+): Promise<UploadResult> {
+ const { serverUrl, insecure = false } = options;
+ const client = createDebuginfoClient(serverUrl, insecure);
+ return uploadOneSourceMap(client, sourceMapInfo, options);
+}
+
+/**
+ * Internal: uploads a single source map using the provided gRPC client.
+ */
+async function uploadOneSourceMap(
+ client: DebuginfoServiceClient,
+ sourceMapInfo: SourceMapInfo,
+ options: UploadOptions
): Promise<UploadResult> {
const { debugId, content, jsFilePath } = sourceMapInfo;
- const { serverUrl, token, projectID, verbose = false, force = false, insecure = false } = options;
+ const { token, projectID, verbose = false, force = false } = options;
+
+ const startTime = Date.now();
if (verbose) {
console.log(`[upload] Uploading source map for ${jsFilePath} (debug ID: ${debugId})`);
}
try {
- const client = createDebuginfoClient(serverUrl, insecure);
const metadata = createRpcMetadata(token, projectID);
const hash = calculateSourceMapHash(content);
const size = content.length;
@@ -123,7 +144,8 @@ export async function uploadSourceMap(
if (!shouldUploadResponse.response.shouldInitiateUpload) {
if (verbose) {
- console.log(` [skip] Skipping upload: ${shouldUploadResponse.response.reason}`);
+ const elapsed = Date.now() - startTime;
+ console.log(` [skip] Skipping upload: ${shouldUploadResponse.response.reason} (${elapsed}ms)`);
}
return {
success: true,
@@ -233,7 +255,8 @@ export async function uploadSourceMap(
}, { meta: metadata });
if (verbose) {
- console.log(` [ok] Source map uploaded successfully`);
+ const elapsed = Date.now() - startTime;
+ console.log(` [ok] Source map uploaded successfully (${elapsed}ms)`);
}
return {
@@ -249,7 +272,8 @@ export async function uploadSourceMap(
: 'Unknown error';
if (verbose) {
- console.log(` [error] Upload failed: ${errorMessage}`);
+ const elapsed = Date.now() - startTime;
+ console.log(` [error] Upload failed: ${errorMessage} (${elapsed}ms)`);
}
return {
@@ -261,18 +285,92 @@ export async function uploadSourceMap(
}
/**
- * Uploads multiple source maps to debuginfo server
+ * Uploads multiple source maps to debuginfo server.
+ * Creates a single gRPC client and reuses it across all uploads.
+ * Runs uploads in parallel up to `options.concurrency` (default 50, set to 1 for serial).
+ * Failed uploads are retried up to `options.maxRetries` times (default 3).
*/
export async function uploadSourceMaps(
sourceMaps: SourceMapInfo[],
options: UploadOptions
): Promise<UploadResult[]> {
- const results: UploadResult[] = [];
+ const { serverUrl, insecure = false, verbose = false, concurrency = 50, maxRetries = 3 } = options;
+ const startTime = Date.now();
+
+ const client = createDebuginfoClient(serverUrl, insecure);
+
+ // Results indexed by original position in sourceMaps
+ const results: UploadResult[] = new Array(sourceMaps.length);
+ // Indices of items still needing an attempt (initially: all of them)
+ let pendingIndices: number[] = sourceMaps.map((_, i) => i);
+
+ for (let attempt = 0; attempt <= maxRetries; attempt++) {
+ if (pendingIndices.length === 0) break;
+
+ if (attempt > 0 && verbose) {
+ console.log(`[upload] Retry ${attempt}/${maxRetries}: ${pendingIndices.length} failed upload(s)`);
+ }
+
+ const passLabel = attempt === 0 ? undefined : `retry ${attempt}/${maxRetries}`;
+ const progress = new StickyProgress(pendingIndices.length, passLabel);
+ progress.start();
+
+ try {
+ const passResults = await parallelMap(pendingIndices, concurrency, async (originalIndex) => {
+ const result = await uploadOneSourceMap(client, sourceMaps[originalIndex], options);
+ progress.recordResult(result);
+ return { originalIndex, result };
+ });
- for (const sourceMapInfo of sourceMaps) {
- const result = await uploadSourceMap(sourceMapInfo, options);
- results.push(result);
+ const stillFailed: number[] = [];
+ for (const { originalIndex, result } of passResults) {
+ results[originalIndex] = result;
+ if (!result.success) {
+ stillFailed.push(originalIndex);
+ }
+ }
+ pendingIndices = stillFailed;
+ } finally {
+ progress.done();
+ }
+ }
+
+ if (verbose) {
+ const elapsed = Date.now() - startTime;
+ const uploaded = results.filter(r => r.success && !r.skipped).length;
+ const skipped = results.filter(r => r.skipped).length;
+ const failed = results.filter(r => !r.success).length;
+ const seconds = (elapsed / 1000).toFixed(2);
+ const avgPerFile = sourceMaps.length > 0 ? (elapsed / sourceMaps.length).toFixed(0) : '0';
+ console.log(`[upload] Total: ${sourceMaps.length} files in ${seconds}s (avg ${avgPerFile}ms/file, concurrency=${concurrency}) — uploaded: ${uploaded}, skipped: ${skipped}, failed: ${failed}`);
}
return results;
}
+
+
+/**
+ * Runs `fn` over `items` with at most `concurrency` in flight at a time.
+ * Preserves result order. Errors are returned as values via the result type
+ * (the caller's `fn` must not throw).
+ */
+async function parallelMap<T, R>(
+ items: T[],
+ concurrency: number,
+  fn: (item: T, index: number) => Promise<R>
+): Promise<R[]> {
+ const results: R[] = new Array(items.length);
+ let nextIndex = 0;
+
+  const worker = async (): Promise<void> => {
+ while (true) {
+ const i = nextIndex++;
+ if (i >= items.length) return;
+ results[i] = await fn(items[i], i);
+ }
+ };
+
+ const workerCount = Math.max(1, Math.min(concurrency, items.length));
+ await Promise.all(Array.from({ length: workerCount }, () => worker()));
+ return results;
+}
diff --git a/packages/core/src/upload/sticky-progress.ts b/packages/core/src/upload/sticky-progress.ts
new file mode 100644
index 0000000..a732495
--- /dev/null
+++ b/packages/core/src/upload/sticky-progress.ts
@@ -0,0 +1,71 @@
+import type { UploadResult } from './debuginfo-uploader';
+
+/**
+ * Renders a sticky progress line at the bottom of the terminal that updates
+ * in place as items complete. Verbose logs from `console.log` continue to
+ * scroll above it; the line is cleared, the log is printed, then the line
+ * is redrawn. No-op when stdout is not a TTY (CI/piped output).
+ */
+export class StickyProgress {
+ private current = 0;
+ private uploaded = 0;
+ private skipped = 0;
+ private failed = 0;
+ private startTime = 0;
+ private readonly total: number;
+ private readonly enabled: boolean;
+ private readonly label?: string;
+ private originalLog: typeof console.log | null = null;
+
+ constructor(total: number, label?: string) {
+ this.total = total;
+ this.label = label;
+ this.enabled = process.stdout.isTTY === true && total > 0;
+ }
+
+ start(): void {
+ if (!this.enabled) return;
+ this.startTime = Date.now();
+ this.originalLog = console.log.bind(console);
+ console.log = (...args: unknown[]) => {
+ this.clearLine();
+ this.originalLog!(...args);
+ this.drawLine();
+ };
+ this.drawLine();
+ }
+
+ recordResult(result: UploadResult): void {
+ if (!this.enabled) return;
+ this.current++;
+ if (result.success) {
+ if (result.skipped) this.skipped++;
+ else this.uploaded++;
+ } else {
+ this.failed++;
+ }
+ this.drawLine();
+ }
+
+ done(): void {
+ if (!this.enabled) return;
+ this.clearLine();
+ if (this.originalLog) {
+ console.log = this.originalLog;
+ this.originalLog = null;
+ }
+ }
+
+ private clearLine(): void {
+ process.stdout.write('\r\x1b[K');
+ }
+
+ private drawLine(): void {
+ const elapsedSec = (Date.now() - this.startTime) / 1000;
+ const rate = elapsedSec > 0 ? (this.current / elapsedSec).toFixed(1) : '0.0';
+ const prefix = this.label ? `(${this.label}) ` : '';
+ process.stdout.write(
+ `\r${prefix}[${this.current}/${this.total}] ${rate} files/s — uploaded: ${this.uploaded}, skipped: ${this.skipped}, failed: ${this.failed}`
+ );
+ }
+}
diff --git a/packages/esbuild/README.md b/packages/esbuild/README.md
index 03a8858..b3a9b28 100644
--- a/packages/esbuild/README.md
+++ b/packages/esbuild/README.md
@@ -39,6 +39,8 @@ Debug ID injection and source map upload happen automatically at the end of each
| `debuginfoServerUrl` | `string` | No | Debuginfo server URL (default: `grpc.polarsignals.com:443`) |
| `verbose` | `boolean` | No | Enable verbose logging (default: `false`) |
| `insecure` | `boolean` | No | Skip TLS verification (default: `false`) |
+| `concurrency` | `number` | No | Maximum parallel uploads, 1 for serial (default: `50`) |
+| `maxRetries` | `number` | No | Number of retry passes for failed uploads (default: `3`) |
## How It Works
diff --git a/packages/esbuild/src/plugin.ts b/packages/esbuild/src/plugin.ts
index d021d07..34be11a 100644
--- a/packages/esbuild/src/plugin.ts
+++ b/packages/esbuild/src/plugin.ts
@@ -6,7 +6,7 @@ import {
injectDebugIdIntoSourceMap,
isValidSourceMap,
injectDebugIdIntoJs,
- uploadSourceMap,
+ uploadSourceMaps,
type SourceMapInfo,
} from '@polarsignals/sourcemap-core';
@@ -24,6 +24,10 @@ export interface DebugIdPluginOptions {
token: string;
/** Allow insecure SSL connections (skip certificate validation) */
insecure?: boolean;
+ /** Maximum parallel uploads (default: 50, set to 1 for serial) */
+ concurrency?: number;
+ /** Number of retry passes for failed uploads (default: 3, set to 0 to disable) */
+ maxRetries?: number;
}
/**
@@ -63,7 +67,15 @@ export function debugIdPlugin(options: DebugIdPluginOptions): Plugin {
console.log(`Injecting debug IDs in output directory: ${outputDir}`);
}
- await injectDebugIdsInOutputDir(outputDir, { verbose, projectID, debuginfoServerUrl: options.debuginfoServerUrl ?? 'grpc.polarsignals.com:443', token: options.token, insecure: options.insecure });
+ await injectDebugIdsInOutputDir(outputDir, {
+ verbose,
+ projectID,
+ debuginfoServerUrl: options.debuginfoServerUrl ?? 'grpc.polarsignals.com:443',
+ token: options.token,
+ insecure: options.insecure,
+ concurrency: options.concurrency,
+ maxRetries: options.maxRetries,
+ });
} catch (error) {
// Add error to build results but don't fail the build
@@ -88,118 +100,128 @@ export function debugIdPlugin(options: DebugIdPluginOptions): Plugin {
}
/**
- * Injects debug IDs into JavaScript files and source maps in the output directory
- * @param outputDir - The output directory to process
- * @param options - Processing options
+ * Injects debug IDs into JavaScript files and source maps in the output directory,
+ * then uploads them in parallel via the batch upload API.
*/
async function injectDebugIdsInOutputDir(
outputDir: string,
- options: { verbose: boolean; projectID: string; debuginfoServerUrl: string; token: string; insecure?: boolean }
+ options: {
+ verbose: boolean;
+ projectID: string;
+ debuginfoServerUrl: string;
+ token: string;
+ insecure?: boolean;
+ concurrency?: number;
+ maxRetries?: number;
+ }
): Promise<void> {
- const { verbose, projectID, debuginfoServerUrl, token, insecure = false } = options;
-
- // Upload is now always configured since all fields are required
- const shouldUpload = true;
+ const { verbose, projectID, debuginfoServerUrl, token, insecure = false, concurrency, maxRetries } = options;
+ let files: import('fs').Dirent[];
try {
- // Get all files in the output directory
- const files = await fs.readdir(outputDir, { withFileTypes: true });
-
- // Find JavaScript files with corresponding source maps
- const jsFiles = files
- .filter(file => file.isFile() && file.name.endsWith('.js'))
- .map(file => file.name);
-
- for (const jsFileName of jsFiles) {
- const jsFilePath = join(outputDir, jsFileName);
- const sourceMapPath = join(outputDir, `${jsFileName}.map`);
-
- try {
- // Check if source map exists
- await fs.access(sourceMapPath);
-
- // Read both files
- const [jsContent, sourceMapContent] = await Promise.all([
- fs.readFile(jsFilePath, 'utf-8'),
- fs.readFile(sourceMapPath, 'utf-8'),
- ]);
-
- // Validate source map
- if (!isValidSourceMap(sourceMapContent)) {
- if (verbose) {
- console.log(`${projectID}: Skipping ${jsFileName} - invalid source map`);
- }
- continue;
- }
+ files = await fs.readdir(outputDir, { withFileTypes: true });
+ } catch (error) {
+ throw new Error(`Failed to process output directory ${outputDir}: ${error instanceof Error ? error.message : 'Unknown error'}`);
+ }
- // Generate debug ID
- const debugId = generateDebugId(sourceMapContent);
+ const jsFiles = files
+ .filter(file => file.isFile() && file.name.endsWith('.js'))
+ .map(file => file.name);
- if (verbose) {
- console.log(`${projectID}: Generated debug ID ${debugId} for ${jsFileName}`);
- }
+ // Phase 1: inject debug IDs and collect upload bundles
+ const bundles: SourceMapInfo[] = [];
- // Inject debug ID into both files
- const updatedSourceMapContent = injectDebugIdIntoSourceMap(sourceMapContent, debugId);
- const updatedJsContent = injectDebugIdIntoJs(jsContent, debugId);
+ for (const jsFileName of jsFiles) {
+ const jsFilePath = join(outputDir, jsFileName);
+ const sourceMapPath = join(outputDir, `${jsFileName}.map`);
- // Write updated files
- await Promise.all([
- fs.writeFile(jsFilePath, updatedJsContent, 'utf-8'),
- fs.writeFile(sourceMapPath, updatedSourceMapContent, 'utf-8'),
- ]);
+ try {
+ await fs.access(sourceMapPath);
- if (verbose) {
- console.log(`${projectID}: Injected debug ID into ${jsFileName} and its source map`);
- }
-
- // Upload source map bundle if configured
- if (shouldUpload) {
- try {
- // Create binary bundle: [js_len: u64][sm_len: u64][js_bytes][sm_bytes]
- const jsBytes = Buffer.from(updatedJsContent, 'utf-8');
- const smBytes = Buffer.from(updatedSourceMapContent, 'utf-8');
- const header = Buffer.alloc(16);
- header.writeBigUInt64LE(BigInt(jsBytes.length), 0);
- header.writeBigUInt64LE(BigInt(smBytes.length), 8);
- const bundleBuffer = Buffer.concat([header, jsBytes, smBytes]);
-
- const sourceMapInfo: SourceMapInfo = {
- debugId,
- content: new Uint8Array(bundleBuffer),
- jsFilePath: jsFileName,
- };
-
- const uploadResult = await uploadSourceMap(sourceMapInfo, {
- serverUrl: debuginfoServerUrl,
- token: token,
- projectID,
- verbose,
- insecure,
- });
-
- if (!uploadResult.success && !uploadResult.skipped) {
- if (verbose) {
- console.log(`${projectID}: Failed to upload source map: ${uploadResult.error}`);
- }
- }
- } catch (uploadError) {
- if (verbose) {
- console.log(`${projectID}: Upload error: ${uploadError instanceof Error ? uploadError.message : 'Unknown error'}`);
- }
- }
- }
+ const [jsContent, sourceMapContent] = await Promise.all([
+ fs.readFile(jsFilePath, 'utf-8'),
+ fs.readFile(sourceMapPath, 'utf-8'),
+ ]);
- } catch (error) {
+ if (!isValidSourceMap(sourceMapContent)) {
if (verbose) {
- console.log(`${projectID}: Skipping ${jsFileName} - no source map or error: ${error instanceof Error ? error.message : 'Unknown error'}`);
+ console.log(`${projectID}: Skipping ${jsFileName} - invalid source map`);
}
continue;
}
+
+ const debugId = generateDebugId(sourceMapContent);
+
+ if (verbose) {
+ console.log(`${projectID}: Generated debug ID ${debugId} for ${jsFileName}`);
+ }
+
+ const updatedSourceMapContent = injectDebugIdIntoSourceMap(sourceMapContent, debugId);
+ const updatedJsContent = injectDebugIdIntoJs(jsContent, debugId);
+
+ await Promise.all([
+ fs.writeFile(jsFilePath, updatedJsContent, 'utf-8'),
+ fs.writeFile(sourceMapPath, updatedSourceMapContent, 'utf-8'),
+ ]);
+
+ if (verbose) {
+ console.log(`${projectID}: Injected debug ID into ${jsFileName} and its source map`);
+ }
+
+ // Build the upload bundle: [js_len: u64][sm_len: u64][js_bytes][sm_bytes]
+ const jsBytes = Buffer.from(updatedJsContent, 'utf-8');
+ const smBytes = Buffer.from(updatedSourceMapContent, 'utf-8');
+ const header = Buffer.alloc(16);
+ header.writeBigUInt64LE(BigInt(jsBytes.length), 0);
+ header.writeBigUInt64LE(BigInt(smBytes.length), 8);
+ const bundleBuffer = Buffer.concat([header, jsBytes, smBytes]);
+
+ bundles.push({
+ debugId,
+ content: new Uint8Array(bundleBuffer),
+ jsFilePath: jsFileName,
+ });
+ } catch (error) {
+ if (verbose) {
+ console.log(`${projectID}: Skipping ${jsFileName} - no source map or error: ${error instanceof Error ? error.message : 'Unknown error'}`);
+ }
+ continue;
}
+ }
- } catch (error) {
- throw new Error(`Failed to process output directory ${outputDir}: ${error instanceof Error ? error.message : 'Unknown error'}`);
+ if (bundles.length === 0) {
+ if (verbose) {
+ console.log(`${projectID}: No source maps to upload`);
+ }
+ return;
+ }
+
+ // Phase 2: batch upload (shared client, parallel, with retries + sticky progress)
+ if (verbose) {
+ console.log(`${projectID}: Uploading ${bundles.length} source map(s)...`);
+ }
+
+ try {
+ const results = await uploadSourceMaps(bundles, {
+ serverUrl: debuginfoServerUrl,
+ token,
+ projectID,
+ verbose,
+ insecure,
+ concurrency,
+ maxRetries,
+ });
+
+ if (verbose) {
+ const failed = results.filter(r => !r.success);
+ for (const f of failed) {
+ console.log(`${projectID}: Failed to upload source map ${f.debugId}: ${f.error}`);
+ }
+ }
+ } catch (uploadError) {
+ if (verbose) {
+ console.log(`${projectID}: Upload error: ${uploadError instanceof Error ? uploadError.message : 'Unknown error'}`);
+ }
}
}