Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
195 changes: 195 additions & 0 deletions .github/workflows/storage.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,195 @@
# Storage provider benchmark workflow.
# - Runs one benchmark job per provider (matrix), uploads per-provider results.
# - A collect job merges results, renders SVGs, comments on PRs, and commits
#   results back to the repo on scheduled / manual runs.
name: Storage Benchmark

on:
  pull_request:
  schedule:
    - cron: '0 0 * * *' # Daily at midnight UTC
  workflow_dispatch:
    inputs:
      iterations:
        description: 'Iterations per provider'
        required: false
        default: '100'
      file_size:
        description: 'File size to test (leave empty to run all)'
        required: false
        default: ''
        type: choice
        options:
          - ''
          - 1MB
          - 10MB
          - 100MB

# Scope the concurrency group by ref so two different PRs do not cancel each
# other's runs; only superseded runs of the SAME ref are cancelled.
concurrency:
  group: storage-benchmarks-${{ github.ref }}
  cancel-in-progress: true

permissions:
  contents: write
  pull-requests: write

jobs:
  bench:
    name: Bench ${{ matrix.provider }}
    runs-on: namespace-profile-default
    timeout-minutes: 60
    strategy:
      fail-fast: false
      matrix:
        provider:
          - aws-s3
          - cloudflare-r2
          - tigris
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-node@v4
        with:
          node-version: 24
          cache: 'npm'
      - run: npm ci
      - name: Clear stale results from checkout
        run: rm -rf results/storage/
      - name: Run storage benchmark
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          AWS_REGION: ${{ secrets.AWS_REGION }}
          S3_BUCKET: ${{ secrets.S3_BUCKET }}
          R2_ACCESS_KEY_ID: ${{ secrets.R2_ACCESS_KEY_ID }}
          R2_SECRET_ACCESS_KEY: ${{ secrets.R2_SECRET_ACCESS_KEY }}
          R2_ACCOUNT_ID: ${{ secrets.R2_ACCOUNT_ID }}
          R2_BUCKET: ${{ secrets.R2_BUCKET }}
          TIGRIS_STORAGE_ACCESS_KEY_ID: ${{ secrets.TIGRIS_STORAGE_ACCESS_KEY_ID }}
          TIGRIS_STORAGE_SECRET_ACCESS_KEY: ${{ secrets.TIGRIS_STORAGE_SECRET_ACCESS_KEY }}
          TIGRIS_STORAGE_BUCKET: ${{ secrets.TIGRIS_STORAGE_BUCKET }}
          # Pass user-controllable workflow inputs through the environment
          # instead of interpolating them into the shell script, to avoid
          # script injection. matrix.provider is safe: it comes from the
          # fixed matrix list above, not from user input.
          INPUT_FILE_SIZE: ${{ github.event.inputs.file_size }}
          # PRs get a light 10-iteration run; dispatch uses its input; scheduled runs use 100.
          INPUT_ITERATIONS: ${{ github.event.inputs.iterations || (github.event_name == 'pull_request' && '10') || '100' }}
        run: |
          FILE_SIZE_FLAG=""
          if [ -n "$INPUT_FILE_SIZE" ]; then
            FILE_SIZE_FLAG="--file-size $INPUT_FILE_SIZE"
          fi
          npm run bench -- \
            --mode storage \
            --provider ${{ matrix.provider }} \
            --iterations "$INPUT_ITERATIONS" \
            $FILE_SIZE_FLAG
      - name: Upload results
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: storage-results-${{ matrix.provider }}
          path: results/storage/
          if-no-files-found: ignore
          retention-days: 7

  collect:
    name: Collect Results
    runs-on: namespace-profile-default
    needs: bench
    # Run even if some providers failed so partial results are still reported.
    if: always()
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-node@v4
        with:
          node-version: 24
          cache: 'npm'
      - run: npm ci
      - name: Download all artifacts
        uses: actions/download-artifact@v4
        with:
          path: artifacts/
          pattern: storage-results-*
      - name: Merge results
        run: npx tsx src/merge-results.ts --input artifacts --mode storage
      - run: npm run generate-storage-svg
      - name: Upload SVGs as artifacts
        if: github.event_name == 'pull_request'
        uses: actions/upload-artifact@v4
        with:
          name: storage-benchmark-svgs
          path: storage_*.svg
          if-no-files-found: ignore
          retention-days: 7
      - name: Post results to PR
        if: github.event_name == 'pull_request'
        uses: actions/github-script@v7
        with:
          script: |
            const fs = require('fs');
            const path = require('path');

            const runUrl = `${context.serverUrl}/${context.repo.owner}/${context.repo.repo}/actions/runs/${context.runId}`;
            const sizes = ['1mb', '10mb', '100mb'];
            let body = '## Storage Benchmark Results\n\n';
            let hasResults = false;

            for (const size of sizes) {
              const latestPath = path.join('results', 'storage', size, 'latest.json');
              if (!fs.existsSync(latestPath)) continue;

              const data = JSON.parse(fs.readFileSync(latestPath, 'utf-8'));
              const results = data.results
                .filter(r => !r.skipped)
                .sort((a, b) => (b.compositeScore || 0) - (a.compositeScore || 0));

              if (results.length === 0) continue;
              hasResults = true;

              body += `### ${size.toUpperCase()} Files\n\n`;
              body += '| # | Provider | Score | Download | Throughput | Upload | Status |\n';
              body += '|---|----------|-------|----------|------------|--------|--------|\n';

              results.forEach((r, i) => {
                const name = r.provider === 'aws-s3' ? 'AWS S3' : r.provider === 'cloudflare-r2' ? 'Cloudflare R2' : r.provider.charAt(0).toUpperCase() + r.provider.slice(1);
                const score = r.compositeScore !== undefined ? r.compositeScore.toFixed(1) : '--';
                const dl = (r.summary.downloadMs.median / 1000).toFixed(2) + 's';
                const tp = r.summary.throughputMbps.median.toFixed(1) + ' Mbps';
                const ul = (r.summary.uploadMs.median / 1000).toFixed(2) + 's';
                const ok = r.iterations.filter(it => !it.error).length;
                const total = r.iterations.length;
                body += `| ${i + 1} | ${name} | ${score} | ${dl} | ${tp} | ${ul} | ${ok}/${total} |\n`;
              });

              body += '\n';
            }

            if (!hasResults) {
              body += '> No storage benchmark results were generated.\n\n';
            }

            body += `---\n*[View full run](${runUrl}) · SVGs available as [build artifacts](${runUrl}#artifacts)*`;

            // Find and update the existing marker comment, or create a new one.
            // Paginate: listComments only returns the first page by default, so
            // on long PR threads the marker comment would otherwise be missed
            // and a duplicate posted. Optional-chain body: it can be null.
            const marker = '## Storage Benchmark Results';
            const comments = await github.paginate(github.rest.issues.listComments, {
              owner: context.repo.owner,
              repo: context.repo.repo,
              issue_number: context.issue.number,
            });

            const existing = comments.find(c => c.body?.startsWith(marker));

            if (existing) {
              await github.rest.issues.updateComment({
                owner: context.repo.owner,
                repo: context.repo.repo,
                comment_id: existing.id,
                body,
              });
            } else {
              await github.rest.issues.createComment({
                owner: context.repo.owner,
                repo: context.repo.repo,
                issue_number: context.issue.number,
                body,
              });
            }
      - name: Commit and push
        if: github.event_name != 'pull_request'
        run: |
          git config user.name "github-actions[bot]"
          git config user.email "github-actions[bot]@users.noreply.github.com"
          git add storage_*.svg results/storage/
          git diff --cached --quiet && echo "No changes to commit" && exit 0
          git commit -m "chore: update storage benchmark results [skip ci]"
          # Rebase onto any commits pushed since checkout so the push is not
          # rejected as non-fast-forward.
          git pull --rebase
          git push
131 changes: 129 additions & 2 deletions src/merge-results.ts
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,10 @@ import fs from 'fs';
import path from 'path';
import { fileURLToPath } from 'url';
import { computeCompositeScores } from './scoring.js';
import { computeStorageCompositeScores, sortStorageByCompositeScore } from './storage/scoring.js';
import { printResultsTable, writeResultsJson } from './table.js';
import type { BenchmarkResult } from './types.js';
import type { StorageBenchmarkResult } from './storage/types.js';

const __dirname = path.dirname(fileURLToPath(import.meta.url));
const ROOT = path.resolve(__dirname, '..');
Expand All @@ -24,8 +26,9 @@ function getArgValue(flag: string): string | undefined {
}

// CLI arguments: --input (required) points at the artifacts directory;
// --mode selects the merge pipeline ('storage' or default).
const inputDir = getArgValue('--input');
const mergeMode = getArgValue('--mode');
if (inputDir === undefined || inputDir === '') {
  console.error('Usage: tsx src/merge-results.ts --input <artifacts-dir> [--mode storage]');
  process.exit(1);
}

Expand All @@ -37,6 +40,14 @@ interface ResultFile {
results: BenchmarkResult[];
}

/** Shape of a per-provider `latest.json` storage-benchmark result file. */
interface StorageResultFile {
  /** Schema version of the result file. */
  version: string;
  /** ISO timestamp of when the benchmark ran. */
  timestamp: string;
  // `unknown` instead of `any`: this file only passes these through, so
  // callers must narrow before using the values.
  environment: Record<string, unknown>;
  config: Record<string, unknown>;
  /** One entry per benchmarked provider. */
  results: StorageBenchmarkResult[];
}

/** Map mode to results subdirectory name, matching run.ts logic */
function modeToDir(mode: string): string {
switch (mode) {
Expand Down Expand Up @@ -138,7 +149,123 @@ async function main() {
}
}

main().catch(err => {
/**
 * Print a storage results table to stdout.
 *
 * Rows are ordered by composite score; providers that were skipped or whose
 * every iteration errored are shown as SKIPPED / FAILED placeholder rows.
 */
function printStorageResultsTable(results: StorageBenchmarkResult[], fileSize: string): void {
  const ranked = sortStorageByCompositeScore(results);

  // Column widths shared by the header, separator, and every data row.
  const widths = [14, 8, 14, 14, 14, 10];
  const row = (cells: string[]): string =>
    cells.map((cell, idx) => cell.padEnd(widths[idx])).join(' | ');
  const rule = '='.repeat(95);

  console.log(`\n${rule}`);
  console.log(` STORAGE BENCHMARK RESULTS - ${fileSize.toUpperCase()}`);
  console.log(rule);
  console.log(row(['Provider', 'Score', 'Download', 'Throughput', 'Upload', 'Status']));
  console.log(widths.map(w => '-'.repeat(w)).join('-+-'));

  for (const entry of ranked) {
    if (entry.skipped) {
      console.log(row([entry.provider, '--', '--', '--', '--', 'SKIPPED']));
      continue;
    }
    const succeeded = entry.iterations.filter(it => !it.error).length;
    const attempted = entry.iterations.length;
    if (succeeded === 0 && attempted > 0) {
      console.log(row([entry.provider, '--', '--', '--', '--', 'FAILED']));
      continue;
    }
    const scoreText = entry.compositeScore !== undefined ? entry.compositeScore.toFixed(1) : '--';
    const downloadText = (entry.summary.downloadMs.median / 1000).toFixed(2) + 's';
    const throughputText = entry.summary.throughputMbps.median.toFixed(1) + ' Mbps';
    const uploadText = (entry.summary.uploadMs.median / 1000).toFixed(2) + 's';
    console.log(row([entry.provider, scoreText, downloadText, throughputText, uploadText, `${succeeded}/${attempted} OK`]));
  }
  console.log(rule);
}

/**
 * Merge storage benchmark results, grouped by file size.
 *
 * Walks the `--input` directory for per-provider `latest.json` files, groups
 * the contained results by file-size directory (e.g. "1mb"), deduplicates per
 * provider, recomputes composite scores, prints a summary table, and writes
 * merged results (dated + `latest.json`) under `results/storage/<size>/`.
 */
async function mainStorage() {
  const jsonFiles: string[] = [];
  // Recursively collect every latest.json under the input directory.
  function walk(dir: string) {
    if (!fs.existsSync(dir)) return;
    for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
      const full = path.join(dir, entry.name);
      if (entry.isDirectory()) walk(full);
      else if (entry.name === 'latest.json') jsonFiles.push(full);
    }
  }
  walk(inputDir!);

  if (jsonFiles.length === 0) {
    console.error(`No latest.json files found in ${inputDir}`);
    process.exit(1);
  }

  console.log(`Found ${jsonFiles.length} result files`);

  // Group results by file size (e.g. "1mb", "10mb", "100mb"). Track whether
  // each result came from a single-provider file so dedup can prefer those.
  const bySize: Record<string, { results: { result: StorageBenchmarkResult; fromSingleProvider: boolean }[] }> = {};

  for (const file of jsonFiles) {
    const raw: StorageResultFile = JSON.parse(fs.readFileSync(file, 'utf-8'));
    const fromSingleProvider = raw.results.length === 1;
    // Infer file size from the directory name, once per file (it is the same
    // for every result in the file), e.g.
    // artifacts/storage-results-aws-s3/storage/10mb/latest.json -> "10mb".
    const fileSize = path.basename(path.dirname(file)).toLowerCase();
    for (const result of raw.results) {
      if (!bySize[fileSize]) {
        bySize[fileSize] = { results: [] };
      }
      bySize[fileSize].results.push({ result, fromSingleProvider });
    }
  }

  // Hoist loop-invariant work: resolve the writer module and compute the
  // date stamp once instead of on every file-size iteration.
  const { writeStorageResultsJson } = await import('./storage/benchmark.js');
  const timestamp = new Date().toISOString().slice(0, 10);

  for (const [fileSize, { results }] of Object.entries(bySize)) {
    // Deduplicate by provider, preferring single-provider files over entries
    // that came from a combined multi-provider result file.
    const seen = new Map<string, { result: StorageBenchmarkResult; fromSingleProvider: boolean }>();
    for (const entry of results) {
      const existing = seen.get(entry.result.provider);
      if (!existing || (entry.fromSingleProvider && !existing.fromSingleProvider)) {
        seen.set(entry.result.provider, entry);
      }
    }
    const deduped = Array.from(seen.values()).map(e => e.result);

    if (deduped.length !== results.length) {
      console.log(`\nMerging ${deduped.length} provider results for storage/${fileSize} (deduplicated from ${results.length})`);
    } else {
      console.log(`\nMerging ${deduped.length} provider results for storage/${fileSize}`);
    }

    // Compute storage-specific composite scores (mutates entries in place).
    computeStorageCompositeScores(deduped);

    // Print storage table
    printStorageResultsTable(deduped, fileSize);

    // Write combined results: a dated snapshot plus a latest.json copy.
    const resultsDir = path.resolve(ROOT, `results/storage/${fileSize}`);
    fs.mkdirSync(resultsDir, { recursive: true });

    const outPath = path.join(resultsDir, `${timestamp}.json`);
    await writeStorageResultsJson(deduped, outPath);

    const latestPath = path.join(resultsDir, 'latest.json');
    fs.copyFileSync(outPath, latestPath);
    console.log(`Copied latest: ${latestPath}`);
  }
}

// Dispatch to the storage pipeline when `--mode storage` was passed,
// otherwise run the default merge; fail the process on any rejection.
const pipeline = mergeMode === 'storage' ? mainStorage() : main();
pipeline.catch(err => {
  console.error('Merge failed:', err);
  process.exit(1);
});
3 changes: 3 additions & 0 deletions src/scoring.ts
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,9 @@ function computeTimingScore(
stats: Stats,
weights: ScoringWeights = DEFAULT_WEIGHTS,
): number {
if (!stats || stats.median === undefined || stats.p95 === undefined || stats.p99 === undefined) {
return 0;
}
return (
weights.median * scoreMetric(stats.median) +
weights.p95 * scoreMetric(stats.p95) +
Expand Down
Loading