-
Notifications
You must be signed in to change notification settings - Fork 333
/
Copy pathaggregate.ts
252 lines (227 loc) · 9.71 KB
/
aggregate.ts
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
// Given a local folder with the e2e benchmark files, generates a single file
// output with the grouped metrics to be published. This script can probably
// be replaced by a single call to jq, but I found this easier to write,
// and pretty much every CI comes with a working version of node.
//
// To test this locally, first run the benchmark tests from the yarn-project/end-to-end folder
// BENCHMARK=1 yarn test bench
//
// And then run this script from the yarn-project/scripts folder
// LOG_FOLDER=../end-to-end/log yarn bench-aggregate
import { createConsoleLogger } from '@aztec/foundation/log';
import {
BENCHMARK_BLOCK_SIZES,
BENCHMARK_HISTORY_BLOCK_SIZE,
BENCHMARK_HISTORY_CHAIN_LENGTHS,
BenchmarkMetricResults,
BenchmarkResults,
BenchmarkResultsWithTimestamp,
CircuitSimulationStats,
L1PublishStats,
L2BlockBuiltStats,
L2BlockHandledStats,
MetricName,
NodeSyncedChainHistoryStats,
NoteProcessorCaughtUpStats,
Stats,
TreeInsertionStats,
TxAddedToPoolStats,
TxPXEProcessingStats,
TxSequencerProcessingStats,
} from '@aztec/types/stats';
import * as fs from 'fs';
import { mkdirpSync } from 'fs-extra';
import * as path from 'path';
import * as readline from 'readline';
import { BenchDir, BenchFile, LogsDir } from './paths.js';
/** Shared console logger used by all aggregation helpers in this script. */
const log = createConsoleLogger();
/**
 * Appends a data point to the final results for the given metric in the given bucket.
 * Missing or non-numeric values are logged and skipped so a single malformed stat
 * does not abort the whole aggregation.
 * @param results - Accumulator mapping metric name to bucket to raw data points.
 * @param metric - Metric the data point belongs to.
 * @param bucket - Bucket within the metric (eg a block size or a circuit name).
 * @param value - Raw data point; bigints are coerced to number (precision may be
 * lost above 2^53 — acceptable for benchmark magnitudes).
 */
function append(
  results: BenchmarkCollectedResults,
  metric: MetricName,
  bucket: number | string,
  // Widened to include undefined: callers forward optional stat fields, and the
  // body already guards for it — the previous signature hid that possibility.
  value: number | bigint | undefined,
) {
  if (value === undefined) {
    log(`Undefined value for ${metric} in bucket ${bucket}`);
    return;
  }
  const numeric = Number(value);
  if (Number.isNaN(numeric)) {
    log(`Value ${value} for ${metric} in ${bucket} is not a number`);
    return;
  }
  // Lazily create the metric map and the bucket array on first sighting.
  const buckets = (results[metric] ??= {});
  (buckets[bucket] ??= []).push(numeric);
}
/**
 * Handles a 'rollup-published-to-l1' event: records L1 publishing costs,
 * bucketed by the number of txs in the rollup. Block sizes outside the
 * benchmarked set are ignored.
 */
function processRollupPublished(entry: L1PublishStats, results: BenchmarkCollectedResults) {
  const txCount = entry.txCount;
  if (BENCHMARK_BLOCK_SIZES.includes(txCount)) {
    append(results, 'l1_rollup_calldata_gas', txCount, entry.calldataGas);
    append(results, 'l1_rollup_calldata_size_in_bytes', txCount, entry.calldataSize);
    append(results, 'l1_rollup_execution_gas', txCount, entry.gasUsed);
  }
}
/**
 * Handles an 'l2-block-handled' event: records how long a node took to process
 * a synced block, bucketed by tx count. Blocks built by the same node are
 * skipped, since their processing is trivial and would skew the numbers.
 */
function processRollupBlockSynced(entry: L2BlockHandledStats, results: BenchmarkCollectedResults) {
  const { txCount, isBlockOurs, duration } = entry;
  if (isBlockOurs || !BENCHMARK_BLOCK_SIZES.includes(txCount)) {
    return;
  }
  append(results, 'l2_block_processing_time_in_ms', txCount, duration);
}
/**
 * Handles a 'circuit-simulation' event: records simulation time plus input and
 * output sizes, bucketed by circuit name. Entries without a circuit name are dropped.
 */
function processCircuitSimulation(entry: CircuitSimulationStats, results: BenchmarkCollectedResults) {
  const { circuitName, duration, inputSize, outputSize } = entry;
  if (circuitName) {
    append(results, 'circuit_simulation_time_in_ms', circuitName, duration);
    append(results, 'circuit_input_size_in_bytes', circuitName, inputSize);
    append(results, 'circuit_output_size_in_bytes', circuitName, outputSize);
  }
}
/**
 * Handles a 'note-processor-caught-up' event. Decrypting metrics are bucketed by
 * rollup size (successful uses the decrypted count, trial uses the seen count);
 * history metrics are bucketed by chain length. A run with zero decrypted notes
 * counts as trial decryption, one with more than zero as successful.
 */
function processNoteProcessorCaughtUp(entry: NoteProcessorCaughtUpStats, results: BenchmarkCollectedResults) {
  const { seen, decrypted, blocks, duration, dbSize } = entry;
  const isTrial = decrypted === 0;
  if (BENCHMARK_BLOCK_SIZES.includes(decrypted)) {
    append(results, 'note_successful_decrypting_time_in_ms', decrypted, duration);
  }
  if (isTrial && BENCHMARK_BLOCK_SIZES.includes(seen)) {
    append(results, 'note_trial_decrypting_time_in_ms', seen, duration);
  }
  if (BENCHMARK_HISTORY_CHAIN_LENGTHS.includes(blocks)) {
    if (decrypted > 0) {
      append(results, 'note_history_successful_decrypting_time_in_ms', blocks, duration);
      append(results, 'pxe_database_size_in_bytes', blocks, dbSize);
    }
    if (isTrial) {
      append(results, 'note_history_trial_decrypting_time_in_ms', blocks, duration);
    }
  }
}
/**
 * Handles an 'l2-block-built' event: records total build time, rollup circuit
 * simulation time, and public tx processing time, bucketed by rollup size.
 */
function processL2BlockBuilt(entry: L2BlockBuiltStats, results: BenchmarkCollectedResults) {
  const { txCount, duration, rollupCircuitsDuration, publicProcessDuration } = entry;
  if (BENCHMARK_BLOCK_SIZES.includes(txCount)) {
    append(results, 'l2_block_building_time_in_ms', txCount, duration);
    append(results, 'l2_block_rollup_simulation_time_in_ms', txCount, rollupCircuitsDuration);
    append(results, 'l2_block_public_tx_process_time_in_ms', txCount, publicProcessDuration);
  }
}
/**
 * Handles a 'node-synced-chain-history' event: records full-chain sync time and
 * node database size, bucketed by chain length. Only chains of the benchmarked
 * lengths built with the benchmark's standard block size are counted.
 */
function processNodeSyncedChain(entry: NodeSyncedChainHistoryStats, results: BenchmarkCollectedResults) {
  const { blockCount, txsPerBlock, duration, dbSize } = entry;
  const isBenchmarkedChain =
    BENCHMARK_HISTORY_CHAIN_LENGTHS.includes(blockCount) && txsPerBlock === BENCHMARK_HISTORY_BLOCK_SIZE;
  if (!isBenchmarkedChain) {
    return;
  }
  append(results, 'node_history_sync_time_in_ms', blockCount, duration);
  append(results, 'node_database_size_in_bytes', blockCount, dbSize);
}
/** Handles a 'tx-added-to-pool' event: records tx size, bucketed by deployed contract count. */
function processTxAddedToPool(entry: TxAddedToPoolStats, results: BenchmarkCollectedResults) {
  const { newContractCount, size } = entry;
  append(results, 'tx_size_in_bytes', newContractCount, size);
}
/** Handles a 'tx-pxe-processing' event: records PXE processing time, bucketed by new commitment count. */
function processTxPXEProcessingStats(entry: TxPXEProcessingStats, results: BenchmarkCollectedResults) {
  const { newCommitmentCount, duration } = entry;
  append(results, 'tx_pxe_processing_time_ms', newCommitmentCount, duration);
}
/** Handles a 'tx-sequencer-processing' event: records sequencer processing time, bucketed by public data writes. */
function processTxSequencerProcessingStats(entry: TxSequencerProcessingStats, results: BenchmarkCollectedResults) {
  const { publicDataUpdateRequests, duration } = entry;
  append(results, 'tx_sequencer_processing_time_ms', publicDataUpdateRequests, duration);
}
/**
 * Handles a 'tree-insertion' event: records batch insertion time, bucketed by
 * batch size, under a metric that depends on the tree type. Other tree types
 * (if any) are ignored.
 */
function processTreeInsertion(entry: TreeInsertionStats, results: BenchmarkCollectedResults) {
  const { treeType, batchSize, duration } = entry;
  switch (treeType) {
    case 'append-only':
      append(results, 'batch_insert_into_append_only_tree_ms', batchSize, duration);
      break;
    case 'indexed':
      append(results, 'batch_insert_into_indexed_tree_ms', batchSize, duration);
      break;
    default:
      break;
  }
}
/**
 * Dispatches a parsed log-file entry to the processor matching its event name.
 * Entries with an unrecognized event name are silently ignored.
 */
function processEntry(entry: Stats, results: BenchmarkCollectedResults) {
  // Comparing entry.eventName directly keeps the discriminated-union narrowing
  // so each processor receives its precise stats type.
  if (entry.eventName === 'rollup-published-to-l1') {
    processRollupPublished(entry, results);
  } else if (entry.eventName === 'l2-block-handled') {
    processRollupBlockSynced(entry, results);
  } else if (entry.eventName === 'circuit-simulation') {
    processCircuitSimulation(entry, results);
  } else if (entry.eventName === 'note-processor-caught-up') {
    processNoteProcessorCaughtUp(entry, results);
  } else if (entry.eventName === 'l2-block-built') {
    processL2BlockBuilt(entry, results);
  } else if (entry.eventName === 'node-synced-chain-history') {
    processNodeSyncedChain(entry, results);
  } else if (entry.eventName === 'tx-added-to-pool') {
    processTxAddedToPool(entry, results);
  } else if (entry.eventName === 'tx-pxe-processing') {
    processTxPXEProcessingStats(entry, results);
  } else if (entry.eventName === 'tx-sequencer-processing') {
    processTxSequencerProcessingStats(entry, results);
  } else if (entry.eventName === 'tree-insertion') {
    processTreeInsertion(entry, results);
  }
}
/** Raw data points collected for one metric, keyed by bucket (block size, circuit name, etc). */
type BenchmarkCollectedMetricResults = Record<string, number[]>;
/** All raw data points collected so far, keyed by metric; each bucket is averaged at the end. */
type BenchmarkCollectedResults = Partial<Record<MetricName, BenchmarkCollectedMetricResults>>;
/**
 * Parses all jsonl files in the logs dir and aggregates them into a single
 * results object, averaging the data points per bucket per metric, then writes
 * the timestamped result to the benchmark output file.
 * Blank or malformed lines are logged and skipped rather than aborting the run,
 * so one corrupt log entry cannot discard an entire benchmark session.
 */
export async function main() {
  const collected: BenchmarkCollectedResults = {};
  // Get all jsonl files in the logs dir
  const files = fs.readdirSync(LogsDir).filter(f => f.endsWith('.jsonl'));
  // Iterate over each .jsonl file, streaming it line by line
  for (const file of files) {
    const filePath = path.join(LogsDir, file);
    const fileStream = fs.createReadStream(filePath);
    const rl = readline.createInterface({ input: fileStream });
    for await (const line of rl) {
      // Skip blank lines (eg trailing newlines or separators in the jsonl file).
      if (!line.trim()) {
        continue;
      }
      let entry;
      try {
        entry = JSON.parse(line);
      } catch (err) {
        // A truncated or corrupt line should not abort the whole aggregation.
        log(`Skipping malformed line in ${file}: ${line}`);
        continue;
      }
      processEntry(entry, collected);
    }
  }
  log(`Collected entries: ${JSON.stringify(collected)}`);
  // For each bucket of each metric compute the average of all collected data points
  const results: BenchmarkResults = {};
  for (const [metricName, metric] of Object.entries(collected)) {
    const resultMetric: BenchmarkMetricResults = {};
    results[metricName as MetricName] = resultMetric;
    for (const [bucketName, bucket] of Object.entries(metric)) {
      let avg = bucket.reduce((acc, val) => acc + val, 0) / bucket.length;
      // Drop decimals on large values to keep the output readable.
      if (avg > 100) {
        avg = Math.floor(avg);
      }
      resultMetric[bucketName] = avg;
    }
  }
  const timestampedResults: BenchmarkResultsWithTimestamp = { ...results, timestamp: new Date().toISOString() };
  // Write results to disk
  log(`Aggregated results: ${JSON.stringify(timestampedResults, null, 2)}`);
  mkdirpSync(BenchDir);
  fs.writeFileSync(BenchFile, JSON.stringify(timestampedResults, null, 2));
}