Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

chore: Validate blobs posted to sink belong to our L2 #12587

Merged
merged 1 commit into from
Mar 10, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions spartan/aztec-network/templates/blob-sink.yaml
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Changes in this file are the ones I'm least confident about, please double-check

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

looks great!

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

looks great

Original file line number Diff line number Diff line change
Expand Up @@ -32,13 +32,16 @@ spec:
{{- include "aztec-network.gcpLocalSsd" . | nindent 6 }}
{{- end }}
dnsPolicy: ClusterFirstWithHostNet
initContainers:
{{- include "aztec-network.serviceAddressSetupContainer" . | nindent 8 }}
containers:
- name: blob-sink
{{- include "aztec-network.image" . | nindent 10 }}
command:
- /bin/bash
- -c
- |
source /shared/config/service-addresses && \
env && \
node --no-warnings /usr/src/yarn-project/aztec/dest/bin/index.js start --blob-sink
startupProbe:
Expand Down Expand Up @@ -87,6 +90,8 @@ spec:
value: "{{ .Values.blobSink.dataStoreConfig.dataStoreMapSize }}"
- name: USE_GCLOUD_LOGGING
value: "{{ .Values.telemetry.useGcloudLogging }}"
- name: L1_CHAIN_ID
value: "{{ .Values.ethereum.chainId }}"
ports:
- containerPort: {{ .Values.blobSink.service.nodePort }}
resources:
Expand Down
1 change: 1 addition & 0 deletions yarn-project/blob-sink/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,7 @@
},
"dependencies": {
"@aztec/blob-lib": "workspace:^",
"@aztec/ethereum": "workspace:^",
"@aztec/foundation": "workspace:^",
"@aztec/kv-store": "workspace:*",
"@aztec/stdlib": "workspace:^",
Expand Down
16 changes: 10 additions & 6 deletions yarn-project/blob-sink/src/server/config.ts
Original file line number Diff line number Diff line change
@@ -1,14 +1,17 @@
import { type ConfigMappingsType, getConfigFromMappings } from '@aztec/foundation/config';
import { pickConfigMappings } from '@aztec/foundation/config';
import {
type L1ContractAddresses,
type L1ReaderConfig,
l1ContractAddressesMapping,
l1ReaderConfigMappings,
} from '@aztec/ethereum';
import { type ConfigMappingsType, getConfigFromMappings, pickConfigMappings } from '@aztec/foundation/config';
import { type DataStoreConfig, dataConfigMappings } from '@aztec/kv-store/config';
import type { ChainConfig } from '@aztec/stdlib/config';
import { chainConfigMappings } from '@aztec/stdlib/config';

export type BlobSinkConfig = {
port?: number;
archiveApiUrl?: string;
dataStoreConfig?: DataStoreConfig;
} & Partial<Pick<ChainConfig, 'l1ChainId'>>;
} & Partial<Pick<L1ReaderConfig, 'l1ChainId' | 'l1RpcUrls'> & Pick<L1ContractAddresses, 'rollupAddress'>>;

export const blobSinkConfigMappings: ConfigMappingsType<BlobSinkConfig> = {
port: {
Expand All @@ -23,7 +26,8 @@ export const blobSinkConfigMappings: ConfigMappingsType<BlobSinkConfig> = {
env: 'BLOB_SINK_ARCHIVE_API_URL',
description: 'The URL of the archive API',
},
...pickConfigMappings(chainConfigMappings, ['l1ChainId']),
...pickConfigMappings(l1ReaderConfigMappings, ['l1ChainId', 'l1RpcUrls']),
...pickConfigMappings(l1ContractAddressesMapping, ['rollupAddress']),
};

/**
Expand Down
7 changes: 5 additions & 2 deletions yarn-project/blob-sink/src/server/factory.ts
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import { getPublicClient } from '@aztec/ethereum';
import type { AztecAsyncKVStore } from '@aztec/kv-store';
import { createStore } from '@aztec/kv-store/lmdb-v2';
import type { TelemetryClient } from '@aztec/telemetry-client';
Expand All @@ -19,11 +20,13 @@ async function getDataStoreConfig(config?: BlobSinkConfig): Promise<AztecAsyncKV
* Creates a blob sink service from the provided config.
*/
export async function createBlobSinkServer(
config?: BlobSinkConfig,
config: BlobSinkConfig = {},
telemetry?: TelemetryClient,
): Promise<BlobSinkServer> {
const store = await getDataStoreConfig(config);
const archiveClient = createBlobArchiveClient(config);
const { l1ChainId, l1RpcUrls } = config;
const l1PublicClient = l1ChainId && l1RpcUrls ? getPublicClient({ l1ChainId, l1RpcUrls }) : undefined;

return new BlobSinkServer(config, store, archiveClient, telemetry);
return new BlobSinkServer(config, store, archiveClient, l1PublicClient, telemetry);
}
66 changes: 63 additions & 3 deletions yarn-project/blob-sink/src/server/server.test.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import { Blob } from '@aztec/blob-lib';
import { makeEncodedBlob } from '@aztec/blob-lib/testing';
import { hexToBuffer } from '@aztec/foundation/string';
import type { L2BlockProposedEvent, ViemPublicClient } from '@aztec/ethereum';
import { bufferToHex, hexToBuffer } from '@aztec/foundation/string';
import { fileURLToPath } from '@aztec/foundation/url';

import { readFile } from 'fs/promises';
Expand All @@ -18,8 +19,10 @@ import { BlobSinkServer } from './server.js';
describe('BlobSinkService', () => {
let service: BlobSinkServer;

const startServer = async (config: Partial<BlobSinkConfig & { blobArchiveClient: BlobArchiveClient }> = {}) => {
service = new BlobSinkServer({ ...config, port: 0 }, undefined, config.blobArchiveClient);
const startServer = async (
config: Partial<BlobSinkConfig & { blobArchiveClient: BlobArchiveClient; l1Client: ViemPublicClient }> = {},
) => {
service = new BlobSinkServer({ ...config, port: 0 }, undefined, config.blobArchiveClient, config.l1Client);
await service.start();
};

Expand Down Expand Up @@ -163,6 +166,63 @@ describe('BlobSinkService', () => {
});
});

// Exercises the new L1-validation path: when an l1Client is supplied, posted blobs
// must match the versioned blob hashes emitted in L2 block proposal events.
describe('with l1 client', () => {
let l1Client: MockProxy<ViemPublicClient>;
let blob: Blob;
let blob2: Blob;

// Block root used as block_id in requests and expected as the event query's blockHash.
const blockId = '0x1234';

beforeEach(async () => {
blob = await makeEncodedBlob(3);
blob2 = await makeEncodedBlob(3);
// Mock L1 client that reports exactly one proposal event containing only `blob`'s
// versioned hash, so `blob` is accepted and `blob2` is not.
l1Client = mock<ViemPublicClient>();
l1Client.getContractEvents.mockResolvedValue([
{
args: {
versionedBlobHashes: [bufferToHex(blob.getEthVersionedBlobHash())],
archive: '0x5678',
blockNumber: 1234n,
} satisfies L2BlockProposedEvent,
} as any,
]);

await startServer({ l1Client });
});

afterEach(() => {
// Every test in this suite must trigger exactly one event lookup, keyed by the block id.
expect(l1Client.getContractEvents).toHaveBeenCalledTimes(1);
expect(l1Client.getContractEvents).toHaveBeenCalledWith(expect.objectContaining({ blockHash: blockId }));
});

it('should accept blobs emitted by rollup contract', async () => {
const postResponse = await request(service.getApp())
.post('/blob_sidecar')
.send({
// eslint-disable-next-line camelcase
block_id: blockId,
blobs: [{ index: 0, blob: outboundTransform(blob.toBuffer()) }],
});

expect(postResponse.status).toBe(200);
});

it('should reject blobs not emitted by rollup contract', async () => {
const postResponse = await request(service.getApp())
.post('/blob_sidecar')
.send({
// eslint-disable-next-line camelcase
block_id: blockId,
blobs: [
{ index: 0, blob: outboundTransform(blob.toBuffer()) },
// blob2's hash is not in the mocked event, so the whole request must be rejected.
{ index: 1, blob: outboundTransform(blob2.toBuffer()) },
],
});

expect(postResponse.status).toBe(400);
});
});

describe('with archive', () => {
let archiveClient: MockProxy<BlobArchiveClient>;

Expand Down
64 changes: 52 additions & 12 deletions yarn-project/blob-sink/src/server/server.ts
Original file line number Diff line number Diff line change
@@ -1,12 +1,14 @@
import { Blob, type BlobJson } from '@aztec/blob-lib';
import { type ViemPublicClient, getL2BlockProposalEvents } from '@aztec/ethereum';
import { type Logger, createLogger } from '@aztec/foundation/log';
import { pluralize } from '@aztec/foundation/string';
import { bufferToHex, pluralize } from '@aztec/foundation/string';
import type { AztecAsyncKVStore } from '@aztec/kv-store';
import { type TelemetryClient, getTelemetryClient } from '@aztec/telemetry-client';

import express, { type Express, type Request, type Response, json } from 'express';
import type { Server } from 'http';
import type { AddressInfo } from 'net';
import type { Hex } from 'viem';
import { z } from 'zod';

import type { BlobArchiveClient } from '../archive/index.js';
Expand All @@ -33,11 +35,13 @@ export class BlobSinkServer {
private blobStore: BlobStore;
private metrics: BlobSinkMetrics;
private log: Logger = createLogger('aztec:blob-sink');
private l1PublicClient: ViemPublicClient | undefined;

constructor(
config?: BlobSinkConfig,
private config: BlobSinkConfig = {},
store?: AztecAsyncKVStore,
private blobArchiveClient?: BlobArchiveClient,
l1PublicClient?: ViemPublicClient,
telemetry: TelemetryClient = getTelemetryClient(),
) {
this.port = config?.port ?? 5052; // 5052 is beacon chain default http port
Expand All @@ -47,6 +51,7 @@ export class BlobSinkServer {
this.app.use(json({ limit: '1mb' })); // Increase the limit to allow for a blob to be sent

this.metrics = new BlobSinkMetrics(telemetry);
this.l1PublicClient = l1PublicClient;

this.blobStore = store === undefined ? new MemoryBlobStore() : new DiskBlobStore(store);

Expand Down Expand Up @@ -195,30 +200,35 @@ export class BlobSinkServer {
// eslint-disable-next-line camelcase
const { block_id, blobs } = req.body;

let parsedBlockId: Hex;
let blobObjects: BlobWithIndex[];

try {
// eslint-disable-next-line camelcase
const parsedBlockId = blockIdSchema.parse(block_id);
parsedBlockId = blockIdSchema.parse(block_id);
if (!parsedBlockId) {
res.status(400).json({
error: 'Invalid block_id parameter',
});
res.status(400).json({ error: 'Invalid block_id parameter' });
return;
}

this.log.info(`Received blob sidecar for block ${parsedBlockId}`);
blobObjects = this.parseBlobData(blobs);
await this.validateBlobs(parsedBlockId, blobObjects);
} catch (error: any) {
res.status(400).json({ error: 'Invalid blob data', details: error.message });
return;
}

const blobObjects: BlobWithIndex[] = this.parseBlobData(blobs);

try {
await this.blobStore.addBlobSidecars(parsedBlockId.toString(), blobObjects);
this.metrics.recordBlobReciept(blobObjects);

this.log.info(`Blob sidecar stored successfully for block ${parsedBlockId}`);

res.json({ message: 'Blob sidecar stored successfully' });
} catch (error) {
res.status(400).json({
error: 'Invalid blob data',
});
} catch (error: any) {
this.log.error(`Error storing blob sidecar for block ${parsedBlockId}`, error);
res.status(500).json({ error: 'Error storing blob sidecar', details: error.message });
}
}

Expand All @@ -241,6 +251,36 @@ export class BlobSinkServer {
);
}

/**
 * Validates the given blobs were actually emitted by a rollup contract.
 * Skips validation if the L1 public client is not set.
 * If the rollupAddress is set in config, it checks that the event came from that contract.
 * @param blockId - The L1 block hash the blobs were posted for.
 * @param blobs - The blobs to validate against the block's L2 proposal events.
 * @throws If any blob's versioned hash is not found in the proposal events.
 */
private async validateBlobs(blockId: Hex, blobs: BlobWithIndex[]): Promise<void> {
  if (!this.l1PublicClient) {
    this.log.debug('Skipping blob validation due to no L1 public client set');
    return;
  }

  // A zero rollup address means "not configured": accept events from any contract.
  const rollupAddress = this.config.rollupAddress?.isZero() ? undefined : this.config.rollupAddress;
  const events = await getL2BlockProposalEvents(this.l1PublicClient, blockId, rollupAddress);
  const eventBlobHashes = events.flatMap(event => event.versionedBlobHashes);
  const blobHashesToValidate = blobs.map(blob => bufferToHex(blob.blob.getEthVersionedBlobHash()));

  this.log.debug(
    // Check length, not truthiness: an array is always truthy, so the previous
    // `eventBlobHashes ? ... : 'none'` could never log 'none' (it logged '' instead).
    `Retrieved ${events.length} events with blob hashes ${
      eventBlobHashes.length > 0 ? eventBlobHashes.join(', ') : 'none'
    } for block ${blockId} to verify blobs ${blobHashesToValidate.join(', ')}`,
  );

  // Set lookup avoids O(n*m) repeated Array.includes scans.
  const knownBlobHashes = new Set(eventBlobHashes);
  const notFoundBlobHashes = blobHashesToValidate.filter(blobHash => !knownBlobHashes.has(blobHash));

  if (notFoundBlobHashes.length > 0) {
    throw new Error(`Blobs ${notFoundBlobHashes.join(', ')} not found in block proposal event at block ${blockId}`);
  }
}

public start(): Promise<void> {
return new Promise((resolve, reject) => {
this.server = this.app.listen(this.port, () => {
Expand Down
15 changes: 9 additions & 6 deletions yarn-project/blob-sink/src/types/api.ts
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import type { Hex } from 'viem';
import { z } from 'zod';

export interface PostBlobSidecarRequest {
Expand All @@ -15,7 +16,9 @@ export interface PostBlobSidecarRequest {
export const blockRootSchema = z
.string()
.regex(/^0x[0-9a-fA-F]{0,64}$/)
.max(66);
.max(66)
.transform(str => str as Hex);

// Slot identifier: a positive integer (zero and negatives are rejected).
export const slotSchema = z.number().int().positive();

// Define the Zod schema for an array of numbers
Expand All @@ -28,11 +31,11 @@ export const indicesSchema = z.optional(
.transform(str => str.split(',').map(Number)),
); // Convert to an array of numbers

/**
 * Block identifier. The spec allows for <hex encoded blockRoot with 0x prefix>, <slot>, "head", "genesis", "finalized", but we only support the block root.
 * See https://ethereum.github.io/beacon-APIs/?urls.primaryName=dev#/Beacon/getBlobSidecars.
 */
export const blockIdSchema = blockRootSchema;

export const postBlobSidecarSchema = z.object({
// eslint-disable-next-line camelcase
Expand Down
3 changes: 3 additions & 0 deletions yarn-project/blob-sink/tsconfig.json
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,9 @@
{
"path": "../blob-lib"
},
{
"path": "../ethereum"
},
{
"path": "../foundation"
},
Expand Down
Loading