Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
docs/plans/
.zig-cache/
zig-out/
zig-pkg/
Expand Down
43 changes: 43 additions & 0 deletions Makefile
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
# Zig binary to use; override with `make ZIG=/path/to/zig`.
ZIG ?= zig

# Declare every command-style target as phony so a file/dir with the same
# name can never shadow it. (fmt-fix was previously missing from this list.)
.PHONY: build test fmt fmt-fix lint ci integration-test bench bench-u256 bench-keccak clean

## Build the library (default)
build:
	$(ZIG) build

## Run unit tests (no network required)
test:
	$(ZIG) build test

## Check formatting — mirrors the CI fmt job
fmt:
	$(ZIG) fmt --check src/ tests/

## Auto-fix formatting
fmt-fix:
	$(ZIG) fmt src/ tests/

## Run fmt + test — everything CI checks locally (no Anvil required)
lint: fmt test

## Full CI check: build + fmt + test (matches all CI jobs, still no Anvil)
ci: build fmt test

## Run integration tests (requires Anvil running on localhost:8545)
integration-test:
	$(ZIG) build integration-test

## Run all benchmarks (ReleaseFast)
bench:
	$(ZIG) build bench -Doptimize=ReleaseFast

bench-u256:
	$(ZIG) build bench-u256 -Doptimize=ReleaseFast

bench-keccak:
	$(ZIG) build bench-keccak -Doptimize=ReleaseFast

## Remove build artifacts (zig-cache is the pre-0.12 cache dir name,
## .zig-cache the current one; remove both for safety)
clean:
	rm -rf zig-out zig-cache .zig-cache
18 changes: 9 additions & 9 deletions src/abi_encode.zig
Original file line number Diff line number Diff line change
Expand Up @@ -136,7 +136,7 @@ fn encodeValuesInto(allocator: std.mem.Allocator, buf: *std.ArrayList(u8), value

// First pass: calculate tail offsets for dynamic values
// and pre-compute the offset each dynamic value will be at
std.debug.assert(values.len <= max_tuple_values);
if (values.len > max_tuple_values) return error.TooManyValues;
var offsets: [max_tuple_values]usize = undefined;
for (values, 0..) |val, i| {
if (val.isDynamic()) {
Expand All @@ -157,7 +157,7 @@ fn encodeValuesInto(allocator: std.mem.Allocator, buf: *std.ArrayList(u8), value
// Third pass: write tail section directly into buf (no temp allocations)
for (values) |val| {
if (val.isDynamic()) {
encodeDynamicValueInto(buf, val);
try encodeDynamicValueInto(buf, val);
}
}
}
Expand Down Expand Up @@ -209,7 +209,7 @@ fn encodeStaticValueNoAlloc(buf: *std.ArrayList(u8), val: AbiValue) void {
}

/// Encode a dynamic value directly into the output buffer (no temp allocation).
fn encodeDynamicValueInto(buf: *std.ArrayList(u8), val: AbiValue) void {
fn encodeDynamicValueInto(buf: *std.ArrayList(u8), val: AbiValue) EncodeError!void {
switch (val) {
.bytes => |data| {
writeUint256NoAlloc(buf, @intCast(data.len));
Expand All @@ -225,28 +225,28 @@ fn encodeDynamicValueInto(buf: *std.ArrayList(u8), val: AbiValue) void {
},
.array => |items| {
writeUint256NoAlloc(buf, @intCast(items.len));
encodeValuesIntoNoAlloc(buf, items);
try encodeValuesIntoNoAlloc(buf, items);
},
.fixed_array => |items| {
encodeValuesIntoNoAlloc(buf, items);
try encodeValuesIntoNoAlloc(buf, items);
},
.tuple => |items| {
encodeValuesIntoNoAlloc(buf, items);
try encodeValuesIntoNoAlloc(buf, items);
},
else => unreachable,
}
}

/// Encode values into an ArrayList that already has sufficient capacity.
fn encodeValuesIntoNoAlloc(buf: *std.ArrayList(u8), values: []const AbiValue) void {
fn encodeValuesIntoNoAlloc(buf: *std.ArrayList(u8), values: []const AbiValue) EncodeError!void {
const n = values.len;
if (n == 0) return;

const head_size = n * 32;
var tail_offset: usize = head_size;

// Calculate offsets for dynamic values
std.debug.assert(values.len <= max_tuple_values);
if (values.len > max_tuple_values) return error.TooManyValues;
var offsets: [max_tuple_values]usize = undefined;
for (values, 0..) |val, i| {
if (val.isDynamic()) {
Expand All @@ -267,7 +267,7 @@ fn encodeValuesIntoNoAlloc(buf: *std.ArrayList(u8), values: []const AbiValue) vo
// Write tails
for (values) |val| {
if (val.isDynamic()) {
encodeDynamicValueInto(buf, val);
try encodeDynamicValueInto(buf, val);
}
}
}
Expand Down
23 changes: 14 additions & 9 deletions src/multicall.zig
Original file line number Diff line number Diff line change
Expand Up @@ -171,11 +171,12 @@ pub fn decodeAggregate3Results(allocator: std.mem.Allocator, data: []const u8) !

// First word: offset to array data (should be 0x20)
const array_offset = readWord(data[0..32]);
if (array_offset + 32 > data.len) return error.InvalidAbiData;
const array_header_end = std.math.add(usize, array_offset, 32) catch return error.InvalidAbiData;
if (array_header_end > data.len) return error.InvalidAbiData;

// Array length
const array_len = readWord(data[array_offset .. array_offset + 32]);
const array_data_start = array_offset + 32;
const array_data_start = array_offset + 32; // safe: array_header_end <= data.len

var results = try allocator.alloc(Result, array_len);
errdefer {
Expand All @@ -188,25 +189,29 @@ pub fn decodeAggregate3Results(allocator: std.mem.Allocator, data: []const u8) !
// Read offsets for each result tuple
for (0..array_len) |i| {
const offset_pos = array_data_start + i * 32;
if (offset_pos + 32 > data.len) return error.InvalidAbiData;
const offset_end = std.math.add(usize, offset_pos, 32) catch return error.InvalidAbiData;
if (offset_end > data.len) return error.InvalidAbiData;
const tuple_offset = readWord(data[offset_pos .. offset_pos + 32]);
const tuple_start = array_data_start + tuple_offset;
const tuple_start = std.math.add(usize, array_data_start, tuple_offset) catch return error.InvalidAbiData;

// Each tuple: (bool success, bytes returnData)
// word 0: success (bool)
// word 1: offset to returnData within the tuple
// At that offset: length word + data
if (tuple_start + 64 > data.len) return error.InvalidAbiData;
const tuple_end = std.math.add(usize, tuple_start, 64) catch return error.InvalidAbiData;
if (tuple_end > data.len) return error.InvalidAbiData;

const success_word = readWord(data[tuple_start .. tuple_start + 32]);
const return_data_offset = readWord(data[tuple_start + 32 .. tuple_start + 64]);
const return_data_abs = tuple_start + return_data_offset;
const return_data_abs = std.math.add(usize, tuple_start, return_data_offset) catch return error.InvalidAbiData;

if (return_data_abs + 32 > data.len) return error.InvalidAbiData;
const return_data_header_end = std.math.add(usize, return_data_abs, 32) catch return error.InvalidAbiData;
if (return_data_header_end > data.len) return error.InvalidAbiData;
const return_data_len = readWord(data[return_data_abs .. return_data_abs + 32]);
const return_data_start = return_data_abs + 32;
const return_data_start = return_data_abs + 32; // safe: return_data_header_end <= data.len

if (return_data_start + return_data_len > data.len) return error.InvalidAbiData;
const return_data_end = std.math.add(usize, return_data_start, return_data_len) catch return error.InvalidAbiData;
if (return_data_end > data.len) return error.InvalidAbiData;

var return_data: []const u8 = &.{};
if (return_data_len > 0) {
Expand Down
4 changes: 2 additions & 2 deletions src/provider.zig
Original file line number Diff line number Diff line change
Expand Up @@ -716,7 +716,7 @@ fn parseLogsArray(allocator: std.mem.Allocator, obj: std.json.ObjectMap) ![]cons
}

/// Parse a single Log from a JSON object.
fn parseSingleLog(allocator: std.mem.Allocator, obj: std.json.ObjectMap) !receipt_mod.Log {
pub fn parseSingleLog(allocator: std.mem.Allocator, obj: std.json.ObjectMap) !receipt_mod.Log {
const address = (try parseOptionalAddress(jsonGetString(obj, "address"))) orelse return error.InvalidResponse;
const data_str = jsonGetString(obj, "data") orelse "0x";
const data = try parseHexBytes(allocator, data_str);
Expand Down Expand Up @@ -816,7 +816,7 @@ fn parseLogsResponse(allocator: std.mem.Allocator, raw: []const u8) ![]receipt_m
}

/// Parse a block header from a raw JSON-RPC response.
fn parseBlockHeader(allocator: std.mem.Allocator, raw: []const u8) !?block_mod.BlockHeader {
pub fn parseBlockHeader(allocator: std.mem.Allocator, raw: []const u8) !?block_mod.BlockHeader {
const parsed = std.json.parseFromSlice(std.json.Value, allocator, raw, .{}) catch {
return error.InvalidResponse;
};
Expand Down
Loading
Loading