Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat(blockstore): benchmarks #275

Open
wants to merge 22 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions build.zig
Original file line number Diff line number Diff line change
Expand Up @@ -143,6 +143,7 @@ pub fn build(b: *Build) void {
benchmark_exe.root_module.addImport("zig-network", zig_network_module);
benchmark_exe.root_module.addImport("httpz", httpz_mod);
benchmark_exe.root_module.addImport("zstd", zstd_mod);
benchmark_exe.root_module.addImport("rocksdb", rocksdb_mod);
benchmark_exe.linkLibC();

const benchmark_exe_run = b.addRunArtifact(benchmark_exe);
Expand Down
Binary file not shown.
Binary file not shown.
8 changes: 8 additions & 0 deletions src/benchmarks.zig
Original file line number Diff line number Diff line change
Expand Up @@ -102,6 +102,14 @@ pub fn main() !void {
.microseconds,
);
}

if (std.mem.startsWith(u8, filter, "ledger") or run_all_benchmarks) {
try benchmark(
@import("ledger/benchmarks.zig").BenchmarLegder,
max_time_per_bench,
TimeUnits.microseconds,
);
}
}

const TimeUnits = enum {
Expand Down
162 changes: 162 additions & 0 deletions src/ledger/benchmarks.zig
Original file line number Diff line number Diff line change
@@ -0,0 +1,162 @@
const std = @import("std");
const sig = @import("../sig.zig");
const ledger_tests = @import("./tests.zig");
const ledger = @import("lib.zig");

const Reward = ledger.transaction_status.Reward;
const Rewards = ledger.transaction_status.Rewards;
const RewardType = ledger.transaction_status.RewardType;
const Pubkey = sig.core.Pubkey;
const TestState = ledger_tests.TestState;
const TestDB = ledger_tests.TestDB;

const schema = ledger.schema.schema;
const deinitShreds = ledger_tests.deinitShreds;
const testShreds = ledger_tests.testShreds;

const test_shreds_dir = sig.TEST_DATA_DIR ++ "/shreds";
const State = TestState("global");
const DB = TestDB("global");

/// Builds `count` deterministic synthetic rewards for benchmarking.
/// Caller owns the returned list; each reward's pubkey bytes are
/// heap-duplicated with `allocator`.
fn createRewards(allocator: std.mem.Allocator, count: usize) !Rewards {
    var rng = std.Random.DefaultPrng.init(100);
    const rand = rng.random();
    var rewards: Rewards = Rewards.init(allocator);
    errdefer rewards.deinit();
    for (0..count) |i| {
        try rewards.append(Reward{
            // Duplicate the key bytes: `&Pubkey.random(rand).data` would
            // point into a temporary that dies at the end of this statement,
            // leaving a dangling pointer inside the stored Reward.
            .pubkey = try allocator.dupe(u8, &Pubkey.random(rand).data),
            .lamports = @intCast(42 + i),
            .post_balance = std.math.maxInt(u64),
            .reward_type = RewardType.Fee,
            .commission = null,
        });
    }
    return rewards;
}

/// Ledger (blockstore) benchmarks, mirroring agave's blockstore/protobuf
/// benches. NOTE(review): the name `BenchmarLegder` is misspelled but is
/// referenced by the benchmark runner, so it is kept for compatibility.
pub const BenchmarLegder = struct {
    pub const min_iterations = 5;
    pub const max_iterations = 5;

    /// Times inserting a batch of data shreds loaded from a test fixture.
    /// Analogous to [bench_write_small](https://github.com/anza-xyz/agave/blob/cfd393654f84c36a3c49f15dbe25e16a0269008d/ledger/benches/blockstore.rs#L59)
    pub fn benchWriteSmall() !u64 {
        const allocator = std.heap.c_allocator;
        var state = try State.init(allocator, "bench write small");
        defer state.deinit();
        var inserter = try state.shredInserter();

        const prefix = "agave.blockstore.bench_write_small.";
        const shreds = try testShreds(allocator, prefix ++ "shreds.bin");
        defer deinitShreds(allocator, shreds);

        // None of the shreds are treated as repair responses.
        const is_repairs = try inserter.allocator.alloc(bool, shreds.len);
        defer inserter.allocator.free(is_repairs);
        @memset(is_repairs, false);

        var timer = try std.time.Timer.start();
        _ = try inserter.insertShreds(shreds, is_repairs, null, false, null);
        return timer.read();
    }

    /// Times sequential reads of data shreds starting at a random index,
    /// wrapping around at the end of the slot.
    /// Analogous to [bench_read_sequential](https://github.com/anza-xyz/agave/blob/cfd393654f84c36a3c49f15dbe25e16a0269008d/ledger/benches/blockstore.rs#L78)
    pub fn benchReadSequential() !u64 {
        const allocator = std.heap.c_allocator;
        var state = try State.init(allocator, "bench read sequential");
        defer state.deinit();
        var inserter = try state.shredInserter();
        var reader = try state.reader();

        const prefix = "agave.blockstore.bench_read.";
        const shreds = try testShreds(allocator, prefix ++ "shreds.bin");
        defer deinitShreds(allocator, shreds);

        const total_shreds = shreds.len;

        _ = try ledger.insert_shred.insertShredsForTest(&inserter, shreds);

        const slot: u32 = 0;
        const num_reads = total_shreds / 15;

        var rng = std.Random.DefaultPrng.init(100);

        var timer = try std.time.Timer.start();
        // uintLessThan keeps the starting index strictly in bounds
        // (total_shreds is one past the last valid index); the modulo below
        // wraps subsequent reads around the end of the slot.
        const start_index = rng.random().uintLessThan(usize, total_shreds);
        for (start_index..start_index + num_reads) |i| {
            const shred_index = i % total_shreds;
            _ = try reader.getDataShred(slot, shred_index);
        }
        return timer.read();
    }

    /// Times reads of data shreds at pre-generated random indices.
    /// Analogous to [bench_read_random](https://github.com/anza-xyz/agave/blob/92eca1192b055d896558a78759d4e79ab4721ff1/ledger/benches/blockstore.rs#L103)
    pub fn benchReadRandom() !u64 {
        const allocator = std.heap.c_allocator;
        var state = try State.init(allocator, "bench read random");
        defer state.deinit();
        var inserter = try state.shredInserter();
        var reader = try state.reader();

        const prefix = "agave.blockstore.bench_read.";
        const shreds = try testShreds(allocator, prefix ++ "shreds.bin");
        defer deinitShreds(allocator, shreds);

        const total_shreds = shreds.len;
        _ = try ledger.insert_shred.insertShredsForTest(&inserter, shreds);

        const slot: u32 = 0;

        var rng = std.Random.DefaultPrng.init(100);

        // Pre-generate the indices so RNG work is excluded from the timed
        // section. uintLessThan (not uintAtMost) keeps every index in
        // bounds: total_shreds itself is one past the last valid index.
        var indices = try std.ArrayList(u32).initCapacity(inserter.allocator, total_shreds);
        defer indices.deinit();
        for (0..total_shreds) |_| {
            indices.appendAssumeCapacity(rng.random().uintLessThan(u32, @intCast(total_shreds)));
        }

        var timer = try std.time.Timer.start();
        for (indices.items) |shred_index| {
            _ = try reader.getDataShred(slot, shred_index);
        }
        return timer.read();
    }

    /// Times bincode serialization + write of a rewards entry to the db.
    /// Analogous to [bench_serialize_write_bincode](https://github.com/anza-xyz/agave/blob/9c2098450ca7e5271e3690277992fbc910be27d0/ledger/benches/protobuf.rs#L88)
    pub fn benchSerializeWriteBincode() !u64 {
        const allocator = std.heap.c_allocator;
        var state = try State.init(allocator, "bench serialize write bincode");
        defer state.deinit();
        const slot: u32 = 0;

        var rewards: Rewards = try createRewards(allocator, 100);
        var timer = try std.time.Timer.start();
        try state.db.put(schema.rewards, slot, .{
            .rewards = try rewards.toOwnedSlice(),
            .num_partitions = null,
        });
        return timer.read();
    }

    /// Times reading back the raw bincode bytes of a rewards entry.
    pub fn benchReadBincode() !u64 {
        const allocator = std.heap.c_allocator;
        var state = try State.init(allocator, "bench read bincode");
        defer state.deinit();
        const slot: u32 = 1;

        var rewards: Rewards = try createRewards(allocator, 100);
        // Write outside the timed section; only the read is measured.
        try state.db.put(schema.rewards, slot, .{
            .rewards = try rewards.toOwnedSlice(),
            .num_partitions = null,
        });
        var timer = try std.time.Timer.start();
        _ = try state.db.getBytes(schema.rewards, slot);
        return timer.read();
    }
};
1 change: 0 additions & 1 deletion src/ledger/rocksdb.zig
Original file line number Diff line number Diff line change
Expand Up @@ -211,7 +211,6 @@ pub fn RocksDB(comptime column_families: []const ColumnFamily) type {
defer key_bytes.deinit();
const val_bytes = try value_serializer.serializeToRef(self.allocator, value);
defer val_bytes.deinit();

self.inner.put(
self.cf_handles[cf.find(column_families)],
key_bytes.data,
Expand Down
23 changes: 18 additions & 5 deletions src/ledger/tests.zig
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
const std = @import("std");
const sig = @import("../sig.zig");
const ledger = @import("lib.zig");
const transaction_status = @import("./transaction_status.zig");
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This can be used as ledger.transaction_status and does not require the file to be imported.

Currently our convention is that you only import root source files from the current module. For example you can import sig.zig if you're in the sig project, and you can import lib.zig from the current folder. But all other imports go through the decls defined by those imports.

This is definitely up for discussion. If you'd like to adopt a different convention, I'm happy to discuss it. But if you have no preference, let's import this transaction_status the ledger decl.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I think Importing via the root source file is indeed better. Updated in 692d561


const Allocator = std.mem.Allocator;

Expand Down Expand Up @@ -73,9 +74,9 @@ test "insert shreds and transaction statuses then get blocks" {
const blockhash = entries[entries.len - 1].hash;
const blockhash_string = blockhash.base58String();

const shreds = try testShreds(prefix ++ "shreds.bin");
const more_shreds = try testShreds(prefix ++ "more_shreds.bin");
const unrooted_shreds = try testShreds(prefix ++ "unrooted_shreds.bin");
const shreds = try testShreds(std.testing.allocator, prefix ++ "shreds.bin");
const more_shreds = try testShreds(std.testing.allocator, prefix ++ "more_shreds.bin");
const unrooted_shreds = try testShreds(std.testing.allocator, prefix ++ "unrooted_shreds.bin");
defer inline for (.{ shreds, more_shreds, unrooted_shreds }) |slice| {
deinitShreds(std.testing.allocator, slice);
};
Expand Down Expand Up @@ -251,9 +252,9 @@ pub fn freshDir(path: []const u8) !void {

const test_shreds_dir = sig.TEST_DATA_DIR ++ "/shreds";

fn testShreds(comptime filename: []const u8) ![]const Shred {
/// Loads the shreds stored in `filename` under the test-data shreds
/// directory, allocating them with `allocator`.
pub fn testShreds(allocator: std.mem.Allocator, comptime filename: []const u8) ![]const Shred {
    // The full path is resolved at comptime; loading is delegated to the
    // generic binary-file loader.
    return loadShredsFromFile(
        allocator,
        comptimePrint("{s}/{s}", .{ test_shreds_dir, filename }),
    );
}

/// Read shreds from binary file structured like this:
Expand Down Expand Up @@ -325,6 +326,18 @@ pub fn deinitShreds(allocator: Allocator, shreds: []const Shred) void {

/// Read entries from binary file structured like this:
/// [entry0_len: u64(little endian)][entry0_bincode][entry1_len...
///
/// loadEntriesFromFile can read entries produced by this rust function:
/// ```rust
/// fn save_entries_to_file(entries: &[Entry], path: &str) {
/// let mut file = std::fs::File::create(path).unwrap();
/// for entry in &entries {
/// let payload = bincode::serialize(&entry).unwrap();
/// file.write(&payload.len().to_le_bytes()).unwrap();
/// file.write(&*payload).unwrap();
/// }
/// }
/// ```
pub fn loadEntriesFromFile(allocator: Allocator, path: []const u8) ![]const Entry {
const file = try std.fs.cwd().openFile(path, .{});
const reader = file.reader();
Expand Down
Loading