2025-11-20 11:11:46 +01:00
parent 27f985bedf
commit 311d0e19e3
2 changed files with 14 additions and 14 deletions

View File

@@ -42,7 +42,7 @@ pub fn block(
         Range.compare,
     );
     log.debug(
-        "block: range: {}, alignment: {}, aligned_range: {}, insert_idx: {}",
+        "block: range: {f}, alignment: {}, aligned_range: {f}, insert_idx: {}",
         .{ range, alignment, aligned_range, insert_idx },
     );
     // If the new range is the greatest one OR if the entry at `insert_idx` is greater than the
@@ -78,7 +78,7 @@ pub fn block(
         return;
     } else if (aligned_range.contains(first.*)) {
         log.debug(
-            "block: New range contains existing range at index {}: {} -> {}",
+            "block: New range contains existing range at index {}: {f} -> {f}",
             .{ insert_idx, first, aligned_range },
         );
         first.* = aligned_range;
@@ -117,7 +117,7 @@ pub fn block(
             first.end = neighbor.end;
         }
         const removed = address_allocator.ranges.orderedRemove(insert_idx + 1);
-        log.debug("block: Removed merged range: {}", .{removed});
+        log.debug("block: Removed merged range: {f}", .{removed});
         i += 1;
     }
     log.debug("block: Removed {} ranges.", .{i});
@@ -131,7 +131,7 @@ pub fn allocate(
    size: u64,
    valid_range: Range,
 ) !?Range {
-    log.debug("allocate: Allocating size {} in range {}", .{ size, valid_range });
+    log.debug("allocate: Allocating size {} in range {f}", .{ size, valid_range });
     if (valid_range.size() < size) return null;
     if (size == 0) return null;
     const size_i: i64 = @intCast(size);
@@ -157,7 +157,7 @@ pub fn allocate(
             };
             try address_allocator.block(gpa, new_range, 0);
             assert(valid_range.contains(new_range));
-            log.debug("allocate: Found free gap: {}", .{new_range});
+            log.debug("allocate: Found free gap: {f}", .{new_range});
             return new_range;
         }
     }
@@ -175,7 +175,7 @@ pub fn allocate(
     };
     try address_allocator.block(gpa, new_range, 0);
     assert(valid_range.contains(new_range));
-    log.debug("allocate: Found free gap at end: {}", .{new_range});
+    log.debug("allocate: Found free gap at end: {f}", .{new_range});
     return new_range;
 }
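
Every change in this file is the same mechanical substitution: `log.debug` calls that print a `Range` switch from `{}` to `{f}`, the specifier Zig's reworked `std.fmt` (the `*std.Io.Writer`-based interface) requires in order to invoke a type's custom `format` method. A minimal sketch of the pattern, using a simplified stand-in for `Range` whose real definition is not part of this diff:

const std = @import("std");

// Simplified stand-in for the project's `Range`; only the `format` method matters here.
const Range = struct {
    start: u64,
    end: u64,

    // New-style format method: it receives a `*std.Io.Writer` and is invoked
    // only when the value is printed with the `{f}` specifier.
    pub fn format(self: Range, writer: *std.Io.Writer) std.Io.Writer.Error!void {
        try writer.print("[0x{x}, 0x{x})", .{ self.start, self.end });
    }
};

pub fn main() void {
    const r: Range = .{ .start = 0x1000, .end = 0x2000 };
    // `{f}` calls Range.format; a plain `{}` no longer picks it up, which is
    // exactly what the substitutions in this file account for.
    std.log.info("range: {f}, size: {}", .{ r, r.end - r.start });
}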

View File

@@ -1,4 +1,4 @@
 //! Iterates through all possible valid address ranges for a `jmp rel32` instruction based on a
 //! 4-byte pattern of "free" and "used" bytes.
 //!
 //! This is the core utility for implementing E9Patch-style instruction punning (B2) and padded
@@ -48,9 +48,9 @@ trailing_free_count: u8,
 /// Initializes the iterator.
 /// - `patch_bytes`: The 4-byte pattern of the `rel32` offset, in little-endian order.
-/// - `offset`: The address of the *next* instruction (i.e., `RIP` after the 5-byte `jmp`).
-///   All returned ranges will be relative to this offset.
-pub fn init(patch_bytes: [patch_size]PatchByte, offset: u64) PatchLocationIterator {
+/// - `addr`: The base address (e.g., RIP of the *next* instruction) that the 32-bit relative
+///   offset is calculated from.
+pub fn init(patch_bytes: [patch_size]PatchByte, addr: u64) PatchLocationIterator {
     log.debug("hi", .{});
     assert(patch_bytes.len == patch_size);
@@ -80,7 +80,7 @@ pub fn init(patch_bytes: [patch_size]PatchByte, offset: u64) PatchLocationIterat
     }
     const out = PatchLocationIterator{
-        .offset = @intCast(offset),
+        .offset = @intCast(addr),
         .patch_bytes = patch_bytes,
         .trailing_free_count = trailing_free,
         .start = start,
@@ -110,7 +110,7 @@ pub fn next(self: *PatchLocationIterator) ?Range {
     if (self.trailing_free_count == patch_size) {
         if (self.first) {
             const range = Range{ .start = self.offset, .end = self.offset + std.math.maxInt(i32) };
-            log.debug("next: All bytes free, returning full positive range: {}", .{range});
+            log.debug("next: All bytes free, returning full positive range: {f}", .{range});
             return range;
         } else {
             log.info("next: All bytes free, iteration finished.", .{});
@@ -123,7 +123,7 @@ pub fn next(self: *PatchLocationIterator) ?Range {
             .start = std.mem.readInt(PatchInt, self.start[0..], .little) + self.offset,
             .end = std.mem.readInt(PatchInt, self.end[0..], .little) + self.offset,
         };
-        log.debug("next: First call, returning initial range: {}", .{range});
+        log.debug("next: First call, returning initial range: {f}", .{range});
         return range;
     }
@@ -160,7 +160,7 @@ pub fn next(self: *PatchLocationIterator) ?Range {
         .start = start + self.offset,
         .end = end + self.offset,
     };
-    log.debug("next: new range: {}", .{range});
+    log.debug("next: new range: {f}", .{range});
     return range;
 }
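
For orientation, a hedged sketch of how the API visible in this diff (`init(patch_bytes, addr)` and `next()`) might be driven from a call site. The shape of `PatchByte` is not shown here, so the `.free` / `.{ .used = ... }` variants below are assumptions, as is importing the file as a struct type.

const std = @import("std");
const log = std.log.scoped(.patcher);

// Assumed layout: the file is a "struct file", so the import yields the iterator type,
// and `PatchByte` is assumed to be a public declaration inside it.
const PatchLocationIterator = @import("PatchLocationIterator.zig");
const PatchByte = PatchLocationIterator.PatchByte;

fn logCandidateRanges(addr: u64) void {
    // Hypothetical pattern for the 4 bytes of the rel32 field (little-endian order):
    // the most significant byte is pinned to a value that already exists at the patch
    // site, while the lower three bytes are free to take any value.
    const pattern = [4]PatchByte{ .free, .free, .free, .{ .used = 0x00 } };
    var it = PatchLocationIterator.init(pattern, addr);
    // Each returned range is a contiguous span of jump targets, relative to `addr`,
    // reachable with a rel32 value that is compatible with the pinned byte.
    while (it.next()) |range| {
        log.debug("candidate target range: {f}", .{range});
    }
}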