//! A byte ring buffer backed by a heap-allocated slice. Read and write
//! indices are kept modulo twice the backing slice's length (see `mask2`) and
//! reduced modulo its length on access (see `mask`), so every slot can hold
//! data while the full and empty states stay distinguishable from the indices
//! alone. Not designed for concurrent readers and writers.

const Allocator = @import("std").mem.Allocator;
const assert = @import("std").debug.assert;
const copyForwards = @import("std").mem.copyForwards;
const RingBuffer = @This();
data: []u8,
read_index: usize,
write_index: usize,

/// Errors returned by the fallible ring buffer operations.
pub const Error = error{ Full, ReadLengthInvalid };

/// Allocate a new `RingBuffer`; `deinit()` should be called to free its backing buffer.
pub fn init(allocator: Allocator, capacity: usize) Allocator.Error!RingBuffer {
    const bytes = try allocator.alloc(u8, capacity);
    return RingBuffer{
        .data = bytes,
        .write_index = 0,
        .read_index = 0,
    };
}

/// Free the data backing a `RingBuffer`; must be passed the same `Allocator` that was given to `init()`.
pub fn deinit(self: *RingBuffer, allocator: Allocator) void {
    allocator.free(self.data);
    self.* = undefined;
}
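
// Illustrative usage sketch (added for clarity): allocate a buffer with the
// testing allocator, round-trip a single byte, and free it. `read` returns
// null (not an error) once the buffer is empty.
test "RingBuffer: init, write/read round trip, deinit" {
    const testing = @import("std").testing;
    var rb = try RingBuffer.init(testing.allocator, 2);
    defer rb.deinit(testing.allocator);

    try rb.write('x');
    try testing.expectEqual(@as(?u8, 'x'), rb.read());
    try testing.expectEqual(@as(?u8, null), rb.read());
}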

/// Returns `index` modulo the length of the backing slice.
pub fn mask(self: RingBuffer, index: usize) usize {
    return index % self.data.len;
}

/// Returns `index` modulo twice the length of the backing slice.
pub fn mask2(self: RingBuffer, index: usize) usize {
    return index % (2 * self.data.len);
}
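
// Illustrative sketch of the doubled-index scheme: with capacity 2 the indices
// advance modulo 4 (`mask2`) while element accesses use modulo 2 (`mask`), so
// "full" (indices differing by the capacity) never looks like "empty" (indices
// equal).
test "RingBuffer: mask2 keeps full and empty distinguishable" {
    const testing = @import("std").testing;
    var rb = try RingBuffer.init(testing.allocator, 2);
    defer rb.deinit(testing.allocator);

    try testing.expect(rb.isEmpty());
    rb.writeAssumeCapacity(1);
    rb.writeAssumeCapacity(2);
    try testing.expect(rb.isFull());
    try testing.expectEqual(@as(usize, 2), rb.mask2(rb.write_index));
    try testing.expectEqual(@as(usize, 0), rb.mask(rb.write_index));
}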

/// Write `byte` into the ring buffer. Returns `error.Full` if the ring buffer is full.
pub fn write(self: *RingBuffer, byte: u8) Error!void {
    if (self.isFull()) return error.Full;
    self.writeAssumeCapacity(byte);
}

/// Write `byte` into the ring buffer. If the ring buffer is full, the oldest byte is overwritten.
pub fn writeAssumeCapacity(self: *RingBuffer, byte: u8) void {
    self.data[self.mask(self.write_index)] = byte;
    self.write_index = self.mask2(self.write_index + 1);
}

/// Write `bytes` into the ring buffer. Returns `error.Full`, without writing
/// any data, if the ring buffer does not have enough free space. Uses
/// `@memcpy`, so `bytes` must not overlap the ring buffer's data.
pub fn writeSlice(self: *RingBuffer, bytes: []const u8) Error!void {
    if (self.len() + bytes.len > self.data.len) return error.Full;
    self.writeSliceAssumeCapacity(bytes);
}

/// Write `bytes` into the ring buffer. If there is not enough free space,
/// older bytes are overwritten. Uses `@memcpy`, so `bytes` must not overlap
/// the ring buffer's data.
pub fn writeSliceAssumeCapacity(self: *RingBuffer, bytes: []const u8) void {
    const data_start = self.mask(self.write_index);
    // Part 1: copy as much as fits between the write position and the end of
    // the backing slice.
    const part1_data_end = @min(data_start + bytes.len, self.data.len);
    const part1_len = part1_data_end - data_start;
    @memcpy(self.data[data_start..part1_data_end], bytes[0..part1_len]);
    // Parts 2 and 3: wrap around to the start of the backing slice. When
    // `bytes` is longer than the backing slice, anything but roughly the last
    // `data.len` input bytes would be overwritten anyway, so the copy skips
    // straight to the tail of `bytes`.
    const remaining = bytes.len - part1_len;
    const to_write = @min(remaining, remaining % self.data.len + self.data.len);
    const part2_bytes_start = bytes.len - to_write;
    const part2_bytes_end = @min(part2_bytes_start + self.data.len, bytes.len);
    const part2_len = part2_bytes_end - part2_bytes_start;
    @memcpy(self.data[0..part2_len], bytes[part2_bytes_start..part2_bytes_end]);
    if (part2_bytes_end != bytes.len) {
        const part3_len = bytes.len - part2_bytes_end;
        @memcpy(self.data[0..part3_len], bytes[part2_bytes_end..bytes.len]);
    }
    self.write_index = self.mask2(self.write_index + bytes.len);
}

/// Write `bytes` into the ring buffer. Returns `error.Full`, without writing
/// any data, if the ring buffer does not have enough free space. Uses
/// `copyForwards`, so `bytes` may be a slice of this ring buffer's own data.
pub fn writeSliceForwards(self: *RingBuffer, bytes: []const u8) Error!void {
    if (self.len() + bytes.len > self.data.len) return error.Full;
    self.writeSliceForwardsAssumeCapacity(bytes);
}

/// Write `bytes` into the ring buffer. If there is not enough free space,
/// older bytes are overwritten. Uses `copyForwards`, so `bytes` may be a
/// slice of this ring buffer's own data.
pub fn writeSliceForwardsAssumeCapacity(self: *RingBuffer, bytes: []const u8) void {
    const data_start = self.mask(self.write_index);
    const part1_data_end = @min(data_start + bytes.len, self.data.len);
    const part1_len = part1_data_end - data_start;
    copyForwards(u8, self.data[data_start..], bytes[0..part1_len]);
    const remaining = bytes.len - part1_len;
    const to_write = @min(remaining, remaining % self.data.len + self.data.len);
    const part2_bytes_start = bytes.len - to_write;
    const part2_bytes_end = @min(part2_bytes_start + self.data.len, bytes.len);
    copyForwards(u8, self.data[0..], bytes[part2_bytes_start..part2_bytes_end]);
    if (part2_bytes_end != bytes.len)
        copyForwards(u8, self.data[0..], bytes[part2_bytes_end..bytes.len]);
    self.write_index = self.mask2(self.write_index + bytes.len);
}
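
// Illustrative sketch of the two write flavours: `writeSlice` refuses writes
// that do not fit, while `writeSliceAssumeCapacity` advances past the oldest
// bytes and overwrites them.
test "RingBuffer: writeSlice rejects overflow, AssumeCapacity overwrites" {
    const testing = @import("std").testing;
    var rb = try RingBuffer.init(testing.allocator, 4);
    defer rb.deinit(testing.allocator);

    try rb.writeSlice("abc");
    try testing.expectError(error.Full, rb.writeSlice("de"));

    // Overwriting: after three more bytes, only the last four remain.
    rb.writeSliceAssumeCapacity("def");
    var dest: [4]u8 = undefined;
    rb.readLastAssumeLength(&dest, 4);
    try testing.expectEqualSlices(u8, "cdef", &dest);
}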

/// Consume a byte from the ring buffer and return it. Returns `null` if the ring buffer is empty.
pub fn read(self: *RingBuffer) ?u8 {
    if (self.isEmpty()) return null;
    return self.readAssumeLength();
}

/// Consume a byte from the ring buffer and return it; asserts that the buffer is not empty.
pub fn readAssumeLength(self: *RingBuffer) u8 {
    assert(!self.isEmpty());
    const byte = self.data[self.mask(self.read_index)];
    self.read_index = self.mask2(self.read_index + 1);
    return byte;
}

/// Reads the first (oldest) `length` bytes in the ring buffer into `dest`.
/// Returns `error.ReadLengthInvalid` if `length` is greater than the number
/// of buffered bytes or than `dest.len`. Uses `@memcpy`, so `dest` must not
/// overlap the ring buffer's data.
pub fn readFirst(self: *RingBuffer, dest: []u8, length: usize) Error!void {
    if (length > self.len() or length > dest.len) return error.ReadLengthInvalid;
    self.readFirstAssumeLength(dest, length);
}

/// Reads the first (oldest) `length` bytes in the ring buffer into `dest`;
/// asserts that `length` is not greater than the number of buffered bytes or
/// `dest.len`. Uses `@memcpy`, so `dest` must not overlap the ring buffer's
/// data.
pub fn readFirstAssumeLength(self: *RingBuffer, dest: []u8, length: usize) void {
    assert(length <= self.len() and length <= dest.len);
    const data_start = self.mask(self.read_index);
    const part1_data_end = @min(self.data.len, data_start + length);
    const part1_len = part1_data_end - data_start;
    const part2_len = length - part1_len;
    @memcpy(dest[0..part1_len], self.data[data_start..part1_data_end]);
    @memcpy(dest[part1_len..length], self.data[0..part2_len]);
    self.read_index = self.mask2(self.read_index + length);
}

/// Reads the last (newest) `length` bytes in the ring buffer into `dest` and
/// reduces the write index by `length`. Returns `error.ReadLengthInvalid` if
/// `length` is greater than the number of buffered bytes or than `dest.len`.
pub fn readLast(self: *RingBuffer, dest: []u8, length: usize) Error!void {
    if (length > self.len() or length > dest.len) return error.ReadLengthInvalid;
    self.readLastAssumeLength(dest, length);
}

/// Reads the last (newest) `length` bytes in the ring buffer into `dest` and
/// reduces the write index by `length`; asserts that `length` is not greater
/// than the number of buffered bytes or `dest.len`.
pub fn readLastAssumeLength(self: *RingBuffer, dest: []u8, length: usize) void {
    assert(length <= self.len() and length <= dest.len);
    const data_start = self.mask(self.write_index + self.data.len - length);
    const part1_data_end = @min(self.data.len, data_start + length);
    const part1_len = part1_data_end - data_start;
    const part2_len = length - part1_len;
    @memcpy(dest[0..part1_len], self.data[data_start..part1_data_end]);
    @memcpy(dest[part1_len..length], self.data[0..part2_len]);
    self.write_index = if (self.write_index >= self.data.len) self.write_index - length else data_start;
}
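
// Illustrative sketch of the read flavours: `readFirst` consumes the oldest
// bytes and advances the read index, `readLast` takes the newest bytes and
// rolls the write index back, and over-long reads are rejected.
test "RingBuffer: readFirst and readLast consume opposite ends" {
    const testing = @import("std").testing;
    var rb = try RingBuffer.init(testing.allocator, 8);
    defer rb.deinit(testing.allocator);

    try rb.writeSlice("abcdef");

    var head: [2]u8 = undefined;
    try rb.readFirst(&head, 2);
    try testing.expectEqualSlices(u8, "ab", &head);

    var tail: [2]u8 = undefined;
    try rb.readLast(&tail, 2);
    try testing.expectEqualSlices(u8, "ef", &tail);

    // "cd" is still buffered; asking for more than that is invalid.
    try testing.expectEqual(@as(usize, 2), rb.len());
    var rest: [3]u8 = undefined;
    try testing.expectError(error.ReadLengthInvalid, rb.readFirst(&rest, 3));
}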

/// Returns `true` if the ring buffer is empty and `false` otherwise.
pub fn isEmpty(self: RingBuffer) bool {
    return self.write_index == self.read_index;
}

/// Returns `true` if the ring buffer is full and `false` otherwise.
pub fn isFull(self: RingBuffer) bool {
    return self.mask2(self.write_index + self.data.len) == self.read_index;
}

/// Returns the number of bytes available for reading.
pub fn len(self: RingBuffer) usize {
    // The write index may have wrapped modulo `2 * data.len` below the read
    // index; adding the wrap offset back keeps the subtraction in range.
    const wrap_offset = 2 * self.data.len * @intFromBool(self.write_index < self.read_index);
    const adjusted_write_index = self.write_index + wrap_offset;
    return adjusted_write_index - self.read_index;
}
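
// Illustrative sketch: `len` remains correct while the indices wrap modulo
// twice the capacity, which is exactly the case `wrap_offset` handles.
test "RingBuffer: len stays correct across index wrap-around" {
    const testing = @import("std").testing;
    var rb = try RingBuffer.init(testing.allocator, 3);
    defer rb.deinit(testing.allocator);

    // Fill and drain repeatedly so both indices wrap modulo 2 * capacity.
    var i: u8 = 0;
    while (i < 10) : (i += 1) {
        try rb.write(i);
        try testing.expectEqual(@as(usize, 1), rb.len());
        try testing.expectEqual(@as(?u8, i), rb.read());
        try testing.expectEqual(@as(usize, 0), rb.len());
    }
    try testing.expect(rb.isEmpty());
}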

/// A region of the ring buffer, returned as two slices because the data is
/// not contiguous in the backing slice when the region wraps past its end.
pub const Slice = struct {
    first: []u8,
    second: []u8,
};

/// Returns a `Slice` for the region of the ring buffer starting at
/// `self.mask(start_unmasked)` with the given `length`.
pub fn sliceAt(self: RingBuffer, start_unmasked: usize, length: usize) Slice {
    assert(length <= self.data.len);
    const slice1_start = self.mask(start_unmasked);
    const slice1_end = @min(self.data.len, slice1_start + length);
    const slice1 = self.data[slice1_start..slice1_end];
    const slice2 = self.data[0 .. length - slice1.len];
    return Slice{
        .first = slice1,
        .second = slice2,
    };
}

/// Returns a `Slice` for the last `length` bytes written to the ring buffer.
/// Does not check that those bytes were actually written.
pub fn sliceLast(self: RingBuffer, length: usize) Slice {
    return self.sliceAt(self.write_index + self.data.len - length, length);
}
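
// Illustrative sketch: once the region of interest wraps past the end of the
// backing slice, `sliceLast` hands it back in two pieces instead of copying.
test "RingBuffer: sliceLast splits a wrapped region" {
    const testing = @import("std").testing;
    var rb = try RingBuffer.init(testing.allocator, 4);
    defer rb.deinit(testing.allocator);

    try rb.writeSlice("abcd");
    var drained: [2]u8 = undefined;
    try rb.readFirst(&drained, 2);
    try rb.writeSlice("ef"); // wraps; the backing slice now holds "efcd"

    const last = rb.sliceLast(4);
    try testing.expectEqualSlices(u8, "cd", last.first);
    try testing.expectEqualSlices(u8, "ef", last.second);
}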