Mirror of https://github.com/azalea-rs/simdnbt.git

optimize writing

mat 2025-03-15 09:25:14 +00:00
parent ffe5b2575d
commit 246dc18709
8 changed files with 233 additions and 101 deletions


@@ -8,12 +8,12 @@ use super::{
};
use crate::{
common::{
extend_unchecked, push_unchecked, read_int_array, read_long_array, read_string,
read_with_u32_length, write_string, write_string_unchecked, BYTE_ARRAY_ID, BYTE_ID,
COMPOUND_ID, DOUBLE_ID, END_ID, FLOAT_ID, INT_ARRAY_ID, INT_ID, LIST_ID, LONG_ARRAY_ID,
LONG_ID, MAX_DEPTH, SHORT_ID, STRING_ID,
read_int_array, read_long_array, read_string, read_with_u32_length, write_string,
write_string_unchecked, BYTE_ARRAY_ID, BYTE_ID, COMPOUND_ID, DOUBLE_ID, END_ID, FLOAT_ID,
INT_ARRAY_ID, INT_ID, LIST_ID, LONG_ARRAY_ID, LONG_ID, MAX_DEPTH, SHORT_ID, STRING_ID,
},
error::NonRootError,
fastvec::{FastVec, FastVecFromVec},
reader::Reader,
Mutf8Str,
};
@@ -47,13 +47,17 @@ impl<'a: 'tape, 'tape> NbtCompound<'a, 'tape> {
}
pub fn write(&self, data: &mut Vec<u8>) {
self.write_fastvec(&mut FastVecFromVec::new(data));
}
pub(crate) fn write_fastvec(&self, data: &mut FastVec<u8>) {
for (name, tag) in self.iter() {
// reserve 4 bytes extra so we can avoid reallocating for small tags
data.reserve(1 + 2 + name.len() + 4);
// SAFETY: We just reserved enough space for the tag ID, the name length, the
// name, and 4 bytes of tag data.
unsafe {
push_unchecked(data, tag.id());
data.push_unchecked(tag.id());
write_string_unchecked(data, name);
}
@@ -467,23 +471,23 @@ fn handle_compound_end(tapes: &mut Tapes, stack: &mut ParsingStack) {
};
}
pub(crate) fn write_tag(tag: NbtTag, data: &mut Vec<u8>) {
pub(crate) fn write_tag(tag: NbtTag, data: &mut FastVec<u8>) {
let el = tag.element();
match el.kind() {
TapeTagKind::Byte => unsafe {
push_unchecked(data, tag.byte().unwrap() as u8);
data.push_unchecked(tag.byte().unwrap() as u8);
},
TapeTagKind::Short => unsafe {
extend_unchecked(data, &tag.short().unwrap().to_be_bytes());
data.extend_from_slice_unchecked(&tag.short().unwrap().to_be_bytes());
},
TapeTagKind::Int => unsafe {
extend_unchecked(data, &tag.int().unwrap().to_be_bytes());
data.extend_from_slice_unchecked(&tag.int().unwrap().to_be_bytes());
},
TapeTagKind::Long => {
data.extend_from_slice(&tag.long().unwrap().to_be_bytes());
}
TapeTagKind::Float => unsafe {
extend_unchecked(data, &tag.float().unwrap().to_be_bytes());
data.extend_from_slice_unchecked(&tag.float().unwrap().to_be_bytes());
},
TapeTagKind::Double => {
data.extend_from_slice(&tag.double().unwrap().to_be_bytes());
@@ -491,7 +495,7 @@ pub(crate) fn write_tag(tag: NbtTag, data: &mut Vec<u8>) {
TapeTagKind::ByteArray => {
let byte_array = tag.byte_array().unwrap();
unsafe {
extend_unchecked(data, &(byte_array.len() as u32).to_be_bytes());
data.extend_from_slice_unchecked(&(byte_array.len() as u32).to_be_bytes());
}
data.extend_from_slice(byte_array);
}
@@ -500,16 +504,16 @@ pub(crate) fn write_tag(tag: NbtTag, data: &mut Vec<u8>) {
write_string(data, string);
}
kind if kind.is_list() => {
tag.list().unwrap().write(data);
tag.list().unwrap().write_fastvec(data);
}
TapeTagKind::Compound => {
tag.compound().unwrap().write(data);
tag.compound().unwrap().write_fastvec(data);
}
TapeTagKind::IntArray => {
let int_array =
unsafe { list::u32_prefixed_list_to_rawlist_unchecked::<i32>(el.ptr()).unwrap() };
unsafe {
extend_unchecked(data, &(int_array.len() as u32).to_be_bytes());
data.extend_from_slice_unchecked(&(int_array.len() as u32).to_be_bytes());
}
data.extend_from_slice(int_array.as_big_endian());
}
@@ -517,7 +521,7 @@ pub(crate) fn write_tag(tag: NbtTag, data: &mut Vec<u8>) {
let long_array =
unsafe { list::u32_prefixed_list_to_rawlist_unchecked::<i64>(el.ptr()).unwrap() };
unsafe {
extend_unchecked(data, &(long_array.len() as u32).to_be_bytes());
data.extend_from_slice_unchecked(&(long_array.len() as u32).to_be_bytes());
}
data.extend_from_slice(long_array.as_big_endian());
}
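Illustration (not part of the diff): a minimal, safe sketch of the reservation heuristic that write_fastvec above relies on. A single reserve covers the tag ID (1 byte), the u16 name length (2 bytes), the name itself, and up to 4 bytes of fixed-size payload, so byte/short/int/float entries never reallocate mid-entry; the real code then writes through the unchecked FastVec methods instead of plain Vec calls.

fn write_small_entry(out: &mut Vec<u8>, tag_id: u8, name: &[u8], payload: &[u8]) {
    debug_assert!(payload.len() <= 4, "only fixed-size payloads fit in the reserved slack");
    // one reservation for the whole entry header plus a small payload
    out.reserve(1 + 2 + name.len() + 4);
    out.push(tag_id);
    out.extend_from_slice(&(name.len() as u16).to_be_bytes());
    out.extend_from_slice(name);
    out.extend_from_slice(payload);
}

fn main() {
    let mut out = Vec::new();
    // an Int tag named "x" with value 7 (the NBT int tag ID is 3)
    write_small_entry(&mut out, 3, b"x", &7i32.to_be_bytes());
    assert_eq!(out, [3, 0, 1, b'x', 0, 0, 0, 7]);
}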


@@ -14,6 +14,7 @@ use crate::{
LIST_ID, LONG_ARRAY_ID, LONG_ID, SHORT_ID, STRING_ID,
},
error::NonRootError,
fastvec::{FastVec, FastVecFromVec},
raw_list::RawList,
reader::Reader,
swap_endianness::SwappableNumber,
@@ -156,14 +157,18 @@ impl<'a, 'tape> NbtList<'a, 'tape> {
Ok(())
}
pub fn write(&self, data: &mut Vec<u8>) {
pub fn write(self, data: &mut Vec<u8>) {
self.write_fastvec(&mut FastVecFromVec::new(data));
}
pub(crate) fn write_fastvec(&self, data: &mut FastVec<u8>) {
let el = self.element();
data.push(self.id());
match el.kind() {
TapeTagKind::EmptyList => {
data.extend(&0u32.to_be_bytes());
data.extend_from_slice(&0u32.to_be_bytes());
}
TapeTagKind::ByteList => {
write_with_u32_length(data, 1, slice_i8_into_u8(self.bytes().unwrap()));
@@ -228,14 +233,14 @@ impl<'a, 'tape> NbtList<'a, 'tape> {
TapeTagKind::ListList => {
let lists = self.lists().unwrap();
for list in lists {
list.write(data);
list.write_fastvec(data);
}
}
TapeTagKind::CompoundList => {
let compounds = self.compounds().unwrap();
write_u32(data, compounds.clone().len() as u32);
for compound in compounds {
compound.write(data);
compound.write_fastvec(data);
}
}
TapeTagKind::IntArrayList => {
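Illustration (not part of the diff): the list body written by write_fastvec is the element type ID, a big-endian u32 element count, and then the raw elements; an empty list uses TAG_End (0) as its element ID with a zero count. A safe sketch of that layout for a list of ints:

fn write_int_list_body(out: &mut Vec<u8>, values: &[i32]) {
    out.push(3); // element type: TAG_Int
    out.extend_from_slice(&(values.len() as u32).to_be_bytes());
    for v in values {
        out.extend_from_slice(&v.to_be_bytes());
    }
}

fn main() {
    let mut out = Vec::new();
    write_int_list_body(&mut out, &[1, 2]);
    assert_eq!(out, [3, 0, 0, 0, 2, 0, 0, 0, 1, 0, 0, 0, 2]);
}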


@@ -30,6 +30,7 @@ use crate::{
read_string, write_string, BYTE_ARRAY_ID, BYTE_ID, COMPOUND_ID, DOUBLE_ID, END_ID,
FLOAT_ID, INT_ARRAY_ID, INT_ID, LIST_ID, LONG_ARRAY_ID, LONG_ID, SHORT_ID, STRING_ID,
},
fastvec::{FastVec, FastVecFromVec},
reader::{Reader, ReaderFromCursor},
Error, Mutf8Str,
};
@@ -332,9 +333,13 @@ impl PartialEq for BaseNbt<'_> {
impl BaseNbt<'_> {
pub fn write(&self, data: &mut Vec<u8>) {
self.write_fastvec(&mut FastVecFromVec::new(data));
}
fn write_fastvec(&self, data: &mut FastVec<u8>) {
data.push(COMPOUND_ID);
write_string(data, self.name);
self.as_compound().write(data);
self.as_compound().write_fastvec(data);
data.push(END_ID);
}
}
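Illustration (not part of the diff): the root layout produced by BaseNbt::write_fastvec is the compound tag ID (10), the root name as a u16-length-prefixed string (MUTF-8 in the real crate, plain ASCII here for simplicity), the compound's entries, and a terminating TAG_End byte. A safe sketch for an empty root compound:

fn write_empty_root(out: &mut Vec<u8>, name: &[u8]) {
    out.push(10); // TAG_Compound
    out.extend_from_slice(&(name.len() as u16).to_be_bytes());
    out.extend_from_slice(name);
    // ...the compound's entries would be written here...
    out.push(0); // TAG_End closes the root compound
}

fn main() {
    let mut out = Vec::new();
    write_empty_root(&mut out, b"hello");
    assert_eq!(out, [10, 0, 5, b'h', b'e', b'l', b'l', b'o', 0]);
}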


@@ -2,6 +2,7 @@ use std::{mem, slice};
use crate::{
error::UnexpectedEofError,
fastvec::FastVec,
raw_list::RawList,
reader::Reader,
swap_endianness::{swap_endianness_as_u8, SwappableNumber},
@@ -75,19 +76,19 @@ pub fn slice_i8_into_u8(s: &[i8]) -> &[u8] {
}
#[inline(always)]
pub fn write_with_u32_length(data: &mut Vec<u8>, width: usize, value: &[u8]) {
pub fn write_with_u32_length(data: &mut FastVec<u8>, width: usize, value: &[u8]) {
let length = value.len() / width;
data.reserve(4 + value.len());
unsafe {
extend_unchecked(data, &(length as u32).to_be_bytes());
extend_unchecked(data, value);
data.extend_from_slice_unchecked(&(length as u32).to_be_bytes());
data.extend_from_slice_unchecked(value);
}
}
pub fn write_u32(data: &mut Vec<u8>, value: u32) {
pub fn write_u32(data: &mut FastVec<u8>, value: u32) {
data.extend_from_slice(&value.to_be_bytes());
}
pub fn write_string(data: &mut Vec<u8>, value: &Mutf8Str) {
pub fn write_string(data: &mut FastVec<u8>, value: &Mutf8Str) {
data.reserve(2 + value.len());
// SAFETY: We reserved enough capacity
unsafe {
@@ -103,33 +104,9 @@ pub fn write_string(data: &mut Vec<u8>, value: &Mutf8Str) {
/// You must reserve enough capacity (2 + value.len()) in the Vec before calling
/// this function.
#[inline]
pub unsafe fn write_string_unchecked(data: &mut Vec<u8>, value: &Mutf8Str) {
extend_unchecked(data, &(value.len() as u16).to_be_bytes());
extend_unchecked(data, value.as_bytes());
}
/// Extend a Vec<u8> with a slice of u8 without checking if the Vec has enough
/// capacity.
///
/// This optimization is barely measurable, but it does make it slightly faster!
///
/// # Safety
///
/// You must reserve enough capacity in the Vec before calling this function.
#[inline]
pub unsafe fn extend_unchecked(data: &mut Vec<u8>, value: &[u8]) {
let ptr = data.as_mut_ptr();
let len = data.len();
std::ptr::copy_nonoverlapping(value.as_ptr(), ptr.add(len), value.len());
data.set_len(len + value.len());
}
#[inline]
pub unsafe fn push_unchecked(data: &mut Vec<u8>, value: u8) {
let ptr = data.as_mut_ptr();
let len = data.len();
std::ptr::write(ptr.add(len), value);
data.set_len(len + 1);
pub unsafe fn write_string_unchecked(data: &mut FastVec<u8>, value: &Mutf8Str) {
data.extend_from_slice_unchecked(&(value.len() as u16).to_be_bytes());
data.extend_from_slice_unchecked(value.as_bytes());
}
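Illustration (not part of the diff): safe Vec-based equivalents of the two helpers above, showing the wire layout they produce, a big-endian u32 element count (not byte length) for arrays and a big-endian u16 byte length for strings. The real versions reserve once and then go through the unchecked FastVec writes.

fn write_with_u32_length_safe(out: &mut Vec<u8>, width: usize, value: &[u8]) {
    // the prefix is the element count, so divide the byte length by the element width
    let length = value.len() / width;
    out.extend_from_slice(&(length as u32).to_be_bytes());
    out.extend_from_slice(value);
}

fn write_string_safe(out: &mut Vec<u8>, value: &[u8]) {
    out.extend_from_slice(&(value.len() as u16).to_be_bytes());
    out.extend_from_slice(value);
}

fn main() {
    let mut out = Vec::new();
    // one i64 element: width 8, so the count prefix is 1
    write_with_u32_length_safe(&mut out, 8, &1i64.to_be_bytes());
    write_string_safe(&mut out, b"hi");
    assert_eq!(out, [0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 2, b'h', b'i']);
}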
/// Convert a slice of any type into a slice of u8. This will probably return


@@ -1,6 +1,8 @@
use std::{
alloc::{self, Allocator, Layout},
mem::{self},
fmt::Debug,
mem::{self, ManuallyDrop},
ops::{Deref, DerefMut},
ptr::NonNull,
};
@@ -17,13 +19,9 @@ pub struct FastVec<T, A: Allocator = alloc::Global> {
alloc: A,
}
impl<T> FastVec<T, alloc::Global> {
/// Create a new FastVec with the given capacity.
///
/// Note that a capacity of 0 is not supported (as it would require an extra
/// instruction that's usually unnecessary), and will be treated as 1.
pub fn with_capacity(capacity: usize) -> Self {
let capacity = capacity.min(1);
impl<T, A: Allocator> FastVec<T, A> {
pub fn with_capacity_in(capacity: usize, alloc: A) -> Self {
let capacity = capacity.max(1);
let element_size = mem::size_of::<T>();
let ptr = unsafe {
@@ -34,21 +32,19 @@ impl<T> FastVec<T, alloc::Global> {
))
};
let ptr = NonNull::new(ptr as *mut T).expect("allocation failed");
let end = unsafe { ptr.add(DEFAULT_CAPACITY) };
let end = unsafe { ptr.add(capacity) };
Self {
cur: ptr,
end,
ptr: ptr.cast(),
alloc: alloc::Global,
alloc,
}
}
}
impl<T, A: Allocator> FastVec<T, A> {
#[inline(always)]
pub fn push(&mut self, element: T) {
if self.cur == self.end {
self.grow()
unsafe { self.grow_at_max() }
}
unsafe { self.push_unchecked(element) };
@@ -56,12 +52,16 @@ impl<T, A: Allocator> FastVec<T, A> {
#[inline]
pub unsafe fn push_unchecked(&mut self, element: T) {
debug_assert!(self.cur != self.end);
unsafe { self.cur.write(element) };
self.cur = unsafe { self.cur.add(1) };
}
/// Grow the capacity slightly more efficiently if we already know the
/// length is the same as the capacity.
#[inline]
fn grow(&mut self) {
unsafe fn grow_at_max(&mut self) {
let old_cap = self.capacity();
let extending_by = old_cap;
let new_cap = old_cap + extending_by;
@@ -86,6 +86,33 @@ impl<T, A: Allocator> FastVec<T, A> {
self.end = unsafe { self.cur.add(extending_by) };
}
#[inline]
fn grow(&mut self) {
let old_cap = self.capacity();
let len = self.len();
let extending_by = old_cap;
let new_cap = old_cap + extending_by;
let element_size = mem::size_of::<T>();
let old_cap_bytes = old_cap * element_size;
let new_cap_bytes = new_cap * element_size;
let new_ptr = unsafe {
self.alloc.grow(
self.ptr,
Layout::from_size_align_unchecked(old_cap_bytes, element_size),
Layout::from_size_align_unchecked(new_cap_bytes, element_size),
)
};
let new_ptr = new_ptr.expect("allocation failed");
self.ptr = new_ptr.cast();
// update cur in case the ptr changed
let base = NonNull::new(self.ptr.as_ptr() as *mut T).unwrap();
self.cur = unsafe { base.add(len) };
// and end has to be updated anyways since we're updating the capacity
self.end = unsafe { base.add(new_cap) };
}
#[inline]
pub fn len(&self) -> usize {
unsafe { self.cur.offset_from(self.ptr.cast()) as usize }
@@ -118,9 +145,74 @@ impl<T, A: Allocator> FastVec<T, A> {
mem::forget(self);
vec
}
pub fn extend_from_slice(&mut self, slice: &[T])
where
T: Copy,
{
self.reserve(slice.len());
unsafe { self.extend_from_slice_unchecked(slice) };
}
/// Extend your FastVec with a slice without checking if it has enough
/// capacity.
///
/// This optimization is barely measurable, but it does make it slightly
/// faster!
///
/// # Safety
///
/// You must reserve enough capacity in the Vec before calling this
/// function.
pub unsafe fn extend_from_slice_unchecked(&mut self, slice: &[T])
where
T: Copy,
{
for element in slice {
// SAFETY: We just reserved enough space for the slice
unsafe { self.push_unchecked(*element) };
}
}
pub fn reserve(&mut self, additional: usize) {
let new_len = self.len() + additional;
while new_len > self.capacity() {
self.grow()
}
}
pub unsafe fn from_raw_parts_in(ptr: *mut T, len: usize, capacity: usize, alloc: A) -> Self {
if capacity == 0 {
// capacity 0 means the vec is probably a null pointer, create it ourselves
return Self::with_capacity_in(0, alloc);
}
let ptr = NonNull::new(ptr).expect("null pointer");
let end = unsafe { ptr.add(capacity) };
Self {
cur: ptr.add(len),
end,
ptr: ptr.cast(),
alloc: alloc,
}
}
}
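Illustration (not part of the diff): a safe, index-based model of the growth policy above — the capacity starts at at least 1, doubles whenever it runs out, and reserve keeps growing until the requested length fits. The real FastVec expresses the same thing with raw cur/end pointers and Allocator::grow so that push compiles down to a single compare plus write.

struct ModelVec {
    buf: Vec<u8>, // stands in for the raw allocation
    len: usize,
    cap: usize,
}

impl ModelVec {
    fn with_capacity(capacity: usize) -> Self {
        let cap = capacity.max(1); // capacity 0 is rounded up, as in with_capacity_in
        ModelVec { buf: vec![0; cap], len: 0, cap }
    }

    fn grow(&mut self) {
        // double the capacity, mirroring grow()/grow_at_max()
        self.cap *= 2;
        self.buf.resize(self.cap, 0);
    }

    fn reserve(&mut self, additional: usize) {
        let new_len = self.len + additional;
        while new_len > self.cap {
            self.grow();
        }
    }

    fn push(&mut self, byte: u8) {
        if self.len == self.cap {
            self.grow();
        }
        self.buf[self.len] = byte;
        self.len += 1;
    }
}

fn main() {
    let mut v = ModelVec::with_capacity(0);
    v.reserve(5);
    for b in 0..5 {
        v.push(b);
    }
    assert_eq!(v.cap, 8); // 1 -> 2 -> 4 -> 8 while reserving room for 5
    assert_eq!(&v.buf[..v.len], &[0, 1, 2, 3, 4]);
}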
const DEFAULT_CAPACITY: usize = 1024;
impl<T> FastVec<T, alloc::Global> {
/// Create a new FastVec with the given capacity.
///
/// Note that a capacity of 0 is not supported (as it would require an extra
/// instruction that's usually unnecessary), and will be treated as 1.
pub fn with_capacity(capacity: usize) -> Self {
Self::with_capacity_in(capacity, alloc::Global)
}
pub unsafe fn from_raw_parts(ptr: *mut T, len: usize, capacity: usize) -> Self {
Self::from_raw_parts_in(ptr, len, capacity, alloc::Global)
}
}
const DEFAULT_CAPACITY: usize = 1;
impl<T> Default for FastVec<T> {
fn default() -> Self {
@@ -167,3 +259,39 @@ impl<T> From<FastVec<T>> for Vec<T> {
fastvec.to_vec()
}
}
pub struct FastVecFromVec<'orig, T> {
fastvec: ManuallyDrop<FastVec<T>>,
original: &'orig mut Vec<T>,
}
impl<'orig, T> FastVecFromVec<'orig, T> {
pub fn new(data: &'orig mut Vec<T>) -> Self {
Self {
fastvec: ManuallyDrop::new(unsafe {
FastVec::from_raw_parts(data.as_mut_ptr(), data.len(), data.capacity())
}),
original: data,
}
}
}
impl<T> Drop for FastVecFromVec<'_, T> {
fn drop(&mut self) {
// we intentionally don't drop the fastvec since the allocation is moved into
// the vec
*self.original = unsafe { ManuallyDrop::take(&mut self.fastvec).to_vec() };
}
}
impl<T> Deref for FastVecFromVec<'_, T> {
type Target = FastVec<T>;
fn deref(&self) -> &Self::Target {
&self.fastvec
}
}
impl<T> DerefMut for FastVecFromVec<'_, T> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.fastvec
}
}
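Illustration (not part of the diff): FastVecFromVec above borrows a Vec's allocation, exposes it as a FastVec for the duration of a write, and hands the (possibly reallocated) buffer back when it is dropped. A safe model of the same guard pattern, using mem::take instead of raw parts:

use std::mem;

struct WriteGuard<'orig> {
    buf: Vec<u8>,                 // the buffer actually written to
    original: &'orig mut Vec<u8>, // where the buffer is returned on drop
}

impl<'orig> WriteGuard<'orig> {
    fn new(data: &'orig mut Vec<u8>) -> Self {
        let buf = mem::take(data); // take over the caller's existing allocation
        WriteGuard { buf, original: data }
    }
}

impl Drop for WriteGuard<'_> {
    fn drop(&mut self) {
        // hand the buffer (including anything appended to it) back to the caller's Vec
        *self.original = mem::take(&mut self.buf);
    }
}

fn main() {
    let mut out = vec![1u8];
    {
        let mut guard = WriteGuard::new(&mut out);
        guard.buf.push(2);
    }
    assert_eq!(out, [1, 2]);
}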


@@ -2,8 +2,9 @@ use std::mem::{self, MaybeUninit};
use super::{list::NbtList, NbtTag};
use crate::{
common::{push_unchecked, read_string, write_string_unchecked, END_ID, MAX_DEPTH},
common::{read_string, write_string_unchecked, END_ID, MAX_DEPTH},
error::NonRootError,
fastvec::{FastVec, FastVecFromVec},
mutf8::Mutf8String,
reader::Reader,
Mutf8Str, ToNbtTag,
@@ -77,13 +78,17 @@ impl NbtCompound {
}
pub fn write(&self, data: &mut Vec<u8>) {
self.write_fastvec(&mut FastVecFromVec::new(data));
}
pub(crate) fn write_fastvec(&self, data: &mut FastVec<u8>) {
for (name, tag) in &self.values {
// reserve 4 bytes extra so we can avoid reallocating for small tags
data.reserve(1 + 2 + name.len() + 4);
// SAFETY: We just reserved enough space for the tag ID, the name length, the
// name, and 4 bytes of tag data.
unsafe {
push_unchecked(data, tag.id());
data.push_unchecked(tag.id());
write_string_unchecked(data, name);
tag.write_without_tag_type_unchecked(data);
}


@@ -1,13 +1,13 @@
use super::{compound::NbtCompound, MAX_DEPTH};
use crate::{
common::{
extend_unchecked, push_unchecked, read_i8_array, read_int_array, read_long_array,
read_string, read_u8_array, read_with_u32_length, slice_i8_into_u8,
slice_into_u8_big_endian, write_string, write_u32, write_with_u32_length, BYTE_ARRAY_ID,
BYTE_ID, COMPOUND_ID, DOUBLE_ID, END_ID, FLOAT_ID, INT_ARRAY_ID, INT_ID, LIST_ID,
LONG_ARRAY_ID, LONG_ID, SHORT_ID, STRING_ID,
read_i8_array, read_int_array, read_long_array, read_string, read_u8_array,
read_with_u32_length, slice_i8_into_u8, slice_into_u8_big_endian, write_string, write_u32,
write_with_u32_length, BYTE_ARRAY_ID, BYTE_ID, COMPOUND_ID, DOUBLE_ID, END_ID, FLOAT_ID,
INT_ARRAY_ID, INT_ID, LIST_ID, LONG_ARRAY_ID, LONG_ID, SHORT_ID, STRING_ID,
},
error::NonRootError,
fastvec::{FastVec, FastVecFromVec},
mutf8::Mutf8String,
reader::Reader,
swap_endianness::swap_endianness,
@@ -111,16 +111,20 @@ impl NbtList {
}
pub fn write(&self, data: &mut Vec<u8>) {
self.write_fastvec(&mut FastVecFromVec::new(data));
}
pub(crate) fn write_fastvec(&self, data: &mut FastVec<u8>) {
// fast path for compound since it's very common to have lists of compounds
if let NbtList::Compound(compounds) = self {
data.reserve(5);
// SAFETY: we just reserved 5 bytes
unsafe {
push_unchecked(data, COMPOUND_ID);
extend_unchecked(data, &(compounds.len() as u32).to_be_bytes());
data.push_unchecked(COMPOUND_ID);
data.extend_from_slice_unchecked(&(compounds.len() as u32).to_be_bytes());
}
for compound in compounds {
compound.write(data);
compound.write_fastvec(data);
}
return;
}
@@ -128,7 +132,7 @@ impl NbtList {
data.push(self.id());
match self {
NbtList::Empty => {
data.extend(&0u32.to_be_bytes());
data.extend_from_slice(&0u32.to_be_bytes());
}
NbtList::Byte(bytes) => {
write_with_u32_length(data, 1, slice_i8_into_u8(bytes));
@@ -163,7 +167,7 @@ NbtList::List(lists) => {
NbtList::List(lists) => {
write_u32(data, lists.len() as u32);
for list in lists {
list.write(data);
list.write_fastvec(data);
}
}
NbtList::Compound(_) => {


@@ -9,12 +9,13 @@ use std::{io::Cursor, ops::Deref};
pub use self::{compound::NbtCompound, list::NbtList};
use crate::{
common::{
extend_unchecked, push_unchecked, read_int_array, read_long_array, read_string,
read_with_u32_length, slice_into_u8_big_endian, write_string, BYTE_ARRAY_ID, BYTE_ID,
COMPOUND_ID, DOUBLE_ID, END_ID, FLOAT_ID, INT_ARRAY_ID, INT_ID, LIST_ID, LONG_ARRAY_ID,
LONG_ID, MAX_DEPTH, SHORT_ID, STRING_ID,
read_int_array, read_long_array, read_string, read_with_u32_length,
slice_into_u8_big_endian, write_string, BYTE_ARRAY_ID, BYTE_ID, COMPOUND_ID, DOUBLE_ID,
END_ID, FLOAT_ID, INT_ARRAY_ID, INT_ID, LIST_ID, LONG_ARRAY_ID, LONG_ID, MAX_DEPTH,
SHORT_ID, STRING_ID,
},
error::NonRootError,
fastvec::{FastVec, FastVecFromVec},
mutf8::Mutf8String,
reader::{Reader, ReaderFromCursor},
Error, Mutf8Str,
@@ -205,9 +206,14 @@ impl BaseNbt {
/// Writes the NBT to the given buffer.
pub fn write(&self, data: &mut Vec<u8>) {
self.write_fastvec(&mut FastVecFromVec::new(data));
}
/// Writes the NBT to the given buffer.
fn write_fastvec(&self, data: &mut FastVec<u8>) {
data.push(COMPOUND_ID);
write_string(data, &self.name);
self.tag.write(data);
self.tag.write_fastvec(data);
}
pub fn write_unnamed(&self, data: &mut Vec<u8>) {
@@ -329,61 +335,59 @@ impl NbtTag {
/// space in the data. 4 bytes MUST be reserved before calling this
/// function.
#[inline]
unsafe fn write_without_tag_type_unchecked(&self, data: &mut Vec<u8>) {
unsafe fn write_without_tag_type_unchecked(&self, data: &mut FastVec<u8>) {
match self {
NbtTag::Byte(byte) => unsafe {
push_unchecked(data, *byte as u8);
data.push_unchecked(*byte as u8);
},
NbtTag::Short(short) => unsafe {
extend_unchecked(data, &short.to_be_bytes());
data.extend_from_slice_unchecked(&short.to_be_bytes());
},
NbtTag::Int(int) => unsafe {
extend_unchecked(data, &int.to_be_bytes());
data.extend_from_slice_unchecked(&int.to_be_bytes());
},
NbtTag::Long(long) => {
data.extend_from_slice(&long.to_be_bytes());
}
NbtTag::Float(float) => unsafe {
extend_unchecked(data, &float.to_be_bytes());
data.extend_from_slice_unchecked(&float.to_be_bytes());
},
NbtTag::Double(double) => {
data.extend_from_slice(&double.to_be_bytes());
}
NbtTag::ByteArray(byte_array) => {
unsafe {
extend_unchecked(data, &(byte_array.len() as u32).to_be_bytes());
}
data.extend_from_slice_unchecked(&(byte_array.len() as u32).to_be_bytes());
data.extend_from_slice(byte_array);
}
NbtTag::String(string) => {
write_string(data, string);
}
NbtTag::List(list) => {
list.write(data);
list.write_fastvec(data);
}
NbtTag::Compound(compound) => {
compound.write(data);
compound.write_fastvec(data);
}
NbtTag::IntArray(int_array) => {
unsafe {
extend_unchecked(data, &(int_array.len() as u32).to_be_bytes());
}
data.extend_from_slice_unchecked(&(int_array.len() as u32).to_be_bytes());
data.extend_from_slice(&slice_into_u8_big_endian(int_array));
}
NbtTag::LongArray(long_array) => {
unsafe {
extend_unchecked(data, &(long_array.len() as u32).to_be_bytes());
}
data.extend_from_slice_unchecked(&(long_array.len() as u32).to_be_bytes());
data.extend_from_slice(&slice_into_u8_big_endian(long_array));
}
}
}
pub fn write(&self, data: &mut Vec<u8>) {
self.write_fastvec(&mut FastVecFromVec::new(data));
}
fn write_fastvec(&self, data: &mut FastVec<u8>) {
data.reserve(1 + 4);
// SAFETY: We just reserved enough space for the tag ID and 4 bytes of tag data.
unsafe {
push_unchecked(data, self.id());
data.push_unchecked(self.id());
self.write_without_tag_type_unchecked(data);
}
}
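Usage sketch (assuming the owned NbtTag shown above is exposed as simdnbt::owned::NbtTag, as in the crate's public API): the signatures of the public write methods are unchanged by this commit, so callers keep passing a plain Vec<u8> and the FastVec wrapping stays internal.

use simdnbt::owned::NbtTag;

fn main() {
    let mut buf = Vec::new();
    NbtTag::Int(7).write(&mut buf);
    // tag ID 3 (Int) followed by the big-endian value
    assert_eq!(buf, [3, 0, 0, 0, 7]);
}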