mirror of https://github.com/mat-1/azalea.git synced 2025-08-02 06:16:04 +00:00
mat 2021-12-18 20:33:16 -06:00
parent 76e1985fc4
commit b030b9de93
10 changed files with 181 additions and 109 deletions

Cargo.lock (generated)
View file

@@ -85,6 +85,7 @@ name = "azalea-nbt"
 version = "0.1.0"
 dependencies = [
  "byteorder",
+ "flate2",
  "num-derive",
  "num-traits",
 ]

View file

@@ -7,5 +7,6 @@ version = "0.1.0"
 [dependencies]
 byteorder = "^1.4.3"
+flate2 = "^1.0.22"
 num-derive = "^0.3.3"
 num-traits = "^0.2.14"

View file

@@ -1,10 +1,12 @@
 use crate::Error;
 use crate::Tag;
 use byteorder::{ReadBytesExt, BE};
+use flate2::read::{GzDecoder, ZlibDecoder};
 use std::{collections::HashMap, io::Read};

 impl Tag {
     fn read_known(stream: &mut impl Read, id: u8) -> Result<Tag, Error> {
+        println!("read_known: id={}", id);
         let tag = match id {
             // Signifies the end of a TAG_Compound. It is only ever used inside
             // a TAG_Compound, and is not named despite being in a TAG_Compound
@@ -65,7 +67,9 @@ impl Tag {
             10 => {
                 let mut map = HashMap::new();
                 loop {
+                    println!("compound loop");
                     let tag_id = stream.read_u8().unwrap_or(0);
+                    println!("compound loop tag_id={}", tag_id);
                     if tag_id == 0 {
                         break;
                     }
@@ -108,4 +112,14 @@ impl Tag {
         // default to compound tag
         Tag::read_known(stream, 10)
     }
+
+    pub fn read_zlib(stream: &mut impl Read) -> Result<Tag, Error> {
+        let mut gz = ZlibDecoder::new(stream);
+        Tag::read(&mut gz)
+    }
+
+    pub fn read_gzip(stream: &mut impl Read) -> Result<Tag, Error> {
+        let mut gz = GzDecoder::new(stream);
+        Tag::read(&mut gz)
+    }
 }
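For orientation, here is a minimal usage sketch of the decompressing readers added above. It is not part of the commit; the file path is illustrative, and it assumes a gzip-compressed NBT file (an uncompressed file would go through Tag::read instead).

use azalea_nbt::Tag;
use std::fs::File;

fn main() {
    // open a gzip-compressed NBT file (the path is just an example)
    let mut file = File::open("level.dat").expect("couldn't open file");
    // read_gzip wraps the stream in a GzDecoder and reuses Tag::read
    let tag = Tag::read_gzip(&mut file).unwrap();
    println!("{:?}", tag);
}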

View file

@@ -1,117 +1,112 @@
-use byteorder::{ReadBytesExt, BE};
-use error::Error;
-use std::{collections::HashMap, io::Read};
-use tag::Tag;
-
-impl Tag {
-    fn write(&self, stream: &mut impl Read) -> Result<(), Error> {
-        println!("read_known: id={}", id);
-        let tag = match id {
-            // Signifies the end of a TAG_Compound. It is only ever used inside
-            // a TAG_Compound, and is not named despite being in a TAG_Compound
-            0 => Tag::End,
-            // A single signed byte
-            1 => Tag::Byte(stream.read_i8().map_err(|_| Error::InvalidTag)?),
-            // A single signed, big endian 16 bit integer
-            2 => Tag::Short(stream.read_i16::<BE>().map_err(|_| Error::InvalidTag)?),
-            // A single signed, big endian 32 bit integer
-            3 => Tag::Int(stream.read_i32::<BE>().map_err(|_| Error::InvalidTag)?),
-            // A single signed, big endian 64 bit integer
-            4 => Tag::Long(stream.read_i64::<BE>().map_err(|_| Error::InvalidTag)?),
-            // A single, big endian IEEE-754 single-precision floating point
-            // number (NaN possible)
-            5 => Tag::Float(stream.read_f32::<BE>().map_err(|_| Error::InvalidTag)?),
-            // A single, big endian IEEE-754 double-precision floating point
-            // number (NaN possible)
-            6 => Tag::Double(stream.read_f64::<BE>().map_err(|_| Error::InvalidTag)?),
-            // A length-prefixed array of signed bytes. The prefix is a signed
-            // integer (thus 4 bytes)
-            7 => {
-                let length = stream.read_i32::<BE>().map_err(|_| Error::InvalidTag)?;
-                let mut bytes = Vec::new();
-                for _ in 0..length {
-                    bytes.push(stream.read_i8().map_err(|_| Error::InvalidTag)?);
-                }
-                Tag::ByteArray(bytes)
-            }
-            // A length-prefixed modified UTF-8 string. The prefix is an
-            // unsigned short (thus 2 bytes) signifying the length of the
-            // string in bytes
-            8 => {
-                let length = stream.read_u16::<BE>().map_err(|_| Error::InvalidTag)?;
-                let mut bytes = Vec::new();
-                for _ in 0..length {
-                    bytes.push(stream.read_u8().map_err(|_| Error::InvalidTag)?);
-                }
-                Tag::String(String::from_utf8(bytes).map_err(|_| Error::InvalidTag)?)
-            }
-            // A list of nameless tags, all of the same type. The list is
-            // prefixed with the Type ID of the items it contains (thus 1 for
-            // byte), and the length of the list as a signed integer (a further
-            // 4 bytes). If the length of the list is 0 or negative, the type
-            // may be 0 (TAG_End) but otherwise it must be any other type. (The
-            // notchian implementation uses TAG_End in that situation, but
-            // another reference implementation by Mojang uses 1 instead;
-            // parsers should accept any type if the length is <= 0).
-            9 => {
-                let type_id = stream.read_u8().map_err(|_| Error::InvalidTag)?;
-                let length = stream.read_i32::<BE>().map_err(|_| Error::InvalidTag)?;
-                let mut list = Vec::new();
-                for _ in 0..length {
-                    list.push(Tag::read_known(stream, type_id)?);
-                }
-                Tag::List(list)
-            }
-            // Effectively a list of a named tags. Order is not guaranteed.
-            10 => {
-                println!("reading compound {{");
-                let mut map = HashMap::new();
-                loop {
-                    let tag_id = stream.read_u8().unwrap_or(0);
-                    println!("compound tag id: {}", tag_id);
-                    if tag_id == 0 {
-                        break;
-                    }
-                    let name = match Tag::read_known(stream, 8)? {
-                        Tag::String(name) => name,
-                        _ => panic!("Expected a string tag"),
-                    };
-                    println!("compound name: {}", name);
-                    let tag = Tag::read_known(stream, tag_id).map_err(|_| Error::InvalidTag)?;
-                    println!("aight read tag: {:?}", tag);
-                    map.insert(name, tag);
-                }
-                println!("}} compound map: {:?}", map);
-                Tag::Compound(map)
-            }
-            // A length-prefixed array of signed integers. The prefix is a
-            // signed integer (thus 4 bytes) and indicates the number of 4 byte
-            // integers.
-            11 => {
-                let length = stream.read_i32::<BE>().map_err(|_| Error::InvalidTag)?;
-                let mut ints = Vec::new();
-                for _ in 0..length {
-                    ints.push(stream.read_i32::<BE>().map_err(|_| Error::InvalidTag)?);
-                }
-                Tag::IntArray(ints)
-            }
-            // A length-prefixed array of signed longs. The prefix is a signed
-            // integer (thus 4 bytes) and indicates the number of 8 byte longs.
-            12 => {
-                let length = stream.read_i32::<BE>().map_err(|_| Error::InvalidTag)?;
-                let mut longs = Vec::new();
-                for _ in 0..length {
-                    longs.push(stream.read_i64::<BE>().map_err(|_| Error::InvalidTag)?);
-                }
-                Tag::LongArray(longs)
-            }
-            _ => return Err(Error::InvalidTagType(id)),
-        };
-        Ok(tag)
-    }
-
-    pub fn read(stream: &mut impl Read) -> Result<Tag, Error> {
-        // default to compound tag
-        Tag::read_known(stream, 10)
-    }
-}
+use crate::Error;
+use crate::Tag;
+use byteorder::{WriteBytesExt, BE};
+use flate2::write::{GzEncoder, ZlibEncoder};
+use std::io::Write;
+
+impl Tag {
+    pub fn write_without_end(&self, writer: &mut dyn Write) -> Result<(), Error> {
+        match self {
+            Tag::End => {}
+            Tag::Byte(value) => writer.write_i8(*value).map_err(|_| Error::WriteError)?,
+            Tag::Short(value) => writer
+                .write_i16::<BE>(*value)
+                .map_err(|_| Error::WriteError)?,
+            Tag::Int(value) => writer
+                .write_i32::<BE>(*value)
+                .map_err(|_| Error::WriteError)?,
+            Tag::Long(value) => writer
+                .write_i64::<BE>(*value)
+                .map_err(|_| Error::WriteError)?,
+            Tag::Float(value) => writer
+                .write_f32::<BE>(*value)
+                .map_err(|_| Error::WriteError)?,
+            Tag::Double(value) => writer
+                .write_f64::<BE>(*value)
+                .map_err(|_| Error::WriteError)?,
+            Tag::ByteArray(value) => {
+                writer
+                    .write_i32::<BE>(value.len() as i32)
+                    .map_err(|_| Error::WriteError)?;
+                for byte in value {
+                    writer.write_i8(*byte).map_err(|_| Error::WriteError)?;
+                }
+            }
+            Tag::String(value) => {
+                writer
+                    .write_i16::<BE>(value.len() as i16)
+                    .map_err(|_| Error::WriteError)?;
+                writer
+                    .write_all(value.as_bytes())
+                    .map_err(|_| Error::WriteError)?;
+            }
+            Tag::List(value) => {
+                // we just get the type from the first item, or default the type to END
+                let type_id = value.first().and_then(|f| Some(f.id())).unwrap_or(0);
+                writer.write_u8(type_id).map_err(|_| Error::WriteError)?;
+                writer
+                    .write_i32::<BE>(value.len() as i32)
+                    .map_err(|_| Error::WriteError)?;
+                for tag in value {
+                    tag.write_without_end(writer)?;
+                }
+            }
+            Tag::Compound(value) => {
+                for (key, tag) in value {
+                    writer.write_u8(tag.id()).map_err(|_| Error::WriteError)?;
+                    Tag::String(key.clone()).write_without_end(writer)?;
+                    tag.write_without_end(writer)?;
+                }
+                writer
+                    .write_u8(Tag::End.id())
+                    .map_err(|_| Error::WriteError)?;
+            }
+            Tag::IntArray(value) => {
+                writer
+                    .write_i32::<BE>(value.len() as i32)
+                    .map_err(|_| Error::WriteError)?;
+                for int in value {
+                    writer
+                        .write_i32::<BE>(*int)
+                        .map_err(|_| Error::WriteError)?;
+                }
+            }
+            Tag::LongArray(value) => {
+                writer
+                    .write_i32::<BE>(value.len() as i32)
+                    .map_err(|_| Error::WriteError)?;
+                for long in value {
+                    writer
+                        .write_i64::<BE>(*long)
+                        .map_err(|_| Error::WriteError)?;
+                }
+            }
+        }
+
+        Ok(())
+    }
+
+    pub fn write(&self, writer: &mut impl Write) -> Result<(), Error> {
+        match self {
+            Tag::Compound(value) => {
+                for (key, tag) in value {
+                    writer.write_u8(tag.id()).map_err(|_| Error::WriteError)?;
+                    Tag::String(key.clone()).write_without_end(writer)?;
+                    tag.write_without_end(writer)?;
+                }
+                Ok(())
+            }
+            _ => Err(Error::InvalidTag),
+        }
+    }
+
+    pub fn write_zlib(&self, writer: &mut impl Write) -> Result<(), Error> {
+        let mut encoder = ZlibEncoder::new(writer, flate2::Compression::default());
+        self.write(&mut encoder)
+    }
+
+    pub fn write_gzip(&self, writer: &mut impl Write) -> Result<(), Error> {
+        let mut encoder = GzEncoder::new(writer, flate2::Compression::default());
+        self.write(&mut encoder)
+    }
+}
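For a sense of how the new encoder pairs with the decoder, here is a small round-trip sketch (not part of the commit; the tag contents and the in-memory Cursor buffer are illustrative). Note that write only accepts a root Tag::Compound, matching how read defaults to a compound tag.

use azalea_nbt::Tag;
use std::collections::HashMap;
use std::io::Cursor;

fn main() {
    // build a small root compound (contents are illustrative)
    let mut map = HashMap::new();
    map.insert("name".to_string(), Tag::String("example".to_string()));
    let tag = Tag::Compound(map);

    // write it gzip-compressed into an in-memory buffer
    let mut buf = Cursor::new(Vec::new());
    tag.write_gzip(&mut buf).unwrap();

    // read it back with the decoder from earlier in this commit
    let mut readable = Cursor::new(buf.into_inner());
    let roundtripped = Tag::read_gzip(&mut readable).unwrap();
    assert_eq!(roundtripped, tag);
}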

View file

@@ -2,4 +2,5 @@
 pub enum Error {
     InvalidTagType(u8),
     InvalidTag,
+    WriteError,
 }

View file

@@ -1,4 +1,5 @@
 mod decode;
+mod encode;
 mod error;
 mod tag;

View file

@@ -16,3 +16,23 @@ pub enum Tag {
     IntArray(Vec<i32>),  // 11
     LongArray(Vec<i64>), // 12
 }
+
+impl Tag {
+    pub fn id(&self) -> u8 {
+        match self {
+            Tag::End => 0,
+            Tag::Byte(value) => 1,
+            Tag::Short(value) => 2,
+            Tag::Int(value) => 3,
+            Tag::Long(value) => 4,
+            Tag::Float(value) => 5,
+            Tag::Double(value) => 6,
+            Tag::ByteArray(value) => 7,
+            Tag::String(value) => 8,
+            Tag::List(value) => 9,
+            Tag::Compound(value) => 10,
+            Tag::IntArray(value) => 11,
+            Tag::LongArray(value) => 12,
+        }
+    }
+}
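A quick illustration (not from the commit) of the numeric IDs: the encoder above relies on id() to prefix each named tag inside a compound and to write the element-type byte of a list.

use azalea_nbt::Tag;

fn main() {
    // the IDs follow the NBT wire format's tag type numbers
    assert_eq!(Tag::End.id(), 0);
    assert_eq!(Tag::Byte(5).id(), 1);
    assert_eq!(Tag::String("hi".to_string()).id(), 8);
    assert_eq!(Tag::Compound(Default::default()).id(), 10);
}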

View file

@@ -1,8 +1,15 @@
 use azalea_nbt::Tag;
-use std::collections::HashMap;
+use flate2::{
+    read::{GzDecoder, ZlibDecoder},
+    write::{GzEncoder, ZlibEncoder},
+};
+use std::{
+    collections::HashMap,
+    io::{Cursor, Read},
+};

 #[test]
-fn test_hello_world() {
+fn test_decode_hello_world() {
     // read hello_world.nbt
     let mut file = std::fs::File::open("tests/hello_world.nbt").unwrap();
     let tag = Tag::read(&mut file).unwrap();
@@ -17,3 +24,39 @@ fn test_hello_world()
         )]))
     );
 }
+
+#[test]
+fn test_roundtrip_hello_world() {
+    let mut file = std::fs::File::open("tests/hello_world.nbt").unwrap();
+    let mut original = Vec::new();
+    file.read_to_end(&mut original).unwrap();
+
+    let mut original_stream = Cursor::new(original.clone());
+    let tag = Tag::read(&mut original_stream).unwrap();
+    println!("ok read {:?}", tag);
+
+    // write hello_world.nbt
+    let mut result = Cursor::new(Vec::new());
+    tag.write(&mut result).unwrap();
+
+    assert_eq!(result.into_inner(), original);
+}
+
+#[test]
+fn test_bigtest() {
+    // read bigtest.nbt
+    let mut file = std::fs::File::open("tests/bigtest.nbt").unwrap();
+    let mut original = Vec::new();
+    file.read_to_end(&mut original).unwrap();
+
+    let mut original_stream = Cursor::new(original.clone());
+    let original_tag = Tag::read_gzip(&mut original_stream).unwrap();
+
+    let mut result = Vec::new();
+    original_tag.write(&mut result).unwrap();
+
+    let decoded_tag = Tag::read(&mut Cursor::new(result)).unwrap();
+
+    assert_eq!(decoded_tag, original_tag);
+}

View file

@@ -1,8 +1,6 @@
-use std::io::Cursor;
-
 use crate::{connect::PacketFlow, mc_buf::Readable, packets::ProtocolPacket};
 use async_compression::tokio::bufread::ZlibDecoder;
-use tokio::io::{AsyncRead, AsyncReadExt, BufReader};
+use tokio::io::{AsyncRead, AsyncReadExt};

 async fn frame_splitter<R>(stream: &mut R) -> Result<Vec<u8>, String>
 where

View file

@@ -1,9 +1,7 @@
-use std::io::Read;
-
 use crate::{mc_buf::Writable, packets::ProtocolPacket, read::MAXIMUM_UNCOMPRESSED_LENGTH};
 use async_compression::tokio::bufread::ZlibEncoder;
 use tokio::{
-    io::{AsyncRead, AsyncReadExt, AsyncWrite, AsyncWriteExt},
+    io::{AsyncReadExt, AsyncWriteExt},
     net::TcpStream,
 };