Mirror of https://github.com/mat-1/azalea.git, synced 2025-08-02 06:16:04 +00:00
Commit f27c87b291 (parent e37524899e): more formatting fixes
33 changed files with 1410 additions and 1094 deletions
@@ -716,10 +716,10 @@ pub fn make_block_states(input: TokenStream) -> TokenStream {
     // ```
     // match state_id {
     // // this is just an example of how it might look, these state ids are definitely not correct
-    // 0|3|6 => Some(Self::Axis::X),
-    // 1|4|7 => Some(Self::Axis::Y),
-    // 2|5|8 => Some(Self::Axis::Z),
-    // _ => None
+    // 0 | 3 | 6 => Some(Self::Axis::X),
+    // 1 | 4 | 7 => Some(Self::Axis::Y),
+    // 2 | 5 | 8 => Some(Self::Axis::Z),
+    // _ => None,
     // }
     // ```
     let mut property_impls = quote! {};
@@ -63,9 +63,7 @@ impl<S> ArgumentBuilder<S> {
 /// ```
 /// # use azalea_brigadier::prelude::*;
 /// # let mut subject = CommandDispatcher::<()>::new();
-/// literal("foo").then(
-///     literal("bar").executes(|ctx: &CommandContext<()>| 42)
-/// )
+/// literal("foo").then(literal("bar").executes(|ctx: &CommandContext<()>| 42))
 /// # ;
 /// ```
 pub fn then(self, argument: ArgumentBuilder<S>) -> Self {
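For reference, a minimal sketch of how the reformatted `then` example would be exercised end to end. It assumes `azalea_brigadier`'s prelude exposes `CommandDispatcher`, `literal`, and `CommandContext` as in the doc comment above; the `register` and `execute` calls (and the `Arc`-wrapped source) are assumptions based on upstream Brigadier's API, not taken from this diff:

```rust
use std::sync::Arc;

use azalea_brigadier::prelude::*;

fn main() {
    let mut dispatcher = CommandDispatcher::<()>::new();
    // `then` nests "bar" under "foo", so the registered command is "foo bar".
    dispatcher.register(literal("foo").then(literal("bar").executes(|_: &CommandContext<()>| 42)));
    // Dispatching the full input runs the leaf's closure and returns its i32
    // (assumed signature, mirroring upstream Brigadier).
    let result = dispatcher.execute("foo bar", Arc::new(()));
    assert_eq!(result.unwrap(), 42);
}
```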
@@ -168,7 +168,7 @@ impl Client {
 /// # Examples
 ///
 /// ```rust,no_run
-/// use azalea_client::{Client, Account};
+/// use azalea_client::{Account, Client};
 ///
 /// #[tokio::main]
 /// async fn main() -> Result<(), Box<dyn std::error::Error>> {
@@ -43,8 +43,8 @@ impl Client {
 /// Note that this will very likely change in the future.
 /// ```
 /// use azalea_client::{Client, GameProfileComponent};
-/// use bevy_ecs::query::With;
 /// use azalea_entity::{Position, metadata::Player};
+/// use bevy_ecs::query::With;
 ///
 /// # fn example(mut bot: Client, sender_name: String) {
 /// let entity = bot.entity_by::<With<Player>, (&GameProfileComponent,)>(
@@ -21,10 +21,7 @@ use crate::{PlayerInfo, client::InGameState, connection::RawConnection};
 /// # use bevy_ecs::event::EventReader;
 ///
 /// fn handle_packets(mut events: EventReader<ReceiveGamePacketEvent>) {
-///     for ReceiveGamePacketEvent {
-///         entity,
-///         packet,
-///     } in events.read() {
+///     for ReceiveGamePacketEvent { entity, packet } in events.read() {
 ///         match packet.as_ref() {
 ///             ClientboundGamePacket::LevelParticles(p) => {
 ///                 // ...
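The fold above relies on destructuring directly in a `for` pattern; a tiny self-contained illustration of the idiom (toy `Event` type, not azalea's):

```rust
// Toy stand-in for ReceiveGamePacketEvent, just to show the pattern.
struct Event {
    entity: u32,
    packet: String,
}

fn main() {
    let events = vec![
        Event { entity: 1, packet: "LevelParticles".to_string() },
        Event { entity: 2, packet: "SetHealth".to_string() },
    ];
    // Destructure each item directly in the `for` pattern, as the
    // reformatted doc example does on a single line.
    for Event { entity, packet } in &events {
        println!("entity {entity}: {packet}");
    }
}
```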
@@ -376,11 +376,8 @@ impl BlockPos {
 /// ```
 /// # use azalea_core::position::BlockPos;
 /// assert_eq!(
-///     BlockPos::min(
-///         &BlockPos::new(1, 20, 300),
-///         &BlockPos::new(50, 40, 30),
-///     ),
-///     BlockPos::new(1, 20, 30),
+///     BlockPos::min(&BlockPos::new(1, 20, 300), &BlockPos::new(50, 40, 30),),
+///     BlockPos::new(1, 20, 30),
 /// );
 /// ```
 pub fn min(&self, other: &Self) -> Self {
@@ -396,11 +393,8 @@ impl BlockPos {
 /// ```
 /// # use azalea_core::position::BlockPos;
 /// assert_eq!(
-///     BlockPos::max(
-///         &BlockPos::new(1, 20, 300),
-///         &BlockPos::new(50, 40, 30),
-///     ),
-///     BlockPos::new(50, 40, 300),
+///     BlockPos::max(&BlockPos::new(1, 20, 300), &BlockPos::new(50, 40, 30),),
+///     BlockPos::new(50, 40, 300),
 /// );
 /// ```
 pub fn max(&self, other: &Self) -> Self {
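Both doc examples encode the same rule: `min` and `max` are taken per axis, so the result can mix coordinates from both inputs. A self-contained sketch of that semantics (local `Pos` type, not `azalea_core`'s):

```rust
#[derive(Debug, PartialEq)]
struct Pos {
    x: i32,
    y: i32,
    z: i32,
}

impl Pos {
    // Componentwise minimum: each axis is chosen independently.
    fn min(&self, other: &Self) -> Self {
        Pos {
            x: self.x.min(other.x),
            y: self.y.min(other.y),
            z: self.z.min(other.z),
        }
    }
}

fn main() {
    let a = Pos { x: 1, y: 20, z: 300 };
    let b = Pos { x: 50, y: 40, z: 30 };
    // Matches the doc example: x and y come from `a`, z comes from `b`.
    assert_eq!(a.min(&b), Pos { x: 1, y: 20, z: 30 });
}
```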
@@ -901,7 +901,9 @@ pub struct DamageResistant {
 // in the vanilla code this is
 // ```
 // StreamCodec.composite(
-// TagKey.streamCodec(Registries.DAMAGE_TYPE), DamageResistant::types, DamageResistant::new
+// TagKey.streamCodec(Registries.DAMAGE_TYPE),
+// DamageResistant::types,
+// DamageResistant::new,
 // );
 // ```
 // i'm not entirely sure if this is meant to be a vec or something, i just made it a
@@ -122,14 +122,14 @@ impl ItemStackData {
 /// # use azalea_inventory::ItemStackData;
 /// # use azalea_registry::Item;
 /// let mut a = ItemStackData {
-/// kind: Item::Stone,
-/// count: 1,
-/// components: Default::default(),
+///     kind: Item::Stone,
+///     count: 1,
+///     components: Default::default(),
 /// };
 /// let mut b = ItemStackData {
-/// kind: Item::Stone,
-/// count: 2,
-/// components: Default::default(),
+///     kind: Item::Stone,
+///     count: 2,
+///     components: Default::default(),
 /// };
 /// assert!(a.is_same_item_and_components(&b));
 ///
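The assert in this example only passes because the comparison ignores `count`; a simplified sketch of that rule (hypothetical `StackData` type, not the real `azalea_inventory` definition):

```rust
#[derive(PartialEq)]
enum Item {
    Stone,
}

// Hypothetical stand-in for ItemStackData; components are elided.
struct StackData {
    kind: Item,
    count: u32,
}

impl StackData {
    // Same kind (and, in the real type, same components); count is ignored.
    fn is_same_item_and_components(&self, other: &Self) -> bool {
        self.kind == other.kind
    }
}

fn main() {
    let a = StackData { kind: Item::Stone, count: 1 };
    let b = StackData { kind: Item::Stone, count: 2 };
    assert!(a.is_same_item_and_components(&b)); // counts differ, still "same"
}
```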
@@ -68,18 +68,13 @@ pub struct WriteConnection<W: ProtocolPacket> {
 /// Join an offline-mode server and go through the handshake.
 /// ```rust,no_run
 /// use azalea_protocol::{
-///     resolver,
 ///     connect::Connection,
 ///     packets::{
-///         self,
-///         ClientIntention, PROTOCOL_VERSION,
-///         login::{
-///             ClientboundLoginPacket,
-///             ServerboundHello,
-///             ServerboundKey
-///         },
-///         handshake::ServerboundIntention
-///     }
+///         self, ClientIntention, PROTOCOL_VERSION,
+///         handshake::ServerboundIntention,
+///         login::{ClientboundLoginPacket, ServerboundHello, ServerboundKey},
+///     },
+///     resolver,
 /// };
 ///
 /// #[tokio::main]
@@ -93,7 +88,8 @@ pub struct WriteConnection<W: ProtocolPacket> {
 ///     hostname: resolved_address.ip().to_string(),
 ///     port: resolved_address.port(),
 ///     intention: ClientIntention::Login,
-/// }).await?;
+/// })
+/// .await?;
 ///
 /// let mut conn = conn.login();
 ///
@@ -101,7 +97,8 @@ pub struct WriteConnection<W: ProtocolPacket> {
 /// conn.write(ServerboundHello {
 ///     name: "bot".to_string(),
 ///     profile_id: uuid::Uuid::nil(),
-/// }).await?;
+/// })
+/// .await?;
 ///
 /// let (conn, game_profile) = loop {
 ///     let packet = conn.read().await?;
@@ -112,7 +109,8 @@ pub struct WriteConnection<W: ProtocolPacket> {
 ///             conn.write(ServerboundKey {
 ///                 key_bytes: e.encrypted_public_key,
 ///                 encrypted_challenge: e.encrypted_challenge,
-///             }).await?;
+///             })
+///             .await?;
 ///             conn.set_encryption_key(e.secret_key);
 ///         }
 ///         ClientboundLoginPacket::LoginCompression(p) => {
@@ -402,19 +400,20 @@ impl Connection<ClientboundLoginPacket, ServerboundLoginPacket> {
 ///
 /// ```rust,no_run
 /// use azalea_auth::AuthResult;
-/// use azalea_protocol::connect::Connection;
-/// use azalea_protocol::packets::login::{
-///     ClientboundLoginPacket,
-///     ServerboundKey
+/// use azalea_protocol::{
+///     connect::Connection,
+///     packets::login::{ClientboundLoginPacket, ServerboundKey},
 /// };
 /// use uuid::Uuid;
 /// # use azalea_protocol::ServerAddress;
 ///
 /// # async fn example() -> Result<(), Box<dyn std::error::Error>> {
-/// let AuthResult { access_token, profile } = azalea_auth::auth(
-///     "example@example.com",
-///     azalea_auth::AuthOpts::default()
-/// ).await.expect("Couldn't authenticate");
+/// let AuthResult {
+///     access_token,
+///     profile,
+/// } = azalea_auth::auth("example@example.com", azalea_auth::AuthOpts::default())
+/// .await
+/// .expect("Couldn't authenticate");
 /// #
 /// # let address = ServerAddress::try_from("example@example.com").unwrap();
 /// # let resolved_address = azalea_protocol::resolver::resolve_address(&address).await?;
@@ -428,16 +427,13 @@ impl Connection<ClientboundLoginPacket, ServerboundLoginPacket> {
 ///         ClientboundLoginPacket::Hello(p) => {
 ///             // tell Mojang we're joining the server & enable encryption
 ///             let e = azalea_crypto::encrypt(&p.public_key, &p.challenge).unwrap();
-///             conn.authenticate(
-///                 &access_token,
-///                 &profile.id,
-///                 e.secret_key,
-///                 &p
-///             ).await?;
+///             conn.authenticate(&access_token, &profile.id, e.secret_key, &p)
+///                 .await?;
 ///             conn.write(ServerboundKey {
 ///                 key_bytes: e.encrypted_public_key,
 ///                 encrypted_challenge: e.encrypted_challenge,
-///             }).await?;
+///             })
+///             .await?;
 ///             conn.set_encryption_key(e.secret_key);
 ///         }
 ///         _ => {}
@@ -1,4 +1,4 @@
-// NOTE: This file is generated automatically by codegen/packet.py.
+// NOTE: This file is @generated automatically by codegen/packet.py.
 // Don't edit it directly!
 
 use azalea_protocol_macros::declare_state_packets;
@@ -1,4 +1,4 @@
-// NOTE: This file is generated automatically by codegen/packet.py.
+// NOTE: This file is @generated automatically by codegen/packet.py.
 // Don't edit it directly!
 
 use azalea_protocol_macros::declare_state_packets;
@@ -1,4 +1,4 @@
-// NOTE: This file is generated automatically by codegen/packet.py.
+// NOTE: This file is @generated automatically by codegen/packet.py.
 // Don't edit it directly!
 
 use azalea_protocol_macros::declare_state_packets;
@@ -1,4 +1,4 @@
-// NOTE: This file is generated automatically by codegen/packet.py.
+// NOTE: This file is @generated automatically by codegen/packet.py.
 // Don't edit it directly!
 
 use azalea_protocol_macros::declare_state_packets;
@@ -101,4 +101,4 @@ impl AzaleaWrite for ClientIntention {
     fn azalea_write(&self, buf: &mut impl Write) -> io::Result<()> {
         (*self as i32).azalea_write_var(buf)
     }
-}
+}
@@ -1,4 +1,4 @@
-// NOTE: This file is generated automatically by codegen/packet.py.
+// NOTE: This file is @generated automatically by codegen/packet.py.
 // Don't edit it directly!
 
 use azalea_protocol_macros::declare_state_packets;
@@ -20,7 +20,10 @@ impl Instance {
 ///
 /// ```
 /// # fn example(client: &azalea_client::Client) {
-/// client.world().read().find_block(client.position(), &azalea_registry::Block::Chest.into());
+/// client
+///     .world()
+///     .read()
+///     .find_block(client.position(), &azalea_registry::Block::Chest.into());
 /// # }
 /// ```
 pub fn find_block(
@@ -11,7 +11,7 @@ use azalea_core::position::{BlockPos, ChunkPos};
 ///
 /// let mut iter = BlockIterator::new(BlockPos::default(), 4);
 /// for block_pos in iter {
-/// println!("{:?}", block_pos);
+///     println!("{:?}", block_pos);
 /// }
 /// ```
 pub struct BlockIterator {
@@ -86,7 +86,7 @@ impl Iterator for BlockIterator {
 ///
 /// let mut iter = SquareChunkIterator::new(ChunkPos::default(), 4);
 /// for chunk_pos in iter {
-/// println!("{:?}", chunk_pos);
+///     println!("{:?}", chunk_pos);
 /// }
 /// ```
 pub struct SquareChunkIterator {
@@ -123,11 +123,11 @@ impl SquareChunkIterator {
 ///
 /// let mut iter = SquareChunkIterator::new(ChunkPos::default(), 2);
 /// while let Some(chunk_pos) = iter.next() {
-/// println!("{:?}", chunk_pos);
+///     println!("{:?}", chunk_pos);
 /// }
 /// iter.set_max_distance(4);
 /// while let Some(chunk_pos) = iter.next() {
-/// println!("{:?}", chunk_pos);
+///     println!("{:?}", chunk_pos);
 /// }
 /// ```
 pub fn set_max_distance(&mut self, max_distance: u32) {
@@ -174,7 +174,7 @@ impl Iterator for SquareChunkIterator {
 ///
 /// let mut iter = ChunkIterator::new(ChunkPos::default(), 4);
 /// for chunk_pos in iter {
-/// println!("{:?}", chunk_pos);
+///     println!("{:?}", chunk_pos);
 /// }
 /// ```
 pub struct ChunkIterator {
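These iterators all walk outward from an origin; a minimal standalone sketch of the square-ring pattern they implement (not the crate's actual code):

```rust
// All chunk offsets at Chebyshev distance `r` from the origin.
fn ring(r: i32) -> Vec<(i32, i32)> {
    if r == 0 {
        return vec![(0, 0)];
    }
    let mut out = Vec::new();
    for d in -r..=r {
        out.push((d, -r)); // top edge (includes corners)
        out.push((d, r)); // bottom edge (includes corners)
    }
    for d in (-r + 1)..r {
        out.push((-r, d)); // left edge, corners already covered
        out.push((r, d)); // right edge, corners already covered
    }
    out
}

fn main() {
    // Visit every offset within radius 2, nearest rings first, like
    // SquareChunkIterator does for chunk positions.
    for r in 0..=2 {
        for (x, z) in ring(r) {
            println!("({x}, {z})");
        }
    }
}
```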
@@ -107,7 +107,11 @@ impl ClientBuilder<NoState, ()> {
 /// use azalea::app::PluginGroup;
 ///
 /// let client_builder = ClientBuilder::new_without_plugins()
-///     .add_plugins(azalea::DefaultPlugins.build().disable::<azalea::chat_signing::ChatSigningPlugin>())
+///     .add_plugins(
+///         azalea::DefaultPlugins
+///             .build()
+///             .disable::<azalea::chat_signing::ChatSigningPlugin>(),
+///     )
 ///     .add_plugins(azalea::DefaultBotPlugins);
 /// # client_builder.set_handler(handle);
 /// # #[derive(Component, Clone, Default)]
@@ -15,13 +15,16 @@ use bevy_ecs::{
 /// applied filter.
 ///
 /// ```
-/// use azalea::chat::SendChatEvent;
-/// use azalea::nearest_entity::EntityFinder;
-/// use azalea_entity::metadata::{Player, AbstractMonster};
-/// use azalea_entity::LocalEntity;
-/// use bevy_ecs::system::Query;
-/// use bevy_ecs::prelude::{Entity, EventWriter};
-/// use bevy_ecs::query::With;
+/// use azalea::{chat::SendChatEvent, nearest_entity::EntityFinder};
+/// use azalea_entity::{
+///     LocalEntity,
+///     metadata::{AbstractMonster, Player},
+/// };
+/// use bevy_ecs::{
+///     prelude::{Entity, EventWriter},
+///     query::With,
+///     system::Query,
+/// };
 ///
 /// /// All bots near aggressive mobs will scream in chat.
 /// pub fn bots_near_aggressive_mobs(
@@ -1,9 +1,6 @@
 from lib.utils import get_dir_location, to_camel_case
-from ..mappings import Mappings
-from typing import Optional
-import re
 
-BLOCKS_RS_DIR = get_dir_location('../azalea-block/src/generated.rs')
+BLOCKS_RS_DIR = get_dir_location("../azalea-block/src/generated.rs")
 
 # Terminology:
 # - Property: A property of a block, like "direction"
@@ -12,18 +9,23 @@ BLOCKS_RS_DIR = get_dir_location('../azalea-block/src/generated.rs')
 # - Block: Has properties and states.
 
 
-def generate_blocks(blocks_report: dict, pumpkin_block_datas: dict, ordered_blocks: list[str], burger_data: dict):
-    with open(BLOCKS_RS_DIR, 'r') as f:
+def generate_blocks(
+    blocks_report: dict,
+    pumpkin_block_datas: dict,
+    ordered_blocks: list[str],
+    burger_data: dict,
+):
+    with open(BLOCKS_RS_DIR, "r") as f:
         existing_code = f.read().splitlines()
 
     new_make_block_states_macro_code = []
-    new_make_block_states_macro_code.append('make_block_states! {')
+    new_make_block_states_macro_code.append("make_block_states! {")
 
-    burger_block_datas = burger_data[0]['blocks']['block']
+    burger_block_datas = burger_data[0]["blocks"]["block"]
 
     pumpkin_block_map = {}
-    for block_data in pumpkin_block_datas['blocks']:
-        block_id = block_data['name']
+    for block_data in pumpkin_block_datas["blocks"]:
+        block_id = block_data["name"]
         pumpkin_block_map[block_id] = block_data
 
     # Find properties
@@ -32,20 +34,22 @@ def generate_blocks(blocks_report: dict, pumpkin_block_datas: dict, ordered_bloc
     # This dict looks like { 'FloweringAzaleaLeavesDistance': 'distance' }
     property_struct_names_to_names = {}
     for block_id in ordered_blocks:
-        block_data_report = blocks_report[f'minecraft:{block_id}']
+        block_data_report = blocks_report[f"minecraft:{block_id}"]
 
         block_properties = {}
-        for property_id in list(block_data_report.get('properties', {}).keys()):
-            property_variants = block_data_report['properties'][property_id]
+        for property_id in list(block_data_report.get("properties", {}).keys()):
+            property_variants = block_data_report["properties"][property_id]
 
-            property_struct_name = get_property_struct_name(block_id, property_id, property_variants)
+            property_struct_name = get_property_struct_name(
+                block_id, property_id, property_variants
+            )
 
             if property_struct_name in properties:
                 if not properties[property_struct_name] == property_variants:
                     raise Exception(
-                        'There are multiple enums with the same name! '
-                        f'Name: {property_struct_name}, variants: {property_variants}/{properties[property_struct_name]}. '
-                        'This can be fixed by hardcoding a name in the get_property_struct_name function.'
+                        "There are multiple enums with the same name! "
+                        f"Name: {property_struct_name}, variants: {property_variants}/{properties[property_struct_name]}. "
+                        "This can be fixed by hardcoding a name in the get_property_struct_name function."
                     )
 
             block_properties[property_struct_name] = property_variants
@@ -55,7 +59,7 @@ def generate_blocks(blocks_report: dict, pumpkin_block_datas: dict, ordered_bloc
        properties.update(block_properties)
 
    # Property codegen
-    new_make_block_states_macro_code.append(' Properties => {')
+    new_make_block_states_macro_code.append(" Properties => {")
    for property_struct_name, property_variants in properties.items():
        # "face" => Face {
        # Floor,
@@ -65,99 +69,104 @@ def generate_blocks(blocks_report: dict, pumpkin_block_datas: dict, ordered_bloc
         property_id = property_struct_names_to_names[property_struct_name]
 
         # if the only variants are true and false, we make it unit struct with a boolean instead of an enum
-        if property_variants == ['true', 'false']:
-            property_shape_code = f'{property_struct_name}(bool)'
+        if property_variants == ["true", "false"]:
+            property_shape_code = f"{property_struct_name}(bool)"
         else:
-            property_shape_code = f'{property_struct_name} {{\n'
+            property_shape_code = f"{property_struct_name} {{\n"
             for variant in property_variants:
-                property_shape_code += f' {to_camel_case(variant)},\n'
-            property_shape_code += ' }'
+                property_shape_code += f" {to_camel_case(variant)},\n"
+            property_shape_code += " }"
 
         new_make_block_states_macro_code.append(
-            f' "{property_id}" => {property_shape_code},')
+            f' "{property_id}" => {property_shape_code},'
+        )
 
-    new_make_block_states_macro_code.append(' },')
+    new_make_block_states_macro_code.append(" },")
 
     # Block codegen
-    new_make_block_states_macro_code.append(' Blocks => {')
+    new_make_block_states_macro_code.append(" Blocks => {")
     for block_id in ordered_blocks:
-        block_data_report = blocks_report['minecraft:' + block_id]
+        block_data_report = blocks_report["minecraft:" + block_id]
         block_data_burger = burger_block_datas.get(block_id, {})
         block_data_pumpkin = pumpkin_block_map[block_id]
 
         default_property_variants: dict[str, str] = {}
-        for state in block_data_report['states']:
-            if state.get('default'):
-                default_property_variants = state.get('properties', {})
+        for state in block_data_report["states"]:
+            if state.get("default"):
+                default_property_variants = state.get("properties", {})
 
-        properties_code = '{'
-        for property_id in list(block_data_report.get('properties', {}).keys()):
+        properties_code = "{"
+        for property_id in list(block_data_report.get("properties", {}).keys()):
             property_default = default_property_variants.get(property_id)
-            property_variants = block_data_report['properties'][property_id]
+            property_variants = block_data_report["properties"][property_id]
 
             property_struct_name = get_property_struct_name(
-                block_id, property_id, property_variants)
+                block_id, property_id, property_variants
+            )
 
-            is_boolean_property = property_variants == ['true', 'false']
+            is_boolean_property = property_variants == ["true", "false"]
 
             if is_boolean_property:
                 # if it's a boolean, keep the type lowercase
                 # (so it's either `true` or `false`)
-                property_default_type = f'{property_struct_name}({property_default})'
+                property_default_type = f"{property_struct_name}({property_default})"
             else:
-                property_default_type = f'{property_struct_name}::{to_camel_case(property_default)}'
+                property_default_type = (
+                    f"{property_struct_name}::{to_camel_case(property_default)}"
+                )
 
             assert property_default is not None
 
             this_property_code = f'"{property_id}": {property_default_type}'
 
-            properties_code += f'\n {this_property_code},'
+            properties_code += f"\n {this_property_code},"
         # if there's nothing inside the properties, keep it in one line
-        if properties_code == '{':
-            properties_code += '}'
+        if properties_code == "{":
+            properties_code += "}"
         else:
-            properties_code += '\n }'
+            properties_code += "\n }"
 
         # make the block behavior
-        behavior_constructor = 'BlockBehavior::new()'
+        behavior_constructor = "BlockBehavior::new()"
         # requires tool
-        if block_data_burger.get('requires_correct_tool_for_drops'):
-            behavior_constructor += '.requires_correct_tool_for_drops()'
+        if block_data_burger.get("requires_correct_tool_for_drops"):
+            behavior_constructor += ".requires_correct_tool_for_drops()"
         # strength
-        destroy_time = block_data_pumpkin.get('hardness')
-        explosion_resistance = block_data_pumpkin.get('blast_resistance')
+        destroy_time = block_data_pumpkin.get("hardness")
+        explosion_resistance = block_data_pumpkin.get("blast_resistance")
         if destroy_time and explosion_resistance:
-            behavior_constructor += f'.strength({destroy_time}, {explosion_resistance})'
+            behavior_constructor += f".strength({destroy_time}, {explosion_resistance})"
         elif destroy_time:
-            behavior_constructor += f'.destroy_time({destroy_time})'
+            behavior_constructor += f".destroy_time({destroy_time})"
        elif explosion_resistance:
-            behavior_constructor += f'.explosion_resistance({explosion_resistance})'
+            behavior_constructor += f".explosion_resistance({explosion_resistance})"
         # friction
-        friction = block_data_burger.get('friction')
-        if friction != None:
-            behavior_constructor += f'.friction({friction})'
-
+        friction = block_data_burger.get("friction")
+        if friction is not None:
+            behavior_constructor += f".friction({friction})"
+
         force_solid = None
-        if block_data_burger.get('force_solid_on'):
-            force_solid = 'true'
-        elif block_data_burger.get('force_solid_off'):
-            force_solid = 'false'
-        if force_solid != None:
-            behavior_constructor += f'.force_solid({force_solid})'
+        if block_data_burger.get("force_solid_on"):
+            force_solid = "true"
+        elif block_data_burger.get("force_solid_off"):
+            force_solid = "false"
+        if force_solid is not None:
+            behavior_constructor += f".force_solid({force_solid})"
 
         # TODO: use burger to generate the blockbehavior
         new_make_block_states_macro_code.append(
-            f' {block_id} => {behavior_constructor}, {properties_code},')
+            f" {block_id} => {behavior_constructor}, {properties_code},"
+        )
 
-    new_make_block_states_macro_code.append(' }')
-    new_make_block_states_macro_code.append('}')
+    new_make_block_states_macro_code.append(" }")
+    new_make_block_states_macro_code.append("}")
 
     new_code = []
     in_macro = False
     for line in existing_code:
-        if line == 'make_block_states! {':
+        if line == "make_block_states! {":
             in_macro = True
-        elif line == '}':
+        elif line == "}":
             if in_macro:
                 in_macro = False
                 new_code.extend(new_make_block_states_macro_code)
@@ -166,69 +175,94 @@ def generate_blocks(blocks_report: dict, pumpkin_block_datas: dict, ordered_bloc
             continue
         new_code.append(line)
     # empty line at the end
-    new_code.append('')
+    new_code.append("")
 
-    with open(BLOCKS_RS_DIR, 'w') as f:
-        f.write('\n'.join(new_code))
+    with open(BLOCKS_RS_DIR, "w") as f:
+        f.write("\n".join(new_code))
 
-def get_property_struct_name(block_id: str, property_id: str, property_variants: list[str]) -> str:
+
+def get_property_struct_name(
+    block_id: str, property_id: str, property_variants: list[str]
+) -> str:
     # these are hardcoded because otherwise they cause conflicts
     # some names inspired by https://github.com/feather-rs/feather/blob/main/feather/blocks/src/generated/table.rs
-    if property_variants == ['north', 'east', 'south', 'west', 'up', 'down']:
-        return 'FacingCubic'
-    if property_variants == ['north', 'south', 'west', 'east']:
-        return 'FacingCardinal'
-    if property_variants == ['top', 'bottom']:
-        return 'TopBottom'
-    if property_variants == ['north_south', 'east_west', 'ascending_east', 'ascending_west', 'ascending_north', 'ascending_south']:
-        return 'RailShape'
-    if property_variants == ['straight', 'inner_left', 'inner_right', 'outer_left', 'outer_right']:
-        return 'StairShape'
-    if property_variants == ['normal', 'sticky']:
-        return 'PistonType'
-    if property_variants == ['x', 'z']:
-        return 'AxisXZ'
-    if property_variants == ['single', 'left', 'right']:
-        return 'ChestType'
-    if property_variants == ['compare', 'subtract']:
-        return 'ComparatorType'
-    if property_variants == ['inactive', 'waiting_for_players', 'active', 'waiting_for_reward_ejection', 'ejecting_reward', 'cooldown']:
-        return 'TrialSpawnerState'
-    if property_variants == ['inactive', 'active', 'unlocking', 'ejecting']:
-        return 'VaultState'
-    if property_variants == ['start', 'log', 'fail', 'accept']:
-        return 'TestMode'
-    if property_variants == ['save', 'load', 'corner', 'data']:
-        return 'StructureMode'
-    if 'harp' in property_variants and 'didgeridoo' in property_variants:
-        return 'Sound'
+    if property_variants == ["north", "east", "south", "west", "up", "down"]:
+        return "FacingCubic"
+    if property_variants == ["north", "south", "west", "east"]:
+        return "FacingCardinal"
+    if property_variants == ["top", "bottom"]:
+        return "TopBottom"
+    if property_variants == [
+        "north_south",
+        "east_west",
+        "ascending_east",
+        "ascending_west",
+        "ascending_north",
+        "ascending_south",
+    ]:
+        return "RailShape"
+    if property_variants == [
+        "straight",
+        "inner_left",
+        "inner_right",
+        "outer_left",
+        "outer_right",
+    ]:
+        return "StairShape"
+    if property_variants == ["normal", "sticky"]:
+        return "PistonType"
+    if property_variants == ["x", "z"]:
+        return "AxisXZ"
+    if property_variants == ["single", "left", "right"]:
+        return "ChestType"
+    if property_variants == ["compare", "subtract"]:
+        return "ComparatorType"
+    if property_variants == [
+        "inactive",
+        "waiting_for_players",
+        "active",
+        "waiting_for_reward_ejection",
+        "ejecting_reward",
+        "cooldown",
+    ]:
+        return "TrialSpawnerState"
+    if property_variants == ["inactive", "active", "unlocking", "ejecting"]:
+        return "VaultState"
+    if property_variants == ["start", "log", "fail", "accept"]:
+        return "TestMode"
+    if property_variants == ["save", "load", "corner", "data"]:
+        return "StructureMode"
+    if "harp" in property_variants and "didgeridoo" in property_variants:
+        return "Sound"
     if is_list_of_string_integers(property_variants):
         # if the values are all integers, then prepend the block name
         return to_camel_case(block_id) + to_camel_case(property_id)
-    if property_variants == ['up', 'side', 'none']:
-        return 'Wire' + to_camel_case(property_id)
-    if property_variants == ['none', 'low', 'tall']:
-        return 'Wall' + to_camel_case(property_id)
+    if property_variants == ["up", "side", "none"]:
+        return "Wire" + to_camel_case(property_id)
+    if property_variants == ["none", "low", "tall"]:
+        return "Wall" + to_camel_case(property_id)
 
     return to_camel_case(property_id)
 
-def is_list_of_string_integers(l: list[str]) -> bool:
-    return all(map(str.isdigit, l))
+
+def is_list_of_string_integers(list_to_check: list[str]) -> bool:
+    return all(map(str.isdigit, list_to_check))
+
 
 def get_ordered_blocks(registries_report: dict[str, dict]) -> list[str]:
-    '''
+    """
     Returns a list of block ids (like ['air', 'stone', ...]) ordered by their protocol id.
-    '''
-    blocks_registry = registries_report['minecraft:block']
+    """
+    blocks_registry = registries_report["minecraft:block"]
 
-    blocks_to_ids = {}
-    for block_id, value in blocks_registry['entries'].items():
-        prefix = 'minecraft:'
+    blocks_to_ids = {}
+    for block_id, value in blocks_registry["entries"].items():
+        prefix = "minecraft:"
         assert block_id.startswith(prefix)
-        block_id = block_id[len(prefix):]
-        protocol_id = value['protocol_id']
+        block_id = block_id[len(prefix) :]
+        protocol_id = value["protocol_id"]
         blocks_to_ids[block_id] = protocol_id
 
-
     ordered_blocks = []
     for block_id in sorted(blocks_to_ids, key=blocks_to_ids.get):
         ordered_blocks.append(block_id)
@@ -1,18 +1,14 @@
 from lib.utils import to_camel_case, to_snake_case, get_dir_location, upper_first_letter
 from lib.code.packet import burger_instruction_to_code
 from lib.code.utils import burger_type_to_rust_type
 from lib.mappings import Mappings
 from typing import Optional
-import re
 
-METADATA_RS_DIR = get_dir_location(
-    '../azalea-entity/src/metadata.rs')
+METADATA_RS_DIR = get_dir_location("../azalea-entity/src/metadata.rs")
 
-DATA_RS_DIR = get_dir_location(
-    '../azalea-entity/src/data.rs')
+DATA_RS_DIR = get_dir_location("../azalea-entity/src/data.rs")
 
+DIMENSIONS_RS_DIR = get_dir_location("../azalea-entity/src/dimensions.rs")
+
-DIMENSIONS_RS_DIR = get_dir_location(
-    '../azalea-entity/src/dimensions.rs')
-
 def generate_metadata_names(burger_dataserializers: dict, mappings: Mappings):
     serializer_names: list[Optional[str]] = [None] * len(burger_dataserializers)
@@ -20,56 +16,63 @@ def generate_metadata_names(burger_dataserializers: dict, mappings: Mappings):
         print(burger_serializer)
 
         # burger gives us the wrong class, so we do this instead
-        data_serializers_class = mappings.get_class_from_deobfuscated_name('net.minecraft.network.syncher.EntityDataSerializers')
-        mojmap_name = mappings.get_field(data_serializers_class, burger_serializer['field']).lower()
+        data_serializers_class = mappings.get_class_from_deobfuscated_name(
+            "net.minecraft.network.syncher.EntityDataSerializers"
+        )
+        mojmap_name = mappings.get_field(
+            data_serializers_class, burger_serializer["field"]
+        ).lower()
 
-        if mojmap_name == 'component':
-            mojmap_name = 'formatted_text'
-        elif mojmap_name == 'optional_component':
-            mojmap_name = 'optional_formatted_text'
+        if mojmap_name == "component":
+            mojmap_name = "formatted_text"
+        elif mojmap_name == "optional_component":
+            mojmap_name = "optional_formatted_text"
 
-        serializer_names[burger_serializer['id']] = upper_first_letter(to_camel_case(mojmap_name))
+        serializer_names[burger_serializer["id"]] = upper_first_letter(
+            to_camel_case(mojmap_name)
+        )
     return serializer_names
 
 
 def parse_metadata_types_from_code():
-    with open(DATA_RS_DIR, 'r') as f:
+    with open(DATA_RS_DIR, "r") as f:
         lines = f.read().splitlines()
 
 
    data = []
 
    in_enum = False
    for line in lines:
-        if line == 'pub enum EntityDataValue {':
+        if line == "pub enum EntityDataValue {":
            in_enum = True
-        elif line == '}':
+        elif line == "}":
            in_enum = False
        elif in_enum:
            line = line.strip()
-            if line.startswith('//'): continue
-            name, type = line.rstrip('),').split('(')
+            if line.startswith("//"):
+                continue
+            name, type = line.rstrip("),").split("(")
            is_var = False
-            if type.startswith('#[var] '):
+            if type.startswith("#[var] "):
                is_var = True
-                type = type[len('#[var] '):]
-            data.append({
-                'name': name,
-                'type': type,
-                'var': is_var
-            })
+                type = type[len("#[var] ") :]
+            data.append({"name": name, "type": type, "var": is_var})
    print(data)
    return data
 
-def generate_entity_metadata(burger_entities_data: dict, mappings: Mappings):
-    burger_entity_metadata = burger_entities_data['entity']
-
-    new_metadata_names = generate_metadata_names(burger_entities_data['dataserializers'], mappings)
+def generate_entity_metadata(burger_entities_data: dict, mappings: Mappings):
+    burger_entity_metadata = burger_entities_data["entity"]
+
+    new_metadata_names = generate_metadata_names(
+        burger_entities_data["dataserializers"], mappings
+    )
    parsed_metadata_types = parse_metadata_types_from_code()
 
    parsed_metadata_names = []
    for t in parsed_metadata_types:
-        parsed_metadata_names.append(t['name'])
+        parsed_metadata_names.append(t["name"])
 
-    with open(DATA_RS_DIR, 'r') as f:
+    with open(DATA_RS_DIR, "r") as f:
        lines = f.read().splitlines()
    # add the metadata names that weren't there before to the end of the enum.
    # this technically might cause them to be in the wrong order but i decided
@@ -81,26 +84,28 @@ def generate_entity_metadata(burger_entities_data: dict, mappings: Mappings):
     if added_metadata_names != []:
         in_enum = False
         for i, line in enumerate(list(lines)):
-            if line == 'pub enum EntityDataValue {':
+            if line == "pub enum EntityDataValue {":
                 in_enum = True
-            elif in_enum and line == '}':
+            elif in_enum and line == "}":
                 in_enum = False
                 for n in added_metadata_names:
-                    lines.insert(i, f'{n}(TODO),')
+                    lines.insert(i, f"{n}(TODO),")
                     i += 1
         print(lines)
-        with open(DATA_RS_DIR, 'w') as f:
-            f.write('\n'.join(lines))
-        print('Expected metadata types:\n' + '\n'.join(new_metadata_names))
-        print('Updated metadata types in azalea-entity/src/data.rs, go make sure they\'re correct (check EntityDataSerializers.java) and then press enter')
+        with open(DATA_RS_DIR, "w") as f:
+            f.write("\n".join(lines))
+        print("Expected metadata types:\n" + "\n".join(new_metadata_names))
+        print(
+            "Updated metadata types in azalea-entity/src/data.rs, go make sure they're correct (check EntityDataSerializers.java) and then press enter"
+        )
         input()
 
 
     metadata_types = parse_metadata_types_from_code()
 
     code = []
-    code.append('''#![allow(clippy::single_match)]
+    code.append("""#![allow(clippy::single_match)]
 
-// This file is generated from codegen/lib/code/entity.py.
+// This file is @generated from codegen/lib/code/entity.py.
 // Don't change it manually!
 
 use azalea_chat::FormattedText;
@@ -131,10 +136,10 @@ impl From<EntityDataValue> for UpdateMetadataError {
 Self::WrongType(value)
 }
 }
-''')
+""")
 
     # types that are only ever used in one entity
-    single_use_imported_types = {'particle', 'pose'}
+    single_use_imported_types = {"particle", "pose"}
 
     added_metadata_fields = set()
 
@@ -146,11 +151,13 @@ impl From<EntityDataValue> for UpdateMetadataError {
     duplicate_field_names = set()
 
     # some generic names... we don't like these
-    duplicate_field_names.add('state') # SnifferState instead of State
+    duplicate_field_names.add("state")  # SnifferState instead of State
 
     for entity_id in burger_entity_metadata.keys():
         field_name_map[entity_id] = {}
-        for field_name_or_bitfield in get_entity_metadata_names(entity_id, burger_entity_metadata, mappings).values():
+        for field_name_or_bitfield in get_entity_metadata_names(
+            entity_id, burger_entity_metadata, mappings
+        ).values():
             if isinstance(field_name_or_bitfield, str):
                 if field_name_or_bitfield in previous_field_names:
                     duplicate_field_names.add(field_name_or_bitfield)
@@ -170,25 +177,30 @@ impl From<EntityDataValue> for UpdateMetadataError {
     # make sure these types are only ever made once
     for name in single_use_imported_types:
         if name in duplicate_field_names:
-            raise Exception(f'{name} should only exist once')
+            raise Exception(f"{name} should only exist once")
 
     # and now figure out what to rename them to
     for entity_id in burger_entity_metadata.keys():
-        for index, field_name_or_bitfield in get_entity_metadata_names(entity_id, burger_entity_metadata, mappings).items():
+        for index, field_name_or_bitfield in get_entity_metadata_names(
+            entity_id, burger_entity_metadata, mappings
+        ).items():
             if isinstance(field_name_or_bitfield, str):
                 new_field_name = field_name_or_bitfield
-                if new_field_name == 'type':
-                    new_field_name = 'kind'
+                if new_field_name == "type":
+                    new_field_name = "kind"
                 if field_name_or_bitfield in duplicate_field_names:
-                    field_name_map[entity_id][
-                        field_name_or_bitfield] = f'{entity_id.strip("~")}_{new_field_name}'
+                    field_name_map[entity_id][field_name_or_bitfield] = (
+                        f"{entity_id.strip('~')}_{new_field_name}"
+                    )
             else:
                 for mask, name in field_name_or_bitfield.items():
                     new_field_name = name
-                    if new_field_name == 'type':
-                        new_field_name = 'kind'
+                    if new_field_name == "type":
+                        new_field_name = "kind"
                     if name in duplicate_field_names:
-                        field_name_map[entity_id][name] = f'{entity_id.strip("~")}_{new_field_name}'
+                        field_name_map[entity_id][name] = (
+                            f"{entity_id.strip('~')}_{new_field_name}"
+                        )
 
     def new_entity(entity_id: str):
         # note: fields are components
@@ -199,25 +211,35 @@ impl From<EntityDataValue> for UpdateMetadataError {
         entity_metadatas = []
 
         def maybe_rename_field(name: str, index: int) -> str:
-            if name in field_name_map[entity_ids_for_all_field_names_or_bitfields[index]]:
-                return field_name_map[entity_ids_for_all_field_names_or_bitfields[index]][name]
+            if (
+                name
+                in field_name_map[entity_ids_for_all_field_names_or_bitfields[index]]
+            ):
+                return field_name_map[
+                    entity_ids_for_all_field_names_or_bitfields[index]
+                ][name]
             return name
 
         parents = get_entity_parents(entity_id, burger_entity_metadata)
         for parent_id in list(reversed(parents)):
-            for index, name_or_bitfield in get_entity_metadata_names(parent_id, burger_entity_metadata, mappings).items():
+            for index, name_or_bitfield in get_entity_metadata_names(
+                parent_id, burger_entity_metadata, mappings
+            ).items():
                 assert index == len(all_field_names_or_bitfields)
                 all_field_names_or_bitfields.append(name_or_bitfield)
                 entity_ids_for_all_field_names_or_bitfields.append(parent_id)
-            entity_metadatas.extend(get_entity_metadata(
-                parent_id, burger_entity_metadata))
+            entity_metadatas.extend(
+                get_entity_metadata(parent_id, burger_entity_metadata)
+            )
         parent_id = parents[1] if len(parents) > 1 else None
 
         # now add all the fields/component structs
         for index, name_or_bitfield in enumerate(all_field_names_or_bitfields):
             # make sure we only ever make these structs once
-            hashable_name_or_bitfield = str(
-                name_or_bitfield) + entity_ids_for_all_field_names_or_bitfields[index]
+            hashable_name_or_bitfield = (
+                str(name_or_bitfield)
+                + entity_ids_for_all_field_names_or_bitfields[index]
+            )
             if hashable_name_or_bitfield in added_metadata_fields:
                 continue
             added_metadata_fields.add(hashable_name_or_bitfield)
@@ -229,30 +251,33 @@ impl From<EntityDataValue> for UpdateMetadataError {
 
                 name_or_bitfield = maybe_rename_field(name_or_bitfield, index)
 
-                struct_name = upper_first_letter(
-                    to_camel_case(name_or_bitfield))
-                type_id = next(filter(lambda i: i['index'] == index, entity_metadatas))['type_id']
+                struct_name = upper_first_letter(to_camel_case(name_or_bitfield))
+                type_id = next(filter(lambda i: i["index"] == index, entity_metadatas))[
+                    "type_id"
+                ]
                 metadata_type_data = metadata_types[type_id]
-                rust_type = metadata_type_data['type']
+                rust_type = metadata_type_data["type"]
 
-                code.append(f'#[derive(Component, Deref, DerefMut, Clone)]')
-                code.append(f'pub struct {struct_name}(pub {rust_type});')
+                code.append("#[derive(Component, Deref, DerefMut, Clone)]")
+                code.append(f"pub struct {struct_name}(pub {rust_type});")
             else:
                 # if it's a bitfield just make a struct for each bit
                 for mask, name in name_or_bitfield.items():
                     name = maybe_rename_field(name, index)
                     struct_name = upper_first_letter(to_camel_case(name))
-                    code.append(f'#[derive(Component, Deref, DerefMut, Clone, Copy)]')
-                    code.append(f'pub struct {struct_name}(pub bool);')
+                    code.append("#[derive(Component, Deref, DerefMut, Clone, Copy)]")
+                    code.append(f"pub struct {struct_name}(pub bool);")
 
         # add the entity struct and Bundle struct
-        struct_name: str = upper_first_letter(
-            to_camel_case(entity_id.lstrip('~')))
-        code.append(f'#[derive(Component)]')
-        code.append(f'pub struct {struct_name};')
+        struct_name: str = upper_first_letter(to_camel_case(entity_id.lstrip("~")))
+        code.append("#[derive(Component)]")
+        code.append(f"pub struct {struct_name};")
 
-        parent_struct_name = upper_first_letter(
-            to_camel_case(parent_id.lstrip("~"))) if parent_id else None
+        parent_struct_name = (
+            upper_first_letter(to_camel_case(parent_id.lstrip("~")))
+            if parent_id
+            else None
+        )
 
         # impl Allay {
         # pub fn apply_metadata(
@@ -267,58 +292,67 @@ impl From<EntityDataValue> for UpdateMetadataError {
         # Ok(())
         # }
         # }
-        code.append(f'impl {struct_name} {{')
+        code.append(f"impl {struct_name} {{")
         code.append(
-            f' pub fn apply_metadata(entity: &mut bevy_ecs::system::EntityCommands, d: EntityDataItem) -> Result<(), UpdateMetadataError> {{')
-        code.append(f' match d.index {{')
+            " pub fn apply_metadata(entity: &mut bevy_ecs::system::EntityCommands, d: EntityDataItem) -> Result<(), UpdateMetadataError> {"
+        )
+        code.append(" match d.index {")
 
         parent_last_index = -1
         for index, name_or_bitfield in enumerate(all_field_names_or_bitfields):
-            is_from_parent = entity_ids_for_all_field_names_or_bitfields[index] != entity_id
+            is_from_parent = (
+                entity_ids_for_all_field_names_or_bitfields[index] != entity_id
+            )
             if is_from_parent:
                 parent_last_index = index
         if parent_last_index != -1:
             code.append(
-                f' 0..={parent_last_index} => {parent_struct_name}::apply_metadata(entity, d)?,')
+                f" 0..={parent_last_index} => {parent_struct_name}::apply_metadata(entity, d)?,"
+            )
 
         for index, name_or_bitfield in enumerate(all_field_names_or_bitfields):
             if index <= parent_last_index:
                 continue
             if isinstance(name_or_bitfield, str):
-                name_or_bitfield = maybe_rename_field(
-                    name_or_bitfield, index)
+                name_or_bitfield = maybe_rename_field(name_or_bitfield, index)
 
-                field_struct_name = upper_first_letter(
-                    to_camel_case(name_or_bitfield))
+                field_struct_name = upper_first_letter(to_camel_case(name_or_bitfield))
                 if name_or_bitfield in single_use_imported_types:
-                    field_struct_name = ''
+                    field_struct_name = ""
 
-                type_id = next(filter(lambda i: i['index'] == index, entity_metadatas))['type_id']
+                type_id = next(filter(lambda i: i["index"] == index, entity_metadatas))[
+                    "type_id"
+                ]
                 metadata_type_data = metadata_types[type_id]
-                rust_type = metadata_type_data['type']
-                type_name = metadata_type_data['name']
+                rust_type = metadata_type_data["type"]
+                type_name = metadata_type_data["name"]
 
                 type_name_field = to_snake_case(type_name)
-                read_field_code = f'{field_struct_name}(d.value.into_{type_name_field}()?)' if field_struct_name else f'd.value.into_{type_name_field}()?'
+                read_field_code = (
+                    f"{field_struct_name}(d.value.into_{type_name_field}()?)"
+                    if field_struct_name
+                    else f"d.value.into_{type_name_field}()?"
+                )
                 code.append(
-                    f' {index} => {{ entity.insert({read_field_code}); }},')
+                    f" {index} => {{ entity.insert({read_field_code}); }},"
+                )
             else:
-                code.append(f' {index} => {{')
-                code.append(
-                    f'let bitfield = d.value.into_byte()?;')
+                code.append(f" {index} => {{")
+                code.append("let bitfield = d.value.into_byte()?;")
                 for mask, name in name_or_bitfield.items():
                     name = maybe_rename_field(name, index)
                     field_struct_name = upper_first_letter(to_camel_case(name))
 
                     code.append(
-                        f'entity.insert({field_struct_name}(bitfield & {mask} != 0));')
-                code.append(' },')
-        code.append(' _ => {}')
-        code.append(' }')
-        code.append(' Ok(())')
-        code.append(' }')
-        code.append('}')
-        code.append('')
+                        f"entity.insert({field_struct_name}(bitfield & {mask} != 0));"
+                    )
+                code.append(" },")
+        code.append(" _ => {}")
+        code.append(" }")
+        code.append(" Ok(())")
+        code.append(" }")
+        code.append("}")
+        code.append("")
 
         # #[derive(Bundle)]
         # struct AllayBundle {
@@ -327,30 +361,27 @@ impl From<EntityDataValue> for UpdateMetadataError {
         # dancing: Dancing,
         # can_duplicate: CanDuplicate,
         # }
-        bundle_struct_name = f'{struct_name}MetadataBundle'
-        code.append(f'')
-        code.append(f'#[derive(Bundle)]')
-        code.append(f'pub struct {bundle_struct_name} {{')
-        code.append(
-            f' _marker: {struct_name},')
+        bundle_struct_name = f"{struct_name}MetadataBundle"
+        code.append("")
+        code.append("#[derive(Bundle)]")
+        code.append(f"pub struct {bundle_struct_name} {{")
+        code.append(f" _marker: {struct_name},")
         if parent_struct_name:
-            code.append(
-                f' parent: {parent_struct_name}MetadataBundle,')
-        for index, name_or_bitfield in get_entity_metadata_names(entity_id, burger_entity_metadata, mappings).items():
+            code.append(f" parent: {parent_struct_name}MetadataBundle,")
+        for index, name_or_bitfield in get_entity_metadata_names(
+            entity_id, burger_entity_metadata, mappings
+        ).items():
             if isinstance(name_or_bitfield, str):
-                name_or_bitfield = maybe_rename_field(
-                    name_or_bitfield, index)
-                struct_name = upper_first_letter(
-                    to_camel_case(name_or_bitfield))
-                code.append(
-                    f' {name_or_bitfield}: {struct_name},')
+                name_or_bitfield = maybe_rename_field(name_or_bitfield, index)
+                struct_name = upper_first_letter(to_camel_case(name_or_bitfield))
+                code.append(f" {name_or_bitfield}: {struct_name},")
             else:
                 for mask, name in name_or_bitfield.items():
                     name = maybe_rename_field(name, index)
 
                     struct_name = upper_first_letter(to_camel_case(name))
-                    code.append(f' {name}: {struct_name},')
-        code.append('}')
+                    code.append(f" {name}: {struct_name},")
+        code.append("}")
 
         # impl Default for AllayBundle {
         # fn default() -> Self {
@@ -365,9 +396,8 @@ impl From<EntityDataValue> for UpdateMetadataError {
         # }
         # }
         # }
-        code.append(f'impl Default for {bundle_struct_name} {{')
-        code.append(
-            ' fn default() -> Self {')
+        code.append(f"impl Default for {bundle_struct_name} {{")
+        code.append(" fn default() -> Self {")
 
         def generate_fields(this_entity_id: str):
             # on_fire: OnFire(false),
@@ -375,32 +405,39 @@ impl From<EntityDataValue> for UpdateMetadataError {
 
             # _marker
             this_entity_struct_name = upper_first_letter(
-                to_camel_case(this_entity_id.lstrip('~')))
-            code.append(
-                f' _marker: {this_entity_struct_name},')
+                to_camel_case(this_entity_id.lstrip("~"))
+            )
+            code.append(f" _marker: {this_entity_struct_name},")
 
             # if it has a parent, put it (do recursion)
             # parent: AbstractCreatureBundle { ... },
             this_entity_parent_ids = get_entity_parents(
-                this_entity_id, burger_entity_metadata)
-            this_entity_parent_id = this_entity_parent_ids[1] if len(
-                this_entity_parent_ids) > 1 else None
+                this_entity_id, burger_entity_metadata
+            )
+            this_entity_parent_id = (
+                this_entity_parent_ids[1] if len(this_entity_parent_ids) > 1 else None
+            )
             if this_entity_parent_id:
-                bundle_struct_name = upper_first_letter(
-                    to_camel_case(this_entity_parent_id.lstrip('~'))) + 'MetadataBundle'
-                code.append(
-                    f' parent: {bundle_struct_name} {{')
+                bundle_struct_name = (
+                    upper_first_letter(to_camel_case(this_entity_parent_id.lstrip("~")))
+                    + "MetadataBundle"
+                )
+                code.append(f" parent: {bundle_struct_name} {{")
                 generate_fields(this_entity_parent_id)
-                code.append(
-                    ' },')
+                code.append(" },")
 
-            for index, name_or_bitfield in get_entity_metadata_names(this_entity_id, burger_entity_metadata, mappings).items():
-                default = next(filter(lambda i: i['index'] == index, entity_metadatas)).get('default', 'Default::default()')
+            for index, name_or_bitfield in get_entity_metadata_names(
+                this_entity_id, burger_entity_metadata, mappings
+            ).items():
+                default = next(
+                    filter(lambda i: i["index"] == index, entity_metadatas)
+                ).get("default", "Default::default()")
                 if isinstance(name_or_bitfield, str):
-                    type_id = next(filter(lambda i: i['index'] == index, entity_metadatas))[
-                        'type_id']
+                    type_id = next(
+                        filter(lambda i: i["index"] == index, entity_metadatas)
+                    )["type_id"]
                     metadata_type_data = metadata_types[type_id]
-                    type_name = metadata_type_data['name']
+                    type_name = metadata_type_data["name"]
 
                     name = maybe_rename_field(name_or_bitfield, index)
 
@@ -409,8 +446,8 @@ impl From<EntityDataValue> for UpdateMetadataError {
                     # wrong default metadatas. This should be added to Burger.
                     if default is None:
                         # some types don't have Default implemented
-                        if type_name == 'CompoundTag':
-                            default = 'simdnbt::owned::NbtCompound::default()'
+                        if type_name == "CompoundTag":
+                            default = "simdnbt::owned::NbtCompound::default()"
                         # elif type_name == 'CatVariant':
                         # # TODO: the default should be Tabby but we don't have a way to get that from here
                         # default = 'azalea_registry::CatVariant::new_raw(0)'
@ -418,49 +455,84 @@ impl From<EntityDataValue> for UpdateMetadataError {
|
|||
# default = 'azalea_registry::PaintingVariant::Kebab'
|
||||
# elif type_name == 'FrogVariant':
|
||||
# default = 'azalea_registry::FrogVariant::Temperate'
|
||||
elif type_name.endswith('Variant'):
|
||||
default = f'azalea_registry::{type_name}::new_raw(0)'
|
||||
elif type_name == 'VillagerData':
|
||||
default = 'VillagerData { kind: azalea_registry::VillagerKind::Plains, profession: azalea_registry::VillagerProfession::None, level: 0 }'
|
||||
elif type_name.endswith("Variant"):
|
||||
default = f"azalea_registry::{type_name}::new_raw(0)"
|
||||
elif type_name == "VillagerData":
|
||||
default = "VillagerData { kind: azalea_registry::VillagerKind::Plains, profession: azalea_registry::VillagerProfession::None, level: 0 }"
|
||||
else:
|
||||
default = f'{type_name}::default()' if name in single_use_imported_types else 'Default::default()'
default = (
f"{type_name}::default()"
if name in single_use_imported_types
else "Default::default()"
)
else:
if type_name == 'Boolean':
default = 'true' if default else 'false'
elif type_name == 'String':
if type_name == "Boolean":
default = "true" if default else "false"
elif type_name == "String":
string_escaped = default.replace('"', '\\"')
default = f'"{string_escaped}".to_string()'
elif type_name == 'BlockPos':
default = f'BlockPos::new{default}'
elif type_name == 'OptionalBlockPos': # Option<BlockPos>
default = f'Some(BlockPos::new{default})' if default != 'Empty' else 'None'
elif type_name == 'OptionalLivingEntityReference':
default = f'Some(uuid::uuid!({default}))' if default != 'Empty' else 'None'
elif type_name == 'OptionalUnsignedInt':
default = f'OptionalUnsignedInt(Some({default}))' if default != 'Empty' else 'OptionalUnsignedInt(None)'
elif type_name == 'ItemStack':
default = f'ItemStack::Present({default})' if default != 'Empty' else 'ItemStack::Empty'
elif type_name == 'BlockState':
default = f'{default}' if default != 'Empty' else 'azalea_block::BlockState::AIR'
elif type_name == 'OptionalBlockState':
default = f'{default}' if default != 'Empty' else 'azalea_block::BlockState::AIR'
elif type_name == 'OptionalFormattedText':
default = f'Some({default})' if default != 'Empty' else 'None'
elif type_name == 'CompoundTag':
default = f'simdnbt::owned::NbtCompound({default})' if default != 'Empty' else 'simdnbt::owned::NbtCompound::default()'
elif type_name == 'Quaternion':
default = f'Quaternion {{ x: {float(default["x"])}, y: {float(default["y"])}, z: {float(default["z"])}, w: {float(default["w"])} }}'
elif type_name == 'Vector3':
default = f'Vec3 {{ x: {float(default["x"])}, y: {float(default["y"])}, z: {float(default["z"])} }}'
elif type_name == 'Byte':
elif type_name == "BlockPos":
default = f"BlockPos::new{default}"
elif type_name == "OptionalBlockPos": # Option<BlockPos>
default = (
f"Some(BlockPos::new{default})"
if default != "Empty"
else "None"
)
elif type_name == "OptionalLivingEntityReference":
default = (
f"Some(uuid::uuid!({default}))"
if default != "Empty"
else "None"
)
elif type_name == "OptionalUnsignedInt":
default = (
f"OptionalUnsignedInt(Some({default}))"
if default != "Empty"
else "OptionalUnsignedInt(None)"
)
elif type_name == "ItemStack":
default = (
f"ItemStack::Present({default})"
if default != "Empty"
else "ItemStack::Empty"
)
elif type_name == "BlockState":
default = (
f"{default}"
if default != "Empty"
else "azalea_block::BlockState::AIR"
)
elif type_name == "OptionalBlockState":
default = (
f"{default}"
if default != "Empty"
else "azalea_block::BlockState::AIR"
)
elif type_name == "OptionalFormattedText":
default = (
f"Some({default})" if default != "Empty" else "None"
)
elif type_name == "CompoundTag":
default = (
f"simdnbt::owned::NbtCompound({default})"
if default != "Empty"
else "simdnbt::owned::NbtCompound::default()"
)
elif type_name == "Quaternion":
default = f"Quaternion {{ x: {float(default['x'])}, y: {float(default['y'])}, z: {float(default['z'])}, w: {float(default['w'])} }}"
elif type_name == "Vector3":
default = f"Vec3 {{ x: {float(default['x'])}, y: {float(default['y'])}, z: {float(default['z'])} }}"
elif type_name == "Byte":
# in 1.19.4 TextOpacity is a -1 by default
if default < 0:
default += 128
if name in single_use_imported_types:
code.append(f' {name}: {default},')
code.append(f" {name}: {default},")
else:
code.append(
f' {name}: {upper_first_letter(to_camel_case(name))}({default}),')
f" {name}: {upper_first_letter(to_camel_case(name))}({default}),"
)
else:
# if it's a bitfield, we'll have to extract the default for
# each bool from each bit in the default

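# illustration with hypothetical values: a burger default of "Empty" for an
# "OptionalUnsignedInt" maps to OptionalUnsignedInt(None) above, while a
# BlockPos default of (1, 2, 3) maps to BlockPos::new(1, 2, 3)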
@@ -468,17 +540,19 @@ impl From<EntityDataValue> for UpdateMetadataError {
name = maybe_rename_field(name, index)
mask = int(mask, 0)
if default is None:
bit_default = 'false'
bit_default = "false"
else:
bit_default = 'true' if (default & mask != 0) else 'false'
bit_default = "true" if (default & mask != 0) else "false"
code.append(
f' {name}: {upper_first_letter(to_camel_case(name))}({bit_default}),')
code.append(' Self {')
f" {name}: {upper_first_letter(to_camel_case(name))}({bit_default}),"
)

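# illustration with hypothetical values: default=0b0101 with mask=0x4 gives
# bit_default "true", since 0b0101 & 0x4 != 0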
code.append(" Self {")
generate_fields(entity_id)
code.append(' }')
code.append(' }')
code.append('}')
code.append('')
code.append(" }")
code.append(" }")
code.append("}")
code.append("")

# parent_field_name = None
for entity_id in burger_entity_metadata:

@@ -499,28 +573,27 @@ impl From<EntityDataValue> for UpdateMetadataError {
# Ok(())
# }
code.append(
f'''pub fn apply_metadata(
"""pub fn apply_metadata(
entity: &mut bevy_ecs::system::EntityCommands,
entity_kind: azalea_registry::EntityKind,
items: Vec<EntityDataItem>,
) -> Result<(), UpdateMetadataError> {{
match entity_kind {{''')
) -> Result<(), UpdateMetadataError> {
match entity_kind {"""
)
for entity_id in burger_entity_metadata:
if entity_id.startswith('~'):
if entity_id.startswith("~"):
# not actually an entity
continue
struct_name: str = upper_first_letter(to_camel_case(entity_id))
code.append(
f' azalea_registry::EntityKind::{struct_name} => {{')
code.append(' for d in items {')
code.append(
f' {struct_name}::apply_metadata(entity, d)?;')
code.append(' }')
code.append(' },')
code.append(' }')
code.append(' Ok(())')
code.append('}')
code.append('')
code.append(f" azalea_registry::EntityKind::{struct_name} => {{")
code.append(" for d in items {")
code.append(f" {struct_name}::apply_metadata(entity, d)?;")
code.append(" }")
code.append(" },")
code.append(" }")
code.append(" Ok(())")
code.append("}")
code.append("")

# pub fn apply_default_metadata(entity: &mut bevy_ecs::system::EntityCommands, kind: azalea_registry::EntityKind) {
# match kind {

@@ -530,57 +603,61 @@ impl From<EntityDataValue> for UpdateMetadataError {
# }
# }
code.append(
'pub fn apply_default_metadata(entity: &mut bevy_ecs::system::EntityCommands, kind: azalea_registry::EntityKind) {')
code.append(' match kind {')
"pub fn apply_default_metadata(entity: &mut bevy_ecs::system::EntityCommands, kind: azalea_registry::EntityKind) {"
)
code.append(" match kind {")
for entity_id in burger_entity_metadata:
if entity_id.startswith('~'):
if entity_id.startswith("~"):
# not actually an entity
continue
struct_name: str = upper_first_letter(to_camel_case(entity_id))
code.append(f" azalea_registry::EntityKind::{struct_name} => {{")
code.append(
f' azalea_registry::EntityKind::{struct_name} => {{')
code.append(
f' entity.insert({struct_name}MetadataBundle::default());')
code.append(' },')
code.append(' }')
code.append('}')
code.append('')
f" entity.insert({struct_name}MetadataBundle::default());"
)
code.append(" },")
code.append(" }")
code.append("}")
code.append("")

with open(METADATA_RS_DIR, "w") as f:
f.write("\n".join(code))

with open(METADATA_RS_DIR, 'w') as f:
f.write('\n'.join(code))

def generate_entity_dimensions(burger_entities_data: dict):
# lines look like
# EntityKind::Player => EntityDimensions::new(0.6, 1.8),
new_match_lines = []
for entity_id, entity_data in burger_entities_data['entity'].items():
if entity_id.startswith('~'):
for entity_id, entity_data in burger_entities_data["entity"].items():
if entity_id.startswith("~"):
# not actually an entity
continue
variant_name: str = upper_first_letter(to_camel_case(entity_id))
width = entity_data['width']
height = entity_data['height']
width = entity_data["width"]
height = entity_data["height"]
new_match_lines.append(
f' EntityKind::{variant_name} => EntityDimensions::new({width}, {height}),')
f" EntityKind::{variant_name} => EntityDimensions::new({width}, {height}),"
)

with open(DIMENSIONS_RS_DIR, 'r') as f:
lines = f.read().split('\n')
with open(DIMENSIONS_RS_DIR, "r") as f:
lines = f.read().split("\n")
new_lines = []

in_match = False
for i, line in enumerate(lines):
if not in_match:
new_lines.append(line)
if line.strip() == 'match entity {':
if line.strip() == "match entity {":
in_match = True
else:
if line.strip() == '}':
if line.strip() == "}":
new_lines.extend(new_match_lines)
new_lines.extend(lines[i:])
break

with open(DIMENSIONS_RS_DIR, 'w') as f:
f.write('\n'.join(new_lines))
with open(DIMENSIONS_RS_DIR, "w") as f:
f.write("\n".join(new_lines))


def get_entity_parents(entity_id: str, burger_entity_metadata: dict):
parents = []

@@ -591,59 +668,68 @@ def get_entity_parents(entity_id: str, burger_entity_metadata: dict):


def get_entity_parent(entity_id: str, burger_entity_metadata: dict):
entity_metadata = burger_entity_metadata[entity_id]['metadata']
entity_metadata = burger_entity_metadata[entity_id]["metadata"]
first_metadata = entity_metadata[0]
return first_metadata.get('entity')
return first_metadata.get("entity")

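# illustration (hypothetical burger shape): given
#   {"cow": {"metadata": [{"entity": "animal"}, ...]}}
# get_entity_parent("cow", ...) returns "animal"; a root entity, whose first
# metadata item has no "entity" key, returns None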

def get_entity_metadata(entity_id: str, burger_entity_metadata: dict):
entity_metadata = burger_entity_metadata[entity_id]['metadata']
entity_metadata = burger_entity_metadata[entity_id]["metadata"]
entity_useful_metadata = []
for metadata_item in entity_metadata:
if 'data' in metadata_item:
for metadata_attribute in metadata_item['data']:
entity_useful_metadata.append({
'index': metadata_attribute['index'],
'type_id': metadata_attribute['serializer_id'],
'default': metadata_attribute.get('default')
})
if "data" in metadata_item:
for metadata_attribute in metadata_item["data"]:
entity_useful_metadata.append(
{
"index": metadata_attribute["index"],
"type_id": metadata_attribute["serializer_id"],
"default": metadata_attribute.get("default"),
}
)
return entity_useful_metadata


# returns a dict of {index: (name or bitfield)}


def get_entity_metadata_names(entity_id: str, burger_entity_metadata: dict, mappings: Mappings):
entity_metadata = burger_entity_metadata[entity_id]['metadata']
def get_entity_metadata_names(
entity_id: str, burger_entity_metadata: dict, mappings: Mappings
):
entity_metadata = burger_entity_metadata[entity_id]["metadata"]
mapped_metadata_names = {}

for metadata_item in entity_metadata:
if 'data' in metadata_item:
obfuscated_class = metadata_item['class']
mojang_class = mappings.get_class(obfuscated_class)
if "data" in metadata_item:
obfuscated_class = metadata_item["class"]
# mojang_class = mappings.get_class(obfuscated_class)

first_byte_index = None

for metadata_attribute in metadata_item['data']:
obfuscated_field = metadata_attribute['field']
mojang_field = mappings.get_field(
obfuscated_class, obfuscated_field)
for metadata_attribute in metadata_item["data"]:
obfuscated_field = metadata_attribute["field"]
mojang_field = mappings.get_field(obfuscated_class, obfuscated_field)
pretty_mojang_name = prettify_mojang_field(mojang_field)
mapped_metadata_names[metadata_attribute['index']
] = pretty_mojang_name
mapped_metadata_names[metadata_attribute["index"]] = pretty_mojang_name

if metadata_attribute['serializer'] == 'Byte' and first_byte_index is None:
first_byte_index = metadata_attribute['index']
if (
metadata_attribute["serializer"] == "Byte"
and first_byte_index is None
):
first_byte_index = metadata_attribute["index"]

if metadata_item['bitfields'] and first_byte_index is not None:
if metadata_item["bitfields"] and first_byte_index is not None:
clean_bitfield = {}
for bitfield_item in metadata_item['bitfields']:
for bitfield_item in metadata_item["bitfields"]:
bitfield_item_obfuscated_class = bitfield_item.get(
'class', obfuscated_class)
"class", obfuscated_class
)
mojang_bitfield_item_name = mappings.get_method(
bitfield_item_obfuscated_class, bitfield_item['method'], '')
bitfield_item_obfuscated_class, bitfield_item["method"], ""
)
bitfield_item_name = prettify_mojang_method(
mojang_bitfield_item_name)
bitfield_hex_mask = hex(bitfield_item['mask'])
mojang_bitfield_item_name
)
bitfield_hex_mask = hex(bitfield_item["mask"])
clean_bitfield[bitfield_hex_mask] = bitfield_item_name
mapped_metadata_names[first_byte_index] = clean_bitfield
return mapped_metadata_names

@@ -652,14 +738,14 @@ def get_entity_metadata_names(entity_id: str, burger_entity_metadata: dict, mapp
def prettify_mojang_field(mojang_field: str):
# mojang names are like "DATA_AIR_SUPPLY" and that's ugly
better_name = mojang_field
if better_name.startswith('DATA_'):
if better_name.startswith("DATA_"):
better_name = better_name[5:]

# remove the weird "Id" from the end of names
if better_name.endswith('_ID'):
if better_name.endswith("_ID"):
better_name = better_name[:-3]
# remove the weird "id" from the front of names
if better_name.startswith('ID_'):
if better_name.startswith("ID_"):
better_name = better_name[3:]

return better_name.lower()

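# illustration with hypothetical inputs:
#   prettify_mojang_field("DATA_AIR_SUPPLY") -> "air_supply"
#   prettify_mojang_field("DATA_ITEM_ID") -> "item" (the trailing "_ID" is dropped)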
@@ -667,9 +753,8 @@ def prettify_mojang_field(mojang_field: str):

def prettify_mojang_method(mojang_method: str):
better_name = mojang_method
if better_name.endswith('()'):
if better_name.endswith("()"):
better_name = better_name[:-2]
if re.match(r'is[A-Z]', better_name):
if re.match(r"is[A-Z]", better_name):
better_name = better_name[2:]
return to_snake_case(better_name)

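# illustration with a hypothetical input: prettify_mojang_method("isBaby()")
# -> "baby" (the "()" suffix and the leading "is" are stripped, then snake_cased)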

@@ -1,25 +1,21 @@
from lib.utils import padded_hex, to_snake_case, to_camel_case, get_dir_location
from lib.code.utils import burger_type_to_rust_type, write_packet_file
from lib.mappings import Mappings
from typing import Any, Optional
import os
import re
from typing import Any
from lib.utils import to_camel_case, get_dir_location

# The directory where declare_menus! {} is done
inventory_menus_dir = get_dir_location(f'../azalea-inventory/src/lib.rs')
inventory_menus_dir = get_dir_location("../azalea-inventory/src/lib.rs")


def update_menus(initial_menu_entries: dict[str, Any]):
# new_menus is a dict of { menu_id: { "protocol_id": protocol_id } }
# so convert that into an array where the protocol id is the index and the
# values are enum variant names
new_menus: list[str] = [''] * len(initial_menu_entries)
new_menus: list[str] = [""] * len(initial_menu_entries)
for menu_id, menu in initial_menu_entries.items():
new_menus[menu['protocol_id']] = menu_name_to_enum_name(menu_id)
new_menus[menu["protocol_id"]] = menu_name_to_enum_name(menu_id)

new_menus.insert(0, 'Player')
new_menus.insert(0, "Player")

with open(inventory_menus_dir, 'r') as f:
with open(inventory_menus_dir, "r") as f:
menus_rs = f.read().splitlines()

start_line_index = 0

@@ -27,17 +23,17 @@ def update_menus(initial_menu_entries: dict[str, Any]):
current_menus = []
in_the_macro = False
for i, line in enumerate(menus_rs):
if line.startswith('declare_menus!'):
if line.startswith("declare_menus!"):
in_the_macro = True
start_line_index = i
if in_the_macro:
if line.startswith(' ') and line.endswith('{'):
if line.startswith(" ") and line.endswith("{"):
# get the variant name for this menu
current_menu = line[:-1].strip()
current_menus.append(current_menu)

print('current_menus', current_menus)
print('new_menus', new_menus)
print("current_menus", current_menus)
print("new_menus", new_menus)

# now we have the current menus, so compare that with the expected
# menus and update the file if needed

@@ -52,57 +48,84 @@ def update_menus(initial_menu_entries: dict[str, Any]):
if (
current_menus_list_index < len(current_menus)
and new_menus_list_index < len(new_menus)
and current_menus[current_menus_list_index] == new_menus[new_menus_list_index]
and current_menus[current_menus_list_index]
== new_menus[new_menus_list_index]
):
current_menus_list_index += 1
new_menus_list_index += 1
# increase insert_line_index until we get a line that starts with }
while not menus_rs[insert_line_index].strip().startswith('}'):
while not menus_rs[insert_line_index].strip().startswith("}"):
insert_line_index += 1
insert_line_index += 1
# print('same', current_menus_list_index,
# new_menus_list_index, insert_line_index)
# something was added to new_menus but not current_menus
elif new_menus_list_index < len(new_menus) and new_menus[new_menus_list_index] not in current_menus:
elif (
new_menus_list_index < len(new_menus)
and new_menus[new_menus_list_index] not in current_menus
):
# insert the new menu
menus_rs.insert(
insert_line_index, f' {new_menus[new_menus_list_index]} {{\n todo!()\n }},')
insert_line_index,
f" {new_menus[new_menus_list_index]} {{\n todo!()\n }},",
)
insert_line_index += 1
new_menus_list_index += 1
print('added', current_menus_list_index,
new_menus_list_index, insert_line_index)
print(
"added",
current_menus_list_index,
new_menus_list_index,
insert_line_index,
)
# something was removed from new_menus but is still in current_menus
elif current_menus_list_index < len(current_menus) and current_menus[current_menus_list_index] not in new_menus:
elif (
current_menus_list_index < len(current_menus)
and current_menus[current_menus_list_index] not in new_menus
):
# remove the current menu
while not menus_rs[insert_line_index].strip().startswith('}'):
while not menus_rs[insert_line_index].strip().startswith("}"):
menus_rs.pop(insert_line_index)
menus_rs.pop(insert_line_index)
current_menus_list_index += 1
print('removed', current_menus_list_index,
new_menus_list_index, insert_line_index)
print(
"removed",
current_menus_list_index,
new_menus_list_index,
insert_line_index,
)

# if current_menus_list_index overflowed, then add the rest of the new menus
elif current_menus_list_index >= len(current_menus):
for i in range(new_menus_list_index, len(new_menus)):
menus_rs.insert(
insert_line_index, f' {new_menus[i]} {{\n todo!()\n }},')
insert_line_index,
f" {new_menus[i]} {{\n todo!()\n }},",
)
insert_line_index += 1
print('current_menus_list_index overflowed', current_menus_list_index,
new_menus_list_index, insert_line_index)
print(
"current_menus_list_index overflowed",
current_menus_list_index,
new_menus_list_index,
insert_line_index,
)
break
# if new_menus_list_index overflowed, then remove the rest of the current menus
elif new_menus_list_index >= len(new_menus):
for _ in range(current_menus_list_index, len(current_menus)):
while not menus_rs[insert_line_index].strip().startswith('}'):
while not menus_rs[insert_line_index].strip().startswith("}"):
menus_rs.pop(insert_line_index)
menus_rs.pop(insert_line_index)
# current_menus_list_index += 1
print('new_menus_list_index overflowed', current_menus_list_index,
new_menus_list_index, insert_line_index)
print(
"new_menus_list_index overflowed",
current_menus_list_index,
new_menus_list_index,
insert_line_index,
)
break
with open(inventory_menus_dir, 'w') as f:
f.write('\n'.join(menus_rs))
with open(inventory_menus_dir, "w") as f:
f.write("\n".join(menus_rs))


def menu_name_to_enum_name(menu_name: str) -> str:
return to_camel_case(menu_name.split(':')[-1])
return to_camel_case(menu_name.split(":")[-1])
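# illustration with a hypothetical input, assuming to_camel_case capitalizes
# each underscore-separated part: menu_name_to_enum_name("minecraft:generic_9x3")
# -> "Generic9x3"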
@@ -1,15 +1,10 @@
import lib.code.inventory
import lib.code.registry
import lib.code.version
import lib.code.packet
import lib.code.utils
import lib.code.tags
import lib.download
import lib.extract
import lib.utils


ITEM_COMPONENTS_DIR = 'azalea-inventory/src/components.rs'
ITEM_COMPONENTS_DIR = "azalea-inventory/src/components.rs"


def generate(version_id: str):
expected_variants = get_expected_variants(version_id)

@@ -25,61 +20,65 @@ def generate(version_id: str):
if variant not in expected_variants:
removed_variants.append(variant)

print('New variants:')
print("New variants:")
for variant in new_variants:
print('-', variant)
print("-", variant)
print()
print('Removed variants:')
print("Removed variants:")
for variant in removed_variants:
print('-', variant)
print("-", variant)
print()

for variant in removed_variants:
print(f'Removing {variant}...')
print(f"Removing {variant}...")
remove_variant(variant)
for variant in new_variants:
print(f'Adding {variant}...')
print(f"Adding {variant}...")
add_variant(variant)

lib.code.utils.fmt()

print('Done!')
print("Done!")


def get_expected_variants(version_id: str):
expected_variants = []
registries = lib.extract.get_registries_report(version_id)

registry = registries['minecraft:data_component_type']
registry = registries["minecraft:data_component_type"]
registry_entries = sorted(
registry['entries'].items(), key=lambda x: x[1]['protocol_id'])
registry["entries"].items(), key=lambda x: x[1]["protocol_id"]
)
for variant_name, _variant in registry_entries:
variant_struct_name = lib.utils.to_camel_case(variant_name.split(':')[-1])
variant_struct_name = lib.utils.to_camel_case(variant_name.split(":")[-1])
expected_variants.append(variant_struct_name)

return expected_variants


def get_actual_variants():
actual_variants = []
with open(ITEM_COMPONENTS_DIR, 'r') as f:
code = f.read().split('\n')
with open(ITEM_COMPONENTS_DIR, "r") as f:
code = f.read().split("\n")

in_match = False
for line in code:
if in_match:
if line == ' })':
if line == " })":
break
variant_line_prefix = ' DataComponentKind::'
variant_line_prefix = " DataComponentKind::"
if line.startswith(variant_line_prefix):
variant = line[len(variant_line_prefix):].split(' ', 1)[0]
variant = line[len(variant_line_prefix) :].split(" ", 1)[0]
actual_variants.append(variant)
elif line == ' Ok(match kind {':
elif line == " Ok(match kind {":
in_match = True

return actual_variants


def remove_variant(variant: str):
with open(ITEM_COMPONENTS_DIR, 'r') as f:
code = f.read().split('\n')
with open(ITEM_COMPONENTS_DIR, "r") as f:
code = f.read().split("\n")

first_line_with_variant = None
line_after_variant = None

@@ -87,78 +86,83 @@ def remove_variant(variant: str):
in_match = False
for i, line in enumerate(list(code)):
if in_match:
if line == ' })':
if line == " })":
line_after_variant = i
break
variant_line_prefix = ' DataComponentKind::'
variant_line_prefix = " DataComponentKind::"
if line.startswith(variant_line_prefix):
if first_line_with_variant is not None:
line_after_variant = i
break
variant_name = line[len(variant_line_prefix):].split(' ', 1)[0]
variant_name = line[len(variant_line_prefix) :].split(" ", 1)[0]
if variant_name == variant:
first_line_with_variant = i
elif line == ' Ok(match kind {':
elif line == " Ok(match kind {":
in_match = True


if first_line_with_variant is None:
raise ValueError(f'Variant {variant} not found')
raise ValueError(f"Variant {variant} not found")
if line_after_variant is None:
raise ValueError(f'Couldn\'t find end of variant {variant}')
raise ValueError(f"Couldn't find end of variant {variant}")

code = code[:first_line_with_variant] + code[line_after_variant:]

# now remove the struct
line_before_struct = None # this is the #[derive] line
line_after_struct = None # impl DataComponent for ... {\n...\n}
line_before_struct = None  # this is the #[derive] line
line_after_struct = None  # impl DataComponent for ... {\n...\n}
for i, line in enumerate(list(code)):
if line == f'pub struct {variant} {{' or line == f'pub struct {variant};':
if line == f"pub struct {variant} {{" or line == f"pub struct {variant};":
line_before_struct = i - 1
elif line == f'impl DataComponent for {variant} {{':
elif line == f"impl DataComponent for {variant} {{":
line_after_struct = i + 3
break
if line_before_struct is None:
raise ValueError(f'Couldn\'t find struct {variant}')
raise ValueError(f"Couldn't find struct {variant}")
if line_after_struct is None:
raise ValueError(f'Couldn\'t find impl DataComponent for {variant}')

raise ValueError(f"Couldn't find impl DataComponent for {variant}")

code = code[:line_before_struct] + code[line_after_struct:]

with open(ITEM_COMPONENTS_DIR, 'w') as f:
f.write('\n'.join(code))
with open(ITEM_COMPONENTS_DIR, "w") as f:
f.write("\n".join(code))


def add_variant(variant: str):
with open(ITEM_COMPONENTS_DIR, 'r') as f:
code = f.read().split('\n')
with open(ITEM_COMPONENTS_DIR, "r") as f:
code = f.read().split("\n")

in_match = False
last_line_in_match = None
for i, line in enumerate(list(code)):
if in_match:
if line == ' })':
if line == " })":
last_line_in_match = i
break
elif line == ' Ok(match kind {':
elif line == " Ok(match kind {":
in_match = True

if last_line_in_match is None:
raise ValueError('Couldn\'t find end of match')

code = code[:last_line_in_match] + [
f' DataComponentKind::{variant} => Box::new({variant}::azalea_read(buf)?),',
] + code[last_line_in_match:]
raise ValueError("Couldn't find end of match")

code = (
code[:last_line_in_match]
+ [
f" DataComponentKind::{variant} => Box::new({variant}::azalea_read(buf)?),",
]
+ code[last_line_in_match:]
)

# now insert the struct
code.append('')
code.append('#[derive(Clone, PartialEq, AzBuf)]')
code.append(f'pub struct {variant} {{')
code.append(' pub todo: todo!(), // see DataComponents.java')
code.append('}')
code.append(f'impl DataComponent for {variant} {{')
code.append(f' const KIND: DataComponentKind = DataComponentKind::{variant};')
code.append('}')
code.append("")
code.append("#[derive(Clone, PartialEq, AzBuf)]")
code.append(f"pub struct {variant} {{")
code.append(" pub todo: todo!(), // see DataComponents.java")
code.append("}")
code.append(f"impl DataComponent for {variant} {{")
code.append(f" const KIND: DataComponentKind = DataComponentKind::{variant};")
code.append("}")

with open(ITEM_COMPONENTS_DIR, 'w') as f:
f.write('\n'.join(code))
with open(ITEM_COMPONENTS_DIR, "w") as f:
f.write("\n".join(code))

lib.code.utils.fmt()

@@ -1,8 +1,9 @@
from lib.utils import get_dir_location
import json

LANGUAGE_DIR = get_dir_location('../azalea-language/src/en_us.json')
LANGUAGE_DIR = get_dir_location("../azalea-language/src/en_us.json")


def write_language(contents: dict):
with open(LANGUAGE_DIR, 'w') as f:
f.write(json.dumps(contents, indent=' '))
with open(LANGUAGE_DIR, "w") as f:
f.write(json.dumps(contents, indent=" "))

@@ -8,92 +8,106 @@ import re

MOJMAP_TO_AZALEA_STATE_NAME_MAPPING = {
# shorter name, i like it more
'configuration': 'config',
"configuration": "config",
# in the files mojang calls the directory "game" so we do that too
'play': 'game'
"play": "game",
}
AZALEA_TO_MOJMAP_STATE_NAME_MAPPING = {
v: k for k, v in MOJMAP_TO_AZALEA_STATE_NAME_MAPPING.items()
}
AZALEA_TO_MOJMAP_STATE_NAME_MAPPING = {v: k for k, v in MOJMAP_TO_AZALEA_STATE_NAME_MAPPING.items()}

PACKETS_DIR = '../azalea-protocol/src/packets'
PACKETS_DIR = "../azalea-protocol/src/packets"


def generate_packet(packets_report, packet_name, direction, state):
mojmap_state = AZALEA_TO_MOJMAP_STATE_NAME_MAPPING.get(state, state)
_packet_report = packets_report[mojmap_state][direction]['minecraft:' + packet_name]
_packet_report = packets_report[mojmap_state][direction]["minecraft:" + packet_name]

code = []
uses = set()

packet_derive_name = f'{to_camel_case(direction)}{to_camel_case(state)}Packet'
packet_derive_name = f"{to_camel_case(direction)}{to_camel_case(state)}Packet"

packet_struct_name = to_camel_case(f'{direction}_{packet_name}')
packet_struct_name = to_camel_case(f"{direction}_{packet_name}")
packet_module_name = get_packet_module_name(packet_name, direction)

code.append(f'use azalea_buf::AzBuf;')
code.append(f'use azalea_protocol_macros::{packet_derive_name};')
code.append('')

code.append(
f'#[derive(Clone, Debug, AzBuf, {packet_derive_name})]')
code.append(
f'pub struct {packet_struct_name} {{')
code.append(' TODO')
code.append('}')
code.append("use azalea_buf::AzBuf;")
code.append(f"use azalea_protocol_macros::{packet_derive_name};")
code.append("")

code.append(f"#[derive(Clone, Debug, AzBuf, {packet_derive_name})]")
code.append(f"pub struct {packet_struct_name} {{")
code.append(" TODO")
code.append("}")

print(code)
write_packet_file(state, packet_module_name, '\n'.join(code))
write_packet_file(state, packet_module_name, "\n".join(code))

# this won't handle writing to the packets/{state}/mod.rs file since we'd need to know the full packet list

def get_packet_module_name(packet_name: str, direction: str):
return f'{direction[0]}_{packet_name}'

def set_packets(packets_report):
def get_packet_module_name(packet_name: str, direction: str):
return f"{direction[0]}_{packet_name}"

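# illustration with hypothetical arguments:
#   get_packet_module_name("add_entity", "clientbound") -> "c_add_entity"
# (set_packets later relies on this "c_"/"s_" prefix to recover the direction)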

def set_packets(packets_report):
for mojmap_state in packets_report:
state = MOJMAP_TO_AZALEA_STATE_NAME_MAPPING.get(mojmap_state, mojmap_state)

expected_packet_module_names = set()
state_dir = get_dir_location(f'{PACKETS_DIR}/{state}')
mod_rs_dir = get_dir_location(f'{state_dir}/mod.rs')
state_dir = get_dir_location(f"{PACKETS_DIR}/{state}")
mod_rs_dir = get_dir_location(f"{state_dir}/mod.rs")

serverbound_packets = packet_direction_report_to_packet_names(packets_report[mojmap_state]['serverbound'])
clientbound_packets = packet_direction_report_to_packet_names(packets_report[mojmap_state].get('clientbound', {}))
serverbound_packets = packet_direction_report_to_packet_names(
packets_report[mojmap_state]["serverbound"]
)
clientbound_packets = packet_direction_report_to_packet_names(
packets_report[mojmap_state].get("clientbound", {})
)

code = []
code.append('// NOTE: This file is generated automatically by codegen/packet.py.')
code.append(
"// NOTE: This file is @generated automatically by codegen/packet.py."
)
code.append("// Don't edit it directly!")
code.append('')
code.append('use azalea_protocol_macros::declare_state_packets;')
code.append('')
code.append(f'declare_state_packets!({to_camel_case(state)}Packet,')
code.append(' Clientbound => [')
code.append("")
code.append("use azalea_protocol_macros::declare_state_packets;")
code.append("")
code.append(f"declare_state_packets!({to_camel_case(state)}Packet,")
code.append(" Clientbound => [")
for packet_id, packet_name in enumerate(clientbound_packets):
code.append(f' {packet_name}, // {padded_hex(packet_id)}')
expected_packet_module_names.add(get_packet_module_name(packet_name, 'clientbound'))
code.append(' ],')
code.append(' Serverbound => [')
code.append(f" {packet_name}, // {padded_hex(packet_id)}")
expected_packet_module_names.add(
get_packet_module_name(packet_name, "clientbound")
)
code.append(" ],")
code.append(" Serverbound => [")
for packet_id, packet_name in enumerate(serverbound_packets):
code.append(f' {packet_name}, // {padded_hex(packet_id)}')
expected_packet_module_names.add(get_packet_module_name(packet_name, 'serverbound'))
code.append(' ]')
code.append(');')
code.append('')
code.append(f" {packet_name}, // {padded_hex(packet_id)}")
expected_packet_module_names.add(
get_packet_module_name(packet_name, "serverbound")
)
code.append(" ]")
code.append(");")
code.append("")

with open(mod_rs_dir, 'w') as f:
f.write('\n'.join(code))
with open(mod_rs_dir, "w") as f:
f.write("\n".join(code))

existing_packet_module_names = set()
# iterate over the directory
for file in os.listdir(state_dir):
if file.endswith('.rs') and file != 'mod.rs':
existing_packet_module_names.add(file[:-len('.rs')])
for packet_module_name in expected_packet_module_names - existing_packet_module_names:
if file.endswith(".rs") and file != "mod.rs":
existing_packet_module_names.add(file[: -len(".rs")])
for packet_module_name in (
expected_packet_module_names - existing_packet_module_names
):
direction = None
if packet_module_name.startswith('c_'):
direction = 'clientbound'
elif packet_module_name.startswith('s_'):
direction = 'serverbound'
if packet_module_name.startswith("c_"):
direction = "clientbound"
elif packet_module_name.startswith("s_"):
direction = "serverbound"
else:
raise Exception(f'Invalid packet module name: {packet_module_name}')
raise Exception(f"Invalid packet module name: {packet_module_name}")
packet = packet_module_name[2:]
generate_packet(packets_report, packet, direction, state)


@@ -101,15 +115,16 @@ def set_packets(packets_report):
def packet_direction_report_to_packet_names(report):
name_to_id = {}
for resource_location, packet in report.items():
packet_id = packet['protocol_id']
name_to_id[resource_location.split(':')[-1]] = packet_id

packet_id = packet["protocol_id"]
name_to_id[resource_location.split(":")[-1]] = packet_id

names_sorted = [name for name in sorted(name_to_id, key=lambda x: name_to_id[x])]
return names_sorted

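# illustration with a hypothetical report:
#   {"minecraft:bundle": {"protocol_id": 0}, "minecraft:add_entity": {"protocol_id": 1}}
# -> ["bundle", "add_entity"], i.e. names ordered by protocol id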

def get_packets(direction: str, state: str):
mod_rs_dir = get_dir_location(f'{PACKETS_DIR}/{state}/mod.rs')
with open(mod_rs_dir, 'r') as f:
mod_rs_dir = get_dir_location(f"{PACKETS_DIR}/{state}/mod.rs")
with open(mod_rs_dir, "r") as f:
mod_rs = f.read().splitlines()

in_serverbound = False

@@ -119,90 +134,125 @@ def get_packets(direction: str, state: str):
packet_class_names: list[str] = []

for line in mod_rs:
if line.strip() == 'Serverbound => {':
if line.strip() == "Serverbound => {":
in_serverbound = True
continue
elif line.strip() == 'Clientbound => {':
elif line.strip() == "Clientbound => {":
in_clientbound = True
continue
elif line.strip() in ('}', '},'):
if (in_serverbound and direction == 'serverbound') or (in_clientbound and direction == 'clientbound'):
elif line.strip() in ("}", "},"):
if (in_serverbound and direction == "serverbound") or (
in_clientbound and direction == "clientbound"
):
break
in_serverbound = in_clientbound = False
continue

if line.strip() == '' or line.strip().startswith('//') or (not in_serverbound and direction == 'serverbound') or (not in_clientbound and direction == 'clientbound'):
if (
line.strip() == ""
or line.strip().startswith("//")
or (not in_serverbound and direction == "serverbound")
or (not in_clientbound and direction == "clientbound")
):
continue

line_packet_id_hex = line.strip().split(':')[0]
assert line_packet_id_hex.startswith('0x')
line_packet_id_hex = line.strip().split(":")[0]
assert line_packet_id_hex.startswith("0x")
line_packet_id = int(line_packet_id_hex[2:], 16)
packet_ids.append(line_packet_id)

packet_class_name = line.strip().split(':')[1].strip()
packet_class_name = line.strip().split(":")[1].strip()
packet_class_names.append(packet_class_name)

return packet_ids, packet_class_names


def burger_instruction_to_code(instructions: list[dict], index: int, generated_packet_code: list[str], mappings: Mappings, obfuscated_class_name: str, uses: set, extra_code: list[str], known_variable_types={}) -> Optional[int]:
'''
def burger_instruction_to_code(
instructions: list[dict],
index: int,
generated_packet_code: list[str],
mappings: Mappings,
obfuscated_class_name: str,
uses: set,
extra_code: list[str],
known_variable_types={},
) -> Optional[int]:
"""
Generate a field for an instruction, returns the number of instructions to skip (if any).
'''
"""
instruction = instructions[index]
next_instruction = instructions[index +
1] if index + 1 < len(instructions) else None
next_next_instruction = instructions[index +
2] if index + 2 < len(instructions) else None
next_instruction = (
instructions[index + 1] if index + 1 < len(instructions) else None
)
next_next_instruction = (
instructions[index + 2] if index + 2 < len(instructions) else None
)

is_var = False
skip = 0
field_type_rs = None
field_comment = None

print('instruction', instruction, next_instruction, next_next_instruction)
print("instruction", instruction, next_instruction, next_next_instruction)

# iterators
if instruction['operation'] == 'write'\
and instruction['field'].endswith('.size()')\
and next_instruction\
and next_instruction['type'] == 'Iterator'\
and next_next_instruction\
and next_next_instruction['operation'] == 'loop':
obfuscated_field_name = instruction['field'].split('.')[0]
field_name = mappings.get_field(
obfuscated_class_name, obfuscated_field_name)
if (
instruction["operation"] == "write"
and instruction["field"].endswith(".size()")
and next_instruction
and next_instruction["type"] == "Iterator"
and next_next_instruction
and next_next_instruction["operation"] == "loop"
):
obfuscated_field_name = instruction["field"].split(".")[0]
field_name = mappings.get_field(obfuscated_class_name, obfuscated_field_name)

# figure out what kind of iterator it is
loop_instructions = next_next_instruction['instructions']
loop_instructions = next_next_instruction["instructions"]
if len(loop_instructions) == 2:
entry_type_rs, is_var, value_uses, extra_code = burger_type_to_rust_type(
loop_instructions[1]['type'], None, loop_instructions[1], mappings, obfuscated_class_name)
field_type_rs = f'Vec<{entry_type_rs}>'
loop_instructions[1]["type"],
None,
loop_instructions[1],
mappings,
obfuscated_class_name,
)
field_type_rs = f"Vec<{entry_type_rs}>"
uses.update(value_uses)
elif len(loop_instructions) == 3:
is_map = loop_instructions[0]['type'].startswith(
'Map.Entry<')
is_map = loop_instructions[0]["type"].startswith("Map.Entry<")
if is_map:
assert loop_instructions[1]['field'].endswith(
'.getKey()')
assert loop_instructions[2]['field'].endswith(
'.getValue()')
assert loop_instructions[1]["field"].endswith(".getKey()")
assert loop_instructions[2]["field"].endswith(".getValue()")

# generate the type for the key
key_type_rs, is_key_var, key_uses, key_extra_code = burger_type_to_rust_type(
loop_instructions[1]['type'], None, loop_instructions[1], mappings, obfuscated_class_name)
key_type_rs, is_key_var, key_uses, key_extra_code = (
burger_type_to_rust_type(
loop_instructions[1]["type"],
None,
loop_instructions[1],
mappings,
obfuscated_class_name,
)
)
uses.update(key_uses)
extra_code.extend(key_extra_code)

# generate the type for the value
value_type_rs, is_value_var, value_uses, value_extra_code = burger_type_to_rust_type(
loop_instructions[2]['type'], None, loop_instructions[2], mappings, obfuscated_class_name)
value_type_rs, is_value_var, value_uses, value_extra_code = (
burger_type_to_rust_type(
loop_instructions[2]["type"],
None,
loop_instructions[2],
mappings,
obfuscated_class_name,
)
)
uses.update(value_uses)
extra_code.extend(value_extra_code)

field_type_rs = f'HashMap<{key_type_rs}, {value_type_rs}>'
uses.add('std::collections::HashMap')
field_type_rs = f"HashMap<{key_type_rs}, {value_type_rs}>"
uses.add("std::collections::HashMap")

# only the key is var since the value can be made var in other ways
is_var = is_key_var

@@ -210,160 +260,206 @@ def burger_instruction_to_code(instructions: list[dict], index: int, generated_p
skip = 2 # skip the next 2 instructions

# Option<T>
elif instruction['operation'] == 'write' and (instruction['field'].endswith('.isPresent()') or instruction['field'].endswith(' != null')) and next_instruction and (next_instruction.get('condition', '').endswith('.isPresent()') or next_instruction.get('condition', '').endswith(' != null')):
print('ok is option')
obfuscated_field_name = instruction['field'].split('.')[
0].split(' ')[0]

elif (
instruction["operation"] == "write"
and (
instruction["field"].endswith(".isPresent()")
or instruction["field"].endswith(" != null")
)
and next_instruction
and (
next_instruction.get("condition", "").endswith(".isPresent()")
or next_instruction.get("condition", "").endswith(" != null")
)
):
print("ok is option")
obfuscated_field_name = instruction["field"].split(".")[0].split(" ")[0]

if obfuscated_field_name in known_variable_types:
# just use the known name since it's not gonna be in the mappings
obfuscated_field_name = known_variable_types[obfuscated_field_name]

field_name = mappings.get_field(
obfuscated_class_name, obfuscated_field_name)

if field_name is None: field_name = obfuscated_field_name.split('/')[-1]
if '<' in field_name:
field_name = 'value'
field_name = mappings.get_field(obfuscated_class_name, obfuscated_field_name)

condition_instructions = next_instruction['instructions']
if field_name is None:
field_name = obfuscated_field_name.split("/")[-1]
if "<" in field_name:
field_name = "value"

condition_instructions = next_instruction["instructions"]

condition_types_rs = []
for condition_instruction in condition_instructions:
print('condition_instruction', condition_instruction)
if 'type' not in condition_instruction:
print("condition_instruction", condition_instruction)
if "type" not in condition_instruction:
# weird type, maybe it's a loop or something
condition_types_rs.append('todo!("weird type, maybe it\'s a loop or something")')
condition_types_rs.append(
'todo!("weird type, maybe it\'s a loop or something")'
)
continue
condition_type_rs, is_var, this_uses, this_extra_code = burger_type_to_rust_type(
condition_instruction['type'], None, condition_instruction, mappings, obfuscated_class_name)
condition_type_rs, is_var, this_uses, this_extra_code = (
burger_type_to_rust_type(
condition_instruction["type"],
None,
condition_instruction,
mappings,
obfuscated_class_name,
)
)
condition_types_rs.append(condition_type_rs)
uses.update(this_uses)
extra_code.extend(this_extra_code)
field_type_rs = f'Option<({", ".join(condition_types_rs)})>' if len(
condition_types_rs) != 1 else f'Option<{condition_types_rs[0]}>'
field_type_rs = (
f"Option<({', '.join(condition_types_rs)})>"
if len(condition_types_rs) != 1
else f"Option<{condition_types_rs[0]}>"
)
skip = 1
else:
field_type = instruction['type']
obfuscated_field_name = instruction['field']
field_type = instruction["type"]
obfuscated_field_name = instruction["field"]

if obfuscated_field_name.startswith('(float)'):
obfuscated_field_name = obfuscated_field_name[len('(float)'):]
if obfuscated_field_name.startswith("(float)"):
obfuscated_field_name = obfuscated_field_name[len("(float)") :]

field_name = mappings.get_field(
obfuscated_class_name, obfuscated_field_name) or mappings.get_field(
obfuscated_class_name.split('$')[0], obfuscated_field_name)
obfuscated_class_name, obfuscated_field_name
) or mappings.get_field(
obfuscated_class_name.split("$")[0], obfuscated_field_name
)

field_type_rs, is_var, instruction_uses, instruction_extra_code = burger_type_to_rust_type(
field_type, field_name, instruction, mappings, obfuscated_class_name)
field_type_rs, is_var, instruction_uses, instruction_extra_code = (
burger_type_to_rust_type(
field_type, field_name, instruction, mappings, obfuscated_class_name
)
)

if obfuscated_field_name in known_variable_types:
# just use the known name since it's not gonna be in the mappings
field_name = obfuscated_field_name

elif '.' in obfuscated_field_name or ' ' in obfuscated_field_name or '(' in obfuscated_field_name:
elif (
"." in obfuscated_field_name
or " " in obfuscated_field_name
or "(" in obfuscated_field_name
):
field_type_rs2, obfuscated_field_name, field_comment = burger_field_to_type(
obfuscated_field_name, mappings, obfuscated_class_name, known_variable_types)
obfuscated_field_name,
mappings,
obfuscated_class_name,
known_variable_types,
)
if not field_type_rs2:
generated_packet_code.append(f'// TODO: {instruction}')
generated_packet_code.append(f"// TODO: {instruction}")
return
if obfuscated_field_name in known_variable_types:
# just use the known name since it's not gonna be in the mappings
obfuscated_field_name = known_variable_types[obfuscated_field_name]
print('got obfuscated_field_name', obfuscated_field_name)
print("got obfuscated_field_name", obfuscated_field_name)

# try to get the field name again with the new stuff we know
field_name = mappings.get_field(
obfuscated_class_name, obfuscated_field_name) or mappings.get_field(
obfuscated_class_name.split('$')[0], obfuscated_field_name)
obfuscated_class_name, obfuscated_field_name
) or mappings.get_field(
obfuscated_class_name.split("$")[0], obfuscated_field_name
)
if field_name is None:
field_name = obfuscated_field_name.split('/')[-1]
field_name = obfuscated_field_name.split("/")[-1]
uses.update(instruction_uses)
extra_code.extend(instruction_extra_code)

if not field_name:
generated_packet_code.append(
f'// TODO: unknown field {instruction}')
generated_packet_code.append(f"// TODO: unknown field {instruction}")
return skip

if is_var:
generated_packet_code.append('#[var]')
line = f'pub {to_snake_case(field_name)}: {field_type_rs or "todo!()"},'
generated_packet_code.append("#[var]")
line = f"pub {to_snake_case(field_name)}: {field_type_rs or 'todo!()'},"
if field_comment:
line += f' // {field_comment}'
line += f" // {field_comment}"
generated_packet_code.append(line)

return skip


def burger_field_to_type(field, mappings: Mappings, obfuscated_class_name: str, known_variable_types={}) -> tuple[Optional[str], str, Optional[str]]:
'''
def burger_field_to_type(
field, mappings: Mappings, obfuscated_class_name: str, known_variable_types={}
) -> tuple[Optional[str], str, Optional[str]]:
"""
Returns field_type_rs, obfuscated_field_name, field_comment
'''
"""
# match `(x) ? 1 : 0`
match = re.match(r'\((.*)\) \? 1 : 0', field)
match = re.match(r"\((.*)\) \? 1 : 0", field)
if match:
return ('bool', match.group(1), None)
match = re.match(r'^\w+\.\w+\(\)$', field)
return ("bool", match.group(1), None)
match = re.match(r"^\w+\.\w+\(\)$", field)
if match:
print('field', field)
obfuscated_first = field.split('.')[0]
obfuscated_second = field.split('.')[1].split('(')[0]
print("field", field)
obfuscated_first = field.split(".")[0]
obfuscated_second = field.split(".")[1].split("(")[0]
# first = mappings.get_field(obfuscated_class_name, obfuscated_first)
if obfuscated_first in known_variable_types:
first_type = known_variable_types[obfuscated_first]
else:
try:
first_type = mappings.get_field_type(
obfuscated_class_name, obfuscated_first)
except:
first_type = 'TODO'
first_obfuscated_class_name: Optional[str] = mappings.get_class_from_deobfuscated_name(
first_type)
obfuscated_class_name, obfuscated_first
)
except Exception:
first_type = "TODO"
first_obfuscated_class_name: Optional[str] = (
mappings.get_class_from_deobfuscated_name(first_type)
)
if first_obfuscated_class_name:
try:
second = mappings.get_method(
first_obfuscated_class_name, obfuscated_second, '')
except:
first_obfuscated_class_name, obfuscated_second, ""
)
except Exception:
# if this happens then the field is probably from a super class
second = obfuscated_second
else:
second = obfuscated_second
first_type_short = first_type.split('.')[-1]
if second in {'byteValue'}:
first_type_short = first_type.split(".")[-1]
if second in {"byteValue"}:
return (first_type_short, obfuscated_first, None)
return (first_type_short, obfuscated_first, f'TODO: Does {first_type_short}::{second}, may not be implemented')
return (
first_type_short,
obfuscated_first,
f"TODO: Does {first_type_short}::{second}, may not be implemented",
)
return None, field, None


def change_packet_ids(id_map: dict[int, int], direction: str, state: str):
existing_packet_ids, existing_packet_class_names = get_packets(
direction, state)
existing_packet_ids, existing_packet_class_names = get_packets(direction, state)

new_packet_ids = []

for packet_id in existing_packet_ids:
new_packet_id = id_map.get(packet_id, packet_id)
if new_packet_id in new_packet_ids:
raise Exception('Two packets have the same id')
raise Exception("Two packets have the same id")
new_packet_ids.append(new_packet_id)

set_packets(new_packet_ids, existing_packet_class_names, direction, state)

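# illustration with hypothetical ids: change_packet_ids({0x10: 0x11, 0x11: 0x10},
# "clientbound", "game") would swap the ids of two packets; any collision in the
# resulting ids raises instead of silently overwriting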

def remove_packet_ids(removing_packet_ids: list[int], direction: str, state: str):
existing_packet_ids, existing_packet_class_names = get_packets(
direction, state)
existing_packet_ids, existing_packet_class_names = get_packets(direction, state)

new_packet_ids = []
new_packet_class_names = []

for packet_id, packet_class_name in zip(existing_packet_ids, existing_packet_class_names):
for packet_id, packet_class_name in zip(
existing_packet_ids, existing_packet_class_names
):
if packet_id in removing_packet_ids:
try:
os.remove(
f'../azalea-protocol/src/packets/{state}/{packet_class_name}.rs')
except:
f"../azalea-protocol/src/packets/{state}/{packet_class_name}.rs"
)
except Exception:
pass
else:
new_packet_ids.append(packet_id)

@@ -380,18 +476,22 @@ def are_packet_instructions_identical(old_packet, new_packet):
return False

for old_field, new_field in zip(old_packet, new_packet):
if old_field['operation'] != new_field['operation']:
if old_field["operation"] != new_field["operation"]:
return False
if new_field['operation'] == 'write':
if burger_type_to_rust_type(old_field.get('type')) != burger_type_to_rust_type(new_field.get('type')):
if new_field["operation"] == "write":
if burger_type_to_rust_type(
old_field.get("type")
) != burger_type_to_rust_type(new_field.get("type")):
return False
else:
# comparing is too complicated here since it's possible the type has variables
# so we just don't
pass

if 'instructions' in old_field and 'instructions' in new_field:
if not are_packet_instructions_identical(old_field['instructions'], new_field['instructions']):
if "instructions" in old_field and "instructions" in new_field:
if not are_packet_instructions_identical(
old_field["instructions"], new_field["instructions"]
):
return False

return True

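# illustration with hypothetical burger output: two packets compare as identical
# when their instruction lists match pairwise, e.g.
#   [{"operation": "write", "type": "varint"}] vs
#   [{"operation": "write", "type": "varint"}] -> True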
@@ -1,13 +1,11 @@
from lib.utils import to_snake_case, upper_first_letter, get_dir_location, to_camel_case
from ..mappings import Mappings
from typing import Optional
import re
from lib.utils import get_dir_location, to_camel_case

REGISTRIES_DIR = get_dir_location("../azalea-registry/src/lib.rs")

REGISTRIES_DIR = get_dir_location('../azalea-registry/src/lib.rs')

def generate_registries(registries: dict):
with open(REGISTRIES_DIR, 'r') as f:
code = f.read().split('\n')
with open(REGISTRIES_DIR, "r") as f:
code = f.read().split("\n")

existing_registry_enum_names = set()


@@ -17,19 +15,20 @@ def generate_registries(registries: dict):
# Stone => "minecraft:stone"
# });

registry_name = registry_name.split(':')[1]
registry_name = registry_name.split(":")[1]
registry_enum_name = registry_name_to_enum_name(registry_name)

existing_registry_enum_names.add(registry_enum_name)

registry_code = []
registry_code.append(f'enum {registry_enum_name} {{')
registry_code.append(f"enum {registry_enum_name} {{")
registry_entries = sorted(
registry['entries'].items(), key=lambda x: x[1]['protocol_id'])
registry["entries"].items(), key=lambda x: x[1]["protocol_id"]
)
for variant_name, _variant in registry_entries:
variant_struct_name = to_camel_case(variant_name.split(':')[-1])
variant_struct_name = to_camel_case(variant_name.split(":")[-1])
registry_code.append(f'\t{variant_struct_name} => "{variant_name}",')
registry_code.append('}')
registry_code.append("}")

# when we find a "registry! {" line, find the next line that starts
# with "enum <name>" and replace that until we find a line that's "}"

@@ -40,28 +39,28 @@ def generate_registries(registries: dict):
in_registry_macro = True
elif in_registry_macro and line == registry_code[0]:
# found it, now delete until we get to "}"
while code[i] != '}':
while code[i] != "}":
code.pop(i)
code[i] = '\n'.join(registry_code)
code[i] = "\n".join(registry_code)
found = True
break
if not found:
code.append('registry! {')
code.append('\n'.join(registry_code))
code.append('}')
code.append('')
code.append("registry! {")
code.append("\n".join(registry_code))
code.append("}")
code.append("")

# delete the unused registries
i = 0
while i < len(code):
if code[i] == 'registry! {':
if code[i] == "registry! {":
# skip until we get to the enum line
while not code[i].startswith('enum '):
while not code[i].startswith("enum "):
i += 1
enum_name = code[i].split(' ')[1]
enum_name = code[i].split(" ")[1]
if enum_name not in existing_registry_enum_names:
i -= 1
while code[i] != '}':
while code[i] != "}":
code.pop(i)
code.pop(i)
# close the registry! block

@@ -69,17 +68,18 @@ def generate_registries(registries: dict):
else:
i += 1

with open(REGISTRIES_DIR, 'w') as f:
f.write('\n'.join(code))
with open(REGISTRIES_DIR, "w") as f:
f.write("\n".join(code))


def registry_name_to_enum_name(registry_name: str) -> str:
registry_name = registry_name.split(':')[-1]
registry_name = registry_name.split(":")[-1]

if registry_name.endswith('_type'):
if registry_name.endswith("_type"):
# change _type to _kind because that's Rustier (and because _type
# is a reserved keyword)
registry_name = registry_name[:-5] + '_kind'
elif registry_name in {'menu'}:
registry_name += '_kind'
registry_name = registry_name[:-5] + "_kind"
elif registry_name in {"menu"}:
registry_name += "_kind"

return to_camel_case(registry_name)

|
|
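`generate_registries` edits the generated Rust in place: it finds the regenerated block's first line inside a `registry! {` section, deletes up to the closing `}`, and splices the new body in. A small self-contained sketch of that splice (sample lines invented):

```python
def splice_block(code: list[str], header: str, new_block: list[str]) -> list[str]:
    """Replace the block starting at `header` through its closing '}'."""
    i = code.index(header)
    while code[i] != "}":
        code.pop(i)
    code[i : i + 1] = new_block  # replace the '}' line with the new block
    return code


code = ["registry! {", "enum Block {", '\tStone => "minecraft:stone",', "}", "}"]
new = ["enum Block {", '\tStone => "minecraft:stone",', '\tDirt => "minecraft:dirt",', "}"]
assert splice_block(code, "enum Block {", new) == ["registry! {"] + new + ["}"]
```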
@@ -1,46 +1,40 @@
-from lib.utils import get_dir_location, to_camel_case
-from ..mappings import Mappings
+from lib.utils import get_dir_location

-COLLISION_BLOCKS_RS_DIR = get_dir_location(
-    '../azalea-physics/src/collision/blocks.rs')
+COLLISION_BLOCKS_RS_DIR = get_dir_location("../azalea-physics/src/collision/blocks.rs")


 def generate_block_shapes(pumpkin_block_datas: dict, block_states_report):
     blocks, shapes = simplify_shapes(pumpkin_block_datas)

     code = generate_block_shapes_code(blocks, shapes, block_states_report)
-    with open(COLLISION_BLOCKS_RS_DIR, 'w') as f:
+    with open(COLLISION_BLOCKS_RS_DIR, "w") as f:
         f.write(code)


 def simplify_shapes(blocks: dict) -> tuple[dict, dict]:
-    '''
+    """
     Returns new_blocks and new_shapes,
     where new_blocks is like { grass_block: { collision: [1, 1], outline: [1, 1] } }
     and new_shapes is like { 1: [ [0, 0, 0, 1, 1, 1] ] }
-    '''
+    """
     new_blocks = {}
     new_shapes = {}

     all_shapes_ids = {}

-    for block_data in blocks['blocks']:
+    for block_data in blocks["blocks"]:
         new_block_collision_shapes = []
         new_block_outline_shapes = []

-        for state in block_data['states']:
+        for state in block_data["states"]:
             collision_shape = []
-            for box_id in state['collision_shapes']:
-                box = blocks['shapes'][box_id]
-                collision_shape.append(
-                    tuple(box['min'] + box['max'])
-                )
+            for box_id in state["collision_shapes"]:
+                box = blocks["shapes"][box_id]
+                collision_shape.append(tuple(box["min"] + box["max"]))
             outline_shape = []
-            for box_id in state['outline_shapes']:
-                box = blocks['shapes'][box_id]
-                outline_shape.append(
-                    tuple(box['min'] + box['max'])
-                )
+            for box_id in state["outline_shapes"]:
+                box = blocks["shapes"][box_id]
+                outline_shape.append(tuple(box["min"] + box["max"]))

             collision_shape = tuple(collision_shape)
             outline_shape = tuple(outline_shape)
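The surrounding `simplify_shapes` deduplicates shape tuples through the `all_shapes_ids` map so identical shapes share one id. A minimal sketch of that interning pattern; the id-allocation scheme here is an assumption for illustration:

```python
def intern(value: tuple, ids: dict) -> int:
    """Return a stable id for `value`, allocating a new one on first sight."""
    if value not in ids:
        ids[value] = len(ids)
    return ids[value]


shape_ids: dict[tuple, int] = {}
a = intern(((0, 0, 0, 1, 1, 1),), shape_ids)  # full cube -> id 0
b = intern(((0, 0, 0, 1, 1, 1),), shape_ids)  # same tuple -> same id
assert a == b == 0
```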
@@ -58,27 +52,25 @@ def simplify_shapes(blocks: dict) -> tuple[dict, dict]:
                 all_shapes_ids[outline_shape] = outline_shape_id
                 new_shapes[outline_shape_id] = outline_shape

-            block_id = block_data['name']
+            block_id = block_data["name"]
             new_block_collision_shapes.append(collision_shape_id)
             new_block_outline_shapes.append(outline_shape_id)

         new_blocks[block_id] = {
-            'collision': new_block_collision_shapes,
-            'outline': new_block_outline_shapes
+            "collision": new_block_collision_shapes,
+            "outline": new_block_outline_shapes,
         }

     return new_blocks, new_shapes


 def generate_block_shapes_code(blocks: dict, shapes: dict, block_states_report):
     # look at __cache__/generator-mod-*/blockCollisionShapes.json for format of blocks and shapes

-    generated_shape_code = ''
-    for (shape_id, shape) in sorted(shapes.items(), key=lambda shape: int(shape[0])):
+    generated_shape_code = ""
+    for shape_id, shape in sorted(shapes.items(), key=lambda shape: int(shape[0])):
         generated_shape_code += generate_code_for_shape(shape_id, shape)

     # static COLLISION_SHAPES_MAP: [&LazyLock<VoxelShape>; 26644] = [&SHAPE0, &SHAPE1, &SHAPE1, ...]
     empty_shapes = []
     full_shapes = []

@@ -88,48 +80,56 @@ def generate_block_shapes_code(blocks: dict, shapes: dict, block_states_report):
     outline_shapes_map = []

     for block_id, shape_datas in blocks.items():
-        collision_shapes = shape_datas['collision']
-        outline_shapes = shape_datas['outline']
+        collision_shapes = shape_datas["collision"]
+        outline_shapes = shape_datas["outline"]

-        if isinstance(collision_shapes, int): collision_shapes = [collision_shapes]
-        if isinstance(outline_shapes, int): outline_shapes = [outline_shapes]
+        if isinstance(collision_shapes, int):
+            collision_shapes = [collision_shapes]
+        if isinstance(outline_shapes, int):
+            outline_shapes = [outline_shapes]

-        block_report_data = block_states_report['minecraft:' + block_id]
+        block_report_data = block_states_report["minecraft:" + block_id]

-        for possible_state, shape_id in zip(block_report_data['states'], collision_shapes):
-            block_state_id = possible_state['id']
-            if shape_id == 0: empty_shapes.append(block_state_id)
-            elif shape_id == 1: full_shapes.append(block_state_id)
+        for possible_state, shape_id in zip(
+            block_report_data["states"], collision_shapes
+        ):
+            block_state_id = possible_state["id"]
+            if shape_id == 0:
+                empty_shapes.append(block_state_id)
+            elif shape_id == 1:
+                full_shapes.append(block_state_id)
             while len(collision_shapes_map) <= block_state_id:
                 # default to shape 1 for missing shapes (full block)
                 collision_shapes_map.append(1)
             collision_shapes_map[block_state_id] = shape_id
-        for possible_state, shape_id in zip(block_report_data['states'], outline_shapes):
-            block_state_id = possible_state['id']
+        for possible_state, shape_id in zip(
+            block_report_data["states"], outline_shapes
+        ):
+            block_state_id = possible_state["id"]
             while len(outline_shapes_map) <= block_state_id:
                 # default to shape 1 for missing shapes (full block)
                 outline_shapes_map.append(1)
             outline_shapes_map[block_state_id] = shape_id

-    generated_map_code = f'static COLLISION_SHAPES_MAP: [&LazyLock<VoxelShape>; {len(collision_shapes_map)}] = ['
+    generated_map_code = f"static COLLISION_SHAPES_MAP: [&LazyLock<VoxelShape>; {len(collision_shapes_map)}] = ["
     empty_shape_match_code = convert_ints_to_rust_ranges(empty_shapes)
     block_shape_match_code = convert_ints_to_rust_ranges(full_shapes)
     for block_state_id, shape_id in enumerate(collision_shapes_map):
-        generated_map_code += f'&SHAPE{shape_id},\n'
-    generated_map_code += '];\n'
+        generated_map_code += f"&SHAPE{shape_id},\n"
+    generated_map_code += "];\n"

-    generated_map_code += f'static OUTLINE_SHAPES_MAP: [&LazyLock<VoxelShape>; {len(outline_shapes_map)}] = ['
+    generated_map_code += f"static OUTLINE_SHAPES_MAP: [&LazyLock<VoxelShape>; {len(outline_shapes_map)}] = ["
     for block_state_id, shape_id in enumerate(outline_shapes_map):
-        generated_map_code += f'&SHAPE{shape_id},\n'
-    generated_map_code += '];\n'
+        generated_map_code += f"&SHAPE{shape_id},\n"
+    generated_map_code += "];\n"

-    if empty_shape_match_code == '':
-        print('Error: shape 0 was not found')
+    if empty_shape_match_code == "":
+        print("Error: shape 0 was not found")

-    return f'''
+    return f"""
 //! Autogenerated block collisions for every block

-// This file is generated from codegen/lib/code/shapes.py. If you want to
+// This file is @generated from codegen/lib/code/shapes.py. If you want to
 // modify it, change that file.

 #![allow(clippy::explicit_auto_deref)]
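The zip loops above build dense arrays indexed by block-state id, padding any never-mentioned id with shape 1 (the full block). That fill-as-you-go pattern in isolation (data invented):

```python
def build_dense_map(pairs: list[tuple[int, int]], default: int = 1) -> list[int]:
    """pairs is (block_state_id, shape_id); ids may arrive sparse or out of order."""
    dense: list[int] = []
    for state_id, shape_id in pairs:
        while len(dense) <= state_id:
            dense.append(default)  # default to the full-block shape
        dense[state_id] = shape_id
    return dense


assert build_dense_map([(0, 5), (3, 7)]) == [5, 1, 1, 7]
```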
@@ -172,36 +172,35 @@ impl BlockWithShape for BlockState {{
 }}

 {generated_map_code}
-'''
+"""


 def generate_code_for_shape(shape_id: str, parts: list[list[float]]):
     def make_arguments(part: list[float]):
-        return ', '.join(map(lambda n: str(n).rstrip('0'), part))
-    code = ''
-    code += f'static SHAPE{shape_id}: LazyLock<VoxelShape> = LazyLock::new(|| {{'
+        return ", ".join(map(lambda n: str(n).rstrip("0"), part))
+
+    code = ""
+    code += f"static SHAPE{shape_id}: LazyLock<VoxelShape> = LazyLock::new(|| {{"
     steps = []
     if parts == ():
-        steps.append('collision::EMPTY_SHAPE.clone()')
+        steps.append("collision::EMPTY_SHAPE.clone()")
     else:
-        steps.append(f'collision::box_shape({make_arguments(parts[0])})')
+        steps.append(f"collision::box_shape({make_arguments(parts[0])})")
         for part in parts[1:]:
-            steps.append(
-                f'Shapes::or(s, collision::box_shape({make_arguments(part)}))')
+            steps.append(f"Shapes::or(s, collision::box_shape({make_arguments(part)}))")

     if len(steps) == 1:
         code += steps[0]
     else:
-        code += '{\n'
+        code += "{\n"
         for step in steps[:-1]:
-            code += f' let s = {step};\n'
-        code += f' {steps[-1]}\n'
-        code += '}\n'
-    code += '});\n'
+            code += f" let s = {step};\n"
+        code += f" {steps[-1]}\n"
+        code += "}\n"
+    code += "});\n"
     return code


 def convert_ints_to_rust_ranges(block_state_ids: list[int]) -> str:
     # convert them into ranges (so like 1|2|3 is 1..=3 instead)
     block_state_ids_ranges = []

@@ -214,10 +213,18 @@ def convert_ints_to_rust_ranges(block_state_ids: list[int]) -> str:
         if last_block_state_id is not None:
             # check if the range is done
             if block_state_id - 1 != last_block_state_id:
-                block_state_ids_ranges.append(f'{range_start_block_state_id}..={last_block_state_id}' if range_start_block_state_id != last_block_state_id else str(range_start_block_state_id))
+                block_state_ids_ranges.append(
+                    f"{range_start_block_state_id}..={last_block_state_id}"
+                    if range_start_block_state_id != last_block_state_id
+                    else str(range_start_block_state_id)
+                )
                 range_start_block_state_id = block_state_id

         last_block_state_id = block_state_id

-    block_state_ids_ranges.append(f'{range_start_block_state_id}..={last_block_state_id}' if range_start_block_state_id != last_block_state_id else str(range_start_block_state_id))
-    return '|'.join(block_state_ids_ranges)
+    block_state_ids_ranges.append(
+        f"{range_start_block_state_id}..={last_block_state_id}"
+        if range_start_block_state_id != last_block_state_id
+        else str(range_start_block_state_id)
+    )
+    return "|".join(block_state_ids_ranges)
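`convert_ints_to_rust_ranges` compresses runs of consecutive block-state ids into Rust `..=` range patterns so the generated `match` stays small. The same algorithm as a compact standalone function:

```python
def ints_to_rust_ranges(ids: list[int]) -> str:
    """Turn sorted ints into a Rust match pattern, e.g. [1, 2, 3, 7] -> '1..=3|7'."""
    if not ids:
        return ""
    ranges = []
    start = prev = ids[0]
    for n in ids[1:]:
        if n != prev + 1:
            # the run ended; emit either a range or a single id
            ranges.append(f"{start}..={prev}" if start != prev else str(start))
            start = n
        prev = n
    ranges.append(f"{start}..={prev}" if start != prev else str(start))
    return "|".join(ranges)


assert ints_to_rust_ranges([1, 2, 3, 7]) == "1..=3|7"
```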
@@ -1,34 +1,36 @@
 from lib.utils import to_snake_case, upper_first_letter, get_dir_location, to_camel_case

-REGISTRIES_DIR = get_dir_location('../azalea-registry/src/tags')
+REGISTRIES_DIR = get_dir_location("../azalea-registry/src/tags")


 def generate_tags(registries: dict, file_name: str, struct_name: str):
-    tags_dir = f'{REGISTRIES_DIR}/{file_name}.rs'
+    tags_dir = f"{REGISTRIES_DIR}/{file_name}.rs"

-    generated = f'''// This file was generated by codegen/lib/code/tags.py, don't edit it manually!
+    generated = f"""// This file was @generated by codegen/lib/code/tags.py, don't edit it manually!

 use std::{{collections::HashSet, sync::LazyLock}};

 use crate::{struct_name};

-'''
+"""

     for tag_name, tag in sorted(registries.items(), key=lambda x: x[0]):
-        tag_name = tag_name.replace('/', '_')
+        tag_name = tag_name.replace("/", "_")
         static_set_name = to_snake_case(tag_name).upper()
-        generated += f'pub static {static_set_name}: LazyLock<HashSet<{struct_name}>> = LazyLock::new(|| HashSet::from_iter(vec!['
+        generated += f"pub static {static_set_name}: LazyLock<HashSet<{struct_name}>> = LazyLock::new(|| HashSet::from_iter(vec!["

-        queue = tag['values'].copy()
+        queue = tag["values"].copy()
         while queue != []:
             item = queue.pop(0)
-            namespace, item_name = item.split(':')
-            if namespace[0] == '#':
-                queue += registries[item_name]['values']
+            namespace, item_name = item.split(":")
+            if namespace[0] == "#":
+                queue += registries[item_name]["values"]
                 continue
-            generated += f'{struct_name}::{upper_first_letter(to_camel_case(item_name))},\n'
-        generated += ']));\n'
+            generated += (
+                f"{struct_name}::{upper_first_letter(to_camel_case(item_name))},\n"
+            )
+        generated += "]));\n"

-    with open(tags_dir, 'w') as f:
+    with open(tags_dir, "w") as f:
         f.write(generated)
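Tag values prefixed with `#` name other tags, so the generator flattens them with a work queue before emitting the `HashSet`. A runnable sketch of that expansion on invented data:

```python
def flatten_tag(registries: dict, values: list[str]) -> list[str]:
    """Expand '#namespace:tag' references into the concrete entries they name."""
    queue = values.copy()
    out = []
    while queue:
        item = queue.pop(0)
        namespace, item_name = item.split(":")
        if namespace.startswith("#"):
            queue += registries[item_name]["values"]
            continue
        out.append(item_name)
    return out


registries = {
    "logs": {"values": ["minecraft:oak_log", "minecraft:birch_log"]},
    "planks_and_logs": {"values": ["#minecraft:logs", "minecraft:oak_planks"]},
}
assert flatten_tag(registries, registries["planks_and_logs"]["values"]) == [
    "oak_planks",
    "oak_log",
    "birch_log",
]
```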
@@ -1,175 +1,204 @@
 # utilities specifically for codegen

 from lib.utils import to_camel_case, to_snake_case, get_dir_location
 from lib.mappings import Mappings
 from typing import Optional
 import os


-def burger_type_to_rust_type(burger_type, field_name: Optional[str] = None, instruction=None, mappings: Optional[Mappings] = None, obfuscated_class_name: Optional[str] = None):
+def burger_type_to_rust_type(
+    burger_type,
+    field_name: Optional[str] = None,
+    instruction=None,
+    mappings: Optional[Mappings] = None,
+    obfuscated_class_name: Optional[str] = None,
+):
     is_var = False
     uses = set()
     # extra code, like enum definitions
     extra_code = []

     should_be_signed = False
-    if field_name and any(map(lambda w: w in {'x', 'y', 'z', 'xa', 'ya', 'za'}, to_snake_case(field_name).split('_'))):
+    if field_name and any(
+        map(
+            lambda w: w in {"x", "y", "z", "xa", "ya", "za"},
+            to_snake_case(field_name).split("_"),
+        )
+    ):
         # coordinates are signed
         should_be_signed = True

-    if burger_type == 'byte':
-        field_type_rs = 'i8' if should_be_signed else 'u8'
-    elif burger_type == 'short':
-        field_type_rs = 'i16' if should_be_signed else 'u16'
-    elif burger_type == 'int':
-        field_type_rs = 'i32' if should_be_signed else 'u32'
-    elif burger_type == 'long':
-        field_type_rs = 'i64' if should_be_signed else 'u64'
-    elif burger_type == 'float':
-        field_type_rs = 'f32'
-    elif burger_type == 'double':
-        field_type_rs = 'f64'
+    if burger_type == "byte":
+        field_type_rs = "i8" if should_be_signed else "u8"
+    elif burger_type == "short":
+        field_type_rs = "i16" if should_be_signed else "u16"
+    elif burger_type == "int":
+        field_type_rs = "i32" if should_be_signed else "u32"
+    elif burger_type == "long":
+        field_type_rs = "i64" if should_be_signed else "u64"
+    elif burger_type == "float":
+        field_type_rs = "f32"
+    elif burger_type == "double":
+        field_type_rs = "f64"

-    elif burger_type == 'varint':
+    elif burger_type == "varint":
         is_var = True
-        field_type_rs = 'i32' if should_be_signed else 'u32'
-    elif burger_type == 'varlong':
+        field_type_rs = "i32" if should_be_signed else "u32"
+    elif burger_type == "varlong":
         is_var = True
-        field_type_rs = 'i64' if should_be_signed else 'u64'
+        field_type_rs = "i64" if should_be_signed else "u64"

-    elif burger_type == 'boolean':
-        field_type_rs = 'bool'
-    elif burger_type == 'string':
-        field_type_rs = 'String'
+    elif burger_type == "boolean":
+        field_type_rs = "bool"
+    elif burger_type == "string":
+        field_type_rs = "String"

-    elif burger_type == 'chatcomponent':
-        field_type_rs = 'FormattedText'
-        uses.add('azalea_chat::FormattedText')
-    elif burger_type == 'identifier':
-        field_type_rs = 'ResourceLocation'
-        uses.add('azalea_core::resource_location::ResourceLocation')
-    elif burger_type == 'uuid':
-        field_type_rs = 'Uuid'
-        uses.add('uuid::Uuid')
-    elif burger_type == 'position':
-        field_type_rs = 'BlockPos'
-        uses.add('azalea_core::position::BlockPos')
-    elif burger_type == 'nbtcompound':
-        field_type_rs = 'simdnbt::owned::NbtCompound'
-    elif burger_type == 'itemstack':
-        field_type_rs = 'Slot'
-        uses.add('azalea_core::slot::Slot')
-    elif burger_type == 'metadata':
-        field_type_rs = 'EntityMetadata'
-        uses.add('azalea_entity::EntityMetadata')
-    elif burger_type == 'bitset':
+    elif burger_type == "chatcomponent":
+        field_type_rs = "FormattedText"
+        uses.add("azalea_chat::FormattedText")
+    elif burger_type == "identifier":
+        field_type_rs = "ResourceLocation"
+        uses.add("azalea_core::resource_location::ResourceLocation")
+    elif burger_type == "uuid":
+        field_type_rs = "Uuid"
+        uses.add("uuid::Uuid")
+    elif burger_type == "position":
+        field_type_rs = "BlockPos"
+        uses.add("azalea_core::position::BlockPos")
+    elif burger_type == "nbtcompound":
+        field_type_rs = "simdnbt::owned::NbtCompound"
+    elif burger_type == "itemstack":
+        field_type_rs = "Slot"
+        uses.add("azalea_core::slot::Slot")
+    elif burger_type == "metadata":
+        field_type_rs = "EntityMetadata"
+        uses.add("azalea_entity::EntityMetadata")
+    elif burger_type == "bitset":
         if instruction:
-            length = instruction['length']
+            length = instruction["length"]
             field_type_rs = f'todo!("fixed bitset of length {length}")'
         else:
             field_type_rs = 'todo!("fixed bitset")'
-    elif burger_type == 'abstract':
-        field_type_rs = 'todo!()'
-    elif burger_type == 'interface':
+    elif burger_type == "abstract":
+        field_type_rs = "todo!()"
+    elif burger_type == "interface":
         # depends on context
-        field_type_rs = 'todo!()'
-    elif burger_type == 'Iterator':
-        field_type_rs = 'todo!()'
-    elif burger_type == 'Object':
+        field_type_rs = "todo!()"
+    elif burger_type == "Iterator":
+        field_type_rs = "todo!()"
+    elif burger_type == "Object":
         # depends on context
-        field_type_rs = 'todo!()'
-    elif burger_type == 'enum':
+        field_type_rs = "todo!()"
+    elif burger_type == "enum":
         if not instruction or not mappings or not obfuscated_class_name:
             field_type_rs = 'todo!("enum")'
         else:
             # generate the whole enum :)
             print(instruction)
-            enum_field = instruction['field']
+            enum_field = instruction["field"]
             # enums with a.b() as the field
-            if '.' in enum_field:
+            if "." in enum_field:
                 enum_first_part_name = mappings.get_field_type(
-                    obfuscated_class_name, enum_field.split('.')[0])
-                enum_first_part_obfuscated_name = mappings.get_class_from_deobfuscated_name(
-                    enum_first_part_name)
-                print('enum_first_part_obfuscated_name',
-                      enum_first_part_obfuscated_name)
-                print('enum field', enum_field.split('.')[1].split('(')[0])
+                    obfuscated_class_name, enum_field.split(".")[0]
+                )
+                enum_first_part_obfuscated_name = (
+                    mappings.get_class_from_deobfuscated_name(enum_first_part_name)
+                )
+                print(
+                    "enum_first_part_obfuscated_name", enum_first_part_obfuscated_name
+                )
+                print("enum field", enum_field.split(".")[1].split("(")[0])
                 try:
                     enum_name = mappings.get_method_type(
-                        enum_first_part_obfuscated_name, enum_field.split('.')[1].split('(')[0], '')
+                        enum_first_part_obfuscated_name,
+                        enum_field.split(".")[1].split("(")[0],
+                        "",
+                    )
                 except KeyError:
                     # sometimes enums are fields instead of methods
                     enum_name = mappings.get_field_type(
-                        enum_first_part_obfuscated_name, enum_field.split('.')[1].split('(')[0])
+                        enum_first_part_obfuscated_name,
+                        enum_field.split(".")[1].split("(")[0],
+                    )

-                print('hm', enum_name)
+                print("hm", enum_name)
             else:
                 try:
                     enum_name = mappings.get_field_type(
-                        obfuscated_class_name, enum_field)
-                except:
+                        obfuscated_class_name, enum_field
+                    )
+                except Exception:
                     enum_name = mappings.get_class(obfuscated_class_name)
-                    print(f'failed getting {obfuscated_class_name}.{enum_field} but continuing with {enum_name} anyways')
-            print('enum_name', enum_name)
-            enum_obfuscated_name = mappings.get_class_from_deobfuscated_name(
-                enum_name)
-            print('enum_obfuscated_name', enum_obfuscated_name)
+                    print(
+                        f"failed getting {obfuscated_class_name}.{enum_field} but continuing with {enum_name} anyways"
+                    )
+            print("enum_name", enum_name)
+            enum_obfuscated_name = mappings.get_class_from_deobfuscated_name(enum_name)
+            print("enum_obfuscated_name", enum_obfuscated_name)
             enum_variants = []
             for obfuscated_field_name in mappings.fields[enum_obfuscated_name]:
                 field_name = mappings.get_field(
-                    enum_obfuscated_name, obfuscated_field_name)
+                    enum_obfuscated_name, obfuscated_field_name
+                )

                 # get the type just to make sure it's actually a variant and not something else
                 field_type = mappings.get_field_type(
-                    enum_obfuscated_name, obfuscated_field_name)
+                    enum_obfuscated_name, obfuscated_field_name
+                )
                 if field_type != enum_name:
                     continue

                 enum_variants.append(field_name)

-            field_type_rs = to_camel_case(
-                enum_name.split('.')[-1].split('$')[-1])
-            extra_code.append('')
-            extra_code.append(f'#[derive(AzBuf, Clone, Copy, Debug)]')
-            extra_code.append(f'pub enum {field_type_rs} {{')
+            field_type_rs = to_camel_case(enum_name.split(".")[-1].split("$")[-1])
+            extra_code.append("")
+            extra_code.append("#[derive(AzBuf, Clone, Copy, Debug)]")
+            extra_code.append(f"pub enum {field_type_rs} {{")
             for index, variant in enumerate(enum_variants):
-                extra_code.append(
-                    f' {to_camel_case(variant.lower())}={index},')
-            extra_code.append('}')
+                extra_code.append(f" {to_camel_case(variant.lower())}={index},")
+            extra_code.append("}")

-    elif burger_type.endswith('[]'):
+    elif burger_type.endswith("[]"):
         field_type_rs, is_var, uses, extra_code = burger_type_to_rust_type(
-            burger_type[:-2])
-        field_type_rs = f'Vec<{field_type_rs}>'
+            burger_type[:-2]
+        )
+        field_type_rs = f"Vec<{field_type_rs}>"

         # sometimes burger gives us a slightly incorrect type
         if mappings and instruction:
-            if field_type_rs == 'Vec<u8>':
-                field = instruction['field']
-                if field.endswith('.copy()'):
+            if field_type_rs == "Vec<u8>":
+                field = instruction["field"]
+                if field.endswith(".copy()"):
                     field = field[:-7]
                 try:
-                    array_type = mappings.get_field_type(
-                        obfuscated_class_name, field)
+                    array_type = mappings.get_field_type(obfuscated_class_name, field)
                 except KeyError:
-                    print('Error getting array type', field)
+                    print("Error getting array type", field)
                     return field_type_rs, is_var, uses, extra_code
-                if array_type == 'net.minecraft.network.FriendlyByteBuf':
-                    field_type_rs = 'UnsizedByteArray'
-                    uses.add('azalea_buf::UnsizedByteArray')
+                if array_type == "net.minecraft.network.FriendlyByteBuf":
+                    field_type_rs = "UnsizedByteArray"
+                    uses.add("azalea_buf::UnsizedByteArray")

     else:
-        print('instruction that we errored on:', instruction)
-        deobfuscated_class_name = mappings.get_class(obfuscated_class_name) if obfuscated_class_name else None
-        raise Exception(f'Unknown field type: {burger_type} ({deobfuscated_class_name or obfuscated_class_name})')
+        print("instruction that we errored on:", instruction)
+        deobfuscated_class_name = (
+            mappings.get_class(obfuscated_class_name) if obfuscated_class_name else None
+        )
+        raise Exception(
+            f"Unknown field type: {burger_type} ({deobfuscated_class_name or obfuscated_class_name})"
+        )
     return field_type_rs, is_var, uses, extra_code


 def write_packet_file(state, packet_module_name, code):
-    with open(get_dir_location(f'../azalea-protocol/src/packets/{state}/{packet_module_name}.rs'), 'w') as f:
+    with open(
+        get_dir_location(
+            f"../azalea-protocol/src/packets/{state}/{packet_module_name}.rs"
+        ),
+        "w",
+    ) as f:
         f.write(code)


 def fmt():
-    os.system(f'cd {get_dir_location("..")} && cargo fmt')
+    os.system(f"cd {get_dir_location('..')} && cargo fmt")
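`burger_type_to_rust_type` is long because it also threads mappings, `uses` imports, and generated enum code through; its core is a wire-type to Rust-type mapping. A trimmed-down illustration of just that core (this simplified helper is not the real function):

```python
SIMPLE_TYPES = {
    "boolean": "bool",
    "float": "f32",
    "double": "f64",
    "string": "String",
}
SIGNED_WIDTHS = {"byte": 8, "short": 16, "int": 32, "long": 64}


def simple_burger_type_to_rust(burger_type: str, signed: bool = False) -> str:
    if burger_type in SIMPLE_TYPES:
        return SIMPLE_TYPES[burger_type]
    if burger_type in SIGNED_WIDTHS:
        width = SIGNED_WIDTHS[burger_type]
        return f"i{width}" if signed else f"u{width}"
    if burger_type.endswith("[]"):
        # arrays recurse on the element type, like the real function
        return f"Vec<{simple_burger_type_to_rust(burger_type[:-2], signed)}>"
    raise ValueError(f"unknown burger type: {burger_type}")


assert simple_burger_type_to_rust("int", signed=True) == "i32"
assert simple_burger_type_to_rust("string[]") == "Vec<String>"
```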
@@ -2,12 +2,12 @@ from lib.utils import get_dir_location
 import re
 import os

-README_DIR = get_dir_location('../README.md')
-VERSION_REGEX = r'\_Currently supported Minecraft version: `(.*)`.\_'
+README_DIR = get_dir_location("../README.md")
+VERSION_REGEX = r"\_Currently supported Minecraft version: `(.*)`.\_"


 def get_version_id() -> str:
-    with open(README_DIR, 'rb') as f:
+    with open(README_DIR, "rb") as f:
         readme_text = f.read().decode()

     version_line_match = re.search(VERSION_REGEX, readme_text)
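`VERSION_REGEX` captures the version id out of a README sentence with a single group. A quick check of how it behaves (sample line invented):

```python
import re

VERSION_REGEX = r"\_Currently supported Minecraft version: `(.*)`.\_"
line = "_Currently supported Minecraft version: `1.21.1`._"
match = re.search(VERSION_REGEX, line)
assert match and match.group(1) == "1.21.1"
```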
@@ -15,80 +15,85 @@ def get_version_id() -> str:
         version_id = version_line_match.group(1)
         return version_id
     else:
-        raise Exception('Could not find version id in README.md')
+        raise Exception("Could not find version id in README.md")


 def set_version_id(version_id: str) -> None:
-    with open(README_DIR, 'rb') as f:
+    with open(README_DIR, "rb") as f:
         readme_text = f.read().decode()

     version_line_match = re.search(VERSION_REGEX, readme_text)
     if version_line_match:
-        readme_text = readme_text.replace(
-            version_line_match.group(1), version_id)
+        readme_text = readme_text.replace(version_line_match.group(1), version_id)
     else:
-        raise Exception('Could not find version id in README.md')
+        raise Exception("Could not find version id in README.md")

-    with open(README_DIR, 'wb') as f:
+    with open(README_DIR, "wb") as f:
         f.write(readme_text.encode())

     # update the version in all Cargo.toml files
     # version = "0.10.3+mc1.21.1"
-    for root, _, files in os.walk(get_dir_location('..')):
+    for root, _, files in os.walk(get_dir_location("..")):
         for file in files:
-            if file == 'Cargo.toml':
-                with open(os.path.join(root, file), 'r') as f:
+            if file == "Cargo.toml":
+                with open(os.path.join(root, file), "r") as f:
                     cargo_toml = f.read().splitlines()
                 for i, line in enumerate(cargo_toml):
-                    if line.strip().startswith('version = '):
+                    if line.strip().startswith("version = "):
                         replaced = re.sub(r'\+mc[^"]+?"', f'+mc{version_id}"', line)
                         cargo_toml[i] = replaced
                         break
                 else:
                     # didn't have a version line
                     continue
-                if cargo_toml[-1] != '':
+                if cargo_toml[-1] != "":
                     # make sure there's always a trailing newline
-                    cargo_toml.append('')
-                with open(os.path.join(root, file), 'w') as f:
-                    f.write('\n'.join(cargo_toml))
-    print('Updated version in README.md and Cargo.toml files')
+                    cargo_toml.append("")
+                with open(os.path.join(root, file), "w") as f:
+                    f.write("\n".join(cargo_toml))
+    print("Updated version in README.md and Cargo.toml files")


 def get_protocol_version() -> str:
     # azalea-protocol/src/packets/mod.rs
     # pub const PROTOCOL_VERSION: i32 = 758;
-    with open(get_dir_location('../azalea-protocol/src/packets/mod.rs'), 'r') as f:
+    with open(get_dir_location("../azalea-protocol/src/packets/mod.rs"), "r") as f:
         mod_rs = f.read().splitlines()
     for line in mod_rs:
-        if line.strip().startswith('pub const PROTOCOL_VERSION'):
-            return line.strip().split(' ')[-1].strip(';')
+        if line.strip().startswith("pub const PROTOCOL_VERSION"):
+            return line.strip().split(" ")[-1].strip(";")
     raise Exception(
-        'Could not find protocol version in azalea-protocol/src/packets/mod.rs')
+        "Could not find protocol version in azalea-protocol/src/packets/mod.rs"
+    )


 def set_protocol_version(protocol_version: str) -> None:
-    with open(get_dir_location('../azalea-protocol/src/packets/mod.rs'), 'r') as f:
+    with open(get_dir_location("../azalea-protocol/src/packets/mod.rs"), "r") as f:
         mod_rs = f.read().splitlines()
     for i, line in enumerate(mod_rs):
-        if line.strip().startswith('pub const PROTOCOL_VERSION:'):
-            mod_rs[i] = f'pub const PROTOCOL_VERSION: i32 = {protocol_version};'
+        if line.strip().startswith("pub const PROTOCOL_VERSION:"):
+            mod_rs[i] = f"pub const PROTOCOL_VERSION: i32 = {protocol_version};"
             break
     else:
         raise Exception(
-            'Could not find protocol version in azalea-protocol/src/packets/mod.rs')
+            "Could not find protocol version in azalea-protocol/src/packets/mod.rs"
+        )

-    with open(get_dir_location('../azalea-protocol/src/packets/mod.rs'), 'w') as f:
-        f.write('\n'.join(mod_rs))
+    with open(get_dir_location("../azalea-protocol/src/packets/mod.rs"), "w") as f:
+        f.write("\n".join(mod_rs))


 def set_version_name(version_name: str) -> None:
-    with open(get_dir_location('../azalea-protocol/src/packets/mod.rs'), 'r') as f:
+    with open(get_dir_location("../azalea-protocol/src/packets/mod.rs"), "r") as f:
         mod_rs = f.read().splitlines()
     for i, line in enumerate(mod_rs):
-        if line.strip().startswith('pub const VERSION_NAME:'):
+        if line.strip().startswith("pub const VERSION_NAME:"):
             mod_rs[i] = f'pub const VERSION_NAME: &str = "{version_name}";'
             break
     else:
         raise Exception(
-            'Could not find version name in azalea-protocol/src/packets/mod.rs')
+            "Could not find version name in azalea-protocol/src/packets/mod.rs"
+        )

-    with open(get_dir_location('../azalea-protocol/src/packets/mod.rs'), 'w') as f:
-        f.write('\n'.join(mod_rs))
+    with open(get_dir_location("../azalea-protocol/src/packets/mod.rs"), "w") as f:
+        f.write("\n".join(mod_rs))
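The Cargo.toml walk above bumps only the `+mc...` build-metadata suffix of each `version = ` line via `re.sub`. In isolation:

```python
import re

# Hypothetical input: a Cargo.toml version line with a +mc build-metadata tag.
line = 'version = "0.10.3+mc1.21.1"'
bumped = re.sub(r'\+mc[^"]+?"', '+mc1.21.4"', line)
assert bumped == 'version = "0.10.3+mc1.21.4"'
```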
@@ -6,29 +6,33 @@ import json
 import os

 # make sure the cache directory exists
-print('Making __cache__')
-if not os.path.exists(get_dir_location('__cache__')):
-    print('Made __cache__ directory.', get_dir_location('__cache__'))
-    os.mkdir(get_dir_location('__cache__'))
+print("Making __cache__")
+if not os.path.exists(get_dir_location("__cache__")):
+    print("Made __cache__ directory.", get_dir_location("__cache__"))
+    os.mkdir(get_dir_location("__cache__"))


 def get_burger():
-    if not os.path.exists(get_dir_location('__cache__/Burger')):
-        print('\033[92mDownloading Burger...\033[m')
+    if not os.path.exists(get_dir_location("__cache__/Burger")):
+        print("\033[92mDownloading Burger...\033[m")
         os.system(
-            f'cd {get_dir_location("__cache__")} && git clone https://github.com/mat-1/Burger && cd Burger && git pull')
+            f"cd {get_dir_location('__cache__')} && git clone https://github.com/mat-1/Burger && cd Burger && git pull"
+        )

-        print('\033[92mInstalling dependencies...\033[m')
-        os.system(f'cd {get_dir_location("__cache__")}/Burger && python -m venv venv && venv/bin/pip install six jawa')
+        print("\033[92mInstalling dependencies...\033[m")
+        os.system(
+            f"cd {get_dir_location('__cache__')}/Burger && python -m venv venv && venv/bin/pip install six jawa"
+        )


 def get_pumpkin_extractor():
-    if not os.path.exists(get_dir_location('__cache__/pumpkin-extractor')):
-        print('\033[92mDownloading Pumpkin-MC/Extractor...\033[m')
+    if not os.path.exists(get_dir_location("__cache__/pumpkin-extractor")):
+        print("\033[92mDownloading Pumpkin-MC/Extractor...\033[m")
         os.system(
-            f'cd {get_dir_location("__cache__")} && git clone https://github.com/Pumpkin-MC/Extractor pumpkin-extractor && cd pumpkin-extractor && git pull')
+            f"cd {get_dir_location('__cache__')} && git clone https://github.com/Pumpkin-MC/Extractor pumpkin-extractor && cd pumpkin-extractor && git pull"
+        )

-    GIT_PATCH = '''diff --git a/src/main/kotlin/de/snowii/extractor/extractors/Blocks.kt b/src/main/kotlin/de/snowii/extractor/extractors/Blocks.kt
+    GIT_PATCH = """diff --git a/src/main/kotlin/de/snowii/extractor/extractors/Blocks.kt b/src/main/kotlin/de/snowii/extractor/extractors/Blocks.kt
 index 936cd7b..9876a4b 100644
 --- a/src/main/kotlin/de/snowii/extractor/extractors/Blocks.kt
 +++ b/src/main/kotlin/de/snowii/extractor/extractors/Blocks.kt
@@ -51,166 +55,171 @@ index 936cd7b..9876a4b 100644
 for (blockEntity in Registries.BLOCK_ENTITY_TYPE) {
     if (blockEntity.supports(state)) {
-'''
+"""
     os.system(
-        f'cd {get_dir_location("__cache__")}/pumpkin-extractor && git apply - <<EOF\n{GIT_PATCH}\nEOF'
+        f"cd {get_dir_location('__cache__')}/pumpkin-extractor && git apply - <<EOF\n{GIT_PATCH}\nEOF"
    )

-    return get_dir_location('__cache__/pumpkin-extractor')
+    return get_dir_location("__cache__/pumpkin-extractor")


 def get_version_manifest():
-    if not os.path.exists(get_dir_location(f'__cache__/version_manifest.json')):
-        print(
-            f'\033[92mDownloading version manifest...\033[m')
+    if not os.path.exists(get_dir_location("__cache__/version_manifest.json")):
+        print("\033[92mDownloading version manifest...\033[m")
         version_manifest_data = requests.get(
-            'https://piston-meta.mojang.com/mc/game/version_manifest_v2.json').json()
-        with open(get_dir_location(f'__cache__/version_manifest.json'), 'w') as f:
+            "https://piston-meta.mojang.com/mc/game/version_manifest_v2.json"
+        ).json()
+        with open(get_dir_location("__cache__/version_manifest.json"), "w") as f:
             json.dump(version_manifest_data, f)
     else:
-        with open(get_dir_location(f'__cache__/version_manifest.json'), 'r') as f:
+        with open(get_dir_location("__cache__/version_manifest.json"), "r") as f:
             version_manifest_data = json.load(f)
     return version_manifest_data


 def get_version_data(version_id: str):
-    if not os.path.exists(get_dir_location(f'__cache__/{version_id}.json')):
+    if not os.path.exists(get_dir_location(f"__cache__/{version_id}.json")):
         version_manifest_data = get_version_manifest()

-        print(
-            f'\033[92mGetting data for \033[1m{version_id}..\033[m')
+        print(f"\033[92mGetting data for \033[1m{version_id}..\033[m")
         try:
             package_url = next(
-                filter(lambda v: v['id'] == version_id, version_manifest_data['versions']))['url']
+                filter(
+                    lambda v: v["id"] == version_id, version_manifest_data["versions"]
+                )
+            )["url"]
         except StopIteration:
             raise ValueError(
-                f'No version with id {version_id} found. Maybe delete __cache__/version_manifest.json and try again?')
+                f"No version with id {version_id} found. Maybe delete __cache__/version_manifest.json and try again?"
+            )
         package_data = requests.get(package_url).json()
-        with open(get_dir_location(f'__cache__/{version_id}.json'), 'w') as f:
+        with open(get_dir_location(f"__cache__/{version_id}.json"), "w") as f:
             json.dump(package_data, f)
     else:
-        with open(get_dir_location(f'__cache__/{version_id}.json'), 'r') as f:
+        with open(get_dir_location(f"__cache__/{version_id}.json"), "r") as f:
             package_data = json.load(f)
     return package_data


 def get_client_jar(version_id: str):
-    if not os.path.exists(get_dir_location(f'__cache__/client-{version_id}.jar')):
+    if not os.path.exists(get_dir_location(f"__cache__/client-{version_id}.jar")):
         package_data = get_version_data(version_id)
-        print('\033[92mDownloading client jar...\033[m')
-        client_jar_url = package_data['downloads']['client']['url']
-        with open(get_dir_location(f'__cache__/client-{version_id}.jar'), 'wb') as f:
+        print("\033[92mDownloading client jar...\033[m")
+        client_jar_url = package_data["downloads"]["client"]["url"]
+        with open(get_dir_location(f"__cache__/client-{version_id}.jar"), "wb") as f:
             f.write(requests.get(client_jar_url).content)


 def get_server_jar(version_id: str):
-    if not os.path.exists(get_dir_location(f'__cache__/server-{version_id}.jar')):
+    if not os.path.exists(get_dir_location(f"__cache__/server-{version_id}.jar")):
         package_data = get_version_data(version_id)
-        print('\033[92mDownloading server jar...\033[m')
-        server_jar_url = package_data['downloads']['server']['url']
-        with open(get_dir_location(f'__cache__/server-{version_id}.jar'), 'wb') as f:
+        print("\033[92mDownloading server jar...\033[m")
+        server_jar_url = package_data["downloads"]["server"]["url"]
+        with open(get_dir_location(f"__cache__/server-{version_id}.jar"), "wb") as f:
             f.write(requests.get(server_jar_url).content)


 def get_mappings_for_version(version_id: str):
-    if not os.path.exists(get_dir_location(f'__cache__/mappings-{version_id}.txt')):
+    if not os.path.exists(get_dir_location(f"__cache__/mappings-{version_id}.txt")):
         package_data = get_version_data(version_id)

-        client_mappings_url = package_data['downloads']['client_mappings']['url']
+        client_mappings_url = package_data["downloads"]["client_mappings"]["url"]

         mappings_text = requests.get(client_mappings_url).text

-        with open(get_dir_location(f'__cache__/mappings-{version_id}.txt'), 'w') as f:
+        with open(get_dir_location(f"__cache__/mappings-{version_id}.txt"), "w") as f:
             f.write(mappings_text)
     else:
-        with open(get_dir_location(f'__cache__/mappings-{version_id}.txt'), 'r') as f:
+        with open(get_dir_location(f"__cache__/mappings-{version_id}.txt"), "r") as f:
             mappings_text = f.read()
     return Mappings.parse(mappings_text)


 def get_yarn_versions():
     # https://meta.fabricmc.net/v2/versions/yarn
-    if not os.path.exists(get_dir_location('__cache__/yarn_versions.json')):
-        print('\033[92mDownloading yarn versions...\033[m')
+    if not os.path.exists(get_dir_location("__cache__/yarn_versions.json")):
+        print("\033[92mDownloading yarn versions...\033[m")
         yarn_versions_data = requests.get(
-            'https://meta.fabricmc.net/v2/versions/yarn').json()
-        with open(get_dir_location('__cache__/yarn_versions.json'), 'w') as f:
+            "https://meta.fabricmc.net/v2/versions/yarn"
+        ).json()
+        with open(get_dir_location("__cache__/yarn_versions.json"), "w") as f:
             json.dump(yarn_versions_data, f)
     else:
-        with open(get_dir_location('__cache__/yarn_versions.json'), 'r') as f:
+        with open(get_dir_location("__cache__/yarn_versions.json"), "r") as f:
             yarn_versions_data = json.load(f)
     return yarn_versions_data


 def get_yarn_data(version_id: str):
     for version in get_yarn_versions():
-        if version['gameVersion'] == version_id:
+        if version["gameVersion"] == version_id:
             return version


 def get_fabric_api_versions():
     # https://maven.fabricmc.net/net/fabricmc/fabric-api/fabric-api/maven-metadata.xml
-    if not os.path.exists(get_dir_location('__cache__/fabric_api_versions.json')):
-        print('\033[92mDownloading Fabric API versions...\033[m')
+    if not os.path.exists(get_dir_location("__cache__/fabric_api_versions.json")):
+        print("\033[92mDownloading Fabric API versions...\033[m")
         fabric_api_versions_xml_text = requests.get(
-            'https://maven.fabricmc.net/net/fabricmc/fabric-api/fabric-api/maven-metadata.xml').text
+            "https://maven.fabricmc.net/net/fabricmc/fabric-api/fabric-api/maven-metadata.xml"
+        ).text
         # parse xml
-        fabric_api_versions_data_xml = ET.fromstring(
-            fabric_api_versions_xml_text)
+        fabric_api_versions_data_xml = ET.fromstring(fabric_api_versions_xml_text)
         fabric_api_versions = []

-        versioning_el = fabric_api_versions_data_xml.find('versioning')
+        versioning_el = fabric_api_versions_data_xml.find("versioning")
         assert versioning_el
-        versions_el = versioning_el.find('versions')
+        versions_el = versioning_el.find("versions")
         assert versions_el

-        for version_el in versions_el.findall('version'):
+        for version_el in versions_el.findall("version"):
             fabric_api_versions.append(version_el.text)

-        with open(get_dir_location('__cache__/fabric_api_versions.json'), 'w') as f:
+        with open(get_dir_location("__cache__/fabric_api_versions.json"), "w") as f:
             f.write(json.dumps(fabric_api_versions))
     else:
-        with open(get_dir_location('__cache__/fabric_api_versions.json'), 'r') as f:
+        with open(get_dir_location("__cache__/fabric_api_versions.json"), "r") as f:
             fabric_api_versions = json.loads(f.read())
     return fabric_api_versions


 def get_fabric_loader_versions():
     # https://meta.fabricmc.net/v2/versions/loader
-    if not os.path.exists(get_dir_location('__cache__/fabric_loader_versions.json')):
-        print('\033[92mDownloading Fabric loader versions...\033[m')
+    if not os.path.exists(get_dir_location("__cache__/fabric_loader_versions.json")):
+        print("\033[92mDownloading Fabric loader versions...\033[m")
         fabric_api_versions_json = requests.get(
-            'https://meta.fabricmc.net/v2/versions/loader').json()
+            "https://meta.fabricmc.net/v2/versions/loader"
+        ).json()

         fabric_api_versions = []
         for version in fabric_api_versions_json:
-            fabric_api_versions.append(version['version'])
+            fabric_api_versions.append(version["version"])

-        with open(get_dir_location('__cache__/fabric_loader_versions.json'), 'w') as f:
+        with open(get_dir_location("__cache__/fabric_loader_versions.json"), "w") as f:
             f.write(json.dumps(fabric_api_versions))
     else:
-        with open(get_dir_location('__cache__/fabric_loader_versions.json'), 'r') as f:
+        with open(get_dir_location("__cache__/fabric_loader_versions.json"), "r") as f:
             fabric_api_versions = json.loads(f.read())
     return fabric_api_versions


 def clear_version_cache():
-    print('\033[92mClearing version cache...\033[m')
+    print("\033[92mClearing version cache...\033[m")
     files = [
-        'version_manifest.json',
-        'yarn_versions.json',
-        'fabric_api_versions.json',
-        'fabric_loader_versions.json'
+        "version_manifest.json",
+        "yarn_versions.json",
+        "fabric_api_versions.json",
+        "fabric_loader_versions.json",
     ]
     for file in files:
-        if os.path.exists(get_dir_location(f'__cache__/{file}')):
-            os.remove(get_dir_location(f'__cache__/{file}'))
+        if os.path.exists(get_dir_location(f"__cache__/{file}")):
+            os.remove(get_dir_location(f"__cache__/{file}"))

     burger_path = get_dir_location("__cache__/Burger")
     if os.path.exists(burger_path):
-        os.system(
-            f'cd {burger_path} && git pull')
-    pumpkin_path = get_dir_location('__cache__/pumpkin-extractor')
+        os.system(f"cd {burger_path} && git pull")
+    pumpkin_path = get_dir_location("__cache__/pumpkin-extractor")
     if os.path.exists(pumpkin_path):
         os.system(
-            f'cd {pumpkin_path} && git add . && git stash && git pull && git stash pop')
+            f"cd {pumpkin_path} && git add . && git stash && git pull && git stash pop"
+        )
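Nearly every helper in this file follows the same download-once-then-cache shape. A generic sketch of that pattern (`cached_json` and `MANIFEST_URL` are illustrative names, not part of the codebase):

```python
import json
import os


def cached_json(path: str, fetch):
    """Fetch-once helper: reuse `path` if it exists, else call `fetch()` and save."""
    if not os.path.exists(path):
        data = fetch()
        with open(path, "w") as f:
            json.dump(data, f)
        return data
    with open(path, "r") as f:
        return json.load(f)


# usage sketch:
# manifest = cached_json("__cache__/version_manifest.json",
#                        lambda: requests.get(MANIFEST_URL).json())
```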
@@ -1,81 +1,92 @@
 # Extracting data from the Minecraft jars

 from typing import TYPE_CHECKING
-from lib.download import get_mappings_for_version, get_pumpkin_extractor, get_server_jar, get_burger, get_client_jar
+from lib.download import (
+    get_mappings_for_version,
+    get_pumpkin_extractor,
+    get_server_jar,
+    get_burger,
+    get_client_jar,
+)
 from lib.utils import get_dir_location, to_camel_case, upper_first_letter
 from zipfile import ZipFile
 import subprocess
 import requests
 import json
 import sys
 import re
 import os


 def generate_data_from_server_jar(version_id: str):
-    if os.path.exists(get_dir_location(f'__cache__/generated-{version_id}')):
+    if os.path.exists(get_dir_location(f"__cache__/generated-{version_id}")):
         return

     get_server_jar(version_id)
     os.system(
-        f'cd {get_dir_location(f"__cache__")} && java -DbundlerMainClass=net.minecraft.data.Main -jar {get_dir_location(f"__cache__/server-{version_id}.jar")} --all --output \"{get_dir_location(f"__cache__/generated-{version_id}")}\"'
+        f'cd {get_dir_location("__cache__")} && java -DbundlerMainClass=net.minecraft.data.Main -jar {get_dir_location(f"__cache__/server-{version_id}.jar")} --all --output "{get_dir_location(f"__cache__/generated-{version_id}")}"'
     )


 def get_block_states_report(version_id: str):
-    return get_report(version_id, 'blocks')
+    return get_report(version_id, "blocks")


 def get_registries_report(version_id: str):
-    return get_report(version_id, 'registries')
+    return get_report(version_id, "registries")


 def get_packets_report(version_id: str):
-    return get_report(version_id, 'packets')
+    return get_report(version_id, "packets")


 def get_report(version_id: str, name: str):
     generate_data_from_server_jar(version_id)
-    with open(get_dir_location(f'__cache__/generated-{version_id}/reports/{name}.json'), 'r') as f:
+    with open(
+        get_dir_location(f"__cache__/generated-{version_id}/reports/{name}.json"), "r"
+    ) as f:
         return json.load(f)


 def get_registry_tags(version_id: str, name: str):
     generate_data_from_server_jar(version_id)
-    tags_directory = get_dir_location(f'__cache__/generated-{version_id}/data/minecraft/tags/{name}')
+    tags_directory = get_dir_location(
+        f"__cache__/generated-{version_id}/data/minecraft/tags/{name}"
+    )
     if not os.path.exists(tags_directory):
         return {}
     tags = {}
     for root, dirs, files in os.walk(tags_directory, topdown=False):
         for name in files:
             file = os.path.join(root, name)
-            relative_path = file.replace(tags_directory, '')[1:]
-            if not file.endswith('.json'):
+            relative_path = file.replace(tags_directory, "")[1:]
+            if not file.endswith(".json"):
                 continue
-            with open(file, 'r') as f:
+            with open(file, "r") as f:
                 tags[relative_path[:-5]] = json.load(f)
     return tags


 python_command = None


 def determine_python_command():
-    return 'venv/bin/python'
+    return "venv/bin/python"


 def run_python_command_and_download_deps(command):
-    print('>', command)
+    print(">", command)
     for _ in range(10):
-        p = subprocess.Popen(
-            command,
-            stderr=subprocess.PIPE,
-            shell=True
-        )
+        p = subprocess.Popen(command, stderr=subprocess.PIPE, shell=True)

-        stderr = b''
+        stderr = b""
         while True:
             data = p.stderr.read()
-            if data == b'':
+            if data == b"":
                 break
-            print(data.decode(), end='', flush=True)
+            print(data.decode(), end="", flush=True)
             stderr += data

         regex_match = re.search(
-            r'ModuleNotFoundError: No module named \'(\w+?)\'', stderr.decode())
+            r"ModuleNotFoundError: No module named \'(\w+?)\'", stderr.decode()
+        )
         if not regex_match:
             out, err = p.communicate()
             if out:
@@ -84,58 +95,61 @@ def run_python_command_and_download_deps(command):
             print(err)
             break
         missing_lib = regex_match.group(1)
-        print('Missing required lib:', missing_lib)
-        subprocess.run(f'venv/bin/pip install {missing_lib}', cwd=os.path.dirname(os.path.dirname(__file__)))
-        print('ok')
+        print("Missing required lib:", missing_lib)
+        subprocess.run(
+            f"venv/bin/pip install {missing_lib}",
+            cwd=os.path.dirname(os.path.dirname(__file__)),
+        )
+        print("ok")


 def get_burger_data_for_version(version_id: str):
-    if not os.path.exists(get_dir_location(f'__cache__/burger-{version_id}.json')):
+    if not os.path.exists(get_dir_location(f"__cache__/burger-{version_id}.json")):
         get_burger()
         get_client_jar(version_id)
         get_mappings_for_version(version_id)

-        print('\033[92mRunning Burger...\033[m')
+        print("\033[92mRunning Burger...\033[m")
         run_python_command_and_download_deps(
-            f'cd {get_dir_location("__cache__/Burger")} && '\
-            f'venv/bin/python munch.py {get_dir_location("__cache__")}/client-{version_id}.jar '\
-            f'--output {get_dir_location("__cache__")}/burger-{version_id}.json '\
-            f'--mappings {get_dir_location("__cache__")}/mappings-{version_id}.txt'
+            f"cd {get_dir_location('__cache__/Burger')} && "
+            f"venv/bin/python munch.py {get_dir_location('__cache__')}/client-{version_id}.jar "
+            f"--output {get_dir_location('__cache__')}/burger-{version_id}.json "
+            f"--mappings {get_dir_location('__cache__')}/mappings-{version_id}.txt"
         )
-    with open(get_dir_location(f'__cache__/burger-{version_id}.json'), 'r') as f:
+    with open(get_dir_location(f"__cache__/burger-{version_id}.json"), "r") as f:
         return json.load(f)


 def get_pumpkin_data(version_id: str, category: str):
-    assert '/' not in version_id
-    assert '\\' not in version_id
-    target_parent_dir = get_dir_location(f'__cache__/pumpkin-{version_id}')
-    category_dir = f'{target_parent_dir}/{category}.json'
+    assert "/" not in version_id
+    assert "\\" not in version_id
+    target_parent_dir = get_dir_location(f"__cache__/pumpkin-{version_id}")
+    category_dir = f"{target_parent_dir}/{category}.json"

     if os.path.exists(category_dir):
-        with open(category_dir, 'r') as f:
+        with open(category_dir, "r") as f:
             return json.load(f)

     pumpkin_dir = get_pumpkin_extractor()
-    os.makedirs(f'{pumpkin_dir}/run', exist_ok=True)
-    with open(f'{pumpkin_dir}/run/eula.txt', 'w') as f:
-        f.write('eula=true')
+    os.makedirs(f"{pumpkin_dir}/run", exist_ok=True)
+    with open(f"{pumpkin_dir}/run/eula.txt", "w") as f:
+        f.write("eula=true")

     # run ./gradlew runServer until it logs "(pumpkin_extractor) Done"
     p = subprocess.Popen(
-        f'cd {pumpkin_dir} && ./gradlew runServer',
+        f"cd {pumpkin_dir} && ./gradlew runServer",
         stderr=subprocess.PIPE,
         stdout=subprocess.PIPE,
-        shell=True
+        shell=True,
     )

     while True:
         data = p.stdout.readline().decode()
-        print('>' + data, end='', flush=True)
-        if '[Server thread/INFO] (pumpkin_extractor) Done' in data:
-            print('Pumpkin extractor done')
+        print(">" + data, end="", flush=True)
+        if "[Server thread/INFO] (pumpkin_extractor) Done" in data:
+            print("Pumpkin extractor done")
             break
-        if data == b'':
+        if data == b"":
             break

     p.terminate()
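`get_pumpkin_data` streams Gradle's stdout line by line until a sentinel log line appears. A standalone sketch of that pattern; note that the original's EOF check compares a decoded `str` against `b""` (bytes), which can never be equal, so the sketch tests for an empty string instead:

```python
import subprocess


def run_until_sentinel(command: str, sentinel: str) -> None:
    """Stream a child's stdout line by line, stopping once `sentinel` appears."""
    p = subprocess.Popen(
        command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True
    )
    assert p.stdout is not None
    while True:
        line = p.stdout.readline().decode()
        if not line:  # EOF: the process exited before printing the sentinel
            break
        print(">" + line, end="", flush=True)
        if sentinel in line:
            break
    p.terminate()


# e.g. run_until_sentinel("./gradlew runServer", "(pumpkin_extractor) Done")
```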
@@ -144,44 +158,49 @@ def get_pumpkin_data(version_id: str, category: str):
     # delete target_parent_dir if it's empty
     if os.path.exists(target_parent_dir):
         os.rmdir(target_parent_dir)
-    os.rename(f'{pumpkin_dir}/run/pumpkin_extractor_output', target_parent_dir)
+    os.rename(f"{pumpkin_dir}/run/pumpkin_extractor_output", target_parent_dir)

-    with open(category_dir, 'r') as f:
+    with open(category_dir, "r") as f:
         return json.load(f)


 def get_file_from_jar(version_id: str, file_dir: str):
     get_client_jar(version_id)
-    with ZipFile(get_dir_location(f'__cache__/client-{version_id}.jar')) as z:
+    with ZipFile(get_dir_location(f"__cache__/client-{version_id}.jar")) as z:
         with z.open(file_dir) as f:
             return f.read()


 def get_en_us_lang(version_id: str):
-    return json.loads(
-        get_file_from_jar(version_id, 'assets/minecraft/lang/en_us.json')
-    )
+    return json.loads(get_file_from_jar(version_id, "assets/minecraft/lang/en_us.json"))


 # burger packet id extraction is broken since 1.20.5 (always returns -1, so we have to determine packet id ourselves from the mappings).
 # this is very much not ideal.

 if TYPE_CHECKING: from codegen.lib.mappings import Mappings

 def get_packet_list(version_id: str):
-    if version_id != '1.21':
+    if version_id != "1.21":
         return []

     generate_data_from_server_jar(version_id)
-    with open(get_dir_location(f'__cache__/generated-{version_id}/reports/packets.json'), 'r') as f:
+    with open(
+        get_dir_location(f"__cache__/generated-{version_id}/reports/packets.json"), "r"
+    ) as f:
         packets_report = json.load(f)
     packet_list = []
     for state, state_value in packets_report.items():
         for direction, direction_value in state_value.items():
             for packet_resourcelocation, packet_value in direction_value.items():
-                assert packet_resourcelocation.startswith('minecraft:')
-                packet_resourcelocation = upper_first_letter(to_camel_case(packet_resourcelocation[len('minecraft:'):]))
-                packet_list.append({
-                    'state': state,
-                    'direction': direction,
-                    'name': packet_resourcelocation,
-                    'id': packet_value['protocol_id']
-                })
+                assert packet_resourcelocation.startswith("minecraft:")
+                packet_resourcelocation = upper_first_letter(
+                    to_camel_case(packet_resourcelocation[len("minecraft:") :])
+                )
+                packet_list.append(
+                    {
+                        "state": state,
+                        "direction": direction,
+                        "name": packet_resourcelocation,
+                        "id": packet_value["protocol_id"],
+                    }
+                )
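The triple loop above flattens the nested `packets.json` report (state → direction → resource location) into a flat list of packet descriptors. The same transformation on a made-up miniature report:

```python
def to_camel_case(s: str) -> str:
    # illustrative helper; the real one lives in lib.utils
    return "".join(part.capitalize() for part in s.split("_"))


report = {  # made-up miniature packets.json
    "play": {"clientbound": {"minecraft:add_entity": {"protocol_id": 1}}}
}
packet_list = [
    {
        "state": state,
        "direction": direction,
        "name": to_camel_case(rl[len("minecraft:"):]),
        "id": v["protocol_id"],
    }
    for state, dirs in report.items()
    for direction, packets in dirs.items()
    for rl, v in packets.items()
]
assert packet_list == [
    {"state": "play", "direction": "clientbound", "name": "AddEntity", "id": 1}
]
```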
@@ -1,3 +1,4 @@
 wrap_comments = true
 group_imports = "StdExternalCrate"
 imports_granularity = "Crate"
+format_code_in_doc_comments = true