0
0
mirror of https://github.com/Pumpkin-MC/Pumpkin synced 2025-04-23 01:25:47 +00:00

Merge branch 'master' into chunk-features

This commit is contained in:
Alexander Medvedev
2025-04-03 19:37:09 +02:00
70 changed files with 1363 additions and 1111 deletions

@ -18,4 +18,4 @@ jobs:
- name: Checkout Actions Repository
uses: actions/checkout@v4
- name: Spell Check Repo
uses: crate-ci/typos@v1.30.0
uses: crate-ci/typos@v1.31.0

@ -14,7 +14,7 @@ members = [
]
[workspace.package]
version = "0.1.0"
version = "0.1.0-dev+1.21.5"
edition = "2024"

@ -15,9 +15,6 @@ RUN --mount=type=cache,sharing=private,target=/pumpkin/target \
--mount=type=cache,target=/usr/local/cargo/registry/ \
cargo build --release && cp target/release/pumpkin ./pumpkin.release
# strip debug symbols from binary
RUN strip pumpkin.release
FROM alpine:3.21
# Identifying information for registries like ghcr.io

@ -58,9 +58,9 @@ and customizable experience. It prioritizes performance and player enjoyment whi
- [x] Chunk Loading (Vanilla, Linear)
- [x] Chunk Generation
- [x] Chunk Saving (Vanilla, Linear)
- [x] Redstone
- [ ] Liquid Physics
- [x] Biomes
- [x] Redstone (WIP)
- [ ] Liquid Physics
- [ ] Vegetation
- Player
- [x] Skins
@ -93,7 +93,7 @@ and customizable experience. It prioritizes performance and player enjoyment whi
- [x] Inventories
- [x] Particles
- [x] Chat
- [x] Commands
- [x] Commands (WIP)
- [x] Permissions
- [x] Translations
- Proxy

@ -1,4 +1,5 @@
{
"version": 770,
"serverbound": {
"handshake": [
"intention"

14
pumpkin-config/src/fun.rs Normal file

@ -0,0 +1,14 @@
use serde::{Deserialize, Serialize};
// Let's face it, the only reason we play this game is because of fun 🙃
/// Configuration for novelty / easter-egg features.
/// `#[serde(default)]` means any field missing from the config file
/// falls back to the values in the `Default` impl below.
#[derive(Deserialize, Serialize)]
#[serde(default)]
pub struct FunConfig {
// Whether April Fools features are enabled (on by default).
pub april_fools: bool,
}
impl Default for FunConfig {
fn default() -> Self {
// Enabled unless the user explicitly turns it off.
Self { april_fools: true }
}
}

@ -1,4 +1,5 @@
use chunk::ChunkConfig;
use fun::FunConfig;
use log::warn;
use logging::LoggingConfig;
use pumpkin_util::{Difficulty, GameMode, PermissionLvl};
@ -12,6 +13,7 @@ use std::{
path::Path,
sync::LazyLock,
};
pub mod fun;
pub mod logging;
pub mod networking;
@ -99,6 +101,7 @@ pub struct AdvancedConfiguration {
pub pvp: PVPConfig,
pub server_links: ServerLinksConfig,
pub player_data: PlayerDataConfig,
pub fun: FunConfig,
}
#[derive(Serialize, Deserialize)]

@ -58,6 +58,7 @@ pub(crate) fn build() -> TokenStream {
variants.extend([quote! {
pub const #format_name: Biome = Biome {
id: #index,
registry_id: #name,
weather: Weather::new(
#has_precipitation,
#temperature,
@ -71,17 +72,18 @@ pub(crate) fn build() -> TokenStream {
type_to_name.extend(quote! { Self::#format_name => #name, });
name_to_type.extend(quote! { #name => Some(&Self::#format_name), });
type_to_id.extend(quote! { Self::#format_name => #index, });
id_to_type.extend(quote! { #index => Some(Self::#format_name), });
id_to_type.extend(quote! { #index => Some(&Self::#format_name), });
}
quote! {
use pumpkin_util::biome::{TemperatureModifier, Weather};
use serde::{de, Deserializer};
use std::fmt;
use std::{fmt, hash::{Hasher, Hash}};
#[derive(Clone, Debug)]
pub struct Biome {
pub id: u8,
pub registry_id: &'static str,
pub weather: Weather,
// carvers: &'static [&str],
pub features: &'static [&'static [&'static str]]
@ -93,6 +95,14 @@ pub(crate) fn build() -> TokenStream {
}
}
impl Eq for Biome {}
impl Hash for Biome {
fn hash<H>(&self, state: &mut H) where H: Hasher {
self.id.hash(state);
}
}
impl<'de> Deserialize<'de> for &'static Biome {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
@ -138,7 +148,7 @@ pub(crate) fn build() -> TokenStream {
}
}
pub const fn from_id(id: u8) -> Option<Self> {
pub const fn from_id(id: u8) -> Option<&'static Self> {
match id {
#id_to_type
_ => None

@ -21,23 +21,23 @@ struct PropertyVariantMapping {
struct PropertyCollectionData {
variant_mappings: Vec<PropertyVariantMapping>,
block_names: Vec<String>,
blocks: Vec<(String, u16)>,
}
impl PropertyCollectionData {
pub fn add_block_name(&mut self, block_name: String) {
self.block_names.push(block_name);
pub fn add_block(&mut self, block_name: String, block_id: u16) {
self.blocks.push((block_name, block_id));
}
pub fn from_mappings(variant_mappings: Vec<PropertyVariantMapping>) -> Self {
Self {
variant_mappings,
block_names: Vec::new(),
blocks: Vec::new(),
}
}
pub fn derive_name(&self) -> String {
format!("{}_like", self.block_names[0])
format!("{}_like", self.blocks[0].0)
}
}
@ -148,7 +148,12 @@ impl ToTokens for BlockPropertyStruct {
}
});
let block_names = &self.data.block_names;
let block_ids = self
.data
.blocks
.iter()
.map(|(_, id)| *id)
.collect::<Vec<_>>();
let field_names: Vec<_> = self
.data
@ -222,7 +227,7 @@ impl ToTokens for BlockPropertyStruct {
}
fn to_state_id(&self, block: &Block) -> u16 {
if ![#(#block_names),*].contains(&block.name) {
if ![#(#block_ids),*].contains(&block.id) {
panic!("{} is not a valid block for {}", &block.name, #struct_name);
}
@ -230,7 +235,7 @@ impl ToTokens for BlockPropertyStruct {
}
fn from_state_id(state_id: u16, block: &Block) -> Self {
if ![#(#block_names),*].contains(&block.name) {
if ![#(#block_ids),*].contains(&block.id) {
panic!("{} is not a valid block for {}", &block.name, #struct_name);
}
@ -243,7 +248,7 @@ impl ToTokens for BlockPropertyStruct {
}
fn default(block: &Block) -> Self {
if ![#(#block_names),*].contains(&block.name) {
if ![#(#block_ids),*].contains(&block.id) {
panic!("{} is not a valid block for {}", &block.name, #struct_name);
}
@ -260,7 +265,7 @@ impl ToTokens for BlockPropertyStruct {
}
fn from_props(props: Vec<(String, String)>, block: &Block) -> Self {
if ![#(#block_names),*].contains(&block.name) {
if ![#(#block_ids),*].contains(&block.id) {
panic!("{} is not a valid block for {}", &block.name, #struct_name);
}
@ -844,7 +849,7 @@ pub(crate) fn build() -> TokenStream {
let mut type_from_name = TokenStream::new();
let mut block_from_state_id = TokenStream::new();
let mut block_from_item_id = TokenStream::new();
let mut block_properties_from_state_and_name = TokenStream::new();
let mut block_properties_from_state_and_block_id = TokenStream::new();
let mut block_properties_from_props_and_name = TokenStream::new();
let mut existing_item_ids: Vec<u16> = Vec::new();
let mut constants = TokenStream::new();
@ -973,12 +978,12 @@ pub(crate) fn build() -> TokenStream {
property_collection_map
.entry(property_collection)
.or_insert_with(|| PropertyCollectionData::from_mappings(property_mapping))
.add_block_name(block.name);
.add_block(block.name, block.id);
}
}
for property_group in property_collection_map.into_values() {
for block_name in &property_group.block_names {
for (block_name, id) in &property_group.blocks {
let const_block_name = Ident::new(
&const_block_name_from_block_name(block_name),
Span::call_site(),
@ -987,13 +992,14 @@ pub(crate) fn build() -> TokenStream {
&property_group_name_from_derived_name(&property_group.derive_name()),
Span::call_site(),
);
let id_lit = LitInt::new(&id.to_string(), Span::call_site());
block_properties_from_state_and_name.extend(quote! {
#block_name => Some(Box::new(#property_name::from_state_id(state_id, &Block::#const_block_name))),
block_properties_from_state_and_block_id.extend(quote! {
#id_lit => Some(Box::new(#property_name::from_state_id(state_id, &Block::#const_block_name))),
});
block_properties_from_props_and_name.extend(quote! {
#block_name => Some(Box::new(#property_name::from_props(props, &Block::#const_block_name))),
#id_lit => Some(Box::new(#property_name::from_props(props, &Block::#const_block_name))),
});
}
@ -1215,15 +1221,15 @@ pub(crate) fn build() -> TokenStream {
#[doc = r" Get the properties of the block."]
pub fn properties(&self, state_id: u16) -> Option<Box<dyn BlockProperties>> {
match self.name {
#block_properties_from_state_and_name
match self.id {
#block_properties_from_state_and_block_id
_ => None
}
}
#[doc = r" Get the properties of the block."]
pub fn from_properties(&self, props: Vec<(String, String)>) -> Option<Box<dyn BlockProperties>> {
match self.name {
match self.id {
#block_properties_from_props_and_name
_ => None
}

@ -6,6 +6,7 @@ use serde::Deserialize;
#[derive(Deserialize)]
pub struct Packets {
version: u32,
serverbound: HashMap<String, Vec<String>>,
clientbound: HashMap<String, Vec<String>>,
}
@ -15,10 +16,14 @@ pub(crate) fn build() -> TokenStream {
let packets: Packets = serde_json::from_str(include_str!("../../assets/packets.json"))
.expect("Failed to parse packets.json");
let version = packets.version;
let serverbound_consts = parse_packets(packets.serverbound);
let clientbound_consts = parse_packets(packets.clientbound);
quote!(
/// The current Minecraft protocol version. This changes only when the protocol itself is modified.
pub const CURRENT_MC_PROTOCOL: u32 = #version;
pub mod serverbound {
#serverbound_consts
}

@ -113,6 +113,8 @@ pub struct Deserializer<R: Read> {
// Yes, this breaks with recursion. Just an attempt at a sanity check
in_list: bool,
is_named: bool,
// For debugging
key_stack: Vec<String>,
}
impl<R: Read> Deserializer<R> {
@ -122,6 +124,7 @@ impl<R: Read> Deserializer<R> {
tag_to_deserialize_stack: Vec::new(),
in_list: false,
is_named,
key_stack: Vec::new(),
}
}
}
@ -310,9 +313,14 @@ impl<'de, R: Read> de::Deserializer<'de> for &mut Deserializer<R> {
{
if let Some(tag_id) = self.tag_to_deserialize_stack.pop() {
if tag_id != COMPOUND_ID {
return Err(Error::SerdeError(
"Trying to deserialize a map without a compound ID".to_string(),
));
return Err(Error::SerdeError(format!(
"Trying to deserialize a map without a compound ID ({} with id {})",
self.key_stack
.last()
.cloned()
.unwrap_or_else(|| "compound root".to_string()),
tag_id
)));
}
} else {
let next_byte = self.input.get_u8_be()?;
@ -380,7 +388,9 @@ impl<'de, R: Read> MapAccess<'de> for CompoundAccess<'_, R> {
where
V: DeserializeSeed<'de>,
{
seed.deserialize(&mut *self.de)
let result = seed.deserialize(&mut *self.de);
self.de.key_stack.pop();
result
}
}
@ -396,6 +406,7 @@ impl<'de, R: Read> de::Deserializer<'de> for MapKey<'_, R> {
V: de::Visitor<'de>,
{
let key = get_nbt_string(&mut self.de.input)?;
self.de.key_stack.push(key.clone());
visitor.visit_string(key)
}

@ -8,10 +8,9 @@ use crate::{
use pumpkin_data::packet::clientbound::PLAY_LEVEL_CHUNK_WITH_LIGHT;
use pumpkin_macros::packet;
use pumpkin_util::math::ceil_log2;
use pumpkin_world::{
DIRECT_PALETTE_BITS,
chunk::{ChunkData, SUBCHUNKS_COUNT},
use pumpkin_world::chunk::{
ChunkData,
palette::{BlockPalette, NetworkPalette},
};
#[packet(PLAY_LEVEL_CHUNK_WITH_LIGHT)]
@ -48,110 +47,65 @@ impl ClientPacket for CChunkData<'_> {
let mut data_buf = Vec::new();
let mut light_buf = Vec::new();
for subchunk in self.0.sections.array_iter_subchunks() {
let mut chunk_light = [0u8; 2048];
for i in 0..subchunk.len() {
// if !block .is_air() {
// continue;
// }
let index = i / 2;
let mask = if i % 2 == 1 { 0xF0 } else { 0x0F };
chunk_light[index] |= mask;
}
for section in self.0.section.sections.iter() {
// 2 blocks per byte for block lights
let chunk_light_len = BlockPalette::VOLUME / 2;
// TODO: Implement, currently default to full bright
let chunk_light = vec![0xFFu8; chunk_light_len];
light_buf.write_var_int(&VarInt(chunk_light.len() as i32))?;
light_buf.write_var_int(&chunk_light_len.into())?;
light_buf.write_slice(&chunk_light)?;
let non_empty_block_count = subchunk.len() as i16;
// Block count
// TODO: write only non empty blocks, so no air and no fluidstate
let non_empty_block_count = section.block_states.non_air_block_count() as i16;
data_buf.write_i16_be(non_empty_block_count)?;
//// Block states
let palette = &subchunk;
// TODO: make dynamic block_size work
// TODO: make direct block_size work
enum PaletteType {
Single,
Indirect(u32),
// aka IdListPalette
Direct,
}
let palette_type = {
let palette_bit_len = ceil_log2(palette.len() as u32);
if palette_bit_len == 0 {
PaletteType::Single
} else if palette_bit_len <= 4 {
PaletteType::Indirect(4)
} else if palette_bit_len <= 8 {
PaletteType::Indirect(palette_bit_len as u32)
} else {
PaletteType::Direct
// This is a bit messy, but we dont have access to VarInt in pumpkin-world
let network_repr = section.block_states.convert_network();
data_buf.write_u8_be(network_repr.bits_per_entry)?;
match network_repr.palette {
NetworkPalette::Single(registry_id) => {
data_buf.write_var_int(&registry_id.into())?;
}
// TODO: fix indirect palette to work correctly
// PaletteType::Direct
};
match palette_type {
PaletteType::Single => {
data_buf.write_u8_be(0)?;
data_buf.write_var_int(&VarInt(*palette.first().unwrap() as i32))?;
data_buf.write_var_int(&VarInt(0))?;
}
PaletteType::Indirect(block_size) => {
// Bits per entry
data_buf.write_u8_be(block_size as u8)?;
// Palette length
data_buf.write_var_int(&VarInt(palette.len() as i32 - 1))?;
for id in palette.iter() {
// Palette
data_buf.write_var_int(&VarInt(*id as i32))?;
}
// Data array length
let data_array_len = subchunk.len().div_ceil(64 / block_size as usize);
data_buf.reserve(data_array_len * 8);
for block_clump in subchunk.chunks(64 / block_size as usize) {
let mut out_long: i64 = 0;
for block in block_clump.iter().rev() {
let index = palette
.iter()
.position(|b| b == block)
.expect("Its just got added, ofc it should be there");
out_long = (out_long << block_size) | (index as i64);
}
data_buf.write_i64_be(out_long)?;
}
}
PaletteType::Direct => {
// Bits per entry
data_buf.write_u8_be(DIRECT_PALETTE_BITS as u8)?;
// Data array length
let data_array_len = subchunk.len().div_ceil(64 / DIRECT_PALETTE_BITS as usize);
data_buf.reserve(data_array_len * 8);
for block_clump in subchunk.chunks(64 / DIRECT_PALETTE_BITS as usize) {
let mut out_long: i64 = 0;
for (i, &block) in block_clump.iter().enumerate() {
out_long |= (block as i64) << (i as u32 * DIRECT_PALETTE_BITS);
}
data_buf.write_i64_be(out_long)?;
NetworkPalette::Indirect(palette) => {
data_buf.write_var_int(&palette.len().into())?;
for registry_id in palette {
data_buf.write_var_int(&registry_id.into())?;
}
}
NetworkPalette::Direct => {}
}
//// Biomes
// TODO: make biomes work
// bits
data_buf.write_u8_be(0)?;
data_buf.write_var_int(&VarInt(0))?;
// NOTE: Not updated in wiki; i64 array length is now determined by the bits per entry
//data_buf.write_var_int(&network_repr.packed_data.len().into())?;
for packed in network_repr.packed_data {
data_buf.write_i64_be(packed)?;
}
let network_repr = section.biomes.convert_network();
data_buf.write_u8_be(network_repr.bits_per_entry)?;
match network_repr.palette {
NetworkPalette::Single(registry_id) => {
data_buf.write_var_int(&registry_id.into())?;
}
NetworkPalette::Indirect(palette) => {
data_buf.write_var_int(&palette.len().into())?;
for registry_id in palette {
data_buf.write_var_int(&registry_id.into())?;
}
}
NetworkPalette::Direct => {}
}
// NOTE: Not updated in wiki; i64 array length is now determined by the bits per entry
//data_buf.write_var_int(&network_repr.packed_data.len().into())?;
for packed in network_repr.packed_data {
data_buf.write_i64_be(packed)?;
}
}
// Size
write.write_var_int(&VarInt(data_buf.len() as i32))?;
// Chunk data
write.write_var_int(&data_buf.len().into())?;
write.write_slice(&data_buf)?;
// TODO: block entities
@ -169,7 +123,7 @@ impl ClientPacket for CChunkData<'_> {
write.write_bitset(&BitSet(Box::new([0])))?;
// Sky light
write.write_var_int(&VarInt(SUBCHUNKS_COUNT as i32))?;
write.write_var_int(&self.0.section.sections.len().into())?;
write.write_slice(&light_buf)?;
// Block Lighting

@ -1,6 +1,5 @@
use std::io::Read;
use std::io::Write;
use std::num::NonZeroUsize;
use serde::{Serialize, Serializer};
@ -9,19 +8,10 @@ use crate::ser::NetworkWriteExt;
use crate::ser::ReadingError;
use crate::ser::WritingError;
use super::Codec;
pub struct BitSet(pub Box<[i64]>);
impl Codec<BitSet> for BitSet {
/// The maximum size of the `BitSet` is `remaining / 8`.
const MAX_SIZE: NonZeroUsize = unsafe { NonZeroUsize::new_unchecked(usize::MAX) };
fn written_size(&self) -> usize {
todo!()
}
fn encode(&self, write: &mut impl Write) -> Result<(), WritingError> {
impl BitSet {
pub fn encode(&self, write: &mut impl Write) -> Result<(), WritingError> {
write.write_var_int(&self.0.len().into())?;
for b in &self.0 {
write.write_i64_be(*b)?;
@ -30,7 +20,7 @@ impl Codec<BitSet> for BitSet {
Ok(())
}
fn decode(read: &mut impl Read) -> Result<Self, ReadingError> {
pub fn decode(read: &mut impl Read) -> Result<Self, ReadingError> {
// Read length
let length = read.get_var_int()?;
let mut array: Vec<i64> = Vec::with_capacity(length.0 as usize);

@ -7,8 +7,6 @@ use serde::{Deserialize, Deserializer, Serialize, Serializer, de::Visitor};
use crate::ser::{NetworkReadExt, NetworkWriteExt, ReadingError, WritingError};
use super::Codec;
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Identifier {
pub namespace: String,
@ -29,19 +27,15 @@ impl Identifier {
}
}
}
impl Codec<Self> for Identifier {
impl Identifier {
/// The maximum number of bytes an `Identifier` is the same as for a normal `String`.
const MAX_SIZE: NonZeroUsize = unsafe { NonZeroUsize::new_unchecked(i16::MAX as usize) };
const MAX_SIZE: NonZeroUsize = NonZeroUsize::new(i16::MAX as usize).unwrap();
fn written_size(&self) -> usize {
todo!()
}
fn encode(&self, write: &mut impl Write) -> Result<(), WritingError> {
pub fn encode(&self, write: &mut impl Write) -> Result<(), WritingError> {
write.write_string_bounded(&self.to_string(), Self::MAX_SIZE.get())
}
fn decode(read: &mut impl Read) -> Result<Self, ReadingError> {
pub fn decode(read: &mut impl Read) -> Result<Self, ReadingError> {
let identifier = read.get_string_bounded(Self::MAX_SIZE.get())?;
match identifier.split_once(":") {
Some((namespace, path)) => Ok(Identifier {

@ -1,22 +1,5 @@
use std::{
io::{Read, Write},
num::NonZeroUsize,
};
use crate::ser::{ReadingError, WritingError};
pub mod bit_set;
pub mod identifier;
pub mod slot;
pub mod var_int;
pub mod var_long;
pub trait Codec<T> {
const MAX_SIZE: NonZeroUsize;
fn written_size(&self) -> usize;
fn encode(&self, write: &mut impl Write) -> Result<(), WritingError>;
fn decode(read: &mut impl Read) -> Result<T, ReadingError>;
}

@ -6,7 +6,6 @@ use std::{
use crate::ser::{NetworkReadExt, NetworkWriteExt, ReadingError, WritingError};
use super::Codec;
use bytes::BufMut;
use serde::{
Deserialize, Deserializer, Serialize, Serializer,
@ -22,20 +21,20 @@ pub type VarIntType = i32;
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct VarInt(pub VarIntType);
impl Codec<Self> for VarInt {
impl VarInt {
/// The maximum number of bytes a `VarInt` can occupy.
const MAX_SIZE: NonZeroUsize = unsafe { NonZeroUsize::new_unchecked(5) };
const MAX_SIZE: NonZeroUsize = NonZeroUsize::new(5).unwrap();
/// Returns the exact number of bytes this VarInt will write when
/// [`Encode::encode`] is called, assuming no error occurs.
fn written_size(&self) -> usize {
pub fn written_size(&self) -> usize {
match self.0 {
0 => 1,
n => (31 - n.leading_zeros() as usize) / 7 + 1,
}
}
fn encode(&self, write: &mut impl Write) -> Result<(), WritingError> {
pub fn encode(&self, write: &mut impl Write) -> Result<(), WritingError> {
let mut val = self.0;
for _ in 0..Self::MAX_SIZE.get() {
let b: u8 = val as u8 & 0b01111111;
@ -48,7 +47,7 @@ impl Codec<Self> for VarInt {
Ok(())
}
fn decode(read: &mut impl Read) -> Result<Self, ReadingError> {
pub fn decode(read: &mut impl Read) -> Result<Self, ReadingError> {
let mut val = 0;
for i in 0..Self::MAX_SIZE.get() {
let byte = read.get_u8_be()?;
@ -118,6 +117,12 @@ impl From<u8> for VarInt {
}
}
impl From<u16> for VarInt {
fn from(value: u16) -> Self {
VarInt(value as i32)
}
}
impl From<usize> for VarInt {
fn from(value: usize) -> Self {
VarInt(value as i32)

@ -6,8 +6,6 @@ use std::{
use crate::ser::{NetworkReadExt, NetworkWriteExt, ReadingError, WritingError};
use super::Codec;
use bytes::BufMut;
use serde::{
Deserialize, Deserializer, Serialize, Serializer,
de::{self, SeqAccess, Visitor},
@ -21,20 +19,20 @@ pub type VarLongType = i64;
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct VarLong(pub VarLongType);
impl Codec<Self> for VarLong {
impl VarLong {
/// The maximum number of bytes a `VarLong` can occupy.
const MAX_SIZE: NonZeroUsize = unsafe { NonZeroUsize::new_unchecked(10) };
const MAX_SIZE: NonZeroUsize = NonZeroUsize::new(10).unwrap();
/// Returns the exact number of bytes this VarLong will write when
/// [`Encode::encode`] is called, assuming no error occurs.
fn written_size(&self) -> usize {
pub fn written_size(&self) -> usize {
match self.0 {
0 => 1,
n => (31 - n.leading_zeros() as usize) / 7 + 1,
}
}
fn encode(&self, write: &mut impl Write) -> Result<(), WritingError> {
pub fn encode(&self, write: &mut impl Write) -> Result<(), WritingError> {
let mut x = self.0;
for _ in 0..Self::MAX_SIZE.get() {
let byte = (x & 0x7F) as u8;
@ -49,7 +47,7 @@ impl Codec<Self> for VarLong {
Ok(())
}
fn decode(read: &mut impl Read) -> Result<Self, ReadingError> {
pub fn decode(read: &mut impl Read) -> Result<Self, ReadingError> {
let mut val = 0;
for i in 0..Self::MAX_SIZE.get() {
let byte = read.get_u8_be()?;
@ -115,11 +113,11 @@ impl Serialize for VarLong {
let mut buf = Vec::new();
while value > 0x7F {
buf.put_u8(value as u8 | 0x80);
buf.push(value as u8 | 0x80);
value >>= 7;
}
buf.put_u8(value as u8);
buf.push(value as u8);
serializer.serialize_bytes(&buf)
}

@ -1,7 +1,6 @@
use std::{
io::{Read, Write},
marker::PhantomData,
num::NonZeroU16,
};
use aes::cipher::{BlockDecryptMut, BlockEncryptMut, BlockSizeUser, generic_array::GenericArray};
@ -27,10 +26,6 @@ pub mod ser;
#[cfg(feature = "serverbound")]
pub mod server;
/// The current Minecraft protocol number.
/// Don't forget to change this when porting.
pub const CURRENT_MC_PROTOCOL: NonZeroU16 = unsafe { NonZeroU16::new_unchecked(770) };
pub const MAX_PACKET_SIZE: u64 = 2097152;
pub const MAX_PACKET_DATA_SIZE: usize = 8388608;

@ -5,7 +5,7 @@ use tokio::io::{AsyncRead, AsyncReadExt, BufReader};
use crate::{
Aes128Cfb8Dec, CompressionThreshold, MAX_PACKET_DATA_SIZE, MAX_PACKET_SIZE, RawPacket,
StreamDecryptor, VarInt, codec::Codec, ser::ReadingError,
StreamDecryptor, VarInt, ser::ReadingError,
};
// decrypt -> decompress -> raw

@ -6,7 +6,7 @@ use tokio::io::{AsyncWrite, AsyncWriteExt};
use crate::{
Aes128Cfb8Enc, CompressionLevel, CompressionThreshold, MAX_PACKET_DATA_SIZE, MAX_PACKET_SIZE,
StreamEncryptor, VarInt, codec::Codec,
StreamEncryptor, VarInt,
};
// raw -> compress -> encrypt

@ -3,7 +3,7 @@ use std::io::{Read, Write};
use crate::{
FixedBitSet,
codec::{Codec, bit_set::BitSet, identifier::Identifier, var_int::VarInt, var_long::VarLong},
codec::{bit_set::BitSet, identifier::Identifier, var_int::VarInt, var_long::VarLong},
};
pub mod deserializer;

@ -13,4 +13,3 @@ indexmap = { version = "2.8", features = ["serde"] }
serde.workspace = true
serde_json.workspace = true

@ -40,6 +40,12 @@ macro_rules! read_data_from_file {
}};
}
/// The number of bits needed to distinguish `count` distinct values,
/// i.e. `ceil(log2(count))`.
///
/// Returns `0` for a single value (`count == 1`).
///
/// # Panics
/// Panics if `count` is `0` (from `usize::ilog2`).
#[inline]
pub fn encompassing_bits(count: usize) -> u8 {
    // Exact power of two: the floor log2 already encompasses the range;
    // otherwise one extra bit is required to round the log up.
    let floor_log2 = count.ilog2() as u8;
    if count.is_power_of_two() {
        floor_log2
    } else {
        floor_log2 + 1
    }
}
#[derive(PartialEq, Serialize, Deserialize, Clone)]
pub enum Difficulty {
Peaceful,

@ -51,6 +51,7 @@ mod test {
use crate::{
GENERATION_SETTINGS, GeneratorSetting, GlobalProtoNoiseRouter, GlobalRandomConfig,
NOISE_ROUTER_ASTS, ProtoChunk,
chunk::palette::BIOME_NETWORK_MAX_BITS,
generation::noise_router::multi_noise_sampler::{
MultiNoiseSampler, MultiNoiseSamplerBuilderOptions,
},
@ -128,4 +129,12 @@ mod test {
let hashed_seed = hash_seed((-777i64) as u64);
assert_eq!(-1087248400229165450, hashed_seed);
}
#[test]
fn test_proper_network_bits_per_entry() {
    // The first id that no longer fits in BIOME_NETWORK_MAX_BITS bits must
    // be unknown; if it resolves, the network bit-width constant is stale.
    let first_unrepresentable_id = 1 << BIOME_NETWORK_MAX_BITS;
    assert!(
        Biome::from_id(first_unrepresentable_id).is_none(),
        "We need to update our constants!"
    );
}
}

@ -235,3 +235,18 @@ impl FacingExt for Facing {
}
}
}
#[cfg(test)]
mod test {
    use crate::chunk::palette::BLOCK_NETWORK_MAX_BITS;
    use pumpkin_data::block::Block;

    /// Guard test: the first state id that does not fit in
    /// `BLOCK_NETWORK_MAX_BITS` bits must not resolve to a block;
    /// if it does, the network bit-width constant is stale.
    #[test]
    fn test_proper_network_bits_per_entry() {
        let first_unrepresentable_id = 1 << BLOCK_NETWORK_MAX_BITS;
        assert!(
            Block::from_state_id(first_unrepresentable_id).is_none(),
            "We need to update our constants!"
        );
    }
}

@ -1,4 +1,4 @@
use crate::chunk::format::PaletteEntry;
use crate::chunk::format::PaletteBlockEntry;
use super::registry::{get_block, get_state_by_state_id};
@ -29,7 +29,7 @@ impl ChunkBlockState {
})
}
pub fn from_palette(palette: &PaletteEntry) -> Self {
pub fn from_palette(palette: &PaletteBlockEntry) -> Option<Self> {
let block = get_block(palette.name.as_str());
if let Some(block) = block {
@ -44,13 +44,13 @@ impl ChunkBlockState {
state_id = block_properties.to_state_id(&block);
}
return Self {
return Some(Self {
state_id,
block_id: block.id,
};
});
}
ChunkBlockState::AIR
None
}
pub fn get_id(&self) -> u16 {

@ -1,15 +1,13 @@
use async_trait::async_trait;
use bytes::*;
use flate2::read::{GzDecoder, GzEncoder, ZlibDecoder, ZlibEncoder};
use indexmap::IndexMap;
use itertools::Itertools;
use pumpkin_config::advanced_config;
use pumpkin_data::{block::Block, chunk::ChunkStatus};
use pumpkin_nbt::serializer::to_bytes;
use pumpkin_util::math::ceil_log2;
use pumpkin_util::math::vector2::Vector2;
use std::{
collections::{HashMap, HashSet},
collections::HashSet,
io::{Read, SeekFrom, Write},
path::{Path, PathBuf},
time::{SystemTime, UNIX_EPOCH},
@ -19,14 +17,15 @@ use tokio::{
sync::Mutex,
};
use crate::chunk::{
ChunkData, ChunkReadingError, ChunkSerializingError, ChunkWritingError, CompressionError,
io::{ChunkSerializer, LoadedData},
use crate::{
chunk::{
ChunkData, ChunkReadingError, ChunkSerializingError, ChunkWritingError, CompressionError,
io::{ChunkSerializer, LoadedData},
},
generation::section_coords,
};
use super::{
ChunkNbt, ChunkSection, ChunkSectionBlockStates, PaletteEntry, SerializedScheduledTick,
};
use super::{ChunkNbt, ChunkSectionNBT, SerializedScheduledTick};
/// The side size of a region in chunks (one region is 32x32 chunks)
pub const REGION_SIZE: usize = 32;
@ -774,11 +773,8 @@ impl ChunkSerializer for AnvilChunkFile {
// runtime
for chunk in chunks.iter().cloned() {
let index = AnvilChunkFile::get_chunk_index(&chunk);
match &self.chunks_data[index] {
None => stream
.send(LoadedData::Missing(chunk))
.await
.expect("Failed to send chunk"),
let is_ok = match &self.chunks_data[index] {
None => stream.send(LoadedData::Missing(chunk)).await.is_ok(),
Some(chunk_metadata) => {
let chunk_data = &chunk_metadata.serialized_data;
let result = match chunk_data.to_chunk(chunk) {
@ -786,11 +782,13 @@ impl ChunkSerializer for AnvilChunkFile {
Err(err) => LoadedData::Error((chunk, err)),
};
stream
.send(result)
.await
.expect("Failed to read the chunk to the stream");
stream.send(result).await.is_ok()
}
};
if !is_ok {
// Stream is closed. Stop unneeded work and IO
return;
}
}
}
@ -799,83 +797,14 @@ impl ChunkSerializer for AnvilChunkFile {
pub fn chunk_to_bytes(chunk_data: &ChunkData) -> Result<Vec<u8>, ChunkSerializingError> {
let mut sections = Vec::new();
for (i, blocks) in chunk_data.sections.array_iter_subchunks().enumerate() {
// get unique blocks
let unique_blocks: HashSet<_> = blocks.iter().collect();
for (i, section) in chunk_data.section.sections.iter().enumerate() {
let block_states = section.block_states.to_disk_nbt();
let biomes = section.biomes.to_disk_nbt();
let palette: IndexMap<_, _> = unique_blocks
.into_iter()
.enumerate()
.map(|(i, block)| {
let name = Block::from_state_id(*block).unwrap().name;
(block, (name, i))
})
.collect();
// Determine the number of bits needed to represent the largest index in the palette
let block_bit_size = if palette.len() < 16 {
4
} else {
ceil_log2(palette.len() as u32).max(4)
};
let mut section_longs = Vec::new();
let mut current_pack_long: i64 = 0;
let mut bits_used_in_pack: u32 = 0;
// Empty data if the palette only contains one index https://minecraft.fandom.com/wiki/Chunk_format
// if palette.len() > 1 {}
// TODO: Update to write empty data. Rn or read does not handle this elegantly
for block in blocks.iter() {
// Push if next bit does not fit
if bits_used_in_pack + block_bit_size as u32 > 64 {
section_longs.push(current_pack_long);
current_pack_long = 0;
bits_used_in_pack = 0;
}
let index = palette.get(block).expect("Just added all unique").1;
current_pack_long |= (index as i64) << bits_used_in_pack;
bits_used_in_pack += block_bit_size as u32;
assert!(bits_used_in_pack <= 64);
// If the current 64-bit integer is full, push it to the section_longs and start a new one
if bits_used_in_pack >= 64 {
section_longs.push(current_pack_long);
current_pack_long = 0;
bits_used_in_pack = 0;
}
}
// Push the last 64-bit integer if it contains any data
if bits_used_in_pack > 0 {
section_longs.push(current_pack_long);
}
sections.push(ChunkSection {
y: i as i8 - 4,
block_states: Some(ChunkSectionBlockStates {
data: Some(section_longs.into_boxed_slice()),
palette: palette
.into_iter()
.map(|entry| PaletteEntry {
name: entry.1.0.to_string(),
properties: {
let block = Block::from_state_id(*entry.0).unwrap();
if let Some(properties) = block.properties(*entry.0) {
let props = properties.to_props();
let mut props_map = HashMap::new();
for prop in props {
props_map.insert(prop.0.clone(), prop.1.clone());
}
Some(props_map)
} else {
None
}
},
})
.collect(),
}),
sections.push(ChunkSectionNBT {
y: i as i8 + section_coords::block_to_section(chunk_data.section.min_y) as i8,
block_states,
biomes,
});
}
@ -883,6 +812,7 @@ pub fn chunk_to_bytes(chunk_data: &ChunkData) -> Result<Vec<u8>, ChunkSerializin
data_version: WORLD_DATA_VERSION,
x_pos: chunk_data.position.x,
z_pos: chunk_data.position.z,
min_y_section: section_coords::block_to_section(chunk_data.section.min_y),
status: ChunkStatus::Full,
heightmaps: chunk_data.heightmap.clone(),
sections,
@ -940,7 +870,6 @@ mod tests {
use crate::chunk::format::anvil::AnvilChunkFile;
use crate::chunk::io::chunk_file_manager::ChunkFileManager;
use crate::chunk::io::{ChunkIO, LoadedData};
use crate::coordinates::ChunkRelativeBlockCoordinates;
use crate::generation::{Seed, get_world_gen};
use crate::level::{LevelFolder, SyncChunk};
@ -1026,7 +955,7 @@ mod tests {
for x in -5..5 {
for y in -5..5 {
let position = Vector2::new(x, y);
let chunk = generator.generate_chunk(position);
let chunk = generator.generate_chunk(&position);
chunks.push((position, Arc::new(RwLock::new(chunk))));
}
}
@ -1047,7 +976,31 @@ mod tests {
for read_chunk in read_chunks.iter() {
let read_chunk = read_chunk.read().await;
if read_chunk.position == chunk.position {
assert_eq!(chunk.sections, read_chunk.sections, "Chunks don't match");
let original = chunk.section.dump_blocks();
let read = read_chunk.section.dump_blocks();
original
.into_iter()
.zip(read)
.enumerate()
.for_each(|(i, (o, r))| {
if o != r {
panic!("Data miss-match expected {}, got {} ({})", o, r, i);
}
});
let original = chunk.section.dump_biomes();
let read = read_chunk.section.dump_biomes();
original
.into_iter()
.zip(read)
.enumerate()
.for_each(|(i, (o, r))| {
if o != r {
panic!("Data miss-match expected {}, got {} ({})", o, r, i);
}
});
break;
}
}
@ -1057,26 +1010,12 @@ mod tests {
// Idk what blocks these are, they just have to be different
let mut chunk = chunks.first().unwrap().1.write().await;
chunk.sections.set_block(
ChunkRelativeBlockCoordinates {
x: 0u32.into(),
y: 0.into(),
z: 0u32.into(),
},
1000,
);
chunk.section.set_relative_block(0, 0, 0, 1000);
// Mark dirty so we actually write it
chunk.dirty = true;
drop(chunk);
let mut chunk = chunks.last().unwrap().1.write().await;
chunk.sections.set_block(
ChunkRelativeBlockCoordinates {
x: 0u32.into(),
y: 0.into(),
z: 0u32.into(),
},
1000,
);
chunk.section.set_relative_block(0, 0, 0, 1000);
// Mark dirty so we actually write it
chunk.dirty = true;
drop(chunk);
@ -1095,7 +1034,32 @@ mod tests {
for read_chunk in read_chunks.iter() {
let read_chunk = read_chunk.read().await;
if read_chunk.position == chunk.position {
assert_eq!(chunk.sections, read_chunk.sections, "Chunks don't match");
let original = chunk.section.dump_blocks();
let read = read_chunk.section.dump_blocks();
original
.into_iter()
.zip(read)
.enumerate()
.for_each(|(i, (o, r))| {
if o != r {
panic!("Data miss-match expected {}, got {} ({})", o, r, i);
}
});
let original = chunk.section.dump_biomes();
let read = read_chunk.section.dump_biomes();
original
.into_iter()
.zip(read)
.enumerate()
.for_each(|(i, (o, r))| {
if o != r {
panic!("Data miss-match expected {}, got {} ({})", o, r, i);
}
});
break;
}
}
@ -1109,14 +1073,7 @@ mod tests {
for z in 0..16 {
for y in 0..4 {
let block_id = 16 * 16 * y + 16 * z + x;
chunk.sections.set_block(
ChunkRelativeBlockCoordinates {
x: x.into(),
y: (y as i32).into(),
z: z.into(),
},
block_id,
);
chunk.section.set_relative_block(x, y, z, block_id as u16);
}
}
}
@ -1128,14 +1085,7 @@ mod tests {
for z in 0..16 {
for y in 0..4 {
let block_id = 16 * 16 * y + 16 * z + x;
chunk.sections.set_block(
ChunkRelativeBlockCoordinates {
x: x.into(),
y: (y as i32).into(),
z: z.into(),
},
block_id,
);
chunk.section.set_relative_block(x, y, z, block_id as u16);
}
}
}
@ -1157,7 +1107,32 @@ mod tests {
for read_chunk in read_chunks.iter() {
let read_chunk = read_chunk.read().await;
if read_chunk.position == chunk.position {
assert_eq!(chunk.sections, read_chunk.sections, "Chunks don't match");
let original = chunk.section.dump_blocks();
let read = read_chunk.section.dump_blocks();
original
.into_iter()
.zip(read)
.enumerate()
.for_each(|(i, (o, r))| {
if o != r {
panic!("Data miss-match expected {}, got {} ({})", o, r, i);
}
});
let original = chunk.section.dump_biomes();
let read = read_chunk.section.dump_biomes();
original
.into_iter()
.zip(read)
.enumerate()
.for_each(|(i, (o, r))| {
if o != r {
panic!("Data miss-match expected {}, got {} ({})", o, r, i);
}
});
break;
}
}
@ -1171,14 +1146,7 @@ mod tests {
for z in 0..16 {
for y in 0..16 {
let block_id = 16 * 16 * y + 16 * z + x;
chunk.sections.set_block(
ChunkRelativeBlockCoordinates {
x: x.into(),
y: (y as i32).into(),
z: z.into(),
},
block_id,
);
chunk.section.set_relative_block(x, y, z, block_id as u16);
}
}
}
@ -1200,7 +1168,31 @@ mod tests {
for read_chunk in read_chunks.iter() {
let read_chunk = read_chunk.read().await;
if read_chunk.position == chunk.position {
assert_eq!(chunk.sections, read_chunk.sections, "Chunks don't match");
let original = chunk.section.dump_blocks();
let read = read_chunk.section.dump_blocks();
original
.into_iter()
.zip(read)
.enumerate()
.for_each(|(i, (o, r))| {
if o != r {
panic!("Data miss-match expected {}, got {} ({})", o, r, i);
}
});
let original = chunk.section.dump_biomes();
let read = read_chunk.section.dump_biomes();
original
.into_iter()
.zip(read)
.enumerate()
.for_each(|(i, (o, r))| {
if o != r {
panic!("Data miss-match expected {}, got {} ({})", o, r, i);
}
});
break;
}
}
@ -1231,7 +1223,7 @@ mod tests {
for x in -5..5 {
for y in -5..5 {
let position = Vector2::new(x, y);
let chunk = generator.generate_chunk(position);
let chunk = generator.generate_chunk(&position);
chunks.push((position, Arc::new(RwLock::new(chunk))));
}
}
@ -1257,7 +1249,31 @@ mod tests {
for read_chunk in read_chunks.iter() {
let read_chunk = read_chunk.read().await;
if read_chunk.position == chunk.position {
assert_eq!(chunk.sections, read_chunk.sections, "Chunks don't match");
let original = chunk.section.dump_blocks();
let read = read_chunk.section.dump_blocks();
original
.into_iter()
.zip(read)
.enumerate()
.for_each(|(i, (o, r))| {
if o != r {
panic!("Data miss-match expected {}, got {} ({})", o, r, i);
}
});
let original = chunk.section.dump_biomes();
let read = read_chunk.section.dump_biomes();
original
.into_iter()
.zip(read)
.enumerate()
.for_each(|(i, (o, r))| {
if o != r {
panic!("Data miss-match expected {}, got {} ({})", o, r, i);
}
});
break;
}
}

@ -345,10 +345,10 @@ impl ChunkSerializer for LinearFile {
LoadedData::Missing(chunk)
};
stream
.send(result)
.await
.expect("Failed to read chunks to stream");
if stream.send(result).await.is_err() {
// The stream is closed. Return early to prevent unneeded work and IO
return;
}
}
}
}
@ -414,7 +414,7 @@ mod tests {
for x in -5..5 {
for y in -5..5 {
let position = Vector2::new(x, y);
let chunk = generator.generate_chunk(position);
let chunk = generator.generate_chunk(&position);
chunks.push((position, Arc::new(RwLock::new(chunk))));
}
}
@ -465,7 +465,31 @@ mod tests {
for read_chunk in read_chunks.iter() {
let read_chunk = read_chunk.read().await;
if read_chunk.position == chunk.position {
assert_eq!(chunk.sections, read_chunk.sections, "Chunks don't match");
let original = chunk.section.dump_blocks();
let read = read_chunk.section.dump_blocks();
original
.into_iter()
.zip(read)
.enumerate()
.for_each(|(i, (o, r))| {
if o != r {
panic!("Data miss-match expected {}, got {} ({})", o, r, i);
}
});
let original = chunk.section.dump_biomes();
let read = read_chunk.section.dump_biomes();
original
.into_iter()
.zip(read)
.enumerate()
.for_each(|(i, (o, r))| {
if o != r {
panic!("Data miss-match expected {}, got {} ({})", o, r, i);
}
});
break;
}
}

@ -3,17 +3,15 @@ use std::collections::HashMap;
use pumpkin_data::{block::Block, chunk::ChunkStatus};
use pumpkin_nbt::{from_bytes, nbt_long_array};
use pumpkin_util::math::{ceil_log2, position::BlockPos, vector2::Vector2};
use pumpkin_util::math::{position::BlockPos, vector2::Vector2};
use serde::{Deserialize, Serialize};
use crate::{
block::ChunkBlockState,
coordinates::{ChunkRelativeBlockCoordinates, Height},
};
use crate::generation::section_coords;
use super::{
CHUNK_AREA, ChunkBlocks, ChunkData, ChunkHeightmaps, ChunkParsingError, SUBCHUNK_VOLUME,
ScheduledTick, TickPriority,
ChunkData, ChunkHeightmaps, ChunkParsingError, ChunkSections, ScheduledTick, SubChunk,
TickPriority,
palette::{BiomePalette, BlockPalette},
};
pub mod anvil;
@ -50,72 +48,19 @@ impl ChunkData {
)));
}
// this needs to be boxed, otherwise it will cause a stack-overflow
let mut blocks = ChunkBlocks::Homogeneous(0);
let mut block_index = 0; // which block we're currently at
for section in chunk_data.sections.into_iter() {
let block_states = match section.block_states {
Some(states) => states,
None => continue, // TODO @lukas0008 this should instead fill all blocks with the only element of the palette
};
let palette = block_states
.palette
.iter()
.map(ChunkBlockState::from_palette)
.collect::<Vec<_>>();
let block_data = match block_states.data {
None => {
// We skipped placing an empty subchunk.
// We need to increase the y coordinate of the next subchunk being placed.
block_index += SUBCHUNK_VOLUME;
continue;
}
Some(d) => d,
};
// How many bits each block has in one of the palette u64s
let block_bit_size = if palette.len() < 16 {
4
} else {
ceil_log2(palette.len() as u32).max(4)
};
// How many blocks there are in one of the palettes u64s
let blocks_in_palette = 64 / block_bit_size;
let mask = (1 << block_bit_size) - 1;
'block_loop: for block in block_data.iter() {
for i in 0..blocks_in_palette {
let index = (block >> (i * block_bit_size)) & mask;
let block = &palette[index as usize];
// TODO allow indexing blocks directly so we can just use block_index and save some time?
// this is fine because we initialized the heightmap of `blocks`
// from the cached value in the world file
blocks.set_block_no_heightmap_update(
ChunkRelativeBlockCoordinates {
z: ((block_index % CHUNK_AREA) / 16).into(),
y: Height::from_absolute((block_index / CHUNK_AREA) as u16),
x: (block_index % 16).into(),
},
block.get_id(),
);
block_index += 1;
// if `SUBCHUNK_VOLUME `is not divisible by `blocks_in_palette` the block_data
// can sometimes spill into other subchunks. We avoid that by aborting early
if (block_index % SUBCHUNK_VOLUME) == 0 {
break 'block_loop;
}
}
}
}
let sub_chunks = chunk_data
.sections
.into_iter()
.map(|section| SubChunk {
block_states: BlockPalette::from_disk_nbt(section.block_states),
biomes: BiomePalette::from_disk_nbt(section.biomes),
})
.collect();
let min_y = section_coords::section_to_block(chunk_data.min_y_section);
let section = ChunkSections::new(sub_chunks, min_y);
Ok(ChunkData {
sections: blocks,
section,
heightmap: chunk_data.heightmaps,
position,
// This chunk is read from disk, so it has not been modified
@ -152,31 +97,55 @@ impl ChunkData {
}
}
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename_all = "PascalCase")]
pub struct PaletteEntry {
// block name
pub name: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub properties: Option<HashMap<String, String>>,
}
#[derive(Serialize, Deserialize, Debug)]
struct ChunkSection {
struct ChunkSectionNBT {
block_states: ChunkSectionBlockStates,
biomes: ChunkSectionBiomes,
// TODO
// #[serde(rename = "BlockLight", skip_serializing_if = "Option::is_none")]
// block_light: Option<Box<[u8]>>,
// #[serde(rename = "SkyLight", skip_serializing_if = "Option::is_none")]
// sky_light: Option<Box<[u8]>>,
#[serde(rename = "Y")]
y: i8,
#[serde(skip_serializing_if = "Option::is_none")]
block_states: Option<ChunkSectionBlockStates>,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
struct ChunkSectionBlockStates {
pub struct ChunkSectionBiomes {
#[serde(
serialize_with = "nbt_long_array",
skip_serializing_if = "Option::is_none"
)]
data: Option<Box<[i64]>>,
palette: Vec<PaletteEntry>,
pub(crate) data: Option<Box<[i64]>>,
pub(crate) palette: Vec<PaletteBiomeEntry>,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
// NOTE: Change not documented in the wiki; biome palettes are directly just the name now
#[serde(rename_all = "PascalCase", transparent)]
pub struct PaletteBiomeEntry {
/// Biome name
pub name: String,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct ChunkSectionBlockStates {
#[serde(
serialize_with = "nbt_long_array",
skip_serializing_if = "Option::is_none"
)]
pub(crate) data: Option<Box<[i64]>>,
pub(crate) palette: Vec<PaletteBlockEntry>,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename_all = "PascalCase")]
pub struct PaletteBlockEntry {
/// Block name
pub name: String,
/// Key-value pairs of properties
#[serde(skip_serializing_if = "Option::is_none")]
pub properties: Option<HashMap<String, String>>,
}
#[derive(Serialize, Deserialize, Debug)]
@ -201,13 +170,13 @@ struct ChunkNbt {
data_version: i32,
#[serde(rename = "xPos")]
x_pos: i32,
// #[serde(rename = "yPos")]
//y_pos: i32,
#[serde(rename = "zPos")]
z_pos: i32,
#[serde(rename = "yPos")]
min_y_section: i32,
status: ChunkStatus,
#[serde(rename = "sections")]
sections: Vec<ChunkSection>,
sections: Vec<ChunkSectionNBT>,
heightmaps: ChunkHeightmaps,
#[serde(rename = "block_ticks")]
block_ticks: Vec<SerializedScheduledTick>,

@ -191,9 +191,7 @@ where
unreachable!("Default Serializer must be created")
}
Err(err) => {
if let Err(err) = stream.send(LoadedData::Error((chunks[0], err))).await {
log::warn!("Failed to send data to the chunk stream: {:?}", err);
};
let _ = stream.send(LoadedData::Error((chunks[0], err))).await;
return;
}
};
@ -204,10 +202,10 @@ where
let intermediary = async {
while let Some(data) = recv.recv().await {
let wrapped_data = data.map_loaded(|data| Arc::new(RwLock::new(data)));
stream
.send(wrapped_data)
.await
.expect("Failed chunk wrapper intermediary");
if stream.send(wrapped_data).await.is_err() {
// Stream is closed, so stop unneeded computation and io
return;
}
}
};

@ -1,20 +1,18 @@
use palette::{BiomePalette, BlockPalette};
use pumpkin_nbt::nbt_long_array;
use pumpkin_util::math::{position::BlockPos, vector2::Vector2};
use serde::{Deserialize, Serialize};
use std::iter::repeat_with;
use thiserror::Error;
use crate::coordinates::ChunkRelativeBlockCoordinates;
pub mod format;
pub mod io;
pub mod palette;
// TODO
const WORLD_HEIGHT: usize = 384;
pub const CHUNK_AREA: usize = 16 * 16;
pub const SUBCHUNK_VOLUME: usize = CHUNK_AREA * 16;
pub const SUBCHUNKS_COUNT: usize = WORLD_HEIGHT / 16;
pub const CHUNK_VOLUME: usize = CHUNK_AREA * WORLD_HEIGHT;
pub const CHUNK_WIDTH: usize = BlockPalette::SIZE;
pub const CHUNK_AREA: usize = CHUNK_WIDTH * CHUNK_WIDTH;
pub const BIOME_VOLUME: usize = BiomePalette::VOLUME;
pub const SUBCHUNK_VOLUME: usize = CHUNK_AREA * CHUNK_WIDTH;
#[derive(Error, Debug)]
pub enum ChunkReadingError {
@ -106,14 +104,14 @@ pub struct ScheduledTick {
}
pub struct ChunkData {
/// See description in [`ChunkBlocks`]
pub sections: ChunkBlocks,
pub section: ChunkSections,
/// See `https://minecraft.wiki/w/Heightmap` for more info
pub heightmap: ChunkHeightmaps,
pub position: Vector2<i32>,
pub dirty: bool,
pub block_ticks: Vec<ScheduledTick>,
pub fluid_ticks: Vec<ScheduledTick>,
pub dirty: bool,
}
/// Represents pure block data for a chunk.
@ -121,25 +119,41 @@ pub struct ChunkData {
/// There are currently 24 subchunks per chunk.
///
/// A chunk can be:
/// - Homogeneous: the whole chunk is filled with one block type, like air or water.
/// - Subchunks: 24 separate subchunks are stored.
#[derive(PartialEq, Debug, Clone)]
pub enum ChunkBlocks {
Homogeneous(u16),
Subchunks(Box<[SubchunkBlocks; SUBCHUNKS_COUNT]>),
#[derive(Debug)]
pub struct ChunkSections {
pub sections: Box<[SubChunk]>,
min_y: i32,
}
/// Subchunks are vertical portions of a chunk. They are 16 blocks tall.
///
/// A subchunk can be:
/// - Homogeneous: the whole subchunk is filled with one block type, like air or water.
/// - Heterogeneous: 16^3 = 4096 individual blocks are stored.
#[derive(Clone, PartialEq, Debug)]
pub enum SubchunkBlocks {
Homogeneous(u16),
// The packet relies on this ordering -> leave it like this for performance
/// Ordering: yzx (y being the most significant)
Heterogeneous(Box<[u16; SUBCHUNK_VOLUME]>),
impl ChunkSections {
#[cfg(test)]
pub fn dump_blocks(&self) -> Vec<u16> {
let mut dump = Vec::new();
for section in self.sections.iter() {
section.block_states.for_each(|raw_id| {
dump.push(raw_id);
});
}
dump
}
#[cfg(test)]
pub fn dump_biomes(&self) -> Vec<u8> {
let mut dump = Vec::new();
for section in self.sections.iter() {
section.biomes.for_each(|raw_id| {
dump.push(raw_id);
});
}
dump
}
}
#[derive(Debug, Default)]
pub struct SubChunk {
pub block_states: BlockPalette,
pub biomes: BiomePalette,
}
#[derive(Deserialize, Serialize, Debug, Clone)]
@ -155,6 +169,7 @@ pub struct ChunkHeightmaps {
impl Default for ChunkHeightmaps {
fn default() -> Self {
Self {
// 9 bits per entry
// 0 packed into an i64 7 times.
motion_blocking: vec![0; 37].into_boxed_slice(),
world_surface: vec![0; 37].into_boxed_slice(),
@ -162,134 +177,138 @@ impl Default for ChunkHeightmaps {
}
}
impl SubchunkBlocks {
/// Gets the given block in the chunk
pub fn get_block(&self, position: ChunkRelativeBlockCoordinates) -> Option<u16> {
match &self {
Self::Homogeneous(block) => Some(*block),
Self::Heterogeneous(blocks) => blocks.get(convert_index(position)).copied(),
}
impl ChunkSections {
pub fn new(sections: Box<[SubChunk]>, min_y: i32) -> Self {
Self { sections, min_y }
}
/// Sets the given block in the chunk, returning the old block
pub fn set_block(&mut self, position: ChunkRelativeBlockCoordinates, block_id: u16) {
// TODO @LUK_ESC? update the heightmap
self.set_block_no_heightmap_update(position, block_id)
}
/// Sets the given block in the chunk, returning the old block
/// Contrary to `set_block` this does not update the heightmap.
///
/// Only use this if you know you don't need to update the heightmap
/// or if you manually set the heightmap in `empty_with_heightmap`
pub fn set_block_no_heightmap_update(
&mut self,
position: ChunkRelativeBlockCoordinates,
new_block: u16,
) {
match self {
Self::Homogeneous(block) => {
if *block != new_block {
let mut blocks = Box::new([*block; SUBCHUNK_VOLUME]);
blocks[convert_index(position)] = new_block;
*self = Self::Heterogeneous(blocks)
}
}
Self::Heterogeneous(blocks) => {
blocks[convert_index(position)] = new_block;
if blocks.iter().all(|b| *b == new_block) {
*self = Self::Homogeneous(new_block)
}
}
}
}
pub fn clone_as_array(&self) -> Box<[u16; SUBCHUNK_VOLUME]> {
match &self {
Self::Homogeneous(block) => Box::new([*block; SUBCHUNK_VOLUME]),
Self::Heterogeneous(blocks) => blocks.clone(),
}
}
}
impl ChunkBlocks {
/// Gets the given block in the chunk
pub fn get_block(&self, position: ChunkRelativeBlockCoordinates) -> Option<u16> {
match &self {
Self::Homogeneous(block) => Some(*block),
Self::Subchunks(subchunks) => subchunks
.get((position.y.get_absolute() / 16) as usize)
.and_then(|subchunk| subchunk.get_block(position)),
}
}
/// Sets the given block in the chunk, returning the old block
pub fn set_block(&mut self, position: ChunkRelativeBlockCoordinates, block_id: u16) {
// TODO @LUK_ESC? update the heightmap
self.set_block_no_heightmap_update(position, block_id)
}
/// Sets the given block in the chunk, returning the old block
/// Contrary to `set_block` this does not update the heightmap.
///
/// Only use this if you know you don't need to update the heightmap
/// or if you manually set the heightmap in `empty_with_heightmap`
pub fn set_block_no_heightmap_update(
&mut self,
position: ChunkRelativeBlockCoordinates,
new_block: u16,
) {
match self {
Self::Homogeneous(block) => {
if *block != new_block {
let mut subchunks = vec![SubchunkBlocks::Homogeneous(0); SUBCHUNKS_COUNT];
subchunks[(position.y.get_absolute() / 16) as usize]
.set_block(position, new_block);
*self = Self::Subchunks(subchunks.try_into().unwrap());
}
}
Self::Subchunks(subchunks) => {
subchunks[(position.y.get_absolute() / 16) as usize].set_block(position, new_block);
if subchunks
.iter()
.all(|subchunk| *subchunk == SubchunkBlocks::Homogeneous(new_block))
{
*self = Self::Homogeneous(new_block)
}
}
}
}
//TODO: Needs optimizations
pub fn array_iter_subchunks(
pub fn get_block_absolute_y(
&self,
) -> Box<dyn Iterator<Item = Box<[u16; SUBCHUNK_VOLUME]>> + '_> {
match self {
Self::Homogeneous(block) => {
Box::new(repeat_with(|| Box::new([*block; SUBCHUNK_VOLUME])).take(SUBCHUNKS_COUNT))
}
Self::Subchunks(subchunks) => {
Box::new(subchunks.iter().map(|subchunk| subchunk.clone_as_array()))
}
relative_x: usize,
y: i32,
relative_z: usize,
) -> Option<u16> {
let y = y - self.min_y;
if y < 0 {
None
} else {
let relative_y = y as usize;
self.get_relative_block(relative_x, relative_y, relative_z)
}
}
pub fn set_block_absolute_y(
&mut self,
relative_x: usize,
y: i32,
relative_z: usize,
block_state_id: u16,
) {
let y = y - self.min_y;
debug_assert!(y > 0);
let relative_y = y as usize;
self.set_relative_block(relative_x, relative_y, relative_z, block_state_id);
}
/// Gets the given block in the chunk
fn get_relative_block(
&self,
relative_x: usize,
relative_y: usize,
relative_z: usize,
) -> Option<u16> {
debug_assert!(relative_x < BlockPalette::SIZE);
debug_assert!(relative_z < BlockPalette::SIZE);
let section_index = relative_y / BlockPalette::SIZE;
let relative_y = relative_y % BlockPalette::SIZE;
self.sections
.get(section_index)
.map(|section| section.block_states.get(relative_x, relative_y, relative_z))
}
/// Sets the given block in the chunk, returning the old block
#[inline]
pub fn set_relative_block(
&mut self,
relative_x: usize,
relative_y: usize,
relative_z: usize,
block_state_id: u16,
) {
// TODO @LUK_ESC? update the heightmap
self.set_block_no_heightmap_update(relative_x, relative_y, relative_z, block_state_id);
}
/// Sets the given block in the chunk, returning the old block
/// Contrary to `set_block` this does not update the heightmap.
///
/// Only use this if you know you don't need to update the heightmap
/// or if you manually set the heightmap in `empty_with_heightmap`
pub fn set_block_no_heightmap_update(
&mut self,
relative_x: usize,
relative_y: usize,
relative_z: usize,
block_state_id: u16,
) {
debug_assert!(relative_x < BlockPalette::SIZE);
debug_assert!(relative_z < BlockPalette::SIZE);
let section_index = relative_y / BlockPalette::SIZE;
let relative_y = relative_y % BlockPalette::SIZE;
if let Some(section) = self.sections.get_mut(section_index) {
section
.block_states
.set(relative_x, relative_y, relative_z, block_state_id);
}
}
/// Sets the given block in the chunk, returning the old block
pub fn set_relative_biome(
&mut self,
relative_x: usize,
relative_y: usize,
relative_z: usize,
biome_id: u8,
) {
debug_assert!(relative_x < BiomePalette::SIZE);
debug_assert!(relative_z < BiomePalette::SIZE);
let section_index = relative_y / BiomePalette::SIZE;
let relative_y = relative_y % BiomePalette::SIZE;
self.sections[section_index]
.biomes
.set(relative_x, relative_y, relative_z, biome_id);
}
}
impl ChunkData {
/// Gets the given block in the chunk
pub fn get_block(&self, position: ChunkRelativeBlockCoordinates) -> Option<u16> {
self.sections.get_block(position)
#[inline]
pub fn get_relative_block(
&self,
relative_x: usize,
relative_y: usize,
relative_z: usize,
) -> Option<u16> {
self.section
.get_relative_block(relative_x, relative_y, relative_z)
}
/// Sets the given block in the chunk, returning the old block
pub fn set_block(&mut self, position: ChunkRelativeBlockCoordinates, block_id: u16) {
/// Sets the given block in the chunk
#[inline]
pub fn set_relative_block(
&mut self,
relative_x: usize,
relative_y: usize,
relative_z: usize,
block_state_id: u16,
) {
// TODO @LUK_ESC? update the heightmap
self.sections.set_block(position, block_id);
self.section
.set_relative_block(relative_x, relative_y, relative_z, block_state_id);
}
/// Sets the given block in the chunk, returning the old block
@ -297,12 +316,16 @@ impl ChunkData {
///
/// Only use this if you know you don't need to update the heightmap
/// or if you manually set the heightmap in `empty_with_heightmap`
#[inline]
pub fn set_block_no_heightmap_update(
&mut self,
position: ChunkRelativeBlockCoordinates,
block: u16,
relative_x: usize,
relative_y: usize,
relative_z: usize,
block_state_id: u16,
) {
self.sections.set_block_no_heightmap_update(position, block);
self.section
.set_relative_block(relative_x, relative_y, relative_z, block_state_id);
}
#[expect(dead_code)]
@ -312,6 +335,7 @@ impl ChunkData {
todo!()
}
}
#[derive(Error, Debug)]
pub enum ChunkParsingError {
#[error("Failed reading chunk status {0}")]
@ -322,10 +346,6 @@ pub enum ChunkParsingError {
ErrorDeserializingChunk(String),
}
fn convert_index(index: ChunkRelativeBlockCoordinates) -> usize {
// % works for negative numbers as intended.
(index.y.get_absolute() % 16) as usize * CHUNK_AREA + *index.z as usize * 16 + *index.x as usize
}
#[derive(Error, Debug)]
pub enum ChunkSerializingError {
#[error("Error serializing chunk: {0}")]

@ -0,0 +1,495 @@
use std::{
cmp::Ordering,
collections::{HashMap, hash_map::Entry},
hash::Hash,
};
use pumpkin_data::{block::Block, chunk::Biome};
use pumpkin_macros::block_state;
use pumpkin_util::encompassing_bits;
use crate::block::ChunkBlockState;
use super::format::{
ChunkSectionBiomes, ChunkSectionBlockStates, PaletteBiomeEntry, PaletteBlockEntry,
};
/// 3d array indexed by y,z,x
type AbstractCube<T, const DIM: usize> = [[[T; DIM]; DIM]; DIM];

// TODO: Verify the default state for these blocks is the only state
const AIR: ChunkBlockState = block_state!("air");
const CAVE_AIR: ChunkBlockState = block_state!("cave_air");
const VOID_AIR: ChunkBlockState = block_state!("void_air");

/// Returns `true` if `state_id` is none of the three air block states
/// (`air`, `cave_air`, `void_air`). Used to compute the non-air block count
/// that accompanies a chunk section on the network.
#[inline]
fn is_not_air_block(state_id: u16) -> bool {
    state_id != AIR.state_id && state_id != CAVE_AIR.state_id && state_id != VOID_AIR.state_id
}
/// Dense (uncompressed) backing storage for a paletted cube that holds more
/// than one distinct value: the raw `DIM`^3 values plus a tally of how often
/// each distinct value occurs. The tally lets callers size palettes and
/// detect a collapse back to a single value without rescanning the cube.
#[derive(Debug)]
pub struct HeterogeneousPaletteData<V: Hash + Eq + Copy, const DIM: usize> {
    // Raw values, indexed [y][z][x] (see `AbstractCube`).
    cube: AbstractCube<V, DIM>,
    // value -> number of occurrences in `cube`; entries never stay at zero.
    counts: HashMap<V, u16>,
}
impl<V: Hash + Eq + Copy, const DIM: usize> HeterogeneousPaletteData<V, DIM> {
    /// Builds the palette data from a raw cube, tallying how many times each
    /// distinct value occurs so later palette sizing needs no rescan.
    fn from_cube(cube: AbstractCube<V, DIM>) -> Self {
        let mut counts: HashMap<V, u16> = HashMap::new();
        for value in cube.as_flattened().as_flattened() {
            *counts.entry(*value).or_insert(0) += 1;
        }
        Self { cube, counts }
    }

    /// Returns the value stored at the given local coordinates.
    fn get(&self, x: usize, y: usize, z: usize) -> V {
        debug_assert!(x < DIM);
        debug_assert!(y < DIM);
        debug_assert!(z < DIM);
        self.cube[y][z][x]
    }

    /// Overwrites the value at the given local coordinates while keeping the
    /// occurrence counts in sync; a count that reaches zero is removed so
    /// `counts.len()` always equals the number of distinct values present.
    fn set(&mut self, x: usize, y: usize, z: usize, value: V) {
        debug_assert!(x < DIM);
        debug_assert!(y < DIM);
        debug_assert!(z < DIM);
        let previous = self.cube[y][z][x];
        if let Some(count) = self.counts.get_mut(&previous) {
            *count -= 1;
            if *count == 0 {
                self.counts.remove(&previous);
            }
        }
        self.cube[y][z][x] = value;
        *self.counts.entry(value).or_insert(0) += 1;
    }
}
/// A paletted container is a cube of registry ids. It uses a custom compression scheme based on how
/// many distinct registry ids are in the cube.
#[derive(Debug)]
pub enum PalettedContainer<V: Hash + Eq + Copy + Default, const DIM: usize> {
    /// Every position holds the same value; only that value is stored.
    Homogeneous(V),
    /// More than one distinct value; boxed to keep the enum itself small.
    Heterogeneous(Box<HeterogeneousPaletteData<V, DIM>>),
}
impl<V: Hash + Eq + Copy + Default, const DIM: usize> PalettedContainer<V, DIM> {
    /// Edge length of the cube.
    pub const SIZE: usize = DIM;
    /// Total number of entries in the cube.
    pub const VOLUME: usize = DIM * DIM * DIM;

    /// Minimum bits needed to index this container's palette: 0 for a
    /// homogeneous cube (no indices are stored at all), otherwise enough
    /// bits to address every distinct value (via `encompassing_bits`).
    fn bits_per_entry(&self) -> u8 {
        match self {
            Self::Homogeneous(_) => 0,
            Self::Heterogeneous(data) => encompassing_bits(data.counts.len()),
        }
    }

    /// Serializes this container into `(palette, packed indices)`.
    ///
    /// For a homogeneous cube the palette is the single value and the packed
    /// data is empty. Otherwise each entry is `bits_per_entry` bits wide,
    /// packed least-significant-first within each `i64`; entries never
    /// straddle an `i64` boundary (leftover high bits stay zero). The caller
    /// chooses `bits_per_entry`, which must be able to represent every
    /// palette index and be at most 15.
    pub fn to_palette_and_packed_data(&self, bits_per_entry: u8) -> (Box<[V]>, Box<[i64]>) {
        match self {
            Self::Homogeneous(registry_id) => (Box::new([*registry_id]), Box::new([])),
            Self::Heterogeneous(data) => {
                debug_assert!(bits_per_entry >= encompassing_bits(data.counts.len()));
                debug_assert!(bits_per_entry <= 15);
                // NOTE: palette order follows HashMap iteration order, which is
                // not deterministic across runs; each serialization is
                // self-consistent because the index map below is derived from it.
                let palette: Box<[V]> = data.counts.keys().copied().collect();
                let key_to_index_map: HashMap<V, usize> = palette
                    .iter()
                    .enumerate()
                    .map(|(index, key)| (*key, index))
                    .collect();
                let blocks_per_i64 = 64 / bits_per_entry;
                let packed_indices = data
                    .cube
                    .as_flattened()
                    .as_flattened()
                    .chunks(blocks_per_i64 as usize)
                    .map(|chunk| {
                        chunk.iter().enumerate().fold(0, |acc, (index, key)| {
                            let key_index = key_to_index_map.get(key).unwrap();
                            debug_assert!((1 << bits_per_entry) > *key_index);
                            let packed_offset_index =
                                (*key_index as u64) << (bits_per_entry as usize * index);
                            acc | packed_offset_index as i64
                        })
                    })
                    .collect();
                (palette, packed_indices)
            }
        }
    }

    /// Inverse of [`Self::to_palette_and_packed_data`]: rebuilds a container
    /// from a palette and its packed indices.
    ///
    /// Tolerant of malformed input rather than panicking: an empty palette,
    /// a packed array of the wrong length, or an out-of-range index all fall
    /// back to `V::default()` with a logged warning. A one-entry palette
    /// short-circuits to `Homogeneous` and ignores the packed data.
    pub fn from_palette_and_packed_data(
        palette: &[V],
        packed_data: &[i64],
        minimum_bits_per_entry: u8,
    ) -> Self {
        if palette.is_empty() {
            log::warn!("No palette data! Defaulting...");
            Self::Homogeneous(V::default())
        } else if palette.len() == 1 {
            Self::Homogeneous(palette[0])
        } else {
            let bits_per_key = encompassing_bits(palette.len()).max(minimum_bits_per_entry);
            let index_mask = (1 << bits_per_key) - 1;
            let keys_per_i64 = 64 / bits_per_key;
            let expected_i64_count = Self::VOLUME.div_ceil(keys_per_i64 as usize);
            // Only diagnostics: the zip below naturally truncates extra data,
            // and missing data leaves the default-initialized cube in place.
            match packed_data.len().cmp(&expected_i64_count) {
                Ordering::Greater => {
                    // Handled by the zip
                    log::warn!("Filled the section but there is still more data! Ignoring...");
                }
                Ordering::Less => {
                    // Handled by the array initialization and zip
                    log::warn!(
                        "Ran out of packed indices, but did not fill the section ({} vs {} for {}). Defaulting...",
                        packed_data.len() * keys_per_i64 as usize,
                        Self::VOLUME,
                        palette.len(),
                    );
                }
                // This is what we want!
                Ordering::Equal => {}
            }
            // TODO: Can we do this all with an `array::from_fn` or something?
            let mut cube = [[[V::default(); DIM]; DIM]; DIM];
            cube.as_flattened_mut()
                .as_flattened_mut()
                .chunks_mut(keys_per_i64 as usize)
                .zip(packed_data)
                .for_each(|(values, packed)| {
                    values.iter_mut().enumerate().for_each(|(index, value)| {
                        let lookup_index =
                            (*packed as usize >> (index * bits_per_key as usize)) & index_mask;
                        if let Some(v) = palette.get(lookup_index) {
                            *value = *v;
                        } else {
                            // The cube is already initialized to the default
                            log::warn!("Lookup index out of bounds! Defaulting...");
                        }
                    });
                });
            Self::Heterogeneous(Box::new(HeterogeneousPaletteData::from_cube(cube)))
        }
    }

    /// Returns the value at the given local coordinates.
    pub fn get(&self, x: usize, y: usize, z: usize) -> V {
        match self {
            Self::Homogeneous(value) => *value,
            Self::Heterogeneous(data) => data.get(x, y, z),
        }
    }

    /// Sets the value at the given local coordinates, switching representation
    /// as needed: a homogeneous cube expands to heterogeneous on the first
    /// differing write, and a heterogeneous cube collapses back to
    /// homogeneous once only one distinct value remains.
    pub fn set(&mut self, x: usize, y: usize, z: usize, value: V) {
        debug_assert!(x < Self::SIZE);
        debug_assert!(y < Self::SIZE);
        debug_assert!(z < Self::SIZE);
        match self {
            Self::Homogeneous(original) => {
                if value != *original {
                    let mut cube = [[[*original; DIM]; DIM]; DIM];
                    cube[y][z][x] = value;
                    let data = HeterogeneousPaletteData::from_cube(cube);
                    *self = Self::Heterogeneous(Box::new(data));
                }
            }
            Self::Heterogeneous(data) => {
                data.set(x, y, z, value);
                if data.counts.len() == 1 {
                    *self = Self::Homogeneous(*data.counts.keys().next().unwrap());
                }
            }
        }
    }

    /// Invokes `f` once per entry, in flattened y,z,x order; always exactly
    /// `Self::VOLUME` calls, even for a homogeneous cube.
    pub fn for_each<F>(&self, mut f: F)
    where
        F: FnMut(V),
    {
        match self {
            Self::Homogeneous(registry_id) => {
                for _ in 0..Self::VOLUME {
                    f(*registry_id);
                }
            }
            Self::Heterogeneous(data) => {
                data.cube
                    .as_flattened()
                    .as_flattened()
                    .iter()
                    .for_each(|value| {
                        f(*value);
                    });
            }
        }
    }
}
impl<V: Default + Hash + Eq + Copy, const DIM: usize> Default for PalettedContainer<V, DIM> {
fn default() -> Self {
Self::Homogeneous(V::default())
}
}
impl BiomePalette {
    /// Encodes this biome container for the chunk-data packet.
    ///
    /// A homogeneous cube is sent as a single value with no packed data.
    /// Otherwise, if the distinct-value count fits a mapped palette
    /// (`<= BIOME_NETWORK_MAX_MAP_BITS` bits) an indirect palette is used;
    /// above that the raw biome ids are packed directly at
    /// `BIOME_NETWORK_MAX_BITS` bits each, with no palette.
    pub fn convert_network(&self) -> NetworkSerialization<u8> {
        match self {
            Self::Homogeneous(registry_id) => NetworkSerialization {
                bits_per_entry: 0,
                palette: NetworkPalette::Single(*registry_id),
                packed_data: Box::new([]),
            },
            Self::Heterogeneous(data) => {
                let raw_bits_per_entry = encompassing_bits(data.counts.len());
                if raw_bits_per_entry > BIOME_NETWORK_MAX_MAP_BITS {
                    // Too many distinct biomes for a mapped palette: pack the
                    // raw ids themselves ("direct" palette).
                    let bits_per_entry = BIOME_NETWORK_MAX_BITS;
                    let values_per_i64 = 64 / bits_per_entry;
                    let packed_data = data
                        .cube
                        .as_flattened()
                        .as_flattened()
                        .chunks(values_per_i64 as usize)
                        .map(|chunk| {
                            chunk.iter().enumerate().fold(0, |acc, (index, value)| {
                                debug_assert!((1 << bits_per_entry) > *value);
                                let packed_offset_index =
                                    (*value as u64) << (bits_per_entry as usize * index);
                                acc | packed_offset_index as i64
                            })
                        })
                        .collect();
                    NetworkSerialization {
                        bits_per_entry,
                        palette: NetworkPalette::Direct,
                        packed_data,
                    }
                } else {
                    let bits_per_entry = raw_bits_per_entry.max(BIOME_NETWORK_MIN_MAP_BITS);
                    let (palette, packed) = self.to_palette_and_packed_data(bits_per_entry);
                    NetworkSerialization {
                        bits_per_entry,
                        palette: NetworkPalette::Indirect(palette),
                        packed_data: packed,
                    }
                }
            }
        }
    }

    /// Builds a biome container from the on-disk NBT section.
    /// Unrecognized biome names fall back to plains.
    pub fn from_disk_nbt(nbt: ChunkSectionBiomes) -> Self {
        let palette = nbt
            .palette
            .into_iter()
            .map(|entry| Biome::from_name(&entry.name).unwrap_or(&Biome::PLAINS).id)
            .collect::<Vec<_>>();
        Self::from_palette_and_packed_data(
            &palette,
            nbt.data.as_ref().unwrap_or(&vec![].into_boxed_slice()),
            BIOME_DISK_MIN_BITS,
        )
    }

    /// Serializes this biome container to the on-disk NBT section format.
    /// `data` is omitted (`None`) when no indices are needed, i.e. for a
    /// homogeneous cube whose palette alone describes the section.
    pub fn to_disk_nbt(&self) -> ChunkSectionBiomes {
        #[allow(clippy::unnecessary_min_or_max)]
        let bits_per_entry = self.bits_per_entry().max(BIOME_DISK_MIN_BITS);
        let (palette, packed_data) = self.to_palette_and_packed_data(bits_per_entry);
        ChunkSectionBiomes {
            data: if packed_data.is_empty() {
                None
            } else {
                Some(packed_data)
            },
            palette: palette
                .into_iter()
                .map(|registry_id| PaletteBiomeEntry {
                    // Panics if the id is unknown; ids here come from this
                    // container, which only stores valid registry ids.
                    name: Biome::from_id(registry_id).unwrap().registry_id.into(),
                })
                .collect(),
        }
    }
}
impl BlockPalette {
    /// Encodes this block-state container for the chunk-data packet.
    ///
    /// A homogeneous cube is sent as a single value with no packed data.
    /// Otherwise, if the distinct-state count fits a mapped palette
    /// (`<= BLOCK_NETWORK_MAX_MAP_BITS` bits) an indirect palette is used;
    /// above that the raw state ids are packed directly at
    /// `BLOCK_NETWORK_MAX_BITS` bits each, with no palette.
    pub fn convert_network(&self) -> NetworkSerialization<u16> {
        match self {
            Self::Homogeneous(registry_id) => NetworkSerialization {
                bits_per_entry: 0,
                palette: NetworkPalette::Single(*registry_id),
                packed_data: Box::new([]),
            },
            Self::Heterogeneous(data) => {
                let raw_bits_per_entry = encompassing_bits(data.counts.len());
                if raw_bits_per_entry > BLOCK_NETWORK_MAX_MAP_BITS {
                    // Too many distinct states for a mapped palette: pack the
                    // raw state ids themselves ("direct" palette).
                    let bits_per_entry = BLOCK_NETWORK_MAX_BITS;
                    let values_per_i64 = 64 / bits_per_entry;
                    let packed_data = data
                        .cube
                        .as_flattened()
                        .as_flattened()
                        .chunks(values_per_i64 as usize)
                        .map(|chunk| {
                            chunk.iter().enumerate().fold(0, |acc, (index, value)| {
                                debug_assert!((1 << bits_per_entry) > *value);
                                let packed_offset_index =
                                    (*value as i64) << (bits_per_entry as usize * index);
                                acc | packed_offset_index
                            })
                        })
                        .collect();
                    NetworkSerialization {
                        bits_per_entry,
                        palette: NetworkPalette::Direct,
                        packed_data,
                    }
                } else {
                    let bits_per_entry = raw_bits_per_entry.max(BLOCK_NETWORK_MIN_MAP_BITS);
                    let (palette, packed) = self.to_palette_and_packed_data(bits_per_entry);
                    NetworkSerialization {
                        bits_per_entry,
                        palette: NetworkPalette::Indirect(palette),
                        packed_data: packed,
                    }
                }
            }
        }
    }

    /// Number of non-air blocks in this sub-chunk (sent alongside the section
    /// over the network). Uses the occurrence counts, so the heterogeneous
    /// case is O(distinct states) rather than O(volume).
    pub fn non_air_block_count(&self) -> u16 {
        match self {
            Self::Homogeneous(registry_id) => {
                if is_not_air_block(*registry_id) {
                    Self::VOLUME as u16
                } else {
                    0
                }
            }
            Self::Heterogeneous(data) => data
                .counts
                .iter()
                .map(|(registry_id, count)| {
                    if is_not_air_block(*registry_id) {
                        *count
                    } else {
                        0
                    }
                })
                .sum(),
        }
    }

    /// Builds a block-state container from the on-disk NBT section.
    /// Palette entries that don't resolve to a known block state fall back
    /// to state id 0 with a logged warning.
    pub fn from_disk_nbt(nbt: ChunkSectionBlockStates) -> Self {
        let palette = nbt
            .palette
            .into_iter()
            .map(|entry| {
                if let Some(block_state) = ChunkBlockState::from_palette(&entry) {
                    block_state.get_id()
                } else {
                    log::warn!(
                        "Could not find valid block state for {}. Defaulting...",
                        entry.name
                    );
                    0
                }
            })
            .collect::<Vec<_>>();
        Self::from_palette_and_packed_data(
            &palette,
            nbt.data.as_ref().unwrap_or(&vec![].into_boxed_slice()),
            BLOCK_DISK_MIN_BITS,
        )
    }

    /// Serializes this block-state container to the on-disk NBT section
    /// format. `data` is omitted (`None`) when no indices are needed, i.e.
    /// for a homogeneous cube whose palette alone describes the section.
    pub fn to_disk_nbt(&self) -> ChunkSectionBlockStates {
        let bits_per_entry = self.bits_per_entry().max(BLOCK_DISK_MIN_BITS);
        let (palette, packed_data) = self.to_palette_and_packed_data(bits_per_entry);
        ChunkSectionBlockStates {
            data: if packed_data.is_empty() {
                None
            } else {
                Some(packed_data)
            },
            palette: palette
                .into_iter()
                .map(Self::block_state_id_to_palette_entry)
                .collect(),
        }
    }

    /// Converts a block-state id back into a named NBT palette entry
    /// (block name plus its property key-value pairs, if any).
    /// Panics if `registry_id` is not a valid state id; ids here come from
    /// this container, which only stores valid state ids.
    fn block_state_id_to_palette_entry(registry_id: u16) -> PaletteBlockEntry {
        let block = Block::from_state_id(registry_id).unwrap();
        PaletteBlockEntry {
            name: block.name.into(),
            properties: {
                if let Some(properties) = block.properties(registry_id) {
                    let props = properties.to_props();
                    let mut props_map = HashMap::new();
                    for prop in props {
                        props_map.insert(prop.0.clone(), prop.1.clone());
                    }
                    Some(props_map)
                } else {
                    None
                }
            },
        }
    }
}
/// Palette variant sent over the network for a paletted container.
pub enum NetworkPalette<V> {
    /// The whole section is one value; no packed data follows.
    Single(V),
    /// A list of values; the packed data stores indices into this list.
    Indirect(Box<[V]>),
    /// No palette; the packed data stores raw registry ids directly.
    Direct,
}
/// A paletted container serialized for the network protocol.
pub struct NetworkSerialization<V> {
    // Bit width of each packed entry (0 for `NetworkPalette::Single`).
    pub bits_per_entry: u8,
    pub palette: NetworkPalette<V>,
    // Entries packed LSB-first into i64 words; empty when bits_per_entry is 0.
    pub packed_data: Box<[i64]>,
}
// According to the wiki, palette serialization for disk and network is different. Disk
// serialization always uses a palette if greater than one entry. Network serialization packs ids
// directly instead of using a palette above a certain bits-per-entry
// TODO: Do our own testing; do we really need to handle network and disk serialization differently?

/// Block storage for a sub-chunk (side length 16); entries are block state ids.
pub type BlockPalette = PalettedContainer<u16, 16>;
/// Minimum bits per entry when writing block data to disk.
const BLOCK_DISK_MIN_BITS: u8 = 4;
/// Minimum bits per entry for a mapped (indirect) network block palette.
const BLOCK_NETWORK_MIN_MAP_BITS: u8 = 4;
/// Largest bit width still sent with a mapped palette; anything wider
/// switches to direct id packing.
const BLOCK_NETWORK_MAX_MAP_BITS: u8 = 8;
/// Bit width used for direct (palette-less) block packing over the network.
pub(crate) const BLOCK_NETWORK_MAX_BITS: u8 = 15;

/// Biome storage for a sub-chunk (side length 4); entries are biome registry ids.
pub type BiomePalette = PalettedContainer<u8, 4>;
/// Minimum bits per entry when writing biome data to disk (0 lets a
/// homogeneous section store no packed data at all).
const BIOME_DISK_MIN_BITS: u8 = 0;
/// Minimum bits per entry for a mapped (indirect) network biome palette.
const BIOME_NETWORK_MIN_MAP_BITS: u8 = 1;
/// Largest bit width still sent with a mapped biome palette.
const BIOME_NETWORK_MAX_MAP_BITS: u8 = 3;
/// Bit width used for direct (palette-less) biome packing over the network.
pub(crate) const BIOME_NETWORK_MAX_BITS: u8 = 7;

@ -1,152 +0,0 @@
use std::ops::Deref;
use derive_more::derive::{AsMut, AsRef, Display, Into};
use num_traits::{PrimInt, Signed, Unsigned};
use pumpkin_util::math::vector2::Vector2;
use pumpkin_util::math::vector3::Vector3;
use serde::{Deserialize, Serialize};
use crate::generation::settings::{GENERATION_SETTINGS, GeneratorSetting};
#[derive(
Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, AsRef, AsMut, Into, Display,
)]
#[serde(transparent)]
pub struct Height(pub i16);
impl Height {
pub fn from_absolute(height: u16) -> Self {
// TODO: :crying
let surface_config = GENERATION_SETTINGS
.get(&GeneratorSetting::Overworld)
.unwrap();
Self((height as i32 + surface_config.noise.min_y as i32) as i16)
}
/// Absolute height ranges from `0..WORLD_HEIGHT`
/// instead of `WORLD_LOWEST_Y..WORLD_MAX_Y`
pub fn get_absolute(self) -> u16 {
let surface_config = GENERATION_SETTINGS
.get(&GeneratorSetting::Overworld)
.unwrap();
(self.0 as i32 - surface_config.noise.min_y as i32) as u16
}
}
impl<T: PrimInt + Signed> From<T> for Height {
fn from(height: T) -> Self {
let height = height.to_i16().unwrap();
// assert!(height <= WORLD_MAX_Y);
// assert!(height >= WORLD_LOWEST_Y);
Self(height)
}
}
impl Deref for Height {
type Target = i16;
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[derive(
Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, AsRef, AsMut, Into, Display,
)]
#[repr(transparent)]
pub struct ChunkRelativeOffset(u8);
impl<T: PrimInt + Unsigned> From<T> for ChunkRelativeOffset {
fn from(scalar: T) -> Self {
let scalar = scalar.to_u8().unwrap();
assert!(scalar < 16);
Self(scalar)
}
}
impl Deref for ChunkRelativeOffset {
type Target = u8;
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct BlockCoordinates {
pub x: i32,
pub y: Height,
pub z: i32,
}
/// BlockCoordinates that do not specify a height.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct XZBlockCoordinates {
pub x: i32,
pub z: i32,
}
impl XZBlockCoordinates {
pub fn with_y(self, height: Height) -> BlockCoordinates {
BlockCoordinates {
x: self.x,
y: height,
z: self.z,
}
}
}
/// Coordinates of a block relative to a chunk
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct ChunkRelativeBlockCoordinates {
pub x: ChunkRelativeOffset,
pub y: Height,
pub z: ChunkRelativeOffset,
}
impl ChunkRelativeBlockCoordinates {
pub fn with_chunk_coordinates(self, chunk_coordinates: Vector2<i32>) -> BlockCoordinates {
BlockCoordinates {
x: *self.x as i32 + chunk_coordinates.x * 16,
y: self.y,
z: *self.z as i32 + chunk_coordinates.z * 16,
}
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct ChunkRelativeXZBlockCoordinates {
pub x: ChunkRelativeOffset,
pub z: ChunkRelativeOffset,
}
impl ChunkRelativeXZBlockCoordinates {
pub fn with_chunk_coordinates(&self, chunk_coordinates: Vector2<i32>) -> XZBlockCoordinates {
XZBlockCoordinates {
x: *self.x as i32 + chunk_coordinates.x * 16,
z: *self.z as i32 + chunk_coordinates.z * 16,
}
}
pub fn with_y(self, height: Height) -> ChunkRelativeBlockCoordinates {
ChunkRelativeBlockCoordinates {
x: self.x,
y: height,
z: self.z,
}
}
}
impl From<Vector3<i32>> for ChunkRelativeBlockCoordinates {
fn from(value: Vector3<i32>) -> Self {
Self {
x: (value.x as u8).into(),
z: (value.z as u8).into(),
y: value.y.into(),
}
}
}

@ -695,7 +695,7 @@ mod test {
let surface_config = GENERATION_SETTINGS
.get(&GeneratorSetting::Overworld)
.unwrap();
let shape = &surface_config.noise;
let shape = &surface_config.shape;
let chunk_pos = Vector2::new(7, 4);
let sampler = FluidLevelSampler::Chunk(StandardChunkFluidLevelSampler::new(
FluidLevel::new(63, WATER_BLOCK),

@ -1,11 +1,6 @@
use noise::Perlin;
use pumpkin_data::chunk::Biome;
use pumpkin_util::math::vector2::Vector2;
use pumpkin_util::math::vector3::Vector3;
use crate::block::state::ChunkBlockState;
use crate::chunk::{ChunkBlocks, ChunkData};
use crate::coordinates::{BlockCoordinates, ChunkRelativeBlockCoordinates, XZBlockCoordinates};
use crate::chunk::ChunkData;
use crate::generation::Seed;
pub trait GeneratorInit {
@ -13,41 +8,5 @@ pub trait GeneratorInit {
}
pub trait WorldGenerator: Sync + Send {
fn generate_chunk(&self, at: Vector2<i32>) -> ChunkData;
}
pub(crate) trait BiomeGenerator: Sync + Send {
fn generate_biome(&self, at: XZBlockCoordinates) -> Biome;
}
pub(crate) trait TerrainGenerator: Sync + Send {
fn prepare_chunk(&self, at: &Vector2<i32>);
fn clean_chunk(&self, at: &Vector2<i32>);
/// Is static
fn generate_block(
&self,
chunk_pos: &Vector2<i32>,
at: Vector3<i32>,
biome: Biome,
) -> ChunkBlockState;
}
pub(crate) trait PerlinTerrainGenerator: Sync + Send {
fn height_variation(&self) -> f64 {
4.0
}
fn prepare_chunk(&self, at: &Vector2<i32>, perlin: &Perlin);
/// Depends on the perlin noise height
fn generate_block(
&self,
coordinates: ChunkRelativeBlockCoordinates,
at: BlockCoordinates,
chunk_blocks: &mut ChunkBlocks,
chunk_height: i16,
biome: &Biome,
);
fn generate_chunk(&self, at: &Vector2<i32>) -> ChunkData;
}

@ -1,124 +0,0 @@
use noise::{NoiseFn, Perlin};
use pumpkin_util::math::vector2::Vector2;
use crate::{
chunk::{ChunkBlocks, ChunkData},
coordinates::{ChunkRelativeBlockCoordinates, ChunkRelativeXZBlockCoordinates},
};
use super::{
Seed,
generator::{BiomeGenerator, GeneratorInit, PerlinTerrainGenerator, WorldGenerator},
};
pub struct GenericGenerator<B: BiomeGenerator, T: PerlinTerrainGenerator> {
biome_generator: B,
terrain_generator: T,
// TODO: May make this optional?. But would be pain to use in most biomes then. Maybe make a new trait like
// PerlinTerrainGenerator
perlin: Perlin,
}
impl<B: BiomeGenerator + GeneratorInit, T: PerlinTerrainGenerator + GeneratorInit> GeneratorInit
for GenericGenerator<B, T>
{
fn new(seed: Seed) -> Self {
Self {
biome_generator: B::new(seed),
terrain_generator: T::new(seed),
perlin: Perlin::new(seed.0 as u32),
}
}
}
impl<B: BiomeGenerator, T: PerlinTerrainGenerator> WorldGenerator for GenericGenerator<B, T> {
fn generate_chunk(&self, at: Vector2<i32>) -> ChunkData {
let mut blocks = ChunkBlocks::Homogeneous(0);
self.terrain_generator.prepare_chunk(&at, &self.perlin);
let noise_value = self.perlin.get([at.x as f64 / 16.0, at.z as f64 / 16.0]);
let base_height = 64.0;
let chunk_height =
noise_value.mul_add(self.terrain_generator.height_variation(), base_height) as i16;
for x in 0..16u8 {
for z in 0..16u8 {
let biome = self.biome_generator.generate_biome(
ChunkRelativeXZBlockCoordinates {
x: x.into(),
z: z.into(),
}
.with_chunk_coordinates(at),
);
// Iterate from the highest block to the lowest, in order to minimize the heightmap updates
const WORLD_LOWEST_Y: i16 = -64; // TODO
for y in (WORLD_LOWEST_Y..chunk_height).rev() {
let coordinates = ChunkRelativeBlockCoordinates {
x: x.into(),
y: y.into(),
z: z.into(),
};
//coordinates,
self.terrain_generator.generate_block(
coordinates,
coordinates.with_chunk_coordinates(at),
&mut blocks,
chunk_height,
&biome,
);
}
}
}
ChunkData {
sections: blocks,
heightmap: Default::default(),
position: at,
// This chunk was just created! We want to say its been changed
dirty: true,
block_ticks: vec![],
fluid_ticks: vec![],
}
}
}
// TODO: implement static terrain generator
/*
fn generate_chunk(&mut self, at: Vector2<i32>) -> ChunkData {
let mut blocks = ChunkBlocks::default();
self.terrain_generator.prepare_chunk(&at, &self.perlin);
for x in 0..16u8 {
for z in 0..16u8 {
let biome = self.biome_generator.generate_biome(
ChunkRelativeXZBlockCoordinates {
x: x.into(),
z: z.into(),
}
.with_chunk_coordinates(at),
);
// Iterate from the highest block to the lowest, in order to minimize the heightmap updates
for y in (WORLD_LOWEST_Y..WORLD_MAX_Y).rev() {
let coordinates = ChunkRelativeBlockCoordinates {
x: x.into(),
y: y.into(),
z: z.into(),
};
blocks.set_block(
coordinates,
self.terrain_generator
.generate_block(coordinates.with_chunk_coordinates(at), biome),
);
}
}
}
ChunkData {
blocks,
position: at,
}
}
*/

@ -1,10 +1,10 @@
pub mod superflat;
use pumpkin_util::math::{vector2::Vector2, vector3::Vector3};
use crate::{
chunk::{ChunkBlocks, ChunkData},
coordinates::ChunkRelativeBlockCoordinates,
chunk::{
ChunkData, ChunkSections, SubChunk,
palette::{BiomePalette, BlockPalette},
},
generation::{
GlobalRandomConfig, Seed, WorldGenerator, generator::GeneratorInit,
noise_router::proto_noise_router::GlobalProtoNoiseRouter, proto_chunk::ProtoChunk,
@ -12,7 +12,10 @@ use crate::{
noise_router::NOISE_ROUTER_ASTS,
};
use super::settings::{GENERATION_SETTINGS, GeneratorSetting};
use super::{
biome_coords,
settings::{GENERATION_SETTINGS, GeneratorSetting},
};
pub struct VanillaGenerator {
random_config: GlobalRandomConfig,
@ -34,14 +37,18 @@ impl GeneratorInit for VanillaGenerator {
}
impl WorldGenerator for VanillaGenerator {
fn generate_chunk(&self, at: Vector2<i32>) -> ChunkData {
let mut blocks = ChunkBlocks::Homogeneous(0);
// TODO: This is bad, but it works
fn generate_chunk(&self, at: &Vector2<i32>) -> ChunkData {
// TODO: Dont hardcode this
let generation_settings = GENERATION_SETTINGS
.get(&GeneratorSetting::Overworld)
.unwrap();
let sub_chunks = generation_settings.shape.height as usize / BlockPalette::SIZE;
let sections = (0..sub_chunks).map(|_| SubChunk::default()).collect();
let mut sections = ChunkSections::new(sections, generation_settings.shape.min_y as i32);
let mut proto_chunk = ProtoChunk::new(
at,
*at,
&self.base_router,
&self.random_config,
generation_settings,
@ -51,27 +58,33 @@ impl WorldGenerator for VanillaGenerator {
proto_chunk.build_surface();
proto_chunk.generate_features();
for x in 0..16u8 {
for z in 0..16u8 {
// TODO: This can be chunk specific
for y in 0..generation_settings.noise.height {
let y = generation_settings.noise.min_y as i32 + y as i32;
let coordinates = ChunkRelativeBlockCoordinates {
x: x.into(),
y: y.into(),
z: z.into(),
};
for y in 0..biome_coords::from_block(generation_settings.shape.height) {
for z in 0..BiomePalette::SIZE {
for x in 0..BiomePalette::SIZE {
let absolute_y =
biome_coords::from_block(generation_settings.shape.min_y as i32) + y as i32;
let biome =
proto_chunk.get_biome(&Vector3::new(x as i32, absolute_y, z as i32));
sections.set_relative_biome(x, y as usize, z, biome.id);
}
}
}
let block = proto_chunk.get_block_state(&Vector3::new(x.into(), y, z.into()));
blocks.set_block(coordinates, block.state_id);
for y in 0..generation_settings.shape.height {
for z in 0..BlockPalette::SIZE {
for x in 0..BlockPalette::SIZE {
let absolute_y = generation_settings.shape.min_y as i32 + y as i32;
let block =
proto_chunk.get_block_state(&Vector3::new(x as i32, absolute_y, z as i32));
sections.set_relative_block(x, y as usize, z, block.state_id);
}
}
}
ChunkData {
sections: blocks,
section: sections,
heightmap: Default::default(),
position: at,
position: *at,
dirty: true,
block_ticks: Default::default(),
fluid_ticks: Default::default(),

@ -1,64 +0,0 @@
use pumpkin_data::chunk::Biome;
use pumpkin_util::math::vector2::Vector2;
use crate::{
block::state::ChunkBlockState,
coordinates::XZBlockCoordinates,
generation::{
Seed,
generator::{BiomeGenerator, GeneratorInit, TerrainGenerator},
generic_generator::GenericGenerator,
},
};
#[expect(dead_code)]
pub type SuperflatGenerator = GenericGenerator<SuperflatBiomeGenerator, SuperflatTerrainGenerator>;
pub(crate) struct SuperflatBiomeGenerator {}
impl GeneratorInit for SuperflatBiomeGenerator {
fn new(_: Seed) -> Self {
Self {}
}
}
impl BiomeGenerator for SuperflatBiomeGenerator {
// TODO make generic over Biome and allow changing the Biome in the config.
fn generate_biome(&self, _: XZBlockCoordinates) -> Biome {
Biome::PLAINS
}
}
pub(crate) struct SuperflatTerrainGenerator {}
impl GeneratorInit for SuperflatTerrainGenerator {
fn new(_: Seed) -> Self {
Self {}
}
}
impl TerrainGenerator for SuperflatTerrainGenerator {
fn prepare_chunk(&self, _at: &Vector2<i32>) {}
fn clean_chunk(&self, _at: &Vector2<i32>) {}
// TODO allow specifying which blocks should be at which height in the config.
fn generate_block(
&self,
_chunk_pos: &Vector2<i32>,
_at: pumpkin_util::math::vector3::Vector3<i32>,
_biome: Biome,
) -> ChunkBlockState {
todo!()
}
/*
fn generate_block(&self, at: BlockCoordinates, _: Biome) -> BlockState {
match *at.y {
-64 => block_state!("bedrock"),
-63..=-62 => block_state!("dirt"),
-61 => block_state!("grass_block"),
_ => BlockState::AIR,
}
}
*/
}

@ -9,7 +9,6 @@ pub mod carver;
pub mod chunk_noise;
mod feature;
mod generator;
mod generic_generator;
pub mod height_limit;
pub mod height_provider;
mod implementation;

@ -116,7 +116,7 @@ impl<'a> ProtoChunk<'a> {
random_config: &'a GlobalRandomConfig,
settings: &'a GenerationSettings,
) -> Self {
let generation_shape = &settings.noise;
let generation_shape = &settings.shape;
let horizontal_cell_count = CHUNK_DIM / generation_shape.horizontal_cell_block_count();
@ -220,10 +220,10 @@ impl<'a> ProtoChunk<'a> {
#[cfg(debug_assertions)]
{
assert!(local_pos.x >= 0 && local_pos.x <= 15);
assert!(local_pos.y < self.noise_sampler.height() as i32 && local_pos.y >= 0);
assert!(local_pos.y < self.height() as i32 && local_pos.y >= 0);
assert!(local_pos.z >= 0 && local_pos.z <= 15);
}
self.noise_sampler.height() as usize * CHUNK_DIM as usize * local_pos.x as usize
self.height() as usize * CHUNK_DIM as usize * local_pos.x as usize
+ CHUNK_DIM as usize * local_pos.y as usize
+ local_pos.z as usize
}
@ -234,8 +234,12 @@ impl<'a> ProtoChunk<'a> {
{
assert!(local_biome_pos.x >= 0 && local_biome_pos.x <= 3);
assert!(
local_biome_pos.y < biome_coords::from_chunk(self.noise_sampler.height() as i32)
&& local_biome_pos.y >= 0
local_biome_pos.y < biome_coords::from_chunk(self.height() as i32)
&& local_biome_pos.y >= 0,
"{} - {} vs {}",
0,
biome_coords::from_chunk(self.height() as i32),
local_biome_pos.y
);
assert!(local_biome_pos.z >= 0 && local_biome_pos.z <= 3);
}

@ -27,7 +27,8 @@ pub enum GeneratorSetting {
pub struct GenerationSettings {
pub legacy_random_source: bool,
pub sea_level: i32,
pub noise: GenerationShapeConfig,
#[serde(rename = "noise")]
pub shape: GenerationShapeConfig,
pub surface_rule: MaterialRule,
pub default_block: BlockStateCodec,
}

@ -1,4 +1,11 @@
use std::{fs, path::PathBuf, sync::Arc};
use std::{
fs,
path::PathBuf,
sync::{
Arc,
atomic::{AtomicBool, Ordering},
},
};
use dashmap::{DashMap, Entry};
use log::trace;
@ -7,7 +14,7 @@ use pumpkin_config::{advanced_config, chunk::ChunkFormat};
use pumpkin_util::math::{position::BlockPos, vector2::Vector2};
use tokio::{
sync::{Mutex, Notify, RwLock, mpsc},
task::{JoinHandle, JoinSet},
task::JoinHandle,
};
use tokio_util::task::TaskTracker;
@ -400,28 +407,32 @@ impl Level {
return;
}
// If false, stop loading chunks because the channel has closed.
let send_chunk =
move |is_new: bool,
chunk: SyncChunk,
channel: &mpsc::UnboundedSender<(SyncChunk, bool)>| {
let _ = channel
.send((chunk, is_new))
.inspect_err(|err| log::error!("unable to send chunk to channel: {}", err));
channel.send((chunk, is_new)).is_ok()
};
// First send all chunks that we have cached
// We expect best case scenario to have all cached
let mut remaining_chunks = Vec::new();
for chunk in chunks {
if let Some(chunk) = self.loaded_chunks.get(chunk) {
send_chunk(false, chunk.value().clone(), &channel);
let is_ok = if let Some(chunk) = self.loaded_chunks.get(chunk) {
send_chunk(false, chunk.value().clone(), &channel)
} else if let Some(spawn_chunk) = self.spawn_chunks.get(chunk) {
// Also clone the arc into the loaded chunks
self.loaded_chunks
.insert(*chunk, spawn_chunk.value().clone());
send_chunk(false, spawn_chunk.value().clone(), &channel);
send_chunk(false, spawn_chunk.value().clone(), &channel)
} else {
remaining_chunks.push(*chunk);
true
};
if !is_ok {
return;
}
}
@ -440,7 +451,7 @@ impl Level {
let level_block_ticks = self.block_ticks.clone();
let handle_load = async move {
while let Some(data) = load_bridge_recv.recv().await {
match data {
let is_ok = match data {
LoadedData::Loaded(chunk) => {
let position = chunk.read().await.position;
@ -455,12 +466,9 @@ impl Level {
.or_insert(chunk)
.value()
.clone();
send_chunk(false, value, &load_channel);
send_chunk(false, value, &load_channel)
}
LoadedData::Missing(pos) => generate_bridge_send
.send(pos)
.await
.expect("Failed to send position to generation handler"),
LoadedData::Missing(pos) => generate_bridge_send.send(pos).await.is_ok(),
LoadedData::Error((pos, error)) => {
match error {
// this is expected, and is not an error
@ -478,11 +486,13 @@ impl Level {
}
};
generate_bridge_send
.send(pos)
.await
.expect("Failed to send position to generation handler");
generate_bridge_send.send(pos).await.is_ok()
}
};
if !is_ok {
// This isn't recoverable, so stop listening
return;
}
}
};
@ -490,34 +500,50 @@ impl Level {
let loaded_chunks = self.loaded_chunks.clone();
let world_gen = self.world_gen.clone();
let handle_generate = async move {
let continue_to_generate = Arc::new(AtomicBool::new(true));
while let Some(pos) = generate_bridge_recv.recv().await {
if !continue_to_generate.load(Ordering::Relaxed) {
return;
}
let loaded_chunks = loaded_chunks.clone();
let world_gen = world_gen.clone();
let channel = channel.clone();
let cloned_continue_to_generate = continue_to_generate.clone();
rayon::spawn(move || {
// Rayon tasks are queued, so also check it here
if !cloned_continue_to_generate.load(Ordering::Relaxed) {
return;
}
let result = loaded_chunks
.entry(pos)
.or_insert_with(|| {
// Avoid possible duplicating work by doing this within the dashmap lock
let generated_chunk = world_gen.generate_chunk(pos);
let generated_chunk = world_gen.generate_chunk(&pos);
Arc::new(RwLock::new(generated_chunk))
})
.value()
.clone();
send_chunk(true, result, &channel);
if !send_chunk(true, result, &channel) {
// Stop any additional queued generations
cloned_continue_to_generate.store(false, Ordering::Relaxed);
}
});
}
};
let mut set = JoinSet::new();
set.spawn(handle_load);
set.spawn(handle_generate);
let tracker = TaskTracker::new();
tracker.spawn(handle_load);
tracker.spawn(handle_generate);
self.chunk_saver
.fetch_chunks(&self.level_folder, &remaining_chunks, load_bridge_send)
.await;
let _ = set.join_all().await;
tracker.close();
tracker.wait().await;
}
pub fn try_get_chunk(

@ -4,7 +4,6 @@ use pumpkin_util::math::vector2::Vector2;
pub mod biome;
pub mod block;
pub mod chunk;
pub mod coordinates;
pub mod cylindrical_chunk_iterator;
pub mod data;
pub mod dimension;
@ -14,7 +13,6 @@ pub mod level;
mod lock;
mod noise_router;
pub mod world_info;
pub const DIRECT_PALETTE_BITS: u32 = 15;
#[macro_export]
macro_rules! global_path {

@ -7,7 +7,8 @@ use crate::{generation::Seed, level::LevelFolder};
pub mod anvil;
pub const MINIMUM_SUPPORTED_WORLD_DATA_VERSION: i32 = 4080; // 1.21.2
// Constraint: disk biome palette serialization changed in 1.21.5
pub const MINIMUM_SUPPORTED_WORLD_DATA_VERSION: i32 = 4325; // 1.21.5
pub const MAXIMUM_SUPPORTED_WORLD_DATA_VERSION: i32 = 4325; // 1.21.5
pub(crate) trait WorldInfoReader {

@ -1,6 +1,6 @@
[package]
name = "pumpkin"
version = "0.1.0-dev"
version.workspace = true
description = "Empowering everyone to host fast and efficient Minecraft servers."
edition.workspace = true
rust-version = "1.85"

@ -90,7 +90,7 @@ pub fn register_button_blocks(manager: &mut BlockRegistry) {
props.facing = *player_direction;
} else {
props.facing = face.opposite().to_cardinal_direction();
};
}
props.to_state_id(block)
}

@ -63,7 +63,7 @@ impl PumpkinBlock for LeverBlock {
lever_props.facing = *player_direction;
} else {
lever_props.facing = face.opposite().to_cardinal_direction();
};
}
lever_props.to_state_id(block)
}

@ -168,16 +168,16 @@ impl RedstoneWireTurbo {
let mut cz = 0;
if from_west {
cx += 1;
};
}
if from_east {
cx -= 1;
};
}
if from_north {
cz += 1;
};
}
if from_south {
cz -= 1;
};
}
let UpdateNode { xbias, zbias, .. } = &self.nodes[upd1.index];
let xbias = *xbias;

@ -37,7 +37,7 @@ impl ArgumentConsumer for GamemodeArgumentConsumer {
if let Ok(gamemode) = GameMode::try_from(id) {
return Some(Arg::GameMode(gamemode));
}
};
}
GameMode::from_str(s).map_or_else(|_| None, |gamemode| Some(Arg::GameMode(gamemode)))
}

@ -185,7 +185,7 @@ impl<'a, T: DoubleEndedIterator<Item = (usize, char)>> Iterator
self.is_complete = true;
return Some(&self.s[start..self.pos]);
}
};
}
}
}
}

@ -39,11 +39,11 @@ impl ArgumentConsumer for RotationArgumentConsumer {
yaw %= 360.0;
if yaw >= 180.0 {
yaw -= 360.0;
};
}
pitch %= 360.0;
if pitch >= 180.0 {
pitch -= 360.0;
};
}
Some(Arg::Rotation(yaw, pitch))
}

@ -19,6 +19,7 @@ use async_trait::async_trait;
use pumpkin_util::text::TextComponent;
use pumpkin_util::text::color::{Color, NamedColor};
use pumpkin_util::text::hover::HoverEvent;
use std::fmt::Write as _;
use uuid::Uuid;
const NAMES: [&str; 1] = ["bossbar"];
@ -283,7 +284,7 @@ impl CommandExecutor for RemoveExecuter {
handle_bossbar_error(sender, err).await;
return Ok(());
}
};
}
Ok(())
}
@ -691,7 +692,7 @@ async fn handle_bossbar_error(sender: &CommandSender, error: BossbarUpdateError<
key.push_str(value);
key.push_str(".unchanged");
if let Some(variation) = variation {
key.push_str(&format!(".{variation}"));
write!(key, ".{variation}").unwrap();
}
send_error_message(sender, TextComponent::translate(key, [])).await;

@ -259,7 +259,6 @@ impl CommandExecutor for Executor {
.color(Color::Named(NamedColor::Red)),
)
.await;
continue;
}
}
}

@ -183,7 +183,7 @@ impl CommandExecutor for Executor {
}
}
}
};
}
sender
.send_message(TextComponent::translate(

@ -1,16 +1,16 @@
use async_trait::async_trait;
use pumpkin_protocol::CURRENT_MC_PROTOCOL;
use pumpkin_data::packet::CURRENT_MC_PROTOCOL;
use pumpkin_util::text::click::ClickEvent;
use pumpkin_util::text::hover::HoverEvent;
use pumpkin_util::text::{TextComponent, color::NamedColor};
use std::borrow::Cow;
use crate::server::CURRENT_MC_VERSION;
use crate::{
GIT_VERSION,
command::{
CommandError, CommandExecutor, CommandSender, args::ConsumedArgs, tree::CommandTree,
},
server::CURRENT_MC_VERSION,
};
const NAMES: [&str; 2] = ["pumpkin", "version"];

@ -198,7 +198,7 @@ impl CommandExecutor for SelfToEntityExecutor {
.send_message(TextComponent::translate("permissions.requires.player", []))
.await;
}
};
}
Ok(())
}
@ -226,7 +226,7 @@ impl CommandExecutor for SelfToPosExecutor {
.send_message(TextComponent::translate("permissions.requires.player", []))
.await;
}
};
}
Ok(())
}

@ -28,7 +28,7 @@ impl Display for Node {
f.write_char('>')?;
}
_ => {}
};
}
Ok(())
}
@ -72,7 +72,7 @@ impl Display for CommandTree {
if visible_children.is_empty() {
break;
};
}
f.write_char(' ')?;

@ -125,5 +125,5 @@ pub async fn player_attack_sound(pos: &Vector3<f64>, world: &World, attack_type:
.play_sound(Sound::EntityPlayerAttackWeak, SoundCategory::Players, pos)
.await;
}
};
}
}

@ -459,7 +459,7 @@ impl Player {
combat::spawn_sweep_particle(attacker_entity, &world, &pos).await;
}
_ => {}
};
}
if config.knockback {
combat::handle_knockback(
attacker_entity,
@ -1707,7 +1707,7 @@ impl Player {
// TODO: We give an error if all play packets are implemented
// return Err(Box::new(DeserializerError::UnknownPacket));
}
};
}
Ok(())
}
}

@ -1,13 +1,17 @@
use crate::entity::item::ItemEntity;
use crate::entity::player::Player;
use crate::item::pumpkin_item::{ItemMetadata, PumpkinItem};
use crate::server::Server;
use crate::world::BlockFlags;
use async_trait::async_trait;
use pumpkin_data::block::Block;
use pumpkin_data::entity::EntityType;
use pumpkin_data::item::Item;
use pumpkin_data::tag::Tagable;
use pumpkin_util::math::position::BlockPos;
use pumpkin_world::block::BlockDirection;
use std::sync::Arc;
pub struct HoeItem;
impl ItemMetadata for HoeItem {
@ -43,19 +47,54 @@ impl PumpkinItem for HoeItem {
|| block == &Block::COARSE_DIRT
|| block == &Block::ROOTED_DIRT
{
let mut future_block = block;
let world = player.world().await;
if face != &BlockDirection::Down
&& world.get_block_state(&location.up()).await.unwrap().air
{
world
.set_block_state(
&location,
Block::FARMLAND.default_state_id,
BlockFlags::NOTIFY_ALL,
)
.await;
//Only rooted can be right-clicked on the bottom of the block
if face == &BlockDirection::Down {
if block == &Block::ROOTED_DIRT {
future_block = &Block::DIRT;
}
} else {
// grass, dirt && dirt path become farmland
if (block == &Block::GRASS_BLOCK
|| block == &Block::DIRT_PATH
|| block == &Block::DIRT)
&& world.get_block_state(&location.up()).await.unwrap().air
{
future_block = &Block::FARMLAND;
}
//Coarse dirt and rooted dirt become dirt
else if block == &Block::COARSE_DIRT || block == &Block::ROOTED_DIRT {
future_block = &Block::DIRT;
}
}
world
.set_block_state(
&location,
future_block.default_state_id,
BlockFlags::NOTIFY_ALL,
)
.await;
// Rooted dirt also drops a hanging root
if block == &Block::ROOTED_DIRT {
let location = match face {
BlockDirection::Up => location.up().to_f64(),
BlockDirection::Down => location.down().to_f64(),
BlockDirection::North => location.up().to_f64().add_raw(0.0, -0.4, -1.0),
BlockDirection::South => location.up().to_f64().add_raw(0.0, -0.4, 1.0),
BlockDirection::West => location.up().to_f64().add_raw(-1.0, -0.4, 0.0),
BlockDirection::East => location.up().to_f64().add_raw(1.0, -0.4, 0.0),
};
let entity = world.create_entity(location, EntityType::ITEM);
// TODO: Merge stacks together
let item_entity =
Arc::new(ItemEntity::new(entity, Block::HANGING_ROOTS.item_id, 1).await);
world.spawn_entity(item_entity.clone()).await;
item_entity.send_meta_packet().await;
}
}
// TODO: implement hanging_roots
}
}

@ -37,6 +37,7 @@
compile_error!("Compiling for WASI targets is not supported!");
use plugin::PluginManager;
use pumpkin_data::packet::CURRENT_MC_PROTOCOL;
use std::{
io::{self},
sync::LazyLock,
@ -49,7 +50,6 @@ use tokio::sync::Mutex;
use crate::server::CURRENT_MC_VERSION;
use pumpkin::{PumpkinServer, SHOULD_STOP, STOP_INTERRUPT, init_log, stop_server};
use pumpkin_protocol::CURRENT_MC_PROTOCOL;
use pumpkin_util::text::{TextComponent, color::NamedColor};
use std::time::Instant;
// Setup some tokens to allow us to identify which event is for which socket.
@ -92,7 +92,6 @@ async fn main() {
.thread_name(|_| "rayon-worker".to_string())
.build_global()
.expect("Rayon thread pool can only be initialized once");
log::info!(
"Starting Pumpkin {CARGO_PKG_VERSION} ({GIT_VERSION}) for Minecraft {CURRENT_MC_VERSION} (Protocol {CURRENT_MC_PROTOCOL})",
);

@ -335,7 +335,7 @@ impl Player {
*carried_item = None;
}
}
};
}
}
Ok(())
}
@ -463,7 +463,7 @@ impl Player {
}
}
container_click::Slot::OutsideInventory => (),
};
}
Ok(())
}

@ -37,7 +37,7 @@ pub async fn start_lan_broadcast(bound_addr: SocketAddr) {
);
} else {
motd = advanced_motd.clone();
};
}
let advertisement = format!("[MOTD]{}[/MOTD][AD]{}[/AD]", &motd, bound_addr.port());

@ -392,7 +392,7 @@ impl Client {
if let Err(err) = packet.write(&mut packet_buf) {
log::error!("Failed to serialize packet {}: {}", P::PACKET_ID, err);
return;
};
}
if let Err(err) = self
.network_writer
@ -437,7 +437,7 @@ impl Client {
error
);
self.kick(TextComponent::text(text)).await;
};
}
}
}
@ -505,7 +505,7 @@ impl Client {
packet.id
);
}
};
}
Ok(())
}
@ -530,7 +530,7 @@ impl Client {
packet.id
);
}
};
}
Ok(())
}
@ -567,7 +567,7 @@ impl Client {
packet.id
);
}
};
}
Ok(())
}
@ -607,7 +607,7 @@ impl Client {
packet.id
);
}
};
}
Ok(())
}
@ -637,7 +637,7 @@ impl Client {
log::warn!("Can't kick in {:?} State", self.connection_state);
return;
}
};
}
log::debug!("Closing connection for {}", self.id);
self.close();
}

@ -1,6 +1,5 @@
use std::num::NonZeroI32;
use pumpkin_protocol::{CURRENT_MC_PROTOCOL, ConnectionState, server::handshake::SHandShake};
use pumpkin_data::packet::CURRENT_MC_PROTOCOL;
use pumpkin_protocol::{ConnectionState, server::handshake::SHandShake};
use pumpkin_util::text::TextComponent;
use crate::{net::Client, server::CURRENT_MC_VERSION};
@ -16,7 +15,7 @@ impl Client {
self.connection_state.store(handshake.next_state);
if self.connection_state.load() != ConnectionState::Status {
let protocol = version;
match protocol.cmp(&NonZeroI32::from(CURRENT_MC_PROTOCOL).get()) {
match protocol.cmp(&(CURRENT_MC_PROTOCOL as i32)) {
std::cmp::Ordering::Less => {
self.kick(TextComponent::translate(
"multiplayer.disconnect.outdated_client",

@ -9,6 +9,7 @@ use crate::net::PlayerConfig;
use crate::plugin::player::player_chat::PlayerChatEvent;
use crate::plugin::player::player_command_send::PlayerCommandSendEvent;
use crate::plugin::player::player_move::PlayerMoveEvent;
use crate::server::seasonal_events;
use crate::world::BlockFlags;
use crate::{
command::CommandSender,
@ -688,10 +689,15 @@ impl Player {
let config = advanced_config();
let message = match seasonal_events::modify_chat_message(&event.message) {
Some(m) => m,
None => event.message.clone(),
};
let decorated_message = &TextComponent::chat_decorated(
config.chat.format.clone(),
gameprofile.name.clone(),
event.message.clone(),
message,
);
let entity = &self.living_entity.entity;
@ -841,7 +847,7 @@ impl Player {
self.kick(TextComponent::text("Invalid client status"))
.await;
}
};
}
}
pub async fn handle_interact(&self, interact: SInteract) {
@ -914,7 +920,7 @@ impl Player {
))
.await;
return;
};
}
}
ActionType::Interact | ActionType::InteractAt => {
log::debug!("todo");
@ -1227,7 +1233,7 @@ impl Player {
if let Some(entity) = entity_from_egg(stack.item.id) {
self.spawn_entity_from_egg(entity, location, &face).await;
should_try_decrement = true;
};
}
if should_try_decrement {
// TODO: Config
@ -1319,7 +1325,7 @@ impl Player {
// Item drop
self.drop_item(item_stack.item.id, u32::from(item_stack.item_count))
.await;
};
}
Ok(())
}

@ -2,16 +2,16 @@ use core::error;
use std::{
fs::File,
io::{Cursor, Read},
num::NonZeroU32,
path::Path,
};
use base64::{Engine as _, engine::general_purpose};
use pumpkin_config::{BASIC_CONFIG, BasicConfiguration};
use pumpkin_data::packet::CURRENT_MC_PROTOCOL;
use pumpkin_protocol::{
CURRENT_MC_PROTOCOL, Players, StatusResponse, Version,
Players, StatusResponse, Version,
client::{config::CPluginMessage, status::CStatusResponse},
codec::{Codec, var_int::VarInt},
codec::var_int::VarInt,
};
use super::CURRENT_MC_VERSION;
@ -144,7 +144,7 @@ impl CachedStatus {
StatusResponse {
version: Some(Version {
name: CURRENT_MC_VERSION.into(),
protocol: NonZeroU32::from(CURRENT_MC_PROTOCOL).get(),
protocol: CURRENT_MC_PROTOCOL,
}),
players: Some(Players {
max: config.max_players,

@ -40,6 +40,7 @@ use tokio_util::task::TaskTracker;
mod connection_cache;
mod key_store;
pub mod seasonal_events;
pub mod ticker;
pub const CURRENT_MC_VERSION: &str = "1.21.5";

@ -0,0 +1,26 @@
use chrono::{Datelike, Local};
use pumpkin_config::advanced_config;
use rand::{seq::SliceRandom, thread_rng};
// Infact Mojang also has some Seasonal Events, so we can use that later to match Vanilla :D
#[must_use]
pub fn is_april() -> bool {
let data = Local::now();
data.day() == 1 && data.month() == 4
}
#[must_use]
pub fn modify_chat_message(message: &str) -> Option<String> {
if !advanced_config().fun.april_fools || !is_april() {
return None;
}
let mut words: Vec<&str> = message.split_whitespace().collect();
if words.is_empty() {
return None;
}
let mut rng = thread_rng();
words.shuffle(&mut rng);
let result = words.join(" ");
Some(result)
}

@ -54,14 +54,11 @@ use pumpkin_registry::DimensionType;
use pumpkin_util::math::{position::BlockPos, vector3::Vector3};
use pumpkin_util::math::{position::chunk_section_from_pos, vector2::Vector2};
use pumpkin_util::text::{TextComponent, color::NamedColor};
use pumpkin_world::block::registry::{
get_block_and_state_by_state_id, get_block_by_state_id, get_state_by_state_id,
};
use pumpkin_world::{GENERATION_SETTINGS, GeneratorSetting, biome, level::SyncChunk};
use pumpkin_world::{block::BlockDirection, chunk::ChunkData};
use pumpkin_world::{
block::registry::{
get_block_and_state_by_state_id, get_block_by_state_id, get_state_by_state_id,
},
coordinates::ChunkRelativeBlockCoordinates,
};
use pumpkin_world::{chunk::TickPriority, level::Level};
use rand::{Rng, thread_rng};
use scoreboard::Scoreboard;
@ -1141,8 +1138,7 @@ impl World {
///
/// # Arguments
/// * `pos`: The center of the sphere.
/// * `radius`: The radius of the sphere. The higher the radius,
/// the more area will be checked (in every direction).
/// * `radius`: The radius of the sphere. The higher the radius, the more area will be checked (in every direction).
pub async fn get_nearby_players(
&self,
pos: Vector3<f64>,
@ -1325,18 +1321,24 @@ impl World {
) -> u16 {
let chunk = self.get_chunk(position).await;
let (_, relative) = position.chunk_and_chunk_relative_position();
let relative = ChunkRelativeBlockCoordinates::from(relative);
let mut chunk = chunk.write().await;
let replaced_block_state_id = chunk
.sections
.get_block(relative)
.unwrap_or(Block::AIR.default_state_id);
.section
.get_block_absolute_y(relative.x as usize, relative.y, relative.z as usize)
.unwrap();
if replaced_block_state_id == block_state_id {
return block_state_id;
}
chunk.dirty = true;
chunk.sections.set_block(relative, block_state_id);
chunk.section.set_block_absolute_y(
relative.x as usize,
relative.y,
relative.z as usize,
block_state_id,
);
self.unsent_block_changes
.lock()
.await
@ -1527,11 +1529,13 @@ impl World {
pub async fn get_block_state_id(&self, position: &BlockPos) -> Result<u16, GetBlockError> {
let chunk = self.get_chunk(position).await;
let (_, relative) = position.chunk_and_chunk_relative_position();
let relative = ChunkRelativeBlockCoordinates::from(relative);
let chunk: tokio::sync::RwLockReadGuard<ChunkData> = chunk.read().await;
let Some(id) = chunk.sections.get_block(relative) else {
let chunk = chunk.read().await;
let Some(id) = chunk.section.get_block_absolute_y(
relative.x as usize,
relative.y,
relative.z as usize,
) else {
return Err(GetBlockError::BlockOutOfWorldBounds);
};