Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
230 changes: 126 additions & 104 deletions Cargo.lock

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,6 @@ pre-release-replacements = [

[dependencies]
anyhow = "1.0.68"
bincode = "2.0.1"
clap = { version = "4.1.4", features = ["derive", "wrap_help"] }
enum-map = "2.7.3"
fastnbt = "2.3.2"
Expand All @@ -57,6 +56,7 @@ notify = "8.0.0"
num-integer = "0.1.45"
num_cpus = "1.16.0"
phf = { version = "0.13.1", features = ["macros"] }
postcard = { version = "1.1.3", default-features = false, features = ["use-std"] }
rayon = "1.7.0"
regex = "1.10.2"
rustc-hash = "2.0.0"
Expand Down
4 changes: 2 additions & 2 deletions crates/resource/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,6 @@ readme.workspace = true
repository.workspace = true

[dependencies]
bincode = "2.0.1"
enumflags2 = "0.7.7"
enumflags2 = { version = "0.7.7", features = ["serde"] }
glam = "0.30.0"
serde = { version = "1.0.183", features = ["derive"] }
49 changes: 6 additions & 43 deletions crates/resource/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -10,13 +10,13 @@ mod legacy_block_types;

use std::collections::HashMap;

use bincode::{BorrowDecode, Decode, Encode};
use enumflags2::{BitFlags, bitflags};
use serde::{Deserialize, Serialize};

/// Flags describing special properties of [BlockType]s
#[bitflags]
#[repr(u8)]
#[derive(Debug, Clone, Copy, PartialEq)]
#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)]
pub enum BlockFlag {
/// The block type is opaque
Opaque,
Expand All @@ -38,14 +38,14 @@ pub enum BlockFlag {
}

/// An RGB color with u8 components
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Encode, Decode)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
pub struct Color(pub [u8; 3]);

/// An RGB color with f32 components
pub type Colorf = glam::Vec3;

/// A block type specification
#[derive(Debug, Clone, Copy)]
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
pub struct BlockColor {
/// Bit set of [BlockFlag]s describing special properties of the block type
pub flags: BitFlags<BlockFlag>,
Expand All @@ -61,43 +61,6 @@ impl BlockColor {
}
}

impl Encode for BlockColor {
fn encode<E: bincode::enc::Encoder>(
&self,
encoder: &mut E,
) -> Result<(), bincode::error::EncodeError> {
bincode::Encode::encode(&self.flags.bits(), encoder)?;
bincode::Encode::encode(&self.color, encoder)?;
Ok(())
}
}

impl<Context> Decode<Context> for BlockColor {
fn decode<D: bincode::de::Decoder<Context = Context>>(
decoder: &mut D,
) -> Result<Self, bincode::error::DecodeError> {
Ok(BlockColor {
flags: BitFlags::from_bits(bincode::Decode::decode(decoder)?).or(Err(
bincode::error::DecodeError::Other("invalid block flags"),
))?,
color: bincode::Decode::decode(decoder)?,
})
}
}

impl<'de, Context> BorrowDecode<'de, Context> for BlockColor {
fn borrow_decode<D: bincode::de::BorrowDecoder<'de, Context = Context>>(
decoder: &mut D,
) -> Result<Self, bincode::error::DecodeError> {
Ok(BlockColor {
flags: BitFlags::from_bits(bincode::BorrowDecode::borrow_decode(decoder)?).or(Err(
bincode::error::DecodeError::Other("invalid block flags"),
))?,
color: bincode::BorrowDecode::borrow_decode(decoder)?,
})
}
}

/// A block type specification (for use in constants)
#[derive(Debug, Clone)]
struct ConstBlockType {
Expand Down Expand Up @@ -174,7 +137,7 @@ impl BlockTypes {
pub use block_color::{block_color, needs_biome};

/// Grass color modifier used by a biome
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Encode, Decode)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub enum BiomeGrassColorModifier {
/// Grass color modifier used by the dark forest biome
DarkForest,
Expand All @@ -186,7 +149,7 @@ pub enum BiomeGrassColorModifier {
///
/// A Biome contains all information about a biome necessary to compute a block
/// color given a block type and depth
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Encode, Decode)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct Biome {
/// Temperature value
///
Expand Down
2 changes: 1 addition & 1 deletion crates/types/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -8,5 +8,5 @@ readme.workspace = true
repository.workspace = true

[dependencies]
bincode = "2.0.1"
itertools = "0.14.0"
serde = { version = "1.0.183", features = ["derive"] }
6 changes: 3 additions & 3 deletions crates/types/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,8 @@ use std::{
ops::{Index, IndexMut},
};

use bincode::{Decode, Encode};
use itertools::iproduct;
use serde::{Deserialize, Serialize};

/// Const generic AXIS arguments for coordinate types
pub mod axis {
Expand Down Expand Up @@ -110,7 +110,7 @@ impl LayerBlockCoords {
/// Generic array for data stored per block of a chunk layer
///
/// Includes various convenient iteration functions.
#[derive(Debug, Clone, Copy, Default, Encode, Decode)]
#[derive(Debug, Clone, Copy, Default, Serialize, Deserialize)]
pub struct LayerBlockArray<T>(pub [[T; BLOCKS_PER_CHUNK]; BLOCKS_PER_CHUNK]);

impl<T> Index<LayerBlockCoords> for LayerBlockArray<T> {
Expand Down Expand Up @@ -196,7 +196,7 @@ impl Debug for ChunkCoords {
/// Generic array for data stored per chunk of a region
///
/// Includes various convenient iteration functions.
#[derive(Debug, Clone, Copy, Default, Encode, Decode)]
#[derive(Debug, Clone, Copy, Default, Serialize, Deserialize)]
pub struct ChunkArray<T>(pub [[T; CHUNKS_PER_REGION]; CHUNKS_PER_REGION]);

impl<T> ChunkArray<T> {
Expand Down
14 changes: 6 additions & 8 deletions src/core/common.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,15 +3,13 @@
use std::{
collections::{BTreeMap, BTreeSet},
fmt::Debug,
hash::Hash,
path::{Path, PathBuf},
};

use anyhow::{Context, Result};
use bincode::{Decode, Encode};
use clap::ValueEnum;
use regex::{Regex, RegexSet};
use serde::Serialize;
use serde::{Deserialize, Serialize};

use crate::{
io::fs::FileMetaVersion,
Expand All @@ -26,7 +24,7 @@ use crate::{
///
/// Increase when the generation of processed regions from region data changes
/// (usually because of updated resource data)
pub const REGION_FILE_META_VERSION: FileMetaVersion = FileMetaVersion(9);
pub const REGION_FILE_META_VERSION: FileMetaVersion = FileMetaVersion(10);

/// MinedMap map tile data version number
///
Expand All @@ -48,7 +46,7 @@ pub const MIPMAP_FILE_META_VERSION: FileMetaVersion = FileMetaVersion(0);
/// MinedMap processed entity data version number
///
/// Increase when entity collection changes because of code changes.
pub const ENTITIES_FILE_META_VERSION: FileMetaVersion = FileMetaVersion(3);
pub const ENTITIES_FILE_META_VERSION: FileMetaVersion = FileMetaVersion(4);

/// Coordinate pair of a generated tile
///
Expand Down Expand Up @@ -87,7 +85,7 @@ impl TileCoordMap {
}

/// Data structure for storing chunk data between processing and rendering steps
#[derive(Debug, Encode, Decode)]
#[derive(Debug, Serialize, Deserialize)]
pub struct ProcessedChunk {
/// Block type data
pub blocks: Box<layer::BlockArray>,
Expand All @@ -98,7 +96,7 @@ pub struct ProcessedChunk {
}

/// Data structure for storing region data between processing and rendering steps
#[derive(Debug, Default, Encode, Decode)]
#[derive(Debug, Default, Serialize, Deserialize)]
pub struct ProcessedRegion {
/// List of biomes used in the region
///
Expand All @@ -109,7 +107,7 @@ pub struct ProcessedRegion {
}

/// Data structure for storing entity data between processing and collection steps
#[derive(Debug, Default, Encode, Decode)]
#[derive(Debug, Default, Serialize, Deserialize)]
pub struct ProcessedEntities {
/// List of block entities
pub block_entities: Vec<BlockEntity>,
Expand Down
25 changes: 13 additions & 12 deletions src/core/entity_collector.rs
Original file line number Diff line number Diff line change
Expand Up @@ -78,22 +78,23 @@ impl<'a> EntityCollector<'a> {
let mut output = ProcessedEntities::default();

for source_path in sources {
let mut source: ProcessedEntities = match storage::read_file(source_path.as_ref()) {
Ok(source) => source,
Err(err) => {
warn!(
"Failed to read entity data file {}: {:?}",
source_path.as_ref().display(),
err,
);
continue;
}
};
let mut source: ProcessedEntities =
match storage::read_file(source_path.as_ref(), storage::Format::Json) {
Ok(source) => source,
Err(err) => {
warn!(
"Failed to read entity data file {}: {:?}",
source_path.as_ref().display(),
err,
);
continue;
}
};

output.block_entities.append(&mut source.block_entities);
}

storage::write(file, &output).context("Failed to write entity data")
storage::write(file, &output, storage::Format::Json).context("Failed to write entity data")
}

/// Runs the mipmap generation
Expand Down
5 changes: 3 additions & 2 deletions src/core/metadata_writer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -189,8 +189,9 @@ impl<'a> MetadataWriter<'a> {

/// Generates [Entities] data from collected entity lists
fn entities(&self) -> Result<Entities> {
let data: ProcessedEntities = storage::read_file(&self.config.entities_path_final)
.context("Failed to read entity data file")?;
let data: ProcessedEntities =
storage::read_file(&self.config.entities_path_final, storage::Format::Json)
.context("Failed to read entity data file")?;

let ret = Entities {
signs: data
Expand Down
2 changes: 2 additions & 0 deletions src/core/region_processor.rs
Original file line number Diff line number Diff line change
Expand Up @@ -176,6 +176,7 @@ impl<'a> SingleRegionProcessor<'a> {
storage::write_file(
&self.output_path,
processed_region,
storage::Format::Postcard,
REGION_FILE_META_VERSION,
self.input_timestamp,
)
Expand Down Expand Up @@ -214,6 +215,7 @@ impl<'a> SingleRegionProcessor<'a> {
storage::write_file(
&self.entities_path,
entities,
storage::Format::Json,
ENTITIES_FILE_META_VERSION,
self.input_timestamp,
)
Expand Down
5 changes: 4 additions & 1 deletion src/core/tile_renderer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -105,7 +105,10 @@ impl<'a> TileRenderer<'a> {

region_loader
.get_or_try_init(|| async {
storage::read_file(&processed_path).context("Failed to load processed region data")
Ok(Arc::new(
storage::read_file(&processed_path, storage::Format::Postcard)
.context("Failed to load processed region data")?,
))
})
.await
.cloned()
Expand Down
46 changes: 28 additions & 18 deletions src/io/storage.rs
Original file line number Diff line number Diff line change
Expand Up @@ -10,16 +10,28 @@ use std::{
};

use anyhow::{Context, Result};
use bincode::{Decode, Encode};
use serde::{Serialize, de::DeserializeOwned};

use super::fs;

/// Bincode configuration
const BINCODE_CONFIG: bincode::config::Configuration = bincode::config::standard();
/// Storage format
///
/// Selects the serialization codec used by the `read`/`write` helpers in
/// this module. NOTE(review): variant order matters for Postcard-encoded
/// data (enum discriminants are serialized positionally) — do not reorder.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum Format {
/// Encode as Postcard
///
/// Postcard is more efficient than JSON, but cannot handle many of
/// serde's features like flatten, conditional skipping, ...
Postcard,
/// Encode as JSON
///
/// Less compact than Postcard, but supports the full set of serde
/// features; used by the entity-data read/write call sites.
Json,
}

/// Serializes data and writes it to a writer
pub fn write<W: Write, T: Encode>(writer: &mut W, value: &T) -> Result<()> {
let data = bincode::encode_to_vec(value, BINCODE_CONFIG)?;
pub fn write<W: Write, T: Serialize>(writer: &mut W, value: &T, format: Format) -> Result<()> {
let data = match format {
Format::Postcard => postcard::to_stdvec(value)?,
Format::Json => serde_json::to_vec(value)?,
};
let len = u32::try_from(data.len())?;
let compressed = zstd::bulk::compress(&data, 1)?;
drop(data);
Expand All @@ -33,21 +45,18 @@ pub fn write<W: Write, T: Encode>(writer: &mut W, value: &T) -> Result<()> {
/// Serializes data and stores it in a file
///
/// A timestamp is stored in an associated metadata file.
pub fn write_file<T: Encode>(
pub fn write_file<T: Serialize>(
path: &Path,
value: &T,
format: Format,
version: fs::FileMetaVersion,
timestamp: SystemTime,
) -> Result<()> {
fs::create_with_timestamp(path, version, timestamp, |file| write(file, value))
fs::create_with_timestamp(path, version, timestamp, |file| write(file, value, format))
}

/// Reads data from a reader and deserializes it
pub fn read<R, T>(reader: &mut R) -> Result<T>
where
R: Read,
T: Decode<()>,
{
pub fn read<R: Read, T: DeserializeOwned>(reader: &mut R, format: Format) -> Result<T> {
let mut len_buf = [0u8; 4];
reader.read_exact(&mut len_buf)?;
let len = usize::try_from(u32::from_be_bytes(len_buf))?;
Expand All @@ -57,17 +66,18 @@ where
let data = zstd::bulk::decompress(&compressed, len)?;
drop(compressed);

Ok(bincode::decode_from_slice(&data, BINCODE_CONFIG)?.0)
let value = match format {
Format::Postcard => postcard::from_bytes(&data)?,
Format::Json => serde_json::from_slice(&data)?,
};
Ok(value)
}

/// Reads data from a file and deserializes it
pub fn read_file<T>(path: &Path) -> Result<T>
where
T: Decode<()>,
{
pub fn read_file<T: DeserializeOwned>(path: &Path, format: Format) -> Result<T> {
	// Run the fallible part through a named closure so that a single
	// context message covers both opening and decoding the file.
	let load = || -> Result<T> {
		let mut file = File::open(path)?;
		read(&mut file, format)
	};
	load().with_context(|| format!("Failed to read file {}", path.display()))
}
Loading