WIP - New metatile converter
parent 15143e08cb
commit e6d56304e4
@@ -1,9 +1,10 @@
-use std::{error::Error, fs::read_to_string, path::Path, borrow::Cow, collections::HashSet};
+use std::{error::Error, fs::read_to_string, path::Path, borrow::Cow, collections::HashSet, convert::TryInto};
 use euclid::default::Rect;
 use image::{DynamicImage, GenericImageView};
 use linked_hash_map::LinkedHashMap;
 use roxmltree::Node;
 use crate::reskit::{utility::print_warning, cli::settings::TileOrder};
+use super::dmapack;

 #[derive(Debug)]
 pub struct TiledTilemap {
@@ -22,6 +23,7 @@ pub struct Metatile {
     pub source: String,
     pub width: u16,
     pub height: u16,
+    pub pack: dmapack::PackSettings,
     pub tiles: Vec<u32>
 }

@@ -268,6 +270,25 @@ pub fn get_metatile( id: u32, tileset: Node, working_directory: &str ) -> Result
     if let Some( extension ) = formatted_path.extension() {
         let extension = extension.to_string_lossy();
         if extension == "tmx" {
+            // Before we discard tileset, obtain dmapack bucket and slot
+            let bucket: u16 = tileset.descendants()
+                .find( | node | node.tag_name() == "property".into() && node.attribute( "name" ) == Some( "reskit-dmapack-bucket" ) )
+                .ok_or( "invalid file: no dmapack bucket specified for metatile" )?
+                .attribute( "value" )
+                .ok_or( "invalid file: reskit-dmapack-bucket property has no value" )?
+                .parse()?;
+            let slot: Vec<u16> = tileset.descendants()
+                .find( | node | node.tag_name() == "property".into() && node.attribute( "name" ) == Some( "reskit-dmapack" ) )
+                .ok_or( "invalid file: no dmapack slot specified for metatile" )?
+                .attribute( "value" )
+                .ok_or( "invalid file: reskit-dmapack property has no value" )?
+                .split( "," )
+                .map( | each | each.trim().parse::<u16>().expect( "fatal: non-u16 value specified for reskit-dmapack" ) )
+                .collect();
+            let slot: [u16; 2] = slot[0..2].try_into()?;
+
+            let pack = dmapack::PackSettings { bucket, slot };
+
             // Case where source is a nested tileset. Open the .tmx file
             let file = read_to_string( full_path.clone() )?;
             let tmx_document = roxmltree::Document::parse( &file )?;
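For reference, the parsing added above reads two custom properties from the nested tileset. Below is a minimal standalone sketch (not part of this commit) of the same lookup: only the property names reskit-dmapack-bucket and reskit-dmapack come from the code above, the sample XML values "3" and "2,1" are assumptions, and the slot parse is written fallibly instead of using expect().

// Sketch only: sample property values are assumed, not taken from the repository.
fn example_parse_pack_settings() -> Result<( u16, [u16; 2] ), Box<dyn std::error::Error>> {
    use std::convert::TryInto;

    let xml = r#"
        <properties>
            <property name="reskit-dmapack-bucket" value="3"/>
            <property name="reskit-dmapack" value="2,1"/>
        </properties>
    "#;
    let document = roxmltree::Document::parse( xml )?;

    let bucket: u16 = document.descendants()
        .find( | node | node.has_tag_name( "property" ) && node.attribute( "name" ) == Some( "reskit-dmapack-bucket" ) )
        .and_then( | node | node.attribute( "value" ) )
        .ok_or( "invalid file: no dmapack bucket specified" )?
        .parse()?;

    let slot: Vec<u16> = document.descendants()
        .find( | node | node.has_tag_name( "property" ) && node.attribute( "name" ) == Some( "reskit-dmapack" ) )
        .and_then( | node | node.attribute( "value" ) )
        .ok_or( "invalid file: no dmapack slot specified" )?
        .split( ',' )
        .map( | each | each.trim().parse::<u16>() )
        .collect::<Result<_, _>>()?;
    let slot: [u16; 2] = slot.as_slice().try_into()?;

    Ok( ( bucket, slot ) )
}

The committed version panics via expect() on a non-numeric reskit-dmapack value and panics on the slice index if fewer than two values are given; collecting into Result and calling try_into on the whole slice keeps both cases as recoverable errors.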
@@ -301,7 +322,7 @@ pub fn get_metatile( id: u32, tileset: Node, working_directory: &str ) -> Result
                     val - 1
                 }
             } ).collect();
-            return Ok( Some( Metatile { id, source, width, height, tiles } ) )
+            return Ok( Some( Metatile { id, source, width, height, pack, tiles } ) )
         }
     }

@@ -0,0 +1,19 @@
+use std::{error::Error, collections::{HashSet, HashMap}};
+use super::converter::Metatile;
+
+#[derive(Debug, PartialEq)]
+pub struct PackSettings {
+    pub bucket: u16,
+    pub slot: [u16; 2]
+}
+
+pub type Bucket = Vec<u16>;
+
+pub fn get_buckets( metatiles: &Vec<Metatile> ) -> Result<Vec<Bucket>, Box<dyn Error>> {
+    let mut buckets: HashMap<u16, Bucket> = HashMap::new();
+
+    // Prepare buckets. Each bucket is exactly 64 tiles wide, and as tall as the largest
+    // height in the bucket.
+
+    todo!()
+}
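Since get_buckets is still a todo!() in this commit, the following is only a sketch of one possible reading of the comment above, not the author's implementation: group metatiles by their dmapack bucket index, then size each bucket as a 64-tile-wide surface whose height is the tallest metatile assigned to it. What each u16 cell eventually holds is left open, so cells start zeroed; the sketch assumes the imports and types of the new module above.

// Sketch only: one possible reading of the "64 tiles wide" comment, not the committed code.
fn get_buckets_sketch( metatiles: &Vec<Metatile> ) -> Result<Vec<Bucket>, Box<dyn Error>> {
    const BUCKET_WIDTH: usize = 64;

    // Tallest metatile per dmapack bucket index.
    let mut heights: HashMap<u16, u16> = HashMap::new();
    for metatile in metatiles {
        let height = heights.entry( metatile.pack.bucket ).or_insert( 0 );
        *height = ( *height ).max( metatile.height );
    }

    // One zeroed 64 x height surface per bucket, ordered by bucket index.
    let mut bucket_indices: Vec<u16> = heights.keys().copied().collect();
    bucket_indices.sort();

    let buckets: Vec<Bucket> = bucket_indices.iter()
        .map( | index | vec![ 0u16; BUCKET_WIDTH * heights[ index ] as usize ] )
        .collect();

    Ok( buckets )
}

Actually placing metatiles into their pack.slot positions is the part the todo!() still has to decide, so it is deliberately left out of this sketch.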
@@ -1,3 +1,4 @@
 pub mod converter;
+pub mod dmapack;
 pub mod ecs;
 pub mod system;
@@ -132,105 +132,8 @@ pub fn get_tiles( tilemap: &TiledTilemap ) -> Result<(Vec<u8>, Vec<u8>), Box<dyn
  * 2 bytes: Y coordinate (world, divide by 8 to get tile coordinate).
  */
-pub fn get_metatile_maps( tilemap: &TiledTilemap ) -> Result<Vec<u8>, Box<dyn Error>> {
-    // Assemble header of offsets (as we go)
-    let header_offset = ( tilemap.metatiles.len() * 2 ) + 2;
-    let mut header: Vec<u8> = vec![];
-
-    // Assemble definitions
-    let mut definitions: Vec<u8> = vec![];
-    for metatile in &tilemap.metatiles {
-        let mut subdefinitions: Vec<u8> = vec![];
-        subdefinitions.extend( metatile.width.to_be_bytes() );
-        subdefinitions.extend( metatile.height.to_be_bytes() );
-
-        let ( tilemap_index, palettes ) = get_tilemap_prefix_palette( &metatile.source, &tilemap.tileset )?;
-        for tile in &metatile.tiles {
-            let palette = {
-                if *tile == 0 {
-                    0
-                } else {
-                    let palette = palettes.get( *tile as usize ).ok_or( "internal error: no correlation between tile index and palette index" )?;
-                    if let Ok( palette ) = palette.ok_or( "" ) {
-                        palette
-                    } else {
-                        println!( "{:?}", palettes );
-                        return Err( format!( "invalid file: tile \"{}\" in metatile tilemap does not name reskit-palette attribute", tile ) )?;
-                    }
-                }
-            };
-
-            let nametable_entry: u16 = ( ( palette as u16 ) << 13 ) | ( ( tile + tilemap_index as u32 ) as u16 );
-            subdefinitions.extend( nametable_entry.to_be_bytes() );
-        }
-
-        let this_offset = header_offset + definitions.len();
-        header.extend( ( this_offset as u16 ).to_be_bytes() );
-
-        definitions.extend( subdefinitions );
-    }
-
-    // Assemble instances
-    let mut instances: Vec<u8> = vec![];
-
-    let layer_b: Option<&Layer> = tilemap.layers.iter().find( | layer | matches!( layer, Layer::Metatile { system_plane: SystemPlane::MdPlaneB, tiles: _ } ) );
-    if let Some( layer_b ) = layer_b {
-        let tiles = match layer_b { Layer::Metatile { system_plane: _, tiles } => tiles };
-        let mut subdefinitions: Vec<u8> = vec![];
-
-        for tile_y in 0..tilemap.height {
-            for tile_x in 0..tilemap.width {
-                let item_at = tiles.get( ( tile_y * tilemap.width ) + tile_x ).ok_or( "internal error: tilemap does not correlate to width/height" )?;
-                if *item_at != 0 {
-                    // What index-ID was that?
-                    let index_id = tilemap.metatiles.iter().position( | metatile | metatile.id == *item_at ).ok_or( "invalid file: metatile id not found" )?;
-
-                    // Write the data to subdefinitions
-                    subdefinitions.extend( ( index_id as u16 ).to_be_bytes() );
-                    subdefinitions.extend( ( ( tile_x * 8 ) as u16 ).to_be_bytes() );
-                    subdefinitions.extend( ( ( tile_y * 8 ) as u16 ).to_be_bytes() );
-                }
-            }
-        }
-
-        instances.extend( ( ( subdefinitions.len() / 6 ) as u16 ).to_be_bytes() );
-        instances.extend( subdefinitions );
-    } else {
-        instances.extend( ( 0 as u16 ).to_be_bytes() );
-    }
-
-    let layer_a: Option<&Layer> = tilemap.layers.iter().find( | layer | matches!( layer, Layer::Metatile { system_plane: SystemPlane::MdPlaneA, tiles: _ } ) );
-    if let Some( layer_a ) = layer_a {
-        let tiles = match layer_a { Layer::Metatile { system_plane: _, tiles } => tiles };
-        let mut subdefinitions: Vec<u8> = vec![];
-
-        for tile_y in 0..tilemap.height {
-            for tile_x in 0..tilemap.width {
-                let item_at = tiles.get( ( tile_y * tilemap.width ) + tile_x ).ok_or( "internal error: tilemap does not correlate to width/height" )?;
-                if *item_at != 0 {
-                    // What index-ID was that?
-                    let index_id = tilemap.metatiles.iter().position( | metatile | metatile.id == *item_at ).ok_or( "invalid file: metatile id not found" )?;
-
-                    // Write the data to subdefinitions
-                    subdefinitions.extend( ( index_id as u16 ).to_be_bytes() );
-                    subdefinitions.extend( ( ( tile_x * 8 ) as u16 ).to_be_bytes() );
-                    subdefinitions.extend( ( ( tile_y * 8 ) as u16 ).to_be_bytes() );
-                }
-            }
-        }
-
-        instances.extend( ( ( subdefinitions.len() / 6 ) as u16 ).to_be_bytes() );
-        instances.extend( subdefinitions );
-    } else {
-        instances.extend( ( 0 as u16 ).to_be_bytes() );
-    }
-
-    // Write the final result
-    let mut result: Vec<u8> = vec![];
-    let offset_to_instances = header_offset + definitions.len();
-    result.extend( ( offset_to_instances as u16 ).to_be_bytes() );
-    result.extend( header );
-    result.extend( definitions );
-    result.extend( instances );
-
-    Ok( result )
-}

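The removed get_metatile_maps above packed each nametable entry as ( palette << 13 ) | tile_index. For reference only, here is a sketch of the full Mega Drive VDP nametable entry layout; the helper name and flag parameters are illustrative, not from the codebase.

// Sketch only: general Mega Drive VDP nametable entry packing.
// Bit layout: 15 = priority, 14-13 = palette line, 12 = vertical flip,
// 11 = horizontal flip, 10-0 = pattern (tile) index.
fn pack_nametable_entry( priority: bool, palette: u16, vflip: bool, hflip: bool, tile_index: u16 ) -> u16 {
    ( ( priority as u16 ) << 15 )
        | ( ( palette & 0x3 ) << 13 )
        | ( ( vflip as u16 ) << 12 )
        | ( ( hflip as u16 ) << 11 )
        | ( tile_index & 0x7FF )
}

The removed code only ever set the palette bits, which corresponds to entries with priority off and no flipping.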