Compare commits

...

48 Commits

Author SHA1 Message Date
Ashley N. 6ef2a1eb74 Add number of metatiles to dmapack structure 2023-11-05 10:46:34 -05:00
Ashley N. 93558cf3c8 Metatile converter 2023-11-02 21:49:34 -04:00
Ashley N. 3e5338670a WIP - New metatile converter 2023-11-02 17:22:21 -04:00
Ashley N. e6d56304e4 WIP - New metatile converter 2023-11-02 01:00:01 -04:00
Ashley N. 15143e08cb WIP - New metatile converter 2023-10-30 21:09:43 -04:00
Ashley N. 6fe634c864 WIP - New metatile converter 2023-10-30 17:00:15 -04:00
Ashley N. 18f1e20aa1 Convert 16.16 fixed point curve export to fractional x/128 export 2023-10-22 13:10:01 -04:00
Ashley N. 9e1014bbd9 Easing function tool 2023-10-21 20:40:50 -04:00
Ashley N. 1ae417af5c Update README.md 2023-10-19 22:28:08 -04:00
Ashley N. bb8fc323f5 Adjust collision to use drawn bounding boxes 2023-10-10 19:15:03 -04:00
Ashley N. 5f8bd8c87b Add frame count to tiles prior 2023-10-08 00:40:12 -04:00
Ashley N. a61b799c72 Fix off-by-one error as a blank tile is pushed before all the others 2023-09-29 16:48:47 -04:00
Ashley N. f3170b6b4b Fix how sprite indices are counted 2023-09-29 13:15:22 -04:00
Ashley N. 90cee2d998 Add custom fields to object export 2023-09-28 21:30:04 -04:00
Ashley N. 8ae402d037 Export palette with sprite 2023-09-28 20:31:38 -04:00
Ashley N. 27a6e09560 Remove entity-component system design (it ended up being types but worse) 2023-09-28 11:59:41 -04:00
Ashley N. 7a5bc41ce8 Merge branch 'master' of https://git.ne0ndrag0n.com/ashley/reskit into level-tool 2023-09-27 15:31:09 -04:00
Ashley N. 07a7ee97b7 Consistent ordering of components and attributes on export 2023-09-25 21:31:32 -04:00
Ashley N. 73b5ba6cc7 Export sprite table 2023-09-24 13:52:37 -04:00
Ashley N. 485d1d0304 Remove some compiler warnings 2023-09-23 16:25:14 -04:00
Ashley N. 1f704e88ba Improved ECS exporter with component-ids, attribute-ids, and symbol-ids 2023-09-23 16:11:01 -04:00
Ashley N. 5ba27514e7 Attribute export 2023-09-23 12:44:56 -04:00
Ashley N. 9d461a6c41 Fix bug with asmx compilation 2023-09-23 12:07:34 -04:00
Ashley N. 62fcb599cc Export constants to asm file 2023-09-23 12:04:27 -04:00
Ashley N. 94bec93c06 Revert "Allow user-specified component ids in ECS"
This reverts commit 31832e8882.
2023-09-22 23:52:04 -04:00
Ashley N. 31832e8882 Allow user-specified component ids in ECS 2023-09-22 10:23:08 -04:00
Ashley N. 6e63b9c40f Fill remainder of component attribute table 2023-09-21 21:09:47 -04:00
Ashley N. dcf8079592 Provide info for entity-component export 2023-09-20 23:45:02 -04:00
Ashley N. efa2c24487 Write component attribute table to .ecs file 2023-09-20 23:03:17 -04:00
Ashley N. 0360a875f2 75 percent done ECS exporter 2023-09-19 23:54:48 -04:00
Ashley N. bd6136903d Add collision map to export 2023-09-19 22:18:46 -04:00
Ashley N. e6e1500fd4 Support multiple tilesets per tilemap 2023-09-19 20:39:54 -04:00
Ashley N. 32874109d8 Stage changes for multiple tilesets per tmx file 2023-09-19 17:43:07 -04:00
Ashley N. 6fe992cb33 Correctly handle working directory of input file 2023-09-19 17:32:12 -04:00
Ashley N. 0dc7e95c21 Use more accurate color ramp conversion method 2023-09-18 23:42:58 -04:00
Ashley N. c4af44a5e4 Changes to fix level output in --system md 2023-09-18 20:22:12 -04:00
Ashley N. 8d4ade5f8e Fix bug in nametable exporter 2023-09-18 00:31:54 -04:00
Ashley N. 37f82daca1 connect nametable exporter 2023-09-17 23:28:18 -04:00
Ashley N. 62a762515d Export levels with width, height, and number of tiles 2023-09-17 22:48:50 -04:00
Ashley N. de5868305d Initial level export functionality 2023-09-14 10:11:40 -04:00
Ashley N. c62573e1ed Fix small bug in get_tiles 2023-09-13 23:18:52 -04:00
Ashley N. 1163ee2752 Changes to tileset to allow error throwing per-overstuffed-palette 2023-09-13 23:01:17 -04:00
Ashley N. 5d8f7294ef Generation of palette/nametables simplified and refactored for palettes 2023-09-13 22:36:56 -04:00
Ashley N. ff08ee4e46 Modify data structures to include palette data per tile 2023-09-13 21:10:36 -04:00
Ashley N. f7b9216680 Level exports - tiles, nametables, collision 2023-09-13 00:58:41 -04:00
Ashley N. dc10213f75 Add ECS parsing 2023-09-10 14:21:57 -04:00
Ashley N. 0543d9adfc Level tool 2023-09-10 00:15:53 -04:00
Ashley N. 264c2df1e8 Begin work on the level tool, add some documentation images 2023-09-07 19:16:56 -04:00
15 changed files with 1488 additions and 92 deletions

Cargo.toml
View File

@@ -16,4 +16,9 @@ hound = "3.5.0"
pitch_shift = "1.0.0"
linked_hash_set = "0.1.4"
linked-hash-map = "0.5.6"
convert_case = "0.6.0"
+roxmltree = "0.18.0"
+regex = "1.9.5"
+euclid = "0.22.9"
+flo_curves = "0.7.2"
+fixed = "1.24.0"

README.md
View File

@@ -1,24 +1,11 @@
-Reskit
+Reskit (stinkhead7ds edition)
======
-The **R**etro **E**ntertainment **S**oftware Tool**kit** (reskit) is a suite containing a variety of tools useful for creating 8-bit and 16-bit console homebrew software. Reskit allows you to easily generate tilemaps and convert soundtracks for import into your homebrew game projects.
-To get started, check out the [wiki](https://git.ne0ndrag0n.com/ashley/reskit/wiki/?action=_pages) or type `reskit --help` to view a list of supported tools and options.
-**Reskit is beta software!** Please read important disclaimers for each tool.
-# Available Tools
-* `tileset` - Convert an image to a series of tiles for a given console.
-  * Supported consoles: Sega Mega Drive VDP (tile and sprite order)
-* `soundtrack` - Convert a music sequence to an on-console format/sound driver
-  * Supported input formats: DMF (Deflemask) Version 27 (1.1.8)
-  * Supported export formats: Echo Sound Engine (ESF) for Sega Mega Drive
-# Build
-Reskit is developed in Rust and provided as a cargo crate. Simply type `cargo build` to build the project. Output should be in `target/debug/reskit`.
-# Reporting Issues
-First [register](https://git.ne0ndrag0n.com/user/sign_up) for an account on Temple of the Neon Dragon, then see the [Issues page](https://git.ne0ndrag0n.com/ashley/reskit/issues).
-# Pull Requests/Contributing
-Contributions are welcome! Request a repository slot in [this issue](https://git.ne0ndrag0n.com/ashley/reskit/issues/4) and I will allocate a repository for your account so that you may fork Reskit.
+<font color="red">This is an internal branch used by Rivethead Interactive for the development of _Stinkhead & The Seven Deadly Sins._ No issues, pull requests, or support of any kind will be taken for `level`, `easing`, and other tools exclusive to this branch.</font>
+## Internal Tools
+* `level` - Generate a stinkhead7ds level from a Tiled editor module.
+* `easing` - _Coming soon!_ Generate a cubic bezier easing curve for a given timespan, e.g. for stylistic camera motion.
+  * Output will be in the format:
+    * 2 bytes: Length of the curve, in 1/60 s increments.
+    * For each 1/60 s increment: the value on the curve at that increment.
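The `easing` output format described above matches what `get_cubic_bezier` (added later in this diff) writes: a big-endian u16 frame count followed by one byte per frame holding the curve value as an x/128 fraction. A minimal consumer-side sketch, assuming that layout (`read_curve` is illustrative, not part of reskit):

```rust
// Sketch only: read a curve.bin as described above.
use std::{ error::Error, fs };

fn read_curve( path: &str ) -> Result<( u16, Vec<u8> ), Box<dyn Error>> {
    let bytes = fs::read( path )?;
    if bytes.len() < 2 {
        return Err( "curve file too short" )?;
    }
    // 2 bytes: length of the curve in 1/60 s increments (big-endian)
    let frames = u16::from_be_bytes( [ bytes[ 0 ], bytes[ 1 ] ] );
    // 1 byte per increment: the position along the curve as an x/128 fraction
    let fractions = bytes[ 2.. ].to_vec();
    Ok( ( frames, fractions ) )
}
```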

Binary file not shown.

Size: 391 KiB

Binary file not shown.

Size: 383 KiB

Binary file not shown.

Size: 415 KiB

View File

@@ -1,6 +1,6 @@
-use std::{error::Error, fs::File, io::Write, path::Path};
+use std::{error::Error, fs::File, io::Write, path::Path, collections::HashMap};
use clap::Parser;
-use crate::reskit::{tileset, soundtrack::{formats::dmf::DmfModule, engines::echo::engine::{EchoFormat, EchoArtifact}}, utility::print_good};
+use crate::reskit::{tileset, soundtrack::{formats::dmf::DmfModule, engines::echo::engine::{EchoFormat, EchoArtifact}}, utility::print_good, level::{converter::get_tiled_tilemap, system::{get_tiles, get_code, get_metatile_maps, get_collision_map, get_objs, get_sprites}}, easing::get_cubic_bezier};
use super::settings::{Args, Tools, TileOutputFormat, TileOrder};
pub fn run_command() -> Result<(), Box<dyn Error>> {
@@ -19,7 +19,7 @@ pub fn run_command() -> Result<(), Box<dyn Error>> {
TileOrder::Tile => "tile",
TileOrder::Sprite => "sprite"
}
-),
+)?,
Tools::Soundtrack { input_files, output_directory, input_format: _, output_format: _, source_file_format: _, source_file_output_directory, artifact_output_directory } => {
if input_files.is_empty() {
return Err( "no input files (see `reskit soundtrack --help` for more info)" )?;
@@ -65,6 +65,89 @@ pub fn run_command() -> Result<(), Box<dyn Error>> {
print_good( "all files converted successfully" );
}
Tools::Level { input_file, output_directory, fields, symbol_ids, sprite_ids, console: _ } => {
// Clap can't do it. Sad!
let mut all_symbols: HashMap<String, u16> = HashMap::new();
let mut all_sprites: HashMap<String, u16> = HashMap::new();
for pair in symbol_ids {
let symbols: Vec<&str> = pair.split( "=" ).collect();
if symbols.len() != 2 {
return Err( format!( "invalid format for symbol_ids: {}", pair ) )?;
}
let symbol_name = symbols[ 0 ];
let symbol_id: u16 = symbols[ 1 ].parse()?;
all_symbols.insert( symbol_name.to_owned(), symbol_id );
}
for pair in sprite_ids {
let symbols: Vec<&str> = pair.split( "=" ).collect();
if symbols.len() != 2 {
return Err( format!( "invalid format for symbol_ids: {}", pair ) )?;
}
let symbol_name = symbols[ 0 ];
let symbol_id: u16 = symbols[ 1 ].parse()?;
all_sprites.insert( symbol_name.to_owned(), symbol_id );
}
let symbol_ids = all_symbols;
let sprite_ids = all_sprites;
let fields: Vec<&str> = fields.split( "," ).collect();
let tiled_file = get_tiled_tilemap( &input_file, &fields )?;
// Get tile and palette files
let ( all_tiles, palettes ) = get_tiles( &tiled_file )?;
let mut tiles_bin = File::create( format!( "{}tiles.bin", output_directory ) )?;
tiles_bin.write_all( &all_tiles )?;
print_good( "exported tiles.bin" );
let mut palettes_bin = File::create( format!( "{}palettes.pal", output_directory ) )?;
palettes_bin.write_all( &palettes )?;
print_good( "exported palettes.pal" );
let mut nametables_bin = File::create( format!( "{}nametables.map", output_directory ) )?;
nametables_bin.write_all( &get_metatile_maps( &tiled_file )? )?;
print_good( "exported nametables.map" );
let mut nametables_bin = File::create( format!( "{}collision.lvc", output_directory ) )?;
nametables_bin.write_all( &get_collision_map( &tiled_file )? )?;
print_good( "exported collision.lvc" );
let mut objs_bin = File::create( format!( "{}objects.obs", output_directory ) )?;
objs_bin.write_all( &get_objs( &tiled_file, &symbol_ids )? )?;
print_good( "exported objects.obs" );
let mut sprites_bin = File::create( format!( "{}sprites.spt", output_directory ) )?;
sprites_bin.write_all( &get_sprites( &tiled_file, &sprite_ids )? )?;
print_good( "exported sprites.spt" );
let mut code_asm = File::create( format!( "{}level.asm", output_directory ) )?;
code_asm.write_all( &get_code( &tiled_file, "testlevel", "levels/" )?.as_bytes() )?;
print_good( "exported level.asm" );
}
Tools::Easing { output_directory, interval, point_1, point_2 } => {
let cp1: Vec<&str> = point_1.split( "," ).collect();
if cp1.len() != 2 {
return Err( format!( "invalid format for point_1: {}", point_1 ) )?;
}
let ( cp1_x, cp1_y ): ( f64, f64 ) = ( cp1[ 0 ].parse()?, cp1[ 1 ].parse()? );
let cp2: Vec<&str> = point_2.split( "," ).collect();
if cp2.len() != 2 {
return Err( format!( "invalid format for point_2: {}", point_2 ) )?;
}
let ( cp2_x, cp2_y ): ( f64, f64 ) = ( cp2[ 0 ].parse()?, cp2[ 1 ].parse()? );
let mut easing = File::create( format!( "{}curve.bin", output_directory ) )?;
easing.write_all( &get_cubic_bezier( ( 0.0, 0.0 ), ( cp1_x, cp1_y ), ( cp2_x, cp2_y ), ( 1.0, 1.0 ), interval )? )?;
print_good( "exported curve.bin" );
}
};
Ok( () )
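The `symbol_ids` and `sprite_ids` loops in the new `Level` arm above are identical apart from the option named in the error message; a shared helper along these lines (hypothetical, not part of this diff) could collapse them:

```rust
// Hypothetical refactor sketch: parse the `name=id` pairs accepted by
// --symbol-ids and --sprite-ids into a map.
use std::{ collections::HashMap, error::Error };

fn parse_id_pairs( pairs: &[ String ], option_name: &str ) -> Result<HashMap<String, u16>, Box<dyn Error>> {
    let mut result = HashMap::new();
    for pair in pairs {
        let parts: Vec<&str> = pair.split( "=" ).collect();
        if parts.len() != 2 {
            return Err( format!( "invalid format for {}: {}", option_name, pair ) )?;
        }
        result.insert( parts[ 0 ].to_owned(), parts[ 1 ].parse()? );
    }
    Ok( result )
}
```

Usage would then be `let symbol_ids = parse_id_pairs( &symbol_ids, "symbol_ids" )?;` and likewise for `sprite_ids`.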

src/reskit/cli/settings.rs
View File

@@ -11,7 +11,7 @@ pub struct Args {
}
-#[derive(Clone, ValueEnum)]
+#[derive(Clone, Debug, ValueEnum)]
pub enum TileOrder {
Tile,
Sprite
@@ -38,6 +38,11 @@ pub enum ArtifactListFormat {
Asmx
}
#[derive(Clone, ValueEnum)]
pub enum SystemType {
Md
}
#[derive(Subcommand)]
pub enum Tools {
@@ -91,5 +96,55 @@ pub enum Tools {
/// Directory to output artifacts (instruments and samples)
#[arg(long, default_value_t=String::from( "./" ))]
artifact_output_directory: String
},
#[command(name = "level")]
#[command(about = "Generate a level containing a tilemap and defining an entity-component system, from a selected Tiled Map Editor .tmx file.")]
Level {
/// Input filename
#[arg(short, long)]
input_file: String,
/// Output directory for artifacts
#[arg(short, long, default_value_t=String::from("./"))]
output_directory: String,
/// Zero or more fields in order to define objects' struct (after object ID, position x, and position y) (format: <field1>,<field2>,<field3>)
#[arg(long, default_value_t=String::from(""))]
fields: String,
/// Zero or more symbol ID definitions used for string identifiers in objects (format: <symbol_name>=<u8>)
#[arg(long, num_args(0..))]
symbol_ids: Vec<String>,
/// Zero or more sprite ID definitions (format: <sprite_name_1>=<sprite_id_1>, <sprite_name_2>=<sprite_id_2>, ... )
#[arg(long, num_args(0..))]
sprite_ids: Vec<String>,
/// Console system type
#[arg(short, long, value_enum, default_value_t=SystemType::Md)]
console: SystemType
},
#[command(name = "easing")]
#[command(about = "Generate an easing curve given a time interval and control points.")]
Easing {
/// Output directory for artifacts
#[arg(short, long, default_value_t=String::from("./"))]
output_directory: String,
/// Time span of the easing curve (in 1/60 s increments)
#[arg(short,long)]
interval: u16,
/// First control point of the cubic bezier (format: <x>,<y>)
#[arg(long)]
point_1: String,
/// Second control point of the cubic bezier (format: <x>,<y>)
#[arg(long)]
point_2: String
}
}

65
src/reskit/easing.rs Normal file
View File

@@ -0,0 +1,65 @@
use std::error::Error;
use flo_curves::*;
use flo_curves::{bezier, Coord2};
use super::utility::print_info;
/*
stinkhead7ds easing curve description:
e.g. going from 120 to 140
easing function is cubic bezier going from 0.0 to 1.0
140-120=20 increments
0.0 is +0 and 1.0 is +20
console has no floating point and fixed point will overflow 16-bit
each curve tailored for use case. for camera motion:
- generate the cubic bezier easing curve in my custom branch of reskit, converting 0.0-1.0 floating point spans to x/128 fractions equivalent to x/100 fractions.
literal grade school shit i realised i wasted a weekend re-deriving from first principles. these fparts even export as single bytes, so that's 3 less bytes per point
on the curve than if i were sticking with 16.16 fix. for a 1 second animation, 60 points are taken on the 0.0-1.0 curve, for a 2 second animation, 120 points are taken, etc.
equivalent PAL framerate curves can be exported in the same curve file.
- the camera animates moves by setting a new destination point and performing interpolation of intermediate positions along the curve - just like css easing functions, or
unity/unreal bezier curves. you restrict camera new position to +/- powers of two in any axis to eliminate mul[s/u].w. e.g. camera moves +2, +4, +8.... in the x direction.
- for each axis, new position - current position = span of movement. so if you're starting at x = 30 and moving to x = 46, you're going +16 in the x direction
- while the camera is animating, interpolate positions by reading the fractional on the curve for the current frame and doing fractional * span / 128. since new camera positions
in any axis are kept power of two this can all be done using asl/asr. take the remainder with the and 2^127 trick and bump the resulting value up 1 if the modulo is >= 64
- cubic bezier animated camera motion
*/
/**
* Output a binary for a cubic bezier defined by the given four points and the
* time interval defined as number of 1/60 jiffies.
*/
pub fn get_cubic_bezier( p0: (f64, f64), c0: (f64, f64), c1: (f64, f64), p1: (f64, f64), jiffies: u16 ) -> Result<Vec<u8>, Box<dyn Error>> {
let mut result: Vec<u8> = Vec::new();
// 2 bytes: Number of frames this curve spans
result.extend( jiffies.to_be_bytes() );
let curve = bezier::Curve::from_points(
Coord2( p0.0, p0.1 ),
( Coord2( c0.0, c0.1 ), Coord2( c1.0, c1.1 ) ),
Coord2( p1.0, p1.1 )
);
let mut current_jiffy = 0.0;
// For each point in curve: 1 byte fractional point in the curve at the given frame
for i in 0..jiffies {
let point = curve.point_at_pos( current_jiffy );
let x = point.1;
// Round and convert x to n/100 fraction, then equivalent n/128 fraction
let x = ( ( x * 100.0 ).round() / 100.0 ) * 100.0; // only get last two decimal places, then convert to whole number
let x = x * 128.0 / 100.0; // x / 100 = y / 128
let x = x.round() as u8;
print_info( &format!( "at iteration {}, curve is {:?}, fractional format will be {}", i, point, x ) );
current_jiffy += 1.0 / jiffies as f64;
result.push( x );
}
Ok( result )
}
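The notes at the top of this file describe how the exported fractions are meant to be consumed: multiply the x/128 fraction by the (power-of-two) movement span, divide by 128 with a shift, and round up when the remainder is 64 or more. A sketch of that arithmetic, illustrative only since the real consumer is the game code on the console rather than anything in reskit:

```rust
// Sketch only: apply one x/128 curve fraction to a movement span, mirroring
// the shift-and-mask arithmetic described in the notes above.
fn curve_offset( fraction: u8, span: u16 ) -> u16 {
    let product = fraction as u32 * span as u32; // fraction * span
    let offset = product >> 7;                   // divide by 128 via shift
    let remainder = product & 127;               // remainder mod 128
    ( offset + if remainder >= 64 { 1 } else { 0 } ) as u16 // round half up
}
```

For example, a fraction of 64 (halfway along the curve) over a span of 16 pixels gives an offset of 8.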

517
src/reskit/level/converter.rs Normal file
View File

@@ -0,0 +1,517 @@
use std::{error::Error, fs::read_to_string, path::Path, borrow::Cow, collections::HashSet, convert::TryInto};
use euclid::default::Rect;
use image::{DynamicImage, GenericImageView};
use linked_hash_map::LinkedHashMap;
use roxmltree::Node;
use crate::reskit::{utility::print_warning, cli::settings::TileOrder};
use super::dmapack;
#[derive(Debug)]
pub struct TiledTilemap {
pub tileset: Vec<TiledTileset>,
pub metatiles: Vec<Metatile>,
pub layers: Vec<Layer>,
pub objects: Vec<Object>,
pub collision: Vec<Rect<u16>>,
pub width: usize,
pub height: usize
}
#[derive(Debug, PartialEq)]
pub struct Metatile {
pub id: u32,
pub source: String,
pub width: u16,
pub height: u16,
pub pack: dmapack::PackSettings,
pub tiles: Vec<u32>
}
#[derive(Debug)]
pub struct Object {
pub id: String,
pub attributes: LinkedHashMap<String, String>
}
#[derive(Debug)]
pub struct SpriteMetadata {
pub id: String,
pub width: u8,
pub height: u8,
pub palette: u16,
pub anim_interval: Option<u16>
}
#[derive(Debug)]
pub struct TiledTileset {
pub source: String,
pub image: DynamicImage,
pub palettes: Vec<Option<u8>>,
pub tile_order: TileOrder,
pub sprite_metadata: Option<SpriteMetadata>
}
#[derive(Debug)]
pub enum SystemPlane {
MdPlaneA,
MdPlaneB
}
#[derive(Debug)]
pub enum Layer {
Metatile {
system_plane: SystemPlane,
tiles: Vec<u32>
}
}
fn get_layer( layer: Node, map_width: usize, map_height: usize ) -> Result<Option<Layer>, Box<dyn Error>> {
let layer_id = layer.attribute( "id" ).ok_or( "invalid file: on layer: no id" )?;
let layer_name = layer.attribute( "name" ).unwrap_or( "<no name>" );
let layer_name = format!( "({}, ID: {})", layer_name, layer_id );
// Validate layer is same as total map size
let ( layer_width, layer_height ): ( usize, usize ) = (
layer.attribute( "width" ).ok_or( format!( "invalid file: on layer {}: no width attribute", layer_name ) )?.parse()?,
layer.attribute( "height" ).ok_or( format!( "invalid file: on layer {}: no height attribute", layer_name ) )?.parse()?
);
if layer_width != map_width || layer_height != map_height {
return Err( format!( "invalid file: on layer {}: layer width must match map width", layer_name ) )?
}
let properties = layer.descendants().find( | node | node.tag_name() == "properties".into() );
if let Some( properties ) = properties {
let properties: Vec<Node> = properties.descendants().filter( | node | {
if let Some( name ) = node.attribute( "name" ) {
name == "reskit-layer"
} else {
false
}
} ).collect();
// Should be either one or none
if let Some( layer_property ) = properties.first() {
let layer_type = layer_property.attribute( "value" ).ok_or( format!( "invalid file: on layer {}: no value for property", layer_name ) )?;
let data = layer.descendants()
.find( | node | node.tag_name() == "data".into() )
.ok_or( format!( "invalid file: on layer {}: no data for layer", layer_name ) )?;
let encoding = data.attribute( "encoding" ).ok_or( format!( "invalid file: on layer {}: no encoding attribute", layer_name ) )?;
if encoding != "csv" {
return Err( format!( "invalid file: on layer {}: only csv is supported for layer encoding", layer_name ) )?
}
let data: Vec<&str> = data.text().ok_or( format!( "invalid file: on layer {}: no layer data", layer_name ) )?.split( "," ).collect();
let tiles: Vec<u32> = data.into_iter().map( | string | Ok( string.trim().parse()? ) ).collect::< Result< Vec<u32>, Box<dyn Error> > >()?;
match layer_type.to_lowercase().as_str() {
"a" => Ok( Some( Layer::Metatile {
system_plane: SystemPlane::MdPlaneA,
tiles
} ) ),
"b" => Ok( Some( Layer::Metatile {
system_plane: SystemPlane::MdPlaneB,
tiles
} ) ),
_ => {
print_warning( &format!( "on layer {}: invalid reskit-layer value {}; ignoring this layer", layer_name, layer_type ) );
Ok( None )
}
}
} else {
print_warning( &format!( "on layer {}: no reskit-layer property defining hardware layer or collision; ignoring this layer", layer_name ) );
Ok( None )
}
} else {
print_warning( &format!( "on layer {}: no properties defining hardware layer or collision; ignoring this layer", layer_name ) );
Ok( None )
}
}
fn get_tiles( tileset: Node, seen_sources: &mut HashSet<String>, working_directory: &str ) -> Result<Option<TiledTileset>, Box<dyn Error>> {
// Get the image for the tileset and its source
let image = tileset.descendants().find( | node | node.tag_name() == "image".into() ).ok_or( "invalid file: no image object" )?;
let source = image.attribute( "source" ).ok_or( "invalid file: no source attribute on image" )?.to_string();
// If the file path ends in .tmx, it's a metatile definition. We need to hop to the .tmx file in source, then redefine layer
// to the single tileset in that metatile .tmx.
let full_path = format!( "{}/{}", working_directory, source );
let formatted_path = Path::new( &full_path );
if let Some( extension ) = formatted_path.extension() {
let extension = extension.to_string_lossy();
if extension == "tmx" {
// Case where source is a nested tileset. Open the .tmx file
let file = read_to_string( full_path.clone() )?;
let tmx_document = roxmltree::Document::parse( &file )?;
// Obtain the single (first) <tileset>
let tileset = tmx_document.descendants().find( | node | node.tag_name() == "tileset".into() ).ok_or( "invalid file: metatile has no tileset" )?;
let tileset_source = tileset.attribute( "source" ).ok_or( "invalid file: metatile tileset has no source" )?.to_string();
// tileset_source should already point to the _actual_ source we need. Was this tileset source already seen? If so, bail
if seen_sources.contains( &tileset_source ) {
return Ok( None )
} else {
seen_sources.insert( tileset_source.clone() );
}
// Load nested tileset into roxmltree document
let file = read_to_string( format!( "{}/{}", working_directory, tileset_source ) )?;
let nested_tileset = roxmltree::Document::parse( &file )?;
let tileset = nested_tileset.descendants().find( | node | node.tag_name() == "tileset".into() ).ok_or( "invalid file: metatile tileset has no tileset" )?;
// Return nested tileset from tmx file
let result = get_tiles( tileset, seen_sources, working_directory )?;
if let Some( mut result ) = result {
result.source = tileset_source.to_owned();
return Ok( Some( result ) )
}
return Ok( result )
}
}
// Case where source is an actual image
let image = image::open( full_path )?;
// Image must be a multiple of 8 (--system md)
if image.width() % 8 != 0 { return Err( "invalid file: tileset width not multiple of 8" )? }
if image.height() % 8 != 0 { return Err( "invalid file: tileset height not multiple of 8" )? }
let tile_count: usize = tileset.attribute( "tilecount" ).ok_or( "invalid file: no tilecount attribute on tileset" )?.parse()?;
let mut palettes: Vec<Option<u8>> = vec![ None; tile_count ];
let defined_tiles = tileset.descendants().filter( | node | node.tag_name() == "tile".into() );
for defined_tile in defined_tiles {
let tile_id: usize = defined_tile.attribute( "id" ).ok_or( "invalid file: id attribute not defined on a tile" )?.parse()?;
let properties = defined_tile.descendants().find( | node | node.tag_name() == "properties".into() ).ok_or( "invalid file: no properties descendant in tileset" )?;
let property = properties.descendants().find( | node | node.tag_name() == "property".into() && node.attribute( "name" ).unwrap_or( "" ) == "reskit-palette" );
if let Some( property ) = property {
let property_type = property.attribute( "type" ).unwrap_or( "string" );
if property_type == "int" {
let palette_value: u8 = property.attribute( "value" ).ok_or( "invalid file: reskit-palette property has no value" )?.parse()?;
// --system md
if palette_value > 3 {
print_warning( &format!( "reskit-palette property on tile {} is not valid palette (0 to 3) - leaving palette unset, this is probably not what you want...", tile_id ) );
} else {
palettes[ tile_id ] = Some( palette_value );
}
} else {
print_warning( &format!( "reskit-palette property on tile {} is not int type - leaving palette unset, this is probably not what you want...", tile_id ) )
}
}
}
let sprite_metadata;
// Retrieve the tile order as specified by reskit-tile-order (if none is specified, fallback on Tile)
let tile_order = if let Some( properties ) = tileset.descendants().find( | node | node.tag_name() == "properties".into() ) {
if let Some( tile_order_property ) = properties.descendants().find( | node | node.attribute( "name" ) == Some( "reskit-tile-order" ) ) {
let tile_order_property = tile_order_property.attribute( "value" ).expect( "invalid file: no reskit-tile-order value" );
match tile_order_property.to_lowercase().as_str() {
"sprite" => {
// If sprite, reskit-sprite-height and reskit-sprite-width must be defined
let reskit_sprite_height = properties.descendants().find( | node | node.attribute( "name" ) == Some( "reskit-sprite-height" ) ).ok_or( "invalid file: for reskit-tile-order \"sprite\", reskit-sprite-height and reskit-sprite-width must be defined." )?;
let reskit_sprite_width = properties.descendants().find( | node | node.attribute( "name" ) == Some( "reskit-sprite-width" ) ).ok_or( "invalid file: for reskit-tile-order \"sprite\", reskit-sprite-height and reskit-sprite-width must be defined." )?;
let reskit_sprite_id = properties.descendants().find( | node | node.attribute( "name" ) == Some( "reskit-sprite-id" ) ).ok_or( "invalid file: for reskit-tile-order \"sprite\", reskit-sprite-id must be defined." )?;
let reskit_palette = properties.descendants().find( | node | node.attribute( "name" ) == Some( "reskit-palette" ) ).ok_or( "invalid file: for reskit-tile-order \"sprite\", reskit-palette must be defined." )?;
let reskit_anim_interval = properties.descendants().find( | node | node.attribute( "name" ) == Some( "reskit-anim-interval" ) );
let reskit_sprite_height = reskit_sprite_height.attribute( "value" ).ok_or( "invalid file: no reskit-sprite-height value" )?;
let reskit_sprite_width = reskit_sprite_width.attribute( "value" ).ok_or( "invalid file: no reskit-sprite-width value" )?;
let reskit_palette = reskit_palette.attribute( "value" ).ok_or( "invalid file: no reskit-palette value" )?;
let id: String = reskit_sprite_id.attribute( "value" ).ok_or( "invalid file: no reskit-sprite-id value" )?.to_owned();
let width: u8 = reskit_sprite_width.parse()?;
let height: u8 = reskit_sprite_height.parse()?;
let palette: u16 = reskit_palette.parse()?;
let anim_interval = if let Some( property ) = reskit_anim_interval {
let value = property.attribute( "value" ).ok_or( "invalid file: no reskit-anim-interval value" )?;
Some( value.parse()? )
} else {
None
};
sprite_metadata = Some( SpriteMetadata { id, width, height, palette, anim_interval } );
TileOrder::Sprite
},
"tile" => {
sprite_metadata = None;
TileOrder::Tile
},
invalid => {
sprite_metadata = None;
print_warning( &format!( "invalid setting for property reskit-tile-order: {}. falling back on \"tile\"", invalid ) );
TileOrder::Tile
}
}
} else {
sprite_metadata = None;
TileOrder::Tile
}
} else {
sprite_metadata = None;
TileOrder::Tile
};
Ok( Some( TiledTileset { source: format!( "" ), image, palettes, tile_order, sprite_metadata } ) )
}
pub fn get_metatile( id: u32, tileset: Node, working_directory: &str ) -> Result<Option<Metatile>, Box<dyn Error>> {
// Get the image for the tileset and its source
let image = tileset.descendants().find( | node | node.tag_name() == "image".into() ).ok_or( "invalid file: no image object" )?;
let source = image.attribute( "source" ).ok_or( "invalid file: no source attribute on image" )?.to_string();
let full_path = format!( "{}/{}", working_directory, source );
let formatted_path = Path::new( &full_path );
if let Some( extension ) = formatted_path.extension() {
let extension = extension.to_string_lossy();
if extension == "tmx" {
// Before we discard tileset, obtain dmapack bucket and slot
let bucket: u16 = tileset.descendants()
.find( | node | node.tag_name() == "property".into() && node.attribute( "name" ) == Some( "reskit-dmapack-bucket" ) )
.ok_or( "invalid file: no dmapack bucket specified for metatile" )?
.attribute( "value" )
.ok_or( "invalid file: reskit-dmapack-bucket property has no value" )?
.parse()?;
let slot: Vec<u16> = tileset.descendants()
.find( | node | node.tag_name() == "property".into() && node.attribute( "name" ) == Some( "reskit-dmapack" ) )
.ok_or( "invalid file: no dmapack slot specified for metatile" )?
.attribute( "value" )
.ok_or( "invalid file: reskit-dmapack property has no value" )?
.split( "," )
.map( | each | each.trim().parse::<u16>().expect( "fatal: non-u16 value specified for reskit-dmapack" ) )
.collect();
let slot: [u16; 2] = slot[0..2].try_into()?;
let pack = dmapack::PackSettings { bucket, slot };
// Case where source is a nested tileset. Open the .tmx file
let file = read_to_string( full_path.clone() )?;
let tmx_document = roxmltree::Document::parse( &file )?;
// Get width and height from <map> item
let map_item = tmx_document.descendants().find( | node | node.tag_name() == "map".into() ).ok_or( "invalid file: no map element in metatile" )?;
let width: u16 = map_item.attribute( "width" ).ok_or( "invalid file: map of metatile has no width attribute" )?.parse()?;
let height: u16 = map_item.attribute( "height" ).ok_or( "invalid file: map of metatile has no height attribute" )?.parse()?;
// Obtain the single (first) <tileset>
let tileset_count = tmx_document.descendants().filter( | node | node.tag_name() == "tileset".into() ).count();
if tileset_count > 1 {
return Err( "invalid file: only a single tileset is valid for a metatile file" )?
}
let tileset = tmx_document.descendants().find( | node | node.tag_name() == "tileset".into() ).ok_or( "invalid file: metatile has no tileset" )?;
let source = tileset.attribute( "source" ).ok_or( "invalid file: metatile tileset has no source" )?.to_string();
// Create metatile object here
let data = tmx_document.descendants().find( | node | node.tag_name() == "data".into() ).ok_or( "invalid file: metatile has no data" )?;
if data.attribute( "encoding" ) != Some( "csv" ) {
return Err( "invalid file: metatile data not in csv format" )?
}
let data = data.text().ok_or( "invalid file: data node has no text" )?;
let values: Vec<&str> = data.split( "," ).collect();
// Subtract 1 from each value to account for firstgid. This is a sane assumption because a valid metatile file only contains a single tileset.
let tiles: Vec<u32> = values.into_iter().map( | string | {
let val = string.trim().parse::<u32>().expect( "fatal: non-u32 value in metatile" );
if val == 0 {
val
} else {
val - 1
}
} ).collect();
return Ok( Some( Metatile { id, source, width, height, pack, tiles } ) )
}
}
Ok( None )
}
pub fn get_objs( node: &Node, object_fields: &Vec<&str> ) -> Result<Vec<Object>, Box<dyn Error>> {
let mut result = Vec::new();
let objects = node.descendants().filter( | node | node.tag_name() == "object".into() );
for object in objects {
let object_id = object.attribute( "id" ).ok_or( "invalid file: object in objectgroup has no id attribute" )?;
if let Some( properties ) = object.descendants().find( | node | node.tag_name() == "properties".into() ) {
if let Some( object_id_property ) = properties.descendants().find( | node | node.attribute( "name" ) == Some( "reskit-object-id" ) ) {
let id = object_id_property.attribute( "value" ).ok_or( "invalid file: property has no value attribute" )?.to_owned();
let mut attributes = LinkedHashMap::new();
// Get position x and y as beginning attributes
let ( x, y ) = (
object.attribute( "x" ).ok_or( "invalid file: object has no x position" )?.to_owned(),
object.attribute( "y" ).ok_or( "invalid file: object has no y position" )?.to_owned()
);
attributes.insert( "x".trim().to_owned(), x );
attributes.insert( "y".trim().to_owned(), y );
// Custom object fields - push these in order into the LinkedHashMap
for field in object_fields {
let property = properties.descendants().find( | node | node.attribute( "name" ) == Some( &format!( "reskit-field[{}]", field ) ) );
if let Some( property ) = property {
let value = property.attribute( "value" ).ok_or( "invalid file: property has no value attribute" )?.to_owned();
attributes.insert( field.to_string(), value );
} else {
print_warning( &format!( "object {} does not define a value for struct field \"{}\", this field will be filled in with 0x00 at export", id, field ) );
attributes.insert( field.to_string(), "0".to_owned() );
}
}
result.push( Object { id, attributes } );
} else {
print_warning( &format!( "object {} has no \"reskit-object-id\" property....ignoring this object. this is probably not what you want.", object_id ) );
}
} else {
print_warning( &format!( "object {} has no properties....ignoring this object. this is probably not what you want", object_id ) );
}
}
Ok( result )
}
pub fn get_tiled_tilemap( path: &str, object_fields: &Vec<&str> ) -> Result<TiledTilemap, Box<dyn Error>> {
let file = read_to_string( path )?;
let document = roxmltree::Document::parse( &file )?;
let working_directory = {
let result = Path::new( path ).parent().unwrap_or( Path::new( "." ) ).to_string_lossy();
if result == "" {
Cow::from( "." )
} else {
result
}
};
let map = document.descendants().find( | node | node.tag_name() == "map".into() );
if let Some( map ) = map {
// Validate version
let version = map.attribute( "version" ).ok_or( "invalid file: no version attribute" )?;
if version < "1.10" || !version.starts_with( "1." ) {
return Err( "invalid file: unsupported version" )?
}
// Validate orientation and render order
let orientation = map.attribute( "orientation" ).ok_or( "invalid file: no orientation attribute" )?;
if orientation != "orthogonal" {
return Err( "invalid file: only orthogonal orientation is supported" )?
}
let render_order = map.attribute( "renderorder" ).ok_or( "invalid file: no renderorder attribute" )?;
if render_order != "left-down" {
return Err( "invalid file: only left-down orientation is supported" )?
}
let ( width, height ): ( usize, usize ) = (
map.attribute( "width" ).ok_or( "invalid file: no width attribute" )?.parse()?,
map.attribute( "height" ).ok_or( "invalid file: no height attribute" )?.parse()?
);
let ( tile_width, tile_height ): ( usize, usize ) = (
map.attribute( "tilewidth" ).ok_or( "invalid file: no tilewidth attribute" )?.parse()?,
map.attribute( "tileheight" ).ok_or( "invalid file: no tileheight attribute" )?.parse()?
);
// --system md is 8x8
if tile_width != 8 {
return Err( "invalid file: tile width is not 8 for --system md" )?
}
if tile_height != 8 {
return Err( "invalid file: tile height is not 8 for --system md" )?
}
// Build tilesets (current version assumes one tileset per level)
let mut tilesets: Vec<TiledTileset> = vec![];
let mut metatiles: Vec<Metatile> = vec![];
let mut seen_sources: HashSet<String> = HashSet::new();
for tileset in map.descendants().filter( | node | node.tag_name() == "tileset".into() ) {
let tileset_first_gid = tileset.attribute( "firstgid" ).ok_or( "invalid file: no tileset firstgid" )?;
let tileset_source_path = tileset.attribute( "source" ).ok_or( "invalid file: no tileset source" )?;
let tileset_file = read_to_string( format!( "{}/{}", working_directory, tileset_source_path ) )?;
let tileset_document = roxmltree::Document::parse( &tileset_file )?;
let tileset = tileset_document.descendants().find( | node | node.tag_name() == "tileset".into() ).ok_or( "invalid file: no tileset origin object" )?;
// Tilesets may instead contain metatiles
// First, load tilesets instead of metatiles
// If tileset_source_path was seen, don't proceed
if !seen_sources.contains( tileset_source_path ) {
seen_sources.insert( tileset_source_path.to_owned() );
if let Some( mut tiles ) = get_tiles( tileset.clone(), &mut seen_sources, &working_directory )? {
if tiles.source == "" {
tiles.source = tileset_source_path.to_owned();
}
tilesets.push( tiles );
}
}
// Then, check for metatiles
if let Some( metatile ) = get_metatile( tileset_first_gid.parse()?, tileset, &working_directory )? {
metatiles.push( metatile );
}
}
let tileset = tilesets;
if tileset.is_empty() {
return Err( "invalid file: at least one tileset must be present in file" )?
}
// Get the layers
let layers: Vec<Layer> = map.descendants()
.filter( | node | node.tag_name() == "layer".into() )
.map( | node | get_layer( node, width, height ) )
.collect::< Result< Vec<Option<Layer>>, Box<dyn Error> > >()?
.into_iter()
.filter_map( | option | option )
.collect();
// Get the entity-component system
let object_group = map.descendants().find( | node |
node.tag_name() == "objectgroup".into() &&
node.descendants().find( | node | node.attribute( "name" ) == Some( "reskit-layer" ) && node.attribute( "value" ) == Some( "object" ) ).is_some()
);
let objects = if let Some( object_group ) = object_group {
get_objs( &object_group, object_fields )?
} else {
print_warning( "no object layer in this file, this is probably not what you want..." );
Vec::new()
};
// Get collision
let collision_group = map.descendants().find( | node |
node.tag_name() == "objectgroup".into() &&
node.descendants().find( | node | node.attribute( "name" ) == Some( "reskit-layer" ) && node.attribute( "value" ) == Some( "collision" ) ).is_some()
);
let collision = if let Some( collision_group ) = collision_group {
let mut result = Vec::new();
let objects = collision_group.descendants().filter( | node | node.tag_name() == "object".into() );
for object in objects {
let x: u16 = object.attribute( "x" ).ok_or( "invalid file: no \"x\" attribute in collision bounding box" )?.parse()?;
let y: u16 = object.attribute( "y" ).ok_or( "invalid file: no \"y\" attribute in collision bounding box" )?.parse()?;
let width: u16 = object.attribute( "width" ).ok_or( "invalid file: no \"width\" attribute in collision bounding box" )?.parse()?;
let height: u16 = object.attribute( "height" ).ok_or( "invalid file: no \"height\" attribute in collision bounding box" )?.parse()?;
result.push( euclid::rect( x, y, width, height ) );
}
if result.is_empty() {
print_warning( "collision layer present but no bounding boxes are defined, this is probably not what you want..." );
}
result
} else {
print_warning( "no collision layer in this file, this is probably not what you want..." );
Vec::new()
};
Ok( TiledTilemap { tileset, metatiles, layers, objects, collision, width, height } )
} else {
Err( "invalid file: this does not appear to be valid Tiled .tmx file" )?
}
}

99
src/reskit/level/dmapack.rs Normal file
View File

@@ -0,0 +1,99 @@
use std::{error::Error, collections::HashMap, cmp::max};
use crate::reskit::level::system::get_tilemap_prefix_palette;
use super::converter::{Metatile, TiledTilemap, TiledTileset};
#[derive(Debug, PartialEq)]
pub struct PackSettings {
pub bucket: u16,
pub slot: [u16; 2]
}
pub type Bucket<'a> = Vec<Option<u16>>;
pub fn get_buckets( tilemap: &TiledTilemap ) -> Result<Vec<(u16, Bucket)>, Box<dyn Error>> {
let mut buckets: HashMap<u16, Bucket> = tilemap.metatiles.iter().map( | metatile | ( metatile.pack.bucket, Bucket::new() ) ).collect();
// Prepare buckets. Each bucket is exactly 64 tiles wide, and as tall as the largest
// height in the bucket.
for ( bucket_id, bucket ) in &mut buckets {
let height = get_bucket_height( *bucket_id, &tilemap.metatiles );
bucket.resize_with( 64 * height, || None );
}
// Build definitions and stamp them into their respective buckets and bucket slots
for metatile in &tilemap.metatiles {
// Get definition (in Vec<u16> form) so we can stamp it into the bucket
let definition = get_metatile_definition( metatile, &tilemap.tileset )?;
// Get bucket to stamp definition into
let bucket: &mut Bucket = buckets.get_mut( &metatile.pack.bucket ).ok_or( "internal error: no dmapack bucket" )?;
// Stamp the definition into the appropriate slot of the bucket.
for y in 0..metatile.height {
for x in 0..metatile.width {
let metatile_tile = definition.get( ( ( y * metatile.width ) + x ) as usize ).ok_or( "internal error: metatile tile expected" )?;
let bucket_target_x = x + metatile.pack.slot[ 0 ];
let bucket_target_y = y + metatile.pack.slot[ 1 ];
let bucket_target_tile = bucket.get_mut( ( ( bucket_target_y * 64 ) + bucket_target_x ) as usize ).ok_or( "internal error: bucket not correct size" )?;
// At any point if we encounter an overlapping bucket segment, throw an error.
if let Some( _ ) = bucket_target_tile {
return Err( format!( "invalid file: overlapping bucket segment at {},{}", metatile.pack.slot[ 0 ], metatile.pack.slot[ 1 ] ) )?;
}
// Stamp the segment
bucket_target_tile.replace( *metatile_tile );
}
}
}
let mut sorted_buckets: Vec<(u16, Bucket)> = buckets.into_iter().map( | pair | pair ).collect();
sorted_buckets.sort_by_key( | ( bucket_id, _ ) | *bucket_id );
Ok( sorted_buckets )
}
/**
* Get the metatile definition (the block of u16s in sega genesis vdp nametable format)
*/
fn get_metatile_definition( metatile: &Metatile, tileset: &Vec<TiledTileset> ) -> Result<Vec<u16>, Box<dyn Error>> {
let mut result: Vec<u16> = Vec::new();
let ( tile_id_prefix, palettes ) = get_tilemap_prefix_palette( &metatile.source, &tileset )?;
for tile in &metatile.tiles {
let palette = {
if *tile == 0 {
0
} else {
let palette = palettes.get( *tile as usize ).ok_or( "internal error: no correlation between tile index and palette index" )?;
if let Ok( palette ) = palette.ok_or( "" ) {
palette
} else {
println!( "{:?}", palettes );
return Err( format!( "invalid file: tile \"{}\" in metatile tilemap does not name reskit-palette attribute", tile ) )?;
}
}
};
let tile_id: u16 = if *tile == 0 { 0 } else { ( tile + tile_id_prefix as u32 ) as u16 };
result.push( ( ( palette as u16 ) << 13 ) | tile_id );
}
Ok( result )
}
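The words assembled by `get_metatile_definition` follow the Mega Drive VDP nametable layout, with the palette index in bits 14-13 and the tile index in the low bits (priority and flip bits left at zero). A quick check of that packing, as a sketch outside this diff:

```rust
// Sketch only: the ( palette << 13 ) | tile_id packing used above.
fn nametable_word( palette: u16, tile_id: u16 ) -> u16 {
    ( palette << 13 ) | tile_id
}

#[test]
fn packs_palette_and_tile() {
    // Palette 2, tile 0x045 -> 0x4045
    assert_eq!( nametable_word( 2, 0x045 ), 0x4045 );
}
```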
/**
* Get the height of a given dmapack bucket (the max of all heights of metatiles in the bucket)
*/
fn get_bucket_height( bucket_id: u16, metatiles: &Vec<Metatile> ) -> usize {
let filtered: Vec<&Metatile> = metatiles.iter().filter( | metatile | metatile.pack.bucket == bucket_id ).collect();
let mut max_height = 0;
for metatile in filtered {
max_height = max( max_height, metatile.height );
}
max_height as usize
}

100
src/reskit/level/ecs.rs Normal file
View File

@@ -0,0 +1,100 @@
use std::{error::Error, collections::HashMap};
use regex::Regex;
use roxmltree::Node;
use crate::reskit::utility::print_warning;
#[derive(Debug)]
pub struct Entity {
pub components: HashMap<String, Component>
}
#[derive(Debug)]
pub struct Component {
pub attributes: HashMap<String, String>
}
pub fn get_ecs( object_group: Node ) -> Result<Vec<Entity>, Box<dyn Error>> {
let mut entities: Vec<Entity> = Vec::new();
let objects: Vec<Node> = object_group.descendants().filter( | node | node.tag_name() == "object".into() ).collect();
for object in objects {
let object_id = object.attribute( "id" ).ok_or( "invalid file: no id attribute set on object" )?;
let object_name = object.attribute( "name" ).unwrap_or( "<no name>" );
let object_name = format!( "({}, ID: {})", object_name, object_id );
let mut entity: Entity = Entity {
components: HashMap::new()
};
// Get attributes for implicit `position` component
let x = object.attribute( "x" ).ok_or( "invalid file: position property not present on object" )?;
let y = object.attribute( "y" ).ok_or( "invalid file: position property not present on object" )?;
let width = object.attribute( "width" ).ok_or( "invalid file: position property not present on object" )?;
let height = object.attribute( "height" ).ok_or( "invalid file: position property not present on object" )?;
entity.components.insert(
format!( "position" ),
Component {
attributes: HashMap::from( [
( format!( "x" ), x.to_owned() ),
( format!( "y" ), y.to_owned() ),
( format!( "width" ), width.to_owned() ),
( format!( "height" ), height.to_owned() )
] )
}
);
let properties = object.descendants().find( | node | node.tag_name() == "properties".into() ).ok_or( "invalid file: no properties in object" )?;
let properties = properties.descendants().filter( | node | node.tag_name() == "property".into() && node.attribute( "name" ).unwrap_or( "" ).starts_with( "reskit-component" ) );
for component_property in properties {
let name = component_property.attribute( "name" ).ok_or( "internal error: name attribute expected" )?;
let prop_type = component_property.attribute( "type" ).unwrap_or( "string" );
let value = component_property.attribute( "value" );
if let Some( value ) = value {
// Set up regex to remove individual parts
let regex = Regex::new( r#"reskit-component\[([a-z0-9_]+)\](\.[a-z0-9_]*)?"# )?;
if let Some( captures ) = regex.captures( name ) {
let component_name = captures.get( 1 ).ok_or( "internal error: regex did not match properly" )?.as_str();
let attribute_name = captures.get( 2 );
if let Some( attribute_name ) = attribute_name {
// Setting is a component attribute setting ("value" is the setting)
let attribute_name = attribute_name.as_str().replace( ".", "" );
if let Some( component ) = entity.components.get_mut( component_name ) {
component.attributes.insert( attribute_name, value.to_owned() );
} else {
return Err( format!( "in object {}: undefined component in reskit-component definition \"{}\"", object_name, name ) )?;
}
} else {
// Setting is a component definition
if prop_type != "bool" {
print_warning( &format!( "in object {}: non-bool type reskit-component definition \"{}\". ignoring...", object_name, name ) );
} else {
if value == "true" {
if entity.components.contains_key( component_name ) {
print_warning( &format!( "in object {}: duplicate reskit-component definition \"{}\". ignoring...", object_name, name ) );
} else {
entity.components.insert(
component_name.to_owned(),
Component { attributes: HashMap::new() }
);
}
} else {
print_warning( &format!( "in object {}: reskit-component definition \"{}\" is set to false. ignoring...", object_name, name ) );
}
}
}
} else {
print_warning( &format!( "in object {}: invalid format for reskit-component attribute \"{}\". ignoring...", object_name, name ) );
}
} else {
print_warning( &format!( "in object {}: no value for reskit-component attribute \"{}\". ignoring...", object_name, name ) );
}
}
entities.push( entity );
}
Ok( entities )
}

4
src/reskit/level/mod.rs Normal file
View File

@@ -0,0 +1,4 @@
pub mod converter;
pub mod dmapack;
pub mod ecs;
pub mod system;

432
src/reskit/level/system.rs Normal file
View File

@@ -0,0 +1,432 @@
use std::{error::Error, collections::HashMap};
use image::GenericImageView;
use crate::reskit::{tileset::image_to_tiles, utility::{symbol_to_pascal, print_info}, cli::settings::TileOrder};
use super::{converter::{TiledTilemap, Layer, SystemPlane, TiledTileset}, dmapack::get_buckets};
/**
* Output the .bin and .pal file (using `tileset` tool to build it) containing each of the tiles
* and palettes in the Tiled Editor tileset.
*/
pub fn get_tiles( tilemap: &TiledTilemap ) -> Result<(Vec<u8>, Vec<u8>), Box<dyn Error>> {
let mut system_pals: [[u16; 16]; 4] = [
[ 0; 16 ],
[ 0; 16 ],
[ 0; 16 ],
[ 0; 16 ]
];
let mut all_tiles: Vec<u8> = vec![0; 32]; // --system md, start with a blank buffer tile
for tileset in &tilemap.tileset {
let tiles_height = tileset.image.height() / 8; // --system md
let tiles_width = tileset.image.width() / 8; // --system md
// all this copy pasted code, it's so over
if matches!( tileset.tile_order, TileOrder::Sprite ) {
// Sprite iteration order
for tile_x in 0..tiles_width {
for tile_y in 0..tiles_height {
let tile_index = ( tile_y * tiles_width ) + tile_x;
let tile = tileset.image.clone().crop( tile_x * 8, tile_y * 8, 8, 8 );
// Fake palette (see below)
let mut fake: [u16; 16] = [ 0; 16 ];
let selected_pal = tileset.palettes[ tile_index as usize ];
let tile_bin = image_to_tiles(
&tile,
{
// Determine if palette is used here or it is a dummy palette
if let Some( selected_pal ) = selected_pal {
&mut system_pals[ selected_pal as usize ]
} else {
// Fake-a-palette
// You will get an error in get_tilemap if you try to use this palette-less tile
&mut fake
}
},
"tile" // this is not set to "sprite" because we're only doing one tile at a time
);
if let Err( _ ) = tile_bin {
return Err( format!( "palette {:?} full (try moving tile {} to another palette)", selected_pal, tile_index ) )?
}
if let Ok( tile_bin ) = tile_bin {
all_tiles.extend( tile_bin );
}
}
}
} else {
// Tile iteration order
for tile_y in 0..tiles_height {
for tile_x in 0..tiles_width {
let tile_index = ( tile_y * tiles_width ) + tile_x;
let tile = tileset.image.clone().crop( tile_x * 8, tile_y * 8, 8, 8 );
// Fake palette (see below)
let mut fake: [u16; 16] = [ 0; 16 ];
let selected_pal = tileset.palettes[ tile_index as usize ];
let tile_bin = image_to_tiles(
&tile,
{
// Determine if palette is used here or it is a dummy palette
if let Some( selected_pal ) = selected_pal {
&mut system_pals[ selected_pal as usize ]
} else {
// Fake-a-palette
// You will get an error in get_tilemap if you try to use this palette-less tile
&mut fake
}
},
"tile"
);
if let Err( _ ) = tile_bin {
return Err( format!( "palette {:?} full (try moving tile {} to another palette)", selected_pal, tile_index ) )?
}
if let Ok( tile_bin ) = tile_bin {
all_tiles.extend( tile_bin );
}
}
}
}
}
// Define result and write palettes
let mut palettes: Vec<u8> = Vec::new();
for pal in 0..system_pals.len() {
for i in 0..system_pals[ pal ].len() {
let bytes = system_pals[ pal ][ i ].to_be_bytes();
for i in 0..2 {
palettes.push( bytes[ i ] );
}
}
}
Ok( ( all_tiles, palettes ) )
}
/**
* Get the .map file containing `dmapack`-formatted metatiles, as well as B and A metatile-based maps.
*
* Format:
* <<< HEADER >>>
* 2 bytes: The offset to the instances table
* 2 bytes: The number of metatiles in the `dmapack` structure
* For each metatile:
* 2 bytes: Metatile width
* 2 bytes: Metatile height
* 2 bytes: Offset into `dmapack` buckets pointing directly to the metatile
*
* <<< DEFINITIONS >>>
* For each `dmapack` bucket:
* Contents of the `dmapack` bucket. Use offsets in headers to access metatiles in `dmapack`
* buckets located in this block.
*
* <<< INSTANCES >>>
* For B, then A layers:
* 2 bytes: Number of metatile instances in this layer.
* For each metatile instance:
* 2 bytes: Metatile ID.
* 2 bytes: X coordinate (world, divide by 8 to get tile coordinate).
* 2 bytes: Y coordinate (world, divide by 8 to get tile coordinate).
*/
pub fn get_metatile_maps( tilemap: &TiledTilemap ) -> Result<Vec<u8>, Box<dyn Error>> {
print_info( &format!( "number of metatiles: {}", tilemap.metatiles.len() ) );
// Assemble header of offsets
let header_offset = ( tilemap.metatiles.len() * 6 ) + 4;
let mut header: Vec<u8> = vec![];
// Get offsets for each bucket
let buckets = get_buckets( tilemap )?;
let mut bucket_offsets: HashMap<u16, usize> = HashMap::new();
let mut total_offset = header_offset;
for ( bucket_id, bucket ) in &buckets {
print_info( &format!( "bucket {} is of size {}", bucket_id, bucket.len() ) );
bucket_offsets.insert( *bucket_id, total_offset );
total_offset += bucket.len();
}
// Get the offset for each metatile within the buckets
let mut index = 0;
for metatile in &tilemap.metatiles {
let bucket_offset = bucket_offsets.get( &metatile.pack.bucket ).ok_or( "internal error: no offset for dmapack bucket" )?;
let cell_x = metatile.pack.slot[ 0 ];
let cell_y = metatile.pack.slot[ 1 ];
let metatile_byte_offset: u16 = *bucket_offset as u16 + ( ( ( cell_y * 64 ) + cell_x ) * 2 );
header.extend( metatile.width.to_be_bytes() );
header.extend( metatile.height.to_be_bytes() );
header.extend( metatile_byte_offset.to_be_bytes() );
print_info( &format!( "metatile {}: width {} height {} file offset {}", index, metatile.width, metatile.height, metatile_byte_offset ) );
index += 1;
}
// Assemble definitions
let mut definitions: Vec<u8> = vec![];
let mut wasted_space: usize = 0;
for ( _, bucket ) in &buckets {
for cell in bucket {
if let Some( nametable_entry ) = cell {
definitions.extend( nametable_entry.to_be_bytes() );
} else {
definitions.extend( ( 0 as u16 ).to_be_bytes() );
wasted_space += 1;
}
}
}
print_info( &format!( "total dmapack unused space is {} bytes", wasted_space ) );
// Assemble instances
let mut instances: Vec<u8> = vec![];
let layer_b: Option<&Layer> = tilemap.layers.iter().find( | layer | matches!( layer, Layer::Metatile { system_plane: SystemPlane::MdPlaneB, tiles: _ } ) );
if let Some( layer_b ) = layer_b {
let tiles = match layer_b { Layer::Metatile { system_plane: _, tiles } => tiles };
let mut subdefinitions: Vec<u8> = vec![];
for tile_y in 0..tilemap.height {
for tile_x in 0..tilemap.width {
let item_at = tiles.get( ( tile_y * tilemap.width ) + tile_x ).ok_or( "internal error: tilemap does not correlate to width/height" )?;
if *item_at != 0 {
let metatile = tilemap.metatiles.iter().find( | metatile | metatile.id == *item_at ).ok_or( "invalid file: metatile definition not found" )?;
// What index-ID was that?
let index_id = tilemap.metatiles.iter().position( | metatile | metatile.id == *item_at ).ok_or( "invalid file: metatile id not found" )?;
// Write the data to subdefinitions
print_info( &format!( "plane B metatile instance: id {}, x {}, y {}", index_id, ( ( tile_x * 8 ) as u16 ), ( ( ( tile_y - ( metatile.height as usize - 1 ) ) * 8 ) as u16 ) ) );
subdefinitions.extend( ( index_id as u16 ).to_be_bytes() );
subdefinitions.extend( ( ( tile_x * 8 ) as u16 ).to_be_bytes() );
subdefinitions.extend( ( ( ( tile_y - ( metatile.height as usize - 1 ) ) * 8 ) as u16 ).to_be_bytes() );
}
}
}
instances.extend( ( ( subdefinitions.len() / 6 ) as u16 ).to_be_bytes() );
instances.extend( subdefinitions );
} else {
instances.extend( ( 0 as u16 ).to_be_bytes() );
}
let layer_a: Option<&Layer> = tilemap.layers.iter().find( | layer | matches!( layer, Layer::Metatile { system_plane: SystemPlane::MdPlaneA, tiles: _ } ) );
if let Some( layer_a ) = layer_a {
let tiles = match layer_a { Layer::Metatile { system_plane: _, tiles } => tiles };
let mut subdefinitions: Vec<u8> = vec![];
for tile_y in 0..tilemap.height {
for tile_x in 0..tilemap.width {
let item_at = tiles.get( ( tile_y * tilemap.width ) + tile_x ).ok_or( "internal error: tilemap does not correlate to width/height" )?;
if *item_at != 0 {
let metatile = tilemap.metatiles.iter().find( | metatile | metatile.id == *item_at ).ok_or( "invalid file: metatile definition not found" )?;
// What index-ID was that?
let index_id = tilemap.metatiles.iter().position( | metatile | metatile.id == *item_at ).ok_or( "invalid file: metatile id not found" )?;
// Write the data to subdefinitions
print_info( &format!( "plane A metatile instance: id {}, x {}, y {}", index_id, ( ( tile_x * 8 ) as u16 ), ( ( ( tile_y - ( metatile.height as usize - 1 ) ) * 8 ) as u16 ) ) );
subdefinitions.extend( ( index_id as u16 ).to_be_bytes() );
subdefinitions.extend( ( ( tile_x * 8 ) as u16 ).to_be_bytes() );
subdefinitions.extend( ( ( ( tile_y - ( metatile.height as usize - 1 ) ) * 8 ) as u16 ).to_be_bytes() );
}
}
}
instances.extend( ( ( subdefinitions.len() / 6 ) as u16 ).to_be_bytes() );
instances.extend( subdefinitions );
} else {
instances.extend( ( 0 as u16 ).to_be_bytes() );
}
// Write the final result
let mut result: Vec<u8> = vec![];
let offset_to_instances = header_offset + definitions.len();
result.extend( ( offset_to_instances as u16 ).to_be_bytes() );
let num_metatiles: u16 = tilemap.metatiles.len() as u16;
result.extend( num_metatiles.to_be_bytes() );
result.extend( header );
result.extend( definitions );
result.extend( instances );
Ok( result )
}
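A consumer-side sketch of the header documented above (a big-endian u16 offset to the instances table, a u16 metatile count, then a width/height/offset triple of u16s per metatile); the helper is illustrative only and not part of reskit:

```rust
// Sketch only: walk the .map header written by get_metatile_maps above.
fn read_map_header( bytes: &[ u8 ] ) -> Option<( u16, Vec<( u16, u16, u16 )> )> {
    fn be( bytes: &[ u8 ], i: usize ) -> Option<u16> {
        Some( u16::from_be_bytes( [ *bytes.get( i )?, *bytes.get( i + 1 )? ] ) )
    }
    let instances_offset = be( bytes, 0 )?;
    let metatile_count = be( bytes, 2 )?;
    let mut metatiles = Vec::new();
    for index in 0..metatile_count as usize {
        let base = 4 + ( index * 6 );
        // ( width, height, offset into the dmapack definitions block )
        metatiles.push( ( be( bytes, base )?, be( bytes, base + 2 )?, be( bytes, base + 4 )? ) );
    }
    Some( ( instances_offset, metatiles ) )
}
```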
pub fn get_tilemap_prefix_palette( target_source: &str, tilesets: &Vec<TiledTileset> ) -> Result<(usize, Vec<Option<u8>>), Box<dyn Error>> {
    // Everything starts at 1 for the blank buffer tile
    let mut index: usize = 1;
    for tileset in tilesets {
        if tileset.source == target_source {
            return Ok( ( index, tileset.palettes.clone() ) )
        } else {
            index += ( ( tileset.image.width() / 8 ) * ( tileset.image.height() / 8 ) ) as usize;
        }
    }
    Err( format!( "internal error: could not find tileset with target source \"{}\"", target_source ) )?
}
/**
 * Get the .lvc collision map: a big-endian u16 count of bounding boxes followed by
 * x1, y1, x2, y2 (each a u16) for every drawn collision box.
 */
pub fn get_collision_map( tilemap: &TiledTilemap ) -> Result<Vec<u8>, Box<dyn Error>> {
    let mut result: Vec<u8> = Vec::new();
    // 2 bytes: Number of bounding boxes
    result.extend( ( tilemap.collision.len() as u16 ).to_be_bytes() );
    // For (Number of bounding boxes):
    for bounding_box in &tilemap.collision {
        // 2 bytes: X1 dimension
        result.extend( bounding_box.origin.x.to_be_bytes() );
        // 2 bytes: Y1 dimension
        result.extend( bounding_box.origin.y.to_be_bytes() );
        // 2 bytes: X2 dimension (x + width)
        result.extend( ( bounding_box.origin.x + bounding_box.size.width ).to_be_bytes() );
        // 2 bytes: Y2 dimension (y + height)
        result.extend( ( bounding_box.origin.y + bounding_box.size.height ).to_be_bytes() );
    }
    Ok( result )
}
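/**
 * Get the .obs object table: a u16 object count and, if any objects exist, a u16 object
 * struct size (in 16-bit fields) followed by one record per object containing its symbol id
 * and each attribute value resolved to a u16 (true/false, a numeric literal, or another symbol id).
 */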
pub fn get_objs( tilemap: &TiledTilemap, symbol_ids: &HashMap<String, u16> ) -> Result<Vec<u8>, Box<dyn Error>> {
    let mut result: Vec<u8> = Vec::new();
    // 2 bytes: Number of objects
    result.extend( ( tilemap.objects.len() as u16 ).to_be_bytes() );
    if !tilemap.objects.is_empty() {
        let archetype = tilemap.objects.first().ok_or( "internal error: no first object" )?;
        // 2 bytes: Object struct size, in 16-bit fields (the object id plus one field per attribute)
        result.extend( ( ( archetype.attributes.len() + 1 ) as u16 ).to_be_bytes() );
        // For (number of objects):
        for object in &tilemap.objects {
            // (Object struct size * 2) bytes: Data for object, ordered by the --fields option, each one a 16-bit field
            // First output the object ID
            let object_id = symbol_ids.get( &object.id ).ok_or( format!( "invalid file: undefined symbol \"{}\"", object.id ) )?;
            result.extend( object_id.to_be_bytes() );
            // Then output the attributes in the order provided via LinkedHashMap
            for ( _attribute, value ) in &object.attributes {
                let value: u16 = match value.as_str() {
                    "true" => 0x01,
                    "false" => 0x00,
                    all_else => {
                        if let Ok( valid_u16 ) = all_else.parse::<u16>() {
                            valid_u16
                        } else {
                            *symbol_ids.get( all_else ).ok_or( format!( "invalid file: undefined symbol \"{}\"", all_else ) )?
                        }
                    }
                };
                result.extend( value.to_be_bytes() );
            }
        }
    }
    Ok( result )
}
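// Worked example for get_objs above (the symbol id is illustrative, not from a real project):
// an object whose symbol resolves to 0x0003 with attribute values [ "true", "128" ] is emitted
// as the big-endian bytes 00 03 00 01 00 80.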
/**
 * Get the sprite table.
 */
pub fn get_sprites( tilemap: &TiledTilemap, sprite_ids: &HashMap<String, u16> ) -> Result<Vec<u8>, Box<dyn Error>> {
    let mut result: Vec<u8> = Vec::new();
    let sprites: Vec<&TiledTileset> = tilemap.tileset.iter().filter( | tileset | matches!( tileset.tile_order, TileOrder::Sprite ) ).collect();
    result.extend( ( sprites.len() as u16 ).to_be_bytes() );
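    // Each .spt record that follows: u16 sprite id, u16 starting tile index, width and height
    // in tiles (one byte each), the palette index, u16 frame count, and a u16 animation
    // interval in jiffies (0 when the sprite is not animated).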
    for sprite in sprites {
        // Find the prefix of this source (add up the number of tiles in every tileset before it)
        let ( index, _ ) = get_tilemap_prefix_palette( &sprite.source, &tilemap.tileset )?;
        let sprite_metadata = sprite.sprite_metadata.as_ref().ok_or( "internal error: tile order is sprite but no sprite data" )?;
        let sprite_id = sprite_ids.get( &sprite_metadata.id ).ok_or( format!( "invalid file: undefined sprite id \"{}\"", sprite_metadata.id ) )?;
        result.extend( sprite_id.to_be_bytes() );
        let tiles_across = sprite.image.width() / 8;
        let frames: u16 = ( tiles_across / ( sprite_metadata.width as u32 ) ) as u16;
        result.extend( ( index as u16 ).to_be_bytes() );
        result.push( sprite_metadata.width );
        result.push( sprite_metadata.height );
        result.extend( sprite_metadata.palette.to_be_bytes() );
        // One animation per .tsx file
        // Animations run like a filmstrip across, never down
        result.extend( frames.to_be_bytes() );
        if let Some( jiffies ) = sprite_metadata.anim_interval {
            result.extend( jiffies.to_be_bytes() );
        } else {
            result.extend( 0u16.to_be_bytes() );
        }
    }
    Ok( result )
}
/**
 * Get a helper .asm or .c file that ties all the level components together
 */
pub fn get_code( tilemap: &TiledTilemap, level_name: &str, path_prefix: &str ) -> Result<String, Box<dyn Error>> {
    let version = env!( "CARGO_PKG_VERSION" );
    let level_label = symbol_to_pascal( level_name );
    let ( width, height ) = ( tilemap.width, tilemap.height );
    let num_tiles = {
        let mut total_tiles = 1;
        for tileset in &tilemap.tileset {
            total_tiles += ( tileset.image.width() / 8 ) * ( tileset.image.height() / 8 ); // --system md
        }
        total_tiles
    };
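    // Emit the assembly helper: incbin each exported binary, then lay out the level structure
    // itself ( width, height, tile count, and a longword pointer to each included blob ).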
    let file = format!( r#"; Level definition file
; Generated by reskit v{version}
{level_label}Tiles:
incbin '{path_prefix}{level_name}/tiles.bin'
{level_label}Palettes:
incbin '{path_prefix}{level_name}/palettes.pal'
{level_label}Nametables:
incbin '{path_prefix}{level_name}/nametables.map'
{level_label}Collision:
incbin '{path_prefix}{level_name}/collision.lvc'
{level_label}Objects:
incbin '{path_prefix}{level_name}/objects.obs'
{level_label}Sprites:
incbin '{path_prefix}{level_name}/sprites.spt'
{level_label}:
dc.w {width}, {height}, {num_tiles}
dc.l {level_label}Tiles
dc.l {level_label}Palettes
dc.l {level_label}Nametables
dc.l {level_label}Collision
dc.l {level_label}Objects
dc.l {level_label}Sprites"# );
    Ok( file )
}
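// Hypothetical usage sketch, not part of this module: the exporter functions above might be
// wired together roughly like this ( `parse_tiled_tilemap`, `symbol_ids`, and the output paths
// are assumptions for illustration only ):
//
//     let tilemap = parse_tiled_tilemap( "level_1.tmx" )?;
//     fs::write( "out/level_1/collision.lvc", get_collision_map( &tilemap )? )?;
//     fs::write( "out/level_1/objects.obs", get_objs( &tilemap, &symbol_ids )? )?;
//     fs::write( "out/level_1/level_1.asm", get_code( &tilemap, "level_1", "out/" )? )?;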

View File

@@ -1,4 +1,6 @@
 pub mod cli;
+pub mod easing;
+pub mod level;
 pub mod soundtrack;
 pub mod tileset;
 pub mod utility;

View File

@@ -1,20 +1,50 @@
 use crate::reskit::utility;
-use std::process::exit;
+use std::{error::Error, ops::RangeInclusive};
 use std::fs;
 use std::fs::File;
 use std::io::Write;
 use image::{ GenericImageView, DynamicImage };
-fn color_to_palette( r: u16, g: u16, b: u16, palette: &mut [u16; 16] ) -> u32 {
+/**
+ * Get a particular RGB component adjusted for the Mega Drive VDP colour ramp.
+ * See colour ramp at https://plutiedev.com/vdp-color-ramp
+ */
+fn rgb_component_to_ramp( component: u16 ) -> Result<u16, Box<dyn Error>> {
+    let ramp: [(RangeInclusive<u16>, (u16,u16)); 7] = [
+        (0..=52, (0x0, 0x2)),
+        (52..=87, (0x2, 0x4)),
+        (87..=116, (0x4, 0x6)),
+        (116..=144, (0x6, 0x8)),
+        (144..=172, (0x8, 0xA)),
+        (172..=206, (0xA, 0xC)),
+        (206..=255, (0xC, 0xE))
+    ];
+    for i in 0..ramp.len() {
+        let ( ramp_range, ( cram_round_down, cram_round_up ) ) = &ramp[ i ];
+        if ramp_range.contains( &component ) {
+            let range_midpoint = ramp_range.start() + ( ( ramp_range.end() - ramp_range.start() ) / 2 );
+            return if component >= range_midpoint {
+                Ok( *cram_round_up )
+            } else {
+                Ok( *cram_round_down )
+            }
+        }
+    }
+    Err( "rgb component not in range 0-255" )?
+}
+pub fn color_to_palette( r: u16, g: u16, b: u16, palette: &mut [u16; 16] ) -> Result<u32, Box<dyn Error>> {
     let final_val =
-        ( ( r & 0x00F0 ) >> 4 ) |
-        ( g & 0x00F0 ) |
-        ( ( b & 0x00F0 ) << 4 );
+        ( rgb_component_to_ramp( b )? << 8 ) |
+        ( rgb_component_to_ramp( g )? << 4 ) |
+        ( rgb_component_to_ramp( r )? );
     // Does the color already exist?
     for i in 0..palette.len() {
         if palette[ i ] == final_val {
-            return i as u32;
+            return Ok( i as u32 );
         }
     }
@@ -22,26 +52,26 @@ fn color_to_palette( r: u16, g: u16, b: u16, palette: &mut [u16; 16] ) -> u32 {
     for i in 1..palette.len() {
         if palette[ i ] == 0 {
             palette[ i ] = final_val;
-            return i as u32;
+            return Ok( i as u32 );
         }
     }
-    utility::print_error( "image contains greater than 15 colours, exiting..." );
-    exit( 3 );
+    utility::print_error( "attempted to insert greater than 15 colours in a palette" );
+    Err( "no more room in this palette!" )?
 }
-fn get_pixel( image: &DynamicImage, palette: &mut [u16; 16], x: u32, y: u32 ) -> u32 {
+fn get_pixel( image: &DynamicImage, palette: &mut [u16; 16], x: u32, y: u32 ) -> Result<u32, Box<dyn Error>> {
     let ( max_x, max_y ) = image.dimensions();
     if x >= max_x || y >= max_y {
-        return 0;
+        return Ok( 0 );
     }
     let pixel = image.get_pixel( x, y );
     color_to_palette( pixel[ 0 ].into(), pixel[ 1 ].into(), pixel[ 2 ].into(), palette )
 }
-fn output_bin( image_filename: &str, output_filename: &str, palette: [u16; 16], body: Vec<u8> ) {
+pub fn output_bin( output_filename: &str, palette: [u16; 16], body: Vec<u8> ) -> Result<(), Box<dyn Error>> {
     let mut output_palette: Vec< u8 > = Vec::new();
     for i in 0..palette.len() {
         let bytes = palette[ i ].to_be_bytes();
@@ -54,13 +84,14 @@ fn output_bin( image_filename: &str, output_filename: &str, palette: [u16; 16],
     if let Ok( mut output_file ) = output_try {
         output_file.write( &output_palette ).unwrap();
         output_file.write( &body ).unwrap();
-        utility::print_good( format!( "converted file {}", image_filename ).as_str() );
+        Ok( () )
     } else {
-        utility::print_error( format!( "could not open filename for output {}", output_filename ).as_str() );
+        return Err( format!( "could not open filename for output {}", output_filename ).as_str() )?;
     }
 }
-fn output_inc( image_filename: &str, output_filename: &str, palette: [u16; 16], body: Vec<u8> ) {
+pub fn output_inc( output_filename: &str, palette: [u16; 16], body: Vec<u8> ) -> Result<(), Box<dyn Error>> {
     let mut output_palette: Vec< u8 > = Vec::new();
     for i in 0..palette.len() {
         let bytes = palette[ i ].to_be_bytes();
@@ -111,75 +142,91 @@ fn output_inc( image_filename: &str, output_filename: &str, palette: [u16; 16],
     fs::write( output_filename.to_string() + ".h", output_h ).expect( "Could not write header file" );
     fs::write( output_filename.to_string() + ".c", output_c ).expect( "Could not write source file" );
-    utility::print_good( format!( "converted file {}", image_filename ).as_str() );
+    Ok( () )
 }
-pub fn generate( image_filename: &str, output_filename: &str, output_mode: &str, tile_order: &str ) {
-    let img = image::open( image_filename );
-    if let Ok( img ) = img {
-        let ( mut max_x, mut max_y ) = img.dimensions();
-        if max_x % 8 != 0 { max_x = ( 8 * ( max_x / 8 ) ) + ( 8 - ( max_x % 8 ) ); }
-        if max_y % 8 != 0 { max_y = ( 8 * ( max_y / 8 ) ) + ( 8 - ( max_y % 8 ) ); }
-        let mut palette: [u16; 16] = [ 0; 16 ];
-        let mut body: Vec< u8 > = Vec::new();
-        if tile_order == "sprite" {
-            /*
-             * Tile order:
-             * 1 3
-             * 2 4
-             */
-            for x in ( 0..max_x ).step_by( 8 ) {
-                for y in ( 0..max_y ).step_by( 8 ) {
-                    for cell_y in 0..8 {
-                        let mut series: u32 = 0;
-                        for cell_x in 0..8 {
-                            let nibble: u32 = get_pixel( &img, &mut palette, cell_x + x, cell_y + y ) << ( ( 7 - cell_x ) * 4 );
-                            series = series | nibble;
-                        }
-                        let bytes = series.to_be_bytes();
-                        for i in 0..4 {
-                            body.push( bytes[ i ] );
-                        }
-                    }
-                }
-            }
-        } else {
-            /*
-             * Tile order:
-             * 1 2
-             * 3 4
-             */
-            for y in ( 0..max_y ).step_by( 8 ) {
-                for x in ( 0..max_x ).step_by( 8 ) {
-                    for cell_y in 0..8 {
-                        let mut series: u32 = 0;
-                        for cell_x in 0..8 {
-                            let nibble: u32 = get_pixel( &img, &mut palette, cell_x + x, cell_y + y ) << ( ( 7 - cell_x ) * 4 );
-                            series = series | nibble;
-                        }
-                        let bytes = series.to_be_bytes();
-                        for i in 0..4 {
-                            body.push( bytes[ i ] );
-                        }
-                    }
-                }
-            }
-        }
-        if output_mode == "bin" {
-            output_bin( image_filename, output_filename, palette, body );
-        } else if output_mode == "inc" {
-            output_inc( image_filename, output_filename, palette, body );
-        } else {
-            utility::print_error( format!( "invalid output mode {}", output_mode ).as_str() );
-        }
-    } else {
-        utility::print_error( format!( "could not open filename {}", image_filename ).as_str() );
-    }
-}
+pub fn image_to_tiles( img: &DynamicImage, palette: &mut [u16; 16], tile_order: &str ) -> Result<Vec<u8>, Box<dyn Error>> {
+    let ( mut max_x, mut max_y ) = img.dimensions();
+    if max_x % 8 != 0 { max_x = ( 8 * ( max_x / 8 ) ) + ( 8 - ( max_x % 8 ) ); }
+    if max_y % 8 != 0 { max_y = ( 8 * ( max_y / 8 ) ) + ( 8 - ( max_y % 8 ) ); }
+    let mut body: Vec<u8> = Vec::new();
+    if tile_order == "sprite" {
+        /*
+         * Sprite order:
+         * 1 3
+         * 2 4
+         */
+        for x in ( 0..max_x ).step_by( 8 ) {
+            for y in ( 0..max_y ).step_by( 8 ) {
+                for cell_y in 0..8 {
+                    let mut series: u32 = 0;
+                    for cell_x in 0..8 {
+                        let nibble: u32 = get_pixel( &img, palette, cell_x + x, cell_y + y )? << ( ( 7 - cell_x ) * 4 );
+                        series = series | nibble;
+                    }
+                    let bytes = series.to_be_bytes();
+                    for i in 0..4 {
+                        body.push( bytes[ i ] );
+                    }
+                }
+            }
+        }
+    } else {
+        /*
+         * Tile order:
+         * 1 2
+         * 3 4
+         */
+        for y in ( 0..max_y ).step_by( 8 ) {
+            for x in ( 0..max_x ).step_by( 8 ) {
+                for cell_y in 0..8 {
+                    let mut series: u32 = 0;
+                    for cell_x in 0..8 {
+                        let nibble: u32 = get_pixel( &img, palette, cell_x + x, cell_y + y )? << ( ( 7 - cell_x ) * 4 );
+                        series = series | nibble;
+                    }
+                    let bytes = series.to_be_bytes();
+                    for i in 0..4 {
+                        body.push( bytes[ i ] );
+                    }
+                }
+            }
+        }
+    }
+    Ok( body )
+}
+pub fn generate( image_filename: &str, output_filename: &str, output_mode: &str, tile_order: &str ) -> Result<(), Box<dyn Error>> {
+    let img = image::open( image_filename );
+    if let Ok( img ) = img {
+        let mut palette: [u16; 16] = [ 0; 16 ];
+        let body = image_to_tiles( &img, &mut palette, tile_order )?;
+        if output_mode == "bin" {
+            if let Err( err ) = output_bin( output_filename, palette, body ) {
+                utility::print_error( &format!( "{}", err ) );
+            }
+            utility::print_good( format!( "converted file {}", image_filename ).as_str() );
+        } else if output_mode == "inc" {
+            if let Err( err ) = output_inc( output_filename, palette, body ) {
+                utility::print_error( &format!( "{}", err ) );
+            }
+            utility::print_good( format!( "converted file {}", image_filename ).as_str() );
+        } else {
+            utility::print_error( format!( "invalid output mode {}", output_mode ).as_str() );
+        }
+    } else {
+        utility::print_error( format!( "could not open filename {}", image_filename ).as_str() );
+    }
+    Ok( () )
+}