diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 910fcec..16bc44e 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -35,7 +35,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - rust_versions: ["stable", "1.46"] + rust_versions: ["stable", "1.75"] os: [ubuntu-latest, windows-latest] steps: - name: Checkout the source code diff --git a/Cargo.toml b/Cargo.toml index a75d491..fd65289 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "nobject-rs" -version = "2.0.0" +version = "3.0.0" authors = ["shmapdy "] edition = "2021" license = "MIT" @@ -12,7 +12,7 @@ description = "A parser for wavefront Obj/Mtl files. Written with Nom." # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -derive_more = "0.99" +derive_more = {version = "1.0", features = ["constructor", "from", "into"]} log = "0.4" -nom = "7.1.3" -thiserror = "1.0" +nom = "8.0" +thiserror = "2.0" diff --git a/src/lib.rs b/src/lib.rs index d3e1efc..2ae5e77 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -88,6 +88,7 @@ mod tokenizer; mod material; mod model; +use std::borrow::Cow; use std::result::Result; pub use model::{ @@ -121,8 +122,8 @@ pub enum ObjError { MaterialParse(#[from] MaterialError), /// An unexpected token was encountered in the token stream. - #[error("Unexpected token encountered: `{0:#?}`")] - UnexpectedToken(Token), + #[error("Unexpected token encountered: `{0}`")] + UnexpectedToken(String), /// The specification for obj/mtl files has some settings /// either being "on" or "off". If there is an issue @@ -141,7 +142,7 @@ pub enum ObjError { /// or a constructed `Model`. pub fn load_obj(input: &str) -> Result { match tokenizer::parse_obj(input) { - Ok(tokens) => Ok(model::parse(&tokens)?), + Ok(tokens) => Ok(model::parse(tokens)?), Err(e) => Err(e.into()), } } @@ -156,7 +157,7 @@ pub fn load_obj(input: &str) -> Result { /// or a collection of `Material`. 
pub fn load_mtl(input: &str) -> Result, ObjError> { match tokenizer::parse_mtl(input) { - Ok(tokens) => Ok(material::parse(&tokens)?), + Ok(tokens) => Ok(material::parse(tokens)?), Err(e) => Err(e.into()), } } @@ -167,7 +168,7 @@ fn get_token_float(token: &Token) -> Result { } else if let Token::Int(i) = token { Ok(*i as f32) } else { - Err(ObjError::UnexpectedToken(token.clone())) + Err(ObjError::UnexpectedToken(format!("{:#?}", token))) } } @@ -178,7 +179,7 @@ fn get_opt_token_float_opt(token: &Option) -> Result, ObjErro } else if let Token::Int(i) = t { Ok(Some(*i as f32)) } else { - Err(ObjError::UnexpectedToken(t.clone())) + Err(ObjError::UnexpectedToken(format!("{:#?}", t))) } } else { Ok(None) @@ -189,27 +190,29 @@ fn get_token_int(token: &Token) -> Result { if let Token::Int(i) = token { Ok(*i) } else { - Err(ObjError::UnexpectedToken(token.clone())) + Err(ObjError::UnexpectedToken(format!("{:#?}", token))) } } -fn get_token_string(token: &Token) -> Result { +fn get_token_string<'a>(token: &'a Token) -> Result, ObjError> { if let Token::String(s) = token { Ok(s.clone()) } else if let Token::Int(i) = token { - Ok(i.to_string()) + Ok(Cow::Owned(i.to_string())) } else if let Token::Float(f) = token { - Ok(f.to_string()) + Ok(Cow::Owned(f.to_string())) } else { - Err(ObjError::UnexpectedToken(token.clone())) + Err(ObjError::UnexpectedToken(format!("{:#?}", token))) } } fn get_on_off_from_str(token: &Token) -> Result { - let s = get_token_string(&token)?; - match s.as_str() { - "on" => Ok(true), - "off" => Ok(false), - _ => Err(ObjError::InvalidOnOffValue(s.clone())), + let s = get_token_string(token)?; + if s.eq_ignore_ascii_case("on") { + Ok(true) + } else if s.eq_ignore_ascii_case("off") { + Ok(false) + } else { + Err(ObjError::InvalidOnOffValue(s.into_owned())) } } diff --git a/src/macros.rs b/src/macros.rs index 7afb801..c0f102d 100644 --- a/src/macros.rs +++ b/src/macros.rs @@ -1,14 +1,14 @@ macro_rules! 
token_match { ($($token:tt)*) => {{ - fn inner() -> impl Fn(&[Token]) -> IResult<&[Token], Token> { - move |input: &[Token]| -> IResult<&[Token], Token> { + fn inner() -> impl Fn(crate::tokenizer::TokenSet) -> IResult { + move |input: crate::tokenizer::TokenSet| -> IResult { if input.is_empty() { Err(nom::Err::Error(nom::error::Error::new( input, nom::error::ErrorKind::Eof, ))) - } else if matches!(input[0], $($token)*) { - let token = input[0].clone(); + } else if matches!(input.as_ref()[0], $($token)*) { + let token = input.as_ref()[0].clone(); let (_, remainder) = input.split_at(1); Ok((remainder, token)) } else { diff --git a/src/material.rs b/src/material.rs index 0462547..8fe7ded 100644 --- a/src/material.rs +++ b/src/material.rs @@ -2,14 +2,14 @@ use std::result::Result; use crate::{ get_on_off_from_str, get_opt_token_float_opt, get_token_float, get_token_int, get_token_string, - tokenizer::Token, + tokenizer::{Token, TokenSet}, }; use nom::{ branch::alt, combinator::{map, opt}, multi::many1, - sequence::{preceded, tuple}, - IResult, + sequence::preceded, + IResult, Parser, }; use thiserror::Error; @@ -88,32 +88,32 @@ impl ColorCorrectedMap { OptionElement::FileName(n) => res.file_name = n.clone(), OptionElement::BlendU(b) => { res.blend_u = Some(*b); - } + }, OptionElement::BlendV(b) => { res.blend_v = Some(*b); - } + }, OptionElement::Cc(b) => { res.color_correct = Some(*b); - } + }, OptionElement::Clamp(b) => { res.clamp = Some(*b); - } + }, OptionElement::TextureRange((base, gain)) => { res.texture_range = Some((*base, *gain)); - } + }, OptionElement::Offset((x, y, z)) => { res.offset = Some((*x, *y, *z)); - } + }, OptionElement::Scale((x, y, z)) => { res.scale = Some((*x, *y, *z)); - } + }, OptionElement::Turbulance((x, y, z)) => { res.turbulance = Some((*x, *y, *z)); - } + }, OptionElement::TextureRes(tex_res) => { res.texture_res = Some(*tex_res); - } - _ => {} + }, + _ => {}, } } res @@ -156,30 +156,30 @@ impl NonColorCorrectedMap { 
OptionElement::FileName(n) => res.file_name = n.clone(), OptionElement::BlendU(b) => { res.blend_u = Some(*b); - } + }, OptionElement::BlendV(b) => { res.blend_v = Some(*b); - } + }, OptionElement::Clamp(b) => { res.clamp = Some(*b); - } + }, OptionElement::ImfChan(chan) => res.imf_chan = Some(chan.clone()), OptionElement::TextureRange((base, gain)) => { res.texture_range = Some((*base, *gain)); - } + }, OptionElement::Offset((x, y, z)) => { res.offset = Some((*x, *y, *z)); - } + }, OptionElement::Scale((x, y, z)) => { res.scale = Some((*x, *y, *z)); - } + }, OptionElement::Turbulance((x, y, z)) => { res.turbulance = Some((*x, *y, *z)); - } + }, OptionElement::TextureRes(tex_res) => { res.texture_res = Some(*tex_res); - } - _ => {} + }, + _ => {}, } } res @@ -198,7 +198,7 @@ pub struct BumpMap { impl BumpMap { fn new(o: &[OptionElement]) -> Self { let mut res = Self { - map_settings: Some(NonColorCorrectedMap::new(&o)), + map_settings: Some(NonColorCorrectedMap::new(o)), ..Default::default() }; @@ -225,7 +225,7 @@ pub struct ReflectionMap { impl ReflectionMap { fn new(o: &[OptionElement]) -> Self { let mut res = Self { - map_settings: Some(ColorCorrectedMap::new(&o)), + map_settings: Some(ColorCorrectedMap::new(o)), ..Default::default() }; @@ -303,70 +303,70 @@ impl Material { match element { MaterialElement::Name(n) => { self.name = n.clone(); - } + }, MaterialElement::Ambient(c) => { self.ambient = Some(c.clone()); - } + }, MaterialElement::Diffuse(c) => { self.diffuse = Some(c.clone()); - } + }, MaterialElement::Specular(c) => { self.specular = Some(c.clone()); - } + }, MaterialElement::EmissiveCoefficient(c) => { self.emissive_coefficient = Some(c.clone()); - } + }, MaterialElement::SpecularExponent(f) => { self.specular_exponent = Some(*f); - } + }, MaterialElement::Disolve(d) => { self.disolve = Some(*d); - } + }, MaterialElement::Transparency(f) => { self.transparancy = Some(*f); - } + }, MaterialElement::TransmissionFactor(c) => { self.transmission_factor = 
Some(c.clone()); - } + }, MaterialElement::Sharpness(f) => { self.sharpness = Some(*f); - } + }, MaterialElement::IndexOfRefraction(f) => { self.index_of_refraction = Some(*f); - } + }, MaterialElement::IlluminationModel(u) => { self.illumination_mode = Some(*u); - } + }, MaterialElement::TexMapAmbient(cc) => { self.texture_map_ambient = Some(cc.clone()); - } + }, MaterialElement::TexMapDiffuse(cc) => { self.texture_map_diffuse = Some(cc.clone()); - } + }, MaterialElement::TexMapSpecular(cc) => { self.texture_map_specular = Some(cc.clone()); - } + }, MaterialElement::ShininessMap(ncc) => { self.shininess_map = Some(ncc.clone()); - } + }, MaterialElement::DisolveMap(ncc) => { self.disolve_map = Some(ncc.clone()); - } + }, MaterialElement::DisplacementMap(ncc) => { self.displacement_map = Some(ncc.clone()); - } + }, MaterialElement::Decal(ncc) => { self.decal = Some(ncc.clone()); - } + }, MaterialElement::BumpMap(bm) => { self.bump_map = Some(bm.clone()); - } + }, MaterialElement::ReflectionMap(rm) => { self.reflection_map = Some(rm.clone()); - } + }, MaterialElement::AntiAliasMap(b) => { self.anti_alias_map = Some(*b); - } + }, } } } @@ -411,35 +411,32 @@ enum MaterialElement { AntiAliasMap(bool), } -pub(crate) fn parse(input: &[Token]) -> Result, MaterialError> { - let elements: Vec = match many1(alt(( - alt(( - parse_new_material, - parse_ambient, - parse_diffuse, - parse_specular, - parse_emissive_coefficient, - parse_specular_exponent, - parse_disolve, - parse_transparency, - parse_transmission_factor, - parse_sharpness, - parse_index_of_refraction, - )), - alt(( - parse_illumination_model, - parse_texture_map_ambient, - parse_texture_map_diffuse, - parse_texture_map_specular, - parse_shininess_map, - parse_disolve_map, - parse_displacement_map, - parse_decal, - parse_bump_map, - parse_reflection_map, - parse_anti_alias_map, - )), - )))(input) +pub(crate) fn parse(input: TokenSet) -> Result, MaterialError> { + let elements: Vec = match many1(alt([ + 
parse_new_material, + parse_ambient, + parse_diffuse, + parse_specular, + parse_emissive_coefficient, + parse_specular_exponent, + parse_disolve, + parse_transparency, + parse_transmission_factor, + parse_sharpness, + parse_index_of_refraction, + parse_illumination_model, + parse_texture_map_ambient, + parse_texture_map_diffuse, + parse_texture_map_specular, + parse_shininess_map, + parse_disolve_map, + parse_displacement_map, + parse_decal, + parse_bump_map, + parse_reflection_map, + parse_anti_alias_map, + ])) + .parse(input) { Ok((_, x)) => x, Err(e) => return Err(MaterialError::Parse(e.to_string())), @@ -461,7 +458,7 @@ pub(crate) fn parse(input: &[Token]) -> Result, MaterialError> { Ok(res) } -fn parse_new_material(input: &[Token]) -> IResult<&[Token], MaterialElement> { +fn parse_new_material(input: TokenSet) -> IResult { map( preceded( token_match!(Token::NewMaterial), @@ -473,53 +470,54 @@ fn parse_new_material(input: &[Token]) -> IResult<&[Token], MaterialElement> { Err(e) => { log::error!("{}", e); Default::default() - } + }, }; - MaterialElement::Name(name) + MaterialElement::Name(name.into()) }, - )(input) + ) + .parse(input) } -fn parse_color_type(input: &[Token]) -> IResult<&[Token], ColorType> { +fn parse_color_type(input: TokenSet) -> IResult { alt(( map( - tuple(( + ( token_match!(Token::Spectral), token_match!(Token::String(_)), opt(token_match!(Token::Float(_) | Token::Int(_))), - )), + ), |(_, file, factor)| { let file_name = match get_token_string(&file) { Ok(s) => s, Err(e) => { log::error!("{}", e); Default::default() - } + }, }; let factor = match get_opt_token_float_opt(&factor) { Ok(s) => s.unwrap_or(1.0), Err(e) => { log::error!("{}", e); Default::default() - } + }, }; - ColorType::Spectral(file_name, factor) + ColorType::Spectral(file_name.into(), factor) }, ), map( - tuple(( + ( token_match!(Token::Xyz), token_match!(Token::Float(_) | Token::Int(_)), opt(token_match!(Token::Float(_) | Token::Int(_))), opt(token_match!(Token::Float(_) | 
Token::Int(_))), - )), + ), |(_, x_token, y_token, z_token)| { let x = match get_token_float(&x_token) { Ok(s) => s, Err(e) => { log::error!("{}", e); Default::default() - } + }, }; let y = match y_token { Some(y) => match get_token_float(&y) { @@ -527,7 +525,7 @@ fn parse_color_type(input: &[Token]) -> IResult<&[Token], ColorType> { Err(e) => { log::error!("{}", e); Default::default() - } + }, }, None => x, }; @@ -537,7 +535,7 @@ fn parse_color_type(input: &[Token]) -> IResult<&[Token], ColorType> { Err(e) => { log::error!("{}", e); Default::default() - } + }, }, None => x, }; @@ -546,11 +544,11 @@ fn parse_color_type(input: &[Token]) -> IResult<&[Token], ColorType> { }, ), map( - tuple(( + ( token_match!(Token::Float(_) | Token::Int(_)), token_match!(Token::Float(_) | Token::Int(_)), token_match!(Token::Float(_) | Token::Int(_)), - )), + ), |(r, g, b)| { let (r, g, b) = ( match get_token_float(&r) { @@ -558,59 +556,64 @@ fn parse_color_type(input: &[Token]) -> IResult<&[Token], ColorType> { Err(e) => { log::error!("{}", e); Default::default() - } + }, }, match get_token_float(&g) { Ok(s) => s, Err(e) => { log::error!("{}", e); Default::default() - } + }, }, match get_token_float(&b) { Ok(s) => s, Err(e) => { log::error!("{}", e); Default::default() - } + }, }, ); ColorType::Rgb(r, g, b) }, ), - ))(input) + )) + .parse(input) } -fn parse_ambient(input: &[Token]) -> IResult<&[Token], MaterialElement> { +fn parse_ambient(input: TokenSet) -> IResult { preceded( token_match!(Token::AmbientColor), map(parse_color_type, MaterialElement::Ambient), - )(input) + ) + .parse(input) } -fn parse_diffuse(input: &[Token]) -> IResult<&[Token], MaterialElement> { +fn parse_diffuse(input: TokenSet) -> IResult { preceded( token_match!(Token::DiffuseColor), map(parse_color_type, MaterialElement::Diffuse), - )(input) + ) + .parse(input) } -fn parse_specular(input: &[Token]) -> IResult<&[Token], MaterialElement> { +fn parse_specular(input: TokenSet) -> IResult { preceded( 
token_match!(Token::SpecularColor), map(parse_color_type, MaterialElement::Specular), - )(input) + ) + .parse(input) } -fn parse_emissive_coefficient(input: &[Token]) -> IResult<&[Token], MaterialElement> { +fn parse_emissive_coefficient(input: TokenSet) -> IResult { preceded( token_match!(Token::EmissiveCoefficient), map(parse_color_type, MaterialElement::EmissiveCoefficient), - )(input) + ) + .parse(input) } -fn parse_specular_exponent(input: &[Token]) -> IResult<&[Token], MaterialElement> { +fn parse_specular_exponent(input: TokenSet) -> IResult { preceded( token_match!(Token::SpecularExponent), map(token_match!(Token::Float(_) | Token::Int(_)), |f| { @@ -619,14 +622,15 @@ fn parse_specular_exponent(input: &[Token]) -> IResult<&[Token], MaterialElement Err(e) => { log::error!("{}", e); Default::default() - } + }, }; MaterialElement::SpecularExponent(f) }), - )(input) + ) + .parse(input) } -fn parse_disolve(input: &[Token]) -> IResult<&[Token], MaterialElement> { +fn parse_disolve(input: TokenSet) -> IResult { preceded( token_match!(Token::Disolved), alt(( @@ -641,7 +645,7 @@ fn parse_disolve(input: &[Token]) -> IResult<&[Token], MaterialElement> { Err(e) => { log::error!("{}", e); Default::default() - } + }, }; MaterialElement::Disolve(DisolveType::Halo(f)) }, @@ -652,15 +656,16 @@ fn parse_disolve(input: &[Token]) -> IResult<&[Token], MaterialElement> { Err(e) => { log::error!("{}", e); Default::default() - } + }, }; MaterialElement::Disolve(DisolveType::Alpha(f)) }), )), - )(input) + ) + .parse(input) } -fn parse_transparency(input: &[Token]) -> IResult<&[Token], MaterialElement> { +fn parse_transparency(input: TokenSet) -> IResult { preceded( token_match!(Token::Transparancy), map(token_match!(Token::Float(_) | Token::Int(_)), |f| { @@ -669,21 +674,23 @@ fn parse_transparency(input: &[Token]) -> IResult<&[Token], MaterialElement> { Err(e) => { log::error!("{}", e); Default::default() - } + }, }; MaterialElement::Transparency(f) }), - )(input) + ) + 
.parse(input) } -fn parse_transmission_factor(input: &[Token]) -> IResult<&[Token], MaterialElement> { +fn parse_transmission_factor(input: TokenSet) -> IResult { preceded( token_match!(Token::TransmissionFactor), map(parse_color_type, MaterialElement::TransmissionFactor), - )(input) + ) + .parse(input) } -fn parse_sharpness(input: &[Token]) -> IResult<&[Token], MaterialElement> { +fn parse_sharpness(input: TokenSet) -> IResult { preceded( token_match!(Token::Sharpness), map(token_match!(Token::Float(_) | Token::Int(_)), |f| { @@ -692,14 +699,15 @@ fn parse_sharpness(input: &[Token]) -> IResult<&[Token], MaterialElement> { Err(e) => { log::error!("{}", e); Default::default() - } + }, }; MaterialElement::Sharpness(f) }), - )(input) + ) + .parse(input) } -fn parse_index_of_refraction(input: &[Token]) -> IResult<&[Token], MaterialElement> { +fn parse_index_of_refraction(input: TokenSet) -> IResult { preceded( token_match!(Token::IndexOfRefraction), map(token_match!(Token::Float(_) | Token::Int(_)), |f| { @@ -708,14 +716,15 @@ fn parse_index_of_refraction(input: &[Token]) -> IResult<&[Token], MaterialEleme Err(e) => { log::error!("{}", e); Default::default() - } + }, }; MaterialElement::IndexOfRefraction(f) }), - )(input) + ) + .parse(input) } -fn parse_illumination_model(input: &[Token]) -> IResult<&[Token], MaterialElement> { +fn parse_illumination_model(input: TokenSet) -> IResult { preceded( token_match!(Token::IlluminationModel), map(token_match!(Token::Int(_)), |f| { @@ -724,95 +733,105 @@ fn parse_illumination_model(input: &[Token]) -> IResult<&[Token], MaterialElemen Err(e) => { log::error!("{}", e); Default::default() - } + }, }; MaterialElement::IlluminationModel(f as u32) }), - )(input) + ) + .parse(input) } -fn parse_texture_map_ambient(input: &[Token]) -> IResult<&[Token], MaterialElement> { +fn parse_texture_map_ambient(input: TokenSet) -> IResult { preceded( token_match!(Token::TextureMapAmbient), map(parse_options, |o| { 
MaterialElement::TexMapAmbient(ColorCorrectedMap::new(&o)) }), - )(input) + ) + .parse(input) } -fn parse_texture_map_diffuse(input: &[Token]) -> IResult<&[Token], MaterialElement> { +fn parse_texture_map_diffuse(input: TokenSet) -> IResult { preceded( token_match!(Token::TextureMapDiffuse), map(parse_options, |o| { MaterialElement::TexMapDiffuse(ColorCorrectedMap::new(&o)) }), - )(input) + ) + .parse(input) } -fn parse_texture_map_specular(input: &[Token]) -> IResult<&[Token], MaterialElement> { +fn parse_texture_map_specular(input: TokenSet) -> IResult { preceded( token_match!(Token::TextureMapSpecular), map(parse_options, |o| { MaterialElement::TexMapSpecular(ColorCorrectedMap::new(&o)) }), - )(input) + ) + .parse(input) } -fn parse_shininess_map(input: &[Token]) -> IResult<&[Token], MaterialElement> { +fn parse_shininess_map(input: TokenSet) -> IResult { preceded( token_match!(Token::TextureMapShininess), map(parse_options, |o| { MaterialElement::ShininessMap(NonColorCorrectedMap::new(&o)) }), - )(input) + ) + .parse(input) } -fn parse_disolve_map(input: &[Token]) -> IResult<&[Token], MaterialElement> { +fn parse_disolve_map(input: TokenSet) -> IResult { preceded( token_match!(Token::TextureMapDisolved), map(parse_options, |o| { MaterialElement::DisolveMap(NonColorCorrectedMap::new(&o)) }), - )(input) + ) + .parse(input) } -fn parse_displacement_map(input: &[Token]) -> IResult<&[Token], MaterialElement> { +fn parse_displacement_map(input: TokenSet) -> IResult { preceded( token_match!(Token::DisplacementMap), map(parse_options, |o| { MaterialElement::DisplacementMap(NonColorCorrectedMap::new(&o)) }), - )(input) + ) + .parse(input) } -fn parse_decal(input: &[Token]) -> IResult<&[Token], MaterialElement> { +fn parse_decal(input: TokenSet) -> IResult { preceded( token_match!(Token::Decal), map(parse_options, |o| { MaterialElement::Decal(NonColorCorrectedMap::new(&o)) }), - )(input) + ) + .parse(input) } -fn parse_bump_map(input: &[Token]) -> IResult<&[Token], 
MaterialElement> { +fn parse_bump_map(input: TokenSet) -> IResult { preceded( token_match!(Token::BumpMap), map(parse_options, |o| { MaterialElement::BumpMap(BumpMap::new(&o)) }), - )(input) + ) + .parse(input) } -fn parse_reflection_map(input: &[Token]) -> IResult<&[Token], MaterialElement> { +fn parse_reflection_map(input: TokenSet) -> IResult { preceded( token_match!(Token::ReflectionMap), map(parse_options, |o| { MaterialElement::ReflectionMap(ReflectionMap::new(&o)) }), - )(input) + ) + .parse(input) } -fn parse_anti_alias_map(input: &[Token]) -> IResult<&[Token], MaterialElement> { +fn parse_anti_alias_map(input: TokenSet) -> IResult { preceded( token_match!(Token::AntiAliasMap), map(token_match!(Token::String(_)), |o| { @@ -821,14 +840,15 @@ fn parse_anti_alias_map(input: &[Token]) -> IResult<&[Token], MaterialElement> { Err(e) => { log::error!("{}", e); Default::default() - } + }, }; MaterialElement::AntiAliasMap(val) }), - )(input) + ) + .parse(input) } -fn parse_options(input: &[Token]) -> IResult<&[Token], Vec> { +fn parse_options(input: TokenSet) -> IResult> { many1(alt(( parse_option_blend, parse_option_bm, @@ -847,14 +867,15 @@ fn parse_options(input: &[Token]) -> IResult<&[Token], Vec> { Err(e) => { log::error!("{}", e); Default::default() - } + }, }; - OptionElement::FileName(name) + OptionElement::FileName(name.into()) }), - )))(input) + ))) + .parse(input) } -fn parse_option_blend(input: &[Token]) -> IResult<&[Token], OptionElement> { +fn parse_option_blend(input: TokenSet) -> IResult { alt(( map( preceded( @@ -867,7 +888,7 @@ fn parse_option_blend(input: &[Token]) -> IResult<&[Token], OptionElement> { Err(e) => { log::error!("{}", e); Default::default() - } + }, }; OptionElement::BlendU(val) }, @@ -883,15 +904,16 @@ fn parse_option_blend(input: &[Token]) -> IResult<&[Token], OptionElement> { Err(e) => { log::error!("{}", e); Default::default() - } + }, }; OptionElement::BlendV(val) }, ), - ))(input) + )) + .parse(input) } -fn 
parse_option_bm(input: &[Token]) -> IResult<&[Token], OptionElement> { +fn parse_option_bm(input: TokenSet) -> IResult { map( preceded( token_match!(Token::OptionBumpMultiplier), @@ -903,14 +925,15 @@ fn parse_option_bm(input: &[Token]) -> IResult<&[Token], OptionElement> { Err(e) => { log::error!("{}", e); Default::default() - } + }, }; OptionElement::BumpMultiplier(val) }, - )(input) + ) + .parse(input) } -fn parse_option_cc(input: &[Token]) -> IResult<&[Token], OptionElement> { +fn parse_option_cc(input: TokenSet) -> IResult { map( preceded( token_match!(Token::OptionColorCorrect), @@ -922,14 +945,15 @@ fn parse_option_cc(input: &[Token]) -> IResult<&[Token], OptionElement> { Err(e) => { log::error!("{}", e); Default::default() - } + }, }; OptionElement::Cc(val) }, - )(input) + ) + .parse(input) } -fn parse_option_clamp(input: &[Token]) -> IResult<&[Token], OptionElement> { +fn parse_option_clamp(input: TokenSet) -> IResult { map( preceded( token_match!(Token::OptionClamp), @@ -941,21 +965,22 @@ fn parse_option_clamp(input: &[Token]) -> IResult<&[Token], OptionElement> { Err(e) => { log::error!("{}", e); Default::default() - } + }, }; OptionElement::Clamp(val) }, - )(input) + ) + .parse(input) } -fn parse_option_texture_range(input: &[Token]) -> IResult<&[Token], OptionElement> { +fn parse_option_texture_range(input: TokenSet) -> IResult { map( preceded( token_match!(Token::OptionRange), - tuple(( + ( token_match!(Token::Float(_) | Token::Int(_)), token_match!(Token::Float(_) | Token::Int(_)), - )), + ), ), |(base, gain)| { let base = match get_token_float(&base) { @@ -963,29 +988,30 @@ fn parse_option_texture_range(input: &[Token]) -> IResult<&[Token], OptionElemen Err(e) => { log::error!("{}", e); Default::default() - } + }, }; let gain = match get_token_float(&gain) { Ok(s) => s, Err(e) => { log::error!("{}", e); Default::default() - } + }, }; OptionElement::TextureRange((base, gain)) }, - )(input) + ) + .parse(input) } -fn parse_option_offset(input: 
&[Token]) -> IResult<&[Token], OptionElement> { +fn parse_option_offset(input: TokenSet) -> IResult { map( preceded( token_match!(Token::OptionOffset), - tuple(( + ( token_match!(Token::Float(_) | Token::Int(_)), opt(token_match!(Token::Float(_) | Token::Int(_))), opt(token_match!(Token::Float(_) | Token::Int(_))), - )), + ), ), |(x, y, z)| { let x = match get_token_float(&x) { @@ -993,36 +1019,37 @@ fn parse_option_offset(input: &[Token]) -> IResult<&[Token], OptionElement> { Err(e) => { log::error!("{}", e); Default::default() - } + }, }; let y = match get_opt_token_float_opt(&y) { Ok(s) => s, Err(e) => { log::error!("{}", e); None - } + }, }; let z = match get_opt_token_float_opt(&z) { Ok(s) => s, Err(e) => { log::error!("{}", e); None - } + }, }; OptionElement::Offset((x, y, z)) }, - )(input) + ) + .parse(input) } -fn parse_option_scale(input: &[Token]) -> IResult<&[Token], OptionElement> { +fn parse_option_scale(input: TokenSet) -> IResult { map( preceded( token_match!(Token::OptionScale), - tuple(( + ( token_match!(Token::Float(_) | Token::Int(_)), opt(token_match!(Token::Float(_) | Token::Int(_))), opt(token_match!(Token::Float(_) | Token::Int(_))), - )), + ), ), |(x, y, z)| { let x = match get_token_float(&x) { @@ -1030,36 +1057,37 @@ fn parse_option_scale(input: &[Token]) -> IResult<&[Token], OptionElement> { Err(e) => { log::error!("{}", e); Default::default() - } + }, }; let y = match get_opt_token_float_opt(&y) { Ok(s) => s, Err(e) => { log::error!("{}", e); None - } + }, }; let z = match get_opt_token_float_opt(&z) { Ok(s) => s, Err(e) => { log::error!("{}", e); None - } + }, }; OptionElement::Scale((x, y, z)) }, - )(input) + ) + .parse(input) } -fn parse_option_turbulance(input: &[Token]) -> IResult<&[Token], OptionElement> { +fn parse_option_turbulance(input: TokenSet) -> IResult { map( preceded( token_match!(Token::OptionTurbulence), - tuple(( + ( token_match!(Token::Float(_) | Token::Int(_)), opt(token_match!(Token::Float(_) | Token::Int(_))), 
opt(token_match!(Token::Float(_) | Token::Int(_))), - )), + ), ), |(x, y, z)| { let x = match get_token_float(&x) { @@ -1067,28 +1095,29 @@ fn parse_option_turbulance(input: &[Token]) -> IResult<&[Token], OptionElement> Err(e) => { log::error!("{}", e); Default::default() - } + }, }; let y = match get_opt_token_float_opt(&y) { Ok(s) => s, Err(e) => { log::error!("{}", e); None - } + }, }; let z = match get_opt_token_float_opt(&z) { Ok(s) => s, Err(e) => { log::error!("{}", e); None - } + }, }; OptionElement::Turbulance((x, y, z)) }, - )(input) + ) + .parse(input) } -fn parse_option_texture_resolution(input: &[Token]) -> IResult<&[Token], OptionElement> { +fn parse_option_texture_resolution(input: TokenSet) -> IResult { map( preceded( token_match!(Token::OptionTextureResolution), @@ -1100,14 +1129,15 @@ fn parse_option_texture_resolution(input: &[Token]) -> IResult<&[Token], OptionE Err(e) => { log::error!("{}", e); Default::default() - } + }, }; OptionElement::TextureRes(val) }, - )(input) + ) + .parse(input) } -fn parse_option_imf_channel(input: &[Token]) -> IResult<&[Token], OptionElement> { +fn parse_option_imf_channel(input: TokenSet) -> IResult { map( preceded( token_match!(Token::OptionIMFChan), @@ -1119,14 +1149,15 @@ fn parse_option_imf_channel(input: &[Token]) -> IResult<&[Token], OptionElement> Err(e) => { log::error!("{}", e); Default::default() - } + }, }; - OptionElement::ImfChan(val) + OptionElement::ImfChan(val.into()) }, - )(input) + ) + .parse(input) } -fn parse_option_reflection_type(input: &[Token]) -> IResult<&[Token], OptionElement> { +fn parse_option_reflection_type(input: TokenSet) -> IResult { map( preceded( token_match!(Token::ReflectionType), @@ -1138,9 +1169,10 @@ fn parse_option_reflection_type(input: &[Token]) -> IResult<&[Token], OptionElem Err(e) => { log::error!("{}", e); Default::default() - } + }, }; - OptionElement::ReflectionType(val) + OptionElement::ReflectionType(val.into()) }, - )(input) + ) + .parse(input) } diff --git 
a/src/model.rs b/src/model.rs index 397850e..d40f0a1 100644 --- a/src/model.rs +++ b/src/model.rs @@ -3,15 +3,16 @@ use std::{collections::HashMap, result::Result}; use derive_more::{Constructor, From, Into}; use crate::{ - get_on_off_from_str, get_token_float, get_token_int, get_token_string, tokenizer::Token, + get_on_off_from_str, get_token_float, get_token_int, get_token_string, + tokenizer::{Token, TokenSet}, }; use nom::{ branch::alt, combinator::{map, opt}, multi::{fold_many0, fold_many1, many1}, - sequence::{preceded, tuple}, - IResult, + sequence::preceded, + IResult, Parser, }; use thiserror::Error; @@ -212,7 +213,7 @@ pub(crate) enum ModelElement { TextureMap(String), } -pub(crate) fn parse(input: &[Token]) -> Result { +pub(crate) fn parse(input: TokenSet) -> Result { match fold_many0( alt(( map(parse_vertex, ModelElement::Vertex), @@ -244,19 +245,19 @@ pub(crate) fn parse(input: &[Token]) -> Result { ModelElement::Face(mut f) => { f.smoothing_group = model.current_smoothing_group; for g in &model.current_group { - let set = model.faces.entry(g.clone()).or_insert_with(Vec::new); + let set = model.faces.entry(g.clone()).or_default(); set.push(f.clone()); } }, ModelElement::Line(l) => { for g in &model.current_group { - let set = model.lines.entry(g.clone()).or_insert_with(Vec::new); + let set = model.lines.entry(g.clone()).or_default(); set.push(l.clone()); } }, ModelElement::Point(p) => { for g in &model.current_group { - let set = model.points.entry(g.clone()).or_insert_with(Vec::new); + let set = model.points.entry(g.clone()).or_default(); set.push(p.clone()); } }, @@ -296,23 +297,24 @@ pub(crate) fn parse(input: &[Token]) -> Result { } model }, - )(input) + ) + .parse(input) { Ok((_, acc)) => Ok(acc), Err(e) => Err(ModelError::Parse(e.to_string())), } } -pub(crate) fn parse_vertex(input: &[Token]) -> IResult<&[Token], Vertex> { +pub(crate) fn parse_vertex(input: TokenSet) -> IResult { map( preceded( token_match!(Token::Vertex), - tuple(( + ( 
token_match!(Token::Float(_) | Token::Int(_)), token_match!(Token::Float(_) | Token::Int(_)), token_match!(Token::Float(_) | Token::Int(_)), opt(token_match!(Token::Float(_) | Token::Int(_))), - )), + ), ), |(x, y, z, w)| { let (x, y, z) = ( @@ -347,18 +349,19 @@ pub(crate) fn parse_vertex(input: &[Token]) -> IResult<&[Token], Vertex> { }); (x, y, z, w).into() }, - )(input) + ) + .parse(input) } -pub(crate) fn parse_vertex_normal(input: &[Token]) -> IResult<&[Token], Normal> { +pub(crate) fn parse_vertex_normal(input: TokenSet) -> IResult { map( preceded( token_match!(Token::VertexNormal), - tuple(( + ( token_match!(Token::Float(_) | Token::Int(_)), token_match!(Token::Float(_) | Token::Int(_)), token_match!(Token::Float(_) | Token::Int(_)), - )), + ), ), |(x, y, z)| { let (x, y, z) = ( @@ -386,18 +389,19 @@ pub(crate) fn parse_vertex_normal(input: &[Token]) -> IResult<&[Token], Normal> ); (x, y, z).into() }, - )(input) + ) + .parse(input) } -pub(crate) fn parse_vertex_texture(input: &[Token]) -> IResult<&[Token], Texture> { +pub(crate) fn parse_vertex_texture(input: TokenSet) -> IResult { map( preceded( token_match!(Token::VertexTexture), - tuple(( + ( token_match!(Token::Float(_) | Token::Int(_)), opt(token_match!(Token::Float(_) | Token::Int(_))), opt(token_match!(Token::Float(_) | Token::Int(_))), - )), + ), ), |(u, v, w)| { let u = match get_token_float(&u) { @@ -423,15 +427,16 @@ pub(crate) fn parse_vertex_texture(input: &[Token]) -> IResult<&[Token], Texture }); (u, v, w).into() }, - )(input) + ) + .parse(input) } -pub(crate) fn parse_face(input: &[Token]) -> IResult<&[Token], Face> { +pub(crate) fn parse_face(input: TokenSet) -> IResult { preceded( token_match!(Token::Face), fold_many1( map( - tuple(( + ( token_match!(Token::Int(_)), opt(preceded( token_match!(Token::Slash), @@ -441,7 +446,7 @@ pub(crate) fn parse_face(input: &[Token]) -> IResult<&[Token], Face> { token_match!(Token::Slash), opt(token_match!(Token::Int(_))), )), - )), + ), |(v, t, n)| { let 
v = match get_token_int(&v) { Ok(s) => s, @@ -480,21 +485,22 @@ pub(crate) fn parse_face(input: &[Token]) -> IResult<&[Token], Face> { f }, ), - )(input) + ) + .parse(input) } -pub(crate) fn parse_line(input: &[Token]) -> IResult<&[Token], Line> { +pub(crate) fn parse_line(input: TokenSet) -> IResult { preceded( token_match!(Token::Line), fold_many1( map( - tuple(( + ( token_match!(Token::Int(_)), opt(preceded( token_match!(Token::Slash), opt(token_match!(Token::Int(_))), )), - )), + ), |(v, t)| { let v = match get_token_int(&v) { Ok(s) => s, @@ -519,10 +525,11 @@ pub(crate) fn parse_line(input: &[Token]) -> IResult<&[Token], Line> { f }, ), - )(input) + ) + .parse(input) } -pub(crate) fn parse_point(input: &[Token]) -> IResult<&[Token], Point> { +pub(crate) fn parse_point(input: TokenSet) -> IResult { preceded( token_match!(Token::Point), fold_many1( @@ -539,17 +546,18 @@ pub(crate) fn parse_point(input: &[Token]) -> IResult<&[Token], Point> { f }, ), - )(input) + ) + .parse(input) } -pub(crate) fn parse_group(input: &[Token]) -> IResult<&[Token], ModelElement> { +pub(crate) fn parse_group(input: TokenSet) -> IResult { map( preceded( token_match!(Token::Group), many1(map( token_match!(Token::String(_)), |s| match get_token_string(&s) { - Ok(s) => s, + Ok(s) => s.into(), Err(e) => { log::error!("{}", e); Default::default() @@ -558,17 +566,18 @@ pub(crate) fn parse_group(input: &[Token]) -> IResult<&[Token], ModelElement> { )), ), ModelElement::Group, - )(input) + ) + .parse(input) } -pub(crate) fn parse_mat_lib(input: &[Token]) -> IResult<&[Token], ModelElement> { +pub(crate) fn parse_mat_lib(input: TokenSet) -> IResult { map( preceded( token_match!(Token::MaterialLib), many1(map( token_match!(Token::String(_)), |s| match get_token_string(&s) { - Ok(s) => s, + Ok(s) => s.into(), Err(e) => { log::error!("{}", e); Default::default() @@ -577,10 +586,11 @@ pub(crate) fn parse_mat_lib(input: &[Token]) -> IResult<&[Token], ModelElement> )), ), ModelElement::MaterialLib, 
- )(input) + ) + .parse(input) } -pub(crate) fn parse_material(input: &[Token]) -> IResult<&[Token], ModelElement> { +pub(crate) fn parse_material(input: TokenSet) -> IResult { map( preceded( token_match!(Token::UseMaterial), @@ -595,12 +605,13 @@ pub(crate) fn parse_material(input: &[Token]) -> IResult<&[Token], ModelElement> }, }; - ModelElement::Material(res) + ModelElement::Material(res.into()) }, - )(input) + ) + .parse(input) } -pub(crate) fn parse_obj_name(input: &[Token]) -> IResult<&[Token], ModelElement> { +pub(crate) fn parse_obj_name(input: TokenSet) -> IResult { map( preceded( token_match!(Token::Object), @@ -614,12 +625,13 @@ pub(crate) fn parse_obj_name(input: &[Token]) -> IResult<&[Token], ModelElement> Default::default() }, }; - ModelElement::ObjName(res) + ModelElement::ObjName(res.into()) }, - )(input) + ) + .parse(input) } -pub(crate) fn parse_smoothing(input: &[Token]) -> IResult<&[Token], ModelElement> { +pub(crate) fn parse_smoothing(input: TokenSet) -> IResult { map( preceded( token_match!(Token::Smoothing), @@ -652,10 +664,11 @@ pub(crate) fn parse_smoothing(input: &[Token]) -> IResult<&[Token], ModelElement }; ModelElement::Smoothing(res) }, - )(input) + ) + .parse(input) } -pub(crate) fn parse_bevel(input: &[Token]) -> IResult<&[Token], ModelElement> { +pub(crate) fn parse_bevel(input: TokenSet) -> IResult { map( preceded(token_match!(Token::Bevel), token_match!(Token::String(_))), |s| { @@ -673,10 +686,11 @@ pub(crate) fn parse_bevel(input: &[Token]) -> IResult<&[Token], ModelElement> { ModelElement::Bevel(false) } }, - )(input) + ) + .parse(input) } -pub(crate) fn parse_c_interp(input: &[Token]) -> IResult<&[Token], ModelElement> { +pub(crate) fn parse_c_interp(input: TokenSet) -> IResult { map( preceded(token_match!(Token::CInterp), token_match!(Token::String(_))), |s| { @@ -694,10 +708,11 @@ pub(crate) fn parse_c_interp(input: &[Token]) -> IResult<&[Token], ModelElement> ModelElement::CInterp(false) } }, - )(input) + ) + .parse(input) 
} -pub(crate) fn parse_d_interp(input: &[Token]) -> IResult<&[Token], ModelElement> { +pub(crate) fn parse_d_interp(input: TokenSet) -> IResult { map( preceded(token_match!(Token::DInterp), token_match!(Token::String(_))), |s| { @@ -715,10 +730,11 @@ pub(crate) fn parse_d_interp(input: &[Token]) -> IResult<&[Token], ModelElement> ModelElement::DInterp(false) } }, - )(input) + ) + .parse(input) } -pub(crate) fn parse_lod(input: &[Token]) -> IResult<&[Token], ModelElement> { +pub(crate) fn parse_lod(input: TokenSet) -> IResult { map( preceded(token_match!(Token::Lod), token_match!(Token::Int(_))), |s| { @@ -731,10 +747,11 @@ pub(crate) fn parse_lod(input: &[Token]) -> IResult<&[Token], ModelElement> { }; ModelElement::Lod(res) }, - )(input) + ) + .parse(input) } -pub(crate) fn parse_shadow_obj(input: &[Token]) -> IResult<&[Token], ModelElement> { +pub(crate) fn parse_shadow_obj(input: TokenSet) -> IResult { map( preceded( token_match!(Token::ShadowObj), @@ -749,12 +766,13 @@ pub(crate) fn parse_shadow_obj(input: &[Token]) -> IResult<&[Token], ModelElemen }, }; - ModelElement::ShadowObj(res) + ModelElement::ShadowObj(res.into()) }, - )(input) + ) + .parse(input) } -pub(crate) fn parse_trace_obj(input: &[Token]) -> IResult<&[Token], ModelElement> { +pub(crate) fn parse_trace_obj(input: TokenSet) -> IResult { map( preceded( token_match!(Token::TraceObj), @@ -769,12 +787,13 @@ pub(crate) fn parse_trace_obj(input: &[Token]) -> IResult<&[Token], ModelElement }, }; - ModelElement::TraceObj(res) + ModelElement::TraceObj(res.into()) }, - )(input) + ) + .parse(input) } -pub(crate) fn parse_texture_lib(input: &[Token]) -> IResult<&[Token], ModelElement> { +pub(crate) fn parse_texture_lib(input: TokenSet) -> IResult { map( preceded( token_match!(Token::TextureMapLib), @@ -787,14 +806,15 @@ pub(crate) fn parse_texture_lib(input: &[Token]) -> IResult<&[Token], ModelEleme }, }; - res + res.into() })), ), ModelElement::TextureLib, - )(input) + ) + .parse(input) } -pub(crate) fn 
parse_texture_map(input: &[Token]) -> IResult<&[Token], ModelElement> { +pub(crate) fn parse_texture_map(input: TokenSet) -> IResult { map( preceded( token_match!(Token::UseTextureMap), @@ -809,7 +829,8 @@ pub(crate) fn parse_texture_map(input: &[Token]) -> IResult<&[Token], ModelEleme }, }; - ModelElement::TextureMap(res) + ModelElement::TextureMap(res.into()) }, - )(input) + ) + .parse(input) } diff --git a/src/test/obj.rs b/src/test/obj.rs index 68c4cad..e065858 100644 --- a/src/test/obj.rs +++ b/src/test/obj.rs @@ -92,7 +92,7 @@ fn parse_vertex_texture() { assert_eq!(tokens[1], Token::Float(0.500)); assert_eq!(tokens[2], Token::Int(1)); - let res = model::parse_vertex_texture(&tokens); + let res = model::parse_vertex_texture(tokens); assert!(res.is_ok()); let (extra, texture) = res.ok().unwrap(); @@ -113,7 +113,7 @@ fn parse_vertex_texture2() { assert!(res.is_ok()); let tokens = res.unwrap(); - let res = model::parse_vertex_texture(&tokens); + let res = model::parse_vertex_texture(tokens); assert!(res.is_ok()); let (extra, texture) = res.ok().unwrap(); @@ -139,7 +139,7 @@ fn parse_face() { assert_eq!(tokens[2], Token::Int(2)); assert_eq!(tokens[3], Token::Int(3)); - let res = model::parse_face(&tokens); + let res = model::parse_face(tokens); assert!(res.is_ok()); let (extra, face) = res.ok().unwrap(); @@ -187,7 +187,7 @@ fn parse_face_1() { assert_eq!(tokens[8], Token::Slash); assert_eq!(tokens[9], Token::Int(4)); - let res = model::parse_face(&tokens); + let res = model::parse_face(tokens); let (extra, face) = res.ok().unwrap(); assert_eq!(extra.len(), 0); @@ -240,7 +240,7 @@ fn parse_face_2() { assert_eq!(tokens[14], Token::Slash); assert_eq!(tokens[15], Token::Int(5)); - let res = model::parse_face(&tokens); + let res = model::parse_face(tokens); let (extra, face) = res.ok().unwrap(); assert_eq!(extra.len(), 0); @@ -290,7 +290,7 @@ fn parse_face_3() { assert_eq!(tokens[11], Token::Slash); assert_eq!(tokens[12], Token::Int(4)); - let res = 
model::parse_face(&tokens); + let res = model::parse_face(tokens); let (extra, face) = res.ok().unwrap(); assert_eq!(extra.len(), 0); @@ -328,7 +328,7 @@ fn parse_face_4() { assert!(res.is_ok()); let tokens = res.unwrap(); - let res = model::parse_face(&tokens); + let res = model::parse_face(tokens); let (extra, face) = res.ok().unwrap(); assert_eq!(extra.len(), 0); @@ -371,7 +371,7 @@ fn parse_face_trailing_slash() { assert!(res.is_ok()); let tokens = res.unwrap(); - let res = model::parse_face(&tokens); + let res = model::parse_face(tokens); let (extra, face) = res.ok().unwrap(); assert_eq!(extra.len(), 0); @@ -409,7 +409,7 @@ fn parse_face_trailing_slash_slash() { assert!(res.is_ok()); let tokens = res.unwrap(); - let res = model::parse_face(&tokens); + let res = model::parse_face(tokens); let (extra, face) = res.ok().unwrap(); assert_eq!(extra.len(), 0); @@ -450,7 +450,7 @@ fn parse_point() { assert_eq!(tokens[2], Token::Int(2)); assert_eq!(tokens[3], Token::Int(3)); - let res = model::parse_point(&tokens); + let res = model::parse_point(tokens); assert!(res.is_ok()); let (extra, point) = res.ok().unwrap(); @@ -475,7 +475,7 @@ fn parse_line() { assert_eq!(tokens[2], Token::Int(2)); assert_eq!(tokens[3], Token::Int(3)); - let res = model::parse_line(&tokens); + let res = model::parse_line(tokens); assert!(res.is_ok()); let (extra, line) = res.ok().unwrap(); @@ -507,7 +507,7 @@ fn parse_line_texture_struct() { assert!(res.is_ok()); let tokens = res.unwrap(); - let res = model::parse_line(&tokens); + let res = model::parse_line(tokens); assert!(res.is_ok()); let (extra, line) = res.ok().unwrap(); @@ -541,7 +541,7 @@ fn parse_line_trailing_slash_struct() { assert!(res.is_ok()); let tokens = res.unwrap(); - let res = model::parse_line(&tokens); + let res = model::parse_line(tokens); assert!(res.is_ok()); let (extra, line) = res.ok().unwrap(); @@ -574,9 +574,9 @@ fn simple_material() { let tokens = res.unwrap(); assert_eq!(tokens.len(), 2); assert_eq!(tokens[0], 
Token::MaterialLib); - assert_eq!(tokens[1], Token::String("some_mtl_file.mtl".to_string())); + assert_eq!(tokens[1], Token::String("some_mtl_file.mtl".into())); - let res = model::parse_mat_lib(&tokens); + let res = model::parse_mat_lib(tokens); assert!(res.is_ok()); let (extra, model) = res.ok().unwrap(); @@ -596,9 +596,9 @@ fn simple_group() { let tokens = res.unwrap(); assert_eq!(tokens.len(), 2); assert_eq!(tokens[0], Token::Group); - assert_eq!(tokens[1], Token::String("some_group".to_string())); + assert_eq!(tokens[1], Token::String("some_group".into())); - let res = model::parse_group(&tokens); + let res = model::parse_group(tokens); assert!(res.is_ok()); let (extra, model) = res.ok().unwrap(); @@ -615,9 +615,9 @@ fn simple_object() { let tokens = res.unwrap(); assert_eq!(tokens.len(), 2); assert_eq!(tokens[0], Token::Object); - assert_eq!(tokens[1], Token::String("some_object".to_string())); + assert_eq!(tokens[1], Token::String("some_object".into())); - let res = model::parse_obj_name(&tokens); + let res = model::parse_obj_name(tokens); assert!(res.is_ok()); let (extra, model) = res.ok().unwrap(); diff --git a/src/tokenizer/mod.rs b/src/tokenizer/mod.rs index 85b849a..7adec28 100644 --- a/src/tokenizer/mod.rs +++ b/src/tokenizer/mod.rs @@ -4,6 +4,11 @@ mod obj; #[cfg(test)] mod test; +use std::borrow::Cow; +use std::iter::Enumerate; +use std::ops::Index; +use std::ops::IndexMut; + pub use mtl::parse_mtl; use nom::{ branch::alt, @@ -11,8 +16,7 @@ use nom::{ character::complete::digit1, combinator::{map, opt}, multi::{fold_many0, fold_many1}, - sequence::tuple, - IResult, + IResult, Input, Parser, }; pub use obj::parse_obj; @@ -25,9 +29,9 @@ pub enum TokenizeError { } #[derive(Clone, Debug, PartialEq)] -pub enum Token { +pub enum Token<'a> { Ignore, - String(String), + String(Cow<'a, str>), Float(f32), Int(i32), Slash, @@ -461,15 +465,117 @@ pub enum Token { OptionTextureResolution, } -pub(self) fn parse_digit(input: &str) -> IResult<&str, Token> { 
+#[derive(Debug, Clone)] +pub struct TokenSet<'a> { + tokens: Vec>, +} + +impl TokenSet<'_> { + pub fn is_empty(&self) -> bool { + self.tokens.is_empty() + } + + pub fn split_at(&self, index: usize) -> (Self, Self) { + let (a, b) = self.tokens.split_at(index); + (Self { tokens: a.to_vec() }, Self { tokens: b.to_vec() }) + } + + pub fn len(&self) -> usize { + self.tokens.len() + } +} + +impl<'a> Index for TokenSet<'a> { + type Output = Token<'a>; + fn index(&self, index: usize) -> &Self::Output { + &self.tokens[index] + } +} + +impl IndexMut for TokenSet<'_> { + fn index_mut(&mut self, index: usize) -> &mut Self::Output { + &mut self.tokens[index] + } +} + +impl<'a> From>> for TokenSet<'a> { + fn from(tokens: Vec>) -> Self { + Self { tokens } + } +} + +impl<'a> AsRef>> for TokenSet<'a> { + fn as_ref(&self) -> &Vec> { + &self.tokens + } +} + +impl<'a> Iterator for TokenSet<'a> { + type Item = Token<'a>; + + fn next(&mut self) -> Option { + self.tokens.first().cloned() + } +} + +impl<'a> Input for TokenSet<'a> { + type Item = Token<'a>; + type Iter = TokenSet<'a>; + type IterIndices = Enumerate; + + fn input_len(&self) -> usize { + self.len() + } + + fn take(&self, index: usize) -> Self { + Self { + tokens: self.tokens.iter().take(index).cloned().collect(), + } + } + + fn take_from(&self, index: usize) -> Self { + Self { + tokens: self.tokens[index..].to_vec(), + } + } + + fn take_split(&self, index: usize) -> (Self, Self) { + self.split_at(index) + } + + fn position

(&self, predicate: P) -> Option + where + P: Fn(Self::Item) -> bool, + { + self.tokens.iter().position(|t| predicate(t.clone())) + } + + fn iter_elements(&self) -> Self::Iter { + self.clone() + } + + fn iter_indices(&self) -> Self::IterIndices { + self.iter_elements().enumerate() + } + + fn slice_index(&self, count: usize) -> Result { + if self.len() >= count { + Ok(count) + } else { + Err(nom::Needed::new(count - self.len())) + } + } +} + +fn parse_digit(input: &str) -> IResult<&str, Token> { map( - tuple(( + ( opt(alt((tag("+"), tag("-")))), fold_many1(digit1, Vec::new, |mut acc: Vec<_>, item| { acc.push(item); acc }), - )), + ), |(sign, s): (Option<&str>, Vec<&str>)| { let mut val = s.join("").parse::().unwrap_or_default(); if sign == Some("-") { @@ -477,17 +583,18 @@ pub(self) fn parse_digit(input: &str) -> IResult<&str, Token> { } Token::Int(val) }, - )(input) + ) + .parse(input) } #[allow(clippy::type_complexity)] -pub(self) fn parse_float(input: &str) -> IResult<&str, Token> { +fn parse_float(input: &str) -> IResult<&str, Token> { map( - tuple(( + ( opt(alt((tag("+"), tag("-")))), alt(( map( - tuple(( + ( fold_many0(digit1, Vec::new, |mut acc: Vec<_>, item| { acc.push(item); acc @@ -498,11 +605,11 @@ pub(self) fn parse_float(input: &str) -> IResult<&str, Token> { acc })), opt(map( - tuple(( + ( alt((tag("e"), tag("E"))), opt(alt((tag("+"), tag("-")))), digit1, - )), + ), |(e, sign, digits)| { let mut acc = String::new(); acc.push_str(e); @@ -513,11 +620,11 @@ pub(self) fn parse_float(input: &str) -> IResult<&str, Token> { acc }, )), - )), + ), |(f, _, s, e)| (f, s.unwrap_or_default(), e.unwrap_or_default()), ), map( - tuple(( + ( opt(fold_many1(digit1, Vec::new, |mut acc: Vec<_>, item| { acc.push(item); acc @@ -528,11 +635,11 @@ pub(self) fn parse_float(input: &str) -> IResult<&str, Token> { acc }), opt(map( - tuple(( + ( alt((tag("e"), tag("E"))), opt(alt((tag("+"), tag("-")))), digit1, - )), + ), |(e, sign, digits)| { let mut acc = String::new(); 
acc.push_str(e); @@ -543,11 +650,11 @@ pub(self) fn parse_float(input: &str) -> IResult<&str, Token> { acc }, )), - )), + ), |(f, _, s, e)| (f.unwrap_or_default(), s, e.unwrap_or_default()), ), )), - )), + ), |(sign, (f, s, e)): (Option<&str>, (Vec<&str>, Vec<&str>, String))| { let mut acc = Vec::new(); if !f.is_empty() { @@ -566,5 +673,6 @@ pub(self) fn parse_float(input: &str) -> IResult<&str, Token> { } Token::Float(val) }, - )(input) + ) + .parse(input) } diff --git a/src/tokenizer/mtl.rs b/src/tokenizer/mtl.rs index 907b249..d1f2b2c 100644 --- a/src/tokenizer/mtl.rs +++ b/src/tokenizer/mtl.rs @@ -1,3 +1,4 @@ +use std::borrow::Cow; use std::result::Result; use nom::{ @@ -7,11 +8,12 @@ use nom::{ combinator::map, multi::fold_many0, sequence::{delimited, preceded}, + Parser, }; -use super::{Token, TokenizeError}; +use super::{Token, TokenSet, TokenizeError}; -pub fn parse_mtl(input: &str) -> Result, TokenizeError> { +pub fn parse_mtl(input: &str) -> Result { match fold_many0( alt(( delimited( @@ -70,7 +72,7 @@ pub fn parse_mtl(input: &str) -> Result, TokenizeError> { |_| Token::Ignore, ), map(alt((line_ending, multispace1)), |_| Token::Ignore), - map(is_not(" \r\n"), |s: &str| Token::String(s.to_string())), + map(is_not(" \r\n"), |s: &str| Token::String(Cow::Borrowed(s))), )), Vec::new, |mut acc: Vec, item| { @@ -79,9 +81,10 @@ pub fn parse_mtl(input: &str) -> Result, TokenizeError> { } acc }, - )(input) + ) + .parse(input) { - Ok((_, v)) => Ok(v), + Ok((_, v)) => Ok(v.into()), Err(e) => Err(TokenizeError::Parse(e.to_string())), } } diff --git a/src/tokenizer/obj.rs b/src/tokenizer/obj.rs index e60bc64..fae4cca 100644 --- a/src/tokenizer/obj.rs +++ b/src/tokenizer/obj.rs @@ -1,3 +1,4 @@ +use std::borrow::Cow; use std::result::Result; use nom::{ @@ -7,11 +8,12 @@ use nom::{ combinator::map, multi::fold_many0, sequence::{delimited, preceded}, + Parser, }; -use super::{Token, TokenizeError}; +use super::{Token, TokenSet, TokenizeError}; -pub fn parse_obj(input: 
&str) -> Result, TokenizeError> { +pub fn parse_obj(input: &str) -> Result { match fold_many0( alt(( delimited( @@ -48,7 +50,7 @@ pub fn parse_obj(input: &str) -> Result, TokenizeError> { |_| Token::Ignore, ), map(alt((line_ending, multispace1)), |_| Token::Ignore), - map(is_not("\r\n"), |s: &str| Token::String(s.to_string())), + map(is_not("\r\n"), |s: &str| Token::String(Cow::Borrowed(s))), )), Vec::new, |mut acc: Vec, item| { @@ -57,9 +59,10 @@ pub fn parse_obj(input: &str) -> Result, TokenizeError> { } acc }, - )(input) + ) + .parse(input) { - Ok((_, v)) => Ok(v), + Ok((_, v)) => Ok(v.into()), Err(e) => Err(TokenizeError::Parse(e.to_string())), } }