From 9f09523b564c957c7dfa03b1ddb85394433cfd39 Mon Sep 17 00:00:00 2001 From: Rob Hand <146272+sinon@users.noreply.github.com> Date: Sat, 2 Aug 2025 19:47:55 +0100 Subject: [PATCH] Remove Display trait impl for Token --- crates/lexer/src/lib.rs | 49 ------------------- crates/lexer/tests/lexer_test.rs | 21 ++++---- .../snapshots/lexer_test__empty_handling.snap | 4 +- .../snapshots/lexer_test__group_literal.snap | 5 ++ .../snapshots/lexer_test__identifiers.snap | 20 ++++---- .../tests/snapshots/lexer_test__keywords.snap | 36 +++++++------- .../tests/snapshots/lexer_test__numbers.snap | 22 ++++----- .../snapshots/lexer_test__punctuators.snap | 44 ++++++++--------- .../tests/snapshots/lexer_test__strings.snap | 8 +-- .../snapshots/lexer_test__whitespace.snap | 12 ++--- crates/loxide/src/interpreter.rs | 14 +++--- 11 files changed, 95 insertions(+), 140 deletions(-) create mode 100644 crates/lexer/tests/snapshots/lexer_test__group_literal.snap diff --git a/crates/lexer/src/lib.rs b/crates/lexer/src/lib.rs index cfd2e7c..07cb2d4 100644 --- a/crates/lexer/src/lib.rs +++ b/crates/lexer/src/lib.rs @@ -3,8 +3,6 @@ //! 
Responsible for transforming a given input str into a Iterator of `Result` #![allow(clippy::too_many_lines)] -use std::fmt; - use miette::{Error, LabeledSpan, Result, miette}; /// `Token` is formed of a token type (`TokenType`) and a reference to a str in the input string @@ -68,53 +66,6 @@ pub enum TokenType { Eof, } -impl fmt::Display for Token<'_> { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - let i = self.origin; - match self.token_type { - TokenType::RightParen => write!(f, "RIGHT_PAREN {i} null"), - TokenType::LeftParen => write!(f, "LEFT_PAREN {i} null"), - TokenType::RightBrace => write!(f, "RIGHT_BRACE {i} null"), - TokenType::LeftBrace => write!(f, "LEFT_BRACE {i} null"), - TokenType::Comma => write!(f, "COMMA {i} null"), - TokenType::Dot => write!(f, "DOT {i} null"), - TokenType::Minus => write!(f, "MINUS {i} null"), - TokenType::Plus => write!(f, "PLUS {i} null"), - TokenType::Star => write!(f, "STAR {i} null"), - TokenType::Semicolon => write!(f, "SEMICOLON {i} null"), - TokenType::Equal => write!(f, "EQUAL {i} null"), - TokenType::EqualEqual => write!(f, "EQUAL_EQUAL {i} null"), - TokenType::Bang => write!(f, "BANG {i} null"), - TokenType::BangEqual => write!(f, "BANG_EQUAL {i} null"), - TokenType::Less => write!(f, "LESS {i} null"), - TokenType::LessEqual => write!(f, "LESS_EQUAL {i} null"), - TokenType::Greater => write!(f, "GREATER {i} null"), - TokenType::GreaterEqual => write!(f, "GREATER_EQUAL {i} null"), - TokenType::Slash => write!(f, "SLASH {i} null"), - TokenType::String => write!(f, "STRING \"{i}\" {i}"), - TokenType::Identifier => write!(f, "IDENTIFIER {i} null"), - TokenType::Number(n) => write!(f, "NUMBER {i} {n:?}"), - TokenType::And => write!(f, "AND {i} null"), - TokenType::Class => write!(f, "CLASS {i} null"), - TokenType::Else => write!(f, "ELSE {i} null"), - TokenType::False => write!(f, "FALSE {i} null"), - TokenType::Fun => write!(f, "FUN {i} null"), - TokenType::For => write!(f, "FOR {i} null"), - TokenType::If => 
write!(f, "IF {i} null"), - TokenType::Nil => write!(f, "NIL {i} null"), - TokenType::Or => write!(f, "OR {i} null"), - TokenType::Print => write!(f, "PRINT {i} null"), - TokenType::Return => write!(f, "RETURN {i} null"), - TokenType::Super => write!(f, "SUPER {i} null"), - TokenType::This => write!(f, "THIS {i} null"), - TokenType::True => write!(f, "TRUE {i} null"), - TokenType::Var => write!(f, "VAR {i} null"), - TokenType::While => write!(f, "WHILE {i} null"), - TokenType::Eof => write!(f, "EOF null"), - } - } -} - /// `Lexer` is responsible for iterating over a input string and emitting `Token` for /// each detected `TokenType`. It maintains the following state: /// diff --git a/crates/lexer/tests/lexer_test.rs b/crates/lexer/tests/lexer_test.rs index 086a22c..d52adce 100644 --- a/crates/lexer/tests/lexer_test.rs +++ b/crates/lexer/tests/lexer_test.rs @@ -9,7 +9,7 @@ fn test_identifiers() { abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890_"; let output = Lexer::new(input) .filter_map(Result::ok) - .map(|x| format!("{x:}")) + .map(|x| format!("{x:?}")) .collect::>(); insta::with_settings!({ description => input, @@ -24,7 +24,7 @@ fn test_keywords() { let input = "and class else false for fun if nil or return super this true var while print"; let output = Lexer::new(input) .filter_map(Result::ok) - .map(|x| format!("{x}")) + .map(|x| format!("{x:?}")) .collect::>(); insta::with_settings!({ description => input, @@ -44,7 +44,7 @@ fn test_numbers() { 523."; let out = Lexer::new(input) .filter_map(Result::ok) - .map(|x| format!("{x}")) + .map(|x| format!("{x:?}")) .collect::>(); insta::with_settings!({ description => input, @@ -59,7 +59,7 @@ fn test_punctuators() { let input = "(){};,+-*!===<=>=!=<>/.=!"; let out = Lexer::new(input) .filter_map(Result::ok) - .map(|x| format!("{x}")) + .map(|x| format!("{x:?}")) .collect::>(); insta::with_settings!({ description => input, @@ -75,7 +75,7 @@ fn test_strings() { \"string\""; let out = Lexer::new(input) 
.filter_map(Result::ok) - .map(|x| format!("{x}")) + .map(|x| format!("{x:?}")) .collect::>(); insta::with_settings!({ description => input, @@ -95,7 +95,7 @@ fn test_whitespace() { end//"; let out = Lexer::new(input) .filter_map(Result::ok) - .map(|x| format!("{x}")) + .map(|x| format!("{x:?}")) .collect::>(); insta::with_settings!({ @@ -123,13 +123,10 @@ fn test_errors() { fn test_group_literal() { let out = Lexer::new("((true))") .filter_map(Result::ok) - .map(|x| format!("{x}")) + .map(|x| format!("{x:?}")) .collect::>() .join("\n"); - assert_eq!( - out, - "LEFT_PAREN ( null\nLEFT_PAREN ( null\nTRUE true null\nRIGHT_PAREN ) null\nRIGHT_PAREN ) null\nEOF null" - ); + assert_yaml_snapshot!(out); } #[test] @@ -137,7 +134,7 @@ fn test_empty_handling() { let out: String = Lexer::new("") .filter_map(Result::ok) .fold(String::new(), |mut out, t| { - let _ = write!(out, "{t}"); + let _ = write!(out, "{t:?}"); out }); assert_yaml_snapshot!(out); diff --git a/crates/lexer/tests/snapshots/lexer_test__empty_handling.snap b/crates/lexer/tests/snapshots/lexer_test__empty_handling.snap index 6be0f27..cd72672 100644 --- a/crates/lexer/tests/snapshots/lexer_test__empty_handling.snap +++ b/crates/lexer/tests/snapshots/lexer_test__empty_handling.snap @@ -1,5 +1,5 @@ --- -source: tests/lexer_test.rs +source: crates/lexer/tests/lexer_test.rs expression: out --- -EOF null +"Token { token_type: Eof, origin: \"\", line: 1 }" diff --git a/crates/lexer/tests/snapshots/lexer_test__group_literal.snap b/crates/lexer/tests/snapshots/lexer_test__group_literal.snap new file mode 100644 index 0000000..0528771 --- /dev/null +++ b/crates/lexer/tests/snapshots/lexer_test__group_literal.snap @@ -0,0 +1,5 @@ +--- +source: crates/lexer/tests/lexer_test.rs +expression: out +--- +"Token { token_type: LeftParen, origin: \"(\", line: 1 }\nToken { token_type: LeftParen, origin: \"(\", line: 1 }\nToken { token_type: True, origin: \"true\", line: 1 }\nToken { token_type: RightParen, origin: \")\", line: 1 
}\nToken { token_type: RightParen, origin: \")\", line: 1 }\nToken { token_type: Eof, origin: \"\", line: 1 }" diff --git a/crates/lexer/tests/snapshots/lexer_test__identifiers.snap b/crates/lexer/tests/snapshots/lexer_test__identifiers.snap index 73362e2..fa30f33 100644 --- a/crates/lexer/tests/snapshots/lexer_test__identifiers.snap +++ b/crates/lexer/tests/snapshots/lexer_test__identifiers.snap @@ -1,13 +1,13 @@ --- -source: tests/lexer_test.rs +source: crates/lexer/tests/lexer_test.rs description: "andy formless fo _ _123 _abc ab123\nabcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890_" --- -- IDENTIFIER andy null -- IDENTIFIER formless null -- IDENTIFIER fo null -- IDENTIFIER _ null -- IDENTIFIER _123 null -- IDENTIFIER _abc null -- IDENTIFIER ab123 null -- IDENTIFIER abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890_ null -- EOF null +- "Token { token_type: Identifier, origin: \"andy\", line: 1 }" +- "Token { token_type: Identifier, origin: \"formless\", line: 1 }" +- "Token { token_type: Identifier, origin: \"fo\", line: 1 }" +- "Token { token_type: Identifier, origin: \"_\", line: 1 }" +- "Token { token_type: Identifier, origin: \"_123\", line: 1 }" +- "Token { token_type: Identifier, origin: \"_abc\", line: 1 }" +- "Token { token_type: Identifier, origin: \"ab123\", line: 1 }" +- "Token { token_type: Identifier, origin: \"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890_\", line: 2 }" +- "Token { token_type: Eof, origin: \"\", line: 2 }" diff --git a/crates/lexer/tests/snapshots/lexer_test__keywords.snap b/crates/lexer/tests/snapshots/lexer_test__keywords.snap index 19e8774..a91eb94 100644 --- a/crates/lexer/tests/snapshots/lexer_test__keywords.snap +++ b/crates/lexer/tests/snapshots/lexer_test__keywords.snap @@ -1,21 +1,21 @@ --- -source: tests/lexer_test.rs +source: crates/lexer/tests/lexer_test.rs description: and class else false for fun if nil or return super this true var while print --- -- AND and null -- CLASS 
class null -- ELSE else null -- FALSE false null -- FOR for null -- FUN fun null -- IF if null -- NIL nil null -- OR or null -- RETURN return null -- SUPER super null -- THIS this null -- TRUE true null -- VAR var null -- WHILE while null -- PRINT print null -- EOF null +- "Token { token_type: And, origin: \"and\", line: 1 }" +- "Token { token_type: Class, origin: \"class\", line: 1 }" +- "Token { token_type: Else, origin: \"else\", line: 1 }" +- "Token { token_type: False, origin: \"false\", line: 1 }" +- "Token { token_type: For, origin: \"for\", line: 1 }" +- "Token { token_type: Fun, origin: \"fun\", line: 1 }" +- "Token { token_type: If, origin: \"if\", line: 1 }" +- "Token { token_type: Nil, origin: \"nil\", line: 1 }" +- "Token { token_type: Or, origin: \"or\", line: 1 }" +- "Token { token_type: Return, origin: \"return\", line: 1 }" +- "Token { token_type: Super, origin: \"super\", line: 1 }" +- "Token { token_type: This, origin: \"this\", line: 1 }" +- "Token { token_type: True, origin: \"true\", line: 1 }" +- "Token { token_type: Var, origin: \"var\", line: 1 }" +- "Token { token_type: While, origin: \"while\", line: 1 }" +- "Token { token_type: Print, origin: \"print\", line: 1 }" +- "Token { token_type: Eof, origin: \"\", line: 1 }" diff --git a/crates/lexer/tests/snapshots/lexer_test__numbers.snap b/crates/lexer/tests/snapshots/lexer_test__numbers.snap index e524f92..ce8c3ca 100644 --- a/crates/lexer/tests/snapshots/lexer_test__numbers.snap +++ b/crates/lexer/tests/snapshots/lexer_test__numbers.snap @@ -1,14 +1,14 @@ --- -source: tests/lexer_test.rs +source: crates/lexer/tests/lexer_test.rs description: "123\n123.456\n.457\n123.\n90\n523." --- -- NUMBER 123 123.0 -- NUMBER 123.456 123.456 -- DOT . null -- NUMBER 457 457.0 -- NUMBER 123 123.0 -- DOT . null -- NUMBER 90 90.0 -- NUMBER 523 523.0 -- DOT . 
null -- EOF null +- "Token { token_type: Number(123.0), origin: \"123\", line: 1 }" +- "Token { token_type: Number(123.456), origin: \"123.456\", line: 2 }" +- "Token { token_type: Dot, origin: \".\", line: 3 }" +- "Token { token_type: Number(457.0), origin: \"457\", line: 3 }" +- "Token { token_type: Number(123.0), origin: \"123\", line: 4 }" +- "Token { token_type: Dot, origin: \".\", line: 4 }" +- "Token { token_type: Number(90.0), origin: \"90\", line: 5 }" +- "Token { token_type: Number(523.0), origin: \"523\", line: 6 }" +- "Token { token_type: Dot, origin: \".\", line: 6 }" +- "Token { token_type: Eof, origin: \"\", line: 6 }" diff --git a/crates/lexer/tests/snapshots/lexer_test__punctuators.snap b/crates/lexer/tests/snapshots/lexer_test__punctuators.snap index 782b925..a210670 100644 --- a/crates/lexer/tests/snapshots/lexer_test__punctuators.snap +++ b/crates/lexer/tests/snapshots/lexer_test__punctuators.snap @@ -1,25 +1,25 @@ --- -source: tests/lexer_test.rs +source: crates/lexer/tests/lexer_test.rs description: "(){};,+-*!===<=>=!=<>/.=!" --- -- LEFT_PAREN ( null -- RIGHT_PAREN ) null -- "LEFT_BRACE { null" -- "RIGHT_BRACE } null" -- SEMICOLON ; null -- "COMMA , null" -- PLUS + null -- MINUS - null -- STAR * null -- BANG_EQUAL != null -- EQUAL_EQUAL == null -- LESS_EQUAL <= null -- GREATER_EQUAL >= null -- BANG_EQUAL != null -- LESS < null -- GREATER > null -- SLASH / null -- DOT . null -- EQUAL = null -- BANG ! 
null -- EOF null +- "Token { token_type: LeftParen, origin: \"(\", line: 1 }" +- "Token { token_type: RightParen, origin: \")\", line: 1 }" +- "Token { token_type: LeftBrace, origin: \"{\", line: 1 }" +- "Token { token_type: RightBrace, origin: \"}\", line: 1 }" +- "Token { token_type: Semicolon, origin: \";\", line: 1 }" +- "Token { token_type: Comma, origin: \",\", line: 1 }" +- "Token { token_type: Plus, origin: \"+\", line: 1 }" +- "Token { token_type: Minus, origin: \"-\", line: 1 }" +- "Token { token_type: Star, origin: \"*\", line: 1 }" +- "Token { token_type: BangEqual, origin: \"!=\", line: 1 }" +- "Token { token_type: EqualEqual, origin: \"==\", line: 1 }" +- "Token { token_type: LessEqual, origin: \"<=\", line: 1 }" +- "Token { token_type: GreaterEqual, origin: \">=\", line: 1 }" +- "Token { token_type: BangEqual, origin: \"!=\", line: 1 }" +- "Token { token_type: Less, origin: \"<\", line: 1 }" +- "Token { token_type: Greater, origin: \">\", line: 1 }" +- "Token { token_type: Slash, origin: \"/\", line: 1 }" +- "Token { token_type: Dot, origin: \".\", line: 1 }" +- "Token { token_type: Equal, origin: \"=\", line: 1 }" +- "Token { token_type: Bang, origin: \"!\", line: 1 }" +- "Token { token_type: Eof, origin: \"\", line: 1 }" diff --git a/crates/lexer/tests/snapshots/lexer_test__strings.snap b/crates/lexer/tests/snapshots/lexer_test__strings.snap index 4e69c8e..de05826 100644 --- a/crates/lexer/tests/snapshots/lexer_test__strings.snap +++ b/crates/lexer/tests/snapshots/lexer_test__strings.snap @@ -1,7 +1,7 @@ --- -source: tests/lexer_test.rs +source: crates/lexer/tests/lexer_test.rs description: "\"\"\n\"string\"" --- -- "STRING \"\" " -- "STRING \"string\" string" -- EOF null +- "Token { token_type: String, origin: \"\", line: 1 }" +- "Token { token_type: String, origin: \"string\", line: 2 }" +- "Token { token_type: Eof, origin: \"\", line: 2 }" diff --git a/crates/lexer/tests/snapshots/lexer_test__whitespace.snap 
b/crates/lexer/tests/snapshots/lexer_test__whitespace.snap index a9613bb..7e5ac49 100644 --- a/crates/lexer/tests/snapshots/lexer_test__whitespace.snap +++ b/crates/lexer/tests/snapshots/lexer_test__whitespace.snap @@ -1,9 +1,9 @@ --- -source: tests/lexer_test.rs +source: crates/lexer/tests/lexer_test.rs description: "space tabs\t\t\t\tnewlines\n\n//\n\n\nend//" --- -- IDENTIFIER space null -- IDENTIFIER tabs null -- IDENTIFIER newlines null -- IDENTIFIER end null -- EOF null +- "Token { token_type: Identifier, origin: \"space\", line: 1 }" +- "Token { token_type: Identifier, origin: \"tabs\", line: 1 }" +- "Token { token_type: Identifier, origin: \"newlines\", line: 1 }" +- "Token { token_type: Identifier, origin: \"end\", line: 6 }" +- "Token { token_type: Eof, origin: \"\", line: 6 }" diff --git a/crates/loxide/src/interpreter.rs b/crates/loxide/src/interpreter.rs index c8643d7..eb19c40 100644 --- a/crates/loxide/src/interpreter.rs +++ b/crates/loxide/src/interpreter.rs @@ -267,7 +267,7 @@ fn evaluate_statement<'de>( interpreter.environment.assign( name.origin, &EvaluatedValue::LoxFunction { - name: name.to_string(), + name: name.origin.to_string(), func_id, }, )?; @@ -332,7 +332,9 @@ fn evaluate_expression<'de>( TokenType::EqualEqual => Ok(EvaluatedValue::Bool(n1 == n2)), TokenType::BangEqual => Ok(EvaluatedValue::Bool(n1 != n2)), // TODO: Make unrepresentable by narrowing `operator` to `BinaryOperator:Not|Negate` - _ => panic!("{op} is not a valid token type for Expr::Binary with Numbers"), + _ => { + panic!("{op:?} is not a valid token type for Expr::Binary with Numbers") + } } } (EvaluatedValue::String(s1), EvaluatedValue::String(s2), operator) => { @@ -342,7 +344,7 @@ fn evaluate_expression<'de>( TokenType::BangEqual => Ok(EvaluatedValue::Bool(s1 != s2)), // TODO: Make unrepresentable by narrowing `operator` to `BinaryOperator:Not|Negate` _ => panic!( - "{operator} is not a valid token type for Expr:Binary with Strings" + "{operator:?} is not a valid token 
type for Expr::Binary with Strings" ), } } @@ -351,17 +353,17 @@ fn evaluate_expression<'de>( match operator.token_type { TokenType::EqualEqual => Ok(EvaluatedValue::Bool(false)), TokenType::BangEqual => Ok(EvaluatedValue::Bool(true)), - _ => panic!("{operator} is not supported for String<>Number"), + _ => panic!("{operator:?} is not supported for String<>Number"), } } (EvaluatedValue::Bool(b1), EvaluatedValue::Bool(b2), operator) => { match operator.token_type { TokenType::BangEqual => Ok(EvaluatedValue::Bool(b1 != b2)), TokenType::EqualEqual => Ok(EvaluatedValue::Bool(b1 == b2)), - _ => panic!("{operator} is not for suppoer Bool / Bool binary"), + _ => panic!("{operator:?} is not supported for Bool / Bool binary"), } } - (l, r, op) => todo!("Add handling for {l} {r} {op}"), + (l, r, op) => todo!("Add handling for {l} {r} {op:?}"), } } Expr::Unary { operator, right } => {