From cf7c64a7d9171f69f6f9b8fb5544ac69de3e85a8 Mon Sep 17 00:00:00 2001
From: Nick Krichevsky
Date: Wed, 15 May 2024 08:52:55 -0400
Subject: [PATCH] Migrate to using Error token in lexer

---
 src/lex.rs | 31 ++++++++++++++++++-------------
 src/lib.rs |  8 +++++++-
 2 files changed, 25 insertions(+), 14 deletions(-)

diff --git a/src/lex.rs b/src/lex.rs
index 6c8574e..9bc197b 100644
--- a/src/lex.rs
+++ b/src/lex.rs
@@ -51,6 +51,8 @@ pub enum TokenKind {
     Var,
     While,
     Eof,
+
+    Error,
 }
 
 #[derive(Debug, Clone)]
@@ -93,38 +95,41 @@ impl Token {
     }
 }
 
-pub fn scan_source(source: &str) -> Result<Vec<Token>, ScriptErrors> {
+pub fn scan_source<F: FnMut(ScriptError)>(source: &str, mut on_error: F) -> Vec<Token> {
     let mut idx = 0_usize;
     let mut line = 1_usize;
     let mut tokens = Vec::new();
-    let mut errors = Vec::new();
     while idx < source.len() {
         match scan_token(&source[idx..], line) {
             Ok(consumed) => {
                 if let Some(token) = consumed.token {
                     tokens.push(token);
                 }
+
                 idx += consumed.span.chars;
                 line += consumed.span.lines;
             }
             Err(err) => {
-                errors.push(err.error);
+                on_error(err.error);
+                tokens.push(Token {
+                    kind: TokenKind::Error,
+                    lexeme: String::new(),
+                    line,
+                });
+
                 idx += err.span.chars;
                 line += err.span.lines;
             }
         }
     }
 
-    if errors.is_empty() {
-        tokens.push(Token {
-            kind: TokenKind::Eof,
-            lexeme: String::new(),
-            line: line + 1,
-        });
-        Ok(tokens)
-    } else {
-        Err(errors.into())
-    }
+    tokens.push(Token {
+        kind: TokenKind::Eof,
+        lexeme: String::new(),
+        line: line + 1,
+    });
+
+    tokens
 }
 
 fn scan_token(partial_source: &str, start_line: usize) -> Result<ConsumedToken, SpannedError> {
diff --git a/src/lib.rs b/src/lib.rs
index bce58f0..46398bd 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -32,7 +32,13 @@ impl Display for ScriptErrors {
 }
 
 pub fn run(script: &str) -> Result<(), ScriptErrors> {
-    let tokens = lex::scan_source(script)?;
+    let mut errors = Vec::new();
+    let tokens = lex::scan_source(script, |err| errors.push(err));
+    if !errors.is_empty() {
+        dbg!(tokens);
+        return Err(errors.into());
+    }
+
     for token in &tokens {
         print!("{}", token.lexeme());
     }