diff --git a/src/lex.rs b/src/lex.rs
index 6c8574e..9bc197b 100644
--- a/src/lex.rs
+++ b/src/lex.rs
@@ -51,6 +51,8 @@ pub enum TokenKind {
     Var,
     While,
     Eof,
+
+    Error,
 }
 
 #[derive(Debug, Clone)]
@@ -93,38 +95,41 @@ impl Token {
     }
 }
 
-pub fn scan_source(source: &str) -> Result<Vec<Token>, ScriptErrors> {
+pub fn scan_source<F: FnMut(ScriptError)>(source: &str, mut on_error: F) -> Vec<Token> {
     let mut idx = 0_usize;
     let mut line = 1_usize;
     let mut tokens = Vec::new();
-    let mut errors = Vec::new();
 
     while idx < source.len() {
         match scan_token(&source[idx..], line) {
             Ok(consumed) => {
                 if let Some(token) = consumed.token {
                     tokens.push(token);
                 }
+
                 idx += consumed.span.chars;
                 line += consumed.span.lines;
             }
             Err(err) => {
-                errors.push(err.error);
+                on_error(err.error);
+                tokens.push(Token {
+                    kind: TokenKind::Error,
+                    lexeme: String::new(),
+                    line,
+                });
+
                 idx += err.span.chars;
                 line += err.span.lines;
             }
         }
     }
-    if errors.is_empty() {
-        tokens.push(Token {
-            kind: TokenKind::Eof,
-            lexeme: String::new(),
-            line: line + 1,
-        });
-        Ok(tokens)
-    } else {
-        Err(errors.into())
-    }
+    tokens.push(Token {
+        kind: TokenKind::Eof,
+        lexeme: String::new(),
+        line: line + 1,
+    });
+
+    tokens
 }
 
 fn scan_token(partial_source: &str, start_line: usize) -> Result<Consumed, ScanError> {
diff --git a/src/lib.rs b/src/lib.rs
index bce58f0..46398bd 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -32,7 +32,13 @@ impl Display for ScriptErrors {
     }
 }
 pub fn run(script: &str) -> Result<(), ScriptErrors> {
-    let tokens = lex::scan_source(script)?;
+    let mut errors = Vec::new();
+    let tokens = lex::scan_source(script, |err| errors.push(err));
+    if !errors.is_empty() {
+        dbg!(tokens);
+        return Err(errors.into());
+    }
+
     for token in &tokens {
         print!("{}", token.lexeme());
    }
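
For context on the new calling convention: scan_source no longer returns a Result. It always produces a token stream terminated by an Eof token, synthesizes a TokenKind::Error token at each failure site, and reports the failures through the on_error callback. The sketch below shows how another caller could lean on that recovery to keep printing whatever was lexed before reporting the errors, in the style of run() above. It is only a sketch: run_lenient is a hypothetical helper, and ScriptError is an assumed name for the per-error value handed to the callback (the diff only shows it being collected into a Vec and converted into ScriptErrors).

// Hypothetical companion to run() in src/lib.rs; ScriptError is an assumed
// name for the per-error value passed to the scan_source callback.
pub fn run_lenient(script: &str) -> Result<(), ScriptErrors> {
    let mut errors: Vec<ScriptError> = Vec::new();

    // Lexing can no longer fail outright: errors are surfaced through the
    // callback, and an Error token marks each failure site in the stream.
    let tokens = lex::scan_source(script, |err| errors.push(err));

    // Unlike run(), keep going with the recovered tokens before reporting.
    for token in &tokens {
        print!("{}", token.lexeme());
    }

    if errors.is_empty() {
        Ok(())
    } else {
        Err(errors.into())
    }
}

The same shape would let a later parser stage treat the Error tokens as its own recovery points instead of abandoning the whole script at the first bad character.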