Change all script strings to Rc<str>

Branch: master
Author: Nick Krichevsky, 2024-05-22 17:45:29 -04:00
parent 3c348e92ca
commit 1b826430f0
4 changed files with 22 additions and 26 deletions
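
Why Rc<str>: cloning an Rc<str> only bumps a reference count, while cloning a String
copies the whole buffer, so tokens and AST literals can share one allocation. The
sketch below is a standalone illustration of that property (it is not code from this
repository):

    use std::rc::Rc;

    fn main() {
        // Converting an owned String into Rc<str> copies the bytes once.
        let owned = String::from("print \"hello\";");
        let shared: Rc<str> = owned.into();

        // Every further clone is just a reference-count bump.
        let for_token = Rc::clone(&shared);
        let for_literal = shared.clone();

        assert_eq!(Rc::strong_count(&shared), 3);
        assert_eq!(&*for_literal, "print \"hello\";");
    }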

@@ -22,7 +22,8 @@ fn do_ast_codegen<W: Write>(mut output: W) {
     )
     .unwrap();
-    define_imports(&mut output, &["crate::lex::Token"]).expect("failed to define imports");
+    define_imports(&mut output, &["crate::lex::Token", "std::rc::Rc"])
+        .expect("failed to define imports");
     writeln!(output).unwrap();
     define_literals(
@@ -30,7 +31,7 @@ fn do_ast_codegen<W: Write>(mut output: W) {
         "LiteralValue",
         &BTreeMap::from([
             ("Number", Some("f64")),
-            ("String", Some("String")),
+            ("String", Some("Rc<str>")),
             ("True", None),
             ("False", None),
             ("Nil", None),

@@ -25,6 +25,7 @@ impl Environment {
     const ROOT_KEY: u32 = 0;
     #[must_use]
+    #[allow(dead_code)]
    pub fn new() -> Self {
        Self::default()
    }

@@ -1,5 +1,8 @@
 use itertools::{FoldWhile, Itertools};
-use std::iter::{self, Peekable};
+use std::{
+    iter::{self, Peekable},
+    rc::Rc,
+};
 use thiserror::Error;
 use crate::ScriptError;
@@ -28,8 +31,8 @@ pub enum TokenKind {
     LessEqual,
     // Literals
-    Identifier(String),
-    String(String),
+    Identifier(Rc<str>),
+    String(Rc<str>),
     Number(f64),
     // Keywords
@@ -92,10 +95,6 @@ impl Token {
         &self.kind
     }
-    pub fn into_kind(self) -> TokenKind {
-        self.kind
-    }
     pub fn lexeme(&self) -> &str {
         &self.lexeme
     }
@@ -326,16 +325,19 @@ fn lex_remaining_string_literal<I: Iterator<Item = char>>(
         .into_inner();
     if end_found {
+        let lexeme = format!("\"{contents}\"");
+        // must include quotes
+        let length = contents.len() + 2;
         Ok(Consumed {
             token: Some(Token {
-                kind: TokenKind::String(contents.clone()),
-                lexeme: format!("\"{contents}\""),
+                kind: TokenKind::String(contents.into()),
+                lexeme,
                 line: start_line + newlines,
             }),
             span: Span {
                 lines: newlines,
-                // must include the quotes
-                chars: contents.len() + 2,
+                chars: length,
             },
         })
     } else {
@@ -401,6 +403,6 @@ fn tokenize_word(word: String) -> TokenKind {
         "true" => TokenKind::True,
         "var" => TokenKind::Var,
         "while" => TokenKind::While,
-        _other => TokenKind::Identifier(word),
+        _other => TokenKind::Identifier(word.into()),
     }
 }
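
The string-literal rework above computes the quoted lexeme and its length while the
lexer still owns the String, so the final contents.into() can move the buffer into the
Rc<str> instead of cloning it. A simplified, hypothetical stand-in for that flow (the
function name and return shape are illustrative, not the crate's real API):

    use std::rc::Rc;

    // Derive everything that needs the owned String first, then move it into
    // the shared Rc<str>; no clone of the character data is required.
    fn finish_string_literal(contents: String) -> (Rc<str>, String, usize) {
        let lexeme = format!("\"{contents}\"");
        // must include the surrounding quotes
        let length = contents.len() + 2;
        let value: Rc<str> = contents.into();
        (value, lexeme, length)
    }

    fn main() {
        let (value, lexeme, length) = finish_string_literal(String::from("hi"));
        assert_eq!(&*value, "hi");
        assert_eq!(lexeme, "\"hi\"");
        assert_eq!(length, 4);
    }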

@@ -81,7 +81,6 @@ fn parse<I: Iterator<Item = Token>, F: FnMut(ScriptError)>(
                 line: error.line.unwrap_or_default(),
                 location: String::new(),
             });
-            todo!("{}", error.message);
         }
     }
 }
@@ -329,17 +328,10 @@ fn parse_primary<I: Iterator<Item = Token>>(iter: &mut Peekable<I>) -> Result<Ex
             value: LiteralValue::Number(*number),
             token,
         }),
-        TokenKind::String(_) => {
-            // TODO: we used to do this with into_kind, but we need to change Token to Rc<str> to fix cloning issues
-            // special case to avoid cloning
-            match token.kind() {
-                TokenKind::String(string) => Ok(Expr::Literal {
-                    value: LiteralValue::String(string.clone()),
-                    token,
-                }),
-                _ => unreachable!(),
-            }
-        }
+        TokenKind::String(value) => Ok(Expr::Literal {
+            value: LiteralValue::String(value.clone()),
+            token,
+        }),
         TokenKind::LeftParen => {
             let expr = parse_expression(iter)?;
             match_token_kind!(iter, TokenKind::RightParen)
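
On the parser side, the match can now bind the Rc<str> by reference and clone it
directly, because that clone is a pointer copy rather than a copy of the character
data; the into_kind accessor and the unreachable! arm become unnecessary. A minimal,
hypothetical mirror of that shape (the enums and function here are made up for the
example, not the crate's real types):

    use std::rc::Rc;

    #[derive(Debug)]
    enum TokenKind {
        String(Rc<str>),
        Number(f64),
    }

    #[derive(Debug)]
    enum LiteralValue {
        String(Rc<str>),
        Number(f64),
    }

    // Cloning the Rc<str> out of a borrowed TokenKind is cheap, so no
    // by-value accessor or unreachable! fallback is needed.
    fn literal_value(kind: &TokenKind) -> LiteralValue {
        match kind {
            TokenKind::String(value) => LiteralValue::String(value.clone()),
            TokenKind::Number(number) => LiteralValue::Number(*number),
        }
    }

    fn main() {
        let kind = TokenKind::String("hello".into());
        println!("{:?}", literal_value(&kind));
    }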