some cleanup

This commit is contained in:
hendrik 2024-09-23 17:09:10 +02:00
parent 385e6281d4
commit 65c75564cf
6 changed files with 28 additions and 53 deletions

2
.gitignore vendored
View File

@@ -10,7 +10,7 @@ target/
*.pdb
# lox test file ans other
# lox test file and other
test.lox
other.lox
tarpaulin-report.html

View File

@@ -1,5 +1,3 @@
use std::env;
use crate::{
error::interpreter_error::{InterpreterError, InterpreterErrorEnum},
statements::fkt::FunctionDeclaration,

View File

@@ -48,53 +48,53 @@ impl<'a> Scanner<'a> {
let line = self.evaluator.line();
match c {
/* Single chars */
'(' => Ok(Token::new_op2(TokenType::LeftParen, "(".to_string(), line)),
'(' => Ok(Token::new(TokenType::LeftParen, "(".to_string(), line)),
')' => Ok(Token::new_op2(TokenType::RightParen, ")".to_string(), line)),
'{' => Ok(Token::new_op2(TokenType::LeftBrace, "{".to_string(), line)),
'}' => Ok(Token::new_op2(TokenType::RightBrace, "}".to_string(), line)),
',' => Ok(Token::new_op2(TokenType::Comma, ",".to_string(), line)),
'.' => Ok(Token::new_op2(TokenType::Dot, ".".to_string(), line)),
'-' => Ok(Token::new_op2(TokenType::Minus, "-".to_string(), line)),
'+' => Ok(Token::new_op2(TokenType::Plus, "+".to_string(), line)),
';' => Ok(Token::new_op2(TokenType::Semicolon, ";".to_string(), line)),
'*' => Ok(Token::new_op2(TokenType::Star, "*".to_string(), line)),
')' => Ok(Token::new(TokenType::RightParen, ")".to_string(), line)),
'{' => Ok(Token::new(TokenType::LeftBrace, "{".to_string(), line)),
'}' => Ok(Token::new(TokenType::RightBrace, "}".to_string(), line)),
',' => Ok(Token::new(TokenType::Comma, ",".to_string(), line)),
'.' => Ok(Token::new(TokenType::Dot, ".".to_string(), line)),
'-' => Ok(Token::new(TokenType::Minus, "-".to_string(), line)),
'+' => Ok(Token::new(TokenType::Plus, "+".to_string(), line)),
';' => Ok(Token::new(TokenType::Semicolon, ";".to_string(), line)),
'*' => Ok(Token::new(TokenType::Star, "*".to_string(), line)),
/* single chars or more */
'!' => {
if self.evaluator.match_and_advance('=').is_some() {
Ok(Token::new_op2(TokenType::BangEqual, "!=".to_string(), line))
Ok(Token::new(TokenType::BangEqual, "!=".to_string(), line))
} else {
Ok(Token::new_op2(TokenType::Bang, "!".to_string(), line))
Ok(Token::new(TokenType::Bang, "!".to_string(), line))
}
}
'=' => {
if self.evaluator.match_and_advance('=').is_some() {
Ok(Token::new_op2(
Ok(Token::new(
TokenType::EqualEqual,
"==".to_string(),
line,
))
} else {
Ok(Token::new_op2(TokenType::Equal, "=".to_string(), line))
Ok(Token::new(TokenType::Equal, "=".to_string(), line))
}
}
'<' => {
if self.evaluator.match_and_advance('=').is_some() {
Ok(Token::new_op2(TokenType::LessEqual, "<=".to_string(), line))
Ok(Token::new(TokenType::LessEqual, "<=".to_string(), line))
} else {
Ok(Token::new_op2(TokenType::Less, "<".to_string(), line))
Ok(Token::new(TokenType::Less, "<".to_string(), line))
}
}
'>' => {
if self.evaluator.match_and_advance('=').is_some() {
Ok(Token::new_op2(
Ok(Token::new(
TokenType::GreaterEqual,
">=".to_string(),
line,
))
} else {
Ok(Token::new_op2(TokenType::Greater, ">".to_string(), line))
Ok(Token::new(TokenType::Greater, ">".to_string(), line))
}
}
// comment
@@ -106,13 +106,13 @@ impl<'a> Scanner<'a> {
}
Ok(Token::no_op())
} else {
Ok(Token::new_op2(TokenType::Slash, "/".to_string(), line))
Ok(Token::new(TokenType::Slash, "/".to_string(), line))
}
}
' ' | '\r' | '\t' | '\n' => Ok(Token::no_op()),
'"' => {
if let Some(str) = self.evaluator.take_string() {
Ok(Token::new_op2(TokenType::String, str, line))
Ok(Token::new(TokenType::String, str, line))
} else {
Err(TokenEvaluationError::new(
EvaluationError::UnterminatedString,
@@ -122,7 +122,7 @@ impl<'a> Scanner<'a> {
}
c if c.is_ascii_digit() => {
if let Some(str) = self.evaluator.take_num() {
Ok(Token::new_op2(
Ok(Token::new(
TokenType::Number,
String::from(c) + &str,
line,
@@ -139,9 +139,9 @@ impl<'a> Scanner<'a> {
let parsed = self.evaluator.take_word();
let word = String::from(x) + &parsed;
if let Some(t_type) = TokenType::reserved(&word) {
Ok(Token::new_op2(t_type, word, line))
Ok(Token::new(t_type, word, line))
} else {
Ok(Token::new_op2(TokenType::Identifier, word, line))
Ok(Token::new(TokenType::Identifier, word, line))
}
} else {
Err(TokenEvaluationError::unex_char(x, line))

View File

@@ -7,7 +7,6 @@ pub struct Token {
t_type: TokenType,
lexeme: String,
line: usize,
//meta: MetaToken,
literal: Option<String>,
}
@@ -21,7 +20,7 @@ fn parse_num(val: String) -> String {
}
impl Token {
pub fn new(t_type: TokenType, lexeme: String, literal: String, line: usize) -> Self {
pub fn new_general(t_type: TokenType, lexeme: String, literal: String, line: usize) -> Self {
Self {
t_type,
lexeme,
@@ -54,7 +53,7 @@ impl Token {
self.line
}
pub fn new_op2(t_type: TokenType, literal: String, line: usize) -> Self {
pub fn new(t_type: TokenType, literal: String, line: usize) -> Self {
match t_type {
TokenType::String => Self {
t_type,
@@ -77,28 +76,6 @@ impl Token {
}
}
pub fn new_op(t_type: TokenType, literal: Option<String>, line: usize) -> Self {
match t_type {
TokenType::String => Self {
t_type,
lexeme: format!("\"{}\"", literal.clone().unwrap()),
line,
literal,
},
TokenType::Number => Self {
t_type,
lexeme: literal.clone().unwrap(),
line,
literal: Some(parse_num(literal.unwrap())),
},
_ => Self {
t_type,
lexeme: literal.unwrap(),
line,
literal: None,
},
}
}
}
impl Display for Token {

View File

@@ -14,7 +14,7 @@ impl TokenBag {
}
pub fn add_eof(&mut self, line: usize) {
self.add_token(Token::new_op2(TokenType::Eof, String::new(), line));
self.add_token(Token::new(TokenType::Eof, String::new(), line));
}
pub fn generate_output(&self) -> String {

View File

@@ -231,7 +231,7 @@ impl Debug for TokenType {
Self::Var => write!(f, "VAR"),
Self::While => write!(f, "WHILE"),
Self::Eof => write!(f, "EOF"),
Self::NoOp => write!(f, "EOF"),
Self::NoOp => write!(f, "NoOp"),
}
}
}