Chapter 17: Compiling Expressions done

Moritz Gmeiner 2023-01-31 22:54:12 +01:00
commit 1cca1494a4
20 changed files with 702 additions and 129 deletions

@@ -85,43 +85,43 @@ impl Lexer {
         let c = self.advance();

-        let token_type = match c {
-            '(' => Some(LeftParen),
-            ')' => Some(RightParen),
-            '{' => Some(LeftBrace),
-            '}' => Some(RightBrace),
-            ',' => Some(Comma),
-            '.' => Some(Dot),
-            '+' => Some(Plus),
-            '-' => Some(Minus),
-            ';' => Some(Semicolon),
-            '*' => Some(Star),
+        match c {
+            '(' => self.push_token(LeftParen),
+            ')' => self.push_token(RightParen),
+            '{' => self.push_token(LeftBrace),
+            '}' => self.push_token(RightBrace),
+            ',' => self.push_token(Comma),
+            '.' => self.push_token(Dot),
+            '+' => self.push_token(Plus),
+            '-' => self.push_token(Minus),
+            ';' => self.push_token(Semicolon),
+            '*' => self.push_token(Star),
             '!' => {
                 if self.consume('=') {
-                    Some(BangEqual)
+                    self.push_token(BangEqual)
                 } else {
-                    Some(Bang)
+                    self.push_token(Bang)
                 }
             }
             '=' => {
                 if self.consume('=') {
-                    Some(EqualEqual)
+                    self.push_token(EqualEqual)
                 } else {
-                    Some(Equal)
+                    self.push_token(Equal)
                 }
             }
             '<' => {
                 if self.consume('=') {
-                    Some(LessEqual)
+                    self.push_token(LessEqual)
                 } else {
-                    Some(Less)
+                    self.push_token(Less)
                 }
             }
             '>' => {
                 if self.consume('=') {
-                    Some(GreaterEqual)
+                    self.push_token(GreaterEqual)
                 } else {
-                    Some(Greater)
+                    self.push_token(Greater)
                 }
             }
             '/' => {
@@ -129,8 +129,6 @@ impl Lexer {
                     // line comment
                     // advance until either source is empty or newline if found
                     while !self.source_is_empty() && self.advance() != '\n' {}
-                    None
                 } else if self.consume('*') {
                     // block comment
@@ -166,28 +164,21 @@ impl Lexer {
                         self.advance();
                     }
-                    None
                 } else {
-                    Some(Slash)
+                    self.push_token(Slash)
                 }
             }
             '"' => self.try_parse_string(),
             '0'..='9' => self.try_parse_number(),
-            ' ' | '\r' | '\n' | '\t' => None, // handled automatically in advance()
+            ' ' | '\r' | '\n' | '\t' => {} // handled automatically in advance()
             c @ '_' | c if c.is_ascii_alphabetic() => self.try_parse_identifier(),
             _ => {
                 self.errors.push(LexerError::UnexpectedCharacter {
                     c,
                     code_pos: self.code_pos,
                 });
-                None
             }
-        };
-
-        if let Some(token_type) = token_type {
-            self.push_token(token_type);
-        }
+        }
     }

     fn source_is_empty(&self) -> bool {
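
The hunks above change the shape of the scanning step: instead of every arm of the match evaluating to an Option<TokenType> that gets pushed once after the match, each arm now emits its token immediately through push_token, and arms that produce nothing (whitespace, comments, errors) simply do nothing. The body of push_token appears as a context line in the next hunk; the scaffolding around it below is a toy stand-in for illustration, not the project's actual definitions.

    // Minimal sketch of the direct-push pattern. Apart from push_token's
    // body (visible as context in the diff), all type and field shapes here
    // are assumptions.
    #[derive(Clone, Copy)]
    struct CodePos {
        line: usize,
    }

    #[derive(Clone)]
    enum TokenType {
        LeftParen,
        RightParen,
        Plus,
        Minus,
    }

    struct Token {
        token_type: TokenType,
        code_pos: CodePos,
    }

    impl Token {
        fn new(token_type: TokenType, code_pos: CodePos) -> Self {
            Token { token_type, code_pos }
        }
    }

    struct Lexer {
        tokens: Vec<Token>,
        code_pos: CodePos,
    }

    impl Lexer {
        // Same body as the context line in the following hunk.
        fn push_token(&mut self, token_type: TokenType) {
            self.tokens.push(Token::new(token_type, self.code_pos));
        }

        fn scan_single(&mut self, c: char) {
            match c {
                '(' => self.push_token(TokenType::LeftParen),
                ')' => self.push_token(TokenType::RightParen),
                '+' => self.push_token(TokenType::Plus),
                '-' => self.push_token(TokenType::Minus),
                // Arms that produce no token do nothing at all.
                _ => {}
            }
        }
    }

This removes the Option round-trip and the trailing if let that previously unwrapped it at the end of the scan step.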
@@ -235,23 +226,24 @@ impl Lexer {
         self.tokens.push(Token::new(token_type, self.code_pos));
     }

-    fn try_parse_string(&mut self) -> Option<TokenType> {
+    fn try_parse_string(&mut self) {
         // advance until second "
         while self.advance() != '"' {
             if self.source_is_empty() {
                 self.errors.push(LexerError::UnterminatedStringLiteral {
                     code_pos: self.code_pos,
                 });
-                return None;
+                return;
             }
         }

         let string_literal = self.source[self.start + 1..self.current - 1].iter().collect();
-        Some(TokenType::String(string_literal))
+        // Some(TokenType::String(Box::new(string_literal)))
+        self.tokens.push(Token::new_string(string_literal, self.code_pos));
     }

-    fn try_parse_number(&mut self) -> Option<TokenType> {
+    fn try_parse_number(&mut self) {
         let is_some_digit = |c: Option<char>| c.map_or(false, |c| c.is_ascii_digit());

         // eat all digits
@@ -289,14 +281,15 @@ impl Lexer {
                     msg: err.to_string(),
                     code_pos: self.code_pos,
                 });
-                return None;
+                return;
             }
         };

-        Some(TokenType::Number(num))
+        // Some(TokenType::Number(num))
+        self.tokens.push(Token::new_number(num, self.code_pos));
     }

-    fn try_parse_identifier(&mut self) -> Option<TokenType> {
+    fn try_parse_identifier(&mut self) {
         let is_alpha_num_underscore =
             |c: Option<char>| c.map_or(false, |c| matches!(c, '0'..='9' | 'A'..='Z' | '_' | 'a'..='z'));
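
With these hunks, try_parse_string and try_parse_number push a finished Token themselves instead of returning a TokenType variant, using constructors such as Token::new_string and Token::new_number. Those constructors are not part of this diff; the sketch below is one plausible shape for them, with toy type definitions standing in for the project's own (the commented-out Box::new lines in the diff suggest the literal payload representation was still in flux).

    // Sketch only: Token, TokenType and CodePos are not defined in this diff,
    // so these stand-ins are assumptions made for illustration.
    #[derive(Clone, Copy)]
    struct CodePos {
        line: usize,
    }

    #[derive(Clone)]
    enum TokenType {
        String(String),
        Number(f64),
        Identifier(String),
    }

    struct Token {
        token_type: TokenType,
        code_pos: CodePos,
    }

    impl Token {
        fn new(token_type: TokenType, code_pos: CodePos) -> Self {
            Token { token_type, code_pos }
        }

        // The three constructors the new lexer code calls; wrapping the
        // literal in the matching TokenType variant is an assumption.
        fn new_string(value: String, code_pos: CodePos) -> Self {
            Token::new(TokenType::String(value), code_pos)
        }

        fn new_number(value: f64, code_pos: CodePos) -> Self {
            Token::new(TokenType::Number(value), code_pos)
        }

        fn new_identifier(name: String, code_pos: CodePos) -> Self {
            Token::new(TokenType::Identifier(name), code_pos)
        }
    }

With the constructors doing the wrapping, the parsing helpers only have to produce the raw literal and the current position.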
@@ -306,8 +299,18 @@ impl Lexer {
         let lexeme: String = self.source[self.start..self.current].iter().collect();

-        let token_type = KEYWORDS.get(&lexeme).cloned().unwrap_or(TokenType::Identifier(lexeme));
+        /* let token_type = KEYWORDS
+        .get(&lexeme)
+        .cloned()
+        .unwrap_or(TokenType::Identifier(Box::new(lexeme))); */

-        Some(token_type)
+        if let Some(&token_type) = KEYWORDS.get(&lexeme) {
+            // Token::new(token_type, self.code_pos)
+            self.push_token(token_type);
+        } else {
+            self.tokens.push(Token::new_identifier(lexeme, self.code_pos));
+        }
+        // Some(token_type)
     }
 }
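
The identifier path now consults the KEYWORDS table first and only builds an identifier token when the lexeme is not a keyword. KEYWORDS itself is referenced but never defined in this diff; the sketch below shows one common way such a table is built (here with the once_cell crate, purely as an assumption — the project may equally use lazy_static, phf, or a plain match), together with the keyword-or-identifier decision the new code makes.

    // Sketch only: KEYWORDS is not defined in this diff. The once_cell
    // dependency, the map's key/value types and the keyword variant names
    // are all assumptions made for illustration.
    use once_cell::sync::Lazy;
    use std::collections::HashMap;

    #[derive(Clone, Debug)]
    enum TokenType {
        And,
        Or,
        If,
        Else,
        Identifier(String),
    }

    static KEYWORDS: Lazy<HashMap<String, TokenType>> = Lazy::new(|| {
        HashMap::from([
            ("and".to_string(), TokenType::And),
            ("or".to_string(), TokenType::Or),
            ("if".to_string(), TokenType::If),
            ("else".to_string(), TokenType::Else),
            // ... remaining keywords
        ])
    });

    // The decision the new try_parse_identifier makes: a keyword hit yields
    // the keyword token, a miss yields an identifier carrying the lexeme.
    fn keyword_or_identifier(lexeme: String) -> TokenType {
        match KEYWORDS.get(&lexeme) {
            Some(keyword) => keyword.clone(),
            None => TokenType::Identifier(lexeme),
        }
    }

Because unwrap_or evaluates its argument eagerly, the removed line built a TokenType::Identifier on every lookup and threw it away whenever the lexeme was a keyword; the new if let only constructs the identifier token on a miss.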