Chapter 10 done

This commit is contained in:
Moritz Gmeiner 2023-01-25 19:01:13 +01:00
commit 46f1030207
16 changed files with 1173 additions and 201 deletions

View file

@@ -9,9 +9,10 @@ use super::{Token, TokenType};
static KEYWORDS: phf::Map<&'static str, TokenType> = phf_map! {
"and" => TokenType::And,
"break" => TokenType::Break,
"class" => TokenType::Class,
"else" => TokenType::Else,
"false" => TokenType::Else,
"false" => TokenType::False,
"for" => TokenType::For,
"fun" => TokenType::Fun,
"if" => TokenType::If,
@@ -232,7 +233,7 @@ impl Lexer {
}
fn push_token(&mut self, token_type: TokenType) {
-let lexeme: String = self.source[self.start..self.current].iter().collect();
+// let lexeme: String = self.source[self.start..self.current].iter().collect();
self.tokens.push(Token::new(token_type, self.code_pos));
}

View file

@@ -1,6 +1,6 @@
use crate::misc::CodePos;
-#[allow(dead_code)]
+#[allow(dead_code, clippy::upper_case_acronyms)]
#[derive(Debug, Clone, PartialEq)]
#[rustfmt::skip]
pub enum TokenType {
@@ -20,12 +20,13 @@ pub enum TokenType {
Number(f64),
// Keywords
-And, Class, Else, False, Fun, For, If, Nil, Or,
+And, Break, Class, Else, False, Fun, For, If, Nil, Or,
Print, Return, Super, This, True, Var, While,
EOF
}
#[derive(Clone)]
pub struct Token {
pub token_type: TokenType,
// pub lexeme: String,