(* Mirror of https://github.com/MorizzG/MLox.git
   synced 2025-12-06 04:22:41 +00:00 *)
(** The kind of a lexical token, carrying the literal payload where one
    exists ([Identifier], [String], [Number], [Comment]). *)
type token_type =
  | LeftParen
  | RightParen
  | LeftBrace
  | RightBrace
  | Plus
  | Minus
  | Star
  | Slash
  | Bang
  | Dot
  | Comma
  | Semicolon
  | Equal
  | EqualEqual
  | BangEqual
  | Greater
  | Less
  | GreaterEqual
  | LessEqual
  | Identifier of string
  | String of string
  | Number of float
  | And
  | Class
  | Else
  | False
  | Fun
  | For
  | If
  | Nil
  | Or
  | Print
  | Return
  | Super
  | This
  | True
  | Var
  | While
  | Comment of string
  | Eof
(** Pretty-printer for {!token_type}, usable with [Format]'s [%a]. *)
val pp_token_type : Format.formatter -> token_type -> unit
(** [show_token_type tt] converts [tt] to a human-readable string. *)
val show_token_type : token_type -> string
(** Lookup table from keyword lexemes to their {!token_type}
    (presumably the Lox reserved words — [And] through [While]).
    NOTE(review): exposing the [Hashtbl.t] itself lets callers mutate
    it; a [keyword_of_string : string -> token_type option] wrapper
    would be safer. *)
val keywords : (string, token_type) Hashtbl.t
(** A lexed token: its kind paired with its position in the source. *)
type token = { token_type : token_type; pos : Error.code_pos }
(** [show_token t] converts [t] to a human-readable string. *)
val show_token : token -> string
(** Outcome of lexing: the complete token list on success, or every
    lexer error collected along the way on failure. *)
type lexer_result = (token list, Error.lexer_error list) result
(* type state = {
     source : string;
     start_pos : int;
     cur_pos : int;
     tokens_rev : token list;
     errors_rev : Error.lexer_error list;
     line : int;
     col : int;
   }

   module State : sig
     type t = state

     val is_digit : char -> bool
     val is_alpha : char -> bool
     val is_alphanum : char -> bool
     val is_identifier : char -> bool
     val is_at_end : state -> bool
     val get_lexeme : state -> int -> int -> string
     val advance : state -> char * state
     val peek : state -> char option
     val advance_if : char -> state -> bool * state
     val advance_until : char -> state -> bool * state
     val advance_while : (char -> bool) -> state -> state
     val last_char : state -> char
     val append_token : Error.code_pos -> token_type -> state -> state
     val append_error : Error.code_pos -> string -> state -> state
     val parse_number : state -> state
     val parse_keyword_or_identifier : state -> state
     val parse_block_commend : state -> state
     val tokenize_rec : state -> state
   end *)
(** [tokenize source] lexes the whole of [source], returning either the
    token list or the accumulated lexer errors. *)
val tokenize : string -> lexer_result