author     s-over-4  2023-06-13 15:53:40 -0400
committer  s-over-4  2023-06-13 15:53:40 -0400
commit     cadd9f414fada14319a5950e67721724cfd5089d (patch)
tree       332d2f2cab87051b425fb88a158ea01a05e574cd
parent     0410bab11f4be35c22916935509a62a7e7bd8bd2 (diff)
broken
-rwxr-xr-x  halk                 bin  30992 -> 30864 bytes
-rw-r--r--  src/include/lexer.h    2
-rw-r--r--  src/lexer.c          107
3 files changed, 19 insertions, 90 deletions
diff --git a/halk b/halk
Binary files differ
diff --git a/src/include/lexer.h b/src/include/lexer.h
index 853a35c..f9db17e 100644
--- a/src/include/lexer.h
+++ b/src/include/lexer.h
@@ -23,7 +23,7 @@ extern void lexer_destroy (lexer_t* lexer);
 extern void lexer_next (lexer_t* lexer);
 extern void lexer_pass (lexer_t* lexer);
 extern token_t* lexer_get_next_token (lexer_t* lexer); // chars -> tokens
-extern token_t* lexer_next_token (lexer_t* lexer, token_t* token);
+extern token_t* lexer_next_token (lexer_t* lexer, int token_type);
 extern char* lexer_get_c_as_string (lexer_t* lexer);
 
 // collectors
diff --git a/src/lexer.c b/src/lexer.c
index aca05e3..f4907eb 100644
--- a/src/lexer.c
+++ b/src/lexer.c
@@ -38,17 +38,9 @@ void lexer_pass(lexer_t* lexer) {
 
 token_t* lexer_get_next_token(lexer_t* lexer) {
 	while (LEXER_VALID) {
-		if (char_can_ignore(&lexer->c)) { lexer_pass(lexer); }
-		if (char_could_start_int(&lexer->c)) {
-			return lexer_next_token(
-				lexer,
-				token_init(
-					TOKEN_PRIM_INT,
-					lexer_get_c_as_string(lexer)
-				)
-			);
-		}
-		if (char_could_start_keyword(&lexer->c)) { return lexer_get_keyword(lexer); }
+		if (char_can_ignore(&lexer->c)) { lexer_pass(lexer); }
+		if (char_could_start_int(&lexer->c)) { return lexer_next_token(lexer, TOKEN_PRIM_INT); }
+		if (char_could_start_keyword(&lexer->c)) { return lexer_get_keyword(lexer); }
 
 		switch (lexer->c) {
 			case '\'':
@@ -58,95 +50,37 @@ token_t* lexer_get_next_token(lexer_t* lexer) {
 				return lexer_collect(lexer, '`', 1, 1, TOKEN_COMM);
 				break;
 			case ';':
-				return lexer_next_token(
-					lexer,
-					token_init(
-						TOKEN_EXPR_END,
-						lexer_get_c_as_string(lexer)
-					)
-				);
+				return lexer_next_token(lexer, TOKEN_EXPR_END);
 				break;
 			case '=':
-				return lexer_next_token(
-					lexer,
-					token_init(
-						TOKEN_DEF_SET,
-						lexer_get_c_as_string(lexer)
-					)
-				);
+				return lexer_next_token(lexer, TOKEN_DEF_SET);
+				break;
+			case '(':
+				return lexer_next_token(lexer, TOKEN_LGROUP);
 				break;
-			case '(': return lexer_next_token(
-					lexer,
-					token_init(
-						TOKEN_LGROUP,
-						lexer_get_c_as_string(lexer)
-					)
-				);
-				break;
 			case ')':
-				return lexer_next_token(
-					lexer,
-					token_init(
-						TOKEN_RGROUP,
-						lexer_get_c_as_string(lexer)
-					)
-				);
+				return lexer_next_token(lexer, TOKEN_RGROUP);
 				break;
 			case '#':
 				return lexer_collect(lexer, '#', 1, 1, TOKEN_DIRECTIVE);
 				break;
 			case '.':
-				return lexer_next_token(
-					lexer,
-					token_init(
-						TOKEN_FN_APPLY,
-						lexer_get_c_as_string(lexer)
-					)
-				);
+				return lexer_next_token(lexer, TOKEN_FN_APPLY);
 				break;
 			case ',':
-				return lexer_next_token(
-					lexer,
-					token_init(
-						TOKEN_LIST_DELIM,
-						lexer_get_c_as_string(lexer)
-					)
-				);
+				return lexer_next_token(lexer, TOKEN_LIST_DELIM);
 				break;
 			case ':':
-				return lexer_next_token(
-					lexer,
-					token_init(
-						TOKEN_DEF_TAGS_DELIM,
-						lexer_get_c_as_string(lexer)
-					)
-				);
+				return lexer_next_token(lexer, TOKEN_DEF_TAGS_DELIM);
 				break;
 			case '/':
-				return lexer_next_token(
-					lexer,
-					token_init(
-						TOKEN_NAMESPACE_DELIM,
-						lexer_get_c_as_string(lexer)
-					)
-				);
+				return lexer_next_token(lexer, TOKEN_NAMESPACE_DELIM);
 				break;
 			case '{':
-				return lexer_next_token(
-					lexer,
-					token_init(
-						TOKEN_BLOCK_DELIM_START,
-						lexer_get_c_as_string(lexer)
-					)
-				);
+				return lexer_next_token(lexer, TOKEN_BLOCK_DELIM_START);
 				break;
 			case '}':
-				return lexer_next_token(
-					lexer,
-					token_init(
-						TOKEN_BLOCK_DELIM_END,
-						lexer_get_c_as_string(lexer)
-					)
-				);
+				return lexer_next_token(lexer, TOKEN_BLOCK_DELIM_END);
 				break;
 			case '[':
 				return lexer_collect(lexer, ']', 1, 1, TOKEN_PRIM_STR);
@@ -155,20 +89,15 @@ token_t* lexer_get_next_token(lexer_t* lexer) {
 				return token_init(TOKEN_EOF, lexer_get_c_as_string(lexer));
 				break;
 			default:
-				return lexer_next_token(
-					lexer,
-					token_init(
-						TOKEN_UNKNOWN,
-						lexer_get_c_as_string(lexer)
-					)
-				);
+				return lexer_next_token(lexer, TOKEN_UNKNOWN);
 		}
 	}
 
 	return NULL;
 }
 
-token_t* lexer_next_token(lexer_t* lexer, token_t* token) {
+token_t* lexer_next_token(lexer_t* lexer, int token_type) {
+	token_t* token = token_init(token_type, lexer_get_c_as_string(lexer));
 	lexer_next(lexer);
 	return token;
 }
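
In effect, the commit moves token construction out of every call site and into lexer_next_token itself: callers now pass a bare token type, and the helper builds the token from the current character before advancing. Below is a minimal, self-contained sketch of that new call shape; the struct layouts, the TOKEN_EXPR_END value, and the lexer_next stub are stand-ins for illustration, not the repository's actual definitions.

#include <stdio.h>
#include <stdlib.h>

/* Stand-in definitions; the real token_t, lexer_t, and TOKEN_*
   constants live in src/include/. */
typedef struct { int type; char* value; } token_t;
typedef struct { const char* src; int i; char c; } lexer_t;

#define TOKEN_EXPR_END 1 /* hypothetical value */

token_t* token_init(int type, char* value) {
	token_t* token = malloc(sizeof(token_t));
	token->type = type;
	token->value = value;
	return token;
}

/* Current character as a one-char heap string. */
char* lexer_get_c_as_string(lexer_t* lexer) {
	char* s = calloc(2, 1);
	s[0] = lexer->c;
	return s;
}

/* Advance to the next input character (stubbed). */
void lexer_next(lexer_t* lexer) {
	if (lexer->src[lexer->i] != '\0') lexer->c = lexer->src[++lexer->i];
}

/* The refactored helper: build the token from the current character,
   then advance past it. */
token_t* lexer_next_token(lexer_t* lexer, int token_type) {
	token_t* token = token_init(token_type, lexer_get_c_as_string(lexer));
	lexer_next(lexer);
	return token;
}

int main(void) {
	lexer_t lexer = { ";x", 0, ';' };
	/* Old call shape: lexer_next_token(lexer, token_init(TOKEN_EXPR_END,
	   lexer_get_c_as_string(lexer))); the new one is just: */
	token_t* t = lexer_next_token(&lexer, TOKEN_EXPR_END);
	printf("type=%d value=\"%s\" next_c='%c'\n", t->type, t->value, lexer.c);
	free(t->value);
	free(t);
	return 0;
}

Besides shortening each case to one line, centralizing the token_init call means the "stringify current char, wrap it in a token, advance" sequence exists in exactly one place, so a future change to token construction touches one function instead of a dozen switch arms.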