author	s-over-4	2023-06-12 23:13:07 -0400
committer	s-over-4	2023-06-12 23:13:07 -0400
commit	0abfc49851d60894e0fb9de6b5527a57766bd075 (patch)
tree	425561b96c5bcfde12c6189724243c77d0674ae6 /src
parent	363742d74864f0883fd8ee081f6ab2ba3ea71ac0 (diff)
fixed
Diffstat (limited to 'src')
-rw-r--r--	src/include/token.h	1
-rw-r--r--	src/lexer.c	147
-rw-r--r--	src/main.c	2
3 files changed, 83 insertions, 67 deletions
diff --git a/src/include/token.h b/src/include/token.h
index 865ba4e..dd0c42f 100644
--- a/src/include/token.h
+++ b/src/include/token.h
@@ -33,6 +33,7 @@ typedef struct TOKEN_STRUC {
TOKEN_ARRAY_DELIM_START, // [
TOKEN_ARRAY_DELIM_END, // ]
TOKEN_DEF_SET, // =
+ TOKEN_UNKNOWN, // ???
TOKEN_EOF, // \0
} type;
diff --git a/src/lexer.c b/src/lexer.c
index 838a907..b2f9894 100644
--- a/src/lexer.c
+++ b/src/lexer.c
@@ -59,21 +59,22 @@ token_t* lexer_get_next_token(lexer_t* lexer) {
break;
case ';':
return lexer_next_token(
- lexer,
- token_init(
- TOKEN_EXPR_END,
- lexer_get_c_as_string(lexer)
- )
- );
+ lexer,
+ token_init(
+ TOKEN_EXPR_END,
+ lexer_get_c_as_string(lexer)
+ )
+ );
break;
case '=':
return lexer_next_token(
- lexer,
- token_init(
- TOKEN_DEF_SET,
- lexer_get_c_as_string(lexer)
- )
- ); break;
+ lexer,
+ token_init(
+ TOKEN_DEF_SET,
+ lexer_get_c_as_string(lexer)
+ )
+ );
+ break;
case '(': return lexer_next_token(
lexer,
token_init(
@@ -81,66 +82,80 @@ token_t* lexer_get_next_token(lexer_t* lexer) {
lexer_get_c_as_string(lexer)
)
); break;
- case ')': return lexer_next_token(
- lexer,
- token_init(
- TOKEN_RGROUP,
- lexer_get_c_as_string(lexer)
- )
- ); break;
+ case ')':
+ return lexer_next_token(
+ lexer,
+ token_init(
+ TOKEN_RGROUP,
+ lexer_get_c_as_string(lexer)
+ )
+ );
+ break;
case '#':
return lexer_collect(lexer, '#', 1, 1, TOKEN_DIRECTIVE);
break;
- case '.': return lexer_next_token(
- lexer,
- token_init(
- TOKEN_FN_APPLY,
- lexer_get_c_as_string(lexer)
- )
- ); break;
- case ',': return lexer_next_token(
- lexer,
- token_init(
- TOKEN_LIST_DELIM,
- lexer_get_c_as_string(lexer)
- )
- ); break;
- case ':': return lexer_next_token(
- lexer,
- token_init(
- TOKEN_DEF_TAGS_DELIM,
- lexer_get_c_as_string(lexer)
- )
- ); break;
- case '/': return lexer_next_token(
- lexer,
- token_init(
- TOKEN_NAMESPACE_DELIM,
- lexer_get_c_as_string(lexer)
- )
- ); break;
- case '{': return lexer_next_token(
- lexer,
- token_init(
- TOKEN_BLOCK_DELIM_START,
- lexer_get_c_as_string(lexer)
- )
- ); break;
- case '}': return lexer_next_token(
- lexer,
- token_init(
- TOKEN_BLOCK_DELIM_END,
- lexer_get_c_as_string(lexer)
- )
- ); break;
+ case '.':
+ return lexer_next_token(
+ lexer,
+ token_init(
+ TOKEN_FN_APPLY,
+ lexer_get_c_as_string(lexer)
+ )
+ );
+ break;
+ case ',':
+ return lexer_next_token(
+ lexer,
+ token_init(
+ TOKEN_LIST_DELIM,
+ lexer_get_c_as_string(lexer)
+ )
+ );
+ break;
+ case ':':
+ return lexer_next_token(
+ lexer,
+ token_init(
+ TOKEN_DEF_TAGS_DELIM,
+ lexer_get_c_as_string(lexer)
+ )
+ );
+ break;
+ case '/':
+ return lexer_next_token(
+ lexer,
+ token_init(
+ TOKEN_NAMESPACE_DELIM,
+ lexer_get_c_as_string(lexer)
+ )
+ );
+ break;
+ case '{':
+ return lexer_next_token(
+ lexer,
+ token_init(
+ TOKEN_BLOCK_DELIM_START,
+ lexer_get_c_as_string(lexer)
+ )
+ );
+ break;
+ case '}':
+ return lexer_next_token(
+ lexer,
+ token_init(
+ TOKEN_BLOCK_DELIM_END,
+ lexer_get_c_as_string(lexer)
+ )
+ );
+ break;
case '[':
return lexer_collect(lexer, ']', 1, 1, TOKEN_PRIM_STR);
break;
- case '\0': return token_init(TOKEN_EOF, lexer_get_c_as_string(lexer)); break;
+ case '\0':
+ return token_init(TOKEN_EOF, lexer_get_c_as_string(lexer));
+ break;
default:
- log_err("Unrecognized token");
- printf("%s", &lexer->c);
- exit(1);
+ return token_init(TOKEN_UNKNOWN, lexer_get_c_as_string(lexer));
}
}
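
With the new default case, the lexer no longer logs and calls exit(1) when it meets a character it cannot classify; it returns a TOKEN_UNKNOWN token and leaves the decision to the caller. A minimal caller-side sketch of what that could look like (the variable names and the error message are assumptions, not code from this repository; token_t is only assumed to expose the `type` field declared in src/include/token.h):

    /* Hypothetical caller-side check; `lexer` is an already-initialized
       lexer_t*. Report the stray character but keep the process alive. */
    token_t* tok = lexer_get_next_token(lexer);
    if (tok->type == TOKEN_UNKNOWN) {
        log_err("Unrecognized token");   /* no exit(1) anymore */
    }

Note that, unlike the single-character cases above, the default branch returns without going through lexer_next_token, so if that helper is what advances the input, the caller may need to step past the offending character itself.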
@@ -165,7 +180,7 @@ char* lexer_get_c_as_string(lexer_t* lexer) {
token_t* lexer_collect(lexer_t* lexer, char end_char, int fskip, int lskip, int type) {
if (fskip) { lexer_next(lexer); }
- size_t len = 0; // length of collected token so far
+ size_t len = 1; // length of collected token so far
char* token = calloc(len, sizeof(char));
token[0] = '\0';
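
The len change matters because of the two context lines right after it: with len == 0, calloc(0, sizeof(char)) returns either NULL or a block that may not be dereferenced, so the existing token[0] = '\0' write was out of bounds; starting at 1 reserves space for the terminator from the start. A standalone sketch of the same grow-by-one, always-terminated buffer pattern (the append loop is illustrative only; the rest of lexer_collect is not shown in this hunk):

    #include <stdlib.h>

    /* Collect characters up to (not including) end_char into a freshly
       allocated, NUL-terminated string. Mirrors the fixed starting length. */
    char* collect_demo(const char* src, char end_char) {
        size_t len = 1;                              /* room for '\0' up front */
        char* token = calloc(len, sizeof(char));
        if (!token) return NULL;
        token[0] = '\0';
        for (; *src && *src != end_char; src++) {
            char* grown = realloc(token, ++len);     /* one more byte per char */
            if (!grown) { free(token); return NULL; }
            token = grown;
            token[len - 2] = *src;                   /* append the character   */
            token[len - 1] = '\0';                   /* keep it terminated     */
        }
        return token;
    }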
diff --git a/src/main.c b/src/main.c
index 040ef73..0af449b 100644
--- a/src/main.c
+++ b/src/main.c
@@ -13,7 +13,7 @@ int main(int argc, char* argv[]) {
long fsource_size;
char *source;
- fsource = fopen ("examples/hello.halk", "rb");
+ fsource = fopen("examples/hello.halk", "rb");
if (!fsource) {
log_err("Source file not found");
exit(1);
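
For context, the lines around this hunk declare fsource_size and source, which points at the usual fopen/fseek/ftell/fread way of pulling the whole source file into memory. A minimal sketch of that pattern using the same names (only the fopen call and the not-found branch are actually in this diff; the rest is an assumption about the omitted lines, and log_err is the project's own logger):

    FILE* fsource = fopen("examples/hello.halk", "rb");
    if (!fsource) {
        log_err("Source file not found");
        exit(1);
    }
    fseek(fsource, 0, SEEK_END);                 /* find the file size...   */
    long fsource_size = ftell(fsource);
    rewind(fsource);                             /* ...then rewind and read */
    char* source = malloc(fsource_size + 1);
    fread(source, 1, (size_t)fsource_size, fsource);
    source[fsource_size] = '\0';                 /* treat it as a C string  */
    fclose(fsource);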