author     s-over-4    2023-06-13 17:14:56 -0400
committer  s-over-4    2023-06-13 17:14:56 -0400
commit     a85c68d5fa2ff2a55fe648fe84f0d7833ee5d72b
tree       e29e75eca5897e6aca83363006346bb04f9b6ef3
parent     cadd9f414fada14319a5950e67721724cfd5089d
gecc
-rw-r--r--  examples/simple.halk  |  2
-rwxr-xr-x  halk                  |  bin (30864 -> 30720 bytes)
-rw-r--r--  src/include/token.h   | 14
-rw-r--r--  src/lexer.c           |  8
-rw-r--r--  src/main.c            |  5
5 files changed, 13 insertions, 16 deletions
diff --git a/examples/simple.halk b/examples/simple.halk
index 8a3de60..e424ad8 100644
--- a/examples/simple.halk
+++ b/examples/simple.halk
@@ -1 +1 @@
-:str:variable = 'Hi.';
+:str:var = 'Hello';
diff --git a/halk b/halk
index 02857b1..bb60538 100755
--- a/halk
+++ b/halk
Binary files differ
diff --git a/src/include/token.h b/src/include/token.h
index dd0c42f..a42b81f 100644
--- a/src/include/token.h
+++ b/src/include/token.h
@@ -13,25 +13,21 @@
typedef struct TOKEN_STRUC {
enum TOKEN_ENUM {
TOKEN_KEYWORD, // keyword
- TOKEN_PRIM_STR_DELIM, // '
TOKEN_PRIM_STR, // 'string'
TOKEN_PRIM_INT, // 42
- TOKEN_COMM_DELIM, // `
TOKEN_COMM, // `comment`
TOKEN_EXPR_END, // ;
TOKEN_LGROUP, // (
TOKEN_RGROUP, // )
- TOKEN_DIRECTIVE_DELIM, // #
TOKEN_DIRECTIVE, // #DIRECTIVE#
TOKEN_FN_APPLY, // .
TOKEN_LIST_DELIM, // ,
- TOKEN_DEF_TAGS_DELIM, // :
- TOKEN_DEF, // def:def
- TOKEN_BLOCK_DELIM_START, // {
- TOKEN_BLOCK_DELIM_END, // }
+ TOKEN_DEF_TAG, // def:def
+ TOKEN_BLOCK_START, // {
+ TOKEN_BLOCK_END, // }
TOKEN_NAMESPACE_DELIM, // /
- TOKEN_ARRAY_DELIM_START, // [
- TOKEN_ARRAY_DELIM_END, // ]
+ TOKEN_ARRAY_START, // [
+ TOKEN_ARRAY_END, // ]
TOKEN_DEF_SET, // =
TOKEN_UNKNOWN, // ???
TOKEN_EOF, // \0
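
Review note: this hunk folds each delimiter/content token pair into a single content token, so the lexer is now expected to consume the surrounding punctuation itself. A rough illustration of what that means for the updated examples/simple.halk, assuming TOKEN_KEYWORD covers plain identifiers (not confirmed by this diff):

    /* input from examples/simple.halk: ":str:var = 'Hello';" */
    /* assumed token stream after this change:                */
    /*   TOKEN_DEF_TAG   "str"                                */
    /*   TOKEN_KEYWORD   "var"                                */
    /*   TOKEN_DEF_SET   "="                                  */
    /*   TOKEN_PRIM_STR  "Hello"                              */
    /*   TOKEN_EXPR_END  ";"                                  */
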
diff --git a/src/lexer.c b/src/lexer.c
index f4907eb..6cde958 100644
--- a/src/lexer.c
+++ b/src/lexer.c
@@ -71,16 +71,16 @@ token_t* lexer_get_next_token(lexer_t* lexer) {
return lexer_next_token(lexer, TOKEN_LIST_DELIM);
break;
case ':':
- return lexer_next_token(lexer, TOKEN_DEF_TAGS_DELIM);
+ return lexer_collect(lexer, ':', 1, 1, TOKEN_DEF_TAG);
break;
case '/':
return lexer_next_token(lexer, TOKEN_NAMESPACE_DELIM);
break;
case '{':
- return lexer_next_token(lexer, TOKEN_BLOCK_DELIM_START);
+ return lexer_next_token(lexer, TOKEN_BLOCK_START);
break;
case '}':
- return lexer_next_token(lexer, TOKEN_BLOCK_DELIM_END);
+ return lexer_next_token(lexer, TOKEN_BLOCK_END);
break;
case '[':
return lexer_collect(lexer, ']', 1, 1, TOKEN_PRIM_STR);
@@ -115,7 +115,7 @@ char* lexer_get_c_as_string(lexer_t* lexer) {
token_t* lexer_collect(lexer_t* lexer, char end_char, int fskip, int lskip, int type) {
if (fskip) { lexer_next(lexer); }
- size_t len = 1; // length of collected token so far
+ size_t len = 0; // length of collected token so far
char* token = calloc(len, sizeof(char));
token[0] = '\0';
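
Review note: with len changed to 0, calloc(0, sizeof(char)) may return either NULL or a zero-byte block, so the following token[0] = '\0' writes past the end of the allocation. A minimal sketch of one way to keep an empty, NUL-terminated buffer (an assumption about the intent, not the fix applied in this commit):

    size_t len = 0;                               /* characters collected so far          */
    char* token = calloc(len + 1, sizeof(char));  /* always reserve one byte for the NUL  */
    if (token == NULL) { return NULL; }           /* hypothetical allocation-failure path */
    token[len] = '\0';                            /* token starts as the empty string     */
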
diff --git a/src/main.c b/src/main.c
index 3b346de..37f37bf 100644
--- a/src/main.c
+++ b/src/main.c
@@ -11,9 +11,9 @@ int main(int argc, char* argv[]) {
long fsource_size;
char *source;
- fsource = fopen("examples/hello.halk", "rb");
+ fsource = fopen("examples/simple.halk", "rb");
if (!fsource) {
- die("source file not found: %s", "examples/hello.halk");
+ die("source file not found");
};
fseek(fsource, 0L, SEEK_END);
@@ -46,6 +46,7 @@ int main(int argc, char* argv[]) {
while ((token = lexer_get_next_token(lexer)) != NULL) {
log_inf("token type: [%d]\ttoken value: [%s]", token->type, token->value);
+ free(token);
}
fclose(fsource);
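
Review note: free(token) releases the token struct itself; if token->value points at a buffer allocated in lexer_collect (as the earlier hunk suggests), that buffer would still leak. A hedged sketch of a hypothetical token_free helper, assuming value is always either heap-allocated or NULL (not confirmed by this diff):

    /* hypothetical helper, not part of this commit */
    void token_free(token_t* token) {
        if (token == NULL) { return; }
        free(token->value);  /* assumes value is heap-allocated or NULL */
        free(token);
    }

The loop in main.c would then call token_free(token) in place of the bare free(token).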