author     s-over-4   2023-06-21 16:34:22 -0400
committer  s-over-4   2023-06-21 16:34:22 -0400
commit     46f4397bf0718fbaf2cee5b4ffd56d4a36f2c4d8 (patch)
tree       9e582eef34f67fd47117445e22a316e6c855ad5d /src
parent     069204d6151326acc77bfd1f9072bd19882b948a (diff)
NO MORE MEMORY LEAKS :D
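
The leak fixes below follow one rule: every allocation gets exactly one owner. A token owns the value string handed to token_init(), so the new token_destroy() frees both the string and the struct; lexer_collect() frees each temporary character string once it has been copied into the token buffer; and lexer_destroy() no longer frees content the lexer never owned, since main.c frees the source buffer it allocated. A minimal sketch of that ownership pattern follows, with a simplified token type and an illustrative token_init() body (the real definitions live in src/token.c and src/include/token.h, and only part of token_init() appears in this diff):

    #include <stdlib.h>
    #include <string.h>

    /* simplified stand-in for struct TOKEN_STRUC */
    typedef struct TOKEN_STRUC { int type; char* value; } token_t;

    token_t* token_init(int type, char* val) {
        token_t* token = calloc(1, sizeof(struct TOKEN_STRUC));
        token->type  = type;
        token->value = val;            /* the token takes ownership of val */
        return token;
    }

    void token_destroy(token_t* token) {
        free(token->value);            /* release the owned string first */
        free(token);                   /* then the struct itself */
    }

    int main(void) {
        /* every init is paired with exactly one destroy, as in main.c's loop */
        char* val = malloc(8);
        strcpy(val, "example");
        token_t* token = token_init(0, val);
        token_destroy(token);
        return 0;
    }

Callers such as the tokenizing loop in main.c now call token_destroy(token) once per token instead of a bare free(token), which is what previously leaked token->value.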
Diffstat (limited to 'src')
-rw-r--r--  src/include/lexer.h   1
-rw-r--r--  src/include/token.h   1
-rw-r--r--  src/lexer.c           5
-rw-r--r--  src/main.c           45
-rw-r--r--  src/token.c           6
5 files changed, 29 insertions, 29 deletions
diff --git a/src/include/lexer.h b/src/include/lexer.h
index 200be5d..a65f119 100644
--- a/src/include/lexer.h
+++ b/src/include/lexer.h
@@ -34,5 +34,4 @@ extern token_t* lexer_collect (lexer_t* lexer, int (*end_char)(ch
extern token_t* lexer_get_directive (lexer_t* lexer);
extern token_t* lexer_get_keyword (lexer_t* lexer);
-
#endif
diff --git a/src/include/token.h b/src/include/token.h
index 5fe9d35..a2a47bc 100644
--- a/src/include/token.h
+++ b/src/include/token.h
@@ -51,5 +51,6 @@ int token_char_pound(char c);
int token_char_colon(char c);
int token_char_kywrd(char c);
+void token_destroy(token_t* token);
#endif
diff --git a/src/lexer.c b/src/lexer.c
index 9734f99..776b043 100644
--- a/src/lexer.c
+++ b/src/lexer.c
@@ -18,7 +18,6 @@ lexer_t* lexer_init(char* content) {
}
void lexer_destroy(lexer_t* lexer) {
- free(lexer->content);
free(lexer);
}
@@ -92,7 +91,7 @@ token_t* lexer_get_next_token(lexer_t* lexer) {
return token_init(TOKEN_EOF, lexer_get_c_as_string(lexer));
break;
default:
- return lexer_next_token(lexer, TOKEN_UNKNOWN);
+ return token_init(TOKEN_UNKNOWN, lexer_get_c_as_string(lexer));
}
}
@@ -132,6 +131,8 @@ token_t* lexer_collect(lexer_t* lexer, int (*end_char)(char), int fskip, int lsk
memcpy(token + len, current, sizeof(char) * strlen(current));
len += sizeof(char) * strlen(current);
lexer_next(lexer);
+
+ free(current);
}
if (lskip) { lexer_next(lexer); }
diff --git a/src/main.c b/src/main.c
index 915078f..a054b16 100644
--- a/src/main.c
+++ b/src/main.c
@@ -7,48 +7,43 @@
int main(int argc, char* argv[]) {
- FILE* fsource;
- long fsource_size;
- char* source;
+ FILE* fsource;
+ long fsource_size;
+ char* source;
+ lexer_t* lexer;
+ int in_file;
fsource = fopen(argv[1], "rb");
- if (!fsource) {
- die("source file not found");
- };
-
+ if (!fsource) { free(fsource); die("source file not found"); };
fseek(fsource, 0L, SEEK_END);
fsource_size = ftell(fsource);
rewind(fsource);
-
source = calloc(1, fsource_size + 1);
-
- if (!source) {
- fclose(fsource);
- die("calloc failed");
- }
-
- if (1 != fread(source, fsource_size, 1, fsource)) {
- fclose(fsource);
- free(source);
- die("could not read source");
- }
-
+ if (!source) { fclose(fsource); free(source); die("calloc failed"); }
+ if (1 != fread(source, fsource_size, 1, fsource)) { fclose(fsource); free(source); die("could not read source"); }
log_inf("source file loaded");
- lexer_t* lexer = lexer_init(source);
+ lexer = lexer_init(source);
log_inf("lexer created");
log_inf("BEGIN INPUT");
log_raw(lexer->content);
log_inf("END INPUT");
- token_t* token = NULL;
+ in_file = 1;
+
+ while (in_file) {
+ token_t* token;
+
+ token = lexer_get_next_token(lexer);
+ log_inf("token type: [%02d]\t\ttoken value: [%s]", token->type, token->value);
+ (token->type == TOKEN_EOF) && (in_file = 0);
- while ((token = lexer_get_next_token(lexer)) != NULL) {
- log_inf("token type: [%s]\t\ttoken value: [%s]", token_get_type(token->type), token->value);
- free(token);
+ token_destroy(token);
}
+ // clean up
+ lexer_destroy(lexer);
fclose(fsource);
free(source);
diff --git a/src/token.c b/src/token.c
index b89096a..9b23810 100644
--- a/src/token.c
+++ b/src/token.c
@@ -2,7 +2,6 @@
#include "include/token.h"
-
// token constructor
token_t* token_init(int type, char* val) {
token_t* token = calloc(1, sizeof(struct TOKEN_STRUC));
@@ -126,3 +125,8 @@ int token_char_grave(char c) { return (c != '`'); }
int token_char_pound(char c) { return (c != '#'); }
int token_char_colon(char c) { return (c != ':'); }
int token_char_kywrd(char c) { return (char_could_split_keyword(&c)); }
+
+void token_destroy(token_t* token) {
+ free(token->value);
+ free(token);
+}