1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
|
#include "include/lexer.h"
#include "include/token.h"

#include <ctype.h>
#include <stdlib.h>
#include <string.h>
/* Allocate and initialize a lexer over `content`.
 *
 * The lexer does NOT take ownership of `content`; the caller must keep the
 * buffer alive for the lexer's lifetime. Returns NULL on allocation failure
 * (the original dereferenced an unchecked calloc result — NULL deref on OOM).
 */
lexer_T* lexer_init(char* content) {
    lexer_T* lexer = calloc(1, sizeof(struct LEXER_STRUC));
    if (lexer == NULL) {
        return NULL; // out of memory — caller must check
    }
    lexer->content = content;
    lexer->i = 0;                       // index of the current character
    lexer->c = content[lexer->i];       // current character under the cursor
    return lexer;
}
/* Advance the cursor by one character.
 *
 * No-op once the end of input is reached, so calling past the end is safe.
 * NOTE(review): the strlen bound re-check is redundant for a well-formed
 * NUL-terminated string (the '\0' test already stops us) — kept for
 * behavioral parity.
 */
void lexer_next(lexer_T* lexer) {
    if (lexer->c == '\0' || lexer->i >= strlen(lexer->content)) {
        return; // already at end of input
    }
    lexer->i += 1;
    lexer->c = lexer->content[lexer->i];
}
/* Skip over whitespace (space, tab, newline), leaving the cursor on the
 * first non-whitespace character (or at end of input). */
void lexer_pass(lexer_T* lexer) {
    for (;;) {
        switch (lexer->c) {
            case ' ':
            case '\t':
            case '\n':
                lexer_next(lexer);
                break;
            default:
                return; // first non-whitespace character reached
        }
    }
}
/* Produce the next token from the input stream.
 *
 * Skips whitespace, then dispatches on the current character. Returns a
 * freshly allocated token; TOKEN_EOF once input is exhausted.
 *
 * Fixes over the original:
 *  - BUG: a character with no matching case fell through the switch and
 *    looped forever, since nothing advanced the lexer. A `default` now
 *    skips unrecognized characters.
 *  - BUG: after lexer_pass() the switch could run on '\0'; `continue`
 *    re-checks the loop condition first.
 *  - PERF: strlen() was recomputed every iteration (O(n^2) tokenization);
 *    it is hoisted out of the loop.
 */
token_T* lexer_get_next_token(lexer_T* lexer) {
    size_t len = strlen(lexer->content); // hoisted: content is not resized

    while (lexer->c != '\0' && lexer->i < len) {
        if (lexer->c == ' ' || lexer->c == '\t' || lexer->c == '\n') {
            lexer_pass(lexer);
            continue; // whitespace may have run to end of input; re-check
        }
        switch (lexer->c) {
            case '"':
                return lexer_get_string(lexer);
            case '=':
                return lexer_next_token(lexer,
                    token_init(TOKEN_EQ, lexer_get_c_as_string(lexer)));
            case '/':
                return lexer_next_token(lexer,
                    token_init(TOKEN_LORD, lexer_get_c_as_string(lexer)));
            case '\\':
                return lexer_next_token(lexer,
                    token_init(TOKEN_RORD, lexer_get_c_as_string(lexer)));
            case '&':
                return lexer_next_token(lexer,
                    token_init(TOKEN_AMP, lexer_get_c_as_string(lexer)));
            case '[':
                return lexer_next_token(lexer,
                    token_init(TOKEN_LBRAK, lexer_get_c_as_string(lexer)));
            case ']':
                return lexer_next_token(lexer,
                    token_init(TOKEN_RBRAK, lexer_get_c_as_string(lexer)));
            case '#':
                return lexer_next_token(lexer,
                    token_init(TOKEN_POUND, lexer_get_c_as_string(lexer)));
            case '~':
                return lexer_next_token(lexer,
                    token_init(TOKEN_TILDE, lexer_get_c_as_string(lexer)));
            default:
                // Unrecognized character: skip it instead of spinning
                // forever. TODO(review): consider routing identifiers
                // to lexer_get_id() or reporting an error here.
                lexer_next(lexer);
                break;
        }
    }
    return token_init(TOKEN_EOF, "\0");
}
/* Collect a double-quoted string literal into a token.
 *
 * On entry the cursor sits on the opening '"'. Consumes through the closing
 * '"' (tolerates an unterminated string at end of input) and returns a token
 * whose heap-allocated value is the string contents without the quotes.
 *
 * BUG FIX: the original body was empty — falling off the end of a non-void
 * function whose value is used (see the '"' case in lexer_get_next_token)
 * is undefined behavior.
 *
 * NOTE(review): assumes include/token.h defines TOKEN_STRING — confirm.
 */
token_T* lexer_get_string(lexer_T* lexer) {
    lexer_next(lexer); // step past the opening quote

    size_t len = 0;
    char* value = calloc(1, sizeof(char)); // start as empty string
    if (value == NULL) {
        return NULL;
    }

    while (lexer->c != '"' && lexer->c != '\0') {
        char* grown = realloc(value, len + 2); // +1 new char, +1 NUL
        if (grown == NULL) {
            free(value); // original pointer still valid on realloc failure
            return NULL;
        }
        value = grown;
        value[len++] = lexer->c;
        value[len] = '\0';
        lexer_next(lexer);
    }

    if (lexer->c == '"') {
        lexer_next(lexer); // step past the closing quote
    }
    return token_init(TOKEN_STRING, value);
}
/* Collect an identifier (alphanumerics and '_') into a token.
 *
 * On entry the cursor sits on the identifier's first character; on exit it
 * sits on the first character past the identifier. The token's value is
 * heap-allocated.
 *
 * BUG FIX: the original body was empty — a non-void function falling off
 * the end is undefined behavior the moment a caller uses the result.
 *
 * NOTE(review): assumes include/token.h defines TOKEN_ID — confirm.
 */
token_T* lexer_get_id(lexer_T* lexer) {
    size_t len = 0;
    char* value = calloc(1, sizeof(char)); // start as empty string
    if (value == NULL) {
        return NULL;
    }

    // cast to unsigned char: isalnum on a negative char is UB
    while (isalnum((unsigned char)lexer->c) || lexer->c == '_') {
        char* grown = realloc(value, len + 2); // +1 new char, +1 NUL
        if (grown == NULL) {
            free(value); // original pointer still valid on realloc failure
            return NULL;
        }
        value = grown;
        value[len++] = lexer->c;
        value[len] = '\0';
        lexer_next(lexer);
    }
    return token_init(TOKEN_ID, value);
}
/* Convenience helper: advance the lexer one character, then hand back the
 * token that was built for the character just consumed. Ownership of the
 * token passes through to the caller unchanged. */
token_T* lexer_next_token(lexer_T* lexer, token_T* token) {
    token_T* result = token;
    lexer_next(lexer); // consume the character this token was made from
    return result;
}
/* Return the current character as a freshly allocated one-character,
 * NUL-terminated string. The caller owns the buffer and must free() it
 * (in practice ownership is handed to token_init).
 *
 * BUG FIX: the original body was empty — its (garbage) return value is
 * used by every single-character case in lexer_get_next_token, which is
 * undefined behavior.
 */
char* lexer_get_c_as_string(lexer_T* lexer) {
    char* str = calloc(2, sizeof(char)); // 1 char + NUL, zero-initialized
    if (str == NULL) {
        return NULL;
    }
    str[0] = lexer->c;
    str[1] = '\0';
    return str;
}
|