author     Fabrice <fabrice@schaub-dev.xyz>   2026-03-03 12:20:14 +0100
committer  Fabrice <fabrice@schaub-dev.xyz>   2026-03-03 12:20:14 +0100
commit     9d45db6f19c267e81ae950b507c7797caf35c7ec (patch)
tree       2f16b26cd6ef1b45ffbf956d404d9429bd86c928 /src
parent     a134e9d3651cc502c1653f8da8515cb4f30eb5fb (diff)
use makers
Diffstat (limited to 'src')
-rw-r--r--   src/tokenizer.cc   46
-rw-r--r--   src/voidc.cc        2
2 files changed, 35 insertions(+), 13 deletions(-)
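
The commit ("use makers") moves token construction behind a trio of tokenizer_make_* helpers: tokenizer_make_span and tokenizer_make_lexeme build the pieces, and tokenizer_make_token composes them and writes the result through an out pointer, so tokenizer_next no longer assembles a Token inline. The sketch below illustrates that shape with simplified stand-in types; the real Buffer, Span, String and Token are defined elsewhere in the repository, and the file name used in main() is hypothetical. It is an illustration of the pattern, not the project's actual code.

// A minimal, self-contained sketch of the "maker" helpers this commit adds.
// Buffer/Span/Token/Tokenizer below are simplified stand-ins so the example
// compiles on its own; only the shape of the helpers mirrors the diff.
#include <cassert>
#include <cstddef>
#include <iostream>
#include <string>

struct Buffer {
    std::string file;
    std::string content;
    std::size_t cursor = 0;
};

struct Span {
    std::string file;
    std::size_t start = 0;
    std::size_t end = 0;
};

enum Token_Kind { Token_Kind_Eof, Token_Kind_Identifier };

struct Token {
    Token_Kind kind = Token_Kind_Eof;
    std::string lexeme;
    Span span;
};

struct Tokenizer {
    Buffer* buffer = nullptr;
};

// Each helper owns one piece of token construction, so call sites no longer
// assemble lexeme, span and Token by hand.
static Span tokenizer_make_span(const Tokenizer* tokenizer,
                                std::size_t start, std::size_t end) {
    assert(tokenizer != nullptr);
    Span span;
    span.file = tokenizer->buffer->file;
    span.start = start;
    span.end = end;
    return span;
}

static std::string tokenizer_make_lexeme(const Tokenizer* tokenizer,
                                         std::size_t start, std::size_t end) {
    assert(tokenizer != nullptr);
    return tokenizer->buffer->content.substr(start, end - start);
}

static void tokenizer_make_token(const Tokenizer* tokenizer, Token* token,
                                 Token_Kind kind,
                                 std::size_t start, std::size_t end) {
    assert(tokenizer != nullptr);
    assert(token != nullptr);
    token->kind = kind;
    token->lexeme = tokenizer_make_lexeme(tokenizer, start, end);
    token->span = tokenizer_make_span(tokenizer, start, end);
}

int main() {
    Buffer buffer;
    buffer.file = "example.vc";   // hypothetical file name, for illustration
    buffer.content = "main";

    Tokenizer tokenizer;
    tokenizer.buffer = &buffer;

    // Call sites now go through a single entry point, the way tokenizer_next
    // builds its Token_Kind_Eof token in the diff below.
    Token token;
    tokenizer_make_token(&tokenizer, &token, Token_Kind_Identifier, 0,
                         buffer.content.size());

    std::cout << token.lexeme << " [" << token.span.start << ", "
              << token.span.end << ")\n";
    return 0;
}
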
diff --git a/src/tokenizer.cc b/src/tokenizer.cc
index 515af26..1432961 100644
--- a/src/tokenizer.cc
+++ b/src/tokenizer.cc
@@ -13,7 +13,35 @@ struct Tokenizer {
Tokenizer(Buffer_Stack* stack) : buffer(nullptr), stack(stack) {}
};
-static inline Buffer* tokenizer_get_buffer(Tokenizer* tokenizer) {
+static inline Span tokenizer_make_span(const Tokenizer* tokenizer, usize start,
+ usize end) {
+ assert_neq(tokenizer, nullptr);
+
+ Buffer* buffer = tokenizer->buffer;
+ return Span(buffer->file, start, end);
+}
+
+static inline String tokenizer_make_lexeme(const Tokenizer* tokenizer,
+ usize start, usize end) {
+ assert_neq(tokenizer, nullptr);
+
+ Buffer* buffer = tokenizer->buffer;
+ return String(buffer->content[start], end - start);
+}
+
+static inline void tokenizer_make_token(const Tokenizer* tokenizer,
+ Token* token, Token_Kind kind,
+ usize start, usize end) {
+ assert_neq(tokenizer, nullptr);
+ assert_neq(token, nullptr);
+
+ String lexeme = tokenizer_make_lexeme(tokenizer, start, end);
+ Span span = tokenizer_make_span(tokenizer, start, end);
+
+ *token = Token(kind, lexeme, span);
+}
+
+static Buffer* tokenizer_get_buffer(Tokenizer* tokenizer) {
assert_neq(tokenizer, nullptr);
if (tokenizer->buffer != nullptr) return tokenizer->buffer;
@@ -25,7 +53,7 @@ static inline Buffer* tokenizer_get_buffer(Tokenizer* tokenizer) {
return buffer;
}
-static inline char tokenizer_advance(const Tokenizer* tokenizer,
+static char tokenizer_advance(const Tokenizer* tokenizer,
usize* offset) {
const String text = tokenizer->buffer->content;
@@ -37,14 +65,6 @@ static inline char tokenizer_advance(const Tokenizer* tokenizer,
return c;
}
-static inline String tokenizer_make_lexeme(const Tokenizer* tokenizer,
- usize start, usize end) {
- assert_neq(tokenizer, nullptr);
-
- Buffer* buffer = tokenizer->buffer;
- return String(buffer->content[start], end - start);
-}
-
bool tokenizer_next(Tokenizer* tokenizer, Token* out) {
assert_neq(tokenizer, nullptr);
assert_neq(out, nullptr);
@@ -62,8 +82,10 @@ again:
usize offset = start;
tokenizer_advance(tokenizer, &offset);
- String lexeme = tokenizer_make_lexeme(tokenizer, start, offset);
- *out = Token(Token_Kind_Eof, lexeme, Span(buffer->file, start, offset));
+ Token token = {};
+ tokenizer_make_token(tokenizer, &token, Token_Kind_Eof, start, offset);
+
+ *out = token;
buffer->cursor = offset;
return true;
}
diff --git a/src/voidc.cc b/src/voidc.cc
index c6682b2..526a10f 100644
--- a/src/voidc.cc
+++ b/src/voidc.cc
@@ -34,7 +34,7 @@ int main() {
buffer_stack_push(&stack, buffer1);
buffer_stack_push(&stack, buffer2);
-
+
Tokenizer tokenizer(&stack);
Token token = {};