summaryrefslogtreecommitdiff
path: root/src/tokenizer.cc
diff options
context:
space:
mode:
authorFabrice <fabrice@schaub-dev.xyz>2026-03-03 12:28:46 +0100
committerFabrice <fabrice@schaub-dev.xyz>2026-03-03 12:28:46 +0100
commit9c5b50bfeabd0bd56cd2f9180c4161288ca9431a (patch)
tree974aede303b820fb35a7f293f80b126819a50697 /src/tokenizer.cc
parent9d45db6f19c267e81ae950b507c7797caf35c7ec (diff)
not good
Diffstat (limited to 'src/tokenizer.cc')
-rw-r--r--src/tokenizer.cc35
1 file changed, 16 insertions, 19 deletions
diff --git a/src/tokenizer.cc b/src/tokenizer.cc
index 1432961..3451f69 100644
--- a/src/tokenizer.cc
+++ b/src/tokenizer.cc
@@ -41,16 +41,20 @@ static inline void tokenizer_make_token(const Tokenizer* tokenizer,
*token = Token(kind, lexeme, span);
}
-static Buffer* tokenizer_get_buffer(Tokenizer* tokenizer) {
+static Buffer* tokenizer_get_buffer(Tokenizer* tokenizer, usize* cursor) {
assert_neq(tokenizer, nullptr);
- if (tokenizer->buffer != nullptr) return tokenizer->buffer;
+ Buffer* curr = tokenizer->buffer;
+ if (likely(curr != nullptr)) {
+ *cursor = curr->cursor;
+ if(*cursor < curr->content.length) return curr;
+ }
- Buffer* buffer = nullptr;
- if (!buffer_stack_pop(tokenizer->stack, &buffer)) return nullptr;
+ if (!buffer_stack_pop(tokenizer->stack, &curr)) return nullptr;
- tokenizer->buffer = buffer;
- return buffer;
+ tokenizer->buffer = curr;
+ *cursor = curr->cursor;
+ return curr;
}
static char tokenizer_advance(const Tokenizer* tokenizer,
@@ -69,24 +73,17 @@ bool tokenizer_next(Tokenizer* tokenizer, Token* out) {
assert_neq(tokenizer, nullptr);
assert_neq(out, nullptr);
-again:
- Buffer* buffer = tokenizer_get_buffer(tokenizer);
+ usize cursor, advance;
+ Buffer* buffer = tokenizer_get_buffer(tokenizer, &cursor);
if (buffer == nullptr) return false;
- usize start = buffer->cursor;
- if (start == buffer->content.length) {
- tokenizer->buffer = nullptr;
- goto again;
- }
-
- usize offset = start;
- tokenizer_advance(tokenizer, &offset);
-
+ tokenizer_advance(tokenizer, &advance);
+
Token token = {};
- tokenizer_make_token(tokenizer, &token, Token_Kind_Eof, start, offset);
+ tokenizer_make_token(tokenizer, &token, Token_Kind_Eof, cursor, advance);
*out = token;
- buffer->cursor = offset;
+ buffer->cursor = advance;
return true;
}