diff options
| author | Fabrice <fabrice@schaub-dev.xyz> | 2026-03-03 17:16:23 +0100 |
|---|---|---|
| committer | Fabrice <fabrice@schaub-dev.xyz> | 2026-03-03 17:16:23 +0100 |
| commit | 22b860ab3fbd04768aeedf0ba0725b70f42f27dd (patch) | |
| tree | 91703d9baf98a3001e0b519444166ce5b85982b5 /src/tokenizer.cc | |
| parent | 8a46601a74462bf70c12253a5232bc21fc7f1427 (diff) | |
working on rewrite
Diffstat (limited to 'src/tokenizer.cc')
| -rw-r--r-- | src/tokenizer.cc | 25 |
1 file changed, 11 insertions(+), 14 deletions(-)
diff --git a/src/tokenizer.cc b/src/tokenizer.cc index 221ea12..498bca3 100644 --- a/src/tokenizer.cc +++ b/src/tokenizer.cc @@ -40,35 +40,33 @@ static inline Token tokenizer_make_token(const Tokenizer* tokenizer, return Token(kind, lexeme, span); } -static Buffer* tokenizer_get_buffer(Tokenizer* tokenizer, usize* cursor) { +static bool tokenizer_get_buffer(Tokenizer* tokenizer, usize* cursor) { assert_neq(tokenizer, nullptr); Buffer* curr = tokenizer->buffer; if (likely(curr != nullptr)) { *cursor = curr->cursor; - if (*cursor < curr->content.length) return curr; + if (*cursor < curr->content.length) return true; } - if (!buffer_stack_pop(tokenizer->stack, &curr)) return nullptr; + if (!buffer_stack_pop(tokenizer->stack, &curr)) return false; tokenizer->buffer = curr; *cursor = curr->cursor; - return curr; + return true; } -[[nodiscard]] static bool tokenizer_advance(const Tokenizer* tokenizer, - usize* offset, wchar* out) { +[[nodiscard]] static bool tokenizer_char(const Tokenizer* tokenizer, + usize offset, wchar* out) { const String text = tokenizer->buffer->content; - usize curr_offset = *offset; - assert_ste(curr_offset, text.length); - if (curr_offset == text.length) return false; + assert_ste(offset, text.length); + if (offset == text.length) return false; - unsigned char c = *text[curr_offset - 1]; + unsigned char c = *text[offset]; u8 nobytes = utf8_nobytes(c); if (nobytes > 1) panic("no support for multi-byte chars: %c:%d", c, nobytes); - *offset += nobytes; *out = c; return true; } @@ -88,8 +86,7 @@ bool tokenizer_next(Tokenizer* tokenizer, Token* out) { assert_neq(out, nullptr); usize cursor; - Buffer* buffer = tokenizer_get_buffer(tokenizer, &cursor); - if (buffer == nullptr) return false; + if(unlikely(!tokenizer_get_buffer(tokenizer, &cursor))) return false; usize advance = cursor; wchar c; @@ -108,7 +105,7 @@ bool tokenizer_next(Tokenizer* tokenizer, Token* out) { } out: - buffer->cursor = advance; + tokenizer->buffer->cursor = advance; return 
true; } |
