diff options
| author | Fabrice <fabrice@schaub-dev.xyz> | 2026-03-03 09:42:37 +0100 |
|---|---|---|
| committer | Fabrice <fabrice@schaub-dev.xyz> | 2026-03-03 09:42:37 +0100 |
| commit | 3a9f7e6c9e1f44385c1950edc6267af8eea56a7e (patch) | |
| tree | fbf7175fbaec5d9776d105a6488680c87a694a61 /src/tokenizer.cc | |
| parent | 7ff9d2586a666bd420b98eb9f8de130cf57cd59d (diff) | |
formatting of tokens
Diffstat (limited to 'src/tokenizer.cc')
| -rw-r--r-- | src/tokenizer.cc | 24 |
1 file changed, 13 insertions, 11 deletions
diff --git a/src/tokenizer.cc b/src/tokenizer.cc index a1a365c..cf8a5fa 100644 --- a/src/tokenizer.cc +++ b/src/tokenizer.cc @@ -3,8 +3,8 @@ #include "common.cc" #include "source.cc" -#include "utf8.cc" #include "token.cc" +#include "utf8.cc" struct Tokenizer { Buffer* buffer; @@ -16,27 +16,29 @@ struct Tokenizer { static inline Buffer* tokenizer_get_buffer(Tokenizer* tokenizer) { assert_neq(tokenizer, nullptr); - if(tokenizer->buffer != nullptr) return tokenizer->buffer; + if (tokenizer->buffer != nullptr) return tokenizer->buffer; Buffer* buffer = nullptr; - if(!buffer_stack_pop(tokenizer->stack, &buffer)) return nullptr; + if (!buffer_stack_pop(tokenizer->stack, &buffer)) return nullptr; tokenizer->buffer = buffer; return buffer; } -static inline char tokenizer_advance(const Tokenizer* tokenizer, usize* offset) { +static inline char tokenizer_advance(const Tokenizer* tokenizer, + usize* offset) { const String text = tokenizer->buffer->content; - unsigned char c = *text[*offset]; + unsigned char c = *text[*offset]; u8 nobytes = utf8_nobytes(c); - if(nobytes > 1) panic("no support for multi-byte chars: %c:%d", c, nobytes); + if (nobytes > 1) panic("no support for multi-byte chars: %c:%d", c, nobytes); - offset += nobytes; + *offset += nobytes; return c; } -static inline String tokenizer_make_lexeme(const Tokenizer* tokenizer, usize start, usize end) { +static inline String tokenizer_make_lexeme(const Tokenizer* tokenizer, + usize start, usize end) { assert_neq(tokenizer, nullptr); Buffer* buffer = tokenizer->buffer; @@ -49,14 +51,14 @@ bool tokenizer_next(Tokenizer* tokenizer, Token* out) { again: Buffer* buffer = tokenizer_get_buffer(tokenizer); - if(buffer == nullptr) return false; + if (buffer == nullptr) return false; usize start = buffer->cursor; - if(start == buffer->content.length) { + if (start == buffer->content.length) { tokenizer->buffer = nullptr; goto again; } - + usize offset = start; tokenizer_advance(tokenizer, &offset); |
