summaryrefslogtreecommitdiff
path: root/src/tokenizer.cc
diff options
context:
space:
mode:
author	Fabrice <fabrice@schaub-dev.xyz>	2026-03-03 17:05:56 +0100
committer	Fabrice <fabrice@schaub-dev.xyz>	2026-03-03 17:05:56 +0100
commit	306241342887d33540c5e7b43b8539bde07f8c8e (patch)
tree	bdd6de40ceb1574c0447924c642ba1d3806466b1 /src/tokenizer.cc
parent	710197797f399a17fc3bcabe2d3816e728487571 (diff)
return directly
Diffstat (limited to 'src/tokenizer.cc')
-rw-r--r--	src/tokenizer.cc	16
1 file changed, 7 insertions(+), 9 deletions(-)
diff --git a/src/tokenizer.cc b/src/tokenizer.cc
index 029d440..59c0cd5 100644
--- a/src/tokenizer.cc
+++ b/src/tokenizer.cc
@@ -29,16 +29,15 @@ static inline String tokenizer_make_lexeme(const Tokenizer* tokenizer,
return String(buffer->content[start], end - start);
}
-static inline void tokenizer_make_token(const Tokenizer* tokenizer,
- Token* token, Token_Kind kind,
+static inline Token tokenizer_make_token(const Tokenizer* tokenizer,
+ Token_Kind kind,
usize start, usize end) {
assert_neq(tokenizer, nullptr);
- assert_neq(token, nullptr);
String lexeme = tokenizer_make_lexeme(tokenizer, start, end);
Span span = tokenizer_make_span(tokenizer, start, end);
- *token = Token(kind, lexeme, span);
+ return Token(kind, lexeme, span);
}
static Buffer* tokenizer_get_buffer(Tokenizer* tokenizer, usize* cursor) {
@@ -74,14 +73,14 @@ static Buffer* tokenizer_get_buffer(Tokenizer* tokenizer, usize* cursor) {
return true;
}
-static void tokenizer_lex_identifier(Tokenizer* tokenizer, usize* offset) {
+static Token tokenizer_lex_identifier(Tokenizer* tokenizer, usize start, usize *offset) {
assert_neq(tokenizer, nullptr);
- assert_neq(offset, nullptr);
wchar c;
while (tokenizer_advance(tokenizer, offset, &c))
if (!utf8_is_alnum(c) || c == '_') break;
+ return tokenizer_make_token(tokenizer, Token_Kind_Identifier, start, *offset);
}
bool tokenizer_next(Tokenizer* tokenizer, Token* out) {
@@ -98,14 +97,13 @@ bool tokenizer_next(Tokenizer* tokenizer, Token* out) {
&c); // We just checked that we are not at the end
if (utf8_is_identifier(c)) {
- tokenizer_lex_identifier(tokenizer, &advance);
- tokenizer_make_token(tokenizer, out, Token_Kind_Identifier, cursor, advance);
+ *out = tokenizer_lex_identifier(tokenizer, cursor, &advance);
goto out;
}
switch (c) {
default:
- tokenizer_make_token(tokenizer, out, Token_Kind_Invalid_Char, cursor,
+ *out = tokenizer_make_token(tokenizer, Token_Kind_Invalid_Char, cursor,
advance);
}