@@ -24,6 +24,7 @@ import string_list;
 import string_pool;
 import token local;
 import utf8;
+import warning_flags;
 
 import string;
 import stdlib;
@@ -266,6 +267,8 @@ public type Tokenizer struct {
 
     string_pool.Pool* pool;         // no ownership
     string_buffer.Buf* buf;         // no ownership, used for strings and character constants
+    const warning_flags.Flags* warnings;
+
     ErrorFn on_error;
     void* on_error_arg;
 
@@ -278,7 +281,7 @@ public type Tokenizer struct {
 
     char[256] error_msg;
 }
-static_assert(408 , sizeof(Tokenizer));
+static_assert(416 , sizeof(Tokenizer));
 
 public fn void Tokenizer.init(Tokenizer* t,
                               string_pool.Pool* pool,
@@ -287,6 +290,7 @@ public fn void Tokenizer.init(Tokenizer* t,
                               SrcLoc loc_start,
                               const keywords.Info* kwinfo,
                               const string_list.List* features,
+                              const warning_flags.Flags* warnings,
                               ErrorFn on_error,
                               void* on_error_arg,
                               bool raw_mode)
@@ -300,6 +304,7 @@ public fn void Tokenizer.init(Tokenizer* t,
     t.line_start = input;
     t.pool = pool;
     t.buf = buf;
+    t.warnings = warnings;
     t.on_error = on_error;
     t.on_error_arg = on_error_arg;
 
@@ -685,6 +690,15 @@ fn void Tokenizer.error(Tokenizer* t, Token* result, const char* format @(printf
     if (t.on_error) t.on_error(t.on_error_arg, FatalError, result.loc, t.error_msg);
 }
 
+fn void Tokenizer.warning(Tokenizer* t, SrcLoc loc, const char* format @(printf_format), ...) {
+    va_list args;
+    va_start(args, format);
+    vsnprintf(t.error_msg, sizeof(t.error_msg), format, args);
+    va_end(args);
+
+    if (t.on_error) t.on_error(t.on_error_arg, Warning, loc, t.error_msg);
+}
+
 // generate an error but keep parsing
 fn void Tokenizer.num_error(Tokenizer* t, Token* result, const char* p, const char* format @(printf_format), ...) {
     va_list args;
@@ -720,9 +734,8 @@ fn void Tokenizer.lex_identifier(Tokenizer* t, Token* result) {
     while (Identifier_char[(u8)(*end)]) end++;
 
     usize len = (usize)(end - start);
-    if (len > constants.MaxIdentifierLen && !t.raw_mode) {
-        t.error(result, "identifier too long (max %d chars)", constants.MaxIdentifierLen);
-        return;
+    if (len > constants.MaxIdentifierLen && !t.raw_mode && t.warnings && !t.warnings.no_max_identifier_length) {
+        t.warning(result.loc, "identifier too long (max %d chars)", constants.MaxIdentifierLen);
     }
     t.cur += len;
     result.name_idx = t.pool.add(start, len, true);
0 commit comments