@@ -395,13 +395,13 @@ function Base.peek(state::_LexerState, ::Type{_Token}, n::Int = 1)
         if token.kind != _TOKEN_IDENTIFIER
             continue
         end
-        # Here we have a _TOKEN_IDENTIFIER. If it is preceded by a
-        # _TOKEN_NEWLINE, it may be a _TOKEN_KEYWORD.
+        # Here we have a _TOKEN_IDENTIFIER. But if it is not preceded by a
+        # _TOKEN_NEWLINE, it cannot be a _TOKEN_KEYWORD.
         if !_nothing_or_newline(_prior_token(state))
-            continue  # It can't be a keyword
+            continue
         end
         # It might be a _TOKEN_KEYWORD.
-        kw = _case_insenstive_identifier_to_keyword(token.value)
+        (kw = _case_insenstive_identifier_to_keyword(token.value))
         if kw !== nothing
             # The token matches a single word keyword. All keywords are followed
             # by a new line, or an EOF.
@@ -414,25 +414,36 @@ function Base.peek(state::_LexerState, ::Type{_Token}, n::Int = 1)
             end
             continue
         end
-        for (a, b) in ["subject" => "to", "such" => "that"]
-            if _compare_case_insenstive(token, a)
-                # This _might_ be `subject to`, or it might just be a variable
-                # named `subject`, like `obj:\n subject\n`.
-                t = _peek_inner(state)
-                if t !== nothing
-                    t2 = _peek_inner(state)
-                    if _compare_case_insenstive(t, b) && _nothing_or_newline(t2)
-                        state.peek_tokens[end] =
-                            _Token(_TOKEN_KEYWORD, "CONSTRAINTS", token.pos)
-                    else
-                        push!(state.peek_tokens, t)
-                    end
-                    if t2 !== nothing
-                        push!(state.peek_tokens, t2)
-                    end
-                end
+        # There are two keywords that contain whitespace: `subject to` and
+        # `such that`.
+        for (a, b) in ("subject" => "to", "such" => "that")
+            if !_compare_case_insenstive(token, a)
                 continue
             end
+            # This _might_ be `subject to`, or it might just be a variable
+            # named `subject`, like `obj:\n subject\n`.
+            token_b = _peek_inner(state)
+            if token_b === nothing
+                # The next token is EOF. Nothing to do here.
+                break
+            elseif !_compare_case_insenstive(token_b, b)
+                # The second token doesn't match. Store `token_b` and break.
+                push!(state.peek_tokens, token_b)
+                break
+            end
+            # We have something that matches (a, b), but a _TOKEN_KEYWORD needs
+            # to be followed by a new line.
+            token_nl = _peek_inner(state)
+            if _nothing_or_newline(token_nl)
+                state.peek_tokens[end] =
+                    _Token(_TOKEN_KEYWORD, "CONSTRAINTS", token.pos)
+            else
+                push!(state.peek_tokens, token_b)
+            end
+            if token_nl !== nothing
+                push!(state.peek_tokens, token_nl)
+            end
+            break
         end
     end
     return state.peek_tokens[n]
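
A note on the approach the new branch takes: the two-word keywords are recognised purely by lookahead, and `subject to` / `such that` are folded into a single CONSTRAINTS keyword token only when the pair starts a line and is terminated by a newline or EOF. Below is a minimal sketch of that idea on a plain Vector{String} token stream; the fold_two_word_keywords helper and the literal "\n" newline sentinel are hypothetical stand-ins for the real _LexerState machinery, not the actual API.

# Hypothetical sketch: fold "subject to" / "such that" into one keyword token.
# Assumes a flat Vector{String} where "\n" marks a newline token.
function fold_two_word_keywords(tokens::Vector{String})
    out = String[]
    i = 1
    while i <= length(tokens)
        word = tokens[i]
        folded = false
        for (a, b) in ("subject" => "to", "such" => "that")
            # The pair must start a line, the second word must follow, and the
            # pair must be terminated by a newline or the end of the stream.
            if (i == 1 || tokens[i-1] == "\n") &&
               lowercase(word) == a &&
               i + 1 <= length(tokens) &&
               lowercase(tokens[i+1]) == b &&
               (i + 2 > length(tokens) || tokens[i+2] == "\n")
                push!(out, "CONSTRAINTS")  # one keyword token replaces the pair
                i += 2
                folded = true
                break
            end
        end
        if !folded
            push!(out, word)
            i += 1
        end
    end
    return out
end

# fold_two_word_keywords(["obj:", "\n", "subject", "to", "\n"]) folds the pair,
# while a lone variable named "subject" that is not followed by "to" is kept.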