@@ -29,6 +29,13 @@ struct _ReadCache{T}
     end
 end
 
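+# Read the newline token (if any) that terminates a statement. At end-of-file
+# there is no token left, so `peek` returning `nothing` is also accepted.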
+function _read_newline_or_eof(state)
+    if (p = peek(state, _Token)) !== nothing
+        _ = read(state, _Token, _TOKEN_NEWLINE)
+    end
+    return
+end
+
 """
     Base.read!(io::IO, model::FileFormats.LP.Model)
 
@@ -53,10 +60,9 @@ function Base.read!(io::IO, model::Model{T}) where {T}
         if token.kind == _TOKEN_KEYWORD
             _ = read(state, _Token)
             keyword = Symbol(token.value)
-            continue
+            _read_newline_or_eof(state)
         elseif token.kind == _TOKEN_NEWLINE
-            _ = read(state, _Token)
-            continue
+            _ = read(state, _Token, _TOKEN_NEWLINE)
         elseif keyword == :MINIMIZE
             MOI.set(cache.model, MOI.ObjectiveSense(), MOI.MIN_SENSE)
             _parse_objective(state, cache)
@@ -347,6 +353,15 @@
 
 _is_number(c::Char) = isdigit(c) || c in ('.', 'e', 'E', '+', '-')
 
+# We want an efficient way to check if `test.value` is a case-insensitive
+# version of `target`. This is run for every identifier, so it needs to be fast.
+function _compare_case_insenstive(test::_Token, target::String)
+    if test.kind != _TOKEN_IDENTIFIER || length(test.value) != length(target)
+        return false
+    end
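+    # Comparing character by character avoids allocating `lowercase(test.value)`.
+    # Note that `target` must already be lowercase.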
+    return all(lowercase(a) == b for (a, b) in zip(test.value, target))
+end
+
 function Base.peek(state::_LexerState, ::Type{_Token}, n::Int = 1)
     @assert n >= 1
     while length(state.peek_tokens) < n
@@ -355,6 +370,23 @@ function Base.peek(state::_LexerState, ::Type{_Token}, n::Int = 1)
             return nothing
         end
         push!(state.peek_tokens, token)
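+        # The section headers "subject to" and "such that" span two identifier
+        # tokens. Peek one token further and, if the pair matches, collapse it
+        # into a single CONSTRAINTS keyword token; otherwise buffer both tokens.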
+        if _compare_case_insenstive(token, "subject")
+            t = _peek_inner(state)
+            if _compare_case_insenstive(t, "to")
+                state.peek_tokens[end] =
+                    _Token(_TOKEN_KEYWORD, "CONSTRAINTS", token.pos)
+            else
+                push!(state.peek_tokens, t)
+            end
+        elseif _compare_case_insenstive(token, "such")
+            t = _peek_inner(state)
+            if _compare_case_insenstive(t, "that")
+                state.peek_tokens[end] =
+                    _Token(_TOKEN_KEYWORD, "CONSTRAINTS", token.pos)
+            else
+                push!(state.peek_tokens, t)
+            end
+        end
     end
     return state.peek_tokens[n]
 end
@@ -369,7 +401,8 @@ function _peek_inner(state::_LexerState)
     elseif isspace(c) # Whitespace
         _ = read(state, Char)
     elseif c == '\\' # Comment: backslash until newline
-        while (c = read(state, Char)) !== nothing && c != '\n'
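+        # Peek before reading so the terminating '\n' stays in the stream and
+        # is later emitted as a _TOKEN_NEWLINE token.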
+        while (c = peek(state, Char)) !== nothing && c != '\n'
+            _ = read(state, Char)
         end
     elseif isdigit(c) || (c == '-' && isdigit(peek(state, Char))) # Number
         buf = IOBuffer()
@@ -385,21 +418,7 @@ function _peek_inner(state::_LexerState)
             _ = read(state, Char)
         end
         val = String(take!(buf))
-        l_val = lowercase(val)
-        if l_val == "subject"
-            t = peek(state, _Token)
-            if t.kind == _TOKEN_IDENTIFIER && lowercase(t.value) == "to"
-                _ = read(state, _Token) # Skip "to"
-                return _Token(_TOKEN_KEYWORD, "CONSTRAINTS", pos)
-            end
-        elseif l_val == "such"
-            t = peek(state, _Token)
-            if t.kind == _TOKEN_IDENTIFIER && lowercase(t.value) == "that"
-                _ = read(state, _Token) # Skip "such"
-                return _Token(_TOKEN_KEYWORD, "CONSTRAINTS", pos)
-            end
-        end
-        if (kw = get(_KEYWORDS, l_val, nothing)) !== nothing
+        if (kw = get(_KEYWORDS, lowercase(val), nothing)) !== nothing
             return _Token(_TOKEN_KEYWORD, string(kw), pos)
         end
         return _Token(_TOKEN_IDENTIFIER, val, pos)
@@ -579,7 +598,12 @@ function _parse_quad_expression(
             )
         end
     end
-    _skip_newlines(state)
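+    # Skip blank lines, but stop at a newline that is immediately followed by
+    # a section keyword so we do not read past the end of this section.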
+    while _next_token_is(state, _TOKEN_NEWLINE)
+        if _next_token_is(state, _TOKEN_KEYWORD, 2)
+            break
+        end
+        _ = read(state, _Token, _TOKEN_NEWLINE)
+    end
     if _next_token_is(state, _TOKEN_DIVISION)
         _ = read(state, _Token) # /
         # Must be /2
@@ -691,6 +715,9 @@ function _parse_expression(state::_LexerState, cache::_ReadCache{T}) where {T}
             p = read(state, _Token)
             _add_to_expression!(f, _parse_term(state, cache, -one(T)))
         elseif p.kind == _TOKEN_NEWLINE
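+            # A keyword after the newline starts a new section: leave the
+            # newline for the caller and stop parsing this expression.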
+            if _next_token_is(state, _TOKEN_KEYWORD, 2)
+                break
+            end
             _ = read(state, _Token)
         else
             break
@@ -782,6 +809,7 @@ function _parse_objective(state::_LexerState, cache::_ReadCache)
     end
     f = _parse_expression(state, cache)
     MOI.set(cache.model, MOI.ObjectiveFunction{typeof(f)}(), f)
+    _read_newline_or_eof(state)
     return
 end
 
@@ -828,6 +856,7 @@ function _parse_bound(state, cache)
         x = _parse_variable(state, cache)
         set = _parse_set_suffix(state, cache)
         _add_bound(cache, x, set)
+        _read_newline_or_eof(state)
         return
     end
     # `a op x` or `a op x op b`
@@ -842,6 +871,7 @@ function _parse_bound(state, cache)
         rhs_set = _parse_set_suffix(state, cache)
         _add_bound(cache, x, rhs_set)
     end
+    _read_newline_or_eof(state)
     return
 end
 
@@ -852,10 +882,11 @@ function _is_sos_constraint(state)
 end
 
 # SOS_CONSTRAINT :=
-# [NAME] S1:: (IDENTIFIER:NUMBER)+ \n
-# | [NAME] S2:: (IDENTIFIER:NUMBER)+ \n
+# [NAME] S1:: (IDENTIFIER:NUMBER)+
+# | [NAME] S2:: (IDENTIFIER:NUMBER)+
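+# For example: `con1: S1:: x:1 y:2 z:3`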
 #
-# The newline character is required.
+# Newlines are not supported within the constraint.
+# The terminating newline is handled in _parse_constraint.
 function _parse_sos_constraint(
     state::_LexerState,
     cache::_ReadCache{T},
@@ -904,6 +935,8 @@
 # INDICATOR_CONSTRAINT :=
 # IDENTIFIER "=" "0" "->" EXPRESSION SET_SUFFIX
 # | IDENTIFIER "=" "1" "->" EXPRESSION SET_SUFFIX
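+# For example: `z = 1 -> x + 2 y <= 3`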
+#
+# The terminating newline is handled in _parse_constraint.
 function _parse_indicator_constraint(
     state::_LexerState,
     cache::_ReadCache{T},
@@ -929,9 +962,9 @@ function _parse_indicator_constraint(
 end
 
 # CONSTRAINT :=
-# [NAME] EXPRESSION SET_SUFFIX
-# | [NAME] SOS_CONSTRAINT
-# | [NAME] INDICATOR_CONSTRAINT
+# [NAME] EXPRESSION SET_SUFFIX \n
+# | [NAME] SOS_CONSTRAINT \n
+# | [NAME] INDICATOR_CONSTRAINT \n
 function _parse_constraint(state::_LexerState, cache::_ReadCache)
     name = _parse_optional_name(state, cache)
     # Check if this is an SOS constraint
@@ -947,5 +980,6 @@ function _parse_constraint(state::_LexerState, cache::_ReadCache)
     if name !== nothing
         MOI.set(cache.model, MOI.ConstraintName(), c, name)
     end
+    _read_newline_or_eof(state)
     return
 end