@@ -1099,6 +1099,13 @@ defmodule Code.Fragment do
       iex> Code.Fragment.container_cursor_to_quoted("foo +")
       {:ok, {:+, [line: 1], [{:foo, [line: 1], nil}, {:__cursor__, [line: 1], []}]}}
 
+  In order to parse the left-hand side of `->` properly, which appears both
+  in anonymous functions and do-end blocks, the `:trailing_fragment` option
+  must be given with the rest of the contents:
+
+      iex> Code.Fragment.container_cursor_to_quoted("fn x", trailing_fragment: " -> :ok end")
+      {:ok, {:fn, [line: 1], [{:->, [line: 1], [[{:__cursor__, [line: 1], []}], :ok]}]}}
+
   ## Options
 
     * `:file` - the filename to be reported in case of parsing errors.
@@ -1121,46 +1128,108 @@ defmodule Code.Fragment do
     * `:literal_encoder` - a function to encode literals in the AST.
       See the documentation for `Code.string_to_quoted/2` for more information.
 
+    * `:trailing_fragment` (since v1.18.0) - the rest of the contents after
+      the cursor. This is necessary to correctly complete anonymous functions
+      and the left-hand side of `->`.
+
   """
   @doc since: "1.13.0"
   @spec container_cursor_to_quoted(List.Chars.t(), keyword()) ::
           {:ok, Macro.t()} | {:error, {location :: keyword, binary | {binary, binary}, binary}}
   def container_cursor_to_quoted(fragment, opts \\ []) do
+    {trailing_fragment, opts} = Keyword.pop(opts, :trailing_fragment)
     opts = Keyword.take(opts, [:columns, :token_metadata, :literal_encoder])
-    opts = [cursor_completion: true, emit_warnings: false] ++ opts
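+    # `check_terminators: {:cursor, terminators}` replaces the previous
+    # `cursor_completion: true` flag: the tokenizer still performs cursor
+    # completion and the given terminators are treated as already open.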
+    opts = [check_terminators: {:cursor, []}, emit_warnings: false] ++ opts
 
     file = Keyword.get(opts, :file, "nofile")
     line = Keyword.get(opts, :line, 1)
     column = Keyword.get(opts, :column, 1)
 
     case :elixir_tokenizer.tokenize(to_charlist(fragment), line, column, opts) do
+      {:ok, line, column, _warnings, rev_tokens, rev_terminators}
+      when trailing_fragment == nil ->
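+        # Without a trailing fragment, the left-hand side of a missing `->` cannot
+        # be completed, so an unfinished `fn` (and its partial clause head) is
+        # replaced by the cursor itself.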
+        {rev_tokens, rev_terminators} =
+          with [close, open, {_, _, :__cursor__} = cursor | rev_tokens] <- rev_tokens,
+               {_, [_ | after_fn]} <- Enum.split_while(rev_terminators, &(elem(&1, 0) != :fn)),
+               true <- maybe_missing_stab?(rev_tokens),
+               [_ | rev_tokens] <- Enum.drop_while(rev_tokens, &(elem(&1, 0) != :fn)) do
+            {[close, open, cursor | rev_tokens], after_fn}
+          else
+            _ -> {rev_tokens, rev_terminators}
+          end
+
+        tokens = reverse_tokens(line, column, rev_tokens, rev_terminators)
+        :elixir.tokens_to_quoted(tokens, file, opts)
+
       {:ok, line, column, _warnings, rev_tokens, rev_terminators} ->
-        tokens = :lists.reverse(rev_tokens, rev_terminators)
-
-        case :elixir.tokens_to_quoted(tokens, file, opts) do
-          {:ok, ast} ->
-            {:ok, ast}
-
-          {:error, error} ->
-            # In case parsing fails, we give it another shot but handling fn/do/else/catch/rescue/after.
-            tokens =
-              :lists.reverse(
-                rev_tokens,
-                [{:stab_op, {line, column, nil}, :->}, {nil, {line, column + 2, nil}}] ++
-                  Enum.map(rev_terminators, fn tuple ->
-                    {line, column, info} = elem(tuple, 1)
-                    put_elem(tuple, 1, {line, column + 5, info})
-                  end)
-              )
-
-            case :elixir.tokens_to_quoted(tokens, file, opts) do
-              {:ok, ast} -> {:ok, ast}
-              {:error, _} -> {:error, error}
-            end
-        end
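+        # A trailing fragment was given: tokenize it with the terminators opened
+        # since the innermost do/fn, drop what remains of the expression under the
+        # cursor, and splice the closing tokens in after the cursor.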
+        tokens =
+          with {before_start, [_ | _] = after_start} <-
+                 Enum.split_while(rev_terminators, &(elem(&1, 0) not in [:do, :fn])),
+               true <- maybe_missing_stab?(rev_tokens),
+               opts =
+                 Keyword.put(opts, :check_terminators, {:cursor, before_start}),
+               {:error, {meta, _, ~c"end"}, _rest, _warnings, trailing_rev_tokens} <-
+                 :elixir_tokenizer.tokenize(to_charlist(trailing_fragment), line, column, opts) do
+            trailing_tokens =
+              reverse_tokens(meta[:line], meta[:column], trailing_rev_tokens, after_start)
+
+            Enum.reverse(rev_tokens, drop_tokens(trailing_tokens, 0))
+          else
+            _ -> reverse_tokens(line, column, rev_tokens, rev_terminators)
+          end
+
+        :elixir.tokens_to_quoted(tokens, file, opts)
 
       {:error, info, _rest, _warnings, _so_far} ->
         {:error, :elixir.format_token_error(info)}
     end
   end
+
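+  # Reverses the tokens back into source order and appends one synthetic closing
+  # token (`end`, `)`, `]`, ...) per pending terminator.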
+  defp reverse_tokens(line, column, tokens, terminators) do
+    {terminators, _} =
+      Enum.map_reduce(terminators, column, fn {start, _, _}, column ->
+        atom = :elixir_tokenizer.terminator(start)
+
+        {{atom, {line, column, nil}}, column + length(Atom.to_charlist(atom))}
+      end)
+
+    Enum.reverse(tokens, terminators)
+  end
+
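+  # Drops tokens from the trailing fragment until reaching a token that can close
+  # or continue the current expression (a closing delimiter, `end`, `,` or `->`)
+  # at the original nesting depth.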
+  defp drop_tokens([{:"}", _} | _] = tokens, 0), do: tokens
+  defp drop_tokens([{:"]", _} | _] = tokens, 0), do: tokens
+  defp drop_tokens([{:")", _} | _] = tokens, 0), do: tokens
+  defp drop_tokens([{:">>", _} | _] = tokens, 0), do: tokens
+  defp drop_tokens([{:end, _} | _] = tokens, 0), do: tokens
+  defp drop_tokens([{:",", _} | _] = tokens, 0), do: tokens
+  defp drop_tokens([{:stab_op, _, :->} | _] = tokens, 0), do: tokens
+
+  defp drop_tokens([{:"}", _} | tokens], counter), do: drop_tokens(tokens, counter - 1)
+  defp drop_tokens([{:"]", _} | tokens], counter), do: drop_tokens(tokens, counter - 1)
+  defp drop_tokens([{:")", _} | tokens], counter), do: drop_tokens(tokens, counter - 1)
+  defp drop_tokens([{:">>", _} | tokens], counter), do: drop_tokens(tokens, counter - 1)
+  defp drop_tokens([{:end, _} | tokens], counter), do: drop_tokens(tokens, counter - 1)
+
+  defp drop_tokens([{:"{", _} | tokens], counter), do: drop_tokens(tokens, counter + 1)
+  defp drop_tokens([{:"[", _} | tokens], counter), do: drop_tokens(tokens, counter + 1)
+  defp drop_tokens([{:"(", _} | tokens], counter), do: drop_tokens(tokens, counter + 1)
+  defp drop_tokens([{:"<<", _} | tokens], counter), do: drop_tokens(tokens, counter + 1)
+  defp drop_tokens([{:fn, _} | tokens], counter), do: drop_tokens(tokens, counter + 1)
+  defp drop_tokens([{:do, _} | tokens], counter), do: drop_tokens(tokens, counter + 1)
+
+  defp drop_tokens([_ | tokens], counter), do: drop_tokens(tokens, counter)
+  defp drop_tokens([], 0), do: []
+
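+  # Scans backwards from the cursor and returns true if a block-start keyword
+  # (fn/do/else/catch/rescue/after) is reached before any `->` or line break,
+  # meaning the current clause may still be missing its stab.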
+  defp maybe_missing_stab?([{:after, _} | _]), do: true
+  defp maybe_missing_stab?([{:do, _} | _]), do: true
+  defp maybe_missing_stab?([{:fn, _} | _]), do: true
+  defp maybe_missing_stab?([{:else, _} | _]), do: true
+  defp maybe_missing_stab?([{:catch, _} | _]), do: true
+  defp maybe_missing_stab?([{:rescue, _} | _]), do: true
+
+  defp maybe_missing_stab?([{:stab_op, _, :->} | _]), do: false
+  defp maybe_missing_stab?([{:eol, _}, next | _]) when elem(next, 0) != :",", do: false
+
+  defp maybe_missing_stab?([_ | tail]), do: maybe_missing_stab?(tail)
+  defp maybe_missing_stab?([]), do: false
 end