diff --git a/docs/src/api.md b/docs/src/api.md
index 9fa1a37f..d79c86a3 100644
--- a/docs/src/api.md
+++ b/docs/src/api.md
@@ -82,7 +82,7 @@ JuliaSyntax.kind
 ```
 
 In addition to the `kind`, a small integer set of "flags" is included to
-further distinguish details of each expresssion, accessed with the `flags`
+further distinguish details of each expression, accessed with the `flags`
 function. The kind and flags can be wrapped into a `SyntaxHead` which is
 accessed with the `head` function.
 
diff --git a/docs/src/design.md b/docs/src/design.md
index 0f7e2a4f..fb2a06c2 100644
--- a/docs/src/design.md
+++ b/docs/src/design.md
@@ -317,7 +317,7 @@ parsing `key=val` pairs inside parentheses.
 
 ### Other oddities
 
-* Operators with suffices don't seem to always be parsed consistently as the
+* Operators with suffixes don't seem to always be parsed consistently as the
   same operator without a suffix. Unclear whether this is by design or
   mistake. For example, `[x +y] ==> (hcat x (+ y))`, but `[x +₁y] ==>
   (hcat (call +₁ x y))`
@@ -425,7 +425,7 @@ First, there's no support for precise source locations and the existing
 data structures (bare flisp lists) can't easily be extended to add these.
 Fixing this would require changes to nearly all of the code.
 
-Second, it's written in flisp: an aestheically pleasing, minimal but obscure
+Second, it's written in flisp: an aesthetically pleasing, minimal but obscure
 implementation of Scheme. Learning Scheme is actually a good way to appreciate
 some of Julia's design inspiration, but it's quite a barrier for developers of
 Julia language tooling. (Flisp has no user-level documentation but non-schemers
diff --git a/docs/src/reference.md b/docs/src/reference.md
index 84fe09bc..100110cb 100644
--- a/docs/src/reference.md
+++ b/docs/src/reference.md
@@ -73,7 +73,7 @@ class of tokenization errors and lets the parser deal with them.
 
 ### Improvements to awkward AST forms
 
-* Frakentuples with multiple parameter blocks like `(a=1, b=2; c=3; d=4)` are flattened into the parent tuple instead of using nested `K"parameters"` nodes (#133)
+* `FrankenTuple`s with multiple parameter blocks like `(a=1, b=2; c=3; d=4)` are flattened into the parent tuple instead of using nested `K"parameters"` nodes (#133)
 * Using `try catch else finally end` is parsed with `K"catch"` `K"else"` and `K"finally"` children to avoid the awkwardness of the optional child nodes in the `Expr` representation (#234)
 * The dotted import path syntax as in `import A.b.c` is parsed with a `K"importpath"` kind rather than `K"."`, because a bare `A.b.c` has a very different nested/quoted expression representation (#244)
 * We use flags rather than child nodes to represent the difference between `struct` and `mutable struct`, `module` and `baremodule` (#220)
diff --git a/src/expr.jl b/src/expr.jl
index fd61243c..265fd022 100644
--- a/src/expr.jl
+++ b/src/expr.jl
@@ -440,7 +440,7 @@ function _internal_node_to_Expr(source, srcrange, head, childranges, childheads,
         a1 = args[1]
         if @isexpr(a1, :block)
             a1a = (args[1]::Expr).args
-            # Ugly logic to strip the Expr(:block) in certian cases for compatibility
+            # Ugly logic to strip the Expr(:block) in certain cases for compatibility
             if length(a1a) == 1
                 a = a1a[1]
                 if a isa Symbol || @isexpr(a, :(=)) || @isexpr(a, :(::))
diff --git a/src/green_tree.jl b/src/green_tree.jl
index be55e4f6..27da7ec4 100644
--- a/src/green_tree.jl
+++ b/src/green_tree.jl
@@ -12,7 +12,7 @@ As implementation choices, we choose that:
 
 * Nodes are immutable and don't know their parents or absolute position, so
   can be cached and reused
-* Nodes are homogenously typed at the language level so they can be stored
+* Nodes are homogeneously typed at the language level so they can be stored
   concretely, with the `head` defining the node type. Normally this would
   include a "syntax kind" enumeration, but it can also include flags and
   record information the parser knew about the layout of the child nodes.
diff --git a/src/kinds.jl b/src/kinds.jl
index 2386fe26..29b8120b 100644
--- a/src/kinds.jl
+++ b/src/kinds.jl
@@ -135,7 +135,7 @@ Register custom `Kind`s with the given `names`, belonging to a module `mod`.
 `names` is an array of arbitrary strings.
 
 In order for kinds to be represented by a small number of bits, some nontrivial
-cooperation is reqired between modules using custom kinds:
+cooperation is required between modules using custom kinds:
 * The integer `module_id` is globally unique for each `mod` which will be used
   together, and not larger than $_kind_module_id_max.
 * No two modules register the same `name`. The semantics of a given `kind` name
diff --git a/src/parse_stream.jl b/src/parse_stream.jl
index 8aad71df..b1594ea2 100644
--- a/src/parse_stream.jl
+++ b/src/parse_stream.jl
@@ -1,5 +1,5 @@
 #-------------------------------------------------------------------------------
-# Flags hold auxilary information about tokens/nonterminals which the Kind
+# Flags hold auxiliary information about tokens/nonterminals which the Kind
 # doesn't capture in a nice way.
 #
 # TODO: Use `primitive type SyntaxFlags 16 end` rather than an alias?
@@ -40,7 +40,7 @@ Set for K"tuple", K"block" or K"macrocall" which are delimited by parentheses
 const PARENS_FLAG = RawFlags(1<<5)
 
 """
-Set for K"quote" for the short form `:x` as oppsed to long form `quote x end`
+Set for K"quote" for the short form `:x` as opposed to long form `quote x end`
 """
 const COLON_QUOTE = RawFlags(1<<5)
 
@@ -223,7 +223,7 @@ is_dotted(x) = has_flags(x, DOTOP_FLAG)
 """
     is_suffixed(x)
 
-Return true for operators which have sufficies, such as `+₁`
+Return true for operators which have suffixes, such as `+₁`
 """
 is_suffixed(x) = has_flags(x, SUFFIXED_FLAG)
 
@@ -822,7 +822,7 @@ end
 Bump an invisible zero-width token into the output
 
 This is useful when surrounding syntax implies the presence of a token. For
-example, `2x` means `2*x` via the juxtoposition rules.
+example, `2x` means `2*x` via the juxtaposition rules.
 """
 function bump_invisible(stream::ParseStream, kind, flags=EMPTY_FLAGS;
                         error=nothing)
diff --git a/src/parser.jl b/src/parser.jl
index e1356e99..7502eb7a 100644
--- a/src/parser.jl
+++ b/src/parser.jl
@@ -723,7 +723,7 @@ function parse_cond(ps::ParseState)
     # FIXME: This is a very specific case. Error recovery should be handled more
     # generally elsewhere.
     if is_block_continuation_keyword(ps, kind(t))
-        # a "continuaton keyword" is likely to belong to the surrounding code, so
+        # a "continuation keyword" is likely to belong to the surrounding code, so
         # we abort early
         # if true; x ? true elseif true end ==> (if true (block (if x true (error-t) (error-t))) (elseif true (block)))
 
@@ -1472,7 +1472,7 @@ function parse_unary_prefix(ps::ParseState)
     end
 end
 
-# Parses a chain of sufficies at function call precedence, leftmost binding
+# Parses a chain of suffixes at function call precedence, leftmost binding
 # tightest. This handles
 # * Bracketed calls like a() b[] c{}
 # * Field access like a.b.c
@@ -1722,7 +1722,7 @@ function parse_call_chain(ps::ParseState, mark, is_macrocall=false)
             # x`str` ==> (macrocall @x_cmd (cmdstring-r "str"))
             # x"" ==> (macrocall @x_str (string-r ""))
             # x`` ==> (macrocall @x_cmd (cmdstring-r ""))
-            # Triple quoted procesing for custom strings
+            # Triple quoted processing for custom strings
             # r"""\nx""" ==> (macrocall @r_str (string-s-r "x"))
            # r"""\n x\n y""" ==> (macrocall @r_str (string-s-r "x\n" "y"))
            # r"""\n x\\n y""" ==> (macrocall @r_str (string-s-r "x\\\n" "y"))
@@ -1735,7 +1735,7 @@ function parse_call_chain(ps::ParseState, mark, is_macrocall=false)
             t = peek_token(ps)
             k = kind(t)
             if !preceding_whitespace(t) && is_string_macro_suffix(k)
-                # Macro sufficies can include keywords and numbers
+                # Macro suffixes can include keywords and numbers
                 # x"s"y ==> (macrocall @x_str (string-r "s") "y")
                 # x"s"end ==> (macrocall @x_str (string-r "s") "end")
                 # x"s"in ==> (macrocall @x_str (string-r "s") "in")
@@ -3484,7 +3484,7 @@ function parse_atom(ps::ParseState, check_identifiers=true)
             end
             emit(ps, mark, K"char")
         elseif leading_kind == K"Char"
-            # FIXME: This is a tokenization error and should be preceeded with
+            # FIXME: This is a tokenization error and should be preceded with
             # K"'". However this workaround is better than emitting a bare Char.
             bump(ps, remap_kind=K"Identifier")
         elseif leading_kind == K":"
diff --git a/src/parser_api.jl b/src/parser_api.jl
index 95f077ef..83a9ff3a 100644
--- a/src/parser_api.jl
+++ b/src/parser_api.jl
@@ -161,7 +161,7 @@ Token type resulting from calling `tokenize(text)`
 
 Use
 * `kind(tok)` to get the token kind
-* `untokenize(tok, text)` to retreive the text
+* `untokenize(tok, text)` to retrieve the text
 * Predicates like `is_error(tok)` to query token categories and flags
 """
 struct Token
@@ -177,7 +177,7 @@ head(t::Token) = t.head
     tokenize(text)
 
 Returns the tokenized UTF-8 encoded `text` as a vector of `Token`s. The
-text for the token can be retreived by using `untokenize()`. The full text can be
+text for the token can be retrieved by using `untokenize()`. The full text can be
 reconstructed with, for example, `join(untokenize.(tokenize(text), text))`.
 
 This interface works on UTF-8 encoded string or buffer data only.
diff --git a/src/source_files.jl b/src/source_files.jl
index 9d039d92..9c5ccf24 100644
--- a/src/source_files.jl
+++ b/src/source_files.jl
@@ -70,17 +70,17 @@ second form, get the line number at the given `byte_index` within `source`.
 source_line(x) = source_line(sourcefile(x), first_byte(x))
 
 """
-    souce_location(x)
-    souce_location(source::SourceFile, byte_index::Integer)
+    source_location(x)
+    source_location(source::SourceFile, byte_index::Integer)
 
-    souce_location(LineNumberNode, x)
-    souce_location(LineNumberNode, source, byte_index)
+    source_location(LineNumberNode, x)
+    source_location(LineNumberNode, source, byte_index)
 
 Get `(line,column)` of the first byte where object `x` appears in the source.
 The second form allows one to be more precise with the `byte_index`, given the
 source file.
 
-Providing `LineNumberNode` as the first agrument will return the line and file
+Providing `LineNumberNode` as the first argument will return the line and file
 name in a line number node object.
 """
 source_location(x) = source_location(sourcefile(x), first_byte(x))
@@ -373,7 +373,7 @@ function highlight(io::IO, source::SourceFile, range::UnitRange;
         # The diagnostic range is compact and we show the whole thing
         _printstyled(io, source[p:q]; bgcolor=color)
     else
-        # Or large and we trucate the code to show only the region around the
+        # Or large and we truncate the code to show only the region around the
         # start and end of the error.
         _printstyled(io, source[p:y]; bgcolor=color)
         print(io, "⋮\n")
diff --git a/test/parse_packages.jl b/test/parse_packages.jl
index ef255d65..b5b08f48 100644
--- a/test/parse_packages.jl
+++ b/test/parse_packages.jl
@@ -38,7 +38,7 @@ base_tests_path = joinpath(Sys.BINDIR, Base.DATAROOTDIR, "julia", "test")
             return nothing
         end
 
-        # syntax.jl has some intentially weird syntax which we parse
+        # syntax.jl has some intentionally weird syntax which we parse
         # differently than the flisp parser, and some cases which we've
         # decided are syntax errors.
         if endswith(f, "syntax.jl")
diff --git a/test/parser.jl b/test/parser.jl
index f32e7ba6..37c82bc8 100644
--- a/test/parser.jl
+++ b/test/parser.jl
@@ -444,17 +444,17 @@ tests = [
     "x``" => """(macrocall @x_cmd (cmdstring-r ""))"""
     "in\"str\"" => """(macrocall @in_str (string-r "str"))"""
     "outer\"str\"" => """(macrocall @outer_str (string-r "str"))"""
-    # Triple quoted procesing for custom strings
+    # Triple quoted processing for custom strings
     "r\"\"\"\nx\"\"\"" => raw"""(macrocall @r_str (string-s-r "x"))"""
     "r\"\"\"\n x\n y\"\"\"" => raw"""(macrocall @r_str (string-s-r "x\n" "y"))"""
     "r\"\"\"\n x\\\n y\"\"\"" => raw"""(macrocall @r_str (string-s-r "x\\\n" "y"))"""
-    # Macro sufficies can include keywords and numbers
+    # Macro suffixes can include keywords and numbers
     "x\"s\"y" => """(macrocall @x_str (string-r "s") "y")"""
     "x\"s\"end" => """(macrocall @x_str (string-r "s") "end")"""
     "x\"s\"in" => """(macrocall @x_str (string-r "s") "in")"""
     "x\"s\"2" => """(macrocall @x_str (string-r "s") 2)"""
     "x\"s\"10.0" => """(macrocall @x_str (string-r "s") 10.0)"""
-    # Cmd macro sufficies
+    # Cmd macro suffixes
     "x`s`y" => """(macrocall @x_cmd (cmdstring-r "s") "y")"""
     "x`s`end" => """(macrocall @x_cmd (cmdstring-r "s") "end")"""
     "x`s`in" => """(macrocall @x_cmd (cmdstring-r "s") "in")"""
diff --git a/test/tokenize.jl b/test/tokenize.jl
index 8913a20c..38ff3568 100644
--- a/test/tokenize.jl
+++ b/test/tokenize.jl
@@ -720,7 +720,7 @@ end
     @test toks(".1..") == [".1"=>K"Float", ".."=>K".."]
     @test toks("0x01..") == ["0x01"=>K"HexInt", ".."=>K".."]
 
-    # Dotted operators and other dotted sufficies
+    # Dotted operators and other dotted suffixes
     @test toks("1234 .+1") == ["1234"=>K"Integer", " "=>K"Whitespace", ".+"=>K"+", "1"=>K"Integer"]
     @test toks("1234.0+1") == ["1234.0"=>K"Float", "+"=>K"+", "1"=>K"Integer"]
     @test toks("1234.0 .+1") == ["1234.0"=>K"Float", " "=>K"Whitespace", ".+"=>K"+", "1"=>K"Integer"]