From 5216bc51d9e014ba4d07cd413277c8a6278b0d6b Mon Sep 17 00:00:00 2001
From: Koki Fushimi
Date: Tue, 11 Jun 2024 00:45:03 +0900
Subject: [PATCH 01/11] Remove `__precompile__(true)`. (#149)

---
 src/YAML.jl | 2 --
 1 file changed, 2 deletions(-)

diff --git a/src/YAML.jl b/src/YAML.jl
index 31c6c59..8dfd90d 100644
--- a/src/YAML.jl
+++ b/src/YAML.jl
@@ -1,5 +1,3 @@
-__precompile__(true)
-
 module YAML
 
 import Base: isempty, length, show, peek

From 612b80d956065044f50ff1d5c5eeae1307ef24a2 Mon Sep 17 00:00:00 2001
From: Koki Fushimi
Date: Tue, 11 Jun 2024 02:07:04 +0900
Subject: [PATCH 02/11] Issue warnings when unknown directives are loaded.
 (#150)

Fix https://github.com/JuliaData/YAML.jl/issues/148.
---
 src/scanner.jl   | 2 ++
 test/runtests.jl | 6 ++++++
 2 files changed, 8 insertions(+)

diff --git a/src/scanner.jl b/src/scanner.jl
index 718095a..ff68673 100644
--- a/src/scanner.jl
+++ b/src/scanner.jl
@@ -850,7 +850,9 @@ function scan_directive(stream::TokenStream)
         value = (tag_handle, tag_prefix)
         end_mark = get_mark(stream)
     else
+        # Otherwise we warn and ignore the directive.
         end_mark = get_mark(stream)
+        @warn """unknown directive name: "$name" at $end_mark. We ignore this."""
         while !in(peek(stream.input), "\0\r\n\u0085\u2028\u2029")
             forwardchars!(stream)
         end
diff --git a/test/runtests.jl b/test/runtests.jl
index 5e6e987..25b1ab5 100755
--- a/test/runtests.jl
+++ b/test/runtests.jl
@@ -443,4 +443,10 @@
     @test_throws YAML.ScannerError YAML.load(""" '''a'' """)
 end
 
+# issue #148 - warn unknown directives
+@testset "issue #148" begin
+    @test (@test_logs (:warn, """unknown directive name: "FOO" at line 1, column 4. We ignore this.""") YAML.load("""%FOO bar baz\n\n--- "foo\"""")) == "foo"
+    @test (@test_logs (:warn, """unknown directive name: "FOO" at line 1, column 4. We ignore this.""") (:warn, """unknown directive name: "BAR" at line 2, column 4. We ignore this.""") YAML.load("""%FOO\n%BAR\n--- foo""")) == "foo"
+end
+
 end # module

From 45f5cd2f43fe4ac424cc4f9615c8bdff560f407c Mon Sep 17 00:00:00 2001
From: Kevin Bonham
Date: Mon, 10 Jun 2024 13:07:44 -0400
Subject: [PATCH 03/11] bump version PRs #150 #149

---
 Project.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Project.toml b/Project.toml
index 5667558..a84abe4 100644
--- a/Project.toml
+++ b/Project.toml
@@ -1,6 +1,6 @@
 name = "YAML"
 uuid = "ddb6d928-2868-570f-bddf-ab3f9cf99eb6"
-version = "0.4.10"
+version = "0.4.11"
 
 [deps]
 Base64 = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f"

From 2d592c15c4ec210a8d67dcee0e158243a748e158 Mon Sep 17 00:00:00 2001
From: Koki Fushimi
Date: Thu, 13 Jun 2024 00:05:32 +0900
Subject: [PATCH 04/11] Add makeshift docstrings. (#169)

---
 src/YAML.jl | 46 ++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 46 insertions(+)

diff --git a/src/YAML.jl b/src/YAML.jl
index 8dfd90d..c5d942d 100644
--- a/src/YAML.jl
+++ b/src/YAML.jl
@@ -1,3 +1,22 @@
+"""
+    YAML
+
+A package for reading and writing YAML.
+https://github.com/JuliaData/YAML.jl
+
+Reading:
+
+* `YAML.load` parses the first YAML document of a YAML file as a Julia object.
+* `YAML.load_all` parses all YAML documents of a YAML file.
+* `YAML.load_file` is the same as `YAML.load`, except it reads from a file.
+* `YAML.load_all_file` is the same as `YAML.load_all`, except it reads from a file.
+
+Writing:
+
+* `YAML.write` prints a Julia object as a YAML file.
+* `YAML.write_file` is the same as `YAML.write`, except it writes to a file.
+* `YAML.yaml` converts a given Julia object to a YAML-formatted string.
+"""
 module YAML
 
 import Base: isempty, length, show, peek
@@ -33,6 +52,12 @@ function _patch_constructors(more_constructors::_constructor, dicttype::_dicttyp
     end
 end
 
 
+"""
+    load(x::Union{AbstractString, IO})
+
+Parse the string or stream `x` as a YAML file, and return the first YAML document as a
+Julia object.
+"""
 load(ts::TokenStream, constructor::Constructor) =
     construct_document(constructor, compose(EventStream(ts)))
@@ -45,6 +70,12 @@ load(ts::TokenStream, more_constructors::_constructor = nothing, multi_construct
 load(input::IO, more_constructors::_constructor = nothing, multi_constructors::Dict = Dict(); kwargs...) =
     load(TokenStream(input), more_constructors, multi_constructors ; kwargs...)
 
+"""
+    YAMLDocIterator
+
+An iterator type to represent multiple YAML documents. You can retrieve each YAML document
+as a Julia object by iterating.
+"""
 mutable struct YAMLDocIterator
     input::IO
     ts::TokenStream
@@ -80,6 +111,11 @@ done(it::YAMLDocIterator, state) = it.next_doc === nothing
 iterate(it::YAMLDocIterator) = next(it, start(it))
 iterate(it::YAMLDocIterator, s) = done(it, s) ? nothing : next(it, s)
 
+"""
+    load_all(x::Union{AbstractString, IO}) -> YAMLDocIterator
+
+Parse the string or stream `x` as a YAML file, and return the corresponding YAML documents.
+"""
 load_all(input::IO, args...; kwargs...) =
     YAMLDocIterator(input, args...; kwargs...)
@@ -89,11 +125,21 @@ load(input::AbstractString, args...; kwargs...) =
 load_all(input::AbstractString, args...; kwargs...) =
     load_all(IOBuffer(input), args...; kwargs...)
 
+"""
+    load_file(filename::AbstractString)
+
+Parse the YAML file `filename`, and return the first YAML document as a Julia object.
+"""
 load_file(filename::AbstractString, args...; kwargs...) =
     open(filename, "r") do input
         load(input, args...; kwargs...)
     end
 
+"""
+    load_all_file(filename::AbstractString) -> YAMLDocIterator
+
+Parse the YAML file `filename`, and return the corresponding YAML documents.
+"""
 load_all_file(filename::AbstractString, args...; kwargs...) =
     open(filename, "r") do input
         load_all(input, args...; kwargs...)

From 41b7bd124c4d3cdf5fc634780ba7f49519f92c64 Mon Sep 17 00:00:00 2001
From: Koki Fushimi
Date: Thu, 13 Jun 2024 00:07:02 +0900
Subject: [PATCH 05/11] Move `include`s to `YAML.jl` and move definitions of
 `Mark` and `Span` (#160)

from `src/scanner.jl` to `src/tokens.jl`. This prevents possible duplicate
includes.
---
 src/YAML.jl     |  6 ++++++
 src/composer.jl |  4 ----
 src/parser.jl   |  2 --
 src/scanner.jl  | 26 --------------------------
 src/tokens.jl   | 20 ++++++++++++++++++++
 5 files changed, 26 insertions(+), 32 deletions(-)

diff --git a/src/YAML.jl b/src/YAML.jl
index c5d942d..2f7cbed 100644
--- a/src/YAML.jl
+++ b/src/YAML.jl
@@ -27,8 +27,14 @@ using Dates
 using Printf
 using StringEncodings
 
+include("queue.jl")
+include("buffered_input.jl")
+include("tokens.jl")
 include("scanner.jl")
+include("events.jl")
 include("parser.jl")
+include("nodes.jl")
+include("resolver.jl")
 include("composer.jl")
 include("constructor.jl")
 include("writer.jl") # write Julia dictionaries to YAML files
diff --git a/src/composer.jl b/src/composer.jl
index b410396..d92c4b2 100644
--- a/src/composer.jl
+++ b/src/composer.jl
@@ -1,8 +1,4 @@
 
-include("nodes.jl")
-include("resolver.jl")
-
-
 struct ComposerError
     context::Union{String, Nothing}
     context_mark::Union{Mark, Nothing}
diff --git a/src/parser.jl b/src/parser.jl
index 182b861..3cc6ed9 100644
--- a/src/parser.jl
+++ b/src/parser.jl
@@ -1,6 +1,4 @@
 
-include("events.jl")
-
 const DEFAULT_TAGS = Dict{String,String}("!" => "!", "!!" => "tag:yaml.org,2002:")
 
 
diff --git a/src/scanner.jl b/src/scanner.jl
index ff68673..a67e155 100644
--- a/src/scanner.jl
+++ b/src/scanner.jl
@@ -1,27 +1,4 @@
 
-include("queue.jl")
-include("buffered_input.jl")
-
-# Position within the document being parsed
-struct Mark
-    index::UInt64
-    line::UInt64
-    column::UInt64
-end
-
-
-function show(io::IO, mark::Mark)
-    @printf(io, "line %d, column %d", mark.line, mark.column)
-end
-
-
-# Where in the stream a particular token lies.
-struct Span
-    start_mark::Mark
-    end_mark::Mark
-end
-
-
 struct SimpleKey
     token_number::UInt64
     required::Bool
@@ -45,9 +22,6 @@ function show(io::IO, error::ScannerError)
 end
 
 
-include("tokens.jl")
-
-
 function detect_encoding(input::IO)::Encoding
     pos = position(input)
     start_bytes = Array{UInt8}(undef, 4)
diff --git a/src/tokens.jl b/src/tokens.jl
index eabd55b..0b279de 100644
--- a/src/tokens.jl
+++ b/src/tokens.jl
@@ -1,4 +1,24 @@
 
+# Position within the document being parsed
+struct Mark
+    index::UInt64
+    line::UInt64
+    column::UInt64
+end
+
+
+function show(io::IO, mark::Mark)
+    @printf(io, "line %d, column %d", mark.line, mark.column)
+end
+
+
+# Where in the stream a particular token lies.
+struct Span
+    start_mark::Mark
+    end_mark::Mark
+end
+
+
 # YAML Tokens.
 # Each token must include at minimum member "span::Span".
 abstract type Token end

From 8180df472e196d873a34a78bb357ab548edec333 Mon Sep 17 00:00:00 2001
From: Koki Fushimi
Date: Thu, 13 Jun 2024 00:16:10 +0900
Subject: [PATCH 06/11] Fix comparisons with `nothing` to check identity with
 `===` or `!==`. (#159)

---
 src/YAML.jl        | 2 +-
 src/composer.jl    | 2 +-
 src/constructor.jl | 2 +-
 src/parser.jl      | 8 ++++----
 4 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/src/YAML.jl b/src/YAML.jl
index 2f7cbed..7e38f0e 100644
--- a/src/YAML.jl
+++ b/src/YAML.jl
@@ -44,7 +44,7 @@ const _dicttype = Union{Type,Function}
 # add a dicttype-aware version of construct_mapping to the constructors
 function _patch_constructors(more_constructors::_constructor, dicttype::_dicttype)
-    if more_constructors == nothing
+    if more_constructors === nothing
         more_constructors = Dict{String,Function}()
     else
         more_constructors = copy(more_constructors) # do not change the outside world
     end
diff --git a/src/composer.jl b/src/composer.jl
index d92c4b2..3e9de4b 100644
--- a/src/composer.jl
+++ b/src/composer.jl
@@ -14,7 +14,7 @@ struct ComposerError
 end
 
 function show(io::IO, error::ComposerError)
-    if error.context != nothing
+    if error.context !== nothing
         print(io, error.context, " at ", error.context_mark, ": ")
     end
     print(io, error.problem, " at ", error.problem_mark)
diff --git a/src/constructor.jl b/src/constructor.jl
index 71d8a69..eeb8e29 100644
--- a/src/constructor.jl
+++ b/src/constructor.jl
@@ -15,7 +15,7 @@ struct ConstructorError
 end
 
 function show(io::IO, error::ConstructorError)
-    if error.context != nothing
+    if error.context !== nothing
         print(io, error.context, " at ", error.context_mark, ": ")
     end
     print(io, error.problem, " at ", error.problem_mark)
diff --git a/src/parser.jl b/src/parser.jl
index 3cc6ed9..b09e1cc 100644
--- a/src/parser.jl
+++ b/src/parser.jl
@@ -17,7 +17,7 @@ struct ParserError
 end
 
 function show(io::IO, error::ParserError)
-    if error.context != nothing
+    if error.context !== nothing
         print(io, error.context, " at ", error.context_mark, ": ")
     end
     print(io, error.problem, " at ", error.problem_mark)
@@ -246,9 +246,9 @@ end
 function __parse_node(token::ScalarToken, stream::EventStream, block, start_mark, end_mark, anchor, tag, implicit)
     forward!(stream.input)
     end_mark = token.span.end_mark
-    if (token.plain && tag == nothing) || tag == "!"
+    if (token.plain && tag === nothing) || tag == "!"
         implicit = true, false
-    elseif tag == nothing
+    elseif tag === nothing
         implicit = false, true
     else
         implicit = false, false
@@ -345,7 +345,7 @@ function _parse_node(token, stream::EventStream, block, indentless_sequence)
     end
 
     token = peek(stream.input)
-    if start_mark == nothing
+    if start_mark === nothing
         start_mark = end_mark = token.span.start_mark
     end

From b47968b698bb9ac651236bb1ff19a13baff74ec7 Mon Sep 17 00:00:00 2001
From: Koki Fushimi
Date: Thu, 13 Jun 2024 00:20:20 +0900
Subject: [PATCH 07/11] Replace the functions that were removed long ago with
 new ones. (#165)

---
 src/scanner.jl | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/scanner.jl b/src/scanner.jl
index a67e155..c92b60d 100644
--- a/src/scanner.jl
+++ b/src/scanner.jl
@@ -1551,7 +1551,7 @@ function scan_uri_escapes(stream::TokenStream, name::String, start_mark::Mark)
                                   get_mark(stream)))
             end
         end
-        push!(bytes, char(parse_hex(prefix(stream.input, 2))))
+        push!(bytes, Char(parse(Int, prefix(stream.input, 2), base=16)))
         forwardchars!(stream, 2)
     end

From d2c2ed72bfc99bbafa35052517d0418ac07b1858 Mon Sep 17 00:00:00 2001
From: Koki Fushimi
Date: Thu, 13 Jun 2024 00:20:46 +0900
Subject: [PATCH 08/11] Change `Resolver` to immutable because there is no
 reason for it to be mutable. (#166)

---
 src/resolver.jl | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/resolver.jl b/src/resolver.jl
index 9296d39..ff42f05 100644
--- a/src/resolver.jl
+++ b/src/resolver.jl
@@ -58,7 +58,7 @@ const default_implicit_resolvers =
 ]
 
 
-mutable struct Resolver
+struct Resolver
     implicit_resolvers::Vector{Tuple{String,Regex}}
 
     function Resolver()

From 5531dedde6829c9873c5d455bd482a5d7dca231d Mon Sep 17 00:00:00 2001
From: Koki Fushimi
Date: Thu, 13 Jun 2024 00:21:31 +0900
Subject: [PATCH 09/11] Decompose the expression of the `while` loop for
 readability. (#167)

---
 src/composer.jl | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/src/composer.jl b/src/composer.jl
index 3e9de4b..1525a5c 100644
--- a/src/composer.jl
+++ b/src/composer.jl
@@ -130,7 +130,9 @@ function _compose_sequence_node(start_event::SequenceStartEvent, composer, ancho
         composer.anchors[anchor] = node
     end
 
-    while (event = peek(composer.input)) !== nothing
+    while true
+        event = peek(composer.input)
+        event === nothing && break
         __compose_sequence_node(event, composer, node) || break
     end

From cadc9c5fb9c53d6e2c5b5bc5f6526f88223d23e4 Mon Sep 17 00:00:00 2001
From: Koki Fushimi
Date: Thu, 13 Jun 2024 02:43:06 +0900
Subject: [PATCH 10/11] Fix a bug in `parse_document_content`. (#162)

* Fix a bug in `parse_document_content`.

`peek(stream.input)` is a token instance, so it should be
`in(typeof(peek(stream.input)), [A, B, C, D])`. However, it is better to use
`isa` and `Union`, as I mentioned in
https://github.com/JuliaData/YAML.jl/pull/161.

* Update src/parser.jl

Co-authored-by: Kevin Bonham

* Add a test.
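To make the bug concrete, here is a minimal sketch with placeholder types
(mirroring the `A, B, C, D` above; these are not the actual token types):

    struct A end
    struct B end
    tok = A()
    # Membership of an *instance* in a vector of *types* is always false,
    # so the old branch was never taken:
    in(tok, [A, B])      # false, even though tok is an A
    # Asking whether the instance is one of those types is what was intended:
    tok isa Union{A, B}  # true

The `Union` form also avoids building a temporary vector on every call.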
---------

Co-authored-by: Kevin Bonham
---
 src/parser.jl    | 2 +-
 test/runtests.jl | 5 +++++
 2 files changed, 6 insertions(+), 1 deletion(-)

diff --git a/src/parser.jl b/src/parser.jl
index b09e1cc..a8ae805 100644
--- a/src/parser.jl
+++ b/src/parser.jl
@@ -212,7 +212,7 @@ end
 
 
 function parse_document_content(stream::EventStream)
-    if in(peek(stream.input), [DirectiveToken, DocumentStartToken, DocumentEndToken,StreamEndToken])
+    if peek(stream.input) isa Union{DirectiveToken, DocumentStartToken, DocumentEndToken, StreamEndToken}
         event = process_empty_scalar(stream, peek(stream.input).span.start_mark)
         stream.state = pop!(stream.states)
         event
diff --git a/test/runtests.jl b/test/runtests.jl
index 25b1ab5..335c70e 100755
--- a/test/runtests.jl
+++ b/test/runtests.jl
@@ -449,4 +449,9 @@ end
     @test (@test_logs (:warn, """unknown directive name: "FOO" at line 1, column 4. We ignore this.""") (:warn, """unknown directive name: "BAR" at line 2, column 4. We ignore this.""") YAML.load("""%FOO\n%BAR\n--- foo""")) == "foo"
 end
 
+# issue #144
+@testset "issue #144" begin
+    @test YAML.load("---") === nothing
+end
+
 end # module

From 3f35981aa920b350902b8f12a4934b5e1ad6305b Mon Sep 17 00:00:00 2001
From: Koki Fushimi
Date: Thu, 13 Jun 2024 06:06:04 +0900
Subject: [PATCH 11/11] Refactoring by using `isa` (#161)

* Use `isa` to compare the types of variables.

* Change to use the expressions like `x isa Union{A, B, C}`. It is better
  in terms of performance.
---
 src/composer.jl    | 10 +++----
 src/constructor.jl |  2 +-
 src/parser.jl      | 68 +++++++++++++++++++++++-----------------------
 3 files changed, 40 insertions(+), 40 deletions(-)

diff --git a/src/composer.jl b/src/composer.jl
index 1525a5c..135c44b 100644
--- a/src/composer.jl
+++ b/src/composer.jl
@@ -30,21 +30,21 @@ end
 
 function compose(events)
     composer = Composer(events, Dict{String, Node}(), Resolver())
-    @assert typeof(forward!(composer.input)) == StreamStartEvent
+    @assert forward!(composer.input) isa StreamStartEvent
     node = compose_document(composer)
-    if typeof(peek(composer.input)) == StreamEndEvent
+    if peek(composer.input) isa StreamEndEvent
         forward!(composer.input)
     else
-        @assert typeof(peek(composer.input)) == DocumentStartEvent
+        @assert peek(composer.input) isa DocumentStartEvent
     end
     node
 end
 
 
 function compose_document(composer::Composer)
-    @assert typeof(forward!(composer.input)) == DocumentStartEvent
+    @assert forward!(composer.input) isa DocumentStartEvent
     node = compose_node(composer)
-    @assert typeof(forward!(composer.input)) == DocumentEndEvent
+    @assert forward!(composer.input) isa DocumentEndEvent
     empty!(composer.anchors)
     node
 end
diff --git a/src/constructor.jl b/src/constructor.jl
index eeb8e29..711ced7 100644
--- a/src/constructor.jl
+++ b/src/constructor.jl
@@ -147,7 +147,7 @@ function flatten_mapping(node::MappingNode)
             elseif value_node isa SequenceNode
                 submerge = []
                 for subnode in value_node.value
-                    if typeof(subnode) != MappingNode
+                    if !(subnode isa MappingNode)
                         throw(ConstructorError("while constructing a mapping",
                                                node.start_mark,
                                                "expected a mapping node, but found $(typeof(subnode))",
diff --git a/src/parser.jl b/src/parser.jl
index a8ae805..ece2432 100644
--- a/src/parser.jl
+++ b/src/parser.jl
@@ -81,7 +81,7 @@ end
 function process_directives(stream::EventStream)
     stream.yaml_version = nothing
     stream.tag_handles = Dict{String, String}()
-    while typeof(peek(stream.input)) == DirectiveToken
+    while peek(stream.input) isa DirectiveToken
         token = forward!(stream.input)
         if token.name == "YAML"
             if stream.yaml_version !== nothing
@@ -136,11 +136,11 @@ end
 function parse_implicit_document_start(stream::EventStream)
     token = peek(stream.input)
     # Parse a byte order mark
-    if typeof(token) == ByteOrderMarkToken
+    if token isa ByteOrderMarkToken
         forward!(stream.input)
         token = peek(stream.input)
     end
-    if !in(typeof(token), [DirectiveToken, DocumentStartToken, StreamEndToken])
+    if !(token isa Union{DirectiveToken, DocumentStartToken, StreamEndToken})
         stream.tag_handles = DEFAULT_TAGS
         event = DocumentStartEvent(token.span.start_mark, token.span.start_mark,
                                    false)
@@ -157,22 +157,22 @@ end
 function parse_document_start(stream::EventStream)
     # Parse any extra document end indicators.
-    while typeof(peek(stream.input)) == DocumentEndToken
+    while peek(stream.input) isa DocumentEndToken
         stream.input = Iterators.rest(stream.input)
     end
 
     token = peek(stream.input)
     # Parse a byte order mark if it exists
-    if typeof(token) == ByteOrderMarkToken
+    if token isa ByteOrderMarkToken
         forward!(stream.input)
         token = peek(stream.input)
     end
 
     # Parse explicit document.
-    if typeof(token) != StreamEndToken
+    if !(token isa StreamEndToken)
         start_mark = token.span.start_mark
         version, tags = process_directives(stream)
-        if typeof(peek(stream.input)) != DocumentStartToken
+        if !(peek(stream.input) isa DocumentStartToken)
             throw(ParserError(nothing, nothing,
                 "expected '<document start>' but found $(typeof(token))"))
         end
@@ -198,7 +198,7 @@ function parse_document_end(stream::EventStream)
     token = peek(stream.input)
     start_mark = end_mark = token.span.start_mark
     explicit = false
-    if typeof(token) == DocumentEndToken
+    if token isa DocumentEndToken
         forward!(stream.input)
         end_mark = token.span.end_mark
         explicit = true
@@ -305,25 +305,25 @@ function _parse_node(token, stream::EventStream, block, indentless_sequence)
     anchor = nothing
     tag = nothing
     start_mark = end_mark = tag_mark = nothing
-    if typeof(token) == AnchorToken
+    if token isa AnchorToken
         forward!(stream.input)
         start_mark = token.span.start_mark
         end_mark = token.span.end_mark
         anchor = token.value
         token = peek(stream.input)
-        if typeof(token) == TagToken
+        if token isa TagToken
             forward!(stream.input)
             tag_mark = token.span.start_mark
             end_mark = token.span.end_mark
             tag = token.value
         end
-    elseif typeof(token) == TagToken
+    elseif token isa TagToken
         forward!(stream.input)
         start_mark = token.span.start_mark
         end_mark = token.span.end_mark
         tag = token.value
         token = peek(stream.input)
-        if typeof(token) == AnchorToken
+        if token isa AnchorToken
             forward!(stream.input)
             end_mark = token.end_mark
             anchor = token.value
@@ -351,7 +351,7 @@ function _parse_node(token, stream::EventStream, block, indentless_sequence)
     event = nothing
     implicit = tag === nothing || tag == "!"
-    if indentless_sequence && typeof(token) == BlockEntryToken
+    if indentless_sequence && token isa BlockEntryToken
         end_mark = token.span.end_mark
         stream.state = parse_indentless_sequence_entry
         event = SequenceStartEvent(start_mark, end_mark, anchor, tag, implicit,
@@ -377,9 +377,9 @@ end
 
 function parse_block_sequence_entry(stream::EventStream)
     token = peek(stream.input)
-    if typeof(token) == BlockEntryToken
+    if token isa BlockEntryToken
         forward!(stream.input)
-        if !in(typeof(peek(stream.input)), [BlockEntryToken, BlockEndToken])
+        if !(peek(stream.input) isa Union{BlockEntryToken, BlockEndToken})
            push!(stream.states, parse_block_sequence_entry)
            return parse_block_node(stream)
        else
@@ -388,7 +388,7 @@ function parse_block_sequence_entry(stream::EventStream)
        end
    end
 
-    if typeof(token) != BlockEndToken
+    if !(token isa BlockEndToken)
        throw(ParserError("while parsing a block collection", stream.marks[end],
                          "expected <block end>, but found $(typeof(token))",
                          token.span.start_mark))
@@ -403,9 +403,9 @@ end
 
 function parse_indentless_sequence_entry(stream::EventStream)
     token = peek(stream.input)
-    if typeof(token) == BlockEntryToken
+    if token isa BlockEntryToken
         forward!(stream.input)
-        if !in(typeof(peek(stream.input)), [BlockEntryToken, KeyToken, ValueToken, BlockEndToken])
+        if !(peek(stream.input) isa Union{BlockEntryToken, KeyToken, ValueToken, BlockEndToken})
            push!(stream.states, parse_indentless_sequence_entry)
            return parse_block_node(stream)
        else
@@ -428,9 +428,9 @@ end
 
 function parse_block_mapping_key(stream::EventStream)
     token = peek(stream.input)
-    if typeof(token) == KeyToken
+    if token isa KeyToken
         forward!(stream.input)
-        if !in(typeof(peek(stream.input)), [KeyToken, ValueToken, BlockEndToken])
+        if !(peek(stream.input) isa Union{KeyToken, ValueToken, BlockEndToken})
            push!(stream.states, parse_block_mapping_value)
            return parse_block_node_or_indentless_sequence(stream)
        else
@@ -439,7 +439,7 @@ function parse_block_mapping_key(stream::EventStream)
        end
    end
 
-    if typeof(token) != BlockEndToken
+    if !(token isa BlockEndToken)
        throw(ParserError("while parsing a block mapping", stream.marks[end],
                          "expected <block end>, but found $(typeof(token))",
                          token.span.start_mark))
@@ -454,9 +454,9 @@ end
 
 function parse_block_mapping_value(stream::EventStream)
     token = peek(stream.input)
-    if typeof(token) == ValueToken
+    if token isa ValueToken
         forward!(stream.input)
-        if !in(typeof(peek(stream.input)), [KeyToken, ValueToken, BlockEndToken])
+        if !(peek(stream.input) isa Union{KeyToken, ValueToken, BlockEndToken})
            push!(stream.states, parse_block_mapping_key)
            parse_block_node_or_indentless_sequence(stream)
        else
@@ -485,7 +485,7 @@ end
 
 function _parse_flow_sequence_entry(token::Any, stream::EventStream, first_entry=false)
     if !first_entry
-        if typeof(token) == FlowEntryToken
+        if token isa FlowEntryToken
             forward!(stream.input)
         else
            throw(ParserError("while parsing a flow sequence",
@@ -515,7 +515,7 @@ end
 
 function parse_flow_sequence_entry_mapping_key(stream::EventStream)
     token = forward!(stream.input)
-    if !in(typeof(token), [ValueToken, FlowEntryToken, FlowSequenceEndToken])
+    if !(token isa Union{ValueToken, FlowEntryToken, FlowSequenceEndToken})
         push!(stream.states, parse_flow_sequence_entry_mapping_value)
         parse_flow_node(stream)
     else
@@ -527,9 +527,9 @@ end
 
 function parse_flow_sequence_entry_mapping_value(stream::EventStream)
     token = peek(stream.input)
-    if typeof(token) == ValueToken
+    if token isa ValueToken
         forward!(stream.input)
-        if !in(typeof(peek(stream.input)), [FlowEntryToken, FlowSequenceEndToken])
+        if !(peek(stream.input) isa Union{FlowEntryToken, FlowSequenceEndToken})
            push!(stream.states, parse_flow_sequence_entry_mapping_end)
            parse_flow_node(stream)
        else
@@ -559,9 +559,9 @@ end
 
 function parse_flow_mapping_key(stream::EventStream, first_entry=false)
     token = peek(stream.input)
-    if typeof(token) != FlowMappingEndToken
+    if !(token isa FlowMappingEndToken)
         if !first_entry
-            if typeof(token) == FlowEntryToken
+            if token isa FlowEntryToken
                 forward!(stream.input)
             else
                 throw(ParserError("while parsing a flow mapping",
@@ -572,16 +572,16 @@ function parse_flow_mapping_key(stream::EventStream, first_entry=false)
         end
 
         token = peek(stream.input)
-        if typeof(token) == KeyToken
+        if token isa KeyToken
             forward!(stream.input)
-            if !in(typeof(peek(stream.input)), [ValueToken, FlowEntryToken, FlowMappingEndToken])
+            if !(peek(stream.input) isa Union{ValueToken, FlowEntryToken, FlowMappingEndToken})
                 push!(stream.states, parse_flow_mapping_value)
                 return parse_flow_node(stream)
             else
                 stream.state = parse_flow_mapping_value
                 return process_empty_scalar(stream, token.span.end_mark)
             end
-        elseif typeof(token) != FlowMappingEndToken
+        elseif !(token isa FlowMappingEndToken)
             push!(stream.states, parse_flow_mapping_empty_value)
             return parse_flow_node(stream)
         end
@@ -596,9 +596,9 @@ end
 
 function parse_flow_mapping_value(stream::EventStream)
     token = peek(stream.input)
-    if typeof(token) == ValueToken
+    if token isa ValueToken
         forward!(stream.input)
-        if !in(typeof(peek(stream.input)), [FlowEntryToken, FlowMappingEndToken])
+        if !(peek(stream.input) isa Union{FlowEntryToken, FlowMappingEndToken})
            push!(stream.states, parse_flow_mapping_key)
            parse_flow_node(stream)
        else